Compare commits

..

No commits in common. "main" and "ignore-files-v2.1.0" have entirely different histories.

123 changed files with 6389 additions and 5102 deletions

View file

@ -1,3 +1,4 @@
-D clippy::all
-W clippy::nursery -W clippy::nursery
-W clippy::pedantic -W clippy::pedantic
-A clippy::module-name-repetitions -A clippy::module-name-repetitions
@ -9,4 +10,3 @@
-A clippy::default-trait-access -A clippy::default-trait-access
-A clippy::enum-glob-use -A clippy::enum-glob-use
-A clippy::option-if-let-else -A clippy::option-if-let-else
-A clippy::blocks-in-conditions

View file

@ -46,7 +46,7 @@ jobs:
echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%" echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%"
- name: Configure caching - name: Configure caching
uses: actions/cache@v4 uses: actions/cache@v3
with: with:
path: | path: |
~/.cargo/registry/index/ ~/.cargo/registry/index/

View file

@ -4,6 +4,7 @@
app_name: "watchexec", app_name: "watchexec",
app_version: $version, app_version: $version,
changelog_title: "CLI \($version)", changelog_title: "CLI \($version)",
changelog_body: $changelog,
artifacts: [ $files | split("\n") | .[] | { artifacts: [ $files | split("\n") | .[] | {
name: ., name: .,
kind: (if (. | test("[.](deb|rpm)$")) then "installer" else "executable-zip" end), kind: (if (. | test("[.](deb|rpm)$")) then "installer" else "executable-zip" end),

View file

@ -1,6 +1,7 @@
name: CLI Release name: CLI Release
on: on:
workflow_call:
workflow_dispatch: workflow_dispatch:
push: push:
tags: tags:
@ -16,6 +17,8 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
outputs: outputs:
cli_version: ${{ steps.version.outputs.cli_version }} cli_version: ${{ steps.version.outputs.cli_version }}
release_notes: ${{ fromJSON(steps.notes.outputs.notes_json || 'null') }}
announce: ${{ steps.announce.outputs.announce || '' }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Extract version - name: Extract version
@ -33,6 +36,40 @@ jobs:
echo "cli_version=$version" >> $GITHUB_OUTPUT echo "cli_version=$version" >> $GITHUB_OUTPUT
- name: Extract release notes
if: github.event.head_commit.message
id: notes
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPO: ${{ github.repository }}
release_commit: ${{ github.event.head_commit.message }}
run: |
set -x
set +eo pipefail
if [[ -z "$release_commit" ]]; then
echo "notes_json=null" >> $GITHUB_OUTPUT
exit
fi
release_pr=$(head -n1 <<< "$release_commit" | grep -oP '(?<=[(]#)\d+(?=[)])')
if [[ -z "$release_pr" ]]; then
echo "notes_json=null" >> $GITHUB_OUTPUT
exit
fi
gh \
pr --repo "$GITHUB_REPO" \
view "$release_pr" \
--json body \
--jq '"notes_json=\((.body | split("### Release notes")[1] // "") | tojson)"' \
>> $GITHUB_OUTPUT
- name: Make a new announcement post
id: announce
if: endsWith(steps.version.outputs.cli_version, '.0')
run: echo "announce=Announcements" >> $GITHUB_OUTPUT
build: build:
strategy: strategy:
matrix: matrix:
@ -104,10 +141,10 @@ jobs:
experimental: false experimental: false
- name: mac-arm64 - name: mac-arm64
os: macos-latest os: macos-11.0
target: aarch64-apple-darwin target: aarch64-apple-darwin
cross: true cross: true
experimental: false experimental: true
- name: windows-x86-64 - name: windows-x86-64
os: windows-latest os: windows-latest
@ -142,7 +179,7 @@ jobs:
echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%" echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%"
- name: Configure caching - name: Configure caching
uses: actions/cache@v4 uses: actions/cache@v3
with: with:
path: | path: |
~/.cargo/registry/index/ ~/.cargo/registry/index/
@ -196,29 +233,19 @@ jobs:
with: with:
tool: cross tool: cross
- name: Build - name: Build (cargo)
shell: bash if: "!matrix.cross"
run: | run: cargo build --package watchexec-cli --release --locked --target ${{ matrix.target }}
${{ matrix.cross && 'cross' || 'cargo' }} build \
-p watchexec-cli \ - name: Build (cross)
--release --locked \ if: matrix.cross
--target ${{ matrix.target }} run: cross build --package watchexec-cli --release --locked --target ${{ matrix.target }}
- name: Make manpage - name: Make manpage
shell: bash run: cargo run -p watchexec-cli -- --manual > doc/watchexec.1
run: |
cargo run -p watchexec-cli \
${{ (!matrix.cross) && '--release --target' || '' }} \
${{ (!matrix.cross) && matrix.target || '' }} \
--locked -- --manual > doc/watchexec.1
- name: Make completions - name: Make completions
shell: bash run: bin/completions
run: |
bin/completions \
${{ (!matrix.cross) && '--release --target' || '' }} \
${{ (!matrix.cross) && matrix.target || '' }} \
--locked
- name: Package - name: Package
shell: bash shell: bash
@ -256,9 +283,9 @@ jobs:
shell: bash shell: bash
run: 7z a "$dst.zip" "$dst" run: 7z a "$dst.zip" "$dst"
- uses: actions/upload-artifact@v4 - uses: actions/upload-artifact@v3
with: with:
name: ${{ matrix.name }} name: builds
retention-days: 1 retention-days: 1
path: | path: |
watchexec-*.tar.xz watchexec-*.tar.xz
@ -281,14 +308,15 @@ jobs:
with: with:
tool: b3sum tool: b3sum
- uses: actions/download-artifact@v4 - uses: actions/download-artifact@v3
with: with:
merge-multiple: true name: builds
- name: Dist manifest - name: Dist manifest
run: | run: |
jq -ncf .github/workflows/dist-manifest.jq \ jq -ncf .github/workflows/dist-manifest.jq \
--arg version "${{ needs.info.outputs.cli_version }}" \ --arg version "${{ needs.info.outputs.cli_version }}" \
--arg changelog "${{ needs.info.outputs.release_notes }}" \
--arg files "$(ls watchexec-*)" \ --arg files "$(ls watchexec-*)" \
> dist-manifest.json > dist-manifest.json
@ -306,11 +334,13 @@ jobs:
sha512sum $file | cut -d ' ' -f1 > "$file.sha512" sha512sum $file | cut -d ' ' -f1 > "$file.sha512"
done done
- uses: softprops/action-gh-release@9d7c94cfd0a1f3ed45544c887983e9fa900f0564 - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844
with: with:
tag_name: v${{ needs.info.outputs.cli_version }} tag_name: v${{ needs.info.outputs.cli_version }}
name: CLI v${{ needs.info.outputs.cli_version }} name: CLI v${{ needs.info.outputs.cli_version }}
body: ${{ needs.info.outputs.release_notes }}
append_body: true append_body: true
discussion_category_name: ${{ needs.info.outputs.announce }}
files: | files: |
dist-manifest.json dist-manifest.json
watchexec-*.tar.xz watchexec-*.tar.xz

61
.github/workflows/release-pr.ejs vendored Normal file
View file

@ -0,0 +1,61 @@
<!-- <%- JSON.stringify({ "release-pr": { v2: { crates, version } } }) %> -->
This is a release PR for **<%= crate.name %>** version **<%= version.actual %>**<%
if (version.actual != version.desired) {
%> (performing a <%= version.desired %> bump).<%
} else {
%>.<%
}
%>
**Use squash merge.**
<% if (crate.name == "watchexec-cli") { %>
Upon merging, this will automatically create the tag `v<%= version.actual %>`, build the CLI, and create a GitHub release.
You will still need to manually publish the cargo crate:
```
$ git switch main
$ git pull
$ git switch --detach v<%= version.actual %>
$ cargo publish -p <%= crate.name %>
```
<% } else { %>
Remember to review the crate's changelog!
Upon merging, this will create the tag `<%= crate.name %>-v<%= version.actual %>`.
You will still need to manually publish the cargo crate:
```
$ git switch main
$ git pull
$ git switch --detach <%= crate.name %>-v<%= version.actual %>
$ cargo publish -p <%= crate.name %>
```
<% } %>
To trigger builds initially: either close and then immediately re-open this PR once, or push to the branch (perhaps with edits to the README.md or CHANGELOG.md!).
<% if (pr.releaseNotes) { %>
---
_Edit release notes into the section below:_
<!-- do not change or remove this heading -->
<% if (crate.name == "watchexec-cli") { %>
### Release notes
_Software development often involves running the same commands over and over. Boring! Watchexec is a simple, standalone tool that watches a path and runs a command whenever it detects modifications. Install it today with [`cargo-binstall watchexec-cli`](https://github.com/cargo-bins/cargo-binstall), from the binaries below, find it [in your favourite package manager](https://github.com/watchexec/watchexec/blob/main/doc/packages.md), or build it from source with `cargo install watchexec-cli`._
#### In this release:
-
#### Other changes:
-
<% } else { %>
### Changelog
-
<% } %>
<% } %>

54
.github/workflows/release-pr.yml vendored Normal file
View file

@ -0,0 +1,54 @@
name: Open a release PR
on:
workflow_dispatch:
inputs:
crate:
description: Crate to release
required: true
type: choice
options:
- cli
- lib
- bosion
- events
- ignore-files
- project-origins
- signals
- supervisor
- filterer/globset
- filterer/ignore
- filterer/tagged
version:
description: Version to release
required: true
type: string
default: patch
jobs:
make-release-pr:
permissions:
id-token: write # Enable OIDC
pull-requests: write
contents: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: chainguard-dev/actions/setup-gitsign@main
- name: Install cargo-release
uses: taiki-e/install-action@v2
with:
tool: cargo-release
- uses: cargo-bins/release-pr@v2
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
version: ${{ inputs.version }}
crate-path: crates/${{ inputs.crate }}
pr-release-notes: ${{ inputs.crate == 'cli' }}
pr-label: release
pr-template-file: .github/workflows/release-pr.ejs
env:
GITSIGN_LOG: /tmp/gitsign.log
- run: cat /tmp/gitsign.log
if: ${{ failure() }}

45
.github/workflows/release-tag.yml vendored Normal file
View file

@ -0,0 +1,45 @@
name: Tag a release
on:
push:
branches:
- main
tags-ignore:
- "*"
jobs:
make-tag:
runs-on: ubuntu-latest
# because we control the release PR title and only allow squashes,
# PRs that are named `release: {crate-name} v{version}` will get tagged!
# the commit message will look like: `release: {crate-name} v{version} (#{pr-number})`
if: "startsWith(github.event.head_commit.message, 'release: ')"
steps:
- name: Extract tag from commit message
env:
COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
run: |
set -euxo pipefail
message="$(head -n1 <<< "$COMMIT_MESSAGE")"
crate="$(cut -d ' ' -f 2 <<< "${message}")"
version="$(cut -d ' ' -f 3 <<< "${message}")"
if [[ "$crate" == "watchexec-cli" ]]; then
echo "CUSTOM_TAG=${version}" >> $GITHUB_ENV
else
echo "CUSTOM_TAG=${crate}-${version}" >> $GITHUB_ENV
fi
- uses: actions/checkout@v4
- name: Push release tag
id: tag_version
uses: mathieudutour/github-tag-action@v6.1
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
custom_tag: ${{ env.CUSTOM_TAG }}
tag_prefix: ''
release-cli:
needs: make-tag
if: "startsWith(github.event.head_commit.message, 'release: watchexec-cli v')"
uses: ./.github/workflows/release-cli.yml
secrets: inherit

View file

@ -50,7 +50,7 @@ jobs:
echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%" echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%"
- name: Cargo caching - name: Cargo caching
uses: actions/cache@v4 uses: actions/cache@v3
with: with:
path: | path: |
~/.cargo/registry/index/ ~/.cargo/registry/index/
@ -62,17 +62,15 @@ jobs:
${{ runner.os }}-cargo- ${{ runner.os }}-cargo-
- name: Compilation caching - name: Compilation caching
uses: actions/cache@v4 uses: actions/cache@v3
with: with:
path: target/ path: target/
key: ${{ runner.os }}-target-stable-${{ hashFiles('**/Cargo.lock') }} key: ${{ runner.os }}-target-stable-${{ hashFiles('**/Cargo.lock') }}
- name: Run test suite - name: Run test suite
run: cargo test run: cargo test ${{ env.flags }}
- name: Run watchexec-events integration tests
run: cargo test -p watchexec-events -F serde
- name: Check that CLI runs - name: Check that CLI runs
run: cargo run -p watchexec-cli -- -1 echo run: cargo run ${{ env.flags }} -p watchexec-cli -- -1 echo
- name: Install coreutils on mac - name: Install coreutils on mac
if: ${{ matrix.platform == 'macos' }} if: ${{ matrix.platform == 'macos' }}
@ -91,7 +89,7 @@ jobs:
shell: bash shell: bash
- name: Generate manpage - name: Generate manpage
run: cargo run -p watchexec-cli -- --manual > doc/watchexec.1 run: cargo run ${{ env.flags }} -p watchexec-cli -- --manual > doc/watchexec.1
- name: Check that manpage is up to date - name: Check that manpage is up to date
run: git diff --exit-code -- doc/ run: git diff --exit-code -- doc/
@ -120,7 +118,7 @@ jobs:
tool: cross tool: cross
- name: Cargo caching - name: Cargo caching
uses: actions/cache@v4 uses: actions/cache@v3
with: with:
path: | path: |
~/.cargo/registry/index/ ~/.cargo/registry/index/

View file

@ -3,8 +3,8 @@ message: |
If you use this software, please cite it using these metadata. If you use this software, please cite it using these metadata.
title: "Watchexec: a tool to react to filesystem changes, and a crate ecosystem to power it" title: "Watchexec: a tool to react to filesystem changes, and a crate ecosystem to power it"
version: "2.2.0" version: "1.25.0"
date-released: 2024-10-14 date-released: 2024-01-01
repository-code: https://github.com/watchexec/watchexec repository-code: https://github.com/watchexec/watchexec
license: Apache-2.0 license: Apache-2.0

View file

@ -44,11 +44,18 @@ Apart from that, welcome and thank you for your time!
## Releasing ## Releasing
``` A release goes like this:
cargo release -p crate-name --execute patch # or minor, major
```
When a CLI release is done, the [release notes](https://github.com/watchexec/watchexec/releases) should be edited with the changelog. 1. A maintainer launches the ["Open a release PR" workflow](https://github.com/watchexec/watchexec/actions/workflows/release-pr.yml).
2. A PR bumping the chosen crate's version is opened. Maintainers may then add stuff to it if
needed, like changelog entries for library crates. Release notes for CLI releases go directly on
the PR.
3. When the PR is merged, the release is tagged. CLI releases also get built and distributed.
4. A maintainer then manually publishes the crate (automated publishing is blocked on crates.io
implementing [scoped tokens](https://github.com/rust-lang/crates.io/issues/5443)).
### Release order ### Release order

2418
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -8,6 +8,7 @@ members = [
"crates/supervisor", "crates/supervisor",
"crates/filterer/globset", "crates/filterer/globset",
"crates/filterer/ignore", "crates/filterer/ignore",
"crates/filterer/tagged",
"crates/bosion", "crates/bosion",
"crates/ignore-files", "crates/ignore-files",
"crates/project-origins", "crates/project-origins",

View file

@ -1,7 +1,7 @@
#!/bin/sh #!/bin/sh
cargo run -p watchexec-cli $* -- --completions bash > completions/bash cargo run -p watchexec-cli -- --completions bash > completions/bash
cargo run -p watchexec-cli $* -- --completions elvish > completions/elvish cargo run -p watchexec-cli -- --completions elvish > completions/elvish
cargo run -p watchexec-cli $* -- --completions fish > completions/fish cargo run -p watchexec-cli -- --completions fish > completions/fish
cargo run -p watchexec-cli $* -- --completions nu > completions/nu cargo run -p watchexec-cli -- --completions nu > completions/nu
cargo run -p watchexec-cli $* -- --completions powershell > completions/powershell cargo run -p watchexec-cli -- --completions powershell > completions/powershell
cargo run -p watchexec-cli $* -- --completions zsh > completions/zsh cargo run -p watchexec-cli -- --completions zsh > completions/zsh

View file

@ -19,7 +19,7 @@ _watchexec() {
case "${cmd}" in case "${cmd}" in
watchexec) watchexec)
opts="-w -W -F -c -o -r -s -d -p -n -E -1 -N -q -e -f -j -i -v -h -V --watch --watch-non-recursive --watch-file --clear --on-busy-update --restart --signal --stop-signal --stop-timeout --map-signal --debounce --stdin-quit --no-vcs-ignore --no-project-ignore --no-global-ignore --no-default-ignore --no-discover-ignore --ignore-nothing --postpone --delay-run --poll --shell --no-environment --emit-events-to --only-emit-events --env --no-process-group --wrap-process --notify --color --timings --quiet --bell --project-origin --workdir --exts --filter --filter-file --filter-prog --ignore --ignore-file --fs-events --no-meta --print-events --manual --completions --verbose --log-file --help --version [COMMAND]..." opts="-w -c -o -W -r -s -k -d -p -n -E -1 -N -q -e -f -i -v -h -V --watch --clear --on-busy-update --watch-when-idle --restart --signal --kill --stop-signal --stop-timeout --map-signal --debounce --stdin-quit --no-vcs-ignore --no-project-ignore --no-global-ignore --no-default-ignore --no-discover-ignore --ignore-nothing --postpone --delay-run --poll --shell --no-shell-long --no-environment --emit-events-to --only-emit-events --env --no-process-group --notify --color --timings --quiet --bell --project-origin --workdir --exts --filter --filter-file --ignore --ignore-file --fs-events --no-meta --print-events --verbose --log-file --manual --completions --help --version [COMMAND]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0 return 0
@ -33,22 +33,6 @@ _watchexec() {
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
return 0 return 0
;; ;;
--watch-non-recursive)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-W)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--watch-file)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-F)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--clear) --clear)
COMPREPLY=($(compgen -W "clear reset" -- "${cur}")) COMPREPLY=($(compgen -W "clear reset" -- "${cur}"))
return 0 return 0
@ -117,26 +101,16 @@ _watchexec() {
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
return 0 return 0
;; ;;
--wrap-process)
COMPREPLY=($(compgen -W "group session none" -- "${cur}"))
return 0
;;
--color) --color)
COMPREPLY=($(compgen -W "auto always never" -- "${cur}")) COMPREPLY=($(compgen -W "auto always never" -- "${cur}"))
return 0 return 0
;; ;;
--project-origin) --project-origin)
COMPREPLY=() COMPREPLY=($(compgen -f "${cur}"))
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o plusdirs
fi
return 0 return 0
;; ;;
--workdir) --workdir)
COMPREPLY=() COMPREPLY=($(compgen -f "${cur}"))
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o plusdirs
fi
return 0 return 0
;; ;;
--exts) --exts)
@ -156,25 +130,6 @@ _watchexec() {
return 0 return 0
;; ;;
--filter-file) --filter-file)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--filter-prog)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-j)
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
return 0 return 0
;; ;;
@ -187,32 +142,21 @@ _watchexec() {
return 0 return 0
;; ;;
--ignore-file) --ignore-file)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0 return 0
;; ;;
--fs-events) --fs-events)
COMPREPLY=($(compgen -W "access create remove rename modify metadata" -- "${cur}")) COMPREPLY=($(compgen -W "access create remove rename modify metadata" -- "${cur}"))
return 0 return 0
;; ;;
--completions)
COMPREPLY=($(compgen -W "bash elvish fish nu powershell zsh" -- "${cur}"))
return 0
;;
--log-file) --log-file)
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
return 0 return 0
;; ;;
--completions)
COMPREPLY=($(compgen -W "bash elvish fish nu powershell zsh" -- "${cur}"))
return 0
;;
*) *)
COMPREPLY=() COMPREPLY=()
;; ;;

View file

@ -20,10 +20,6 @@ set edit:completion:arg-completer[watchexec] = {|@words|
&'watchexec'= { &'watchexec'= {
cand -w 'Watch a specific file or directory' cand -w 'Watch a specific file or directory'
cand --watch 'Watch a specific file or directory' cand --watch 'Watch a specific file or directory'
cand -W 'Watch a specific directory, non-recursively'
cand --watch-non-recursive 'Watch a specific directory, non-recursively'
cand -F 'Watch files and directories from a file'
cand --watch-file 'Watch files and directories from a file'
cand -c 'Clear screen before running command' cand -c 'Clear screen before running command'
cand --clear 'Clear screen before running command' cand --clear 'Clear screen before running command'
cand -o 'What to do when receiving events while the command is running' cand -o 'What to do when receiving events while the command is running'
@ -41,7 +37,6 @@ set edit:completion:arg-completer[watchexec] = {|@words|
cand --emit-events-to 'Configure event emission' cand --emit-events-to 'Configure event emission'
cand -E 'Add env vars to the command' cand -E 'Add env vars to the command'
cand --env 'Add env vars to the command' cand --env 'Add env vars to the command'
cand --wrap-process 'Configure how the process is wrapped'
cand --color 'When to use terminal colours' cand --color 'When to use terminal colours'
cand --project-origin 'Set the project origin' cand --project-origin 'Set the project origin'
cand --workdir 'Set the working directory' cand --workdir 'Set the working directory'
@ -50,16 +45,18 @@ set edit:completion:arg-completer[watchexec] = {|@words|
cand -f 'Filename patterns to filter to' cand -f 'Filename patterns to filter to'
cand --filter 'Filename patterns to filter to' cand --filter 'Filename patterns to filter to'
cand --filter-file 'Files to load filters from' cand --filter-file 'Files to load filters from'
cand -j '[experimental] Filter programs'
cand --filter-prog '[experimental] Filter programs'
cand -i 'Filename patterns to filter out' cand -i 'Filename patterns to filter out'
cand --ignore 'Filename patterns to filter out' cand --ignore 'Filename patterns to filter out'
cand --ignore-file 'Files to load ignores from' cand --ignore-file 'Files to load ignores from'
cand --fs-events 'Filesystem events to filter to' cand --fs-events 'Filesystem events to filter to'
cand --completions 'Generate a shell completions script'
cand --log-file 'Write diagnostic logs to a file' cand --log-file 'Write diagnostic logs to a file'
cand --completions 'Generate a shell completions script'
cand -W 'Deprecated alias for ''--on-busy-update=do-nothing'''
cand --watch-when-idle 'Deprecated alias for ''--on-busy-update=do-nothing'''
cand -r 'Restart the process if it''s still running' cand -r 'Restart the process if it''s still running'
cand --restart 'Restart the process if it''s still running' cand --restart 'Restart the process if it''s still running'
cand -k 'Hidden legacy shorthand for ''--signal=kill'''
cand --kill 'Hidden legacy shorthand for ''--signal=kill'''
cand --stdin-quit 'Exit when stdin closes' cand --stdin-quit 'Exit when stdin closes'
cand --no-vcs-ignore 'Don''t load gitignores' cand --no-vcs-ignore 'Don''t load gitignores'
cand --no-project-ignore 'Don''t load project-local ignores' cand --no-project-ignore 'Don''t load project-local ignores'
@ -69,8 +66,9 @@ set edit:completion:arg-completer[watchexec] = {|@words|
cand --ignore-nothing 'Don''t ignore anything at all' cand --ignore-nothing 'Don''t ignore anything at all'
cand -p 'Wait until first change before running command' cand -p 'Wait until first change before running command'
cand --postpone 'Wait until first change before running command' cand --postpone 'Wait until first change before running command'
cand -n 'Shorthand for ''--shell=none''' cand -n 'Don''t use a shell'
cand --no-environment 'Deprecated shorthand for ''--emit-events=none''' cand --no-shell-long 'Don''t use a shell'
cand --no-environment 'Shorthand for ''--emit-events=none'''
cand --only-emit-events 'Only emit events to stdout, run no commands' cand --only-emit-events 'Only emit events to stdout, run no commands'
cand --no-process-group 'Don''t use a process group' cand --no-process-group 'Don''t use a process group'
cand -1 'Testing only: exit Watchexec after the first run' cand -1 'Testing only: exit Watchexec after the first run'
@ -82,9 +80,9 @@ set edit:completion:arg-completer[watchexec] = {|@words|
cand --bell 'Ring the terminal bell on command completion' cand --bell 'Ring the terminal bell on command completion'
cand --no-meta 'Don''t emit fs events for metadata changes' cand --no-meta 'Don''t emit fs events for metadata changes'
cand --print-events 'Print events that trigger actions' cand --print-events 'Print events that trigger actions'
cand --manual 'Show the manual page'
cand -v 'Set diagnostic log level' cand -v 'Set diagnostic log level'
cand --verbose 'Set diagnostic log level' cand --verbose 'Set diagnostic log level'
cand --manual 'Show the manual page'
cand -h 'Print help (see more with ''--help'')' cand -h 'Print help (see more with ''--help'')'
cand --help 'Print help (see more with ''--help'')' cand --help 'Print help (see more with ''--help'')'
cand -V 'Print version' cand -V 'Print version'

View file

@ -1,8 +1,6 @@
complete -c watchexec -s w -l watch -d 'Watch a specific file or directory' -r -F complete -c watchexec -s w -l watch -d 'Watch a specific file or directory' -r -F
complete -c watchexec -s W -l watch-non-recursive -d 'Watch a specific directory, non-recursively' -r -F complete -c watchexec -s c -l clear -d 'Clear screen before running command' -r -f -a "{clear '',reset ''}"
complete -c watchexec -s F -l watch-file -d 'Watch files and directories from a file' -r -F complete -c watchexec -s o -l on-busy-update -d 'What to do when receiving events while the command is running' -r -f -a "{queue '',do-nothing '',restart '',signal ''}"
complete -c watchexec -s c -l clear -d 'Clear screen before running command' -r -f -a "{clear\t'',reset\t''}"
complete -c watchexec -s o -l on-busy-update -d 'What to do when receiving events while the command is running' -r -f -a "{queue\t'',do-nothing\t'',restart\t'',signal\t''}"
complete -c watchexec -s s -l signal -d 'Send a signal to the process when it\'s still running' -r complete -c watchexec -s s -l signal -d 'Send a signal to the process when it\'s still running' -r
complete -c watchexec -l stop-signal -d 'Signal to send to stop the command' -r complete -c watchexec -l stop-signal -d 'Signal to send to stop the command' -r
complete -c watchexec -l stop-timeout -d 'Time to wait for the command to exit gracefully' -r complete -c watchexec -l stop-timeout -d 'Time to wait for the command to exit gracefully' -r
@ -11,22 +9,22 @@ complete -c watchexec -s d -l debounce -d 'Time to wait for new events before ta
complete -c watchexec -l delay-run -d 'Sleep before running the command' -r complete -c watchexec -l delay-run -d 'Sleep before running the command' -r
complete -c watchexec -l poll -d 'Poll for filesystem changes' -r complete -c watchexec -l poll -d 'Poll for filesystem changes' -r
complete -c watchexec -l shell -d 'Use a different shell' -r complete -c watchexec -l shell -d 'Use a different shell' -r
complete -c watchexec -l emit-events-to -d 'Configure event emission' -r -f -a "{environment\t'',stdio\t'',file\t'',json-stdio\t'',json-file\t'',none\t''}" complete -c watchexec -l emit-events-to -d 'Configure event emission' -r -f -a "{environment '',stdio '',file '',json-stdio '',json-file '',none ''}"
complete -c watchexec -s E -l env -d 'Add env vars to the command' -r complete -c watchexec -s E -l env -d 'Add env vars to the command' -r
complete -c watchexec -l wrap-process -d 'Configure how the process is wrapped' -r -f -a "{group\t'',session\t'',none\t''}" complete -c watchexec -l color -d 'When to use terminal colours' -r -f -a "{auto '',always '',never ''}"
complete -c watchexec -l color -d 'When to use terminal colours' -r -f -a "{auto\t'',always\t'',never\t''}"
complete -c watchexec -l project-origin -d 'Set the project origin' -r -f -a "(__fish_complete_directories)" complete -c watchexec -l project-origin -d 'Set the project origin' -r -f -a "(__fish_complete_directories)"
complete -c watchexec -l workdir -d 'Set the working directory' -r -f -a "(__fish_complete_directories)" complete -c watchexec -l workdir -d 'Set the working directory' -r -f -a "(__fish_complete_directories)"
complete -c watchexec -s e -l exts -d 'Filename extensions to filter to' -r complete -c watchexec -s e -l exts -d 'Filename extensions to filter to' -r
complete -c watchexec -s f -l filter -d 'Filename patterns to filter to' -r complete -c watchexec -s f -l filter -d 'Filename patterns to filter to' -r
complete -c watchexec -l filter-file -d 'Files to load filters from' -r -F complete -c watchexec -l filter-file -d 'Files to load filters from' -r -F
complete -c watchexec -s j -l filter-prog -d '[experimental] Filter programs' -r
complete -c watchexec -s i -l ignore -d 'Filename patterns to filter out' -r complete -c watchexec -s i -l ignore -d 'Filename patterns to filter out' -r
complete -c watchexec -l ignore-file -d 'Files to load ignores from' -r -F complete -c watchexec -l ignore-file -d 'Files to load ignores from' -r -F
complete -c watchexec -l fs-events -d 'Filesystem events to filter to' -r -f -a "{access\t'',create\t'',remove\t'',rename\t'',modify\t'',metadata\t''}" complete -c watchexec -l fs-events -d 'Filesystem events to filter to' -r -f -a "{access '',create '',remove '',rename '',modify '',metadata ''}"
complete -c watchexec -l completions -d 'Generate a shell completions script' -r -f -a "{bash\t'',elvish\t'',fish\t'',nu\t'',powershell\t'',zsh\t''}"
complete -c watchexec -l log-file -d 'Write diagnostic logs to a file' -r -F complete -c watchexec -l log-file -d 'Write diagnostic logs to a file' -r -F
complete -c watchexec -l completions -d 'Generate a shell completions script' -r -f -a "{bash '',elvish '',fish '',nu '',powershell '',zsh ''}"
complete -c watchexec -s W -l watch-when-idle -d 'Deprecated alias for \'--on-busy-update=do-nothing\''
complete -c watchexec -s r -l restart -d 'Restart the process if it\'s still running' complete -c watchexec -s r -l restart -d 'Restart the process if it\'s still running'
complete -c watchexec -s k -l kill -d 'Hidden legacy shorthand for \'--signal=kill\''
complete -c watchexec -l stdin-quit -d 'Exit when stdin closes' complete -c watchexec -l stdin-quit -d 'Exit when stdin closes'
complete -c watchexec -l no-vcs-ignore -d 'Don\'t load gitignores' complete -c watchexec -l no-vcs-ignore -d 'Don\'t load gitignores'
complete -c watchexec -l no-project-ignore -d 'Don\'t load project-local ignores' complete -c watchexec -l no-project-ignore -d 'Don\'t load project-local ignores'
@ -35,8 +33,9 @@ complete -c watchexec -l no-default-ignore -d 'Don\'t use internal default ignor
complete -c watchexec -l no-discover-ignore -d 'Don\'t discover ignore files at all' complete -c watchexec -l no-discover-ignore -d 'Don\'t discover ignore files at all'
complete -c watchexec -l ignore-nothing -d 'Don\'t ignore anything at all' complete -c watchexec -l ignore-nothing -d 'Don\'t ignore anything at all'
complete -c watchexec -s p -l postpone -d 'Wait until first change before running command' complete -c watchexec -s p -l postpone -d 'Wait until first change before running command'
complete -c watchexec -s n -d 'Shorthand for \'--shell=none\'' complete -c watchexec -s n -d 'Don\'t use a shell'
complete -c watchexec -l no-environment -d 'Deprecated shorthand for \'--emit-events=none\'' complete -c watchexec -l no-shell-long -d 'Don\'t use a shell'
complete -c watchexec -l no-environment -d 'Shorthand for \'--emit-events=none\''
complete -c watchexec -l only-emit-events -d 'Only emit events to stdout, run no commands' complete -c watchexec -l only-emit-events -d 'Only emit events to stdout, run no commands'
complete -c watchexec -l no-process-group -d 'Don\'t use a process group' complete -c watchexec -l no-process-group -d 'Don\'t use a process group'
complete -c watchexec -s 1 -d 'Testing only: exit Watchexec after the first run' complete -c watchexec -s 1 -d 'Testing only: exit Watchexec after the first run'
@ -46,7 +45,7 @@ complete -c watchexec -s q -l quiet -d 'Don\'t print starting and stopping messa
complete -c watchexec -l bell -d 'Ring the terminal bell on command completion' complete -c watchexec -l bell -d 'Ring the terminal bell on command completion'
complete -c watchexec -l no-meta -d 'Don\'t emit fs events for metadata changes' complete -c watchexec -l no-meta -d 'Don\'t emit fs events for metadata changes'
complete -c watchexec -l print-events -d 'Print events that trigger actions' complete -c watchexec -l print-events -d 'Print events that trigger actions'
complete -c watchexec -l manual -d 'Show the manual page'
complete -c watchexec -s v -l verbose -d 'Set diagnostic log level' complete -c watchexec -s v -l verbose -d 'Set diagnostic log level'
complete -c watchexec -l manual -d 'Show the manual page'
complete -c watchexec -s h -l help -d 'Print help (see more with \'--help\')' complete -c watchexec -s h -l help -d 'Print help (see more with \'--help\')'
complete -c watchexec -s V -l version -d 'Print version' complete -c watchexec -s V -l version -d 'Print version'

View file

@ -12,10 +12,6 @@ module completions {
[ "environment" "stdio" "file" "json-stdio" "json-file" "none" ] [ "environment" "stdio" "file" "json-stdio" "json-file" "none" ]
} }
def "nu-complete watchexec wrap_process" [] {
[ "group" "session" "none" ]
}
def "nu-complete watchexec color" [] { def "nu-complete watchexec color" [] {
[ "auto" "always" "never" ] [ "auto" "always" "never" ]
} }
@ -32,12 +28,12 @@ module completions {
export extern watchexec [ export extern watchexec [
...command: string # Command to run on changes ...command: string # Command to run on changes
--watch(-w): string # Watch a specific file or directory --watch(-w): string # Watch a specific file or directory
--watch-non-recursive(-W): string # Watch a specific directory, non-recursively
--watch-file(-F): string # Watch files and directories from a file
--clear(-c): string@"nu-complete watchexec screen_clear" # Clear screen before running command --clear(-c): string@"nu-complete watchexec screen_clear" # Clear screen before running command
--on-busy-update(-o): string@"nu-complete watchexec on_busy_update" # What to do when receiving events while the command is running --on-busy-update(-o): string@"nu-complete watchexec on_busy_update" # What to do when receiving events while the command is running
--watch-when-idle(-W) # Deprecated alias for '--on-busy-update=do-nothing'
--restart(-r) # Restart the process if it's still running --restart(-r) # Restart the process if it's still running
--signal(-s): string # Send a signal to the process when it's still running --signal(-s): string # Send a signal to the process when it's still running
--kill(-k) # Hidden legacy shorthand for '--signal=kill'
--stop-signal: string # Signal to send to stop the command --stop-signal: string # Signal to send to stop the command
--stop-timeout: string # Time to wait for the command to exit gracefully --stop-timeout: string # Time to wait for the command to exit gracefully
--map-signal: string # Translate signals from the OS to signals to send to the command --map-signal: string # Translate signals from the OS to signals to send to the command
@ -53,13 +49,13 @@ module completions {
--delay-run: string # Sleep before running the command --delay-run: string # Sleep before running the command
--poll: string # Poll for filesystem changes --poll: string # Poll for filesystem changes
--shell: string # Use a different shell --shell: string # Use a different shell
-n # Shorthand for '--shell=none' -n # Don't use a shell
--no-environment # Deprecated shorthand for '--emit-events=none' --no-shell-long # Don't use a shell
--no-environment # Shorthand for '--emit-events=none'
--emit-events-to: string@"nu-complete watchexec emit_events_to" # Configure event emission --emit-events-to: string@"nu-complete watchexec emit_events_to" # Configure event emission
--only-emit-events # Only emit events to stdout, run no commands --only-emit-events # Only emit events to stdout, run no commands
--env(-E): string # Add env vars to the command --env(-E): string # Add env vars to the command
--no-process-group # Don't use a process group --no-process-group # Don't use a process group
--wrap-process: string@"nu-complete watchexec wrap_process" # Configure how the process is wrapped
-1 # Testing only: exit Watchexec after the first run -1 # Testing only: exit Watchexec after the first run
--notify(-N) # Alert when commands start and end --notify(-N) # Alert when commands start and end
--color: string@"nu-complete watchexec color" # When to use terminal colours --color: string@"nu-complete watchexec color" # When to use terminal colours
@ -71,16 +67,15 @@ module completions {
--exts(-e): string # Filename extensions to filter to --exts(-e): string # Filename extensions to filter to
--filter(-f): string # Filename patterns to filter to --filter(-f): string # Filename patterns to filter to
--filter-file: string # Files to load filters from --filter-file: string # Files to load filters from
--filter-prog(-j): string # [experimental] Filter programs
--ignore(-i): string # Filename patterns to filter out --ignore(-i): string # Filename patterns to filter out
--ignore-file: string # Files to load ignores from --ignore-file: string # Files to load ignores from
--fs-events: string@"nu-complete watchexec filter_fs_events" # Filesystem events to filter to --fs-events: string@"nu-complete watchexec filter_fs_events" # Filesystem events to filter to
--no-meta # Don't emit fs events for metadata changes --no-meta # Don't emit fs events for metadata changes
--print-events # Print events that trigger actions --print-events # Print events that trigger actions
--manual # Show the manual page
--completions: string@"nu-complete watchexec completions" # Generate a shell completions script
--verbose(-v) # Set diagnostic log level --verbose(-v) # Set diagnostic log level
--log-file: string # Write diagnostic logs to a file --log-file: string # Write diagnostic logs to a file
--manual # Show the manual page
--completions: string@"nu-complete watchexec completions" # Generate a shell completions script
--help(-h) # Print help (see more with '--help') --help(-h) # Print help (see more with '--help')
--version(-V) # Print version --version(-V) # Print version
] ]

View file

@ -21,77 +21,75 @@ Register-ArgumentCompleter -Native -CommandName 'watchexec' -ScriptBlock {
$completions = @(switch ($command) { $completions = @(switch ($command) {
'watchexec' { 'watchexec' {
[CompletionResult]::new('-w', '-w', [CompletionResultType]::ParameterName, 'Watch a specific file or directory') [CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Watch a specific file or directory')
[CompletionResult]::new('--watch', '--watch', [CompletionResultType]::ParameterName, 'Watch a specific file or directory') [CompletionResult]::new('--watch', 'watch', [CompletionResultType]::ParameterName, 'Watch a specific file or directory')
[CompletionResult]::new('-W', '-W ', [CompletionResultType]::ParameterName, 'Watch a specific directory, non-recursively') [CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'Clear screen before running command')
[CompletionResult]::new('--watch-non-recursive', '--watch-non-recursive', [CompletionResultType]::ParameterName, 'Watch a specific directory, non-recursively') [CompletionResult]::new('--clear', 'clear', [CompletionResultType]::ParameterName, 'Clear screen before running command')
[CompletionResult]::new('-F', '-F ', [CompletionResultType]::ParameterName, 'Watch files and directories from a file') [CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'What to do when receiving events while the command is running')
[CompletionResult]::new('--watch-file', '--watch-file', [CompletionResultType]::ParameterName, 'Watch files and directories from a file') [CompletionResult]::new('--on-busy-update', 'on-busy-update', [CompletionResultType]::ParameterName, 'What to do when receiving events while the command is running')
[CompletionResult]::new('-c', '-c', [CompletionResultType]::ParameterName, 'Clear screen before running command') [CompletionResult]::new('-s', 's', [CompletionResultType]::ParameterName, 'Send a signal to the process when it''s still running')
[CompletionResult]::new('--clear', '--clear', [CompletionResultType]::ParameterName, 'Clear screen before running command') [CompletionResult]::new('--signal', 'signal', [CompletionResultType]::ParameterName, 'Send a signal to the process when it''s still running')
[CompletionResult]::new('-o', '-o', [CompletionResultType]::ParameterName, 'What to do when receiving events while the command is running') [CompletionResult]::new('--stop-signal', 'stop-signal', [CompletionResultType]::ParameterName, 'Signal to send to stop the command')
[CompletionResult]::new('--on-busy-update', '--on-busy-update', [CompletionResultType]::ParameterName, 'What to do when receiving events while the command is running') [CompletionResult]::new('--stop-timeout', 'stop-timeout', [CompletionResultType]::ParameterName, 'Time to wait for the command to exit gracefully')
[CompletionResult]::new('-s', '-s', [CompletionResultType]::ParameterName, 'Send a signal to the process when it''s still running') [CompletionResult]::new('--map-signal', 'map-signal', [CompletionResultType]::ParameterName, 'Translate signals from the OS to signals to send to the command')
[CompletionResult]::new('--signal', '--signal', [CompletionResultType]::ParameterName, 'Send a signal to the process when it''s still running') [CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Time to wait for new events before taking action')
[CompletionResult]::new('--stop-signal', '--stop-signal', [CompletionResultType]::ParameterName, 'Signal to send to stop the command') [CompletionResult]::new('--debounce', 'debounce', [CompletionResultType]::ParameterName, 'Time to wait for new events before taking action')
[CompletionResult]::new('--stop-timeout', '--stop-timeout', [CompletionResultType]::ParameterName, 'Time to wait for the command to exit gracefully') [CompletionResult]::new('--delay-run', 'delay-run', [CompletionResultType]::ParameterName, 'Sleep before running the command')
[CompletionResult]::new('--map-signal', '--map-signal', [CompletionResultType]::ParameterName, 'Translate signals from the OS to signals to send to the command') [CompletionResult]::new('--poll', 'poll', [CompletionResultType]::ParameterName, 'Poll for filesystem changes')
[CompletionResult]::new('-d', '-d', [CompletionResultType]::ParameterName, 'Time to wait for new events before taking action') [CompletionResult]::new('--shell', 'shell', [CompletionResultType]::ParameterName, 'Use a different shell')
[CompletionResult]::new('--debounce', '--debounce', [CompletionResultType]::ParameterName, 'Time to wait for new events before taking action') [CompletionResult]::new('--emit-events-to', 'emit-events-to', [CompletionResultType]::ParameterName, 'Configure event emission')
[CompletionResult]::new('--delay-run', '--delay-run', [CompletionResultType]::ParameterName, 'Sleep before running the command') [CompletionResult]::new('-E', 'E ', [CompletionResultType]::ParameterName, 'Add env vars to the command')
[CompletionResult]::new('--poll', '--poll', [CompletionResultType]::ParameterName, 'Poll for filesystem changes') [CompletionResult]::new('--env', 'env', [CompletionResultType]::ParameterName, 'Add env vars to the command')
[CompletionResult]::new('--shell', '--shell', [CompletionResultType]::ParameterName, 'Use a different shell') [CompletionResult]::new('--color', 'color', [CompletionResultType]::ParameterName, 'When to use terminal colours')
[CompletionResult]::new('--emit-events-to', '--emit-events-to', [CompletionResultType]::ParameterName, 'Configure event emission') [CompletionResult]::new('--project-origin', 'project-origin', [CompletionResultType]::ParameterName, 'Set the project origin')
[CompletionResult]::new('-E', '-E ', [CompletionResultType]::ParameterName, 'Add env vars to the command') [CompletionResult]::new('--workdir', 'workdir', [CompletionResultType]::ParameterName, 'Set the working directory')
[CompletionResult]::new('--env', '--env', [CompletionResultType]::ParameterName, 'Add env vars to the command') [CompletionResult]::new('-e', 'e', [CompletionResultType]::ParameterName, 'Filename extensions to filter to')
[CompletionResult]::new('--wrap-process', '--wrap-process', [CompletionResultType]::ParameterName, 'Configure how the process is wrapped') [CompletionResult]::new('--exts', 'exts', [CompletionResultType]::ParameterName, 'Filename extensions to filter to')
[CompletionResult]::new('--color', '--color', [CompletionResultType]::ParameterName, 'When to use terminal colours') [CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Filename patterns to filter to')
[CompletionResult]::new('--project-origin', '--project-origin', [CompletionResultType]::ParameterName, 'Set the project origin') [CompletionResult]::new('--filter', 'filter', [CompletionResultType]::ParameterName, 'Filename patterns to filter to')
[CompletionResult]::new('--workdir', '--workdir', [CompletionResultType]::ParameterName, 'Set the working directory') [CompletionResult]::new('--filter-file', 'filter-file', [CompletionResultType]::ParameterName, 'Files to load filters from')
[CompletionResult]::new('-e', '-e', [CompletionResultType]::ParameterName, 'Filename extensions to filter to') [CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'Filename patterns to filter out')
[CompletionResult]::new('--exts', '--exts', [CompletionResultType]::ParameterName, 'Filename extensions to filter to') [CompletionResult]::new('--ignore', 'ignore', [CompletionResultType]::ParameterName, 'Filename patterns to filter out')
[CompletionResult]::new('-f', '-f', [CompletionResultType]::ParameterName, 'Filename patterns to filter to') [CompletionResult]::new('--ignore-file', 'ignore-file', [CompletionResultType]::ParameterName, 'Files to load ignores from')
[CompletionResult]::new('--filter', '--filter', [CompletionResultType]::ParameterName, 'Filename patterns to filter to') [CompletionResult]::new('--fs-events', 'fs-events', [CompletionResultType]::ParameterName, 'Filesystem events to filter to')
[CompletionResult]::new('--filter-file', '--filter-file', [CompletionResultType]::ParameterName, 'Files to load filters from') [CompletionResult]::new('--log-file', 'log-file', [CompletionResultType]::ParameterName, 'Write diagnostic logs to a file')
[CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, '[experimental] Filter programs') [CompletionResult]::new('--completions', 'completions', [CompletionResultType]::ParameterName, 'Generate a shell completions script')
[CompletionResult]::new('--filter-prog', '--filter-prog', [CompletionResultType]::ParameterName, '[experimental] Filter programs') [CompletionResult]::new('-W', 'W ', [CompletionResultType]::ParameterName, 'Deprecated alias for ''--on-busy-update=do-nothing''')
[CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'Filename patterns to filter out') [CompletionResult]::new('--watch-when-idle', 'watch-when-idle', [CompletionResultType]::ParameterName, 'Deprecated alias for ''--on-busy-update=do-nothing''')
[CompletionResult]::new('--ignore', '--ignore', [CompletionResultType]::ParameterName, 'Filename patterns to filter out') [CompletionResult]::new('-r', 'r', [CompletionResultType]::ParameterName, 'Restart the process if it''s still running')
[CompletionResult]::new('--ignore-file', '--ignore-file', [CompletionResultType]::ParameterName, 'Files to load ignores from') [CompletionResult]::new('--restart', 'restart', [CompletionResultType]::ParameterName, 'Restart the process if it''s still running')
[CompletionResult]::new('--fs-events', '--fs-events', [CompletionResultType]::ParameterName, 'Filesystem events to filter to') [CompletionResult]::new('-k', 'k', [CompletionResultType]::ParameterName, 'Hidden legacy shorthand for ''--signal=kill''')
[CompletionResult]::new('--completions', '--completions', [CompletionResultType]::ParameterName, 'Generate a shell completions script') [CompletionResult]::new('--kill', 'kill', [CompletionResultType]::ParameterName, 'Hidden legacy shorthand for ''--signal=kill''')
[CompletionResult]::new('--log-file', '--log-file', [CompletionResultType]::ParameterName, 'Write diagnostic logs to a file') [CompletionResult]::new('--stdin-quit', 'stdin-quit', [CompletionResultType]::ParameterName, 'Exit when stdin closes')
[CompletionResult]::new('-r', '-r', [CompletionResultType]::ParameterName, 'Restart the process if it''s still running') [CompletionResult]::new('--no-vcs-ignore', 'no-vcs-ignore', [CompletionResultType]::ParameterName, 'Don''t load gitignores')
[CompletionResult]::new('--restart', '--restart', [CompletionResultType]::ParameterName, 'Restart the process if it''s still running') [CompletionResult]::new('--no-project-ignore', 'no-project-ignore', [CompletionResultType]::ParameterName, 'Don''t load project-local ignores')
[CompletionResult]::new('--stdin-quit', '--stdin-quit', [CompletionResultType]::ParameterName, 'Exit when stdin closes') [CompletionResult]::new('--no-global-ignore', 'no-global-ignore', [CompletionResultType]::ParameterName, 'Don''t load global ignores')
[CompletionResult]::new('--no-vcs-ignore', '--no-vcs-ignore', [CompletionResultType]::ParameterName, 'Don''t load gitignores') [CompletionResult]::new('--no-default-ignore', 'no-default-ignore', [CompletionResultType]::ParameterName, 'Don''t use internal default ignores')
[CompletionResult]::new('--no-project-ignore', '--no-project-ignore', [CompletionResultType]::ParameterName, 'Don''t load project-local ignores') [CompletionResult]::new('--no-discover-ignore', 'no-discover-ignore', [CompletionResultType]::ParameterName, 'Don''t discover ignore files at all')
[CompletionResult]::new('--no-global-ignore', '--no-global-ignore', [CompletionResultType]::ParameterName, 'Don''t load global ignores') [CompletionResult]::new('--ignore-nothing', 'ignore-nothing', [CompletionResultType]::ParameterName, 'Don''t ignore anything at all')
[CompletionResult]::new('--no-default-ignore', '--no-default-ignore', [CompletionResultType]::ParameterName, 'Don''t use internal default ignores') [CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Wait until first change before running command')
[CompletionResult]::new('--no-discover-ignore', '--no-discover-ignore', [CompletionResultType]::ParameterName, 'Don''t discover ignore files at all') [CompletionResult]::new('--postpone', 'postpone', [CompletionResultType]::ParameterName, 'Wait until first change before running command')
[CompletionResult]::new('--ignore-nothing', '--ignore-nothing', [CompletionResultType]::ParameterName, 'Don''t ignore anything at all') [CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Don''t use a shell')
[CompletionResult]::new('-p', '-p', [CompletionResultType]::ParameterName, 'Wait until first change before running command') [CompletionResult]::new('--no-shell-long', 'no-shell-long', [CompletionResultType]::ParameterName, 'Don''t use a shell')
[CompletionResult]::new('--postpone', '--postpone', [CompletionResultType]::ParameterName, 'Wait until first change before running command') [CompletionResult]::new('--no-environment', 'no-environment', [CompletionResultType]::ParameterName, 'Shorthand for ''--emit-events=none''')
[CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Shorthand for ''--shell=none''') [CompletionResult]::new('--only-emit-events', 'only-emit-events', [CompletionResultType]::ParameterName, 'Only emit events to stdout, run no commands')
[CompletionResult]::new('--no-environment', '--no-environment', [CompletionResultType]::ParameterName, 'Deprecated shorthand for ''--emit-events=none''') [CompletionResult]::new('--no-process-group', 'no-process-group', [CompletionResultType]::ParameterName, 'Don''t use a process group')
[CompletionResult]::new('--only-emit-events', '--only-emit-events', [CompletionResultType]::ParameterName, 'Only emit events to stdout, run no commands') [CompletionResult]::new('-1', '1', [CompletionResultType]::ParameterName, 'Testing only: exit Watchexec after the first run')
[CompletionResult]::new('--no-process-group', '--no-process-group', [CompletionResultType]::ParameterName, 'Don''t use a process group') [CompletionResult]::new('-N', 'N ', [CompletionResultType]::ParameterName, 'Alert when commands start and end')
[CompletionResult]::new('-1', '-1', [CompletionResultType]::ParameterName, 'Testing only: exit Watchexec after the first run') [CompletionResult]::new('--notify', 'notify', [CompletionResultType]::ParameterName, 'Alert when commands start and end')
[CompletionResult]::new('-N', '-N ', [CompletionResultType]::ParameterName, 'Alert when commands start and end') [CompletionResult]::new('--timings', 'timings', [CompletionResultType]::ParameterName, 'Print how long the command took to run')
[CompletionResult]::new('--notify', '--notify', [CompletionResultType]::ParameterName, 'Alert when commands start and end') [CompletionResult]::new('-q', 'q', [CompletionResultType]::ParameterName, 'Don''t print starting and stopping messages')
[CompletionResult]::new('--timings', '--timings', [CompletionResultType]::ParameterName, 'Print how long the command took to run') [CompletionResult]::new('--quiet', 'quiet', [CompletionResultType]::ParameterName, 'Don''t print starting and stopping messages')
[CompletionResult]::new('-q', '-q', [CompletionResultType]::ParameterName, 'Don''t print starting and stopping messages') [CompletionResult]::new('--bell', 'bell', [CompletionResultType]::ParameterName, 'Ring the terminal bell on command completion')
[CompletionResult]::new('--quiet', '--quiet', [CompletionResultType]::ParameterName, 'Don''t print starting and stopping messages') [CompletionResult]::new('--no-meta', 'no-meta', [CompletionResultType]::ParameterName, 'Don''t emit fs events for metadata changes')
[CompletionResult]::new('--bell', '--bell', [CompletionResultType]::ParameterName, 'Ring the terminal bell on command completion') [CompletionResult]::new('--print-events', 'print-events', [CompletionResultType]::ParameterName, 'Print events that trigger actions')
[CompletionResult]::new('--no-meta', '--no-meta', [CompletionResultType]::ParameterName, 'Don''t emit fs events for metadata changes') [CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'Set diagnostic log level')
[CompletionResult]::new('--print-events', '--print-events', [CompletionResultType]::ParameterName, 'Print events that trigger actions') [CompletionResult]::new('--verbose', 'verbose', [CompletionResultType]::ParameterName, 'Set diagnostic log level')
[CompletionResult]::new('--manual', '--manual', [CompletionResultType]::ParameterName, 'Show the manual page') [CompletionResult]::new('--manual', 'manual', [CompletionResultType]::ParameterName, 'Show the manual page')
[CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'Set diagnostic log level') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
[CompletionResult]::new('--verbose', '--verbose', [CompletionResultType]::ParameterName, 'Set diagnostic log level') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
[CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('-V', 'V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')') [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('-V', '-V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--version', '--version', [CompletionResultType]::ParameterName, 'Print version')
break break
} }
}) })

View file

@ -14,19 +14,15 @@ _watchexec() {
fi fi
local context curcontext="$curcontext" state line local context curcontext="$curcontext" state line
_arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" \
'*-w+[Watch a specific file or directory]:PATH:_files' \ '*-w+[Watch a specific file or directory]:PATH:_files' \
'*--watch=[Watch a specific file or directory]:PATH:_files' \ '*--watch=[Watch a specific file or directory]:PATH:_files' \
'*-W+[Watch a specific directory, non-recursively]:PATH:_files' \
'*--watch-non-recursive=[Watch a specific directory, non-recursively]:PATH:_files' \
'-F+[Watch files and directories from a file]:PATH:_files' \
'--watch-file=[Watch files and directories from a file]:PATH:_files' \
'-c+[Clear screen before running command]' \ '-c+[Clear screen before running command]' \
'--clear=[Clear screen before running command]' \ '--clear=[Clear screen before running command]' \
'-o+[What to do when receiving events while the command is running]:MODE:(queue do-nothing restart signal)' \ '-o+[What to do when receiving events while the command is running]:MODE:(queue do-nothing restart signal)' \
'--on-busy-update=[What to do when receiving events while the command is running]:MODE:(queue do-nothing restart signal)' \ '--on-busy-update=[What to do when receiving events while the command is running]:MODE:(queue do-nothing restart signal)' \
'(-r --restart)-s+[Send a signal to the process when it'\''s still running]:SIGNAL: ' \ '(-r --restart -W --watch-when-idle)-s+[Send a signal to the process when it'\''s still running]:SIGNAL: ' \
'(-r --restart)--signal=[Send a signal to the process when it'\''s still running]:SIGNAL: ' \ '(-r --restart -W --watch-when-idle)--signal=[Send a signal to the process when it'\''s still running]:SIGNAL: ' \
'--stop-signal=[Signal to send to stop the command]:SIGNAL: ' \ '--stop-signal=[Signal to send to stop the command]:SIGNAL: ' \
'--stop-timeout=[Time to wait for the command to exit gracefully]:TIMEOUT: ' \ '--stop-timeout=[Time to wait for the command to exit gracefully]:TIMEOUT: ' \
'*--map-signal=[Translate signals from the OS to signals to send to the command]:SIGNAL:SIGNAL: ' \ '*--map-signal=[Translate signals from the OS to signals to send to the command]:SIGNAL:SIGNAL: ' \
@ -38,7 +34,6 @@ _watchexec() {
'--emit-events-to=[Configure event emission]:MODE:(environment stdio file json-stdio json-file none)' \ '--emit-events-to=[Configure event emission]:MODE:(environment stdio file json-stdio json-file none)' \
'*-E+[Add env vars to the command]:KEY=VALUE: ' \ '*-E+[Add env vars to the command]:KEY=VALUE: ' \
'*--env=[Add env vars to the command]:KEY=VALUE: ' \ '*--env=[Add env vars to the command]:KEY=VALUE: ' \
'--wrap-process=[Configure how the process is wrapped]:MODE:(group session none)' \
'--color=[When to use terminal colours]:MODE:(auto always never)' \ '--color=[When to use terminal colours]:MODE:(auto always never)' \
'--project-origin=[Set the project origin]:DIRECTORY:_files -/' \ '--project-origin=[Set the project origin]:DIRECTORY:_files -/' \
'--workdir=[Set the working directory]:DIRECTORY:_files -/' \ '--workdir=[Set the working directory]:DIRECTORY:_files -/' \
@ -47,16 +42,18 @@ _watchexec() {
'*-f+[Filename patterns to filter to]:PATTERN: ' \ '*-f+[Filename patterns to filter to]:PATTERN: ' \
'*--filter=[Filename patterns to filter to]:PATTERN: ' \ '*--filter=[Filename patterns to filter to]:PATTERN: ' \
'*--filter-file=[Files to load filters from]:PATH:_files' \ '*--filter-file=[Files to load filters from]:PATH:_files' \
'*-j+[\[experimental\] Filter programs]:EXPRESSION: ' \
'*--filter-prog=[\[experimental\] Filter programs]:EXPRESSION: ' \
'*-i+[Filename patterns to filter out]:PATTERN: ' \ '*-i+[Filename patterns to filter out]:PATTERN: ' \
'*--ignore=[Filename patterns to filter out]:PATTERN: ' \ '*--ignore=[Filename patterns to filter out]:PATTERN: ' \
'*--ignore-file=[Files to load ignores from]:PATH:_files' \ '*--ignore-file=[Files to load ignores from]:PATH:_files' \
'*--fs-events=[Filesystem events to filter to]:EVENTS:(access create remove rename modify metadata)' \ '*--fs-events=[Filesystem events to filter to]:EVENTS:(access create remove rename modify metadata)' \
'(--manual)--completions=[Generate a shell completions script]:COMPLETIONS:(bash elvish fish nu powershell zsh)' \
'--log-file=[Write diagnostic logs to a file]' \ '--log-file=[Write diagnostic logs to a file]' \
'(-o --on-busy-update)-r[Restart the process if it'\''s still running]' \ '(--manual)--completions=[Generate a shell completions script]:COMPLETIONS:(bash elvish fish nu powershell zsh)' \
'(-o --on-busy-update)--restart[Restart the process if it'\''s still running]' \ '(-o --on-busy-update -r --restart)-W[Deprecated alias for '\''--on-busy-update=do-nothing'\'']' \
'(-o --on-busy-update -r --restart)--watch-when-idle[Deprecated alias for '\''--on-busy-update=do-nothing'\'']' \
'(-o --on-busy-update -W --watch-when-idle)-r[Restart the process if it'\''s still running]' \
'(-o --on-busy-update -W --watch-when-idle)--restart[Restart the process if it'\''s still running]' \
'-k[Hidden legacy shorthand for '\''--signal=kill'\'']' \
'--kill[Hidden legacy shorthand for '\''--signal=kill'\'']' \
'--stdin-quit[Exit when stdin closes]' \ '--stdin-quit[Exit when stdin closes]' \
'--no-vcs-ignore[Don'\''t load gitignores]' \ '--no-vcs-ignore[Don'\''t load gitignores]' \
'--no-project-ignore[Don'\''t load project-local ignores]' \ '--no-project-ignore[Don'\''t load project-local ignores]' \
@ -66,8 +63,9 @@ _watchexec() {
'--ignore-nothing[Don'\''t ignore anything at all]' \ '--ignore-nothing[Don'\''t ignore anything at all]' \
'-p[Wait until first change before running command]' \ '-p[Wait until first change before running command]' \
'--postpone[Wait until first change before running command]' \ '--postpone[Wait until first change before running command]' \
'-n[Shorthand for '\''--shell=none'\'']' \ '-n[Don'\''t use a shell]' \
'--no-environment[Deprecated shorthand for '\''--emit-events=none'\'']' \ '--no-shell-long[Don'\''t use a shell]' \
'--no-environment[Shorthand for '\''--emit-events=none'\'']' \
'(--completions --manual)--only-emit-events[Only emit events to stdout, run no commands]' \ '(--completions --manual)--only-emit-events[Only emit events to stdout, run no commands]' \
'--no-process-group[Don'\''t use a process group]' \ '--no-process-group[Don'\''t use a process group]' \
'-1[Testing only\: exit Watchexec after the first run]' \ '-1[Testing only\: exit Watchexec after the first run]' \
@ -79,9 +77,9 @@ _watchexec() {
'--bell[Ring the terminal bell on command completion]' \ '--bell[Ring the terminal bell on command completion]' \
'(--fs-events)--no-meta[Don'\''t emit fs events for metadata changes]' \ '(--fs-events)--no-meta[Don'\''t emit fs events for metadata changes]' \
'--print-events[Print events that trigger actions]' \ '--print-events[Print events that trigger actions]' \
'(--completions)--manual[Show the manual page]' \
'*-v[Set diagnostic log level]' \ '*-v[Set diagnostic log level]' \
'*--verbose[Set diagnostic log level]' \ '*--verbose[Set diagnostic log level]' \
'(--completions)--manual[Show the manual page]' \
'-h[Print help (see more with '\''--help'\'')]' \ '-h[Print help (see more with '\''--help'\'')]' \
'--help[Print help (see more with '\''--help'\'')]' \ '--help[Print help (see more with '\''--help'\'')]' \
'-V[Print version]' \ '-V[Print version]' \

View file

@ -2,18 +2,6 @@
## Next (YYYY-MM-DD) ## Next (YYYY-MM-DD)
## v1.1.1 (2024-10-14)
- Deps: gix 0.66
## v1.1.0 (2024-05-16)
- Add `git-describe` support (#832, by @lu-zero)
## v1.0.3 (2024-04-20)
- Deps: gix 0.62
## v1.0.2 (2023-11-26) ## v1.0.2 (2023-11-26)
- Deps: upgrade to gix 0.55 - Deps: upgrade to gix 0.55

View file

@ -1,6 +1,6 @@
[package] [package]
name = "bosion" name = "bosion"
version = "1.1.1" version = "1.0.2"
authors = ["Félix Saparelli <felix@passcod.name>"] authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0 OR MIT" license = "Apache-2.0 OR MIT"
@ -19,10 +19,9 @@ version = "0.3.30"
features = ["macros", "formatting"] features = ["macros", "formatting"]
[dependencies.gix] [dependencies.gix]
version = "0.66.0" version = "0.55.2"
optional = true optional = true
default-features = false default-features = false
features = ["revision"]
[features] [features]
default = ["git", "reproducible", "std"] default = ["git", "reproducible", "std"]

View file

@ -15,7 +15,7 @@ In your `Cargo.toml`:
```toml ```toml
[build-dependencies] [build-dependencies]
bosion = "1.1.1" bosion = "1.0.2"
``` ```
In your `build.rs`: In your `build.rs`:

File diff suppressed because it is too large Load diff

View file

@ -13,9 +13,6 @@ struct Args {
#[clap(long)] #[clap(long)]
dates: bool, dates: bool,
#[clap(long)]
describe: bool,
} }
fn main() { fn main() {
@ -26,15 +23,17 @@ fn main() {
"{}", "{}",
Bosion::long_version_with(&[("extra", "field"), ("custom", "1.2.3"),]) Bosion::long_version_with(&[("extra", "field"), ("custom", "1.2.3"),])
); );
} else if args.features { } else
if args.features {
println!("Features: {}", Bosion::CRATE_FEATURE_STRING); println!("Features: {}", Bosion::CRATE_FEATURE_STRING);
} else if args.dates { } else
if args.dates {
println!("commit date: {}", Bosion::GIT_COMMIT_DATE); println!("commit date: {}", Bosion::GIT_COMMIT_DATE);
println!("commit datetime: {}", Bosion::GIT_COMMIT_DATETIME); println!("commit datetime: {}", Bosion::GIT_COMMIT_DATETIME);
println!("build date: {}", Bosion::BUILD_DATE); println!("build date: {}", Bosion::BUILD_DATE);
println!("build datetime: {}", Bosion::BUILD_DATETIME); println!("build datetime: {}", Bosion::BUILD_DATETIME);
} else if args.describe {
println!("commit description: {}", Bosion::GIT_COMMIT_DESCRIPTION);
} else { } else {
println!("{}", Bosion::LONG_VERSION); println!("{}", Bosion::LONG_VERSION);
} }

File diff suppressed because it is too large Load diff

View file

@ -16,5 +16,5 @@ path = "../.."
[dependencies] [dependencies]
leon = { version = "2.0.1", default-features = false } leon = { version = "2.0.1", default-features = false }
snapbox = "0.5.9" snapbox = "0.4.8"
time = { version = "0.3.30", features = ["formatting", "macros"] } time = { version = "0.3.30", features = ["formatting", "macros"] }

View file

@ -4,48 +4,47 @@ version = 3
[[package]] [[package]]
name = "anstream" name = "anstream"
version = "0.6.15" version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"anstyle-parse", "anstyle-parse",
"anstyle-query", "anstyle-query",
"anstyle-wincon", "anstyle-wincon",
"colorchoice", "colorchoice",
"is_terminal_polyfill",
"utf8parse", "utf8parse",
] ]
[[package]] [[package]]
name = "anstyle" name = "anstyle"
version = "1.0.8" version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87"
[[package]] [[package]]
name = "anstyle-parse" name = "anstyle-parse"
version = "0.2.5" version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" checksum = "317b9a89c1868f5ea6ff1d9539a69f45dffc21ce321ac1fd1160dfa48c8e2140"
dependencies = [ dependencies = [
"utf8parse", "utf8parse",
] ]
[[package]] [[package]]
name = "anstyle-query" name = "anstyle-query"
version = "1.1.1" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
dependencies = [ dependencies = [
"windows-sys", "windows-sys",
] ]
[[package]] [[package]]
name = "anstyle-wincon" name = "anstyle-wincon"
version = "3.0.4" version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"windows-sys", "windows-sys",
@ -53,7 +52,7 @@ dependencies = [
[[package]] [[package]]
name = "bosion" name = "bosion"
version = "1.1.0" version = "1.0.1"
dependencies = [ dependencies = [
"time", "time",
] ]
@ -70,30 +69,24 @@ dependencies = [
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
version = "1.0.2" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.3.11" version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3"
dependencies = [ dependencies = [
"powerfmt", "powerfmt",
] ]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.11" version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]] [[package]]
name = "leon" name = "leon"
@ -110,12 +103,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]] [[package]]
name = "powerfmt" name = "powerfmt"
version = "0.2.0" version = "0.2.0"
@ -124,36 +111,36 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.87" version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.37" version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
] ]
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.210" version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.210" version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -162,15 +149,15 @@ dependencies = [
[[package]] [[package]]
name = "similar" name = "similar"
version = "2.6.0" version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" checksum = "2aeaf503862c419d66959f5d7ca015337d864e9c49485d771b732e2a20453597"
[[package]] [[package]]
name = "snapbox" name = "snapbox"
version = "0.5.14" version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f37d101fcafc8e73748fd8a1b7048f5979f93d372fd17027d7724c1643bc379b" checksum = "4b377c0b6e4715c116473d8e40d51e3fa5b0a2297ca9b2a931ba800667b259ed"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
@ -181,18 +168,18 @@ dependencies = [
[[package]] [[package]]
name = "snapbox-macros" name = "snapbox-macros"
version = "0.3.10" version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af" checksum = "ed1559baff8a696add3322b9be3e940d433e7bb4e38d79017205fd37ff28b28e"
dependencies = [ dependencies = [
"anstream", "anstream",
] ]
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.79" version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -201,18 +188,18 @@ dependencies = [
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "1.0.64" version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2"
dependencies = [ dependencies = [
"thiserror-impl", "thiserror-impl",
] ]
[[package]] [[package]]
name = "thiserror-impl" name = "thiserror-impl"
version = "1.0.64" version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -221,13 +208,12 @@ dependencies = [
[[package]] [[package]]
name = "time" name = "time"
version = "0.3.36" version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
dependencies = [ dependencies = [
"deranged", "deranged",
"itoa", "itoa",
"num-conv",
"powerfmt", "powerfmt",
"serde", "serde",
"time-core", "time-core",
@ -242,45 +228,43 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
[[package]] [[package]]
name = "time-macros" name = "time-macros"
version = "0.2.18" version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
dependencies = [ dependencies = [
"num-conv",
"time-core", "time-core",
] ]
[[package]] [[package]]
name = "unicode-ident" name = "unicode-ident"
version = "1.0.13" version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]] [[package]]
name = "utf8parse" name = "utf8parse"
version = "0.2.2" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.52.0" version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [ dependencies = [
"windows-targets", "windows-targets",
] ]
[[package]] [[package]]
name = "windows-targets" name = "windows-targets"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [ dependencies = [
"windows_aarch64_gnullvm", "windows_aarch64_gnullvm",
"windows_aarch64_msvc", "windows_aarch64_msvc",
"windows_i686_gnu", "windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc", "windows_i686_msvc",
"windows_x86_64_gnu", "windows_x86_64_gnu",
"windows_x86_64_gnullvm", "windows_x86_64_gnullvm",
@ -289,48 +273,42 @@ dependencies = [
[[package]] [[package]]
name = "windows_aarch64_gnullvm" name = "windows_aarch64_gnullvm"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]] [[package]]
name = "windows_aarch64_msvc" name = "windows_aarch64_msvc"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]] [[package]]
name = "windows_i686_gnu" name = "windows_i686_gnu"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]] [[package]]
name = "windows_i686_msvc" name = "windows_i686_msvc"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]] [[package]]
name = "windows_x86_64_gnu" name = "windows_x86_64_gnu"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]] [[package]]
name = "windows_x86_64_gnullvm" name = "windows_x86_64_gnullvm"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]] [[package]]
name = "windows_x86_64_msvc" name = "windows_x86_64_msvc"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

View file

@ -18,5 +18,5 @@ features = ["std"]
[dependencies] [dependencies]
leon = { version = "2.0.1", default-features = false } leon = { version = "2.0.1", default-features = false }
snapbox = "0.5.9" snapbox = "0.4.8"
time = { version = "0.3.30", features = ["formatting", "macros"] } time = { version = "0.3.30", features = ["formatting", "macros"] }

View file

@ -4,48 +4,47 @@ version = 3
[[package]] [[package]]
name = "anstream" name = "anstream"
version = "0.6.15" version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"anstyle-parse", "anstyle-parse",
"anstyle-query", "anstyle-query",
"anstyle-wincon", "anstyle-wincon",
"colorchoice", "colorchoice",
"is_terminal_polyfill",
"utf8parse", "utf8parse",
] ]
[[package]] [[package]]
name = "anstyle" name = "anstyle"
version = "1.0.8" version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87"
[[package]] [[package]]
name = "anstyle-parse" name = "anstyle-parse"
version = "0.2.5" version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" checksum = "317b9a89c1868f5ea6ff1d9539a69f45dffc21ce321ac1fd1160dfa48c8e2140"
dependencies = [ dependencies = [
"utf8parse", "utf8parse",
] ]
[[package]] [[package]]
name = "anstyle-query" name = "anstyle-query"
version = "1.1.1" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
dependencies = [ dependencies = [
"windows-sys", "windows-sys",
] ]
[[package]] [[package]]
name = "anstyle-wincon" name = "anstyle-wincon"
version = "3.0.4" version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628"
dependencies = [ dependencies = [
"anstyle", "anstyle",
"windows-sys", "windows-sys",
@ -53,7 +52,7 @@ dependencies = [
[[package]] [[package]]
name = "bosion" name = "bosion"
version = "1.1.0" version = "1.0.1"
dependencies = [ dependencies = [
"time", "time",
] ]
@ -70,30 +69,24 @@ dependencies = [
[[package]] [[package]]
name = "colorchoice" name = "colorchoice"
version = "1.0.2" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]] [[package]]
name = "deranged" name = "deranged"
version = "0.3.11" version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3"
dependencies = [ dependencies = [
"powerfmt", "powerfmt",
] ]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.11" version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
[[package]] [[package]]
name = "leon" name = "leon"
@ -110,12 +103,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]] [[package]]
name = "powerfmt" name = "powerfmt"
version = "0.2.0" version = "0.2.0"
@ -124,36 +111,36 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.87" version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a" checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]] [[package]]
name = "quote" name = "quote"
version = "1.0.37" version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
] ]
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.210" version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a" checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
dependencies = [ dependencies = [
"serde_derive", "serde_derive",
] ]
[[package]] [[package]]
name = "serde_derive" name = "serde_derive"
version = "1.0.210" version = "1.0.193"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f" checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -162,15 +149,15 @@ dependencies = [
[[package]] [[package]]
name = "similar" name = "similar"
version = "2.6.0" version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" checksum = "2aeaf503862c419d66959f5d7ca015337d864e9c49485d771b732e2a20453597"
[[package]] [[package]]
name = "snapbox" name = "snapbox"
version = "0.5.14" version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f37d101fcafc8e73748fd8a1b7048f5979f93d372fd17027d7724c1643bc379b" checksum = "4b377c0b6e4715c116473d8e40d51e3fa5b0a2297ca9b2a931ba800667b259ed"
dependencies = [ dependencies = [
"anstream", "anstream",
"anstyle", "anstyle",
@ -181,18 +168,18 @@ dependencies = [
[[package]] [[package]]
name = "snapbox-macros" name = "snapbox-macros"
version = "0.3.10" version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af" checksum = "ed1559baff8a696add3322b9be3e940d433e7bb4e38d79017205fd37ff28b28e"
dependencies = [ dependencies = [
"anstream", "anstream",
] ]
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.79" version = "2.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89132cd0bf050864e1d38dc3bbc07a0eb8e7530af26344d3d2bbbef83499f590" checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -201,18 +188,18 @@ dependencies = [
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "1.0.64" version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d50af8abc119fb8bb6dbabcfa89656f46f84aa0ac7688088608076ad2b459a84" checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2"
dependencies = [ dependencies = [
"thiserror-impl", "thiserror-impl",
] ]
[[package]] [[package]]
name = "thiserror-impl" name = "thiserror-impl"
version = "1.0.64" version = "1.0.50"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08904e7672f5eb876eaaf87e0ce17857500934f4981c4a0ab2b4aa98baac7fc3" checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -221,13 +208,12 @@ dependencies = [
[[package]] [[package]]
name = "time" name = "time"
version = "0.3.36" version = "0.3.30"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
dependencies = [ dependencies = [
"deranged", "deranged",
"itoa", "itoa",
"num-conv",
"powerfmt", "powerfmt",
"serde", "serde",
"time-core", "time-core",
@ -242,45 +228,43 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
[[package]] [[package]]
name = "time-macros" name = "time-macros"
version = "0.2.18" version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
dependencies = [ dependencies = [
"num-conv",
"time-core", "time-core",
] ]
[[package]] [[package]]
name = "unicode-ident" name = "unicode-ident"
version = "1.0.13" version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]] [[package]]
name = "utf8parse" name = "utf8parse"
version = "0.2.2" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]] [[package]]
name = "windows-sys" name = "windows-sys"
version = "0.52.0" version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [ dependencies = [
"windows-targets", "windows-targets",
] ]
[[package]] [[package]]
name = "windows-targets" name = "windows-targets"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [ dependencies = [
"windows_aarch64_gnullvm", "windows_aarch64_gnullvm",
"windows_aarch64_msvc", "windows_aarch64_msvc",
"windows_i686_gnu", "windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc", "windows_i686_msvc",
"windows_x86_64_gnu", "windows_x86_64_gnu",
"windows_x86_64_gnullvm", "windows_x86_64_gnullvm",
@ -289,48 +273,42 @@ dependencies = [
[[package]] [[package]]
name = "windows_aarch64_gnullvm" name = "windows_aarch64_gnullvm"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]] [[package]]
name = "windows_aarch64_msvc" name = "windows_aarch64_msvc"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]] [[package]]
name = "windows_i686_gnu" name = "windows_i686_gnu"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]] [[package]]
name = "windows_i686_msvc" name = "windows_i686_msvc"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]] [[package]]
name = "windows_x86_64_gnu" name = "windows_x86_64_gnu"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]] [[package]]
name = "windows_x86_64_gnullvm" name = "windows_x86_64_gnullvm"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]] [[package]]
name = "windows_x86_64_msvc" name = "windows_x86_64_msvc"
version = "0.52.6" version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

View file

@ -23,5 +23,5 @@ default-features = false
[dependencies] [dependencies]
leon = { version = "2.0.1", default-features = false } leon = { version = "2.0.1", default-features = false }
snapbox = "0.5.9" snapbox = "0.4.8"
time = { version = "0.3.30", features = ["formatting", "macros"] } time = { version = "0.3.30", features = ["formatting", "macros"] }

View file

@ -145,9 +145,6 @@ pub struct GitInfo {
/// The datetime of the current commit, in the format `YYYY-MM-DD HH:MM:SS`, at UTC. /// The datetime of the current commit, in the format `YYYY-MM-DD HH:MM:SS`, at UTC.
pub git_datetime: String, pub git_datetime: String,
/// The `git describe` equivalent output
pub git_description: String,
} }
#[cfg(feature = "git")] #[cfg(feature = "git")]
@ -166,7 +163,6 @@ impl GitInfo {
git_shorthash: head.short_id().err_string()?.to_string(), git_shorthash: head.short_id().err_string()?.to_string(),
git_date: timestamp.format(DATE_FORMAT).err_string()?, git_date: timestamp.format(DATE_FORMAT).err_string()?,
git_datetime: timestamp.format(DATETIME_FORMAT).err_string()?, git_datetime: timestamp.format(DATETIME_FORMAT).err_string()?,
git_description: head.describe().format().err_string()?.to_string(),
}) })
} }
} }

View file

@ -74,7 +74,6 @@ pub fn gather_to(filename: &str, structname: &str, public: bool) {
git_shorthash, git_shorthash,
git_date, git_date,
git_datetime, git_datetime,
git_description,
.. ..
}) = git }) = git
{ {
@ -105,11 +104,6 @@ pub fn gather_to(filename: &str, structname: &str, public: bool) {
/// This is the date and time (`YYYY-MM-DD HH:MM:SS`) of the commit that was built. Same /// This is the date and time (`YYYY-MM-DD HH:MM:SS`) of the commit that was built. Same
/// caveats as with `GIT_COMMIT_HASH` apply. /// caveats as with `GIT_COMMIT_HASH` apply.
pub const GIT_COMMIT_DATETIME: &'static str = {git_datetime:?}; pub const GIT_COMMIT_DATETIME: &'static str = {git_datetime:?};
/// The git description
///
/// This is the string equivalent to what `git describe` would output
pub const GIT_COMMIT_DESCRIPTION: &'static str = {git_description:?};
" "
), format!("{crate_version} ({git_shorthash} {git_date}) {crate_feature_string}\ncommit-hash: {git_hash}\ncommit-date: {git_date}\nbuild-date: {build_date}\nrelease: {crate_version}\nfeatures: {crate_feature_list}")) ), format!("{crate_version} ({git_shorthash} {git_date}) {crate_feature_string}\ncommit-hash: {git_hash}\ncommit-date: {git_date}\nbuild-date: {build_date}\nrelease: {crate_version}\nfeatures: {crate_feature_list}"))
} else { } else {
@ -250,7 +244,6 @@ pub fn gather_to_env_with_prefix(prefix: &str) {
git_shorthash, git_shorthash,
git_date, git_date,
git_datetime, git_datetime,
git_description,
.. ..
}) = git }) = git
{ {
@ -258,6 +251,5 @@ pub fn gather_to_env_with_prefix(prefix: &str) {
println!("cargo:rustc-env={prefix}GIT_COMMIT_SHORTHASH={git_shorthash}"); println!("cargo:rustc-env={prefix}GIT_COMMIT_SHORTHASH={git_shorthash}");
println!("cargo:rustc-env={prefix}GIT_COMMIT_DATE={git_date}"); println!("cargo:rustc-env={prefix}GIT_COMMIT_DATE={git_date}");
println!("cargo:rustc-env={prefix}GIT_COMMIT_DATETIME={git_datetime}"); println!("cargo:rustc-env={prefix}GIT_COMMIT_DATETIME={git_datetime}");
println!("cargo:rustc-env={prefix}GIT_COMMIT_DESCRIPTION={git_description}");
} }
} }

View file

@ -1,6 +1,6 @@
[package] [package]
name = "watchexec-cli" name = "watchexec-cli"
version = "2.2.0" version = "1.25.0"
authors = ["Félix Saparelli <felix@passcod.name>", "Matt Green <mattgreenrocks@gmail.com>"] authors = ["Félix Saparelli <felix@passcod.name>", "Matt Green <mattgreenrocks@gmail.com>"]
license = "Apache-2.0" license = "Apache-2.0"
@ -20,57 +20,51 @@ name = "watchexec"
path = "src/main.rs" path = "src/main.rs"
[dependencies] [dependencies]
ahash = "0.8.6" # needs to be in sync with jaq's argfile = "0.1.6"
argfile = "0.2.0"
chrono = "0.4.31" chrono = "0.4.31"
clap_complete = "4.4.4" clap_complete = "4.4.4"
clap_complete_nushell = "4.4.2" clap_complete_nushell = "4.4.2"
clap_mangen = "0.2.15" clap_mangen = "0.2.15"
clearscreen = "3.0.0" clearscreen = "2.0.1"
dashmap = "6.1.0"
dirs = "5.0.0" dirs = "5.0.0"
dunce = "1.0.4"
futures = "0.3.29" futures = "0.3.29"
humantime = "2.1.0" humantime = "2.1.0"
indexmap = "2.2.6" # needs to be in sync with jaq's
is-terminal = "0.4.4" is-terminal = "0.4.4"
jaq-core = "1.2.1"
jaq-interpret = "1.2.1"
jaq-parse = "1.0.2"
jaq-std = "1.2.1"
jaq-syn = "1.1.0"
notify-rust = "4.9.0" notify-rust = "4.9.0"
once_cell = "1.17.1"
serde_json = "1.0.107" serde_json = "1.0.107"
tempfile = "3.8.1" tempfile = "3.8.1"
termcolor = "1.4.0" termcolor = "1.4.0"
tracing = "0.1.40" tracing = "0.1.40"
tracing-appender = "0.2.3" which = "5.0.0"
which = "6.0.1"
[dependencies.blake3] [dev-dependencies]
version = "1.3.3" tracing-test = "0.1"
features = ["rayon"] uuid = { workspace = true, features = [ "v4", "fast-rng" ] }
rand = { workspace = true }
[dependencies.command-group]
version = "2.1.0"
features = ["with-tokio"]
[dependencies.clap] [dependencies.clap]
version = "4.4.7" version = "4.4.7"
features = ["cargo", "derive", "env", "wrap_help"] features = ["cargo", "derive", "env", "wrap_help"]
[dependencies.console-subscriber] [dependencies.console-subscriber]
version = "0.4.0" version = "0.2.0"
optional = true optional = true
[dependencies.eyra] [dependencies.eyra]
version = "0.19.0" version = "0.16.8"
features = ["log", "env_logger"] features = ["log", "env_logger"]
optional = true optional = true
[dependencies.ignore-files] [dependencies.ignore-files]
version = "3.0.2" version = "2.1.0"
path = "../ignore-files" path = "../ignore-files"
[dependencies.miette] [dependencies.miette]
version = "7.2.0" version = "5.3.0"
features = ["fancy"] features = ["fancy"]
[dependencies.pid1] [dependencies.pid1]
@ -78,24 +72,24 @@ version = "0.1.1"
optional = true optional = true
[dependencies.project-origins] [dependencies.project-origins]
version = "1.4.0" version = "1.3.0"
path = "../project-origins" path = "../project-origins"
[dependencies.watchexec] [dependencies.watchexec]
version = "5.0.0" version = "3.0.1"
path = "../lib" path = "../lib"
[dependencies.watchexec-events] [dependencies.watchexec-events]
version = "4.0.0" version = "2.0.1"
path = "../events" path = "../events"
features = ["serde"] features = ["serde"]
[dependencies.watchexec-signals] [dependencies.watchexec-signals]
version = "4.0.0" version = "2.1.0"
path = "../signals" path = "../signals"
[dependencies.watchexec-filterer-globset] [dependencies.watchexec-filterer-globset]
version = "6.0.0" version = "3.0.0"
path = "../filterer/globset" path = "../filterer/globset"
[dependencies.tokio] [dependencies.tokio]
@ -127,14 +121,9 @@ mimalloc = "0.1.39"
embed-resource = "2.4.0" embed-resource = "2.4.0"
[build-dependencies.bosion] [build-dependencies.bosion]
version = "1.1.1" version = "1.0.2"
path = "../bosion" path = "../bosion"
[dev-dependencies]
tracing-test = "0.2.4"
uuid = { workspace = true, features = [ "v4", "fast-rng" ] }
rand = { workspace = true }
[features] [features]
default = ["pid1"] default = ["pid1"]
@ -168,9 +157,9 @@ assets = [
["../../target/release/watchexec", "usr/bin/watchexec", "755"], ["../../target/release/watchexec", "usr/bin/watchexec", "755"],
["README.md", "usr/share/doc/watchexec/README", "644"], ["README.md", "usr/share/doc/watchexec/README", "644"],
["../../doc/watchexec.1.md", "usr/share/doc/watchexec/watchexec.1.md", "644"], ["../../doc/watchexec.1.md", "usr/share/doc/watchexec/watchexec.1.md", "644"],
["../../doc/watchexec.1", "usr/share/man/man1/watchexec.1", "644"], ["../../doc/watchexec.1", "usr/share/man/man1/watchexec.1.html", "644"],
["../../completions/bash", "usr/share/bash-completion/completions/watchexec", "644"], ["../../completions/bash", "usr/share/bash-completion/completions/watchexec", "644"],
["../../completions/fish", "usr/share/fish/vendor_completions.d/watchexec.fish", "644"], ["../../completions/fish", "usr/share/fish/completions/watchexec.fish", "644"],
["../../completions/zsh", "usr/share/zsh/site-functions/_watchexec", "644"], ["../../completions/zsh", "usr/share/zsh/site-functions/_watchexec", "644"],
["../../doc/logo.svg", "usr/share/icons/hicolor/scalable/apps/watchexec.svg", "644"], ["../../doc/logo.svg", "usr/share/icons/hicolor/scalable/apps/watchexec.svg", "644"],
] ]
@ -180,9 +169,9 @@ assets = [
{ source = "../../target/release/watchexec", dest = "/usr/bin/watchexec", mode = "755" }, { source = "../../target/release/watchexec", dest = "/usr/bin/watchexec", mode = "755" },
{ source = "README.md", dest = "/usr/share/doc/watchexec/README", mode = "644", doc = true }, { source = "README.md", dest = "/usr/share/doc/watchexec/README", mode = "644", doc = true },
{ source = "../../doc/watchexec.1.md", dest = "/usr/share/doc/watchexec/watchexec.1.md", mode = "644", doc = true }, { source = "../../doc/watchexec.1.md", dest = "/usr/share/doc/watchexec/watchexec.1.md", mode = "644", doc = true },
{ source = "../../doc/watchexec.1", dest = "/usr/share/man/man1/watchexec.1", mode = "644" }, { source = "../../doc/watchexec.1", dest = "/usr/share/man/man1/watchexec.1.html", mode = "644" },
{ source = "../../completions/bash", dest = "/usr/share/bash-completion/completions/watchexec", mode = "644" }, { source = "../../completions/bash", dest = "/usr/share/bash-completion/completions/watchexec", mode = "644" },
{ source = "../../completions/fish", dest = "/usr/share/fish/vendor_completions.d/watchexec.fish", mode = "644" }, { source = "../../completions/fish", dest = "/usr/share/fish/completions/watchexec.fish", mode = "644" },
{ source = "../../completions/zsh", dest = "/usr/share/zsh/site-functions/_watchexec", mode = "644" }, { source = "../../completions/zsh", dest = "/usr/share/zsh/site-functions/_watchexec", mode = "644" },
{ source = "../../doc/logo.svg", dest = "/usr/share/icons/hicolor/scalable/apps/watchexec.svg", mode = "644" }, { source = "../../doc/logo.svg", dest = "/usr/share/icons/hicolor/scalable/apps/watchexec.svg", mode = "644" },
# set conf = true for config file when that lands # set conf = true for config file when that lands

View file

@ -37,7 +37,7 @@ Example use cases:
These variables may contain multiple paths: these are separated by the platform's path separator, as with the `PATH` system environment variable. On Unix that is `:`, and on Windows `;`. Within each variable, paths are deduplicated and sorted in binary order (i.e. neither Unicode nor locale aware). These variables may contain multiple paths: these are separated by the platform's path separator, as with the `PATH` system environment variable. On Unix that is `:`, and on Windows `;`. Within each variable, paths are deduplicated and sorted in binary order (i.e. neither Unicode nor locale aware).
This can be disabled with `--emit-events=none` or changed to JSON events on STDIN with `--emit-events=json-stdio`. This can be disabled or limited with `--no-environment` (doesn't set any of these variables) and `--no-meta` (ignores metadata changes).
## Anti-Features ## Anti-Features

View file

@ -1,9 +1,7 @@
pre-release-commit-message = "release: cli v{{version}}" pre-release-commit-message = "release: cli v{{version}}"
tag-prefix = "" tag-prefix = "cli-"
tag-message = "watchexec {{version}}" tag-message = "watchexec {{version}}"
pre-release-hook = ["sh", "-c", "cd ../.. && bin/completions && bin/manpage"]
[[pre-release-replacements]] [[pre-release-replacements]]
file = "watchexec.exe.manifest" file = "watchexec.exe.manifest"
search = "^ version=\"[\\d.]+[.]0\"" search = "^ version=\"[\\d.]+[.]0\""

View file

@ -1,32 +1,17 @@
use std::{ use std::{
collections::BTreeSet,
ffi::{OsStr, OsString}, ffi::{OsStr, OsString},
mem::take, path::PathBuf,
path::{Path, PathBuf},
str::FromStr, str::FromStr,
time::Duration, time::Duration,
}; };
use dunce::canonicalize;
use clap::{ use clap::{
builder::TypedValueParser, error::ErrorKind, Arg, Command, CommandFactory, Parser, ValueEnum, builder::TypedValueParser, error::ErrorKind, Arg, ArgAction, Command, CommandFactory, Parser,
ValueHint, ValueEnum, ValueHint,
}; };
use miette::{IntoDiagnostic, Result}; use watchexec::paths::PATH_SEPARATOR;
use tokio::{
fs::File,
io::{AsyncBufReadExt, AsyncReadExt, BufReader},
};
use tracing::{debug, info, trace, warn};
use tracing_appender::non_blocking::WorkerGuard;
use watchexec::{paths::PATH_SEPARATOR, sources::fs::WatchedPath};
use watchexec_signals::Signal; use watchexec_signals::Signal;
use crate::filterer::parse::parse_filter_program;
mod logging;
const OPTSET_FILTERING: &str = "Filtering"; const OPTSET_FILTERING: &str = "Filtering";
const OPTSET_COMMAND: &str = "Command"; const OPTSET_COMMAND: &str = "Command";
const OPTSET_DEBUGGING: &str = "Debugging"; const OPTSET_DEBUGGING: &str = "Debugging";
@ -139,42 +124,7 @@ pub struct Args {
value_hint = ValueHint::AnyPath, value_hint = ValueHint::AnyPath,
value_name = "PATH", value_name = "PATH",
)] )]
pub recursive_paths: Vec<PathBuf>, pub paths: Vec<PathBuf>,
/// Watch a specific directory, non-recursively
///
/// Unlike '-w', folders watched with this option are not recursed into.
///
/// This option can be specified multiple times to watch multiple directories non-recursively.
#[arg(
short = 'W',
long = "watch-non-recursive",
help_heading = OPTSET_FILTERING,
value_hint = ValueHint::AnyPath,
value_name = "PATH",
)]
pub non_recursive_paths: Vec<PathBuf>,
/// Watch files and directories from a file
///
/// Each line in the file will be interpreted as if given to '-w'.
///
/// For more complex uses (like watching non-recursively), use the argfile capability: build a
/// file containing command-line options and pass it to watchexec with `@path/to/argfile`.
///
/// The special value '-' will read from STDIN; this in incompatible with '--stdin-quit'.
#[arg(
short = 'F',
long,
help_heading = OPTSET_FILTERING,
value_hint = ValueHint::AnyPath,
value_name = "PATH",
)]
pub watch_file: Option<PathBuf>,
#[doc(hidden)]
#[arg(skip)]
pub paths: Vec<WatchedPath>,
/// Clear screen before running command /// Clear screen before running command
/// ///
@ -191,30 +141,44 @@ pub struct Args {
/// What to do when receiving events while the command is running /// What to do when receiving events while the command is running
/// ///
/// Default is to 'do-nothing', which ignores events while the command is running, so that /// Default is to 'queue' up events and run the command once again when the previous run has
/// changes that occur due to the command are ignored, like compilation outputs. You can also /// finished. You can also use 'do-nothing', which ignores events while the command is running
/// use 'queue' which will run the command once again when the current run has finished if any /// and may be useful to avoid spurious changes made by that command, or 'restart', which
/// events occur while it's running, or 'restart', which terminates the running command and starts /// terminates the running command and starts a new one. Finally, there's 'signal', which only
/// a new one. Finally, there's 'signal', which only sends a signal; this can be useful with /// sends a signal; this can be useful with programs that can reload their configuration without
/// programs that can reload their configuration without a full restart. /// a full restart.
/// ///
/// The signal can be specified with the '--signal' option. /// The signal can be specified with the '--signal' option.
///
/// Note that this option is scheduled to change its default to 'do-nothing' in the next major
/// release. File an issue if you have any concerns.
#[arg( #[arg(
short, short,
long, long,
default_value = "do-nothing", default_value = "queue",
hide_default_value = true, hide_default_value = true,
value_name = "MODE" value_name = "MODE"
)] )]
pub on_busy_update: OnBusyUpdate, pub on_busy_update: OnBusyUpdate,
/// Deprecated alias for '--on-busy-update=do-nothing'
///
/// This option is deprecated and will be removed in the next major release.
#[arg(
long,
short = 'W',
hide = true,
conflicts_with_all = ["on_busy_update", "restart"],
)]
pub watch_when_idle: bool,
/// Restart the process if it's still running /// Restart the process if it's still running
/// ///
/// This is a shorthand for '--on-busy-update=restart'. /// This is a shorthand for '--on-busy-update=restart'.
#[arg( #[arg(
short, short,
long, long,
conflicts_with_all = ["on_busy_update"], conflicts_with_all = ["on_busy_update", "watch_when_idle"],
)] )]
pub restart: bool, pub restart: bool,
@ -231,11 +195,15 @@ pub struct Args {
#[arg( #[arg(
short, short,
long, long,
conflicts_with_all = ["restart"], conflicts_with_all = ["restart", "watch_when_idle"],
value_name = "SIGNAL" value_name = "SIGNAL"
)] )]
pub signal: Option<Signal>, pub signal: Option<Signal>,
/// Hidden legacy shorthand for '--signal=kill'.
#[arg(short, long, hide = true)]
pub kill: bool,
/// Signal to send to stop the command /// Signal to send to stop the command
/// ///
/// This is used by 'restart' and 'signal' modes of '--on-busy-update' (unless '--signal' is /// This is used by 'restart' and 'signal' modes of '--on-busy-update' (unless '--signal' is
@ -262,15 +230,14 @@ pub struct Args {
/// it is forcefully terminated. /// it is forcefully terminated.
/// ///
/// Takes a unit-less value in seconds, or a time span value such as "5min 20s". /// Takes a unit-less value in seconds, or a time span value such as "5min 20s".
/// Providing a unit-less value is deprecated and will warn; it will be an error in the future.
/// ///
/// The default is 10 seconds. Set to 0 to immediately force-kill the command. /// The default is 60 seconds. Set to 0 to immediately force-kill the command.
/// ///
/// This has no practical effect on Windows as the command is always forcefully terminated; see /// This has no practical effect on Windows as the command is always forcefully terminated; see
/// '--stop-signal' for why. /// '--stop-signal' for why.
#[arg( #[arg(
long, long,
default_value = "10s", default_value = "60",
hide_default_value = true, hide_default_value = true,
value_name = "TIMEOUT" value_name = "TIMEOUT"
)] )]
@ -310,13 +277,12 @@ pub struct Args {
/// every accumulated event will build up in memory. /// every accumulated event will build up in memory.
/// ///
/// Takes a unit-less value in milliseconds, or a time span value such as "5sec 20ms". /// Takes a unit-less value in milliseconds, or a time span value such as "5sec 20ms".
/// Providing a unit-less value is deprecated and will warn; it will be an error in the future.
/// ///
/// The default is 50 milliseconds. Setting to 0 is highly discouraged. /// The default is 50 milliseconds. Setting to 0 is highly discouraged.
#[arg( #[arg(
long, long,
short, short,
default_value = "50ms", default_value = "50",
hide_default_value = true, hide_default_value = true,
value_name = "TIMEOUT" value_name = "TIMEOUT"
)] )]
@ -361,10 +327,14 @@ pub struct Args {
/// VCS ignore files (Git, Mercurial, Bazaar, Darcs, Fossil) are only used if the corresponding /// VCS ignore files (Git, Mercurial, Bazaar, Darcs, Fossil) are only used if the corresponding
/// VCS is discovered to be in use for the project/origin. For example, a .bzrignore in a Git /// VCS is discovered to be in use for the project/origin. For example, a .bzrignore in a Git
/// repository will be discarded. /// repository will be discarded.
///
/// Note that this was previously called '--no-ignore', but that's now deprecated and its use is
/// discouraged, as it may be repurposed in the future.
#[arg( #[arg(
long, long,
help_heading = OPTSET_FILTERING, help_heading = OPTSET_FILTERING,
verbatim_doc_comment, verbatim_doc_comment,
alias = "no-ignore", // deprecated
)] )]
pub no_project_ignore: bool, pub no_project_ignore: bool,
@ -439,7 +409,6 @@ pub struct Args {
/// but portable and slightly more efficient. /// but portable and slightly more efficient.
/// ///
/// Takes a unit-less value in seconds, or a time span value such as "2min 5s". /// Takes a unit-less value in seconds, or a time span value such as "2min 5s".
/// Providing a unit-less value is deprecated and will warn; it will be an error in the future.
#[arg(long, value_name = "DURATION")] #[arg(long, value_name = "DURATION")]
pub delay_run: Option<TimeSpan>, pub delay_run: Option<TimeSpan>,
@ -452,7 +421,6 @@ pub struct Args {
/// ///
/// Optionally takes a unit-less value in milliseconds, or a time span value such as "2s 500ms", /// Optionally takes a unit-less value in milliseconds, or a time span value such as "2s 500ms",
/// to use as the polling interval. If not specified, the default is 30 seconds. /// to use as the polling interval. If not specified, the default is 30 seconds.
/// Providing a unit-less value is deprecated and will warn; it will be an error in the future.
/// ///
/// Aliased as '--force-poll'. /// Aliased as '--force-poll'.
#[arg( #[arg(
@ -466,18 +434,18 @@ pub struct Args {
/// Use a different shell /// Use a different shell
/// ///
/// By default, Watchexec will use '$SHELL' if it's defined or a default of 'sh' on Unix-likes, /// By default, Watchexec will use 'sh' on unix and 'cmd' (CMD.EXE) on Windows. With this, you
/// and either 'pwsh', 'powershell', or 'cmd' (CMD.EXE) on Windows, depending on what Watchexec /// can override that and use a different shell, for example one with more features or one which
/// detects is the running shell. /// has your custom aliases and functions.
///
/// With this option, you can override that and use a different shell, for example one with more
/// features or one which has your custom aliases and functions.
/// ///
/// If the value has spaces, it is parsed as a command line, and the first word used as the /// If the value has spaces, it is parsed as a command line, and the first word used as the
/// shell program, with the rest as arguments to the shell. /// shell program, with the rest as arguments to the shell.
/// ///
/// The command is run with the '-c' flag (except for 'cmd' on Windows, where it's '/C'). /// The command is run with the '-c' flag (except for 'cmd' on Windows, where it's '/C').
/// ///
/// Note that the default shell will change at the next major release: the value of '$SHELL'
/// will be respected, falling back to 'sh' on unix and to PowerShell on Windows.
///
/// The special value 'none' can be used to disable shell use entirely. In that case, the /// The special value 'none' can be used to disable shell use entirely. In that case, the
/// command provided to Watchexec will be parsed, with the first word being the executable and /// command provided to Watchexec will be parsed, with the first word being the executable and
/// the rest being the arguments, and executed directly. Note that this parsing is rudimentary, /// the rest being the arguments, and executed directly. Note that this parsing is rudimentary,
@ -497,7 +465,7 @@ pub struct Args {
/// ///
/// $ watchexec --shell=pwsh -- Test-Connection localhost /// $ watchexec --shell=pwsh -- Test-Connection localhost
/// ///
/// Use with CMD.exe: /// Use with cmd (default on Windows):
/// ///
/// $ watchexec --shell=cmd -- dir /// $ watchexec --shell=cmd -- dir
/// ///
@ -515,28 +483,41 @@ pub struct Args {
)] )]
pub shell: Option<String>, pub shell: Option<String>,
/// Shorthand for '--shell=none' /// Don't use a shell
///
/// This is a shorthand for '--shell=none'.
#[arg( #[arg(
short = 'n', short = 'n',
help_heading = OPTSET_COMMAND, help_heading = OPTSET_COMMAND,
)] )]
pub no_shell: bool, pub no_shell: bool,
/// Deprecated shorthand for '--emit-events=none' /// Don't use a shell
///
/// This is a deprecated alias for '--shell=none'.
#[arg(
long,
hide = true,
help_heading = OPTSET_COMMAND,
alias = "no-shell", // deprecated
)]
pub no_shell_long: bool,
/// Shorthand for '--emit-events=none'
/// ///
/// This is the old way to disable event emission into the environment. See '--emit-events' for /// This is the old way to disable event emission into the environment. See '--emit-events' for
/// more. Will be removed at next major release. /// more.
#[arg( #[arg(
long, long,
help_heading = OPTSET_COMMAND, help_heading = OPTSET_COMMAND,
hide = true, // deprecated // TODO: deprecate then remove
)] )]
pub no_environment: bool, pub no_environment: bool,
/// Configure event emission /// Configure event emission
/// ///
/// Watchexec can emit event information when running a command, which can be used by the child /// Watchexec emits event information when running a command, which can be used by the command
/// process to target specific changed files. /// to target specific changed files.
/// ///
/// One thing to take care with is assuming inherent behaviour where there is only chance. /// One thing to take care with is assuming inherent behaviour where there is only chance.
/// Notably, it could appear as if the `RENAMED` variable contains both the original and the new /// Notably, it could appear as if the `RENAMED` variable contains both the original and the new
@ -547,14 +528,32 @@ pub struct Args {
/// whether it was the old or new isn't known), rename events might split across two debouncing /// whether it was the old or new isn't known), rename events might split across two debouncing
/// boundaries, and so on. /// boundaries, and so on.
/// ///
/// This option controls where that information is emitted. It defaults to 'none', which doesn't /// This option controls where that information is emitted. It defaults to 'environment', which
/// emit event information at all. The other options are 'environment' (deprecated), 'stdio', /// sets environment variables with the paths of the affected files, for filesystem events:
/// 'file', 'json-stdio', and 'json-file'.
/// ///
/// The 'stdio' and 'file' modes are text-based: 'stdio' writes absolute paths to the stdin of /// $WATCHEXEC_COMMON_PATH is set to the longest common path of all of the below variables,
/// the command, one per line, each prefixed with `create:`, `remove:`, `rename:`, `modify:`, /// and so should be prepended to each path to obtain the full/real path. Then:
/// or `other:`, then closes the handle; 'file' writes the same thing to a temporary file, and ///
/// its path is given with the $WATCHEXEC_EVENTS_FILE environment variable. /// - $WATCHEXEC_CREATED_PATH is set when files/folders were created
/// - $WATCHEXEC_REMOVED_PATH is set when files/folders were removed
/// - $WATCHEXEC_RENAMED_PATH is set when files/folders were renamed
/// - $WATCHEXEC_WRITTEN_PATH is set when files/folders were modified
/// - $WATCHEXEC_META_CHANGED_PATH is set when files/folders' metadata were modified
/// - $WATCHEXEC_OTHERWISE_CHANGED_PATH is set for every other kind of pathed event
///
/// Multiple paths are separated by the system path separator, ';' on Windows and ':' on unix.
/// Within each variable, paths are deduplicated and sorted in binary order (i.e. neither
/// Unicode nor locale aware).
///
/// This is the legacy mode and will be deprecated and removed in the future. The environment of
/// a process is a very restricted space, while also limited in what it can usefully represent.
/// Large numbers of files will either cause the environment to be truncated, or may error or
/// crash the process entirely.
///
/// Two new modes are available: 'stdio' writes absolute paths to the stdin of the command,
/// one per line, each prefixed with `create:`, `remove:`, `rename:`, `modify:`, or `other:`,
/// then closes the handle; 'file' writes the same thing to a temporary file, and its path is
/// given with the $WATCHEXEC_EVENTS_FILE environment variable.
/// ///
/// There are also two JSON modes, which are based on JSON objects and can represent the full /// There are also two JSON modes, which are based on JSON objects and can represent the full
/// set of events Watchexec handles. Here's an example of a folder being created on Linux: /// set of events Watchexec handles. Here's an example of a folder being created on Linux:
@ -611,33 +610,13 @@ pub struct Args {
/// events to it, and provide the path to the file with the $WATCHEXEC_EVENTS_FILE /// events to it, and provide the path to the file with the $WATCHEXEC_EVENTS_FILE
/// environment variable. /// environment variable.
/// ///
/// Finally, the 'environment' mode was the default until 2.0. It sets environment variables /// Finally, the special 'none' mode will disable event emission entirely.
/// with the paths of the affected files, for filesystem events: // TODO: when deprecating, make the none mode the default.
///
/// $WATCHEXEC_COMMON_PATH is set to the longest common path of all of the below variables,
/// and so should be prepended to each path to obtain the full/real path. Then:
///
/// - $WATCHEXEC_CREATED_PATH is set when files/folders were created
/// - $WATCHEXEC_REMOVED_PATH is set when files/folders were removed
/// - $WATCHEXEC_RENAMED_PATH is set when files/folders were renamed
/// - $WATCHEXEC_WRITTEN_PATH is set when files/folders were modified
/// - $WATCHEXEC_META_CHANGED_PATH is set when files/folders' metadata were modified
/// - $WATCHEXEC_OTHERWISE_CHANGED_PATH is set for every other kind of pathed event
///
/// Multiple paths are separated by the system path separator, ';' on Windows and ':' on unix.
/// Within each variable, paths are deduplicated and sorted in binary order (i.e. neither
/// Unicode nor locale aware).
///
/// This is the legacy mode, is deprecated, and will be removed in the future. The environment
/// is a very restricted space, while also limited in what it can usefully represent. Large
/// numbers of files will either cause the environment to be truncated, or may error or crash
/// the process entirely. The $WATCHEXEC_COMMON_PATH is also unintuitive, as demonstrated by the
/// multiple confused queries that have landed in my inbox over the years.
#[arg( #[arg(
long, long,
help_heading = OPTSET_COMMAND, help_heading = OPTSET_COMMAND,
verbatim_doc_comment, verbatim_doc_comment,
default_value = "none", default_value = "environment",
hide_default_value = true, hide_default_value = true,
value_name = "MODE", value_name = "MODE",
required_if_eq("only_emit_events", "true"), required_if_eq("only_emit_events", "true"),
@ -679,31 +658,12 @@ pub struct Args {
/// By default, Watchexec will run the command in a process group, so that signals and /// By default, Watchexec will run the command in a process group, so that signals and
/// terminations are sent to all processes in the group. Sometimes that's not what you want, and /// terminations are sent to all processes in the group. Sometimes that's not what you want, and
/// you can disable the behaviour with this option. /// you can disable the behaviour with this option.
///
/// Deprecated, use '--wrap-process=none' instead.
#[arg( #[arg(
long, long,
help_heading = OPTSET_COMMAND, help_heading = OPTSET_COMMAND,
)] )]
pub no_process_group: bool, pub no_process_group: bool,
/// Configure how the process is wrapped
///
/// By default, Watchexec will run the command in a process group in Unix, and in a Job Object
/// in Windows.
///
/// Some Unix programs prefer running in a session, while others do not work in a process group.
///
/// Use 'group' to use a process group, 'session' to use a process session, and 'none' to run
/// the command directly. On Windows, either of 'group' or 'session' will use a Job Object.
#[arg(
long,
help_heading = OPTSET_COMMAND,
value_name = "MODE",
default_value = "group",
)]
pub wrap_process: WrapMode,
/// Testing only: exit Watchexec after the first run /// Testing only: exit Watchexec after the first run
#[arg(short = '1', hide = true)] #[arg(short = '1', hide = true)]
pub once: bool, pub once: bool,
@ -720,8 +680,6 @@ pub struct Args {
pub notify: bool, pub notify: bool,
/// When to use terminal colours /// When to use terminal colours
///
/// Setting the environment variable `NO_COLOR` to any value is equivalent to `--color=never`.
#[arg( #[arg(
long, long,
help_heading = OPTSET_OUTPUT, help_heading = OPTSET_OUTPUT,
@ -831,78 +789,6 @@ pub struct Args {
)] )]
pub filter_files: Vec<PathBuf>, pub filter_files: Vec<PathBuf>,
/// [experimental] Filter programs.
///
/// /!\ This option is EXPERIMENTAL and may change and/or vanish without notice.
///
/// Provide your own custom filter programs in jaq (similar to jq) syntax. Programs are given
/// an event in the same format as described in '--emit-events-to' and must return a boolean.
/// Invalid programs will make watchexec fail to start; use '-v' to see program runtime errors.
///
/// In addition to the jaq stdlib, watchexec adds some custom filter definitions:
///
/// - 'path | file_meta' returns file metadata or null if the file does not exist.
///
/// - 'path | file_size' returns the size of the file at path, or null if it does not exist.
///
/// - 'path | file_read(bytes)' returns a string with the first n bytes of the file at path.
/// If the file is smaller than n bytes, the whole file is returned. There is no filter to
/// read the whole file at once to encourage limiting the amount of data read and processed.
///
/// - 'string | hash', and 'path | file_hash' return the hash of the string or file at path.
/// No guarantee is made about the algorithm used: treat it as an opaque value.
///
/// - 'any | kv_store(key)', 'kv_fetch(key)', and 'kv_clear' provide a simple key-value store.
/// Data is kept in memory only, there is no persistence. Consistency is not guaranteed.
///
/// - 'any | printout', 'any | printerr', and 'any | log(level)' will print or log any given
/// value to stdout, stderr, or the log (levels = error, warn, info, debug, trace), and
/// pass the value through (so '[1] | log("debug") | .[]' will produce a '1' and log '[1]').
///
/// All filtering done with such programs, and especially those using kv or filesystem access,
/// is much slower than the other filtering methods. If filtering is too slow, events will back
/// up and stall watchexec. Take care when designing your filters.
///
/// If the argument to this option starts with an '@', the rest of the argument is taken to be
/// the path to a file containing a jaq program.
///
/// Jaq programs are run in order, after all other filters, and short-circuit: if a filter (jaq
/// or not) rejects an event, execution stops there, and no other filters are run. Additionally,
/// they stop after outputting the first value, so you'll want to use 'any' or 'all' when
/// iterating, otherwise only the first item will be processed, which can be quite confusing!
///
/// Find user-contributed programs or submit your own useful ones at
/// <https://github.com/watchexec/watchexec/discussions/592>.
///
/// ## Examples:
///
/// Regexp ignore filter on paths:
///
/// 'all(.tags[] | select(.kind == "path"); .absolute | test("[.]test[.]js$")) | not'
///
/// Pass any event that creates a file:
///
/// 'any(.tags[] | select(.kind == "fs"); .simple == "create")'
///
/// Pass events that touch executable files:
///
/// 'any(.tags[] | select(.kind == "path" && .filetype == "file"); .absolute | metadata | .executable)'
///
/// Ignore files that start with shebangs:
///
/// 'any(.tags[] | select(.kind == "path" && .filetype == "file"); .absolute | read(2) == "#!") | not'
#[arg(
long = "filter-prog",
short = 'j',
help_heading = OPTSET_FILTERING,
value_name = "EXPRESSION",
)]
pub filter_programs: Vec<String>,
#[doc(hidden)]
#[clap(skip)]
pub filter_programs_parsed: Vec<jaq_syn::Main>,
/// Filename patterns to filter out /// Filename patterns to filter out
/// ///
/// Provide a glob-like filter pattern, and events for files matching the pattern will be /// Provide a glob-like filter pattern, and events for files matching the pattern will be
@ -968,13 +854,55 @@ pub struct Args {
/// This prints the events that triggered the action when handling it (after debouncing), in a /// This prints the events that triggered the action when handling it (after debouncing), in a
/// human readable form. This is useful for debugging filters. /// human readable form. This is useful for debugging filters.
/// ///
/// Use '-vvv' instead when you need more diagnostic information. /// Use '-v' when you need more diagnostic information.
#[arg( #[arg(
long, long,
alias = "changes-only", // deprecated
help_heading = OPTSET_DEBUGGING, help_heading = OPTSET_DEBUGGING,
)] )]
pub print_events: bool, pub print_events: bool,
/// Set diagnostic log level
///
/// This enables diagnostic logging, which is useful for investigating bugs or gaining more
/// insight into faulty filters or "missing" events. Use multiple times to increase verbosity.
///
/// Goes up to '-vvvv'. When submitting bug reports, default to a '-vvv' log level.
///
/// You may want to use with '--log-file' to avoid polluting your terminal.
///
/// Setting $RUST_LOG also works, and takes precedence, but is not recommended. However, using
/// $RUST_LOG is the only way to get logs from before these options are parsed.
#[arg(
long,
short,
help_heading = OPTSET_DEBUGGING,
action = ArgAction::Count,
num_args = 0,
)]
pub verbose: Option<u8>,
/// Write diagnostic logs to a file
///
/// This writes diagnostic logs to a file, instead of the terminal, in JSON format. If a log
/// level was not already specified, this will set it to '-vvv'.
///
/// If a path is not provided, the default is the working directory. Note that with
/// '--ignore-nothing', the write events to the log will likely get picked up by Watchexec,
/// causing a loop; prefer setting a path outside of the watched directory.
///
/// If the path provided is a directory, a file will be created in that directory. The file name
/// will be the current date and time, in the format 'watchexec.YYYY-MM-DDTHH-MM-SSZ.log'.
#[arg(
long,
help_heading = OPTSET_DEBUGGING,
num_args = 0..=1,
default_missing_value = ".",
value_hint = ValueHint::AnyPath,
value_name = "PATH",
)]
pub log_file: Option<PathBuf>,
/// Show the manual page /// Show the manual page
/// ///
/// This shows the manual page for Watchexec, if the output is a terminal and the 'man' program /// This shows the manual page for Watchexec, if the output is a terminal and the 'man' program
@ -999,17 +927,16 @@ pub struct Args {
conflicts_with_all = ["command", "manual"], conflicts_with_all = ["command", "manual"],
)] )]
pub completions: Option<ShellCompletion>, pub completions: Option<ShellCompletion>,
#[command(flatten)]
pub logging: logging::LoggingArgs,
} }
#[derive(Clone, Copy, Debug, Default, ValueEnum)] #[derive(Clone, Copy, Debug, Default, ValueEnum)]
pub enum EmitEvents { pub enum EmitEvents {
#[default] #[default]
Environment, Environment,
#[value(alias("stdin"))]
Stdio, Stdio,
File, File,
#[value(alias("json-stdin"))]
JsonStdio, JsonStdio,
JsonFile, JsonFile,
None, None,
@ -1024,14 +951,6 @@ pub enum OnBusyUpdate {
Signal, Signal,
} }
#[derive(Clone, Copy, Debug, Default, ValueEnum)]
pub enum WrapMode {
#[default]
Group,
Session,
None,
}
#[derive(Clone, Copy, Debug, Default, ValueEnum)] #[derive(Clone, Copy, Debug, Default, ValueEnum)]
pub enum ClearMode { pub enum ClearMode {
#[default] #[default]
@ -1076,10 +995,7 @@ impl<const UNITLESS_NANOS_MULTIPLIER: u64> FromStr for TimeSpan<UNITLESS_NANOS_M
s.parse::<u64>() s.parse::<u64>()
.map_or_else( .map_or_else(
|_| humantime::parse_duration(s), |_| humantime::parse_duration(s),
|unitless| { |unitless| Ok(Duration::from_nanos(unitless * UNITLESS_NANOS_MULTIPLIER)),
eprintln!("Warning: unitless time span values are deprecated and will be removed in an upcoming version");
Ok(Duration::from_nanos(unitless * UNITLESS_NANOS_MULTIPLIER))
},
) )
.map(TimeSpan) .map(TimeSpan)
} }
@ -1168,10 +1084,18 @@ fn expand_args_up_to_doubledash() -> Result<Vec<OsString>, std::io::Error> {
} }
#[inline] #[inline]
pub async fn get_args() -> Result<(Args, Option<WorkerGuard>)> { pub fn get_args() -> Args {
let prearg_logs = logging::preargs(); use tracing::{debug, warn};
if prearg_logs {
warn!("⚠ RUST_LOG environment variable set or hardcoded, logging options have no effect"); if std::env::var("RUST_LOG").is_ok() {
warn!("⚠ RUST_LOG environment variable set, logging options have no effect");
}
if let Ok(filt) = std::env::var("WATCHEXEC_FILTERER") {
warn!("WATCHEXEC_FILTERER is deprecated");
if filt == "tagged" {
eprintln!("Tagged filterer has been removed. Open an issue if you have no workaround.");
}
} }
debug!("expanding @argfile arguments if any"); debug!("expanding @argfile arguments if any");
@ -1180,17 +1104,6 @@ pub async fn get_args() -> Result<(Args, Option<WorkerGuard>)> {
debug!("parsing arguments"); debug!("parsing arguments");
let mut args = Args::parse_from(args); let mut args = Args::parse_from(args);
let log_guard = if !prearg_logs {
logging::postargs(&args.logging).await?
} else {
None
};
// https://no-color.org/
if args.color == ColourMode::Auto && std::env::var("NO_COLOR").is_ok() {
args.color = ColourMode::Never;
}
if args.ignore_nothing { if args.ignore_nothing {
args.no_global_ignore = true; args.no_global_ignore = true;
args.no_vcs_ignore = true; args.no_vcs_ignore = true;
@ -1199,22 +1112,22 @@ pub async fn get_args() -> Result<(Args, Option<WorkerGuard>)> {
args.no_discover_ignore = true; args.no_discover_ignore = true;
} }
if args.kill {
args.signal = Some(Signal::ForceStop);
}
if args.signal.is_some() { if args.signal.is_some() {
args.on_busy_update = OnBusyUpdate::Signal; args.on_busy_update = OnBusyUpdate::Signal;
} else if args.restart { } else if args.restart {
args.on_busy_update = OnBusyUpdate::Restart; args.on_busy_update = OnBusyUpdate::Restart;
} else if args.watch_when_idle {
args.on_busy_update = OnBusyUpdate::DoNothing;
} }
if args.no_environment { if args.no_environment {
warn!("--no-environment is deprecated");
args.emit_events_to = EmitEvents::None; args.emit_events_to = EmitEvents::None;
} }
if args.no_process_group {
warn!("--no-process-group is deprecated");
args.wrap_process = WrapMode::None;
}
if args.filter_fs_meta { if args.filter_fs_meta {
args.filter_fs_events = vec![ args.filter_fs_events = vec![
FsEvent::Create, FsEvent::Create,
@ -1237,108 +1150,6 @@ pub async fn get_args() -> Result<(Args, Option<WorkerGuard>)> {
.exit(); .exit();
} }
if args.stdin_quit && args.watch_file == Some(PathBuf::from("-")) { debug!(?args, "got arguments");
Args::command() args
.error(
ErrorKind::InvalidValue,
"stdin-quit cannot be used when --watch-file=-",
)
.exit();
}
let workdir = if let Some(w) = take(&mut args.workdir) {
w
} else {
let curdir = std::env::current_dir().into_diagnostic()?;
canonicalize(curdir).into_diagnostic()?
};
info!(path=?workdir, "effective working directory");
args.workdir = Some(workdir.clone());
let project_origin = if let Some(p) = take(&mut args.project_origin) {
p
} else {
crate::dirs::project_origin(&args).await?
};
info!(path=?project_origin, "effective project origin");
args.project_origin = Some(project_origin.clone());
if let Some(watch_file) = args.watch_file.as_ref() {
if watch_file == Path::new("-") {
let file = tokio::io::stdin();
let mut lines = BufReader::new(file).lines();
while let Ok(Some(line)) = lines.next_line().await {
args.recursive_paths.push(line.into());
}
} else {
let file = File::open(watch_file).await.into_diagnostic()?;
let mut lines = BufReader::new(file).lines();
while let Ok(Some(line)) = lines.next_line().await {
args.recursive_paths.push(line.into());
}
};
}
args.paths = take(&mut args.recursive_paths)
.into_iter()
.map(|path| {
{
if path.is_absolute() {
Ok(path)
} else {
canonicalize(project_origin.join(path)).into_diagnostic()
}
}
.map(WatchedPath::recursive)
})
.chain(take(&mut args.non_recursive_paths).into_iter().map(|path| {
{
if path.is_absolute() {
Ok(path)
} else {
canonicalize(project_origin.join(path)).into_diagnostic()
}
}
.map(WatchedPath::non_recursive)
}))
.collect::<Result<BTreeSet<_>>>()?
.into_iter()
.collect();
if args.paths.len() == 1
&& args
.paths
.first()
.map_or(false, |p| p.as_ref() == Path::new("/dev/null"))
{
info!("only path is /dev/null, not watching anything");
args.paths = Vec::new();
} else if args.paths.is_empty() {
info!("no paths, using current directory");
args.paths.push(args.workdir.clone().unwrap().into());
}
info!(paths=?args.paths, "effective watched paths");
for (n, prog) in args.filter_programs.iter_mut().enumerate() {
if let Some(progpath) = prog.strip_prefix('@') {
trace!(?n, path=?progpath, "reading filter program from file");
let mut progfile = File::open(&progpath).await.into_diagnostic()?;
let mut buf =
String::with_capacity(progfile.metadata().await.into_diagnostic()?.len() as _);
let bytes_read = progfile.read_to_string(&mut buf).await.into_diagnostic()?;
debug!(?n, path=?progpath, %bytes_read, "read filter program from file");
*prog = buf;
}
}
args.filter_programs_parsed = take(&mut args.filter_programs)
.into_iter()
.enumerate()
.map(parse_filter_program)
.collect::<Result<_, _>>()?;
debug_assert!(args.workdir.is_some());
debug_assert!(args.project_origin.is_some());
info!(?args, "got arguments");
Ok((args, log_guard))
} }

View file

@ -1,132 +0,0 @@
use std::{env::var, io::stderr, path::PathBuf};
use clap::{ArgAction, Parser, ValueHint};
use miette::{bail, Result};
use tokio::fs::metadata;
use tracing::{info, warn};
use tracing_appender::{non_blocking, non_blocking::WorkerGuard, rolling};
#[derive(Debug, Clone, Parser)]
pub struct LoggingArgs {
/// Set diagnostic log level
///
/// This enables diagnostic logging, which is useful for investigating bugs or gaining more
/// insight into faulty filters or "missing" events. Use multiple times to increase verbosity.
///
/// Goes up to '-vvvv'. When submitting bug reports, default to a '-vvv' log level.
///
/// You may want to use with '--log-file' to avoid polluting your terminal.
///
/// Setting $RUST_LOG also works, and takes precedence, but is not recommended. However, using
/// $RUST_LOG is the only way to get logs from before these options are parsed.
#[arg(
long,
short,
help_heading = super::OPTSET_DEBUGGING,
action = ArgAction::Count,
default_value = "0",
num_args = 0,
)]
pub verbose: u8,
/// Write diagnostic logs to a file
///
/// This writes diagnostic logs to a file, instead of the terminal, in JSON format. If a log
/// level was not already specified, this will set it to '-vvv'.
///
/// If a path is not provided, the default is the working directory. Note that with
/// '--ignore-nothing', the write events to the log will likely get picked up by Watchexec,
/// causing a loop; prefer setting a path outside of the watched directory.
///
/// If the path provided is a directory, a file will be created in that directory. The file name
/// will be the current date and time, in the format 'watchexec.YYYY-MM-DDTHH-MM-SSZ.log'.
#[arg(
long,
help_heading = super::OPTSET_DEBUGGING,
num_args = 0..=1,
default_missing_value = ".",
value_hint = ValueHint::AnyPath,
value_name = "PATH",
)]
pub log_file: Option<PathBuf>,
}
pub fn preargs() -> bool {
let mut log_on = false;
#[cfg(feature = "dev-console")]
match console_subscriber::try_init() {
Ok(_) => {
warn!("dev-console enabled");
log_on = true;
}
Err(e) => {
eprintln!("Failed to initialise tokio console, falling back to normal logging\n{e}")
}
}
if !log_on && var("RUST_LOG").is_ok() {
match tracing_subscriber::fmt::try_init() {
Ok(()) => {
warn!(RUST_LOG=%var("RUST_LOG").unwrap(), "logging configured from RUST_LOG");
log_on = true;
}
Err(e) => eprintln!("Failed to initialise logging with RUST_LOG, falling back\n{e}"),
}
}
log_on
}
pub async fn postargs(args: &LoggingArgs) -> Result<Option<WorkerGuard>> {
if args.verbose == 0 {
return Ok(None);
}
let (log_writer, guard) = if let Some(file) = &args.log_file {
let is_dir = metadata(&file).await.map_or(false, |info| info.is_dir());
let (dir, filename) = if is_dir {
(
file.to_owned(),
PathBuf::from(format!(
"watchexec.{}.log",
chrono::Utc::now().format("%Y-%m-%dT%H-%M-%SZ")
)),
)
} else if let (Some(parent), Some(file_name)) = (file.parent(), file.file_name()) {
(parent.into(), PathBuf::from(file_name))
} else {
bail!("Failed to determine log file name");
};
non_blocking(rolling::never(dir, filename))
} else {
non_blocking(stderr())
};
let mut builder = tracing_subscriber::fmt().with_env_filter(match args.verbose {
0 => unreachable!("checked by if earlier"),
1 => "warn",
2 => "info",
3 => "debug",
_ => "trace",
});
if args.verbose > 2 {
use tracing_subscriber::fmt::format::FmtSpan;
builder = builder.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE);
}
match if args.log_file.is_some() {
builder.json().with_writer(log_writer).try_init()
} else if args.verbose > 3 {
builder.pretty().with_writer(log_writer).try_init()
} else {
builder.with_writer(log_writer).try_init()
} {
Ok(()) => info!("logging initialised"),
Err(e) => eprintln!("Failed to initialise logging, continuing with none\n{e}"),
}
Ok(Some(guard))
}

View file

@ -1,10 +1,11 @@
use std::{ use std::{
borrow::Cow, borrow::Cow,
collections::HashMap, collections::HashMap,
env::var, env::current_dir,
ffi::{OsStr, OsString}, ffi::{OsStr, OsString},
fs::File, fs::File,
io::{IsTerminal, Write}, io::{IsTerminal, Write},
path::Path,
process::Stdio, process::Stdio,
sync::{ sync::{
atomic::{AtomicBool, AtomicU8, Ordering}, atomic::{AtomicBool, AtomicU8, Ordering},
@ -31,7 +32,7 @@ use watchexec_events::{Event, Keyboard, ProcessEnd, Tag};
use watchexec_signals::Signal; use watchexec_signals::Signal;
use crate::{ use crate::{
args::{Args, ClearMode, ColourMode, EmitEvents, OnBusyUpdate, SignalMapping, WrapMode}, args::{Args, ClearMode, ColourMode, EmitEvents, OnBusyUpdate, SignalMapping},
state::RotatingTempFile, state::RotatingTempFile,
}; };
use crate::{emits::events_to_simple_format, state::State}; use crate::{emits::events_to_simple_format, state::State};
@ -67,7 +68,19 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
eprintln!("[[Error (not fatal)]]\n{}", Report::new(err.error)); eprintln!("[[Error (not fatal)]]\n{}", Report::new(err.error));
}); });
config.pathset(args.paths.clone()); config.pathset(if args.paths.is_empty() {
vec![current_dir().into_diagnostic()?]
} else if args.paths.len() == 1
&& args
.paths
.first()
.map_or(false, |p| p == Path::new("/dev/null"))
{
// special case: /dev/null means "don't start the fs event source"
Vec::new()
} else {
args.paths.clone()
});
config.throttle(args.debounce.0); config.throttle(args.debounce.0);
config.keyboard_events(args.stdin_quit); config.keyboard_events(args.stdin_quit);
@ -101,7 +114,15 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
clearscreen::clear().ok(); clearscreen::clear().ok();
} }
ClearMode::Reset => { ClearMode::Reset => {
reset_screen(); for cs in [
ClearScreen::WindowsCooked,
ClearScreen::WindowsVt,
ClearScreen::VtLeaveAlt,
ClearScreen::VtWellDone,
ClearScreen::default(),
] {
cs.clear().ok();
}
} }
} }
} }
@ -211,54 +232,19 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
if let Some(ref workdir) = workdir.as_ref() { if let Some(ref workdir) = workdir.as_ref() {
debug!(?workdir, "set command workdir"); debug!(?workdir, "set command workdir");
command.command_mut().current_dir(workdir); command.current_dir(workdir);
} }
emit_events_to_command( emit_events_to_command(command, events, emit_file, emit_events_to, add_envs);
command.command_mut(),
events,
emit_file,
emit_events_to,
add_envs,
);
}); });
let show_events = { let show_events = || {
let events = action.events.clone();
move || {
if print_events { if print_events {
trace!("print events to stderr"); trace!("print events to stderr");
for (n, event) in events.iter().enumerate() { for (n, event) in action.events.iter().enumerate() {
eprintln!("[EVENT {n}] {event}"); eprintln!("[EVENT {n}] {event}");
} }
} }
}
};
let clear_screen = {
let events = action.events.clone();
move || {
if let Some(mode) = clear {
match mode {
ClearMode::Clear => {
clearscreen::clear().ok();
debug!("cleared screen");
}
ClearMode::Reset => {
reset_screen();
debug!("hard-reset screen");
}
}
}
// re-show events after clearing
if print_events {
trace!("print events to stderr");
for (n, event) in events.iter().enumerate() {
eprintln!("[EVENT {n}] {event}");
}
}
}
}; };
let quit = |mut action: ActionHandler| { let quit = |mut action: ActionHandler| {
@ -349,6 +335,28 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
return action; return action;
} }
// clear the screen before printing events
if let Some(mode) = clear {
match mode {
ClearMode::Clear => {
clearscreen::clear().ok();
debug!("cleared screen");
}
ClearMode::Reset => {
for cs in [
ClearScreen::WindowsCooked,
ClearScreen::WindowsVt,
ClearScreen::VtLeaveAlt,
ClearScreen::VtWellDone,
ClearScreen::default(),
] {
cs.clear().ok();
}
debug!("hard-reset screen");
}
}
}
show_events(); show_events();
if let Some(delay) = delay_run { if let Some(delay) = delay_run {
@ -382,7 +390,6 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
OnBusyUpdate::Restart if cfg!(windows) => { OnBusyUpdate::Restart if cfg!(windows) => {
job.restart(); job.restart();
job.run(move |context| { job.run(move |context| {
clear_screen();
setup_process( setup_process(
innerjob.clone(), innerjob.clone(),
context.command.clone(), context.command.clone(),
@ -396,7 +403,6 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
stop_timeout, stop_timeout,
); );
job.run(move |context| { job.run(move |context| {
clear_screen();
setup_process( setup_process(
innerjob.clone(), innerjob.clone(),
context.command.clone(), context.command.clone(),
@ -420,7 +426,6 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
trace!("job finished, starting queued"); trace!("job finished, starting queued");
job.start(); job.start();
job.run(move |context| { job.run(move |context| {
clear_screen();
setup_process( setup_process(
innerjob.clone(), innerjob.clone(),
context.command.clone(), context.command.clone(),
@ -439,7 +444,6 @@ pub fn make_config(args: &Args, state: &State) -> Result<Config> {
trace!("job is not running, start it"); trace!("job is not running, start it");
job.start(); job.start();
job.run(move |context| { job.run(move |context| {
clear_screen();
setup_process( setup_process(
innerjob.clone(), innerjob.clone(),
context.command.clone(), context.command.clone(),
@ -467,38 +471,22 @@ fn interpret_command_args(args: &Args) -> Result<Arc<Command>> {
panic!("(clap) Bug: command is not present"); panic!("(clap) Bug: command is not present");
} }
let shell = if args.no_shell { let shell = match if args.no_shell || args.no_shell_long {
None None
} else { } else {
let shell = args.shell.clone().or_else(|| var("SHELL").ok()); args.shell.as_deref().or(Some("default"))
match shell } {
.as_deref()
.or_else(|| {
if cfg!(not(windows)) {
Some("sh")
} else if var("POWERSHELL_DISTRIBUTION_CHANNEL").is_ok()
&& (which::which("pwsh").is_ok() || which::which("pwsh.exe").is_ok())
{
trace!("detected pwsh");
Some("pwsh")
} else if var("PSModulePath").is_ok()
&& (which::which("powershell").is_ok()
|| which::which("powershell.exe").is_ok())
{
trace!("detected powershell");
Some("powershell")
} else {
Some("cmd")
}
})
.or(Some("default"))
{
Some("") => return Err(RuntimeError::CommandShellEmptyShell).into_diagnostic(), Some("") => return Err(RuntimeError::CommandShellEmptyShell).into_diagnostic(),
Some("none") | None => None, Some("none") | None => None,
#[cfg(windows)] #[cfg(windows)]
Some("cmd") | Some("cmd.exe") | Some("CMD") | Some("CMD.EXE") => Some(Shell::cmd()), Some("default") | Some("cmd") | Some("cmd.exe") | Some("CMD") | Some("CMD.EXE") => {
Some(Shell::cmd())
}
#[cfg(not(windows))]
Some("default") => Some(Shell::new("sh")),
Some(other) => { Some(other) => {
let sh = other.split_ascii_whitespace().collect::<Vec<_>>(); let sh = other.split_ascii_whitespace().collect::<Vec<_>>();
@ -513,7 +501,6 @@ fn interpret_command_args(args: &Args) -> Result<Arc<Command>> {
program_option: Some(Cow::Borrowed(OsStr::new("-c"))), program_option: Some(Cow::Borrowed(OsStr::new("-c"))),
}) })
} }
}
}; };
let program = if let Some(shell) = shell { let program = if let Some(shell) = shell {
@ -532,8 +519,7 @@ fn interpret_command_args(args: &Args) -> Result<Arc<Command>> {
Ok(Arc::new(Command { Ok(Arc::new(Command {
program, program,
options: SpawnOptions { options: SpawnOptions {
grouped: matches!(args.wrap_process, WrapMode::Group), grouped: !args.no_process_group,
session: matches!(args.wrap_process, WrapMode::Session),
..Default::default() ..Default::default()
}, },
})) }))
@ -694,15 +680,3 @@ fn emit_events_to_command(
command.stdin(stdin); command.stdin(stdin);
} }
} }
pub(crate) fn reset_screen() {
for cs in [
ClearScreen::WindowsCooked,
ClearScreen::WindowsVt,
ClearScreen::VtLeaveAlt,
ClearScreen::VtWellDone,
ClearScreen::default(),
] {
cs.clear().ok();
}
}

View file

@ -1,188 +1,4 @@
use std::{ mod common;
ffi::OsString, mod globset;
path::{Path, PathBuf, MAIN_SEPARATOR},
sync::Arc,
};
use miette::{IntoDiagnostic, Result}; pub use globset::globset;
use tokio::io::{AsyncBufReadExt, BufReader};
use tracing::{info, trace, trace_span};
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{
filekind::{FileEventKind, ModifyKind},
Event, Priority, Tag,
};
use watchexec_filterer_globset::GlobsetFilterer;
use crate::args::{Args, FsEvent};
pub(crate) mod parse;
mod proglib;
mod progs;
mod syncval;
/// A custom filterer that combines the library's Globset filterer and a switch for --no-meta
#[derive(Debug)]
pub struct WatchexecFilterer {
inner: GlobsetFilterer,
fs_events: Vec<FsEvent>,
progs: Option<progs::FilterProgs>,
}
impl Filterer for WatchexecFilterer {
#[tracing::instrument(level = "trace", skip(self))]
fn check_event(&self, event: &Event, priority: Priority) -> Result<bool, RuntimeError> {
for tag in &event.tags {
if let Tag::FileEventKind(fek) = tag {
let normalised = match fek {
FileEventKind::Access(_) => FsEvent::Access,
FileEventKind::Modify(ModifyKind::Name(_)) => FsEvent::Rename,
FileEventKind::Modify(ModifyKind::Metadata(_)) => FsEvent::Metadata,
FileEventKind::Modify(_) => FsEvent::Modify,
FileEventKind::Create(_) => FsEvent::Create,
FileEventKind::Remove(_) => FsEvent::Remove,
_ => continue,
};
trace!(allowed=?self.fs_events, this=?normalised, "check against fs event filter");
if !self.fs_events.contains(&normalised) {
return Ok(false);
}
}
}
trace!("check against original event");
if !self.inner.check_event(event, priority)? {
return Ok(false);
}
if let Some(progs) = &self.progs {
trace!("check against program filters");
if !progs.check(event)? {
return Ok(false);
}
}
Ok(true)
}
}
impl WatchexecFilterer {
/// Create a new filterer from the given arguments
pub async fn new(args: &Args) -> Result<Arc<Self>> {
let project_origin = args.project_origin.clone().unwrap();
let workdir = args.workdir.clone().unwrap();
let ignore_files = if args.no_discover_ignore {
Vec::new()
} else {
let vcs_types = crate::dirs::vcs_types(&project_origin).await;
crate::dirs::ignores(args, &vcs_types).await?
};
let mut ignores = Vec::new();
if !args.no_default_ignore {
ignores.extend([
(format!("**{MAIN_SEPARATOR}.DS_Store"), None),
(String::from("watchexec.*.log"), None),
(String::from("*.py[co]"), None),
(String::from("#*#"), None),
(String::from(".#*"), None),
(String::from(".*.kate-swp"), None),
(String::from(".*.sw?"), None),
(String::from(".*.sw?x"), None),
(format!("**{MAIN_SEPARATOR}.bzr{MAIN_SEPARATOR}**"), None),
(format!("**{MAIN_SEPARATOR}_darcs{MAIN_SEPARATOR}**"), None),
(
format!("**{MAIN_SEPARATOR}.fossil-settings{MAIN_SEPARATOR}**"),
None,
),
(format!("**{MAIN_SEPARATOR}.git{MAIN_SEPARATOR}**"), None),
(format!("**{MAIN_SEPARATOR}.hg{MAIN_SEPARATOR}**"), None),
(format!("**{MAIN_SEPARATOR}.pijul{MAIN_SEPARATOR}**"), None),
(format!("**{MAIN_SEPARATOR}.svn{MAIN_SEPARATOR}**"), None),
]);
}
let whitelist = args
.paths
.iter()
.map(|p| p.into())
.filter(|p: &PathBuf| p.is_file());
let mut filters = args
.filter_patterns
.iter()
.map(|f| (f.to_owned(), Some(workdir.clone())))
.collect::<Vec<_>>();
for filter_file in &args.filter_files {
filters.extend(read_filter_file(filter_file).await?);
}
ignores.extend(
args.ignore_patterns
.iter()
.map(|f| (f.to_owned(), Some(workdir.clone()))),
);
let exts = args
.filter_extensions
.iter()
.map(|e| OsString::from(e.strip_prefix('.').unwrap_or(e)));
info!("initialising Globset filterer");
Ok(Arc::new(Self {
inner: GlobsetFilterer::new(
project_origin,
filters,
ignores,
whitelist,
ignore_files,
exts,
)
.await
.into_diagnostic()?,
fs_events: args.filter_fs_events.clone(),
progs: if args.filter_programs_parsed.is_empty() {
None
} else {
Some(progs::FilterProgs::new(args)?)
},
}))
}
}
/// Read a filter file and return its patterns, each paired with the file's
/// path (so later diagnostics can point back to where the pattern came from).
///
/// Blank lines and lines starting with `#` are skipped.
async fn read_filter_file(path: &Path) -> Result<Vec<(String, Option<PathBuf>)>> {
	let _span = trace_span!("loading filter file", ?path).entered();
	let file = tokio::fs::File::open(path).await.into_diagnostic()?;
	// Best-effort size probe: a metadata failure is treated as length 0 rather
	// than an error, since the length is only used as a capacity hint below.
	let metadata_len = file
		.metadata()
		.await
		.map(|m| usize::try_from(m.len()))
		.unwrap_or(Ok(0))
		.into_diagnostic()?;
	// Rough guess of ~20 bytes per pattern line to pre-size the output vector.
	let filter_capacity = if metadata_len == 0 {
		0
	} else {
		metadata_len / 20
	};
	let mut filters = Vec::with_capacity(filter_capacity);
	let reader = BufReader::new(file);
	let mut lines = reader.lines();
	while let Some(line) = lines.next_line().await.into_diagnostic()? {
		let line = line.trim();
		// Skip empties and comments.
		if line.is_empty() || line.starts_with('#') {
			continue;
		}
		trace!(?line, "adding filter line");
		filters.push((line.to_owned(), Some(path.to_owned())));
	}
	Ok(filters)
}

View file

@ -1,5 +1,7 @@
use std::{ use std::{
borrow::Cow,
collections::HashSet, collections::HashSet,
env,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
@ -12,7 +14,16 @@ use watchexec::paths::common_prefix;
use crate::args::Args; use crate::args::Args;
pub async fn project_origin(args: &Args) -> Result<PathBuf> { type ProjectOriginPath = PathBuf;
type WorkDirPath = PathBuf;
/// Extract relevant directories (in particular the project origin and work directory)
/// given the command line arguments that were provided
pub async fn dirs(args: &Args) -> Result<(ProjectOriginPath, WorkDirPath)> {
let curdir = env::current_dir().into_diagnostic()?;
let curdir = canonicalize(curdir).await.into_diagnostic()?;
debug!(?curdir, "current directory");
let project_origin = if let Some(origin) = &args.project_origin { let project_origin = if let Some(origin) = &args.project_origin {
debug!(?origin, "project origin override"); debug!(?origin, "project origin override");
canonicalize(origin).await.into_diagnostic()? canonicalize(origin).await.into_diagnostic()?
@ -23,19 +34,27 @@ pub async fn project_origin(args: &Args) -> Result<PathBuf> {
}; };
debug!(?homedir, "home directory"); debug!(?homedir, "home directory");
let homedir_requested = homedir.as_ref().map_or(false, |home| { let mut paths = HashSet::new();
args.paths for path in &args.paths {
.binary_search_by_key(home, |w| PathBuf::from(w.clone())) paths.insert(canonicalize(path).await.into_diagnostic()?);
.is_ok() }
});
let homedir_requested = homedir.as_ref().map_or(false, |home| paths.contains(home));
debug!( debug!(
?homedir_requested, ?homedir_requested,
"resolved whether the homedir is explicitly requested" "resolved whether the homedir is explicitly requested"
); );
if paths.is_empty() {
debug!("no paths, using current directory");
paths.insert(curdir.clone());
}
debug!(?paths, "resolved all watched paths");
let mut origins = HashSet::new(); let mut origins = HashSet::new();
for path in &args.paths { for path in paths {
origins.extend(project_origins::origins(path).await); origins.extend(project_origins::origins(&path).await);
} }
match (homedir, homedir_requested) { match (homedir, homedir_requested) {
@ -48,7 +67,7 @@ pub async fn project_origin(args: &Args) -> Result<PathBuf> {
if origins.is_empty() { if origins.is_empty() {
debug!("no origins, using current directory"); debug!("no origins, using current directory");
origins.insert(args.workdir.clone().unwrap()); origins.insert(curdir.clone());
} }
debug!(?origins, "resolved all project origins"); debug!(?origins, "resolved all project origins");
@ -61,9 +80,12 @@ pub async fn project_origin(args: &Args) -> Result<PathBuf> {
.await .await
.into_diagnostic()? .into_diagnostic()?
}; };
debug!(?project_origin, "resolved common/project origin"); info!(?project_origin, "resolved common/project origin");
Ok(project_origin) let workdir = curdir;
info!(?workdir, "resolved working directory");
Ok((project_origin, workdir))
} }
pub async fn vcs_types(origin: &Path) -> Vec<ProjectType> { pub async fn vcs_types(origin: &Path) -> Vec<ProjectType> {
@ -72,31 +94,38 @@ pub async fn vcs_types(origin: &Path) -> Vec<ProjectType> {
.into_iter() .into_iter()
.filter(|pt| pt.is_vcs()) .filter(|pt| pt.is_vcs())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
info!(?vcs_types, "effective vcs types"); info!(?vcs_types, "resolved vcs types");
vcs_types vcs_types
} }
pub async fn ignores(args: &Args, vcs_types: &[ProjectType]) -> Result<Vec<IgnoreFile>> { pub async fn ignores(
let origin = args.project_origin.clone().unwrap(); args: &Args,
vcs_types: &[ProjectType],
origin: &Path,
) -> Result<Vec<IgnoreFile>> {
fn higher_make_absolute_if_needed<'a>(
origin: &'a Path,
) -> impl 'a + Fn(&'a PathBuf) -> Cow<'a, Path> {
|path| {
if path.is_absolute() {
Cow::Borrowed(path)
} else {
Cow::Owned(origin.join(path))
}
}
}
let mut skip_git_global_excludes = false; let mut skip_git_global_excludes = false;
let mut ignores = if args.no_project_ignore { let mut ignores = if args.no_project_ignore {
Vec::new() Vec::new()
} else { } else {
let ignore_files = args.ignore_files.iter().map(|path| { let make_absolute_if_needed = higher_make_absolute_if_needed(origin);
if path.is_absolute() { let include_paths = args.paths.iter().map(&make_absolute_if_needed);
path.into() let ignore_files = args.ignore_files.iter().map(&make_absolute_if_needed);
} else {
origin.join(path)
}
});
let (mut ignores, errors) = ignore_files::from_origin( let (mut ignores, errors) = ignore_files::from_origin(
IgnoreFilesFromOriginArgs::new_unchecked( IgnoreFilesFromOriginArgs::new_unchecked(origin, include_paths, ignore_files)
&origin,
args.paths.iter().map(PathBuf::from),
ignore_files,
)
.canonicalise() .canonicalise()
.await .await
.into_diagnostic()?, .into_diagnostic()?,
@ -192,7 +221,7 @@ pub async fn ignores(args: &Args, vcs_types: &[ProjectType]) -> Result<Vec<Ignor
.filter(|ig| { .filter(|ig| {
!ig.applies_in !ig.applies_in
.as_ref() .as_ref()
.map_or(false, |p| p.starts_with(&origin)) .map_or(false, |p| p.starts_with(origin))
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
debug!( debug!(

View file

@ -0,0 +1,151 @@
use std::{
ffi::OsString,
path::{Path, PathBuf, MAIN_SEPARATOR},
sync::Arc,
};
use miette::{IntoDiagnostic, Result};
use tokio::io::{AsyncBufReadExt, BufReader};
use tracing::{info, trace, trace_span};
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{
filekind::{FileEventKind, ModifyKind},
Event, Priority, Tag,
};
use watchexec_filterer_globset::GlobsetFilterer;
use crate::args::{Args, FsEvent};
/// Build the CLI's filterer from the parsed arguments.
///
/// Resolves the project origin and working directory, discovers ignore files
/// (unless `--no-discover-ignore`), seeds the built-in default ignores (unless
/// `--no-default-ignore`), collects filter patterns from the command line and
/// from filter files, and wraps everything in a [`WatchexecFilterer`].
pub async fn globset(args: &Args) -> Result<Arc<WatchexecFilterer>> {
	let (project_origin, workdir) = super::common::dirs(args).await?;
	let ignore_files = if args.no_discover_ignore {
		Vec::new()
	} else {
		// Only VCS-type project markers are relevant to ignore-file discovery.
		let vcs_types = super::common::vcs_types(&project_origin).await;
		super::common::ignores(args, &vcs_types, &project_origin).await?
	};
	let mut ignores = Vec::new();
	if !args.no_default_ignore {
		// Built-in ignores: OS metadata, watchexec's own log files, Python
		// bytecode, editor temp/swap files, and VCS internal directories.
		// Directory patterns use MAIN_SEPARATOR so they match per-platform.
		ignores.extend([
			(format!("**{MAIN_SEPARATOR}.DS_Store"), None),
			(String::from("watchexec.*.log"), None),
			(String::from("*.py[co]"), None),
			(String::from("#*#"), None),
			(String::from(".#*"), None),
			(String::from(".*.kate-swp"), None),
			(String::from(".*.sw?"), None),
			(String::from(".*.sw?x"), None),
			(format!("**{MAIN_SEPARATOR}.bzr{MAIN_SEPARATOR}**"), None),
			(format!("**{MAIN_SEPARATOR}_darcs{MAIN_SEPARATOR}**"), None),
			(
				format!("**{MAIN_SEPARATOR}.fossil-settings{MAIN_SEPARATOR}**"),
				None,
			),
			(format!("**{MAIN_SEPARATOR}.git{MAIN_SEPARATOR}**"), None),
			(format!("**{MAIN_SEPARATOR}.hg{MAIN_SEPARATOR}**"), None),
			(format!("**{MAIN_SEPARATOR}.pijul{MAIN_SEPARATOR}**"), None),
			(format!("**{MAIN_SEPARATOR}.svn{MAIN_SEPARATOR}**"), None),
		]);
	}
	// CLI-provided filter/ignore patterns are anchored to the working
	// directory; patterns read from filter files carry their own file's path.
	let mut filters = args
		.filter_patterns
		.iter()
		.map(|f| (f.to_owned(), Some(workdir.clone())))
		.collect::<Vec<_>>();
	for filter_file in &args.filter_files {
		filters.extend(read_filter_file(filter_file).await?);
	}
	ignores.extend(
		args.ignore_patterns
			.iter()
			.map(|f| (f.to_owned(), Some(workdir.clone()))),
	);
	// Extensions may be given with or without a leading dot; strip it if present.
	let exts = args
		.filter_extensions
		.iter()
		.map(|e| OsString::from(e.strip_prefix('.').unwrap_or(e)));
	info!("initialising Globset filterer");
	Ok(Arc::new(WatchexecFilterer {
		inner: GlobsetFilterer::new(project_origin, filters, ignores, ignore_files, exts)
			.await
			.into_diagnostic()?,
		fs_events: args.filter_fs_events.clone(),
	}))
}
/// Parse a filter file into a list of (pattern, source path) pairs.
///
/// Blank lines and `#` comments are skipped; every surviving line becomes one
/// pattern, tagged with the path of the file it was read from.
async fn read_filter_file(path: &Path) -> Result<Vec<(String, Option<PathBuf>)>> {
	let _span = trace_span!("loading filter file", ?path).entered();
	let file = tokio::fs::File::open(path).await.into_diagnostic()?;

	// Pre-size the output from a rough guess of ~20 bytes per pattern line;
	// a failed metadata call just means we start from a zero-capacity vector.
	let size_guess = match file.metadata().await {
		Ok(meta) => usize::try_from(meta.len()).into_diagnostic()?,
		Err(_) => 0,
	};
	let mut patterns = Vec::with_capacity(size_guess / 20);

	let mut lines = BufReader::new(file).lines();
	while let Some(raw) = lines.next_line().await.into_diagnostic()? {
		let line = raw.trim();
		if line.is_empty() || line.starts_with('#') {
			continue;
		}
		trace!(?line, "adding filter line");
		patterns.push((line.to_owned(), Some(path.to_owned())));
	}

	Ok(patterns)
}
/// A custom filterer that combines the library's Globset filterer and a switch for --no-meta
#[derive(Debug)]
pub struct WatchexecFilterer {
	/// The underlying globset-based path/pattern filterer.
	inner: GlobsetFilterer,
	/// Filesystem event kinds (from the fs-events argument) allowed through;
	/// events of any other normalised kind are rejected outright.
	fs_events: Vec<FsEvent>,
}
impl Filterer for WatchexecFilterer {
	/// Apply the fs-event-kind allowlist first, then defer to the inner
	/// globset filterer for path/pattern checks.
	fn check_event(&self, event: &Event, priority: Priority) -> Result<bool, RuntimeError> {
		// Map each file-event tag onto the CLI-level FsEvent categories; any
		// kind the user did not allow rejects the whole event immediately.
		for tag in &event.tags {
			let Tag::FileEventKind(kind) = tag else {
				continue;
			};
			let category = match kind {
				FileEventKind::Access(_) => FsEvent::Access,
				FileEventKind::Modify(ModifyKind::Name(_)) => FsEvent::Rename,
				FileEventKind::Modify(ModifyKind::Metadata(_)) => FsEvent::Metadata,
				FileEventKind::Modify(_) => FsEvent::Modify,
				FileEventKind::Create(_) => FsEvent::Create,
				FileEventKind::Remove(_) => FsEvent::Remove,
				_ => continue,
			};
			if !self.fs_events.contains(&category) {
				return Ok(false);
			}
		}

		trace!("check against original event");
		Ok(self.inner.check_event(event, priority)?)
	}
}

View file

@ -1,17 +0,0 @@
use miette::{miette, Result};
/// Parse the `n`-th jq filter program source into a jaq AST.
///
/// All parse errors are joined into a single diagnostic; a parse that yields
/// neither errors nor an AST is reported as a failure with no reason given.
pub fn parse_filter_program((n, prog): (usize, String)) -> Result<jaq_syn::Main> {
	let (parsed, errors) = jaq_parse::parse(&prog, jaq_parse::main());

	if errors.is_empty() {
		parsed.ok_or_else(|| miette!("failed to load filter program #{} (no reason given)", n))
	} else {
		let combined = errors
			.iter()
			.map(ToString::to_string)
			.collect::<Vec<_>>()
			.join("\n");
		Err(miette!("{}", combined).wrap_err(format!("failed to load filter program #{}", n)))
	}
}

View file

@ -1,27 +0,0 @@
use jaq_interpret::ParseCtx;
use miette::Result;
use tracing::debug;
mod file;
mod hash;
mod kv;
mod macros;
mod output;
/// Build a jaq `ParseCtx` preloaded with the jaq core natives, the jaq std
/// definitions, and watchexec's own native filters (file, hash, kv, output).
pub fn jaq_lib() -> Result<ParseCtx> {
	let mut jaq = ParseCtx::new(Vec::new());
	debug!("loading jaq core library");
	jaq.insert_natives(jaq_core::core());
	debug!("loading jaq std library");
	jaq.insert_defs(jaq_std::std());
	// Watchexec-specific natives, each registered by its submodule.
	debug!("loading jaq watchexec library");
	file::load(&mut jaq);
	hash::load(&mut jaq);
	kv::load(&mut jaq);
	output::load(&mut jaq);
	Ok(jaq)
}

View file

@ -1,173 +0,0 @@
use std::{
fs::{metadata, File, FileType, Metadata},
io::{BufReader, Read},
iter::once,
time::{SystemTime, UNIX_EPOCH},
};
use jaq_interpret::{Error, Native, ParseCtx, Val};
use serde_json::{json, Value};
use tracing::{debug, error, trace};
use super::macros::*;
/// Register the file-related native filters into the jaq parse context:
/// `file_read(n)` (read up to n bytes of a file as a string), `file_meta`
/// (metadata as a JSON object), and `file_size` (length in bytes).
/// I/O failures are logged and yield null rather than erroring the pipeline.
pub fn load(jaq: &mut ParseCtx) {
	trace!("jaq: add file_read filter");
	jaq.insert_native(
		"file_read".into(),
		1,
		Native::new({
			move |args, (ctx, val)| {
				// The input value is the path; the single argument is the byte limit.
				let path = match &val {
					Val::Str(v) => v.to_string(),
					// NOTE(review): "{val:?}" is inside a plain string literal here
					// (not a format string), so it is emitted verbatim — consider
					// wrapping in format!(). Same pattern in the filters below.
					_ => return_err!(Err(Error::str("expected string (path) but got {val:?}"))),
				};
				let bytes = match int_arg!(args, 0, ctx, &val) {
					Ok(v) => v,
					Err(e) => return_err!(Err(e)),
				};
				Box::new(once(Ok(match File::open(&path) {
					Ok(file) => {
						// Cap the read at `bytes` via Take, decoding as UTF-8.
						let buf_reader = BufReader::new(file);
						let mut limited = buf_reader.take(bytes);
						let mut buffer = String::with_capacity(bytes as _);
						match limited.read_to_string(&mut buffer) {
							Ok(read) => {
								debug!("jaq: read {read} bytes from {path:?}");
								Val::Str(buffer.into())
							}
							Err(err) => {
								error!("jaq: failed to read from {path:?}: {err:?}");
								Val::Null
							}
						}
					}
					Err(err) => {
						error!("jaq: failed to open file {path:?}: {err:?}");
						Val::Null
					}
				})))
			}
		}),
	);
	trace!("jaq: add file_meta filter");
	jaq.insert_native(
		"file_meta".into(),
		0,
		Native::new({
			move |_, (_, val)| {
				let path = match &val {
					Val::Str(v) => v.to_string(),
					_ => return_err!(Err(Error::str("expected string (path) but got {val:?}"))),
				};
				// Metadata rendered to JSON by json_meta; null on error.
				Box::new(once(Ok(match metadata(&path) {
					Ok(meta) => Val::from(json_meta(meta)),
					Err(err) => {
						error!("jaq: failed to open {path:?}: {err:?}");
						Val::Null
					}
				})))
			}
		}),
	);
	trace!("jaq: add file_size filter");
	jaq.insert_native(
		"file_size".into(),
		0,
		Native::new({
			move |_, (_, val)| {
				let path = match &val {
					Val::Str(v) => v.to_string(),
					_ => return_err!(Err(Error::str("expected string (path) but got {val:?}"))),
				};
				// Size in bytes; null on error.
				Box::new(once(Ok(match metadata(&path) {
					Ok(meta) => Val::Int(meta.len() as _),
					Err(err) => {
						error!("jaq: failed to open {path:?}: {err:?}");
						Val::Null
					}
				})))
			}
		}),
	);
}
/// Convert filesystem metadata into a JSON object: type name, size, the three
/// timestamps as whole seconds since the Unix epoch (null when unavailable),
/// dir/file/symlink flags, and the read-only permission bit.
fn json_meta(meta: Metadata) -> Value {
	let perms = meta.permissions();
	let mut val = json!({
		"type": filetype_str(meta.file_type()),
		"size": meta.len(),
		"modified": fs_time(meta.modified()),
		"accessed": fs_time(meta.accessed()),
		"created": fs_time(meta.created()),
		"dir": meta.is_dir(),
		"file": meta.is_file(),
		"symlink": meta.is_symlink(),
		"readonly": perms.readonly(),
	});
	// On unix, also expose the raw mode (as an octal string and as an integer)
	// plus an "executable" flag set when any of the execute bits is on.
	#[cfg(unix)]
	{
		use std::os::unix::fs::PermissionsExt;
		let map = val.as_object_mut().unwrap();
		map.insert(
			"mode".to_string(),
			Value::String(format!("{:o}", perms.mode())),
		);
		map.insert("mode_byte".to_string(), Value::from(perms.mode()));
		map.insert(
			"executable".to_string(),
			Value::Bool(perms.mode() & 0o111 != 0),
		);
	}
	val
}
/// Name a file type as a static string. Platform-specific types are checked
/// first (char/block devices, fifos, sockets on unix; symlinked dirs/files on
/// windows), then the portable dir/file/symlink checks, else "unknown".
fn filetype_str(filetype: FileType) -> &'static str {
	#[cfg(unix)]
	{
		use std::os::unix::fs::FileTypeExt;
		if filetype.is_char_device() {
			return "char";
		} else if filetype.is_block_device() {
			return "block";
		} else if filetype.is_fifo() {
			return "fifo";
		} else if filetype.is_socket() {
			return "socket";
		}
	}
	#[cfg(windows)]
	{
		use std::os::windows::fs::FileTypeExt;
		if filetype.is_symlink_dir() {
			return "symdir";
		} else if filetype.is_symlink_file() {
			return "symfile";
		}
	}
	if filetype.is_dir() {
		"dir"
	} else if filetype.is_file() {
		"file"
	} else if filetype.is_symlink() {
		"symlink"
	} else {
		"unknown"
	}
}
/// Convert a filesystem timestamp into whole seconds since the Unix epoch.
/// Any failure — an I/O error or a pre-epoch timestamp — collapses to `None`.
fn fs_time(time: std::io::Result<SystemTime>) -> Option<u64> {
	match time {
		Ok(stamp) => match stamp.duration_since(UNIX_EPOCH) {
			Ok(elapsed) => Some(elapsed.as_secs()),
			Err(_) => None,
		},
		Err(_) => None,
	}
}

View file

@ -1,62 +0,0 @@
use std::{fs::File, io::Read, iter::once};
use jaq_interpret::{Error, Native, ParseCtx, Val};
use tracing::{debug, error, trace};
use super::macros::*;
/// Register the hashing native filters into the jaq parse context:
/// `hash` (BLAKE3 of the input string, hex-encoded) and `file_hash` (BLAKE3 of
/// a file's contents, streamed in 1 MiB chunks; null if the file won't open).
pub fn load(jaq: &mut ParseCtx) {
	trace!("jaq: add hash filter");
	jaq.insert_native(
		"hash".into(),
		0,
		Native::new({
			move |_, (_, val)| {
				let string = match &val {
					Val::Str(v) => v.to_string(),
					// NOTE(review): "{val:?}" is inside a plain string literal (not
					// a format string), so it is emitted verbatim — consider format!().
					_ => return_err!(Err(Error::str("expected string but got {val:?}"))),
				};
				Box::new(once(Ok(Val::Str(
					blake3::hash(string.as_bytes()).to_hex().to_string().into(),
				))))
			}
		}),
	);
	trace!("jaq: add file_hash filter");
	jaq.insert_native(
		"file_hash".into(),
		0,
		Native::new({
			move |_, (_, val)| {
				let path = match &val {
					Val::Str(v) => v.to_string(),
					_ => return_err!(Err(Error::str("expected string but got {val:?}"))),
				};
				Box::new(once(Ok(match File::open(&path) {
					Ok(mut file) => {
						const BUFFER_SIZE: usize = 1024 * 1024;
						let mut hasher = blake3::Hasher::new();
						let mut buf = vec![0; BUFFER_SIZE];
						// `read` overwrites the buffer on every call, so a single
						// allocation is reused for the whole file. (The previous
						// version re-allocated the 1 MiB buffer after each chunk
						// for no benefit.)
						while let Ok(bytes) = file.read(&mut buf) {
							debug!("jaq: read {bytes} bytes from {path:?}");
							if bytes == 0 {
								break;
							}
							hasher.update(&buf[..bytes]);
						}
						Val::Str(hasher.finalize().to_hex().to_string().into())
					}
					Err(err) => {
						error!("jaq: failed to open file {path:?}: {err:?}");
						Val::Null
					}
				})))
			}
		}),
	);
}

View file

@ -1,69 +0,0 @@
use std::{iter::once, sync::Arc};
use dashmap::DashMap;
use jaq_interpret::{Error, Native, ParseCtx, Val};
use once_cell::sync::OnceCell;
use tracing::trace;
use crate::filterer::syncval::SyncVal;
use super::macros::*;
type KvStore = Arc<DashMap<String, SyncVal>>;
/// Global key-value store shared by the kv_* jaq filters, lazily initialised
/// on first use; each call hands out a clone of the shared map handle.
fn kv_store() -> KvStore {
	static KV_STORE: OnceCell<KvStore> = OnceCell::new();
	// `get_or_init` takes the constructor directly; the closure wrapper was
	// redundant (clippy::redundant_closure).
	KV_STORE.get_or_init(KvStore::default).clone()
}
/// Register the key-value store native filters into the jaq parse context:
/// `kv_clear`, `kv_store(key)`, and `kv_fetch(key)`. The first two pass their
/// input through unchanged; `kv_fetch` yields the stored value or null.
pub fn load(jaq: &mut ParseCtx) {
	trace!("jaq: add kv_clear filter");
	jaq.insert_native(
		"kv_clear".into(),
		0,
		Native::new({
			move |_, (_, val)| {
				let kv = kv_store();
				kv.clear();
				// passthrough
				Box::new(once(Ok(val)))
			}
		}),
	);
	trace!("jaq: add kv_store filter");
	jaq.insert_native(
		"kv_store".into(),
		1,
		Native::new({
			move |args, (ctx, val)| {
				let kv = kv_store();
				let key = match string_arg!(args, 0, ctx, val) {
					Ok(v) => v,
					Err(e) => return_err!(Err(e)),
				};
				// Store a Sync-safe copy of the input value under the key.
				kv.insert(key, (&val).into());
				// passthrough
				Box::new(once(Ok(val)))
			}
		}),
	);
	trace!("jaq: add kv_fetch filter");
	jaq.insert_native(
		"kv_fetch".into(),
		1,
		Native::new({
			move |args, (ctx, val)| {
				let kv = kv_store();
				let key = match string_arg!(args, 0, ctx, val) {
					Ok(v) => v,
					Err(e) => return_err!(Err(e)),
				};
				// Yield the stored value converted back to a jaq Val, or null.
				Box::new(once(Ok(kv
					.get(&key)
					.map(|val| val.value().into())
					.unwrap_or(Val::Null))))
			}
		}),
	);
}

View file

@ -1,30 +0,0 @@
/// Early-return from a native filter closure, yielding a single-item iterator
/// containing `$err` (the closures return boxed iterators of results).
macro_rules! return_err {
	($err:expr) => {
		return Box::new(once($err))
	};
}
pub(crate) use return_err;
/// Evaluate filter argument `$n` in context and require a string value;
/// yields `Ok(String)`, or an `Err` describing what was found instead.
macro_rules! string_arg {
	($args:expr, $n:expr, $ctx:expr, $val:expr) => {
		match ::jaq_interpret::FilterT::run($args.get($n), ($ctx.clone(), $val.clone())).next() {
			Some(Ok(Val::Str(v))) => Ok(v.to_string()),
			Some(Ok(val)) => Err(Error::str(format!("expected string but got {val:?}"))),
			Some(Err(e)) => Err(e),
			None => Err(Error::str("value expected but none found")),
		}
	};
}
pub(crate) use string_arg;
/// Evaluate filter argument `$n` in context and require an integer value,
/// cast with `as _` to whatever integer type the call site expects.
macro_rules! int_arg {
	($args:expr, $n:expr, $ctx:expr, $val:expr) => {
		match ::jaq_interpret::FilterT::run($args.get($n), ($ctx.clone(), $val.clone())).next() {
			Some(Ok(Val::Int(v))) => Ok(v as _),
			Some(Ok(val)) => Err(Error::str(format!("expected int but got {val:?}"))),
			Some(Err(e)) => Err(e),
			None => Err(Error::str("value expected but none found")),
		}
	};
}
pub(crate) use int_arg;

View file

@ -1,83 +0,0 @@
use std::iter::once;
use jaq_interpret::{Error, Native, ParseCtx, Val};
use tracing::{debug, error, info, trace, warn};
use super::macros::*;
/// Emit `$val` through the tracing macro matching the (case-insensitively
/// compared) `$level` string; an unrecognised level early-returns an error
/// from the enclosing native filter via `return_err!`.
macro_rules! log_action {
	($level:expr, $val:expr) => {
		match $level.to_ascii_lowercase().as_str() {
			"trace" => trace!("jaq: {}", $val),
			"debug" => debug!("jaq: {}", $val),
			"info" => info!("jaq: {}", $val),
			"warn" => warn!("jaq: {}", $val),
			"error" => error!("jaq: {}", $val),
			_ => return_err!(Err(Error::str("invalid log level"))),
		}
	};
}
/// Register the output native filters into the jaq parse context:
/// `log(level)` (emit via tracing at the given level), `printout` (stdout),
/// and `printerr` (stderr). All three are passthrough filters — they yield
/// their input unchanged — and are registered with `with_update` so they also
/// work on jaq's update path.
pub fn load(jaq: &mut ParseCtx) {
	trace!("jaq: add log filter");
	jaq.insert_native(
		"log".into(),
		1,
		Native::with_update(
			|args, (ctx, val)| {
				let level = match string_arg!(args, 0, ctx, val) {
					Ok(v) => v,
					Err(e) => return_err!(Err(e)),
				};
				log_action!(level, val);
				// passthrough
				Box::new(once(Ok(val)))
			},
			|args, (ctx, val), _| {
				let level = match string_arg!(args, 0, ctx, val) {
					Ok(v) => v,
					Err(e) => return_err!(Err(e)),
				};
				log_action!(level, val);
				// passthrough
				Box::new(once(Ok(val)))
			},
		),
	);
	trace!("jaq: add printout filter");
	jaq.insert_native(
		"printout".into(),
		0,
		Native::with_update(
			|_, (_, val)| {
				println!("{}", val);
				Box::new(once(Ok(val)))
			},
			|_, (_, val), _| {
				println!("{}", val);
				Box::new(once(Ok(val)))
			},
		),
	);
	trace!("jaq: add printerr filter");
	jaq.insert_native(
		"printerr".into(),
		0,
		Native::with_update(
			|_, (_, val)| {
				eprintln!("{}", val);
				Box::new(once(Ok(val)))
			},
			|_, (_, val), _| {
				eprintln!("{}", val);
				Box::new(once(Ok(val)))
			},
		),
	);
}

View file

@ -1,143 +0,0 @@
use std::{iter::empty, marker::PhantomData};
use jaq_interpret::{Ctx, FilterT, RcIter, Val};
use miette::miette;
use tokio::{
sync::{mpsc, oneshot},
task::{block_in_place, spawn_blocking},
};
use tracing::{error, trace, warn};
use watchexec::error::RuntimeError;
use watchexec_events::Event;
use crate::args::Args;
const BUFFER: usize = 128;
/// Runs the user's jq filter programs on a dedicated blocking thread,
/// reachable from the synchronous filterer via a request/reply channel.
#[derive(Debug)]
pub struct FilterProgs {
	/// Sends events to the worker thread and receives pass/fail verdicts.
	channel: Requester<Event, bool>,
}
/// Synchronous request/reply handle over a tokio mpsc channel: every request
/// carries a oneshot sender on which the worker posts the reply.
#[derive(Debug, Clone)]
pub struct Requester<S, R> {
	sender: mpsc::Sender<(S, oneshot::Sender<R>)>,
	/// Pins the reply type parameter; no receiver is stored here.
	_receiver: PhantomData<R>,
}
impl<S, R> Requester<S, R>
where
	S: Send + Sync,
	R: Send + Sync,
{
	/// Create a bounded request channel, returning the requester handle and
	/// the receiving end, which yields (payload, reply-sender) pairs.
	pub fn new(capacity: usize) -> (Self, mpsc::Receiver<(S, oneshot::Sender<R>)>) {
		let (sender, receiver) = mpsc::channel(capacity);
		(
			Self {
				sender,
				_receiver: PhantomData,
			},
			receiver,
		)
	}
	/// Send `value` and block the current thread until the reply arrives.
	///
	/// Wrapped in `block_in_place` so the blocking wait doesn't starve the
	/// tokio worker it is called from; a failure on either channel leg is
	/// surfaced as `RuntimeError::External`.
	pub fn call(&self, value: S) -> Result<R, RuntimeError> {
		// FIXME: this should really be async with a timeout, but that needs filtering in general
		// to be async, which should be done at some point
		block_in_place(|| {
			let (sender, receiver) = oneshot::channel();
			self.sender.blocking_send((value, sender)).map_err(|err| {
				RuntimeError::External(miette!("filter progs internal channel: {}", err).into())
			})?;
			receiver
				.blocking_recv()
				.map_err(|err| RuntimeError::External(Box::new(err)))
		})
	}
}
impl FilterProgs {
	/// Check one event against the filter programs, blocking until the worker
	/// thread replies with a verdict.
	pub fn check(&self, event: &Event) -> Result<bool, RuntimeError> {
		self.channel.call(event.clone())
	}
	/// Spawn the blocking worker that evaluates the parsed filter programs.
	///
	/// For each incoming event: the event is serialised to JSON, then each
	/// program is compiled and run against it in order. The first program
	/// yielding `false` rejects the event; `true` moves on to the next;
	/// compile errors, runtime errors, and non-boolean results are logged and
	/// skipped. If no program rejects, the event passes by default.
	pub fn new(args: &Args) -> miette::Result<Self> {
		let progs = args.filter_programs_parsed.clone();
		eprintln!(
			"EXPERIMENTAL: filter programs are unstable and may change/vanish without notice"
		);
		let (requester, mut receiver) = Requester::<Event, bool>::new(BUFFER);
		let task =
			spawn_blocking(move || {
				'chan: while let Some((event, sender)) = receiver.blocking_recv() {
					let val = serde_json::to_value(&event)
						.map_err(|err| miette!("failed to serialize event: {}", err))
						.map(Val::from)?;
					for (n, prog) in progs.iter().enumerate() {
						trace!(?n, "trying filter program");
						// NOTE(review): the jaq library context is rebuilt and the
						// program recompiled for every program on every event —
						// looks expensive; consider caching. TODO confirm.
						let mut jaq = super::proglib::jaq_lib()?;
						let filter = jaq.compile(prog.clone());
						if !jaq.errs.is_empty() {
							for (error, span) in jaq.errs {
								error!(%error, "failed to compile filter program #{n}@{}:{}", span.start, span.end);
							}
							continue;
						}
						let inputs = RcIter::new(empty());
						let mut results = filter.run((Ctx::new([], &inputs), val.clone()));
						// Only the first value a program produces is considered.
						if let Some(res) = results.next() {
							match res {
								Ok(Val::Bool(false)) => {
									trace!(
										?n,
										verdict = false,
										"filter program finished; fail so stopping there"
									);
									sender
										.send(false)
										.unwrap_or_else(|_| warn!("failed to send filter result"));
									continue 'chan;
								}
								Ok(Val::Bool(true)) => {
									trace!(
										?n,
										verdict = true,
										"filter program finished; pass so trying next"
									);
									continue;
								}
								Ok(val) => {
									error!(?n, ?val, "filter program returned non-boolean, ignoring and trying next");
									continue;
								}
								Err(err) => {
									error!(?n, error=%err, "filter program failed, so trying next");
									continue;
								}
							}
						}
					}
					trace!("all filters failed, sending pass as default");
					sender
						.send(true)
						.unwrap_or_else(|_| warn!("failed to send filter result"));
				}
				Ok(()) as miette::Result<()>
			});
		// Reap the worker so failures and panics are logged, not dropped.
		tokio::spawn(async {
			match task.await {
				Ok(Ok(())) => {}
				Ok(Err(err)) => error!("filter progs task failed: {}", err),
				Err(err) => error!("filter progs task panicked: {}", err),
			}
		});
		Ok(Self { channel: requester })
	}
}

View file

@ -1,71 +0,0 @@
/// Jaq's [Val](jaq_interpret::Val) uses Rc, but we want to use in Sync contexts. UGH!
use std::{rc::Rc, sync::Arc};
use indexmap::IndexMap;
use jaq_interpret::Val;
/// A `Send + Sync` mirror of jaq's `Val`, with every `Rc`-backed payload
/// replaced by an `Arc`-backed one so values can cross thread boundaries.
#[derive(Clone, Debug)]
pub enum SyncVal {
	Null,
	Bool(bool),
	Int(isize),
	Float(f64),
	/// The number's textual representation (converted from jaq via `to_string`).
	Num(Arc<str>),
	Str(Arc<str>),
	Arr(Arc<[SyncVal]>),
	Obj(Arc<IndexMap<Arc<str>, SyncVal>>),
}
impl From<&Val> for SyncVal {
	/// Deep-copy a jaq `Val` into its Sync-safe mirror, converting each
	/// `Rc`-backed string, array, and object into the `Arc`-backed form
	/// (elements and entries are converted recursively).
	fn from(val: &Val) -> Self {
		match val {
			Val::Null => Self::Null,
			Val::Bool(b) => Self::Bool(*b),
			Val::Int(i) => Self::Int(*i),
			Val::Float(f) => Self::Float(*f),
			Val::Num(s) => Self::Num(s.to_string().into()),
			Val::Str(s) => Self::Str(s.to_string().into()),
			Val::Arr(a) => Self::Arr({
				let mut arr = Vec::with_capacity(a.len());
				for v in a.iter() {
					arr.push(v.into());
				}
				arr.into()
			}),
			Val::Obj(m) => Self::Obj(Arc::new({
				let mut map = IndexMap::new();
				for (k, v) in m.iter() {
					map.insert(k.to_string().into(), v.into());
				}
				map
			})),
		}
	}
}
impl From<&SyncVal> for Val {
	/// Deep-copy a `SyncVal` back into a jaq `Val`, rebuilding the `Rc`-backed
	/// payloads; the object map is rebuilt with the ahash hasher state that
	/// jaq's `Val::Obj` expects.
	fn from(val: &SyncVal) -> Self {
		match val {
			SyncVal::Null => Self::Null,
			SyncVal::Bool(b) => Self::Bool(*b),
			SyncVal::Int(i) => Self::Int(*i),
			SyncVal::Float(f) => Self::Float(*f),
			SyncVal::Num(s) => Self::Num(s.to_string().into()),
			SyncVal::Str(s) => Self::Str(s.to_string().into()),
			SyncVal::Arr(a) => Self::Arr({
				let mut arr = Vec::with_capacity(a.len());
				for v in a.iter() {
					arr.push(v.into());
				}
				arr.into()
			}),
			SyncVal::Obj(m) => Self::Obj(Rc::new({
				let mut map: IndexMap<_, _, ahash::RandomState> = Default::default();
				for (k, v) in m.iter() {
					map.insert(k.to_string().into(), v.into());
				}
				map
			})),
		}
	}
}

View file

@ -1,7 +1,7 @@
#![deny(rust_2018_idioms)] #![deny(rust_2018_idioms)]
#![allow(clippy::missing_const_for_fn, clippy::future_not_send)] #![allow(clippy::missing_const_for_fn, clippy::future_not_send)]
use std::{io::Write, process::Stdio}; use std::{env::var, fs::File, io::Write, process::Stdio, sync::Mutex};
use args::{Args, ShellCompletion}; use args::{Args, ShellCompletion};
use clap::CommandFactory; use clap::CommandFactory;
@ -9,26 +9,99 @@ use clap_complete::{Generator, Shell};
use clap_mangen::Man; use clap_mangen::Man;
use is_terminal::IsTerminal; use is_terminal::IsTerminal;
use miette::{IntoDiagnostic, Result}; use miette::{IntoDiagnostic, Result};
use tokio::{io::AsyncWriteExt, process::Command}; use tokio::{fs::metadata, io::AsyncWriteExt, process::Command};
use tracing::{debug, info}; use tracing::{debug, info, warn};
use watchexec::Watchexec; use watchexec::Watchexec;
use watchexec_events::{Event, Priority}; use watchexec_events::{Event, Priority};
use crate::filterer::WatchexecFilterer;
pub mod args; pub mod args;
mod config; mod config;
mod dirs;
mod emits; mod emits;
mod filterer; mod filterer;
mod state; mod state;
async fn init() -> Result<Args> {
let mut log_on = false;
#[cfg(feature = "dev-console")]
match console_subscriber::try_init() {
Ok(_) => {
warn!("dev-console enabled");
log_on = true;
}
Err(e) => {
eprintln!("Failed to initialise tokio console, falling back to normal logging\n{e}")
}
}
if !log_on && var("RUST_LOG").is_ok() {
match tracing_subscriber::fmt::try_init() {
Ok(()) => {
warn!(RUST_LOG=%var("RUST_LOG").unwrap(), "logging configured from RUST_LOG");
log_on = true;
}
Err(e) => eprintln!("Failed to initialise logging with RUST_LOG, falling back\n{e}"),
}
}
let args = args::get_args();
let verbosity = args.verbose.unwrap_or(0);
if log_on {
warn!("ignoring logging options from args");
} else if verbosity > 0 {
let log_file = if let Some(file) = &args.log_file {
let is_dir = metadata(&file).await.map_or(false, |info| info.is_dir());
let path = if is_dir {
let filename = format!(
"watchexec.{}.log",
chrono::Utc::now().format("%Y-%m-%dT%H-%M-%SZ")
);
file.join(filename)
} else {
file.to_owned()
};
// TODO: use tracing-appender instead
Some(File::create(path).into_diagnostic()?)
} else {
None
};
let mut builder = tracing_subscriber::fmt().with_env_filter(match verbosity {
0 => unreachable!("checked by if earlier"),
1 => "warn",
2 => "info",
3 => "debug",
_ => "trace",
});
if verbosity > 2 {
use tracing_subscriber::fmt::format::FmtSpan;
builder = builder.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE);
}
match if let Some(writer) = log_file {
builder.json().with_writer(Mutex::new(writer)).try_init()
} else if verbosity > 3 {
builder.pretty().try_init()
} else {
builder.try_init()
} {
Ok(()) => info!("logging initialised"),
Err(e) => eprintln!("Failed to initialise logging, continuing with none\n{e}"),
}
}
Ok(args)
}
async fn run_watchexec(args: Args) -> Result<()> { async fn run_watchexec(args: Args) -> Result<()> {
info!(version=%env!("CARGO_PKG_VERSION"), "constructing Watchexec from CLI"); info!(version=%env!("CARGO_PKG_VERSION"), "constructing Watchexec from CLI");
let state = state::State::default(); let state = state::State::new()?;
let config = config::make_config(&args, &state)?; let config = config::make_config(&args, &state)?;
config.filterer(WatchexecFilterer::new(&args).await?); config.filterer(filterer::globset(&args).await?);
info!("initialising Watchexec runtime"); info!("initialising Watchexec runtime");
let wx = Watchexec::with_config(config)?; let wx = Watchexec::with_config(config)?;
@ -40,11 +113,6 @@ async fn run_watchexec(args: Args) -> Result<()> {
info!("running main loop"); info!("running main loop");
wx.main().await.into_diagnostic()??; wx.main().await.into_diagnostic()??;
if matches!(args.screen_clear, Some(args::ClearMode::Reset)) {
config::reset_screen();
}
info!("done with main loop"); info!("done with main loop");
Ok(()) Ok(())
@ -116,7 +184,8 @@ async fn run_completions(shell: ShellCompletion) -> Result<()> {
} }
pub async fn run() -> Result<()> { pub async fn run() -> Result<()> {
let (args, _log_guard) = args::get_args().await?; let args = init().await?;
debug!(?args, "arguments");
if args.manual { if args.manual {
run_manpage(args).await run_manpage(args).await

View file

@ -1,5 +1,4 @@
use std::{ use std::{
env::var_os,
io::Write, io::Write,
path::PathBuf, path::PathBuf,
sync::{Arc, Mutex}, sync::{Arc, Mutex},
@ -8,41 +7,38 @@ use std::{
use miette::{IntoDiagnostic, Result}; use miette::{IntoDiagnostic, Result};
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
#[derive(Clone, Debug, Default)] #[derive(Clone, Debug)]
pub struct State { pub struct State {
pub emit_file: RotatingTempFile, pub emit_file: RotatingTempFile,
} }
#[derive(Clone, Debug, Default)] impl State {
pub struct RotatingTempFile(Arc<Mutex<Option<NamedTempFile>>>); pub fn new() -> Result<Self> {
let emit_file = RotatingTempFile::new()?;
Ok(Self { emit_file })
}
}
#[derive(Clone, Debug)]
pub struct RotatingTempFile(Arc<Mutex<NamedTempFile>>);
impl RotatingTempFile { impl RotatingTempFile {
pub fn new() -> Result<Self> {
let file = Arc::new(Mutex::new(NamedTempFile::new().into_diagnostic()?));
Ok(Self(file))
}
pub fn rotate(&self) -> Result<()> { pub fn rotate(&self) -> Result<()> {
// implicitly drops the old file // implicitly drops the old file
*self.0.lock().unwrap() = Some( *self.0.lock().unwrap() = NamedTempFile::new().into_diagnostic()?;
if let Some(dir) = var_os("WATCHEXEC_TMPDIR") {
NamedTempFile::new_in(dir)
} else {
NamedTempFile::new()
}
.into_diagnostic()?,
);
Ok(()) Ok(())
} }
pub fn write(&self, data: &[u8]) -> Result<()> { pub fn write(&self, data: &[u8]) -> Result<()> {
if let Some(file) = self.0.lock().unwrap().as_mut() { self.0.lock().unwrap().write_all(data).into_diagnostic()
file.write_all(data).into_diagnostic()?;
}
Ok(())
} }
pub fn path(&self) -> PathBuf { pub fn path(&self) -> PathBuf {
if let Some(file) = self.0.lock().unwrap().as_ref() { self.0.lock().unwrap().path().to_owned()
file.path().to_owned()
} else {
PathBuf::new()
}
} }
} }

View file

@ -3,7 +3,7 @@
<assemblyIdentity <assemblyIdentity
type="win32" type="win32"
name="Watchexec.Cli.watchexec" name="Watchexec.Cli.watchexec"
version="2.2.0.0" version="1.25.0.0"
/> />
<trustInfo> <trustInfo>

View file

@ -2,14 +2,6 @@
## Next (YYYY-MM-DD) ## Next (YYYY-MM-DD)
## v4.0.0 (2024-10-14)
- Deps: nix 0.29
## v3.0.0 (2024-04-20)
- Deps: nix 0.28
## v2.0.1 (2023-11-29) ## v2.0.1 (2023-11-29)
- Add `ProcessEnd::into_exitstatus` testing-only utility method. - Add `ProcessEnd::into_exitstatus` testing-only utility method.

View file

@ -1,6 +1,6 @@
[package] [package]
name = "watchexec-events" name = "watchexec-events"
version = "4.0.0" version = "2.0.1"
authors = ["Félix Saparelli <felix@passcod.name>"] authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0 OR MIT" license = "Apache-2.0 OR MIT"
@ -24,16 +24,17 @@ optional = true
features = ["derive"] features = ["derive"]
[dependencies.watchexec-signals] [dependencies.watchexec-signals]
version = "4.0.0" version = "2.1.0"
path = "../signals" path = "../signals"
default-features = false default-features = false
[target.'cfg(unix)'.dependencies.nix] [target.'cfg(unix)'.dependencies.nix]
version = "0.29.0" version = "0.27.1"
features = ["signal"] features = ["signal"]
[dev-dependencies] [dev-dependencies]
snapbox = "0.6.18" watchexec-events = { version = "*", features = ["serde"], path = "." }
snapbox = "0.4.11"
serde_json = "1.0.107" serde_json = "1.0.107"
[features] [features]

View file

@ -1,5 +1,5 @@
pre-release-commit-message = "release: events v{{version}}" pre-release-commit-message = "release: events v{{version}}"
tag-prefix = "watchexec-events-" tag-prefix = "events-"
tag-message = "watchexec-events {{version}}" tag-message = "watchexec-events {{version}}"
[[pre-release-replacements]] [[pre-release-replacements]]

View file

@ -1,8 +1,6 @@
#![cfg(feature = "serde")]
use std::num::{NonZeroI32, NonZeroI64}; use std::num::{NonZeroI32, NonZeroI64};
use snapbox::{assert_data_eq, file}; use snapbox::assert_eq_path;
use watchexec_events::{ use watchexec_events::{
filekind::{CreateKind, FileEventKind as EventKind, ModifyKind, RemoveKind, RenameMode}, filekind::{CreateKind, FileEventKind as EventKind, ModifyKind, RemoveKind, RenameMode},
Event, FileType, Keyboard, ProcessEnd, Source, Tag, Event, FileType, Keyboard, ProcessEnd, Source, Tag,
@ -20,9 +18,9 @@ fn single() {
metadata: Default::default(), metadata: Default::default(),
}; };
assert_data_eq!( assert_eq_path(
"tests/snapshots/single.json",
serde_json::to_string_pretty(&single).unwrap(), serde_json::to_string_pretty(&single).unwrap(),
file!["snapshots/single.json"],
); );
assert_eq!( assert_eq!(
@ -54,9 +52,9 @@ fn array() {
}, },
]; ];
assert_data_eq!( assert_eq_path(
"tests/snapshots/array.json",
serde_json::to_string_pretty(array).unwrap(), serde_json::to_string_pretty(array).unwrap(),
file!["snapshots/array.json"],
); );
assert_eq!(parse_file("tests/snapshots/array.json"), array); assert_eq!(parse_file("tests/snapshots/array.json"), array);
@ -73,9 +71,9 @@ fn metadata() {
.into(), .into(),
}]; }];
assert_data_eq!( assert_eq_path(
"tests/snapshots/metadata.json",
serde_json::to_string_pretty(metadata).unwrap(), serde_json::to_string_pretty(metadata).unwrap(),
file!["snapshots/metadata.json"],
); );
assert_eq!(parse_file("tests/snapshots/metadata.json"), metadata); assert_eq!(parse_file("tests/snapshots/metadata.json"), metadata);
@ -136,9 +134,9 @@ fn sources() {
}, },
]; ];
assert_data_eq!( assert_eq_path(
"tests/snapshots/sources.json",
serde_json::to_string_pretty(&sources).unwrap(), serde_json::to_string_pretty(&sources).unwrap(),
file!["snapshots/sources.json"],
); );
assert_eq!(parse_file("tests/snapshots/sources.json"), sources); assert_eq!(parse_file("tests/snapshots/sources.json"), sources);
@ -164,9 +162,9 @@ fn signals() {
}, },
]; ];
assert_data_eq!( assert_eq_path(
"tests/snapshots/signals.json",
serde_json::to_string_pretty(&signals).unwrap(), serde_json::to_string_pretty(&signals).unwrap(),
file!["snapshots/signals.json"],
); );
assert_eq!(parse_file("tests/snapshots/signals.json"), signals); assert_eq!(parse_file("tests/snapshots/signals.json"), signals);
@ -195,9 +193,9 @@ fn completions() {
}, },
]; ];
assert_data_eq!( assert_eq_path(
"tests/snapshots/completions.json",
serde_json::to_string_pretty(&completions).unwrap(), serde_json::to_string_pretty(&completions).unwrap(),
file!["snapshots/completions.json"],
); );
assert_eq!(parse_file("tests/snapshots/completions.json"), completions); assert_eq!(parse_file("tests/snapshots/completions.json"), completions);
@ -246,9 +244,9 @@ fn paths() {
}, },
]; ];
assert_data_eq!( assert_eq_path(
"tests/snapshots/paths.json",
serde_json::to_string_pretty(&paths).unwrap(), serde_json::to_string_pretty(&paths).unwrap(),
file!["snapshots/paths.json"],
); );
assert_eq!(parse_file("tests/snapshots/paths.json"), paths); assert_eq!(parse_file("tests/snapshots/paths.json"), paths);

View file

@ -2,22 +2,6 @@
## Next (YYYY-MM-DD) ## Next (YYYY-MM-DD)
## v6.0.0 (2024-10-14)
- Deps: watchexec 5
## v5.0.0 (2024-10-13)
- Add whitelist parameter.
## v4.0.1 (2024-04-28)
- Hide fmt::Debug spew from ignore crate, use `full_debug` feature to restore.
## v4.0.0 (2024-04-20)
- Deps: watchexec 4
## v3.0.0 (2024-01-01) ## v3.0.0 (2024-01-01)
- Deps: `watchexec-filterer-ignore` and `ignore-files` - Deps: `watchexec-filterer-ignore` and `ignore-files`

View file

@ -1,6 +1,6 @@
[package] [package]
name = "watchexec-filterer-globset" name = "watchexec-filterer-globset"
version = "6.0.0" version = "3.0.0"
authors = ["Matt Green <mattgreenrocks@gmail.com>", "Félix Saparelli <felix@passcod.name>"] authors = ["Matt Green <mattgreenrocks@gmail.com>", "Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0" license = "Apache-2.0"
@ -20,24 +20,27 @@ ignore = "0.4.18"
tracing = "0.1.40" tracing = "0.1.40"
[dependencies.ignore-files] [dependencies.ignore-files]
version = "3.0.2" version = "2.1.0"
path = "../../ignore-files" path = "../../ignore-files"
[dependencies.watchexec] [dependencies.watchexec]
version = "5.0.0" version = "3.0.1"
path = "../../lib" path = "../../lib"
[dependencies.watchexec-events] [dependencies.watchexec-events]
version = "4.0.0" version = "2.0.1"
path = "../../events" path = "../../events"
[dependencies.watchexec-filterer-ignore] [dependencies.watchexec-filterer-ignore]
version = "5.0.0" version = "3.0.0"
path = "../ignore" path = "../ignore"
[dev-dependencies] [dev-dependencies]
tracing-subscriber = "0.3.6" tracing-subscriber = "0.3.6"
tempfile = "3"
[dev-dependencies.project-origins]
version = "1.3.0"
path = "../../project-origins"
[dev-dependencies.tokio] [dev-dependencies.tokio]
version = "1.33.0" version = "1.33.0"
@ -48,9 +51,3 @@ features = [
"rt-multi-thread", "rt-multi-thread",
"macros", "macros",
] ]
[features]
default = []
## Don't hide ignore::gitignore::Gitignore Debug impl
full_debug = []

View file

@ -1,5 +1,5 @@
pre-release-commit-message = "release: filterer-globset v{{version}}" pre-release-commit-message = "release: filterer-globset v{{version}}"
tag-prefix = "watchexec-filterer-globset-" tag-prefix = "filterer-globset-"
tag-message = "watchexec-filterer-globset {{version}}" tag-message = "watchexec-filterer-globset {{version}}"
[[pre-release-replacements]] [[pre-release-replacements]]

View file

@ -10,7 +10,6 @@
use std::{ use std::{
ffi::OsString, ffi::OsString,
fmt,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
@ -22,38 +21,22 @@ use watchexec_events::{Event, FileType, Priority};
use watchexec_filterer_ignore::IgnoreFilterer; use watchexec_filterer_ignore::IgnoreFilterer;
/// A simple filterer in the style of the watchexec v1.17 filter. /// A simple filterer in the style of the watchexec v1.17 filter.
#[cfg_attr(feature = "full_debug", derive(Debug))] #[derive(Debug)]
pub struct GlobsetFilterer { pub struct GlobsetFilterer {
#[cfg_attr(not(unix), allow(dead_code))] #[cfg_attr(not(unix), allow(dead_code))]
origin: PathBuf, origin: PathBuf,
filters: Gitignore, filters: Gitignore,
ignores: Gitignore, ignores: Gitignore,
whitelist: Vec<PathBuf>,
ignore_files: IgnoreFilterer, ignore_files: IgnoreFilterer,
extensions: Vec<OsString>, extensions: Vec<OsString>,
} }
#[cfg(not(feature = "full_debug"))]
impl fmt::Debug for GlobsetFilterer {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("GlobsetFilterer")
.field("origin", &self.origin)
.field("filters", &"ignore::gitignore::Gitignore{...}")
.field("ignores", &"ignore::gitignore::Gitignore{...}")
.field("ignore_files", &self.ignore_files)
.field("extensions", &self.extensions)
.finish()
}
}
impl GlobsetFilterer { impl GlobsetFilterer {
/// Create a new `GlobsetFilterer` from a project origin, allowed extensions, and lists of globs. /// Create a new `GlobsetFilterer` from a project origin, allowed extensions, and lists of globs.
/// ///
/// The first list is used to filter paths (only matching paths will pass the filter), the /// The first list is used to filter paths (only matching paths will pass the filter), the
/// second is used to ignore paths (matching paths will fail the pattern). If the filter list is /// second is used to ignore paths (matching paths will fail the pattern). If the filter list is
/// empty, only the ignore list will be used. If both lists are empty, the filter always passes. /// empty, only the ignore list will be used. If both lists are empty, the filter always passes.
/// Whitelist is used to automatically accept files even if they would be filtered out
/// otherwise. It is passed as an absolute path to the file that should not be filtered.
/// ///
/// Ignores and filters are passed as a tuple of the glob pattern as a string and an optional /// Ignores and filters are passed as a tuple of the glob pattern as a string and an optional
/// path of the folder the pattern should apply in (e.g. the folder a gitignore file is in). /// path of the folder the pattern should apply in (e.g. the folder a gitignore file is in).
@ -67,7 +50,6 @@ impl GlobsetFilterer {
origin: impl AsRef<Path>, origin: impl AsRef<Path>,
filters: impl IntoIterator<Item = (String, Option<PathBuf>)>, filters: impl IntoIterator<Item = (String, Option<PathBuf>)>,
ignores: impl IntoIterator<Item = (String, Option<PathBuf>)>, ignores: impl IntoIterator<Item = (String, Option<PathBuf>)>,
whitelist: impl IntoIterator<Item = PathBuf>,
ignore_files: impl IntoIterator<Item = IgnoreFile>, ignore_files: impl IntoIterator<Item = IgnoreFile>,
extensions: impl IntoIterator<Item = OsString>, extensions: impl IntoIterator<Item = OsString>,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
@ -103,8 +85,6 @@ impl GlobsetFilterer {
ignore_files.finish(); ignore_files.finish();
let ignore_files = IgnoreFilterer(ignore_files); let ignore_files = IgnoreFilterer(ignore_files);
let whitelist = whitelist.into_iter().collect::<Vec<_>>();
debug!( debug!(
?origin, ?origin,
num_filters=%filters.num_ignores(), num_filters=%filters.num_ignores(),
@ -119,7 +99,6 @@ impl GlobsetFilterer {
origin: origin.into(), origin: origin.into(),
filters, filters,
ignores, ignores,
whitelist,
ignore_files, ignore_files,
extensions, extensions,
}) })
@ -133,19 +112,6 @@ impl Filterer for GlobsetFilterer {
fn check_event(&self, event: &Event, priority: Priority) -> Result<bool, RuntimeError> { fn check_event(&self, event: &Event, priority: Priority) -> Result<bool, RuntimeError> {
let _span = trace_span!("filterer_check").entered(); let _span = trace_span!("filterer_check").entered();
{
trace!("checking internal whitelist");
// Ideally check path equality backwards for better perf
// There could be long matching prefixes so we will exit late
if event
.paths()
.any(|(p, _)| self.whitelist.iter().any(|w| w == p))
{
trace!("internal whitelist filterer matched (success)");
return Ok(true);
}
}
{ {
trace!("checking internal ignore filterer"); trace!("checking internal ignore filterer");
if !self if !self

View file

@ -1,10 +1,9 @@
mod helpers; mod helpers;
use helpers::globset::*; use helpers::globset::*;
use std::io::Write;
#[tokio::test] #[tokio::test]
async fn empty_filter_passes_everything() { async fn empty_filter_passes_everything() {
let filterer = filt(&[], &[], &[], &[], &[]).await; let filterer = filt(&[], &[], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json"); filterer.file_does_pass("Cargo.json");
@ -24,7 +23,7 @@ async fn empty_filter_passes_everything() {
#[tokio::test] #[tokio::test]
async fn exact_filename() { async fn exact_filename() {
let filterer = filt(&["Cargo.toml"], &[], &[], &[], &[]).await; let filterer = filt(&["Cargo.toml"], &[], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("/test/foo/bar/Cargo.toml"); filterer.file_does_pass("/test/foo/bar/Cargo.toml");
@ -37,7 +36,7 @@ async fn exact_filename() {
#[tokio::test] #[tokio::test]
async fn exact_filename_in_folder() { async fn exact_filename_in_folder() {
let filterer = filt(&["sub/Cargo.toml"], &[], &[], &[], &[]).await; let filterer = filt(&["sub/Cargo.toml"], &[], &[]).await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_does_pass("sub/Cargo.toml"); filterer.file_does_pass("sub/Cargo.toml");
@ -51,7 +50,7 @@ async fn exact_filename_in_folder() {
#[tokio::test] #[tokio::test]
async fn exact_filename_in_hidden_folder() { async fn exact_filename_in_hidden_folder() {
let filterer = filt(&[".sub/Cargo.toml"], &[], &[], &[], &[]).await; let filterer = filt(&[".sub/Cargo.toml"], &[], &[]).await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_does_pass(".sub/Cargo.toml"); filterer.file_does_pass(".sub/Cargo.toml");
@ -65,7 +64,7 @@ async fn exact_filename_in_hidden_folder() {
#[tokio::test] #[tokio::test]
async fn exact_filenames_multiple() { async fn exact_filenames_multiple() {
let filterer = filt(&["Cargo.toml", "package.json"], &[], &[], &[], &[]).await; let filterer = filt(&["Cargo.toml", "package.json"], &[], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("/test/foo/bar/Cargo.toml"); filterer.file_does_pass("/test/foo/bar/Cargo.toml");
@ -82,7 +81,7 @@ async fn exact_filenames_multiple() {
#[tokio::test] #[tokio::test]
async fn glob_single_final_ext_star() { async fn glob_single_final_ext_star() {
let filterer = filt(&["Cargo.*"], &[], &[], &[], &[]).await; let filterer = filt(&["Cargo.*"], &[], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json"); filterer.file_does_pass("Cargo.json");
@ -94,7 +93,7 @@ async fn glob_single_final_ext_star() {
#[tokio::test] #[tokio::test]
async fn glob_star_trailing_slash() { async fn glob_star_trailing_slash() {
let filterer = filt(&["Cargo.*/"], &[], &[], &[], &[]).await; let filterer = filt(&["Cargo.*/"], &[], &[]).await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json"); filterer.file_doesnt_pass("Cargo.json");
@ -107,7 +106,7 @@ async fn glob_star_trailing_slash() {
#[tokio::test] #[tokio::test]
async fn glob_star_leading_slash() { async fn glob_star_leading_slash() {
let filterer = filt(&["/Cargo.*"], &[], &[], &[], &[]).await; let filterer = filt(&["/Cargo.*"], &[], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json"); filterer.file_does_pass("Cargo.json");
@ -119,7 +118,7 @@ async fn glob_star_leading_slash() {
#[tokio::test] #[tokio::test]
async fn glob_leading_double_star() { async fn glob_leading_double_star() {
let filterer = filt(&["**/possum"], &[], &[], &[], &[]).await; let filterer = filt(&["**/possum"], &[], &[]).await;
filterer.file_does_pass("possum"); filterer.file_does_pass("possum");
filterer.file_does_pass("foo/bar/possum"); filterer.file_does_pass("foo/bar/possum");
@ -134,7 +133,7 @@ async fn glob_leading_double_star() {
#[tokio::test] #[tokio::test]
async fn glob_trailing_double_star() { async fn glob_trailing_double_star() {
let filterer = filt(&["possum/**"], &[], &[], &[], &[]).await; let filterer = filt(&["possum/**"], &[], &[]).await;
// these do work by expectation and in v1 // these do work by expectation and in v1
filterer.file_does_pass("/test/possum/foo/bar"); filterer.file_does_pass("/test/possum/foo/bar");
@ -148,7 +147,7 @@ async fn glob_trailing_double_star() {
#[tokio::test] #[tokio::test]
async fn glob_middle_double_star() { async fn glob_middle_double_star() {
let filterer = filt(&["apples/**/oranges"], &[], &[], &[], &[]).await; let filterer = filt(&["apples/**/oranges"], &[], &[]).await;
filterer.dir_doesnt_pass("/a/folder"); filterer.dir_doesnt_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges"); filterer.file_does_pass("apples/carrots/oranges");
@ -163,7 +162,7 @@ async fn glob_middle_double_star() {
#[tokio::test] #[tokio::test]
async fn glob_double_star_trailing_slash() { async fn glob_double_star_trailing_slash() {
let filterer = filt(&["apples/**/oranges/"], &[], &[], &[], &[]).await; let filterer = filt(&["apples/**/oranges/"], &[], &[]).await;
filterer.dir_doesnt_pass("/a/folder"); filterer.dir_doesnt_pass("/a/folder");
filterer.file_doesnt_pass("apples/carrots/oranges"); filterer.file_doesnt_pass("apples/carrots/oranges");
@ -181,7 +180,7 @@ async fn glob_double_star_trailing_slash() {
#[tokio::test] #[tokio::test]
async fn ignore_exact_filename() { async fn ignore_exact_filename() {
let filterer = filt(&[], &["Cargo.toml"], &[], &[], &[]).await; let filterer = filt(&[], &["Cargo.toml"], &[]).await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml"); filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
@ -194,7 +193,7 @@ async fn ignore_exact_filename() {
#[tokio::test] #[tokio::test]
async fn ignore_exact_filename_in_folder() { async fn ignore_exact_filename_in_folder() {
let filterer = filt(&[], &["sub/Cargo.toml"], &[], &[], &[]).await; let filterer = filt(&[], &["sub/Cargo.toml"], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_doesnt_pass("sub/Cargo.toml"); filterer.file_doesnt_pass("sub/Cargo.toml");
@ -208,7 +207,7 @@ async fn ignore_exact_filename_in_folder() {
#[tokio::test] #[tokio::test]
async fn ignore_exact_filename_in_hidden_folder() { async fn ignore_exact_filename_in_hidden_folder() {
let filterer = filt(&[], &[".sub/Cargo.toml"], &[], &[], &[]).await; let filterer = filt(&[], &[".sub/Cargo.toml"], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_doesnt_pass(".sub/Cargo.toml"); filterer.file_doesnt_pass(".sub/Cargo.toml");
@ -222,7 +221,7 @@ async fn ignore_exact_filename_in_hidden_folder() {
#[tokio::test] #[tokio::test]
async fn ignore_exact_filenames_multiple() { async fn ignore_exact_filenames_multiple() {
let filterer = filt(&[], &["Cargo.toml", "package.json"], &[], &[], &[]).await; let filterer = filt(&[], &["Cargo.toml", "package.json"], &[]).await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml"); filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
@ -239,7 +238,7 @@ async fn ignore_exact_filenames_multiple() {
#[tokio::test] #[tokio::test]
async fn ignore_glob_single_final_ext_star() { async fn ignore_glob_single_final_ext_star() {
let filterer = filt(&[], &["Cargo.*"], &[], &[], &[]).await; let filterer = filt(&[], &["Cargo.*"], &[]).await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json"); filterer.file_doesnt_pass("Cargo.json");
@ -251,7 +250,7 @@ async fn ignore_glob_single_final_ext_star() {
#[tokio::test] #[tokio::test]
async fn ignore_glob_star_trailing_slash() { async fn ignore_glob_star_trailing_slash() {
let filterer = filt(&[], &["Cargo.*/"], &[], &[], &[]).await; let filterer = filt(&[], &["Cargo.*/"], &[]).await;
filterer.file_does_pass("Cargo.toml"); filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json"); filterer.file_does_pass("Cargo.json");
@ -264,7 +263,7 @@ async fn ignore_glob_star_trailing_slash() {
#[tokio::test] #[tokio::test]
async fn ignore_glob_star_leading_slash() { async fn ignore_glob_star_leading_slash() {
let filterer = filt(&[], &["/Cargo.*"], &[], &[], &[]).await; let filterer = filt(&[], &["/Cargo.*"], &[]).await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json"); filterer.file_doesnt_pass("Cargo.json");
@ -276,7 +275,7 @@ async fn ignore_glob_star_leading_slash() {
#[tokio::test] #[tokio::test]
async fn ignore_glob_leading_double_star() { async fn ignore_glob_leading_double_star() {
let filterer = filt(&[], &["**/possum"], &[], &[], &[]).await; let filterer = filt(&[], &["**/possum"], &[]).await;
filterer.file_doesnt_pass("possum"); filterer.file_doesnt_pass("possum");
filterer.file_doesnt_pass("foo/bar/possum"); filterer.file_doesnt_pass("foo/bar/possum");
@ -291,7 +290,7 @@ async fn ignore_glob_leading_double_star() {
#[tokio::test] #[tokio::test]
async fn ignore_glob_trailing_double_star() { async fn ignore_glob_trailing_double_star() {
let filterer = filt(&[], &["possum/**"], &[], &[], &[]).await; let filterer = filt(&[], &["possum/**"], &[]).await;
filterer.file_does_pass("possum"); filterer.file_does_pass("possum");
filterer.file_doesnt_pass("possum/foo/bar"); filterer.file_doesnt_pass("possum/foo/bar");
@ -310,7 +309,7 @@ async fn ignore_glob_trailing_double_star() {
#[tokio::test] #[tokio::test]
async fn ignore_glob_middle_double_star() { async fn ignore_glob_middle_double_star() {
let filterer = filt(&[], &["apples/**/oranges"], &[], &[], &[]).await; let filterer = filt(&[], &["apples/**/oranges"], &[]).await;
filterer.dir_does_pass("/a/folder"); filterer.dir_does_pass("/a/folder");
filterer.file_doesnt_pass("apples/carrots/oranges"); filterer.file_doesnt_pass("apples/carrots/oranges");
@ -325,7 +324,7 @@ async fn ignore_glob_middle_double_star() {
#[tokio::test] #[tokio::test]
async fn ignore_glob_double_star_trailing_slash() { async fn ignore_glob_double_star_trailing_slash() {
let filterer = filt(&[], &["apples/**/oranges/"], &[], &[], &[]).await; let filterer = filt(&[], &["apples/**/oranges/"], &[]).await;
filterer.dir_does_pass("/a/folder"); filterer.dir_does_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges"); filterer.file_does_pass("apples/carrots/oranges");
@ -343,14 +342,7 @@ async fn ignore_glob_double_star_trailing_slash() {
#[tokio::test] #[tokio::test]
async fn ignores_take_precedence() { async fn ignores_take_precedence() {
let filterer = filt( let filterer = filt(&["*.docx", "*.toml", "*.json"], &["*.toml", "*.json"], &[]).await;
&["*.docx", "*.toml", "*.json"],
&["*.toml", "*.json"],
&[],
&[],
&[],
)
.await;
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml"); filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
@ -363,7 +355,7 @@ async fn ignores_take_precedence() {
#[tokio::test] #[tokio::test]
async fn extensions_fail_dirs() { async fn extensions_fail_dirs() {
let filterer = filt(&[], &[], &[], &["py"], &[]).await; let filterer = filt(&[], &[], &["py"]).await;
filterer.file_does_pass("Cargo.py"); filterer.file_does_pass("Cargo.py");
filterer.file_doesnt_pass("Cargo.toml"); filterer.file_doesnt_pass("Cargo.toml");
@ -374,7 +366,7 @@ async fn extensions_fail_dirs() {
#[tokio::test] #[tokio::test]
async fn extensions_fail_extensionless() { async fn extensions_fail_extensionless() {
let filterer = filt(&[], &[], &[], &["py"], &[]).await; let filterer = filt(&[], &[], &["py"]).await;
filterer.file_does_pass("Cargo.py"); filterer.file_does_pass("Cargo.py");
filterer.file_doesnt_pass("Cargo"); filterer.file_doesnt_pass("Cargo");
@ -385,7 +377,7 @@ async fn multipath_allow_on_any_one_pass() {
use watchexec::filter::Filterer; use watchexec::filter::Filterer;
use watchexec_events::{Event, FileType, Tag}; use watchexec_events::{Event, FileType, Tag};
let filterer = filt(&[], &[], &[], &["py"], &[]).await; let filterer = filt(&[], &[], &["py"]).await;
let origin = tokio::fs::canonicalize(".").await.unwrap(); let origin = tokio::fs::canonicalize(".").await.unwrap();
let event = Event { let event = Event {
@ -411,7 +403,7 @@ async fn multipath_allow_on_any_one_pass() {
#[tokio::test] #[tokio::test]
async fn extensions_and_filters_glob() { async fn extensions_and_filters_glob() {
let filterer = filt(&["*/justfile"], &[], &[], &["md", "css"], &[]).await; let filterer = filt(&["*/justfile"], &[], &["md", "css"]).await;
filterer.file_does_pass("foo/justfile"); filterer.file_does_pass("foo/justfile");
filterer.file_does_pass("bar.md"); filterer.file_does_pass("bar.md");
@ -425,7 +417,7 @@ async fn extensions_and_filters_glob() {
#[tokio::test] #[tokio::test]
async fn extensions_and_filters_slash() { async fn extensions_and_filters_slash() {
let filterer = filt(&["/justfile"], &[], &[], &["md", "css"], &[]).await; let filterer = filt(&["/justfile"], &[], &["md", "css"]).await;
filterer.file_does_pass("justfile"); filterer.file_does_pass("justfile");
filterer.file_does_pass("bar.md"); filterer.file_does_pass("bar.md");
@ -435,7 +427,7 @@ async fn extensions_and_filters_slash() {
#[tokio::test] #[tokio::test]
async fn leading_single_glob_file() { async fn leading_single_glob_file() {
let filterer = filt(&["*/justfile"], &[], &[], &[], &[]).await; let filterer = filt(&["*/justfile"], &[], &[]).await;
filterer.file_does_pass("foo/justfile"); filterer.file_does_pass("foo/justfile");
filterer.file_doesnt_pass("notfile"); filterer.file_doesnt_pass("notfile");
@ -451,7 +443,7 @@ async fn nonpath_event_passes() {
use watchexec::filter::Filterer; use watchexec::filter::Filterer;
use watchexec_events::{Event, Source, Tag}; use watchexec_events::{Event, Source, Tag};
let filterer = filt(&[], &[], &[], &["py"], &[]).await; let filterer = filt(&[], &[], &["py"]).await;
assert!(filterer assert!(filterer
.check_event( .check_event(
@ -478,7 +470,7 @@ async fn nonpath_event_passes() {
#[tokio::test] #[tokio::test]
async fn ignore_folder_incorrectly_with_bare_match() { async fn ignore_folder_incorrectly_with_bare_match() {
let filterer = filt(&[], &["prunes"], &[], &[], &[]).await; let filterer = filt(&[], &["prunes"], &[]).await;
filterer.file_does_pass("apples"); filterer.file_does_pass("apples");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges"); filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
@ -509,7 +501,7 @@ async fn ignore_folder_incorrectly_with_bare_match() {
#[tokio::test] #[tokio::test]
async fn ignore_folder_incorrectly_with_bare_and_leading_slash() { async fn ignore_folder_incorrectly_with_bare_and_leading_slash() {
let filterer = filt(&[], &["/prunes"], &[], &[], &[]).await; let filterer = filt(&[], &["/prunes"], &[]).await;
filterer.file_does_pass("apples"); filterer.file_does_pass("apples");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges"); filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
@ -540,7 +532,7 @@ async fn ignore_folder_incorrectly_with_bare_and_leading_slash() {
#[tokio::test] #[tokio::test]
async fn ignore_folder_incorrectly_with_bare_and_trailing_slash() { async fn ignore_folder_incorrectly_with_bare_and_trailing_slash() {
let filterer = filt(&[], &["prunes/"], &[], &[], &[]).await; let filterer = filt(&[], &["prunes/"], &[]).await;
filterer.file_does_pass("apples"); filterer.file_does_pass("apples");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges"); filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
@ -571,7 +563,7 @@ async fn ignore_folder_incorrectly_with_bare_and_trailing_slash() {
#[tokio::test] #[tokio::test]
async fn ignore_folder_incorrectly_with_only_double_double_glob() { async fn ignore_folder_incorrectly_with_only_double_double_glob() {
let filterer = filt(&[], &["**/prunes/**"], &[], &[], &[]).await; let filterer = filt(&[], &["**/prunes/**"], &[]).await;
filterer.file_does_pass("apples"); filterer.file_does_pass("apples");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges"); filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
@ -602,7 +594,7 @@ async fn ignore_folder_incorrectly_with_only_double_double_glob() {
#[tokio::test] #[tokio::test]
async fn ignore_folder_correctly_with_double_and_double_double_globs() { async fn ignore_folder_correctly_with_double_and_double_double_globs() {
let filterer = filt(&[], &["**/prunes", "**/prunes/**"], &[], &[], &[]).await; let filterer = filt(&[], &["**/prunes", "**/prunes/**"], &[]).await;
filterer.file_does_pass("apples"); filterer.file_does_pass("apples");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges"); filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
@ -628,94 +620,3 @@ async fn ignore_folder_correctly_with_double_and_double_double_globs() {
filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/oranges"); filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/oranges");
filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges"); filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges");
} }
#[tokio::test]
async fn whitelist_overrides_ignore() {
let filterer = filt(&[], &["**/prunes"], &["/prunes"], &[], &[]).await;
filterer.file_does_pass("apples");
filterer.file_does_pass("/prunes");
filterer.dir_does_pass("apples");
filterer.dir_does_pass("/prunes");
filterer.file_does_pass("raw-prunes");
filterer.dir_does_pass("raw-prunes");
filterer.file_doesnt_pass("apples/prunes");
filterer.file_doesnt_pass("raw/prunes");
filterer.dir_doesnt_pass("apples/prunes");
filterer.dir_doesnt_pass("raw/prunes");
}
#[tokio::test]
async fn whitelist_overrides_ignore_files() {
let mut ignore_file = tempfile::NamedTempFile::new().unwrap();
let _ = ignore_file.write(b"prunes");
let origin = std::fs::canonicalize(".").unwrap();
let whitelist = origin.join("prunes").display().to_string();
let filterer = filt(
&[],
&[],
&[&whitelist],
&[],
&[ignore_file.path().to_path_buf()],
)
.await;
filterer.file_does_pass("apples");
filterer.file_does_pass("prunes");
filterer.dir_does_pass("apples");
filterer.dir_does_pass("prunes");
filterer.file_does_pass("raw-prunes");
filterer.dir_does_pass("raw-prunes");
filterer.file_doesnt_pass("apples/prunes");
filterer.file_doesnt_pass("raw/prunes");
filterer.dir_doesnt_pass("apples/prunes");
filterer.dir_doesnt_pass("raw/prunes");
}
#[tokio::test]
async fn whitelist_overrides_ignore_files_nested() {
let mut ignore_file = tempfile::NamedTempFile::new().unwrap();
let _ = ignore_file.write(b"prunes\n");
let origin = std::fs::canonicalize(".").unwrap();
let whitelist = origin.join("prunes").join("target").display().to_string();
let filterer = filt(
&[],
&[],
&[&whitelist],
&[],
&[ignore_file.path().to_path_buf()],
)
.await;
filterer.file_does_pass("apples");
filterer.file_doesnt_pass("prunes");
filterer.dir_does_pass("apples");
filterer.dir_doesnt_pass("prunes");
filterer.file_does_pass("raw-prunes");
filterer.dir_does_pass("raw-prunes");
filterer.file_doesnt_pass("prunes/apples");
filterer.file_doesnt_pass("prunes/raw");
filterer.dir_doesnt_pass("prunes/apples");
filterer.dir_doesnt_pass("prunes/raw");
filterer.file_doesnt_pass("apples/prunes");
filterer.file_doesnt_pass("raw/prunes");
filterer.dir_doesnt_pass("apples/prunes");
filterer.dir_doesnt_pass("raw/prunes");
filterer.file_does_pass("prunes/target");
filterer.dir_does_pass("prunes/target");
filterer.file_doesnt_pass("prunes/nested/target");
filterer.dir_doesnt_pass("prunes/nested/target");
}

View file

@ -4,6 +4,7 @@ use std::{
}; };
use ignore_files::IgnoreFile; use ignore_files::IgnoreFile;
use project_origins::ProjectType;
use watchexec::{error::RuntimeError, filter::Filterer}; use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{Event, FileType, Priority, Tag}; use watchexec_events::{Event, FileType, Priority, Tag};
use watchexec_filterer_globset::GlobsetFilterer; use watchexec_filterer_globset::GlobsetFilterer;
@ -11,6 +12,7 @@ use watchexec_filterer_ignore::IgnoreFilterer;
pub mod globset { pub mod globset {
pub use super::globset_filt as filt; pub use super::globset_filt as filt;
pub use super::Applies;
pub use super::PathHarness; pub use super::PathHarness;
pub use watchexec_events::Priority; pub use watchexec_events::Priority;
} }
@ -103,9 +105,7 @@ fn tracing_init() {
pub async fn globset_filt( pub async fn globset_filt(
filters: &[&str], filters: &[&str],
ignores: &[&str], ignores: &[&str],
whitelists: &[&str],
extensions: &[&str], extensions: &[&str],
ignore_files: &[PathBuf],
) -> GlobsetFilterer { ) -> GlobsetFilterer {
let origin = tokio::fs::canonicalize(".").await.unwrap(); let origin = tokio::fs::canonicalize(".").await.unwrap();
tracing_init(); tracing_init();
@ -113,14 +113,27 @@ pub async fn globset_filt(
origin, origin,
filters.iter().map(|s| ((*s).to_string(), None)), filters.iter().map(|s| ((*s).to_string(), None)),
ignores.iter().map(|s| ((*s).to_string(), None)), ignores.iter().map(|s| ((*s).to_string(), None)),
whitelists.iter().map(|s| (*s).into()), vec![],
ignore_files.iter().map(|path| IgnoreFile {
path: path.clone(),
applies_in: None,
applies_to: None,
}),
extensions.iter().map(OsString::from), extensions.iter().map(OsString::from),
) )
.await .await
.expect("making filterer") .expect("making filterer")
} }
pub trait Applies {
fn applies_in(self, origin: &str) -> Self;
fn applies_to(self, project_type: ProjectType) -> Self;
}
impl Applies for IgnoreFile {
fn applies_in(mut self, origin: &str) -> Self {
let origin = std::fs::canonicalize(".").unwrap().join(origin);
self.applies_in = Some(origin);
self
}
fn applies_to(mut self, project_type: ProjectType) -> Self {
self.applies_to = Some(project_type);
self
}
}

View file

@ -2,16 +2,6 @@
## Next (YYYY-MM-DD) ## Next (YYYY-MM-DD)
## v5.0.0 (2024-10-14)
## v4.0.1 (2024-04-28)
## v4.0.0 (2024-04-20)
- Deps: watchexec 4
## v3.0.1 (2024-01-04)
- Normalise paths on all platforms (via `normalize-path`). - Normalise paths on all platforms (via `normalize-path`).
## v3.0.0 (2024-01-01) ## v3.0.0 (2024-01-01)

View file

@ -1,6 +1,6 @@
[package] [package]
name = "watchexec-filterer-ignore" name = "watchexec-filterer-ignore"
version = "5.0.0" version = "3.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"] authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0" license = "Apache-2.0"
@ -22,26 +22,26 @@ normalize-path = "0.2.1"
tracing = "0.1.40" tracing = "0.1.40"
[dependencies.ignore-files] [dependencies.ignore-files]
version = "3.0.2" version = "2.1.0"
path = "../../ignore-files" path = "../../ignore-files"
[dependencies.watchexec] [dependencies.watchexec]
version = "5.0.0" version = "3.0.1"
path = "../../lib" path = "../../lib"
[dependencies.watchexec-events] [dependencies.watchexec-events]
version = "4.0.0" version = "2.0.1"
path = "../../events" path = "../../events"
[dependencies.watchexec-signals] [dependencies.watchexec-signals]
version = "4.0.0" version = "2.1.0"
path = "../../signals" path = "../../signals"
[dev-dependencies] [dev-dependencies]
tracing-subscriber = "0.3.6" tracing-subscriber = "0.3.6"
[dev-dependencies.project-origins] [dev-dependencies.project-origins]
version = "1.4.0" version = "1.3.0"
path = "../../project-origins" path = "../../project-origins"
[dev-dependencies.tokio] [dev-dependencies.tokio]

View file

@ -1,5 +1,5 @@
pre-release-commit-message = "release: filterer-ignore v{{version}}" pre-release-commit-message = "release: filterer-ignore v{{version}}"
tag-prefix = "watchexec-filterer-ignore-" tag-prefix = "filterer-ignore-"
tag-message = "watchexec-filterer-ignore {{version}}" tag-message = "watchexec-filterer-ignore {{version}}"
[[pre-release-replacements]] [[pre-release-replacements]]

View file

@ -14,6 +14,7 @@ pub mod ignore {
pub use super::ignore_filt as filt; pub use super::ignore_filt as filt;
pub use super::Applies; pub use super::Applies;
pub use super::PathHarness; pub use super::PathHarness;
pub use watchexec_events::Priority;
} }
pub trait PathHarness: Filterer { pub trait PathHarness: Filterer {

View file

@ -0,0 +1,30 @@
# Changelog
## Next (YYYY-MM-DD)
## v2.0.0 (2024-01-01)
- Depend on `watchexec-events` instead of the `watchexec` re-export.
## v1.0.0 (2023-12-10)
- Officially deprecate (crate is now unmaintained).
- Depend on `watchexec-events` instead of the `watchexec` re-export.
- Remove error diagnostic codes.
- Deps: upgrade Tokio requirement to 1.32.
## v0.3.0 (2023-03-18)
- Ditch MSRV policy. The `rust-version` indication will remain, for the minimum estimated Rust version for the code features used in the crate's own code, but dependencies may have already moved on. From now on, only latest stable is assumed and tested for. ([#510](https://github.com/watchexec/watchexec/pull/510))
## v0.2.0 (2023-01-09)
- MSRV: bump to 1.61.0
## v0.1.1 (2022-09-07)
- Deps: update miette to 5.3.0
## v0.1.0 (2022-06-23)
- Initial release as a separate crate.

View file

@ -0,0 +1,71 @@
[package]
name = "watchexec-filterer-tagged"
version = "2.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0"
description = "Watchexec filterer component using tagged filters"
keywords = ["watchexec", "filterer", "tags"]
documentation = "https://docs.rs/watchexec-filterer-tagged"
homepage = "https://watchexec.github.io"
repository = "https://github.com/watchexec/watchexec"
readme = "README.md"
rust-version = "1.61.0"
edition = "2021"
[badges.maintenance]
status = "deprecated"
[dependencies]
futures = "0.3.25"
globset = "0.4.8"
ignore = "0.4.18"
miette = "5.3.0"
nom = "7.0.0"
regex = "1.5.4"
thiserror = "1.0.26"
tracing = "0.1.26"
unicase = "2.6.0"
[dependencies.ignore-files]
version = "2.1.0"
path = "../../ignore-files"
[dependencies.tokio]
version = "1.32.0"
features = [
"fs",
]
[dependencies.watchexec]
version = "3.0.1"
path = "../../lib"
[dependencies.watchexec-events]
version = "2.0.1"
path = "../../events"
[dependencies.watchexec-filterer-ignore]
version = "3.0.0"
path = "../ignore"
[dependencies.watchexec-signals]
version = "2.1.0"
path = "../../signals"
[dev-dependencies]
tracing-subscriber = "0.3.6"
[dev-dependencies.project-origins]
version = "1.3.0"
path = "../../project-origins"
[dev-dependencies.tokio]
version = "1.32.0"
features = [
"fs",
"io-std",
"sync",
]

View file

@ -0,0 +1,19 @@
[![Crates.io page](https://badgen.net/crates/v/watchexec-filterer-tagged)](https://crates.io/crates/watchexec-filterer-tagged)
[![API Docs](https://docs.rs/watchexec-filterer-tagged/badge.svg)][docs]
[![Crate license: Apache 2.0](https://badgen.net/badge/license/Apache%202.0)][license]
[![CI status](https://github.com/watchexec/watchexec/actions/workflows/check.yml/badge.svg)](https://github.com/watchexec/watchexec/actions/workflows/check.yml)
# Watchexec filterer: tagged
_Experimental filterer using tagged filters._
- **[API documentation][docs]**.
- Licensed under [Apache 2.0][license].
- Status: soft-deprecated.
The tagged filterer is not in use in the Watchexec CLI, but this crate will continue being updated
until and unless it becomes too much of a pain to do so, for third party users. It is expected that
some of the code will eventually be reused for a more generic filter crate without the tagged syntax.
[docs]: https://docs.rs/watchexec-filterer-tagged
[license]: ../../../LICENSE

View file

@ -0,0 +1,10 @@
pre-release-commit-message = "release: filterer-tagged v{{version}}"
tag-prefix = "filterer-tagged-"
tag-message = "watchexec-filterer-tagged {{version}}"
[[pre-release-replacements]]
file = "CHANGELOG.md"
search = "^## Next.*$"
replace = "## Next (YYYY-MM-DD)\n\n## v{{version}} ({{date}})"
prerelease = true
max = 1

View file

@ -0,0 +1,73 @@
use std::collections::HashMap;
use ignore::gitignore::Gitignore;
use miette::Diagnostic;
use thiserror::Error;
use tokio::sync::watch::error::SendError;
use watchexec::error::RuntimeError;
use watchexec_filterer_ignore::IgnoreFilterer;
use crate::{Filter, Matcher};
/// Errors emitted by the `TaggedFilterer`.
#[derive(Debug, Diagnostic, Error)]
#[non_exhaustive]
pub enum TaggedFiltererError {
/// Generic I/O error, with some context.
#[error("io({about}): {err}")]
IoError {
/// What it was about.
about: &'static str,
/// The I/O error which occurred.
#[source]
err: std::io::Error,
},
/// Error received when a tagged filter cannot be parsed.
#[error("cannot parse filter `{src}`: {err:?}")]
Parse {
/// The source of the filter.
#[source_code]
src: String,
/// What went wrong.
err: nom::error::ErrorKind,
},
/// Error received when a filter cannot be added or removed from a tagged filter list.
#[error("cannot {action} filter: {err:?}")]
FilterChange {
/// The action that was attempted.
action: &'static str,
/// The underlying error.
#[source]
err: SendError<HashMap<Matcher, Vec<Filter>>>,
},
/// Error received when a glob cannot be parsed.
#[error("cannot parse glob: {0}")]
GlobParse(#[source] ignore::Error),
/// Error received when a compiled globset cannot be changed.
#[error("cannot change compiled globset: {0:?}")]
GlobsetChange(#[source] SendError<Option<Gitignore>>),
/// Error received about the internal ignore filterer.
#[error("ignore filterer: {0}")]
Ignore(#[source] ignore_files::Error),
/// Error received when a new ignore filterer cannot be swapped in.
#[error("cannot swap in new ignore filterer: {0:?}")]
IgnoreSwap(#[source] SendError<IgnoreFilterer>),
}
impl From<TaggedFiltererError> for RuntimeError {
fn from(err: TaggedFiltererError) -> Self {
Self::Filterer {
kind: "tagged",
err: Box::new(err) as _,
}
}
}

View file

@ -0,0 +1,93 @@
use std::{
env,
io::Error,
path::{Path, PathBuf},
str::FromStr,
};
use ignore_files::{discover_file, IgnoreFile};
use tokio::fs::read_to_string;
use crate::{Filter, TaggedFiltererError};
/// A filter file.
///
/// This is merely a type wrapper around an [`IgnoreFile`], as the only difference is how the file
/// is parsed.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FilterFile(pub IgnoreFile);
/// Finds all filter files that apply to the current runtime.
///
/// This considers:
/// - `$XDG_CONFIG_HOME/watchexec/filter`, as well as other locations (APPDATA on Windows…)
/// - Files from the `WATCHEXEC_FILTER_FILES` environment variable (comma-separated)
///
/// All errors (permissions, etc) are collected and returned alongside the ignore files: you may
/// want to show them to the user while still using whatever ignores were successfully found. Errors
/// from files not being found are silently ignored (the files are just not returned).
pub async fn discover_files_from_environment() -> (Vec<FilterFile>, Vec<Error>) {
let mut files = Vec::new();
let mut errors = Vec::new();
for path in env::var("WATCHEXEC_FILTER_FILES")
.unwrap_or_default()
.split(',')
{
discover_file(&mut files, &mut errors, None, None, PathBuf::from(path)).await;
}
let mut wgis = Vec::with_capacity(5);
if let Ok(home) = env::var("XDG_CONFIG_HOME") {
wgis.push(Path::new(&home).join("watchexec/filter"));
}
if let Ok(home) = env::var("APPDATA") {
wgis.push(Path::new(&home).join("watchexec/filter"));
}
if let Ok(home) = env::var("USERPROFILE") {
wgis.push(Path::new(&home).join(".watchexec/filter"));
}
if let Ok(home) = env::var("HOME") {
wgis.push(Path::new(&home).join(".watchexec/filter"));
}
for path in wgis {
if discover_file(&mut files, &mut errors, None, None, path).await {
break;
}
}
(files.into_iter().map(FilterFile).collect(), errors)
}
impl FilterFile {
/// Read and parse into [`Filter`]s.
///
/// Empty lines and lines starting with `#` are ignored. The `applies_in` field of the
/// [`IgnoreFile`] is used for the `in_path` field of each [`Filter`].
///
/// This method reads the entire file into memory.
pub async fn load(&self) -> Result<Vec<Filter>, TaggedFiltererError> {
let content =
read_to_string(&self.0.path)
.await
.map_err(|err| TaggedFiltererError::IoError {
about: "filter file load",
err,
})?;
let lines = content.lines();
let mut filters = Vec::with_capacity(lines.size_hint().0);
for line in lines {
if line.is_empty() || line.starts_with('#') {
continue;
}
let mut f = Filter::from_str(line)?;
f.in_path = self.0.applies_in.clone();
filters.push(f);
}
Ok(filters)
}
}

View file

@ -0,0 +1,276 @@
use std::collections::HashSet;
use std::path::PathBuf;
use globset::Glob;
use regex::Regex;
use tokio::fs::canonicalize;
use tracing::{trace, warn};
use unicase::UniCase;
use watchexec_events::Tag;
use crate::TaggedFiltererError;
/// A tagged filter.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Filter {
/// Path the filter applies from.
pub in_path: Option<PathBuf>,
/// Which tag the filter applies to.
pub on: Matcher,
/// The operation to perform on the tag's value.
pub op: Op,
/// The pattern to match against the tag's value.
pub pat: Pattern,
/// If true, a positive match with this filter will override negative matches from previous
/// filters on the same tag, and negative matches will be ignored.
pub negate: bool,
}
impl Filter {
/// Matches the filter against a subject.
///
/// This is really an internal method to the tagged filterer machinery, exposed so you can build
/// your own filterer using the same types or the textual syntax. As such its behaviour is not
/// guaranteed to be stable (its signature is, though).
pub fn matches(&self, subject: impl AsRef<str>) -> Result<bool, TaggedFiltererError> {
let subject = subject.as_ref();
trace!(op=?self.op, pat=?self.pat, ?subject, "performing filter match");
Ok(match (self.op, &self.pat) {
(Op::Equal, Pattern::Exact(pat)) => UniCase::new(subject) == UniCase::new(pat),
(Op::NotEqual, Pattern::Exact(pat)) => UniCase::new(subject) != UniCase::new(pat),
(Op::Regex, Pattern::Regex(pat)) => pat.is_match(subject),
(Op::NotRegex, Pattern::Regex(pat)) => !pat.is_match(subject),
(Op::InSet, Pattern::Set(set)) => set.contains(subject),
(Op::InSet, Pattern::Exact(pat)) => subject == pat,
(Op::NotInSet, Pattern::Set(set)) => !set.contains(subject),
(Op::NotInSet, Pattern::Exact(pat)) => subject != pat,
(op @ (Op::Glob | Op::NotGlob), Pattern::Glob(glob)) => {
// FIXME: someway that isn't this horrible
match Glob::new(glob) {
Ok(glob) => {
let matches = glob.compile_matcher().is_match(subject);
match op {
Op::Glob => matches,
Op::NotGlob => !matches,
_ => unreachable!(),
}
}
Err(err) => {
warn!(
"failed to compile glob for non-path match, skipping (pass): {}",
err
);
true
}
}
}
(op, pat) => {
warn!(
"trying to match pattern {:?} with op {:?}, that cannot work",
pat, op
);
false
}
})
}
/// Create a filter from a gitignore-style glob pattern.
///
/// The optional path is for the `in_path` field of the filter. When parsing gitignore files, it
/// should be set to the path of the _directory_ the ignore file is in.
///
/// The resulting filter matches on [`Path`][Matcher::Path], with the [`NotGlob`][Op::NotGlob]
/// op, and a [`Glob`][Pattern::Glob] pattern. If it starts with a `!`, it is negated.
#[must_use]
pub fn from_glob_ignore(in_path: Option<PathBuf>, glob: &str) -> Self {
let (glob, negate) = glob.strip_prefix('!').map_or((glob, false), |g| (g, true));
Self {
in_path,
on: Matcher::Path,
op: Op::NotGlob,
pat: Pattern::Glob(glob.to_string()),
negate,
}
}
/// Returns the filter with its `in_path` canonicalised.
pub async fn canonicalised(mut self) -> Result<Self, TaggedFiltererError> {
if let Some(ctx) = self.in_path {
self.in_path =
Some(
canonicalize(&ctx)
.await
.map_err(|err| TaggedFiltererError::IoError {
about: "canonicalise Filter in_path",
err,
})?,
);
trace!(canon=?ctx, "canonicalised in_path");
}
Ok(self)
}
}
/// What a filter matches on.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[non_exhaustive]
pub enum Matcher {
/// The presence of a tag on an event.
Tag,
/// A path in a filesystem event. Paths are always canonicalised.
///
/// Note that there may be multiple paths in an event (e.g. both source and destination for renames), and filters
/// will be matched on all of them.
Path,
/// The file type of an object in a filesystem event.
///
/// This is not guaranteed to be present for every filesystem event.
///
/// It can be any of these values: `file`, `dir`, `symlink`, `other`. That last one means
/// "not any of the first three."
FileType,
/// The [`EventKind`][notify::event::EventKind] of a filesystem event.
///
/// This is the Debug representation of the event kind. Examples:
/// - `Access(Close(Write))`
/// - `Modify(Data(Any))`
/// - `Modify(Metadata(Permissions))`
/// - `Remove(Folder)`
///
/// You should probably use globs or regexes to match these, ex:
/// - `Create(*)`
/// - `Modify\(Name\(.+`
FileEventKind,
/// The [event source][crate::event::Source] the event came from.
///
/// These are the lowercase names of the variants.
Source,
/// The ID of the process which caused the event.
///
/// Note that it's rare for events to carry this information.
Process,
/// A signal sent to the main process.
///
/// This can be matched both on the signal number as an integer, and on the signal name as a
/// string. On Windows, only `BREAK` is supported; `CTRL_C` parses but won't work. Matching is
/// on both uppercase and lowercase forms.
///
/// Interrupt signals (`TERM` and `INT` on Unix, `CTRL_C` on Windows) are parsed, but these are
/// marked Urgent internally to Watchexec, and thus bypass filtering entirely.
Signal,
/// The exit status of a subprocess.
///
/// This is only present for events issued when the subprocess exits. The value is matched on
/// both the exit code as an integer, and either `success` or `fail`, whichever succeeds.
ProcessCompletion,
/// The [`Priority`] of the event.
///
/// This is never `urgent`, as urgent events bypass filtering.
Priority,
}
impl Matcher {
pub(crate) fn from_tag(tag: &Tag) -> &'static [Self] {
match tag {
Tag::Path {
file_type: None, ..
} => &[Self::Path],
Tag::Path { .. } => &[Self::Path, Self::FileType],
Tag::FileEventKind(_) => &[Self::FileEventKind],
Tag::Source(_) => &[Self::Source],
Tag::Process(_) => &[Self::Process],
Tag::Signal(_) => &[Self::Signal],
Tag::ProcessCompletion(_) => &[Self::ProcessCompletion],
_ => {
warn!("unhandled tag: {:?}", tag);
&[]
}
}
}
}
/// How a filter value is interpreted.
///
/// - `==` and `!=` match on the exact value as string equality (case-insensitively),
/// - `~=` and `~!` match using a [regex],
/// - `*=` and `*!` match using a glob, either via [globset] or [ignore]
/// - `:=` and `:!` match via exact string comparisons, but on any of the list of values separated
/// by `,`
/// - `=`, the "auto" operator, behaves as `*=` if the matcher is `Path`, and as `==` otherwise.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum Op {
/// The auto operator, `=`, resolves to `*=` or `==` depending on the matcher.
Auto,
/// The `==` operator, matches on exact string equality.
Equal,
/// The `!=` operator, matches on exact string inequality.
NotEqual,
/// The `~=` operator, matches on a regex.
Regex,
/// The `~!` operator, matches on a regex (matches are fails).
NotRegex,
/// The `*=` operator, matches on a glob.
Glob,
/// The `*!` operator, matches on a glob (matches are fails).
NotGlob,
/// The `:=` operator, matches (with string compares) on a set of values (belongs are passes).
InSet,
/// The `:!` operator, matches on a set of values (belongs are fails).
NotInSet,
}
/// A filter value (pattern to match with).
#[derive(Debug, Clone)]
#[non_exhaustive]
pub enum Pattern {
/// An exact string.
Exact(String),
/// A regex.
Regex(Regex),
/// A glob.
///
/// This is stored as a string as globs are compiled together rather than on a per-filter basis.
Glob(String),
/// A set of exact strings.
Set(HashSet<String>),
}
impl PartialEq<Self> for Pattern {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Self::Exact(l), Self::Exact(r)) | (Self::Glob(l), Self::Glob(r)) => l == r,
(Self::Regex(l), Self::Regex(r)) => l.as_str() == r.as_str(),
(Self::Set(l), Self::Set(r)) => l == r,
_ => false,
}
}
}
impl Eq for Pattern {}

View file

@ -0,0 +1,537 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::HashMap, convert::Into};
use futures::{stream::FuturesOrdered, TryStreamExt};
use ignore::{
gitignore::{Gitignore, GitignoreBuilder},
Match,
};
use ignore_files::{IgnoreFile, IgnoreFilter};
use tokio::fs::canonicalize;
use tracing::{debug, trace, trace_span};
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{Event, FileType, Priority, ProcessEnd, Tag};
use watchexec_filterer_ignore::IgnoreFilterer;
use watchexec_signals::Signal;
use crate::{swaplock::SwapLock, Filter, Matcher, Op, Pattern, TaggedFiltererError};
/// A complex filterer that can match any event tag and supports different matching operators.
///
/// See the crate-level documentation for more information.
#[derive(Debug)]
pub struct TaggedFilterer {
/// The directory the project is in, its origin.
///
/// This is used to resolve absolute paths without an `in_path` context.
origin: PathBuf,
/// Where the program is running from.
///
/// This is used to resolve relative paths without an `in_path` context.
workdir: PathBuf,
/// All filters that are applied, in order, by matcher.
filters: SwapLock<HashMap<Matcher, Vec<Filter>>>,
/// Sub-filterer for ignore files.
ignore_filterer: SwapLock<IgnoreFilterer>,
/// Compiled matcher for Glob filters.
glob_compiled: SwapLock<Option<Gitignore>>,
/// Compiled matcher for NotGlob filters.
not_glob_compiled: SwapLock<Option<Gitignore>>,
}
impl Filterer for TaggedFilterer {
fn check_event(&self, event: &Event, priority: Priority) -> Result<bool, RuntimeError> {
self.check(event, priority).map_err(Into::into)
}
}
impl TaggedFilterer {
fn check(&self, event: &Event, priority: Priority) -> Result<bool, TaggedFiltererError> {
let _span = trace_span!("filterer_check").entered();
trace!(?event, ?priority, "checking event");
{
trace!("checking priority");
if let Some(filters) = self.filters.borrow().get(&Matcher::Priority).cloned() {
trace!(filters=%filters.len(), "found some filters for priority");
//
let mut pri_match = true;
for filter in &filters {
let _span = trace_span!("checking filter against priority", ?filter).entered();
let applies = filter.matches(match priority {
Priority::Low => "low",
Priority::Normal => "normal",
Priority::High => "high",
Priority::Urgent => unreachable!("urgent by-passes filtering"),
})?;
if filter.negate {
if applies {
trace!(prev=%pri_match, now=%true, "negate filter passes, passing this priority");
pri_match = true;
break;
}
trace!(prev=%pri_match, now=%pri_match, "negate filter fails, ignoring");
} else {
trace!(prev=%pri_match, this=%applies, now=%(pri_match&applies), "filter applies to priority");
pri_match &= applies;
}
}
if !pri_match {
trace!("priority fails check, failing entire event");
return Ok(false);
}
} else {
trace!("no filters for priority, skipping (pass)");
}
}
{
trace!("checking internal ignore filterer");
let igf = self.ignore_filterer.borrow();
if !igf
.check_event(event, priority)
.expect("IgnoreFilterer never errors")
{
trace!("internal ignore filterer matched (fail)");
return Ok(false);
}
}
if self.filters.borrow().is_empty() {
trace!("no filters, skipping entire check (pass)");
return Ok(true);
}
trace!(tags=%event.tags.len(), "checking all tags on the event");
for tag in &event.tags {
let _span = trace_span!("check_tag", ?tag).entered();
trace!("checking tag");
for matcher in Matcher::from_tag(tag) {
let _span = trace_span!("check_matcher", ?matcher).entered();
let filters = self.filters.borrow().get(matcher).cloned();
if let Some(tag_filters) = filters {
if tag_filters.is_empty() {
trace!("no filters for this matcher, skipping (pass)");
continue;
}
trace!(filters=%tag_filters.len(), "found some filters for this matcher");
let mut tag_match = true;
if let (Matcher::Path, Tag::Path { path, file_type }) = (matcher, tag) {
let is_dir = file_type.map_or(false, |ft| matches!(ft, FileType::Dir));
{
let gc = self.glob_compiled.borrow();
if let Some(igs) = gc.as_ref() {
let _span =
trace_span!("checking_compiled_filters", compiled=%"Glob")
.entered();
match if path.strip_prefix(&self.origin).is_ok() {
trace!("checking against path or parents");
igs.matched_path_or_any_parents(path, is_dir)
} else {
trace!("checking against path only");
igs.matched(path, is_dir)
} {
Match::None => {
trace!("no match (fail)");
tag_match &= false;
}
Match::Ignore(glob) => {
if glob
.from()
.map_or(true, |f| path.strip_prefix(f).is_ok())
{
trace!(?glob, "positive match (pass)");
tag_match &= true;
} else {
trace!(
?glob,
"positive match, but not in scope (ignore)"
);
}
}
Match::Whitelist(glob) => {
trace!(?glob, "negative match (ignore)");
}
}
}
}
{
let ngc = self.not_glob_compiled.borrow();
if let Some(ngs) = ngc.as_ref() {
let _span =
trace_span!("checking_compiled_filters", compiled=%"NotGlob")
.entered();
match if path.strip_prefix(&self.origin).is_ok() {
trace!("checking against path or parents");
ngs.matched_path_or_any_parents(path, is_dir)
} else {
trace!("checking against path only");
ngs.matched(path, is_dir)
} {
Match::None => {
trace!("no match (pass)");
tag_match &= true;
}
Match::Ignore(glob) => {
if glob
.from()
.map_or(true, |f| path.strip_prefix(f).is_ok())
{
trace!(?glob, "positive match (fail)");
tag_match &= false;
} else {
trace!(
?glob,
"positive match, but not in scope (ignore)"
);
}
}
Match::Whitelist(glob) => {
trace!(?glob, "negative match (pass)");
tag_match = true;
}
}
}
}
}
// those are handled with the compiled ignore filters above
let tag_filters = tag_filters
.into_iter()
.filter(|f| {
!matches!(
(tag, matcher, f),
(
Tag::Path { .. },
Matcher::Path,
Filter {
on: Matcher::Path,
op: Op::Glob | Op::NotGlob,
pat: Pattern::Glob(_),
..
}
)
)
})
.collect::<Vec<_>>();
if tag_filters.is_empty() && tag_match {
trace!("no more filters for this matcher, skipping (pass)");
continue;
}
trace!(filters=%tag_filters.len(), "got some filters to check still");
for filter in &tag_filters {
let _span = trace_span!("checking filter against tag", ?filter).entered();
if let Some(app) = self.match_tag(filter, tag)? {
if filter.negate {
if app {
trace!(prev=%tag_match, now=%true, "negate filter passes, passing this matcher");
tag_match = true;
break;
}
trace!(prev=%tag_match, now=%tag_match, "negate filter fails, ignoring");
} else {
trace!(prev=%tag_match, this=%app, now=%(tag_match&app), "filter applies to this tag");
tag_match &= app;
}
}
}
if !tag_match {
trace!("matcher fails check, failing entire event");
return Ok(false);
}
trace!("matcher passes check, continuing");
} else {
trace!("no filters for this matcher, skipping (pass)");
}
}
}
trace!("passing event");
Ok(true)
}
/// Initialise a new tagged filterer with no filters.
///
/// This takes two paths: the project origin, and the current directory. The current directory
/// is not obtained from the environment so you can customise it; generally you should use
/// [`std::env::current_dir()`] though.
///
/// The origin is the directory the main project that is being watched is in. This is used to
/// resolve absolute paths given in filters without an `in_path` field (e.g. all filters parsed
/// from text), and for ignore file based filtering.
///
/// The workdir is used to resolve relative paths given in filters without an `in_path` field.
///
/// So, if origin is `/path/to/project` and workdir is `/path/to/project/subtree`:
/// - `path=foo.bar` is resolved to `/path/to/project/subtree/foo.bar`
/// - `path=/foo.bar` is resolved to `/path/to/project/foo.bar`
pub async fn new(origin: PathBuf, workdir: PathBuf) -> Result<Arc<Self>, TaggedFiltererError> {
let origin = canonicalize(origin)
.await
.map_err(|err| TaggedFiltererError::IoError {
about: "canonicalise origin on new tagged filterer",
err,
})?;
Ok(Arc::new(Self {
filters: SwapLock::new(HashMap::new()),
ignore_filterer: SwapLock::new(IgnoreFilterer(IgnoreFilter::empty(&origin))),
glob_compiled: SwapLock::new(None),
not_glob_compiled: SwapLock::new(None),
workdir: canonicalize(workdir)
.await
.map_err(|err| TaggedFiltererError::IoError {
about: "canonicalise workdir on new tagged filterer",
err,
})?,
origin,
}))
}
// filter ctx event path filter outcome
// /foo/bar /foo/bar/baz.txt baz.txt pass
// /foo/bar /foo/bar/baz.txt /baz.txt pass
// /foo/bar /foo/bar/baz.txt /baz.* pass
// /foo/bar /foo/bar/baz.txt /blah fail
// /foo/quz /foo/bar/baz.txt /baz.* skip
// Ok(Some(bool)) => the match was applied, bool is the result
// Ok(None) => for some precondition, the match was not done (mismatched tag, out of context, …)
fn match_tag(&self, filter: &Filter, tag: &Tag) -> Result<Option<bool>, TaggedFiltererError> {
const fn sig_match(sig: Signal) -> (&'static str, i32) {
match sig {
Signal::Hangup | Signal::Custom(1) => ("HUP", 1),
Signal::ForceStop | Signal::Custom(9) => ("KILL", 9),
Signal::Interrupt | Signal::Custom(2) => ("INT", 2),
Signal::Quit | Signal::Custom(3) => ("QUIT", 3),
Signal::Terminate | Signal::Custom(15) => ("TERM", 15),
Signal::User1 | Signal::Custom(10) => ("USR1", 10),
Signal::User2 | Signal::Custom(12) => ("USR2", 12),
Signal::Custom(n) => ("UNK", n),
_ => ("UNK", 0),
}
}
trace!(matcher=?filter.on, "matching filter to tag");
match (tag, filter.on) {
(tag, Matcher::Tag) => filter.matches(tag.discriminant_name()),
(Tag::Path { path, .. }, Matcher::Path) => {
let resolved = if let Some(ctx) = &filter.in_path {
if let Ok(suffix) = path.strip_prefix(ctx) {
suffix.strip_prefix("/").unwrap_or(suffix)
} else {
return Ok(None);
}
} else if let Ok(suffix) = path.strip_prefix(&self.workdir) {
suffix.strip_prefix("/").unwrap_or(suffix)
} else if let Ok(suffix) = path.strip_prefix(&self.origin) {
suffix.strip_prefix("/").unwrap_or(suffix)
} else {
path.strip_prefix("/").unwrap_or(path)
};
trace!(?resolved, "resolved path to match filter against");
if matches!(filter.op, Op::Glob | Op::NotGlob) {
trace!("path glob match with match_tag is already handled");
return Ok(None);
}
filter.matches(resolved.to_string_lossy())
}
(
Tag::Path {
file_type: Some(ft),
..
},
Matcher::FileType,
) => filter.matches(ft.to_string()),
(Tag::FileEventKind(kind), Matcher::FileEventKind) => {
filter.matches(format!("{kind:?}"))
}
(Tag::Source(src), Matcher::Source) => filter.matches(src.to_string()),
(Tag::Process(pid), Matcher::Process) => filter.matches(pid.to_string()),
(Tag::Signal(sig), Matcher::Signal) => {
let (text, int) = sig_match(*sig);
Ok(filter.matches(text)?
|| filter.matches(format!("SIG{text}"))?
|| filter.matches(int.to_string())?)
}
(Tag::ProcessCompletion(ope), Matcher::ProcessCompletion) => match ope {
None => filter.matches("_"),
Some(ProcessEnd::Success) => filter.matches("success"),
Some(ProcessEnd::ExitError(int)) => filter.matches(format!("error({int})")),
Some(ProcessEnd::ExitSignal(sig)) => {
let (text, int) = sig_match(*sig);
Ok(filter.matches(format!("signal({text})"))?
|| filter.matches(format!("signal(SIG{text})"))?
|| filter.matches(format!("signal({int})"))?)
}
Some(ProcessEnd::ExitStop(int)) => filter.matches(format!("stop({int})")),
Some(ProcessEnd::Exception(int)) => filter.matches(format!("exception({int:X})")),
Some(ProcessEnd::Continued) => filter.matches("continued"),
},
(_, _) => {
trace!("no match for tag, skipping");
return Ok(None);
}
}
.map(Some)
}
/// Add some filters to the filterer.
///
/// This is async as it submits the new filters to the live filterer, which may be holding a
/// read lock. It takes a slice of filters so it can efficiently add a large number of filters
/// with a single write, without needing to acquire the lock repeatedly.
///
/// If filters with glob operations are added, the filterer's glob matchers are recompiled after
/// the new filters are added, in this method. This should not be used for inserting an
/// [`IgnoreFile`]: use [`add_ignore_file()`](Self::add_ignore_file) instead.
pub async fn add_filters(&self, filters: &[Filter]) -> Result<(), TaggedFiltererError> {
debug!(?filters, "adding filters to filterer");
let mut recompile_globs = false;
let mut recompile_not_globs = false;
#[allow(clippy::from_iter_instead_of_collect)]
let filters = FuturesOrdered::from_iter(
filters
.iter()
.cloned()
.inspect(|f| match f.op {
Op::Glob => {
recompile_globs = true;
}
Op::NotGlob => {
recompile_not_globs = true;
}
_ => {}
})
.map(Filter::canonicalised),
)
.try_collect::<Vec<_>>()
.await?;
trace!(?filters, "canonicalised filters");
// TODO: use miette's related and issue canonicalisation errors for all of them
self.filters
.change(|fs| {
for filter in filters {
fs.entry(filter.on).or_default().push(filter);
}
})
.map_err(|err| TaggedFiltererError::FilterChange { action: "add", err })?;
trace!("inserted filters into swaplock");
if recompile_globs {
self.recompile_globs(Op::Glob)?;
}
if recompile_not_globs {
self.recompile_globs(Op::NotGlob)?;
}
Ok(())
}
/// Rebuilds the compiled gitignore-style matcher for either the glob or not-glob filters.
///
/// Called after filters change; `op_filter` must be `Op::Glob` or `Op::NotGlob`.
fn recompile_globs(&self, op_filter: Op) -> Result<(), TaggedFiltererError> {
    trace!(?op_filter, "recompiling globs");
    // Select which compiled-matcher slot corresponds to this op.
    let target = match op_filter {
        Op::Glob => &self.glob_compiled,
        Op::NotGlob => &self.not_glob_compiled,
        _ => unreachable!("recompile_globs called with invalid op"),
    };

    let globs = {
        let filters = self.filters.borrow();
        if let Some(fs) = filters.get(&Matcher::Path) {
            trace!(?op_filter, "pulling filters from swaplock");
            // we want to hold the lock as little as possible, so we clone the filters
            fs.iter()
                .filter(|&f| f.op == op_filter)
                .cloned()
                .collect::<Vec<_>>()
        } else {
            // No path filters at all: erase the compiled matcher and bail early.
            trace!(?op_filter, "no filters, erasing compiled glob");
            return target
                .replace(None)
                .map_err(TaggedFiltererError::GlobsetChange);
        }
    };

    // Rebuild a gitignore-style matcher rooted at the filterer's origin.
    let mut builder = GitignoreBuilder::new(&self.origin);
    for filter in globs {
        if let Pattern::Glob(mut glob) = filter.pat {
            if filter.negate {
                // Negated filters become gitignore-style `!` allowlist lines.
                glob.insert(0, '!');
            }

            trace!(?op_filter, in_path=?filter.in_path, ?glob, "adding new glob line");
            builder
                .add_line(filter.in_path, &glob)
                .map_err(TaggedFiltererError::GlobParse)?;
        }
    }

    trace!(?op_filter, "finalising compiled glob");
    let compiled = builder.build().map_err(TaggedFiltererError::GlobParse)?;

    trace!(?op_filter, "swapping in new compiled glob");
    target
        .replace(Some(compiled))
        .map_err(TaggedFiltererError::GlobsetChange)
}
/// Reads a gitignore-style [`IgnoreFile`] and adds it to the filterer.
pub async fn add_ignore_file(&self, file: &IgnoreFile) -> Result<(), TaggedFiltererError> {
    // Work on a clone so the read borrow on the swaplock is released
    // before the async file load below.
    let mut updated = self.ignore_filterer.borrow().clone();

    updated
        .0
        .add_file(file)
        .await
        .map_err(TaggedFiltererError::Ignore)?;

    // Publish the augmented ignore filterer.
    self.ignore_filterer
        .replace(updated)
        .map_err(TaggedFiltererError::IgnoreSwap)?;

    Ok(())
}
/// Clears all filters from the filterer.
///
/// This also recompiles the glob matchers, so essentially it resets the entire filterer state.
pub fn clear_filters(&self) -> Result<(), TaggedFiltererError> {
    debug!("removing all filters from filterer");

    // Swap in an empty filter map.
    self.filters
        .replace(Default::default())
        .map_err(|err| TaggedFiltererError::FilterChange {
            action: "clear all",
            err,
        })?;

    // With the filter map emptied, both compiled glob matchers are stale.
    for op in [Op::Glob, Op::NotGlob] {
        self.recompile_globs(op)?;
    }

    Ok(())
}
}

View file

@ -0,0 +1,92 @@
//! A filterer implementation that exposes the full capabilities of Watchexec.
//!
//! Filters match against [event tags][Tag]; can be exact matches, glob matches, regex matches, or
//! set matches; can reverse the match (equal/not equal, etc); and can be negated.
//!
//! [Filters][Filter] can be generated from your application and inserted directly, or they can be
//! parsed from a textual format:
//!
//! ```text
//! [!]{Matcher}{Op}{Value}
//! ```
//!
//! For example:
//!
//! ```text
//! path==/foo/bar
//! path*=**/bar
//! path~=bar$
//! !kind=file
//! ```
//!
//! There is a set of [operators][Op]:
//! - `==` and `!=`: exact match and exact not match (case insensitive)
//! - `~=` and `~!`: regex match and regex not match
//! - `*=` and `*!`: glob match and glob not match
//! - `:=` and `:!`: set match and set not match
//!
//! Sets are a list of values separated by `,`.
//!
//! In addition to the two-symbol operators, there is the `=` "auto" operator, which maps to the
//! most convenient operator for the given _matcher_. The current mapping is:
//!
//! | Matcher | Operator |
//! |---------------------------------------------------|---------------|
//! | [`Tag`](Matcher::Tag) | `:=` (in set) |
//! | [`Path`](Matcher::Path) | `*=` (glob) |
//! | [`FileType`](Matcher::FileType) | `:=` (in set) |
//! | [`FileEventKind`](Matcher::FileEventKind) | `*=` (glob) |
//! | [`Source`](Matcher::Source) | `:=` (in set) |
//! | [`Process`](Matcher::Process) | `:=` (in set) |
//! | [`Signal`](Matcher::Signal) | `:=` (in set) |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `*=` (glob) |
//! | [`Priority`](Matcher::Priority) | `:=` (in set) |
//!
//! [Matchers][Matcher] correspond to Tags, but are not one-to-one: the `path` matcher operates on
//! the `path` part of the `Path` tag, and the `type` matcher operates on the `file_type`, for
//! example.
//!
//! | Matcher | Syntax | Tag |
//! |-------------------------------------------|----------|----------------------------------------------|
//! | [`Tag`](Matcher::Tag) | `tag` | _the presence of a Tag on the event_ |
//! | [`Path`](Matcher::Path) | `path` | [`Path`](Tag::Path) (`path` field) |
//! | [`FileType`](Matcher::FileType) | `type` | [`Path`](Tag::Path) (`file_type` field, when Some) |
//! | [`FileEventKind`](Matcher::FileEventKind) | `kind` or `fek` | [`FileEventKind`](Tag::FileEventKind) |
//! | [`Source`](Matcher::Source) | `source` or `src` | [`Source`](Tag::Source) |
//! | [`Process`](Matcher::Process) | `process` or `pid` | [`Process`](Tag::Process) |
//! | [`Signal`](Matcher::Signal) | `signal` | [`Signal`](Tag::Signal) |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `complete` or `exit` | [`ProcessCompletion`](Tag::ProcessCompletion) |
//! | [`Priority`](Matcher::Priority) | `priority` | special: event [`Priority`] |
//!
//! Filters are checked in order, grouped per tag and per matcher. Filter groups may be checked in
//! any order, but the filters in the groups are checked in add order. Path glob filters are always
//! checked first, for internal reasons.
//!
//! The `negate` boolean field behaves specially: it is not operator negation, but rather the same
//! kind of behaviour that is applied to `!`-prefixed globs in gitignore files: if a negated filter
//! matches the event, the result of the event checking for that matcher is reverted to `true`, even
//! if a previous filter set it to `false`. Unmatched negated filters are ignored.
//!
//! Glob syntax is as supported by the [ignore] crate for Paths, and by [globset] otherwise. (As of
//! writing, the ignore crate uses globset internally). Regex syntax is the default syntax of the
//! [regex] crate.
#![doc(html_favicon_url = "https://watchexec.github.io/logo:watchexec.svg")]
#![doc(html_logo_url = "https://watchexec.github.io/logo:watchexec.svg")]
#![warn(clippy::unwrap_used, missing_docs)]
#![deny(rust_2018_idioms)]
// to make filters
pub use regex::Regex;
pub use error::*;
pub use files::*;
pub use filter::*;
pub use filterer::*;
mod error;
mod files;
mod filter;
mod filterer;
mod parse;
mod swaplock;

View file

@ -0,0 +1,139 @@
use std::str::FromStr;
use nom::{
branch::alt,
bytes::complete::{is_not, tag, tag_no_case, take_while1},
character::complete::char,
combinator::{map_res, opt},
sequence::{delimited, tuple},
Finish, IResult,
};
use regex::Regex;
use tracing::trace;
use crate::{Filter, Matcher, Op, Pattern, TaggedFiltererError};
impl FromStr for Filter {
    type Err = TaggedFiltererError;

    /// Parses a filter from its textual form: `[!]{matcher}{op}{value}`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Parses the matcher keyword (case-insensitive). Full names are listed
        // before their abbreviations (e.g. `signal` before `sig`) so a longer
        // keyword is never cut short by its prefix.
        fn matcher(i: &str) -> IResult<&str, Matcher> {
            map_res(
                alt((
                    tag_no_case("tag"),
                    tag_no_case("path"),
                    tag_no_case("type"),
                    tag_no_case("kind"),
                    tag_no_case("fek"),
                    tag_no_case("source"),
                    tag_no_case("src"),
                    tag_no_case("priority"),
                    tag_no_case("process"),
                    tag_no_case("pid"),
                    tag_no_case("signal"),
                    tag_no_case("sig"),
                    tag_no_case("complete"),
                    tag_no_case("exit"),
                )),
                |m: &str| match m.to_ascii_lowercase().as_str() {
                    "tag" => Ok(Matcher::Tag),
                    "path" => Ok(Matcher::Path),
                    "type" => Ok(Matcher::FileType),
                    "kind" | "fek" => Ok(Matcher::FileEventKind),
                    "source" | "src" => Ok(Matcher::Source),
                    "priority" => Ok(Matcher::Priority),
                    "process" | "pid" => Ok(Matcher::Process),
                    "signal" | "sig" => Ok(Matcher::Signal),
                    "complete" | "exit" => Ok(Matcher::ProcessCompletion),
                    m => Err(format!("unknown matcher: {m}")),
                },
            )(i)
        }

        // Parses the operator. The two-character ops come first so the bare
        // `=` (auto) alternative does not shadow `==`, `~=`, `*=`, etc.
        fn op(i: &str) -> IResult<&str, Op> {
            map_res(
                alt((
                    tag("=="),
                    tag("!="),
                    tag("~="),
                    tag("~!"),
                    tag("*="),
                    tag("*!"),
                    tag(":="),
                    tag(":!"),
                    tag("="),
                )),
                |o: &str| match o {
                    "==" => Ok(Op::Equal),
                    "!=" => Ok(Op::NotEqual),
                    "~=" => Ok(Op::Regex),
                    "~!" => Ok(Op::NotRegex),
                    "*=" => Ok(Op::Glob),
                    "*!" => Ok(Op::NotGlob),
                    ":=" => Ok(Op::InSet),
                    ":!" => Ok(Op::NotInSet),
                    "=" => Ok(Op::Auto),
                    o => Err(format!("unknown op: `{o}`")),
                },
            )(i)
        }

        // Parses the pattern: a single- or double-quoted string (no escape
        // handling yet), or else everything up to the end of input.
        fn pattern(i: &str) -> IResult<&str, &str> {
            alt((
                // TODO: escapes
                delimited(char('"'), is_not("\""), char('"')),
                delimited(char('\''), is_not("'"), char('\'')),
                take_while1(|_| true),
            ))(i)
        }

        // Assembles the whole filter: optional leading `!` (negation), then
        // matcher, operator, and pattern.
        fn filter(i: &str) -> IResult<&str, Filter> {
            map_res(
                tuple((opt(tag("!")), matcher, op, pattern)),
                |(n, m, o, p)| -> Result<_, ()> {
                    Ok(Filter {
                        in_path: None,
                        on: m,
                        // Resolve the `=` auto operator per-matcher: glob for
                        // the glob-like matchers, in-set for the rest.
                        op: match o {
                            Op::Auto => match m {
                                Matcher::Path
                                | Matcher::FileEventKind
                                | Matcher::ProcessCompletion => Op::Glob,
                                _ => Op::InSet,
                            },
                            o => o,
                        },
                        // Build the pattern value to match the (resolved) op.
                        pat: match (o, m) {
                            // TODO: carry regex/glob errors through
                            (
                                Op::Auto,
                                Matcher::Path | Matcher::FileEventKind | Matcher::ProcessCompletion,
                            )
                            | (Op::Glob | Op::NotGlob, _) => Pattern::Glob(p.to_string()),
                            (Op::Auto | Op::InSet | Op::NotInSet, _) => {
                                Pattern::Set(p.split(',').map(|s| s.trim().to_string()).collect())
                            }
                            (Op::Regex | Op::NotRegex, _) => {
                                Pattern::Regex(Regex::new(p).map_err(drop)?)
                            }
                            (Op::Equal | Op::NotEqual, _) => Pattern::Exact(p.to_string()),
                        },
                        negate: n.is_some(),
                    })
                },
            )(i)
        }

        trace!(src=?s, "parsing tagged filter");
        filter(s)
            .finish()
            .map(|(_, f)| {
                trace!(src=?s, filter=?f, "parsed tagged filter");
                f
            })
            .map_err(|e| TaggedFiltererError::Parse {
                src: s.to_string(),
                err: e.code,
            })
    }
}

View file

@ -0,0 +1,58 @@
//! A value that is always available, but can be swapped out.
use std::fmt;
use tokio::sync::watch::{channel, error::SendError, Receiver, Ref, Sender};
/// A value that is always available, but can be swapped out.
///
/// This is a wrapper around a [Tokio `watch`][tokio::sync::watch] channel. Both reads (`borrow`)
/// and writes (`change`, `replace`) are synchronous — no await is needed. Borrows should be held
/// for as little as possible, as they keep a read lock.
pub struct SwapLock<T: Clone> {
    // Read half: hands out borrows of the current value.
    r: Receiver<T>,
    // Write half: publishes replacement values.
    s: Sender<T>,
}
impl<T: Clone> SwapLock<T> {
    /// Creates a new `SwapLock` holding `inner` as its initial value.
    pub fn new(inner: T) -> Self {
        let (s, r) = channel(inner);
        Self { r, s }
    }

    /// Borrows the current value.
    ///
    /// The returned guard keeps a read lock on the channel; drop it promptly.
    pub fn borrow(&self) -> Ref<'_, T> {
        self.r.borrow()
    }

    /// Rewrites the value using a closure.
    ///
    /// The current value is cloned, the closure mutates the clone, and the
    /// modified clone is then published as the new value.
    pub fn change(&self, f: impl FnOnce(&mut T)) -> Result<(), SendError<T>> {
        let mut next = self.r.borrow().clone();
        f(&mut next);
        self.s.send(next)
    }

    /// Replaces the value wholesale with a new one.
    pub fn replace(&self, new: T) -> Result<(), SendError<T>> {
        self.s.send(new)
    }
}
impl<T: fmt::Debug + Clone> fmt::Debug for SwapLock<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only the receiver half is shown; the sender carries no extra state
        // worth printing.
        f.debug_struct("SwapLock")
            .field("(watch)", &self.r)
            .finish_non_exhaustive()
    }
}

View file

@ -0,0 +1,114 @@
use watchexec_events::{filekind::*, ProcessEnd, Source};
use watchexec_signals::Signal;
mod helpers;
use helpers::tagged_ff::*;
#[tokio::test]
async fn empty_filter_passes_everything() {
    // With a filter file contributing no filters, every path, source, event
    // kind, pid, signal, and completion state must pass.
    let filterer = filt("", &[], &[file("empty.wef").await]).await;

    filterer.file_does_pass("Cargo.toml");
    filterer.file_does_pass("Cargo.json");
    filterer.file_does_pass("Gemfile.toml");
    filterer.file_does_pass("FINAL-FINAL.docx");

    filterer.dir_does_pass("/test/Cargo.toml");
    filterer.dir_does_pass("/a/folder");

    filterer.file_does_pass("apples/carrots/oranges");
    filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
    filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
    filterer.file_does_pass("apples/oranges/bananas");
    filterer.dir_does_pass("apples/carrots/oranges");
    filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
    filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
    filterer.dir_does_pass("apples/oranges/bananas");

    filterer.source_does_pass(Source::Keyboard);
    filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
    filterer.pid_does_pass(1234);
    filterer.signal_does_pass(Signal::User1);
    filterer.complete_does_pass(None);
    filterer.complete_does_pass(Some(ProcessEnd::Success));
}
#[tokio::test]
async fn folder() {
    // folder.wef presumably keeps only directories under `prunes`
    // (`type==dir` + `path*=prunes` — confirm against the fixture);
    // everything else, file or dir, is rejected.
    let filterer = filt("", &[], &[file("folder.wef").await]).await;

    filterer.file_doesnt_pass("apples");
    filterer.file_doesnt_pass("apples/oranges/bananas");
    filterer.dir_doesnt_pass("apples");
    filterer.dir_doesnt_pass("apples/carrots");

    filterer.file_doesnt_pass("raw-prunes");
    filterer.dir_doesnt_pass("raw-prunes");

    filterer.file_doesnt_pass("prunes");
    filterer.file_doesnt_pass("prunes/oranges/bananas");

    filterer.dir_does_pass("prunes");
    filterer.dir_does_pass("prunes/carrots/cauliflowers/oranges");
}
#[tokio::test]
async fn patterns() {
    // path-patterns.wef rejects a set of path globs and regexes; anything not
    // matched by one of its patterns passes through unfiltered.
    let filterer = filt("", &[], &[file("path-patterns.wef").await]).await;

    // Unmatched
    filterer.file_does_pass("FINAL-FINAL.docx");
    filterer.dir_does_pass("/a/folder");
    filterer.file_does_pass("rat");
    filterer.file_does_pass("foo/bar/rat");
    filterer.file_does_pass("/foo/bar/rat");

    // Cargo.toml
    filterer.file_doesnt_pass("Cargo.toml");
    filterer.dir_doesnt_pass("Cargo.toml");
    filterer.file_does_pass("Cargo.json");

    // package.json
    filterer.file_doesnt_pass("package.json");
    filterer.dir_doesnt_pass("package.json");
    filterer.file_does_pass("package.toml");

    // *.gemspec
    filterer.file_doesnt_pass("pearl.gemspec");
    filterer.dir_doesnt_pass("sapphire.gemspec");
    filterer.file_doesnt_pass(".gemspec");
    filterer.file_does_pass("diamond.gemspecial");

    // test-[^u]+
    filterer.file_does_pass("test-unit");
    filterer.dir_doesnt_pass("test-integration");
    filterer.file_does_pass("tester-helper");

    // [.]sw[a-z]$
    filterer.file_doesnt_pass("source.swa");
    filterer.file_doesnt_pass(".source.swb");
    filterer.file_doesnt_pass("sub/source.swc");
    filterer.file_does_pass("sub/dir.swa/file");
    filterer.file_does_pass("source.sw1");
}
#[tokio::test]
async fn negate() {
    // negate.wef presumably contains `path=nah` plus the negated line
    // `!path=nah.yeah` (confirm against the fixture): only `nah`-matching
    // paths pass, and the negated line re-allows `nah.yeah` specifically.
    let filterer = filt("", &[], &[file("negate.wef").await]).await;

    filterer.file_doesnt_pass("yeah");
    filterer.file_does_pass("nah");
    filterer.file_does_pass("nah.yeah");
}
#[tokio::test]
async fn ignores_and_filters() {
    // Combines an ignore file ("globs") with a filter file ("folder.wef"):
    // a path must be both not-ignored and matched by the filters to pass.
    let filterer = filt("", &[file("globs").await.0], &[file("folder.wef").await]).await;

    // ignored
    filterer.dir_doesnt_pass("test-helper");

    // not filtered
    filterer.dir_doesnt_pass("tester-helper");

    // not ignored && filtered
    filterer.dir_does_pass("prunes/tester-helper");
}

View file

@ -0,0 +1,349 @@
#![allow(dead_code)]
use std::{
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use ignore_files::{IgnoreFile, IgnoreFilter};
use project_origins::ProjectType;
use tokio::fs::canonicalize;
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{
filekind::FileEventKind, Event, FileType, Priority, ProcessEnd, Source, Tag,
};
use watchexec_filterer_ignore::IgnoreFilterer;
use watchexec_filterer_tagged::{Filter, FilterFile, Matcher, Op, Pattern, TaggedFilterer};
use watchexec_signals::Signal;
pub mod tagged {
pub use super::ig_file as file;
pub use super::tagged_filt as filt;
pub use super::Applies;
pub use super::FilterExt;
pub use super::PathHarness;
pub use super::TaggedHarness;
pub use super::{filter, glob_filter, notglob_filter};
pub use watchexec_events::Priority;
}
pub mod tagged_ff {
pub use super::ff_file as file;
pub use super::tagged::*;
pub use super::tagged_fffilt as filt;
}
/// Assertion helpers for checking path-based events against any [`Filterer`].
pub trait PathHarness: Filterer {
    /// Runs a single-path event through the filterer at normal priority.
    fn check_path(
        &self,
        path: PathBuf,
        file_type: Option<FileType>,
    ) -> std::result::Result<bool, RuntimeError> {
        let event = Event {
            tags: vec![Tag::Path { path, file_type }],
            metadata: Default::default(),
        };

        self.check_event(&event, Priority::Normal)
    }

    /// Asserts that `path` passes (or fails) the filterer.
    ///
    /// Paths starting with `/test/` are rebased onto the current directory;
    /// other relative paths are joined to it; rooted paths are used as-is.
    fn path_pass(&self, path: &str, file_type: Option<FileType>, pass: bool) {
        let origin = std::fs::canonicalize(".").unwrap();
        let full_path = if let Some(suf) = path.strip_prefix("/test/") {
            origin.join(suf)
        } else if Path::new(path).has_root() {
            path.into()
        } else {
            origin.join(path)
        };

        tracing::info!(?path, ?file_type, ?pass, "check");

        assert_eq!(
            self.check_path(full_path, file_type).unwrap(),
            pass,
            "{} {:?} (expected {})",
            match file_type {
                Some(FileType::File) => "file",
                Some(FileType::Dir) => "dir",
                Some(FileType::Symlink) => "symlink",
                Some(FileType::Other) => "other",
                None => "path",
            },
            path,
            if pass { "pass" } else { "fail" }
        );
    }

    // Convenience wrappers over `path_pass` for each file type.
    fn file_does_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::File), true);
    }

    fn file_doesnt_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::File), false);
    }

    fn dir_does_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::Dir), true);
    }

    fn dir_doesnt_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::Dir), false);
    }

    fn unk_does_pass(&self, path: &str) {
        self.path_pass(path, None, true);
    }

    fn unk_doesnt_pass(&self, path: &str) {
        self.path_pass(path, None, false);
    }
}
impl PathHarness for TaggedFilterer {}
impl PathHarness for IgnoreFilterer {}
/// Assertion helpers for checking non-path tags against a tagged filterer.
pub trait TaggedHarness {
    /// Runs a single-tag event through the filterer at the given priority.
    fn check_tag(&self, tag: Tag, priority: Priority) -> std::result::Result<bool, RuntimeError>;

    /// Asserts the pass/fail outcome of a filesystem-source event at `priority`.
    fn priority_pass(&self, priority: Priority, pass: bool) {
        tracing::info!(?priority, ?pass, "check");

        assert_eq!(
            self.check_tag(Tag::Source(Source::Filesystem), priority)
                .unwrap(),
            pass,
            "{priority:?} (expected {})",
            if pass { "pass" } else { "fail" }
        );
    }

    fn priority_does_pass(&self, priority: Priority) {
        self.priority_pass(priority, true);
    }

    fn priority_doesnt_pass(&self, priority: Priority) {
        self.priority_pass(priority, false);
    }

    /// Asserts the pass/fail outcome of a single tag at normal priority.
    fn tag_pass(&self, tag: Tag, pass: bool) {
        tracing::info!(?tag, ?pass, "check");

        assert_eq!(
            self.check_tag(tag.clone(), Priority::Normal).unwrap(),
            pass,
            "{tag:?} (expected {})",
            if pass { "pass" } else { "fail" }
        );
    }

    // Convenience wrappers over `tag_pass` for each tag kind.
    fn fek_does_pass(&self, fek: FileEventKind) {
        self.tag_pass(Tag::FileEventKind(fek), true);
    }

    fn fek_doesnt_pass(&self, fek: FileEventKind) {
        self.tag_pass(Tag::FileEventKind(fek), false);
    }

    fn source_does_pass(&self, source: Source) {
        self.tag_pass(Tag::Source(source), true);
    }

    fn source_doesnt_pass(&self, source: Source) {
        self.tag_pass(Tag::Source(source), false);
    }

    fn pid_does_pass(&self, pid: u32) {
        self.tag_pass(Tag::Process(pid), true);
    }

    fn pid_doesnt_pass(&self, pid: u32) {
        self.tag_pass(Tag::Process(pid), false);
    }

    fn signal_does_pass(&self, sig: Signal) {
        self.tag_pass(Tag::Signal(sig), true);
    }

    fn signal_doesnt_pass(&self, sig: Signal) {
        self.tag_pass(Tag::Signal(sig), false);
    }

    fn complete_does_pass(&self, exit: Option<ProcessEnd>) {
        self.tag_pass(Tag::ProcessCompletion(exit), true);
    }

    fn complete_doesnt_pass(&self, exit: Option<ProcessEnd>) {
        self.tag_pass(Tag::ProcessCompletion(exit), false);
    }
}
impl TaggedHarness for TaggedFilterer {
    // Wraps a single tag in a throwaway event and runs it through the filterer.
    fn check_tag(&self, tag: Tag, priority: Priority) -> std::result::Result<bool, RuntimeError> {
        let event = Event {
            tags: vec![tag],
            metadata: Default::default(),
        };

        self.check_event(&event, priority)
    }
}
/// Installs a pretty-printing tracing subscriber driven by the default env filter.
fn tracing_init() {
    use tracing_subscriber::{
        fmt::{format::FmtSpan, Subscriber},
        util::SubscriberInitExt,
        EnvFilter,
    };

    // Discard the result: only the first call per process can install a
    // subscriber, and repeated calls (one per test) are harmless.
    let _ = Subscriber::builder()
        .pretty()
        .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
        .with_env_filter(EnvFilter::from_default_env())
        .finish()
        .try_init();
}
/// Builds an [`IgnoreFilter`] rooted at `./<origin>` from the given ignore files.
pub async fn ignore_filt(origin: &str, ignore_files: &[IgnoreFile]) -> IgnoreFilter {
    tracing_init();
    let origin = canonicalize(".").await.unwrap().join(origin);
    IgnoreFilter::new(origin, ignore_files)
        .await
        .expect("making filterer")
}

/// Builds a [`TaggedFilterer`] rooted at the current directory, pre-loaded with `filters`.
pub async fn tagged_filt(filters: &[Filter]) -> Arc<TaggedFilterer> {
    let origin = canonicalize(".").await.unwrap();
    tracing_init();
    let filterer = TaggedFilterer::new(origin.clone(), origin)
        .await
        .expect("creating filterer");
    filterer.add_filters(filters).await.expect("adding filters");
    filterer
}

/// Builds a [`TaggedFilterer`] rooted at `./<origin>` with the given ignore files loaded.
pub async fn tagged_igfilt(origin: &str, ignore_files: &[IgnoreFile]) -> Arc<TaggedFilterer> {
    let origin = canonicalize(".").await.unwrap().join(origin);
    tracing_init();
    let filterer = TaggedFilterer::new(origin.clone(), origin)
        .await
        .expect("creating filterer");
    for file in ignore_files {
        tracing::info!(?file, "loading ignore file");
        filterer
            .add_ignore_file(file)
            .await
            .expect("adding ignore file");
    }
    filterer
}

/// Builds a [`TaggedFilterer`] with ignore files loaded and every filter file
/// parsed into filters which are then added to the filterer.
pub async fn tagged_fffilt(
    origin: &str,
    ignore_files: &[IgnoreFile],
    filter_files: &[FilterFile],
) -> Arc<TaggedFilterer> {
    let filterer = tagged_igfilt(origin, ignore_files).await;
    let mut filters = Vec::new();
    for file in filter_files {
        tracing::info!(?file, "loading filter file");
        filters.extend(file.load().await.expect("loading filter file"));
    }

    filterer
        .add_filters(&filters)
        .await
        .expect("adding filters");

    filterer
}
/// Builds an [`IgnoreFile`] pointing at `tests/ignores/<name>`, unrestricted in
/// scope (no `applies_in` origin, no `applies_to` project type).
pub async fn ig_file(name: &str) -> IgnoreFile {
    let path = canonicalize(".")
        .await
        .unwrap()
        .join("tests")
        .join("ignores")
        .join(name);
    IgnoreFile {
        path,
        applies_in: None,
        applies_to: None,
    }
}

/// Same as [`ig_file`], wrapped as a tagged-filterer [`FilterFile`].
pub async fn ff_file(name: &str) -> FilterFile {
    FilterFile(ig_file(name).await)
}
/// Builder-style setters for scoping ignore/filter files in tests.
pub trait Applies {
    /// Restricts the file to apply within `./<origin>`.
    fn applies_in(self, origin: &str) -> Self;
    /// Restricts the file to apply to a given project type.
    fn applies_to(self, project_type: ProjectType) -> Self;
}

impl Applies for IgnoreFile {
    fn applies_in(mut self, origin: &str) -> Self {
        let origin = std::fs::canonicalize(".").unwrap().join(origin);
        self.applies_in = Some(origin);
        self
    }

    fn applies_to(mut self, project_type: ProjectType) -> Self {
        self.applies_to = Some(project_type);
        self
    }
}

impl Applies for FilterFile {
    // FilterFile is a newtype over IgnoreFile: delegate to its impl.
    fn applies_in(self, origin: &str) -> Self {
        Self(self.0.applies_in(origin))
    }

    fn applies_to(self, project_type: ProjectType) -> Self {
        Self(self.0.applies_to(project_type))
    }
}
/// Parses a textual filter expression, panicking on syntax errors (test helper).
pub fn filter(expr: &str) -> Filter {
    Filter::from_str(expr).expect("parse filter")
}
/// Shorthand for a positive path-glob filter with no path scope and no negation.
pub fn glob_filter(pat: &str) -> Filter {
    Filter {
        in_path: None,
        on: Matcher::Path,
        op: Op::Glob,
        pat: Pattern::Glob(pat.to_string()),
        negate: false,
    }
}
pub fn notglob_filter(pat: &str) -> Filter {
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotGlob,
pat: Pattern::Glob(pat.into()),
negate: false,
}
}
/// Extension for scoping a test filter to a path.
pub trait FilterExt {
    /// Scopes the filter to the current directory.
    fn in_path(self) -> Self
    where
        Self: Sized,
    {
        self.in_subpath("")
    }

    /// Scopes the filter to `./<sub>`.
    fn in_subpath(self, sub: impl AsRef<Path>) -> Self;
}

impl FilterExt for Filter {
    fn in_subpath(mut self, sub: impl AsRef<Path>) -> Self {
        let origin = std::fs::canonicalize(".").unwrap();
        self.in_path = Some(origin.join(sub));
        self
    }
}

View file

@ -0,0 +1,3 @@
# comment
# blank line

View file

@ -0,0 +1,2 @@
type==dir
path*=prunes

View file

@ -0,0 +1,11 @@
Cargo.toml
package.json
*.gemspec
test-*
*.sw*
sources.*/
/output.*
**/possum
zebra/**
elep/**/hant
song/**/bird/

View file

@ -0,0 +1,2 @@
path=nah
!path=nah.yeah

View file

@ -0,0 +1,5 @@
path*!Cargo.toml
path*!package.json
path*!*.gemspec
path~!test-[^u]+
path~![.]sw[a-z]$

View file

@ -0,0 +1,453 @@
use std::num::{NonZeroI32, NonZeroI64};
use watchexec_events::{filekind::*, ProcessEnd, Source};
use watchexec_filterer_tagged::TaggedFilterer;
use watchexec_signals::Signal;
mod helpers;
use helpers::tagged::*;
#[tokio::test]
async fn empty_filter_passes_everything() {
let filterer = filt(&[]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
filterer.pid_does_pass(1234);
filterer.signal_does_pass(Signal::User1);
filterer.complete_does_pass(None);
filterer.complete_does_pass(Some(ProcessEnd::Success));
}
// Source is used as a relatively simple test case for common text-based ops, so
// these aren't repeated for the other tags, which instead focus on their own
// special characteristics.
#[tokio::test]
async fn source_exact() {
let filterer = filt(&[filter("source==keyboard")]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.source_doesnt_pass(Source::Mouse);
}
#[tokio::test]
async fn source_glob() {
let filterer = filt(&[filter("source*=*i*m*")]).await;
filterer.source_does_pass(Source::Filesystem);
filterer.source_does_pass(Source::Time);
filterer.source_doesnt_pass(Source::Internal);
}
#[tokio::test]
async fn source_regex() {
let filterer = filt(&[filter("source~=(keyboard|mouse)")]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.source_does_pass(Source::Mouse);
filterer.source_doesnt_pass(Source::Internal);
}
#[tokio::test]
async fn source_two_filters() {
let filterer = filt(&[filter("source*=*s*"), filter("source!=mouse")]).await;
filterer.source_doesnt_pass(Source::Mouse);
filterer.source_does_pass(Source::Filesystem);
}
#[tokio::test]
async fn source_allowlisting() {
// allowlisting is vastly easier to achieve with e.g. `source==mouse`
// but this pattern is nonetheless useful for more complex cases.
let filterer = filt(&[filter("source*!*"), filter("!source==mouse")]).await;
filterer.source_does_pass(Source::Mouse);
filterer.source_doesnt_pass(Source::Filesystem);
}
#[tokio::test]
async fn source_set() {
let f = filter("source:=keyboard,mouse");
assert_eq!(f, filter("source=keyboard,mouse"));
let filterer = filt(&[f]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.source_does_pass(Source::Mouse);
filterer.source_doesnt_pass(Source::Internal);
let filterer = filt(&[filter("source:!keyboard,mouse")]).await;
filterer.source_doesnt_pass(Source::Keyboard);
filterer.source_doesnt_pass(Source::Mouse);
filterer.source_does_pass(Source::Internal);
}
#[tokio::test]
async fn fek_glob_level_one() {
let f = filter("kind*=Create(*)");
assert_eq!(f, filter("fek*=Create(*)"));
assert_eq!(f, filter("kind=Create(*)"));
assert_eq!(f, filter("fek=Create(*)"));
let filterer = filt(&[f]).await;
filterer.fek_does_pass(FileEventKind::Create(CreateKind::Any));
filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
}
#[tokio::test]
async fn fek_glob_level_two() {
let filterer = filt(&[filter("fek=Modify(Data(*))")]).await;
filterer.fek_does_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Other));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Metadata(
MetadataKind::Permissions,
)));
filterer.fek_doesnt_pass(FileEventKind::Create(CreateKind::Any));
}
#[tokio::test]
async fn fek_level_three() {
fn suite(filterer: &TaggedFilterer) {
filterer.fek_does_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Size)));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Other));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Metadata(
MetadataKind::Permissions,
)));
filterer.fek_doesnt_pass(FileEventKind::Create(CreateKind::Any));
}
suite(filt(&[filter("fek=Modify(Data(Content))")]).await.as_ref());
suite(filt(&[filter("fek==Modify(Data(Content))")]).await.as_ref());
}
#[tokio::test]
async fn pid_set_single() {
let f = filter("process:=1234");
assert_eq!(f, filter("pid:=1234"));
assert_eq!(f, filter("process=1234"));
assert_eq!(f, filter("pid=1234"));
let filterer = filt(&[f]).await;
filterer.pid_does_pass(1234);
filterer.pid_doesnt_pass(5678);
filterer.pid_doesnt_pass(12345);
filterer.pid_doesnt_pass(123);
}
#[tokio::test]
async fn pid_set_multiple() {
let filterer = filt(&[filter("pid=123,456")]).await;
filterer.pid_does_pass(123);
filterer.pid_does_pass(456);
filterer.pid_doesnt_pass(123456);
filterer.pid_doesnt_pass(12);
filterer.pid_doesnt_pass(23);
filterer.pid_doesnt_pass(45);
filterer.pid_doesnt_pass(56);
filterer.pid_doesnt_pass(1234);
filterer.pid_doesnt_pass(3456);
filterer.pid_doesnt_pass(4567);
filterer.pid_doesnt_pass(34567);
filterer.pid_doesnt_pass(0);
}
#[tokio::test]
async fn pid_equals() {
let f = filter("process==1234");
assert_eq!(f, filter("pid==1234"));
let filterer = filt(&[f]).await;
filterer.pid_does_pass(1234);
filterer.pid_doesnt_pass(5678);
filterer.pid_doesnt_pass(12345);
filterer.pid_doesnt_pass(123);
}
#[tokio::test]
async fn signal_set_single_without_sig() {
let f = filter("signal=INT");
assert_eq!(f, filter("sig=INT"));
assert_eq!(f, filter("signal:=INT"));
assert_eq!(f, filter("sig:=INT"));
let filterer = filt(&[f]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_single_with_sig() {
let filterer = filt(&[filter("signal:=SIGINT")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_without_sig() {
let filterer = filt(&[filter("sig:=INT,TERM")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_with_sig() {
let filterer = filt(&[filter("signal:=SIGINT,SIGTERM")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_mixed_sig() {
let filterer = filt(&[filter("sig:=SIGINT,TERM")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_equals_without_sig() {
let filterer = filt(&[filter("sig==INT")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_equals_with_sig() {
let filterer = filt(&[filter("signal==SIGINT")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_single_numbers() {
let filterer = filt(&[filter("signal:=2")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_numbers() {
let filterer = filt(&[filter("sig:=2,15")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_equals_numbers() {
let filterer = filt(&[filter("sig==2")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_all_mixed() {
let filterer = filt(&[filter("signal:=SIGHUP,INT,15")]).await;
filterer.signal_does_pass(Signal::Hangup);
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::User1);
}
#[tokio::test]
async fn complete_empty() {
let f = filter("complete=_");
assert_eq!(f, filter("complete*=_"));
assert_eq!(f, filter("exit=_"));
assert_eq!(f, filter("exit*=_"));
let filterer = filt(&[f]).await;
filterer.complete_does_pass(None);
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
}
#[tokio::test]
async fn complete_any() {
let filterer = filt(&[filter("complete=*")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Success));
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_does_pass(None);
}
#[tokio::test]
async fn complete_with_success() {
let filterer = filt(&[filter("complete*=success")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_continued() {
let filterer = filt(&[filter("complete*=continued")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Continued));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_exit_error() {
let filterer = filt(&[filter("complete*=error(1)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_any_exit_error() {
let filterer = filt(&[filter("complete*=error(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitError(
NonZeroI64::new(-12823912738).unwrap(),
)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_stop() {
let filterer = filt(&[filter("complete*=stop(19)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// `complete*=stop(*)` must pass any stop regardless of signal number —
// including negative values — but not error exits, success, or absent
// completion.
#[tokio::test]
async fn complete_with_any_stop() {
let filterer = filt(&[filter("complete*=stop(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(1).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(
NonZeroI32::new(-128239127).unwrap(),
)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// `complete*=exception(4B53)` takes the exception code in hexadecimal:
// 0x4B53 == 19283, which is why the decimal 19283 below is expected to
// match. Success, absent completion, and a *different* exception code must
// all be rejected (the original test never checked the latter, so a filter
// matching any exception would have passed it).
#[tokio::test]
async fn complete_with_specific_exception() {
let filterer = filt(&[filter("complete*=exception(4B53)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(19283).unwrap())));
// A different exception code must not match the specific pattern.
filterer.complete_doesnt_pass(Some(ProcessEnd::Exception(NonZeroI32::new(1).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// `complete*=exception(*)` must pass any exception regardless of code —
// including negative values — but not stops, error exits, success, or
// absent completion.
#[tokio::test]
async fn complete_with_any_exception() {
let filterer = filt(&[filter("complete*=exception(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(1).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(63).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::Exception(
NonZeroI32::new(-128239127).unwrap(),
)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// The `signal(...)` argument accepts three spellings of the same signal;
// the next three tests pin each one down against ProcessEnd::ExitSignal.

// Full name with the SIG prefix: `signal(SIGINT)`.
#[tokio::test]
async fn complete_with_specific_signal_with_sig() {
let filterer = filt(&[filter("complete*=signal(SIGINT)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// Short name without the SIG prefix: `signal(INT)`.
#[tokio::test]
async fn complete_with_specific_signal_without_sig() {
let filterer = filt(&[filter("complete*=signal(INT)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// Numeric form: `signal(2)` — expected to resolve to Interrupt (SIGINT).
#[tokio::test]
async fn complete_with_specific_signal_number() {
let filterer = filt(&[filter("complete*=signal(2)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// `complete*=signal(*)` must pass any signal termination — named or custom
// numeric — but not stops, error exits, success, or absent completion.
#[tokio::test]
async fn complete_with_any_signal() {
let filterer = filt(&[filter("complete*=signal(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Terminate)));
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Custom(123))));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
// `priority=normal` (auto/default op) must match exactly one priority level.
#[tokio::test]
async fn priority_auto() {
let filterer = filt(&[filter("priority=normal")]).await;
filterer.priority_doesnt_pass(Priority::Low);
filterer.priority_does_pass(Priority::Normal);
filterer.priority_doesnt_pass(Priority::High);
}
// `priority:=normal,high` (in-set op) must match every level in the set and
// reject levels outside it.
#[tokio::test]
async fn priority_set() {
let filterer = filt(&[filter("priority:=normal,high")]).await;
filterer.priority_doesnt_pass(Priority::Low);
filterer.priority_does_pass(Priority::Normal);
filterer.priority_does_pass(Priority::High);
}
// With no filters at all, every priority must pass.
#[tokio::test]
async fn priority_none() {
let filterer = filt(&[]).await;
filterer.priority_does_pass(Priority::Low);
filterer.priority_does_pass(Priority::Normal);
filterer.priority_does_pass(Priority::High);
}

View file

@ -0,0 +1,226 @@
use std::{collections::HashSet, str::FromStr};
use watchexec_filterer_tagged::{Filter, Matcher, Op, Pattern, Regex, TaggedFiltererError};
mod helpers;
use helpers::tagged::*;
// Malformed filter expressions must fail with TaggedFiltererError::Parse.

// An empty string is not a valid filter.
#[test]
fn empty_filter() {
assert!(matches!(
Filter::from_str(""),
Err(TaggedFiltererError::Parse { .. })
));
}
// A lone negation marker with nothing to negate is not a valid filter.
#[test]
fn only_bang() {
assert!(matches!(
Filter::from_str("!"),
Err(TaggedFiltererError::Parse { .. })
));
}
// A matcher name without any operator/pattern is not a valid filter.
#[test]
fn no_op() {
assert!(matches!(
Filter::from_str("foobar"),
Err(TaggedFiltererError::Parse { .. })
));
}
// When the `=` (auto) operator is used, the parser picks a default Op based
// on the matcher: Glob for path-like matchers, InSet otherwise.

// `path=` defaults to a glob match.
#[test]
fn path_auto_op() {
assert_eq!(
filter("path=foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("foo".to_string()),
negate: false,
}
);
}
// `fek=` (file event kind) also defaults to a glob match.
#[test]
fn fek_auto_op() {
assert_eq!(
filter("fek=foo"),
Filter {
in_path: None,
on: Matcher::FileEventKind,
op: Op::Glob,
pat: Pattern::Glob("foo".to_string()),
negate: false,
}
);
}
// Non-path matchers such as `type=` default to set membership instead.
#[test]
fn other_auto_op() {
assert_eq!(
filter("type=foo"),
Filter {
in_path: None,
on: Matcher::FileType,
op: Op::InSet,
pat: Pattern::Set(HashSet::from(["foo".to_string()])),
negate: false,
}
);
}
// Each explicit operator spelling must parse to its corresponding Op and
// pattern kind: `==`/`!=` exact, `~=`/`~!` regex, `*=`/`*!` glob,
// `:=`/`:!` set membership.

// `==` → exact equality.
#[test]
fn op_equal() {
assert_eq!(
filter("path==foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Equal,
pat: Pattern::Exact("foo".to_string()),
negate: false,
}
);
}
// `!=` → exact inequality.
#[test]
fn op_not_equal() {
assert_eq!(
filter("path!=foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotEqual,
pat: Pattern::Exact("foo".to_string()),
negate: false,
}
);
}
// `~=` → regex match.
#[test]
fn op_regex() {
assert_eq!(
filter("path~=^fo+$"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Regex,
pat: Pattern::Regex(Regex::new("^fo+$").unwrap()),
negate: false,
}
);
}
// `~!` → negated regex match.
#[test]
fn op_not_regex() {
assert_eq!(
filter("path~!f(o|al)+"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotRegex,
pat: Pattern::Regex(Regex::new("f(o|al)+").unwrap()),
negate: false,
}
);
}
// `*=` → glob match.
#[test]
fn op_glob() {
assert_eq!(
filter("path*=**/foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("**/foo".to_string()),
negate: false,
}
);
}
// `*!` → negated glob match.
#[test]
fn op_not_glob() {
assert_eq!(
filter("path*!foo.*"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotGlob,
pat: Pattern::Glob("foo.*".to_string()),
negate: false,
}
);
}
// `:=` → membership in a comma-separated set.
#[test]
fn op_in_set() {
assert_eq!(
filter("path:=foo,bar"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::InSet,
pat: Pattern::Set(HashSet::from(["foo".to_string(), "bar".to_string()])),
negate: false,
}
);
}
// `:!` → non-membership in a comma-separated set.
#[test]
fn op_not_in_set() {
assert_eq!(
filter("path:!baz,qux"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotInSet,
pat: Pattern::Set(HashSet::from(["baz".to_string(), "qux".to_string()])),
negate: false,
}
);
}
// Quoting lets a pattern contain spaces; single and double quotes are both
// stripped from the parsed pattern.

// Single-quoted pattern with a space.
#[test]
fn quoted_single() {
assert_eq!(
filter("path='blanche neige'"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("blanche neige".to_string()),
negate: false,
}
);
}
// Double-quoted pattern with spaces.
#[test]
fn quoted_double() {
assert_eq!(
filter("path=\"et les sept nains\""),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("et les sept nains".to_string()),
negate: false,
}
);
}
// A leading `!` sets the negate flag while leaving op and pattern intact.
#[test]
fn negate() {
assert_eq!(
filter("!path~=^f[om]+$"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Regex,
pat: Pattern::Regex(Regex::new("^f[om]+$").unwrap()),
negate: true,
}
);
}

View file

@ -0,0 +1,454 @@
use std::sync::Arc;
use watchexec_filterer_tagged::TaggedFilterer;
mod helpers;
use helpers::tagged::*;
// With no filters configured, every file, directory, and path shape passes.
#[tokio::test]
async fn empty_filter_passes_everything() {
let filterer = filt(&[]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/test/Cargo.toml");
filterer.dir_does_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples/carrots/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_does_pass("apples/oranges/bananas");
}
// A bare filename glob matches that name in any directory, and nothing else.
#[tokio::test]
async fn exact_filename() {
let filterer = filt(&[glob_filter("Cargo.toml")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("/test/foo/bar/Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("/test/Cargo.toml");
}
// Multiple filename globs combine as a union: a path passing any one passes.
#[tokio::test]
async fn exact_filenames_multiple() {
let filterer = filt(&[glob_filter("Cargo.toml"), glob_filter("package.json")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("/test/foo/bar/Cargo.toml");
filterer.file_does_pass("package.json");
filterer.file_does_pass("/test/foo/bar/package.json");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_doesnt_pass("package.toml");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("/test/Cargo.toml");
filterer.dir_does_pass("/test/package.json");
}
// Glob include-filter behaviour for the main glob shapes: trailing `*`,
// trailing `/` (dirs only), leading `/` (root-anchored), and `**` in
// leading, trailing, and middle positions.

// `Cargo.*` matches any extension of that stem.
#[tokio::test]
async fn glob_single_final_ext_star() {
let filterer = filt(&[glob_filter("Cargo.*")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("Cargo.toml");
}
// A trailing slash restricts the glob to directories: files and paths of
// unknown type must not pass.
#[tokio::test]
async fn glob_star_trailing_slash() {
let filterer = filt(&[glob_filter("Cargo.*/")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("Cargo.toml");
filterer.unk_doesnt_pass("Cargo.toml");
}
// A leading slash anchors the glob at the project root: matches at the
// root only, not in subdirectories.
#[tokio::test]
async fn glob_star_leading_slash() {
let filterer = filt(&[glob_filter("/Cargo.*")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.dir_does_pass("Cargo.toml");
filterer.unk_does_pass("Cargo.toml");
filterer.file_doesnt_pass("foo/Cargo.toml");
filterer.dir_doesnt_pass("foo/Cargo.toml");
}
// `**/name` matches the name at any depth, including the top level.
#[tokio::test]
async fn glob_leading_double_star() {
let filterer = filt(&[glob_filter("**/possum")]).await;
filterer.file_does_pass("possum");
filterer.file_does_pass("foo/bar/possum");
filterer.file_does_pass("/foo/bar/possum");
filterer.dir_does_pass("possum");
filterer.dir_does_pass("foo/bar/possum");
filterer.dir_does_pass("/foo/bar/possum");
filterer.file_doesnt_pass("rat");
filterer.file_doesnt_pass("foo/bar/rat");
filterer.file_doesnt_pass("/foo/bar/rat");
}
// `name/**` matches contents under the named directory, not the directory
// itself, and is sensitive to where the prefix occurs in the path.
#[tokio::test]
async fn glob_trailing_double_star() {
let filterer = filt(&[glob_filter("possum/**")]).await;
filterer.file_doesnt_pass("possum");
filterer.file_does_pass("possum/foo/bar");
filterer.file_doesnt_pass("/possum/foo/bar");
filterer.file_does_pass("/test/possum/foo/bar");
filterer.dir_doesnt_pass("possum");
filterer.dir_doesnt_pass("foo/bar/possum");
filterer.dir_doesnt_pass("/foo/bar/possum");
filterer.dir_does_pass("possum/foo/bar");
filterer.dir_doesnt_pass("/possum/foo/bar");
filterer.dir_does_pass("/test/possum/foo/bar");
filterer.file_doesnt_pass("rat");
filterer.file_doesnt_pass("foo/bar/rat");
filterer.file_doesnt_pass("/foo/bar/rat");
}
// `a/**/b` matches any (possibly empty) run of intermediate components.
#[tokio::test]
async fn glob_middle_double_star() {
let filterer = filt(&[glob_filter("apples/**/oranges")]).await;
filterer.dir_doesnt_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_does_pass("apples/carrots/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples/oranges/bananas");
}
// As above, but the trailing slash again restricts matches to directories.
#[tokio::test]
async fn glob_double_star_trailing_slash() {
let filterer = filt(&[glob_filter("apples/**/oranges/")]).await;
filterer.dir_doesnt_pass("/a/folder");
filterer.file_doesnt_pass("apples/carrots/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_does_pass("apples/carrots/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.unk_doesnt_pass("apples/carrots/oranges");
filterer.unk_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.unk_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples/oranges/bananas");
}
// The `ignore_*` tests mirror the glob tests above with a NotGlob (ignore)
// filter: every pass/fail expectation is inverted relative to its
// include-glob counterpart.

#[tokio::test]
async fn ignore_exact_filename() {
let filterer = filt(&[notglob_filter("Cargo.toml")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("/test/Cargo.toml");
}
#[tokio::test]
async fn ignore_exact_filenames_multiple() {
let filterer = filt(&[notglob_filter("Cargo.toml"), notglob_filter("package.json")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
filterer.file_doesnt_pass("package.json");
filterer.file_doesnt_pass("/test/foo/bar/package.json");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("package.toml");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("/test/Cargo.toml");
filterer.dir_doesnt_pass("/test/package.json");
}
#[tokio::test]
async fn ignore_glob_single_final_ext_star() {
let filterer = filt(&[notglob_filter("Cargo.*")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("Cargo.toml");
}
// Trailing slash: only directories are ignored; files still pass.
#[tokio::test]
async fn ignore_glob_star_trailing_slash() {
let filterer = filt(&[notglob_filter("Cargo.*/")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("Cargo.toml");
filterer.unk_does_pass("Cargo.toml");
}
// Leading slash: only root-level matches are ignored.
#[tokio::test]
async fn ignore_glob_star_leading_slash() {
let filterer = filt(&[notglob_filter("/Cargo.*")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.dir_doesnt_pass("Cargo.toml");
filterer.unk_doesnt_pass("Cargo.toml");
filterer.file_does_pass("foo/Cargo.toml");
filterer.dir_does_pass("foo/Cargo.toml");
}
#[tokio::test]
async fn ignore_glob_leading_double_star() {
let filterer = filt(&[notglob_filter("**/possum")]).await;
filterer.file_doesnt_pass("possum");
filterer.file_doesnt_pass("foo/bar/possum");
filterer.file_doesnt_pass("/foo/bar/possum");
filterer.dir_doesnt_pass("possum");
filterer.dir_doesnt_pass("foo/bar/possum");
filterer.dir_doesnt_pass("/foo/bar/possum");
filterer.file_does_pass("rat");
filterer.file_does_pass("foo/bar/rat");
filterer.file_does_pass("/foo/bar/rat");
}
#[tokio::test]
async fn ignore_glob_trailing_double_star() {
let filterer = filt(&[notglob_filter("possum/**")]).await;
filterer.file_does_pass("possum");
filterer.file_doesnt_pass("possum/foo/bar");
filterer.file_does_pass("/possum/foo/bar");
filterer.file_doesnt_pass("/test/possum/foo/bar");
filterer.dir_does_pass("possum");
filterer.dir_does_pass("foo/bar/possum");
filterer.dir_does_pass("/foo/bar/possum");
filterer.dir_doesnt_pass("possum/foo/bar");
filterer.dir_does_pass("/possum/foo/bar");
filterer.dir_doesnt_pass("/test/possum/foo/bar");
filterer.file_does_pass("rat");
filterer.file_does_pass("foo/bar/rat");
filterer.file_does_pass("/foo/bar/rat");
}
#[tokio::test]
async fn ignore_glob_middle_double_star() {
let filterer = filt(&[notglob_filter("apples/**/oranges")]).await;
filterer.dir_does_pass("/a/folder");
filterer.file_doesnt_pass("apples/carrots/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_doesnt_pass("apples/carrots/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_doesnt_pass("apples/oranges/bananas");
filterer.dir_doesnt_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn ignore_glob_double_star_trailing_slash() {
let filterer = filt(&[notglob_filter("apples/**/oranges/")]).await;
filterer.dir_does_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_doesnt_pass("apples/carrots/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.unk_does_pass("apples/carrots/oranges");
filterer.unk_does_pass("apples/carrots/cauliflowers/oranges");
filterer.unk_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_doesnt_pass("apples/oranges/bananas");
filterer.dir_doesnt_pass("apples/oranges/bananas");
}
// When a path matches both an include glob and an ignore (NotGlob) filter,
// the ignore wins.
#[tokio::test]
async fn ignores_take_precedence() {
let filterer = filt(&[
glob_filter("*.docx"),
glob_filter("*.toml"),
glob_filter("*.json"),
notglob_filter("*.toml"),
notglob_filter("*.json"),
])
.await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
filterer.file_doesnt_pass("package.json");
filterer.file_doesnt_pass("/test/foo/bar/package.json");
filterer.dir_doesnt_pass("/test/Cargo.toml");
filterer.dir_doesnt_pass("/test/package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
// A filter with no in_path scope applies everywhere, including paths
// outside the project.
#[tokio::test]
async fn scopes_global() {
let filterer = filt(&[notglob_filter("*.toml")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.dir_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/outside/Cargo.toml");
filterer.dir_doesnt_pass("/outside/Cargo.toml");
filterer.file_does_pass("/outside/package.json");
filterer.dir_does_pass("/outside/package.json");
filterer.file_does_pass("package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
// A filter scoped with in_path() applies only inside the project root;
// matching paths outside it still pass.
#[tokio::test]
async fn scopes_local() {
let filterer = filt(&[notglob_filter("*.toml").in_path()]).await;
filterer.file_doesnt_pass("/test/Cargo.toml");
filterer.dir_doesnt_pass("/test/Cargo.toml");
filterer.file_does_pass("/outside/Cargo.toml");
filterer.dir_does_pass("/outside/Cargo.toml");
filterer.file_does_pass("/outside/package.json");
filterer.dir_does_pass("/outside/package.json");
filterer.file_does_pass("package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
// A filter scoped to a subdirectory applies only under that subpath;
// sibling directories and the project root are unaffected.
#[tokio::test]
async fn scopes_sublocal() {
let filterer = filt(&[notglob_filter("*.toml").in_subpath("src")]).await;
filterer.file_doesnt_pass("/test/src/Cargo.toml");
filterer.dir_doesnt_pass("/test/src/Cargo.toml");
filterer.file_does_pass("/test/Cargo.toml");
filterer.dir_does_pass("/test/Cargo.toml");
filterer.file_does_pass("/test/tests/Cargo.toml");
filterer.dir_does_pass("/test/tests/Cargo.toml");
filterer.file_does_pass("/outside/Cargo.toml");
filterer.dir_does_pass("/outside/Cargo.toml");
filterer.file_does_pass("/outside/package.json");
filterer.dir_does_pass("/outside/package.json");
filterer.file_does_pass("package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
// The following tests check that the "buggy"/"confusing" watchexec v1 behaviour
// is no longer present.

// Shared assertion suite: an ignore on `prunes` must not affect the
// unrelated `apples` tree nor the similarly-named `raw-prunes` tree, while
// everything under `prunes/` itself stays ignored.
fn watchexec_v1_confusing_suite(filterer: Arc<TaggedFilterer>) {
filterer.file_does_pass("apples");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("raw-prunes");
filterer.dir_does_pass("raw-prunes");
filterer.file_does_pass("raw-prunes/carrots/cauliflowers/oranges");
filterer.file_does_pass("raw-prunes/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("raw-prunes/oranges/bananas");
filterer.dir_does_pass("raw-prunes/carrots/cauliflowers/oranges");
filterer.dir_does_pass("raw-prunes/carrots/cauliflowers/artichokes/oranges");
filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/oranges");
filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges");
filterer.file_doesnt_pass("prunes/carrots/cauliflowers/oranges");
filterer.file_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges");
filterer.file_doesnt_pass("prunes/oranges/bananas");
}
// A bare `prunes` ignore hides both the file and the directory of that name.
#[tokio::test]
async fn ignore_folder_with_bare_match() {
let filterer = filt(&[notglob_filter("prunes").in_path()]).await;
filterer.file_doesnt_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
// A root-anchored `/prunes` ignore behaves the same at the top level.
#[tokio::test]
async fn ignore_folder_with_bare_and_leading_slash() {
let filterer = filt(&[notglob_filter("/prunes").in_path()]).await;
filterer.file_doesnt_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
// `prunes/` (trailing slash) ignores only the directory; a file of the
// same name still passes.
#[tokio::test]
async fn ignore_folder_with_bare_and_trailing_slash() {
let filterer = filt(&[notglob_filter("prunes/").in_path()]).await;
filterer.file_does_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
// `**/prunes/**` alone ignores only *contents*, not the entry itself.
#[tokio::test]
async fn ignore_folder_with_only_double_double_glob() {
let filterer = filt(&[notglob_filter("**/prunes/**").in_path()]).await;
filterer.file_does_pass("prunes");
filterer.dir_does_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
// Combining `**/prunes` and `**/prunes/**` ignores the entry and its
// contents everywhere.
#[tokio::test]
async fn ignore_folder_with_double_and_double_double_globs() {
let filterer = filt(&[
notglob_filter("**/prunes").in_path(),
notglob_filter("**/prunes/**").in_path(),
])
.await;
filterer.file_doesnt_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}

View file

@ -2,19 +2,6 @@
## Next (YYYY-MM-DD) ## Next (YYYY-MM-DD)
## v3.0.2 (2024-10-14)
- Deps: gix-config 0.40
## v3.0.1 (2024-04-28)
- Hide fmt::Debug spew from ignore crate, use `full_debug` feature to restore.
## v3.0.0 (2024-04-20)
- Deps: gix-config 0.36
- Deps: miette 7
## v2.1.0 (2024-01-04) ## v2.1.0 (2024-01-04)
- Normalise paths on all platforms (via `normalize-path`). - Normalise paths on all platforms (via `normalize-path`).

View file

@ -1,6 +1,6 @@
[package] [package]
name = "ignore-files" name = "ignore-files"
version = "3.0.2" version = "2.1.0"
authors = ["Félix Saparelli <felix@passcod.name>"] authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0" license = "Apache-2.0"
@ -16,9 +16,9 @@ edition = "2021"
[dependencies] [dependencies]
futures = "0.3.29" futures = "0.3.29"
gix-config = "0.40.0" gix-config = "0.31.0"
ignore = "0.4.18" ignore = "0.4.18"
miette = "7.2.0" miette = "5.3.0"
normalize-path = "0.2.1" normalize-path = "0.2.1"
thiserror = "1.0.50" thiserror = "1.0.50"
tracing = "0.1.40" tracing = "0.1.40"
@ -35,14 +35,8 @@ features = [
] ]
[dependencies.project-origins] [dependencies.project-origins]
version = "1.4.0" version = "1.3.0"
path = "../project-origins" path = "../project-origins"
[dev-dependencies] [dev-dependencies]
tracing-subscriber = "0.3.6" tracing-subscriber = "0.3.6"
[features]
default = []
## Don't hide ignore::gitignore::Gitignore Debug impl
full_debug = []

View file

@ -175,7 +175,7 @@ pub async fn from_origin(
)), )),
Some(Err(err)) => errors.push(Error::new(ErrorKind::Other, err)), Some(Err(err)) => errors.push(Error::new(ErrorKind::Other, err)),
Some(Ok(config)) => { Some(Ok(config)) => {
let config_excludes = config.value::<GitPath<'_>>("core.excludesFile"); let config_excludes = config.value::<GitPath<'_>>("core", None, "excludesFile");
if let Ok(excludes) = config_excludes { if let Ok(excludes) = config_excludes {
match excludes.interpolate(InterpolateContext { match excludes.interpolate(InterpolateContext {
home_dir: env::var("HOME").ok().map(PathBuf::from).as_deref(), home_dir: env::var("HOME").ok().map(PathBuf::from).as_deref(),
@ -329,7 +329,7 @@ pub async fn from_environment(appname: Option<&str>) -> (Vec<IgnoreFile>, Vec<Er
Err(err) => errors.push(Error::new(ErrorKind::Other, err)), Err(err) => errors.push(Error::new(ErrorKind::Other, err)),
Ok(Err(err)) => errors.push(Error::new(ErrorKind::Other, err)), Ok(Err(err)) => errors.push(Error::new(ErrorKind::Other, err)),
Ok(Ok(config)) => { Ok(Ok(config)) => {
let config_excludes = config.value::<GitPath<'_>>("core.excludesFile"); let config_excludes = config.value::<GitPath<'_>>("core", None, "excludesFile");
if let Ok(excludes) = config_excludes { if let Ok(excludes) = config_excludes {
match excludes.interpolate(InterpolateContext { match excludes.interpolate(InterpolateContext {
home_dir: env::var("HOME").ok().map(PathBuf::from).as_deref(), home_dir: env::var("HOME").ok().map(PathBuf::from).as_deref(),

View file

@ -1,4 +1,3 @@
use std::fmt;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use futures::stream::{FuturesUnordered, StreamExt}; use futures::stream::{FuturesUnordered, StreamExt};
@ -12,23 +11,12 @@ use tracing::{trace, trace_span};
use crate::{simplify_path, Error, IgnoreFile}; use crate::{simplify_path, Error, IgnoreFile};
#[derive(Clone)] #[derive(Clone, Debug)]
#[cfg_attr(feature = "full_debug", derive(Debug))]
struct Ignore { struct Ignore {
gitignore: Gitignore, gitignore: Gitignore,
builder: Option<GitignoreBuilder>, builder: Option<GitignoreBuilder>,
} }
#[cfg(not(feature = "full_debug"))]
impl fmt::Debug for Ignore {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Ignore")
.field("gitignore", &"ignore::gitignore::Gitignore{...}")
.field("builder", &"ignore::gitignore::GitignoreBuilder{...}")
.finish()
}
}
/// A mutable filter dedicated to ignore files and trees of ignore files. /// A mutable filter dedicated to ignore files and trees of ignore files.
/// ///
/// This reads and compiles ignore files, and should be used for handling ignore files. It's created /// This reads and compiles ignore files, and should be used for handling ignore files. It's created

View file

@ -2,22 +2,6 @@
## Next (YYYY-MM-DD) ## Next (YYYY-MM-DD)
## v5.0.0 (2024-10-14)
- Deps: nix 0.29
## v4.1.0 (2024-04-28)
- Feature: non-recursive watches with `WatchedPath::non_recursive()`
- Fix: `config.pathset()` now preserves `WatchedPath` attributes
- Refactor: move `WatchedPath` to the root of the crate (old path remains as re-export for now)
## v4.0.0 (2024-04-20)
- Deps: replace command-group with process-wrap (in supervisor, but has flow-on effects)
- Deps: miette 7
- Deps: nix 0.28
## v3.0.1 (2023-11-29) ## v3.0.1 (2023-11-29)
- Deps: watchexec-events and watchexec-signals after major bump and yank - Deps: watchexec-events and watchexec-signals after major bump and yank

View file

@ -1,8 +1,8 @@
[package] [package]
name = "watchexec" name = "watchexec"
version = "5.0.0" version = "3.0.1"
authors = ["Félix Saparelli <felix@passcod.name>", "Matt Green <mattgreenrocks@gmail.com>"] authors = ["Matt Green <mattgreenrocks@gmail.com>", "Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0" license = "Apache-2.0"
description = "Library to execute commands in response to file modifications" description = "Library to execute commands in response to file modifications"
keywords = ["watcher", "filesystem", "watchexec"] keywords = ["watcher", "filesystem", "watchexec"]
@ -20,34 +20,34 @@ async-priority-channel = "0.2.0"
async-recursion = "1.0.5" async-recursion = "1.0.5"
atomic-take = "1.0.0" atomic-take = "1.0.0"
futures = "0.3.29" futures = "0.3.29"
miette = "7.2.0" miette = "5.3.0"
notify = "6.0.0" notify = "6.0.0"
once_cell = "1.8.0" once_cell = "1.8.0"
thiserror = "1.0.44" thiserror = "1.0.44"
normalize-path = "0.2.0" normalize-path = "0.2.0"
[dependencies.process-wrap] [dependencies.command-group]
version = "8.0.0" version = "5.0.1"
features = ["tokio1"] features = ["with-tokio"]
[dependencies.watchexec-events] [dependencies.watchexec-events]
version = "4.0.0" version = "2.0.1"
path = "../events" path = "../events"
[dependencies.watchexec-signals] [dependencies.watchexec-signals]
version = "4.0.0" version = "2.1.0"
path = "../signals" path = "../signals"
[dependencies.watchexec-supervisor] [dependencies.watchexec-supervisor]
version = "3.0.0" version = "1.0.3"
path = "../supervisor" path = "../supervisor"
[dependencies.ignore-files] [dependencies.ignore-files]
version = "3.0.2" version = "2.1.0"
path = "../ignore-files" path = "../ignore-files"
[dependencies.project-origins] [dependencies.project-origins]
version = "1.4.0" version = "1.3.0"
path = "../project-origins" path = "../project-origins"
[dependencies.tokio] [dependencies.tokio]
@ -67,7 +67,7 @@ version = "0.1.40"
features = ["log"] features = ["log"]
[target.'cfg(unix)'.dependencies.nix] [target.'cfg(unix)'.dependencies.nix]
version = "0.29.0" version = "0.27.1"
features = ["signal"] features = ["signal"]
[dev-dependencies.tracing-subscriber] [dev-dependencies.tracing-subscriber]

View file

@ -68,7 +68,7 @@ async fn main() -> Result<()> {
job.set_spawn_hook(|cmd, _| { job.set_spawn_hook(|cmd, _| {
use nix::sys::signal::{sigprocmask, SigSet, SigmaskHow, Signal}; use nix::sys::signal::{sigprocmask, SigSet, SigmaskHow, Signal};
unsafe { unsafe {
cmd.command_mut().pre_exec(|| { cmd.pre_exec(|| {
let mut newset = SigSet::empty(); let mut newset = SigSet::empty();
newset.add(Signal::SIGINT); newset.add(Signal::SIGINT);
sigprocmask(SigmaskHow::SIG_BLOCK, Some(&newset), None)?; sigprocmask(SigmaskHow::SIG_BLOCK, Some(&newset), None)?;

View file

@ -47,7 +47,7 @@ async fn main() -> Result<()> {
job.set_spawn_hook(|cmd, _| { job.set_spawn_hook(|cmd, _| {
use nix::sys::signal::{sigprocmask, SigSet, SigmaskHow, Signal}; use nix::sys::signal::{sigprocmask, SigSet, SigmaskHow, Signal};
unsafe { unsafe {
cmd.command_mut().pre_exec(|| { cmd.pre_exec(|| {
let mut newset = SigSet::empty(); let mut newset = SigSet::empty();
newset.add(Signal::SIGINT); newset.add(Signal::SIGINT);
sigprocmask(SigmaskHow::SIG_BLOCK, Some(&newset), None)?; sigprocmask(SigmaskHow::SIG_BLOCK, Some(&newset), None)?;

View file

@ -1,6 +1,6 @@
pre-release-commit-message = "release: lib v{{version}}" pre-release-commit-message = "release: lib v{{version}}"
tag-prefix = "watchexec-" tag-prefix = "lib-"
tag-message = "watchexec {{version}}" tag-message = "watchexec-lib {{version}}"
[[pre-release-replacements]] [[pre-release-replacements]]
file = "CHANGELOG.md" file = "CHANGELOG.md"

View file

@ -1,6 +1,6 @@
//! Configuration and builders for [`crate::Watchexec`]. //! Configuration and builders for [`crate::Watchexec`].
use std::{future::Future, pin::pin, sync::Arc, time::Duration}; use std::{future::Future, path::Path, pin::pin, sync::Arc, time::Duration};
use tokio::sync::Notify; use tokio::sync::Notify;
use tracing::{debug, trace}; use tracing::{debug, trace};
@ -195,9 +195,9 @@ impl Config {
pub fn pathset<I, P>(&self, pathset: I) -> &Self pub fn pathset<I, P>(&self, pathset: I) -> &Self
where where
I: IntoIterator<Item = P>, I: IntoIterator<Item = P>,
P: Into<WatchedPath>, P: AsRef<Path>,
{ {
let pathset = pathset.into_iter().map(|p| p.into()).collect(); let pathset = pathset.into_iter().map(|p| p.as_ref().into()).collect();
debug!(?pathset, "Config: pathset"); debug!(?pathset, "Config: pathset");
self.pathset.replace(pathset); self.pathset.replace(pathset);
self.signal_change() self.signal_change()

View file

@ -1,13 +1,13 @@
//! Watchexec: a library for utilities and programs which respond to (file, signal, etc) events //! Watchexec: a library for utilities and programs which respond to (file, signal, etc) events
//! primarily by launching or managing other programs. //! primarily by launching or managing other programs.
//! //!
//! Also see the CLI tool: <https://github.com/watchexec/watchexec> //! Also see the CLI tool: <https://watchexec.github.io/>
//! //!
//! This library is powered by [Tokio](https://tokio.rs). //! This library is powered by [Tokio](https://tokio.rs).
//! //!
//! The main way to use this crate involves constructing a [`Watchexec`] around a [`Config`], then //! The main way to use this crate involves constructing a [`Watchexec`] around a [`Config`], then
//! running it. Handlers (defined in [`Config`]) are used to hook into Watchexec at various points. //! running it. [`Handler`][handler::Handler]s are used to hook into Watchexec at various points.
//! The config can be changed at any time with the `config` field on your [`Watchexec`] instance. //! The config can be changed at any time with the [`Watchexec::reconfigure()`] method.
//! //!
//! It's recommended to use the [miette] erroring library in applications, but all errors implement //! It's recommended to use the [miette] erroring library in applications, but all errors implement
//! [`std::error::Error`] so your favourite error handling library can of course be used. //! [`std::error::Error`] so your favourite error handling library can of course be used.
@ -42,8 +42,8 @@
//! ``` //! ```
//! //!
//! Alternatively, you can use the modules exposed by the crate and the external crates such as //! Alternatively, you can use the modules exposed by the crate and the external crates such as
//! [`notify`], [`clearscreen`], [`process_wrap`]... to build something more advanced, at the cost //! [`ClearScreen`][clearscreen] and [Command Group][command_group] to build something more advanced,
//! of reimplementing the glue code. //! at the cost of reimplementing the glue code.
//! //!
//! Note that the library generates a _lot_ of debug messaging with [tracing]. **You should not //! Note that the library generates a _lot_ of debug messaging with [tracing]. **You should not
//! enable printing even `error`-level log messages for this crate unless it's for debugging.** //! enable printing even `error`-level log messages for this crate unless it's for debugging.**
@ -68,13 +68,11 @@ pub mod config;
mod id; mod id;
mod late_join_set; mod late_join_set;
mod watched_path;
mod watchexec; mod watchexec;
#[doc(inline)] #[doc(inline)]
pub use crate::{ pub use crate::{
id::Id, id::Id,
watched_path::WatchedPath,
watchexec::{ErrorHook, Watchexec}, watchexec::{ErrorHook, Watchexec},
}; };

Some files were not shown because too many files have changed in this diff Show more