Compare commits

...

24 Commits

Author SHA1 Message Date
Félix Saparelli fa7fcabc57
Merge c5a1cca81c into d388a280f0 2024-04-23 14:44:54 +12:00
Félix Saparelli d388a280f0
ci: more build improvements (for next time) 2024-04-21 02:11:37 +12:00
Félix Saparelli bb97f71c8c
gha: probably the most frustrating syntax in the world 2024-04-21 02:04:56 +12:00
Félix Saparelli 953fa89dd9
even better 2024-04-21 02:02:57 +12:00
Félix Saparelli 0ef87821f2
Run manpage and completions in release when we've already built in releases 2024-04-21 01:57:09 +12:00
Félix Saparelli 62af5dd868
Fix dist manifest 2024-04-21 01:52:11 +12:00
Félix Saparelli 4497aaf515
Fix release builder 2024-04-21 01:38:11 +12:00
Félix Saparelli a63864c5f2
chore: Release 2024-04-21 01:18:24 +12:00
Félix Saparelli ee815ba166
chore: Release 2024-04-21 01:06:46 +12:00
Félix Saparelli d6138b9961
chore: Release 2024-04-21 01:04:18 +12:00
Félix Saparelli f73d388d18
Changelogs for filterers 2024-04-21 01:03:58 +12:00
Félix Saparelli 86d6c7d448
Remove more PR machinery 2024-04-21 01:02:40 +12:00
Félix Saparelli d317540fd3
chore: Release 2024-04-21 01:00:28 +12:00
Félix Saparelli 9d91c51651
chore: Release 2024-04-21 00:56:27 +12:00
Félix Saparelli 96480cb588
chore: Release 2024-04-21 00:55:14 +12:00
Félix Saparelli fd5afb8b3a
Add --wrap-process (#822) 2024-04-20 12:39:28 +00:00
Félix Saparelli e1cef25d7f
Fix watchexec-events tests 2024-04-21 00:36:59 +12:00
Félix Saparelli 22b58a66ab
Remove tagged filterer 2024-04-21 00:32:01 +12:00
Félix Saparelli 1c47ffbe1a
Update release.toml config 2024-04-21 00:30:56 +12:00
Félix Saparelli 48ff7ec68b
Remove PR machinery 2024-04-21 00:28:06 +12:00
Félix Saparelli 4023bf7124
chore: Release 2024-04-21 00:21:04 +12:00
Félix Saparelli 8864811e79
Fix watchexec-events self-dependency 2024-04-21 00:19:11 +12:00
Félix Saparelli c5a1cca81c
WIP: listen for SIGSTOP in lib 2023-12-09 23:20:03 +13:00
Félix Saparelli 7ea7d2629d
signals: add first-class SIGSTOP etc 2023-12-09 23:08:18 +13:00
66 changed files with 253 additions and 3335 deletions

View File

@ -4,7 +4,6 @@
app_name: "watchexec",
app_version: $version,
changelog_title: "CLI \($version)",
changelog_body: $changelog,
artifacts: [ $files | split("\n") | .[] | {
name: .,
kind: (if (. | test("[.](deb|rpm)$")) then "installer" else "executable-zip" end),

View File

@ -1,7 +1,6 @@
name: CLI Release
on:
workflow_call:
workflow_dispatch:
push:
tags:
@ -17,8 +16,6 @@ jobs:
runs-on: ubuntu-latest
outputs:
cli_version: ${{ steps.version.outputs.cli_version }}
release_notes: ${{ fromJSON(steps.notes.outputs.notes_json || 'null') }}
announce: ${{ steps.announce.outputs.announce || '' }}
steps:
- uses: actions/checkout@v4
- name: Extract version
@ -36,40 +33,6 @@ jobs:
echo "cli_version=$version" >> $GITHUB_OUTPUT
- name: Extract release notes
if: github.event.head_commit.message
id: notes
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_REPO: ${{ github.repository }}
release_commit: ${{ github.event.head_commit.message }}
run: |
set -x
set +eo pipefail
if [[ -z "$release_commit" ]]; then
echo "notes_json=null" >> $GITHUB_OUTPUT
exit
fi
release_pr=$(head -n1 <<< "$release_commit" | grep -oP '(?<=[(]#)\d+(?=[)])')
if [[ -z "$release_pr" ]]; then
echo "notes_json=null" >> $GITHUB_OUTPUT
exit
fi
gh \
pr --repo "$GITHUB_REPO" \
view "$release_pr" \
--json body \
--jq '"notes_json=\((.body | split("### Release notes")[1] // "") | tojson)"' \
>> $GITHUB_OUTPUT
- name: Make a new announcement post
id: announce
if: endsWith(steps.version.outputs.cli_version, '.0')
run: echo "announce=Announcements" >> $GITHUB_OUTPUT
build:
strategy:
matrix:
@ -233,19 +196,22 @@ jobs:
with:
tool: cross
- name: Build (cargo)
if: "!matrix.cross"
run: cargo build --package watchexec-cli --release --locked --target ${{ matrix.target }}
- name: Build (cross)
if: matrix.cross
run: cross build --package watchexec-cli --release --locked --target ${{ matrix.target }}
- name: Build
run: ${{ matrix.cross && 'cross' || 'cargo' }} build --package watchexec-cli --release --locked --target ${{ matrix.target }}
- name: Make manpage
run: cargo run -p watchexec-cli -- --manual > doc/watchexec.1
run: |
cargo run -p watchexec-cli \
${{ (!matrix.cross) && '--release --target' || '' }} \
${{ (!matrix.cross) && matrix.target || '' }} \
--locked -- --manual > doc/watchexec.1
- name: Make completions
run: bin/completions
run: |
bin/completions \
${{ (!matrix.cross) && '--release --target' || '' }} \
${{ (!matrix.cross) && matrix.target || '' }} \
--locked
- name: Package
shell: bash
@ -285,7 +251,7 @@ jobs:
- uses: actions/upload-artifact@v4
with:
name: builds
name: ${{ matrix.name }}
retention-days: 1
path: |
watchexec-*.tar.xz
@ -310,13 +276,12 @@ jobs:
- uses: actions/download-artifact@v4
with:
name: builds
merge-multiple: true
- name: Dist manifest
run: |
jq -ncf .github/workflows/dist-manifest.jq \
--arg version "${{ needs.info.outputs.cli_version }}" \
--arg changelog "${{ needs.info.outputs.release_notes }}" \
--arg files "$(ls watchexec-*)" \
> dist-manifest.json
@ -338,9 +303,7 @@ jobs:
with:
tag_name: v${{ needs.info.outputs.cli_version }}
name: CLI v${{ needs.info.outputs.cli_version }}
body: ${{ needs.info.outputs.release_notes }}
append_body: true
discussion_category_name: ${{ needs.info.outputs.announce }}
files: |
dist-manifest.json
watchexec-*.tar.xz

View File

@ -1,61 +0,0 @@
<!-- <%- JSON.stringify({ "release-pr": { v2: { crates, version } } }) %> -->
This is a release PR for **<%= crate.name %>** version **<%= version.actual %>**<%
if (version.actual != version.desired) {
%> (performing a <%= version.desired %> bump).<%
} else {
%>.<%
}
%>
**Use squash merge.**
<% if (crate.name == "watchexec-cli") { %>
Upon merging, this will automatically create the tag `v<%= version.actual %>`, build the CLI, and create a GitHub release.
You will still need to manually publish the cargo crate:
```
$ git switch main
$ git pull
$ git switch --detach v<%= version.actual %>
$ cargo publish -p <%= crate.name %>
```
<% } else { %>
Remember to review the crate's changelog!
Upon merging, this will create the tag `<%= crate.name %>-v<%= version.actual %>`.
You will still need to manually publish the cargo crate:
```
$ git switch main
$ git pull
$ git switch --detach <%= crate.name %>-v<%= version.actual %>
$ cargo publish -p <%= crate.name %>
```
<% } %>
To trigger builds initially: either close and then immediately re-open this PR once, or push to the branch (perhaps with edits to the README.md or CHANGELOG.md!).
<% if (pr.releaseNotes) { %>
---
_Edit release notes into the section below:_
<!-- do not change or remove this heading -->
<% if (crate.name == "watchexec-cli") { %>
### Release notes
_Software development often involves running the same commands over and over. Boring! Watchexec is a simple, standalone tool that watches a path and runs a command whenever it detects modifications. Install it today with [`cargo-binstall watchexec-cli`](https://github.com/cargo-bins/cargo-binstall), from the binaries below, find it [in your favourite package manager](https://github.com/watchexec/watchexec/blob/main/doc/packages.md), or build it from source with `cargo install watchexec-cli`._
#### In this release:
-
#### Other changes:
-
<% } else { %>
### Changelog
-
<% } %>
<% } %>

View File

@ -1,54 +0,0 @@
name: Open a release PR
on:
workflow_dispatch:
inputs:
crate:
description: Crate to release
required: true
type: choice
options:
- cli
- lib
- bosion
- events
- ignore-files
- project-origins
- signals
- supervisor
- filterer/globset
- filterer/ignore
- filterer/tagged
version:
description: Version to release
required: true
type: string
default: patch
jobs:
make-release-pr:
permissions:
id-token: write # Enable OIDC
pull-requests: write
contents: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: chainguard-dev/actions/setup-gitsign@main
- name: Install cargo-release
uses: taiki-e/install-action@v2
with:
tool: cargo-release
- uses: cargo-bins/release-pr@v2
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
version: ${{ inputs.version }}
crate-path: crates/${{ inputs.crate }}
pr-release-notes: ${{ inputs.crate == 'cli' }}
pr-label: release
pr-template-file: .github/workflows/release-pr.ejs
env:
GITSIGN_LOG: /tmp/gitsign.log
- run: cat /tmp/gitsign.log
if: ${{ failure() }}

View File

@ -1,45 +0,0 @@
name: Tag a release
on:
push:
branches:
- main
tags-ignore:
- "*"
jobs:
make-tag:
runs-on: ubuntu-latest
# because we control the release PR title and only allow squashes,
# PRs that are named `release: {crate-name} v{version}` will get tagged!
# the commit message will look like: `release: {crate-name} v{version} (#{pr-number})`
if: "startsWith(github.event.head_commit.message, 'release: ')"
steps:
- name: Extract tag from commit message
env:
COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
run: |
set -euxo pipefail
message="$(head -n1 <<< "$COMMIT_MESSAGE")"
crate="$(cut -d ' ' -f 2 <<< "${message}")"
version="$(cut -d ' ' -f 3 <<< "${message}")"
if [[ "$crate" == "watchexec-cli" ]]; then
echo "CUSTOM_TAG=${version}" >> $GITHUB_ENV
else
echo "CUSTOM_TAG=${crate}-${version}" >> $GITHUB_ENV
fi
- uses: actions/checkout@v4
- name: Push release tag
id: tag_version
uses: mathieudutour/github-tag-action@v6.2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
custom_tag: ${{ env.CUSTOM_TAG }}
tag_prefix: ''
release-cli:
needs: make-tag
if: "startsWith(github.event.head_commit.message, 'release: watchexec-cli v')"
uses: ./.github/workflows/release-cli.yml
secrets: inherit

View File

@ -68,9 +68,11 @@ jobs:
key: ${{ runner.os }}-target-stable-${{ hashFiles('**/Cargo.lock') }}
- name: Run test suite
run: cargo test ${{ env.flags }}
run: cargo test
- name: Run watchexec-events integration tests
run: cargo test -p watchexec-events -F serde
- name: Check that CLI runs
run: cargo run ${{ env.flags }} -p watchexec-cli -- -1 echo
run: cargo run -p watchexec-cli -- -1 echo
- name: Install coreutils on mac
if: ${{ matrix.platform == 'macos' }}
@ -89,7 +91,7 @@ jobs:
shell: bash
- name: Generate manpage
run: cargo run ${{ env.flags }} -p watchexec-cli -- --manual > doc/watchexec.1
run: cargo run -p watchexec-cli -- --manual > doc/watchexec.1
- name: Check that manpage is up to date
run: git diff --exit-code -- doc/

View File

@ -3,8 +3,8 @@ message: |
If you use this software, please cite it using these metadata.
title: "Watchexec: a tool to react to filesystem changes, and a crate ecosystem to power it"
version: "1.25.1"
date-released: 2024-01-05
version: "2.0.0"
date-released: 2024-04-20
repository-code: https://github.com/watchexec/watchexec
license: Apache-2.0

47
Cargo.lock generated
View File

@ -1988,7 +1988,7 @@ dependencies = [
[[package]]
name = "ignore-files"
version = "2.1.0"
version = "3.0.0"
dependencies = [
"dunce",
"futures",
@ -3800,15 +3800,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "unicase"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
dependencies = [
"version_check",
]
[[package]]
name = "unicode-bidi"
version = "0.3.15"
@ -4004,7 +3995,7 @@ checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
[[package]]
name = "watchexec"
version = "3.0.1"
version = "4.0.0"
dependencies = [
"async-priority-channel",
"async-recursion",
@ -4029,7 +4020,7 @@ dependencies = [
[[package]]
name = "watchexec-cli"
version = "1.25.1"
version = "2.0.0"
dependencies = [
"ahash",
"argfile",
@ -4081,20 +4072,19 @@ dependencies = [
[[package]]
name = "watchexec-events"
version = "2.0.1"
version = "3.0.0"
dependencies = [
"nix 0.28.0",
"notify",
"serde",
"serde_json",
"snapbox",
"watchexec-events",
"watchexec-signals",
]
[[package]]
name = "watchexec-filterer-globset"
version = "3.0.0"
version = "4.0.0"
dependencies = [
"ignore",
"ignore-files",
@ -4108,7 +4098,7 @@ dependencies = [
[[package]]
name = "watchexec-filterer-ignore"
version = "3.0.1"
version = "4.0.0"
dependencies = [
"dunce",
"ignore",
@ -4123,29 +4113,6 @@ dependencies = [
"watchexec-signals",
]
[[package]]
name = "watchexec-filterer-tagged"
version = "2.0.0"
dependencies = [
"futures",
"globset",
"ignore",
"ignore-files",
"miette",
"nom",
"project-origins",
"regex",
"thiserror",
"tokio",
"tracing",
"tracing-subscriber",
"unicase",
"watchexec",
"watchexec-events",
"watchexec-filterer-ignore",
"watchexec-signals",
]
[[package]]
name = "watchexec-signals"
version = "3.0.0"
@ -4158,7 +4125,7 @@ dependencies = [
[[package]]
name = "watchexec-supervisor"
version = "1.0.3"
version = "2.0.0"
dependencies = [
"boxcar",
"futures",

View File

@ -8,7 +8,6 @@ members = [
"crates/supervisor",
"crates/filterer/globset",
"crates/filterer/ignore",
"crates/filterer/tagged",
"crates/bosion",
"crates/ignore-files",
"crates/project-origins",

View File

@ -1,7 +1,7 @@
#!/bin/sh
cargo run -p watchexec-cli -- --completions bash > completions/bash
cargo run -p watchexec-cli -- --completions elvish > completions/elvish
cargo run -p watchexec-cli -- --completions fish > completions/fish
cargo run -p watchexec-cli -- --completions nu > completions/nu
cargo run -p watchexec-cli -- --completions powershell > completions/powershell
cargo run -p watchexec-cli -- --completions zsh > completions/zsh
cargo run -p watchexec-cli $* -- --completions bash > completions/bash
cargo run -p watchexec-cli $* -- --completions elvish > completions/elvish
cargo run -p watchexec-cli $* -- --completions fish > completions/fish
cargo run -p watchexec-cli $* -- --completions nu > completions/nu
cargo run -p watchexec-cli $* -- --completions powershell > completions/powershell
cargo run -p watchexec-cli $* -- --completions zsh > completions/zsh

View File

@ -19,7 +19,7 @@ _watchexec() {
case "${cmd}" in
watchexec)
opts="-w -c -o -r -s -d -p -n -E -1 -N -q -e -f -j -i -v -h -V --watch --clear --on-busy-update --restart --signal --stop-signal --stop-timeout --map-signal --debounce --stdin-quit --no-vcs-ignore --no-project-ignore --no-global-ignore --no-default-ignore --no-discover-ignore --ignore-nothing --postpone --delay-run --poll --shell --no-environment --emit-events-to --only-emit-events --env --no-process-group --notify --color --timings --quiet --bell --project-origin --workdir --exts --filter --filter-file --filter-prog --ignore --ignore-file --fs-events --no-meta --print-events --verbose --log-file --manual --completions --help --version [COMMAND]..."
opts="-w -c -o -r -s -d -p -n -E -1 -N -q -e -f -j -i -v -h -V --watch --clear --on-busy-update --restart --signal --stop-signal --stop-timeout --map-signal --debounce --stdin-quit --no-vcs-ignore --no-project-ignore --no-global-ignore --no-default-ignore --no-discover-ignore --ignore-nothing --postpone --delay-run --poll --shell --no-environment --emit-events-to --only-emit-events --env --no-process-group --wrap-process --notify --color --timings --quiet --bell --project-origin --workdir --exts --filter --filter-file --filter-prog --ignore --ignore-file --fs-events --no-meta --print-events --verbose --log-file --manual --completions --help --version [COMMAND]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
@ -101,6 +101,10 @@ _watchexec() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--wrap-process)
COMPREPLY=($(compgen -W "group session none" -- "${cur}"))
return 0
;;
--color)
COMPREPLY=($(compgen -W "auto always never" -- "${cur}"))
return 0

View File

@ -37,6 +37,7 @@ set edit:completion:arg-completer[watchexec] = {|@words|
cand --emit-events-to 'Configure event emission'
cand -E 'Add env vars to the command'
cand --env 'Add env vars to the command'
cand --wrap-process 'Configure how the process is wrapped'
cand --color 'When to use terminal colours'
cand --project-origin 'Set the project origin'
cand --workdir 'Set the working directory'

View File

@ -11,6 +11,7 @@ complete -c watchexec -l poll -d 'Poll for filesystem changes' -r
complete -c watchexec -l shell -d 'Use a different shell' -r
complete -c watchexec -l emit-events-to -d 'Configure event emission' -r -f -a "{environment '',stdio '',file '',json-stdio '',json-file '',none ''}"
complete -c watchexec -s E -l env -d 'Add env vars to the command' -r
complete -c watchexec -l wrap-process -d 'Configure how the process is wrapped' -r -f -a "{group '',session '',none ''}"
complete -c watchexec -l color -d 'When to use terminal colours' -r -f -a "{auto '',always '',never ''}"
complete -c watchexec -l project-origin -d 'Set the project origin' -r -f -a "(__fish_complete_directories)"
complete -c watchexec -l workdir -d 'Set the working directory' -r -f -a "(__fish_complete_directories)"

View File

@ -12,6 +12,10 @@ module completions {
[ "environment" "stdio" "file" "json-stdio" "json-file" "none" ]
}
def "nu-complete watchexec wrap_process" [] {
[ "group" "session" "none" ]
}
def "nu-complete watchexec color" [] {
[ "auto" "always" "never" ]
}
@ -53,6 +57,7 @@ module completions {
--only-emit-events # Only emit events to stdout, run no commands
--env(-E): string # Add env vars to the command
--no-process-group # Don't use a process group
--wrap-process: string@"nu-complete watchexec wrap_process" # Configure how the process is wrapped
-1 # Testing only: exit Watchexec after the first run
--notify(-N) # Alert when commands start and end
--color: string@"nu-complete watchexec color" # When to use terminal colours

View File

@ -40,6 +40,7 @@ Register-ArgumentCompleter -Native -CommandName 'watchexec' -ScriptBlock {
[CompletionResult]::new('--emit-events-to', 'emit-events-to', [CompletionResultType]::ParameterName, 'Configure event emission')
[CompletionResult]::new('-E', 'E ', [CompletionResultType]::ParameterName, 'Add env vars to the command')
[CompletionResult]::new('--env', 'env', [CompletionResultType]::ParameterName, 'Add env vars to the command')
[CompletionResult]::new('--wrap-process', 'wrap-process', [CompletionResultType]::ParameterName, 'Configure how the process is wrapped')
[CompletionResult]::new('--color', 'color', [CompletionResultType]::ParameterName, 'When to use terminal colours')
[CompletionResult]::new('--project-origin', 'project-origin', [CompletionResultType]::ParameterName, 'Set the project origin')
[CompletionResult]::new('--workdir', 'workdir', [CompletionResultType]::ParameterName, 'Set the working directory')

View File

@ -34,6 +34,7 @@ _watchexec() {
'--emit-events-to=[Configure event emission]:MODE:(environment stdio file json-stdio json-file none)' \
'*-E+[Add env vars to the command]:KEY=VALUE: ' \
'*--env=[Add env vars to the command]:KEY=VALUE: ' \
'--wrap-process=[Configure how the process is wrapped]:MODE:(group session none)' \
'--color=[When to use terminal colours]:MODE:(auto always never)' \
'--project-origin=[Set the project origin]:DIRECTORY:_files -/' \
'--workdir=[Set the working directory]:DIRECTORY:_files -/' \

View File

@ -1,6 +1,6 @@
[package]
name = "watchexec-cli"
version = "1.25.1"
version = "2.0.0"
authors = ["Félix Saparelli <felix@passcod.name>", "Matt Green <mattgreenrocks@gmail.com>"]
license = "Apache-2.0"
@ -68,7 +68,7 @@ features = ["log", "env_logger"]
optional = true
[dependencies.ignore-files]
version = "2.1.0"
version = "3.0.0"
path = "../ignore-files"
[dependencies.miette]
@ -84,11 +84,11 @@ version = "1.3.0"
path = "../project-origins"
[dependencies.watchexec]
version = "3.0.1"
version = "4.0.0"
path = "../lib"
[dependencies.watchexec-events]
version = "2.0.1"
version = "3.0.0"
path = "../events"
features = ["serde"]
@ -97,7 +97,7 @@ version = "3.0.0"
path = "../signals"
[dependencies.watchexec-filterer-globset]
version = "3.0.0"
version = "4.0.0"
path = "../filterer/globset"
[dependencies.tokio]

View File

@ -1,5 +1,5 @@
pre-release-commit-message = "release: cli v{{version}}"
tag-prefix = "cli-"
tag-prefix = ""
tag-message = "watchexec {{version}}"
[[pre-release-replacements]]

View File

@ -636,12 +636,31 @@ pub struct Args {
/// By default, Watchexec will run the command in a process group, so that signals and
/// terminations are sent to all processes in the group. Sometimes that's not what you want, and
/// you can disable the behaviour with this option.
///
/// Deprecated, use '--wrap-process=none' instead.
#[arg(
long,
help_heading = OPTSET_COMMAND,
)]
pub no_process_group: bool,
/// Configure how the process is wrapped
///
/// By default, Watchexec will run the command in a process group in Unix, and in a Job Object
/// in Windows.
///
/// Some Unix programs prefer running in a session, while others do not work in a process group.
///
/// Use 'group' to use a process group, 'session' to use a process session, and 'none' to run
/// the command directly. On Windows, either of 'group' or 'session' will use a Job Object.
#[arg(
long,
help_heading = OPTSET_COMMAND,
value_name = "MODE",
default_value = "group",
)]
pub wrap_process: WrapMode,
/// Testing only: exit Watchexec after the first run
#[arg(short = '1', hide = true)]
pub once: bool,
@ -999,6 +1018,14 @@ pub enum OnBusyUpdate {
Signal,
}
#[derive(Clone, Copy, Debug, Default, ValueEnum)]
pub enum WrapMode {
#[default]
Group,
Session,
None,
}
#[derive(Clone, Copy, Debug, Default, ValueEnum)]
pub enum ClearMode {
#[default]
@ -1171,6 +1198,10 @@ pub async fn get_args() -> Result<Args> {
args.emit_events_to = EmitEvents::None;
}
if args.no_process_group {
args.wrap_process = WrapMode::None;
}
if args.filter_fs_meta {
args.filter_fs_events = vec![
FsEvent::Create,

View File

@ -32,7 +32,7 @@ use watchexec_events::{Event, Keyboard, ProcessEnd, Tag};
use watchexec_signals::Signal;
use crate::{
args::{Args, ClearMode, ColourMode, EmitEvents, OnBusyUpdate, SignalMapping},
args::{Args, ClearMode, ColourMode, EmitEvents, OnBusyUpdate, SignalMapping, WrapMode},
state::RotatingTempFile,
};
use crate::{emits::events_to_simple_format, state::State};
@ -545,7 +545,8 @@ fn interpret_command_args(args: &Args) -> Result<Arc<Command>> {
Ok(Arc::new(Command {
program,
options: SpawnOptions {
grouped: !args.no_process_group,
grouped: matches!(args.wrap_process, WrapMode::Group),
session: matches!(args.wrap_process, WrapMode::Session),
..Default::default()
},
}))

View File

@ -3,7 +3,7 @@
<assemblyIdentity
type="win32"
name="Watchexec.Cli.watchexec"
version="1.25.1.0"
version="2.0.0.0"
/>
<trustInfo>

View File

@ -2,6 +2,8 @@
## Next (YYYY-MM-DD)
## v3.0.0 (2024-04-20)
- Deps: nix 0.28
## v2.0.1 (2023-11-29)

View File

@ -1,6 +1,6 @@
[package]
name = "watchexec-events"
version = "2.0.1"
version = "3.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0 OR MIT"
@ -33,7 +33,6 @@ version = "0.28.0"
features = ["signal"]
[dev-dependencies]
watchexec-events = { version = "*", features = ["serde"], path = "." }
snapbox = "0.5.9"
serde_json = "1.0.107"

View File

@ -1,5 +1,5 @@
pre-release-commit-message = "release: events v{{version}}"
tag-prefix = "events-"
tag-prefix = "watchexec-events-"
tag-message = "watchexec-events {{version}}"
[[pre-release-replacements]]

View File

@ -1,3 +1,5 @@
#![cfg(feature = "serde")]
use std::num::{NonZeroI32, NonZeroI64};
use snapbox::{assert_eq, file};

View File

@ -2,6 +2,10 @@
## Next (YYYY-MM-DD)
## v4.0.0 (2024-04-20)
- Deps: watchexec 4
## v3.0.0 (2024-01-01)
- Deps: `watchexec-filterer-ignore` and `ignore-files`

View File

@ -1,6 +1,6 @@
[package]
name = "watchexec-filterer-globset"
version = "3.0.0"
version = "4.0.0"
authors = ["Matt Green <mattgreenrocks@gmail.com>", "Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0"
@ -20,19 +20,19 @@ ignore = "0.4.18"
tracing = "0.1.40"
[dependencies.ignore-files]
version = "2.1.0"
version = "3.0.0"
path = "../../ignore-files"
[dependencies.watchexec]
version = "3.0.1"
version = "4.0.0"
path = "../../lib"
[dependencies.watchexec-events]
version = "2.0.1"
version = "3.0.0"
path = "../../events"
[dependencies.watchexec-filterer-ignore]
version = "3.0.1"
version = "4.0.0"
path = "../ignore"
[dev-dependencies]

View File

@ -1,5 +1,5 @@
pre-release-commit-message = "release: filterer-globset v{{version}}"
tag-prefix = "filterer-globset-"
tag-prefix = "watchexec-filterer-globset-"
tag-message = "watchexec-filterer-globset {{version}}"
[[pre-release-replacements]]

View File

@ -2,6 +2,10 @@
## Next (YYYY-MM-DD)
## v4.0.0 (2024-04-20)
- Deps: watchexec 4
## v3.0.1 (2024-01-04)
- Normalise paths on all platforms (via `normalize-path`).

View File

@ -1,6 +1,6 @@
[package]
name = "watchexec-filterer-ignore"
version = "3.0.1"
version = "4.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0"
@ -22,15 +22,15 @@ normalize-path = "0.2.1"
tracing = "0.1.40"
[dependencies.ignore-files]
version = "2.1.0"
version = "3.0.0"
path = "../../ignore-files"
[dependencies.watchexec]
version = "3.0.1"
version = "4.0.0"
path = "../../lib"
[dependencies.watchexec-events]
version = "2.0.1"
version = "3.0.0"
path = "../../events"
[dependencies.watchexec-signals]

View File

@ -1,5 +1,5 @@
pre-release-commit-message = "release: filterer-ignore v{{version}}"
tag-prefix = "filterer-ignore-"
tag-prefix = "watchexec-filterer-ignore-"
tag-message = "watchexec-filterer-ignore {{version}}"
[[pre-release-replacements]]

View File

@ -1,32 +0,0 @@
# Changelog
## Next (YYYY-MM-DD)
- Deps: miette 7
## v2.0.0 (2024-01-01)
- Depend on `watchexec-events` instead of the `watchexec` re-export.
## v1.0.0 (2023-12-10)
- Officially deprecate (crate is now unmaintained).
- Depend on `watchexec-events` instead of the `watchexec` re-export.
- Remove error diagnostic codes.
- Deps: upgrade Tokio requirement to 1.32.
## v0.3.0 (2023-03-18)
- Ditch MSRV policy. The `rust-version` indication will remain, for the minimum estimated Rust version for the code features used in the crate's own code, but dependencies may have already moved on. From now on, only latest stable is assumed and tested for. ([#510](https://github.com/watchexec/watchexec/pull/510))
## v0.2.0 (2023-01-09)
- MSRV: bump to 1.61.0
## v0.1.1 (2022-09-07)
- Deps: update miette to 5.3.0
## v0.1.0 (2022-06-23)
- Initial release as a separate crate.

View File

@ -1,71 +0,0 @@
[package]
name = "watchexec-filterer-tagged"
version = "2.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0"
description = "Watchexec filterer component using tagged filters"
keywords = ["watchexec", "filterer", "tags"]
documentation = "https://docs.rs/watchexec-filterer-tagged"
homepage = "https://watchexec.github.io"
repository = "https://github.com/watchexec/watchexec"
readme = "README.md"
rust-version = "1.61.0"
edition = "2021"
[badges.maintenance]
status = "deprecated"
[dependencies]
futures = "0.3.25"
globset = "0.4.8"
ignore = "0.4.18"
miette = "7.2.0"
nom = "7.0.0"
regex = "1.5.4"
thiserror = "1.0.26"
tracing = "0.1.26"
unicase = "2.6.0"
[dependencies.ignore-files]
version = "2.1.0"
path = "../../ignore-files"
[dependencies.tokio]
version = "1.32.0"
features = [
"fs",
]
[dependencies.watchexec]
version = "3.0.1"
path = "../../lib"
[dependencies.watchexec-events]
version = "2.0.1"
path = "../../events"
[dependencies.watchexec-filterer-ignore]
version = "3.0.1"
path = "../ignore"
[dependencies.watchexec-signals]
version = "3.0.0"
path = "../../signals"
[dev-dependencies]
tracing-subscriber = "0.3.6"
[dev-dependencies.project-origins]
version = "1.3.0"
path = "../../project-origins"
[dev-dependencies.tokio]
version = "1.32.0"
features = [
"fs",
"io-std",
"sync",
]

View File

@ -1,19 +0,0 @@
[![Crates.io page](https://badgen.net/crates/v/watchexec-filterer-tagged)](https://crates.io/crates/watchexec-filterer-tagged)
[![API Docs](https://docs.rs/watchexec-filterer-tagged/badge.svg)][docs]
[![Crate license: Apache 2.0](https://badgen.net/badge/license/Apache%202.0)][license]
[![CI status](https://github.com/watchexec/watchexec/actions/workflows/check.yml/badge.svg)](https://github.com/watchexec/watchexec/actions/workflows/check.yml)
# Watchexec filterer: tagged
_Experimental filterer using tagged filters._
- **[API documentation][docs]**.
- Licensed under [Apache 2.0][license].
- Status: soft-deprecated.
The tagged filterer is not in use in the Watchexec CLI, but this crate will continue being updated
until and unless it becomes too much of a pain to do so, for third party users. It is expected that
some of the code will eventually be reused for a more generic filter crate without the tagged syntax.
[docs]: https://docs.rs/watchexec-filterer-tagged
[license]: ../../../LICENSE

View File

@ -1,10 +0,0 @@
pre-release-commit-message = "release: filterer-tagged v{{version}}"
tag-prefix = "filterer-tagged-"
tag-message = "watchexec-filterer-tagged {{version}}"
[[pre-release-replacements]]
file = "CHANGELOG.md"
search = "^## Next.*$"
replace = "## Next (YYYY-MM-DD)\n\n## v{{version}} ({{date}})"
prerelease = true
max = 1

View File

@ -1,73 +0,0 @@
use std::collections::HashMap;
use ignore::gitignore::Gitignore;
use miette::Diagnostic;
use thiserror::Error;
use tokio::sync::watch::error::SendError;
use watchexec::error::RuntimeError;
use watchexec_filterer_ignore::IgnoreFilterer;
use crate::{Filter, Matcher};
/// Errors emitted by the `TaggedFilterer`.
#[derive(Debug, Diagnostic, Error)]
#[non_exhaustive]
pub enum TaggedFiltererError {
/// Generic I/O error, with some context.
#[error("io({about}): {err}")]
IoError {
/// What it was about.
about: &'static str,
/// The I/O error which occurred.
#[source]
err: std::io::Error,
},
/// Error received when a tagged filter cannot be parsed.
#[error("cannot parse filter `{src}`: {err:?}")]
Parse {
/// The source of the filter.
#[source_code]
src: String,
/// What went wrong.
err: nom::error::ErrorKind,
},
/// Error received when a filter cannot be added or removed from a tagged filter list.
#[error("cannot {action} filter: {err:?}")]
FilterChange {
/// The action that was attempted.
action: &'static str,
/// The underlying error.
#[source]
err: SendError<HashMap<Matcher, Vec<Filter>>>,
},
/// Error received when a glob cannot be parsed.
#[error("cannot parse glob: {0}")]
GlobParse(#[source] ignore::Error),
/// Error received when a compiled globset cannot be changed.
#[error("cannot change compiled globset: {0:?}")]
GlobsetChange(#[source] SendError<Option<Gitignore>>),
/// Error received about the internal ignore filterer.
#[error("ignore filterer: {0}")]
Ignore(#[source] ignore_files::Error),
/// Error received when a new ignore filterer cannot be swapped in.
#[error("cannot swap in new ignore filterer: {0:?}")]
IgnoreSwap(#[source] SendError<IgnoreFilterer>),
}
impl From<TaggedFiltererError> for RuntimeError {
fn from(err: TaggedFiltererError) -> Self {
Self::Filterer {
kind: "tagged",
err: Box::new(err) as _,
}
}
}

View File

@ -1,93 +0,0 @@
use std::{
env,
io::Error,
path::{Path, PathBuf},
str::FromStr,
};
use ignore_files::{discover_file, IgnoreFile};
use tokio::fs::read_to_string;
use crate::{Filter, TaggedFiltererError};
/// A filter file.
///
/// This is merely a type wrapper around an [`IgnoreFile`], as the only difference is how the file
/// is parsed.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FilterFile(pub IgnoreFile);
/// Finds all filter files that apply to the current runtime.
///
/// This considers:
/// - `$XDG_CONFIG_HOME/watchexec/filter`, as well as other locations (APPDATA on Windows…)
/// - Files from the `WATCHEXEC_FILTER_FILES` environment variable (comma-separated)
///
/// All errors (permissions, etc) are collected and returned alongside the ignore files: you may
/// want to show them to the user while still using whatever ignores were successfully found. Errors
/// from files not being found are silently ignored (the files are just not returned).
pub async fn discover_files_from_environment() -> (Vec<FilterFile>, Vec<Error>) {
	let mut files = Vec::new();
	let mut errors = Vec::new();

	// Explicitly-listed files from the environment, comma-separated.
	// Skip empty segments: an unset or empty variable would otherwise yield a
	// single `""` entry and trigger a pointless lookup of an empty path.
	for path in env::var("WATCHEXEC_FILTER_FILES")
		.unwrap_or_default()
		.split(',')
		.filter(|s| !s.is_empty())
	{
		discover_file(&mut files, &mut errors, None, None, PathBuf::from(path)).await;
	}

	// Well-known per-user locations, in order of preference; only the first
	// file actually found is used (see the `break` below).
	let mut wgis = Vec::with_capacity(4);
	if let Ok(home) = env::var("XDG_CONFIG_HOME") {
		wgis.push(Path::new(&home).join("watchexec/filter"));
	}
	if let Ok(home) = env::var("APPDATA") {
		wgis.push(Path::new(&home).join("watchexec/filter"));
	}
	if let Ok(home) = env::var("USERPROFILE") {
		wgis.push(Path::new(&home).join(".watchexec/filter"));
	}
	if let Ok(home) = env::var("HOME") {
		wgis.push(Path::new(&home).join(".watchexec/filter"));
	}

	for path in wgis {
		if discover_file(&mut files, &mut errors, None, None, path).await {
			break;
		}
	}

	(files.into_iter().map(FilterFile).collect(), errors)
}
impl FilterFile {
	/// Read and parse into [`Filter`]s.
	///
	/// Empty lines and lines starting with `#` are ignored. The `applies_in` field of the
	/// [`IgnoreFile`] is used for the `in_path` field of each [`Filter`].
	///
	/// This method reads the entire file into memory.
	pub async fn load(&self) -> Result<Vec<Filter>, TaggedFiltererError> {
		let content = read_to_string(&self.0.path)
			.await
			.map_err(|err| TaggedFiltererError::IoError {
				about: "filter file load",
				err,
			})?;

		let mut filters = Vec::with_capacity(content.lines().size_hint().0);
		for line in content.lines() {
			// Skip blank lines and comment lines.
			if line.is_empty() || line.starts_with('#') {
				continue;
			}

			let mut filter = Filter::from_str(line)?;
			filter.in_path = self.0.applies_in.clone();
			filters.push(filter);
		}

		Ok(filters)
	}
}

View File

@ -1,276 +0,0 @@
use std::collections::HashSet;
use std::path::PathBuf;
use globset::Glob;
use regex::Regex;
use tokio::fs::canonicalize;
use tracing::{trace, warn};
use unicase::UniCase;
use watchexec_events::Tag;
use crate::TaggedFiltererError;
/// A tagged filter.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Filter {
	/// Path the filter applies from.
	///
	/// When `None`, paths are resolved against the filterer's workdir and origin instead.
	pub in_path: Option<PathBuf>,

	/// Which tag the filter applies to.
	pub on: Matcher,

	/// The operation to perform on the tag's value.
	pub op: Op,

	/// The pattern to match against the tag's value.
	pub pat: Pattern,

	/// If true, a positive match with this filter will override negative matches from previous
	/// filters on the same tag, and negative matches will be ignored.
	pub negate: bool,
}
impl Filter {
	/// Matches the filter against a subject.
	///
	/// This is really an internal method to the tagged filterer machinery, exposed so you can build
	/// your own filterer using the same types or the textual syntax. As such its behaviour is not
	/// guaranteed to be stable (its signature is, though).
	pub fn matches(&self, subject: impl AsRef<str>) -> Result<bool, TaggedFiltererError> {
		let subject = subject.as_ref();

		trace!(op=?self.op, pat=?self.pat, ?subject, "performing filter match");
		Ok(match (self.op, &self.pat) {
			// Exact (in)equality is case-insensitive via UniCase.
			(Op::Equal, Pattern::Exact(pat)) => UniCase::new(subject) == UniCase::new(pat),
			(Op::NotEqual, Pattern::Exact(pat)) => UniCase::new(subject) != UniCase::new(pat),
			(Op::Regex, Pattern::Regex(pat)) => pat.is_match(subject),
			(Op::NotRegex, Pattern::Regex(pat)) => !pat.is_match(subject),
			(Op::InSet, Pattern::Set(set)) => set.contains(subject),
			// A single exact value degrades to a one-element set.
			(Op::InSet, Pattern::Exact(pat)) => subject == pat,
			(Op::NotInSet, Pattern::Set(set)) => !set.contains(subject),
			(Op::NotInSet, Pattern::Exact(pat)) => subject != pat,
			(op @ (Op::Glob | Op::NotGlob), Pattern::Glob(glob)) => {
				// FIXME: someway that isn't this horrible
				// The glob is compiled on every call here; path globs are
				// instead handled by the filterer's pre-compiled globsets.
				match Glob::new(glob) {
					Ok(glob) => {
						let matches = glob.compile_matcher().is_match(subject);
						match op {
							Op::Glob => matches,
							Op::NotGlob => !matches,
							_ => unreachable!(),
						}
					}
					Err(err) => {
						// An invalid glob must not fail the event: warn and pass.
						warn!(
							"failed to compile glob for non-path match, skipping (pass): {}",
							err
						);
						true
					}
				}
			}
			(op, pat) => {
				// Mismatched op/pattern combinations (e.g. a regex op with a
				// set pattern) cannot match anything.
				warn!(
					"trying to match pattern {:?} with op {:?}, that cannot work",
					pat, op
				);
				false
			}
		})
	}

	/// Create a filter from a gitignore-style glob pattern.
	///
	/// The optional path is for the `in_path` field of the filter. When parsing gitignore files, it
	/// should be set to the path of the _directory_ the ignore file is in.
	///
	/// The resulting filter matches on [`Path`][Matcher::Path], with the [`NotGlob`][Op::NotGlob]
	/// op, and a [`Glob`][Pattern::Glob] pattern. If it starts with a `!`, it is negated.
	#[must_use]
	pub fn from_glob_ignore(in_path: Option<PathBuf>, glob: &str) -> Self {
		// A leading `!` marks a whitelisting (negated) pattern, as in gitignore.
		let (glob, negate) = glob.strip_prefix('!').map_or((glob, false), |g| (g, true));

		Self {
			in_path,
			on: Matcher::Path,
			op: Op::NotGlob,
			pat: Pattern::Glob(glob.to_string()),
			negate,
		}
	}

	/// Returns the filter with its `in_path` canonicalised.
	pub async fn canonicalised(mut self) -> Result<Self, TaggedFiltererError> {
		if let Some(ctx) = self.in_path {
			let canon = canonicalize(&ctx)
				.await
				.map_err(|err| TaggedFiltererError::IoError {
					about: "canonicalise Filter in_path",
					err,
				})?;

			// Bug fix: the trace previously logged `ctx`, the path *before*
			// canonicalisation, under the name `canon` — log the resolved path.
			trace!(canon=?canon, "canonicalised in_path");
			self.in_path = Some(canon);
		}

		Ok(self)
	}
}
/// What a filter matches on.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[non_exhaustive]
pub enum Matcher {
	/// The presence of a tag on an event.
	Tag,

	/// A path in a filesystem event. Paths are always canonicalised.
	///
	/// Note that there may be multiple paths in an event (e.g. both source and destination for renames), and filters
	/// will be matched on all of them.
	Path,

	/// The file type of an object in a filesystem event.
	///
	/// This is not guaranteed to be present for every filesystem event.
	///
	/// It can be any of these values: `file`, `dir`, `symlink`, `other`. That last one means
	/// "not any of the first three."
	FileType,

	/// The [`EventKind`][notify::event::EventKind] of a filesystem event.
	///
	/// This is the Debug representation of the event kind. Examples:
	/// - `Access(Close(Write))`
	/// - `Modify(Data(Any))`
	/// - `Modify(Metadata(Permissions))`
	/// - `Remove(Folder)`
	///
	/// You should probably use globs or regexes to match these, e.g.:
	/// - `Create(*)`
	/// - `Modify\(Name\(.+`
	FileEventKind,

	/// The [event source][crate::event::Source] the event came from.
	///
	/// These are the lowercase names of the variants.
	Source,

	/// The ID of the process which caused the event.
	///
	/// Note that it's rare for events to carry this information.
	Process,

	/// A signal sent to the main process.
	///
	/// This can be matched both on the signal number as an integer, and on the signal name as a
	/// string. On Windows, only `BREAK` is supported; `CTRL_C` parses but won't work. Matching is
	/// on both uppercase and lowercase forms.
	///
	/// Interrupt signals (`TERM` and `INT` on Unix, `CTRL_C` on Windows) are parsed, but these are
	/// marked Urgent internally to Watchexec, and thus bypass filtering entirely.
	Signal,

	/// The exit status of a subprocess.
	///
	/// This is only present for events issued when the subprocess exits. The value is matched on
	/// both the exit code as an integer, and either `success` or `fail`, whichever succeeds.
	ProcessCompletion,

	/// The [`Priority`] of the event.
	///
	/// This is never `urgent`, as urgent events bypass filtering.
	Priority,
}
impl Matcher {
	// Maps an event Tag to the matchers whose filters should be checked against it.
	//
	// NOTE(review): this never returns `Self::Tag`, so filters registered under the `tag`
	// matcher are never retrieved by the check loop, even though `TaggedFilterer::match_tag`
	// has a `(tag, Matcher::Tag)` arm for them — confirm whether that is intentional.
	// `Self::Priority` is likewise absent: priority is checked separately, not via tags.
	pub(crate) fn from_tag(tag: &Tag) -> &'static [Self] {
		match tag {
			// A path without a file type can only be matched on the path itself.
			Tag::Path {
				file_type: None, ..
			} => &[Self::Path],
			Tag::Path { .. } => &[Self::Path, Self::FileType],
			Tag::FileEventKind(_) => &[Self::FileEventKind],
			Tag::Source(_) => &[Self::Source],
			Tag::Process(_) => &[Self::Process],
			Tag::Signal(_) => &[Self::Signal],
			Tag::ProcessCompletion(_) => &[Self::ProcessCompletion],
			_ => {
				// Unknown/new tags produce no matchers, so they pass filtering by default.
				warn!("unhandled tag: {:?}", tag);
				&[]
			}
		}
	}
}
/// How a filter value is interpreted.
///
/// - `==` and `!=` match on the exact value as string equality (case-insensitively),
/// - `~=` and `~!` match using a [regex],
/// - `*=` and `*!` match using a glob, either via [globset] or [ignore]
/// - `:=` and `:!` match via exact string comparisons, but on any of the list of values separated
///   by `,`
/// - `=`, the "auto" operator, behaves as `*=` if the matcher is `Path`, and as `==` otherwise.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum Op {
	/// The auto operator, `=`, resolves to `*=` or `==` depending on the matcher.
	Auto,
	/// The `==` operator, matches on exact string equality.
	Equal,
	/// The `!=` operator, matches on exact string inequality.
	NotEqual,
	/// The `~=` operator, matches on a regex.
	Regex,
	/// The `~!` operator, matches on a regex (matches are fails).
	NotRegex,
	/// The `*=` operator, matches on a glob.
	Glob,
	/// The `*!` operator, matches on a glob (matches are fails).
	NotGlob,
	/// The `:=` operator, matches (with string compares) on a set of values (belongs are passes).
	InSet,
	/// The `:!` operator, matches on a set of values (belongs are fails).
	NotInSet,
}
/// A filter value (pattern to match with).
#[derive(Debug, Clone)]
#[non_exhaustive]
pub enum Pattern {
	/// An exact string.
	Exact(String),

	/// A regex.
	Regex(Regex),

	/// A glob.
	///
	/// This is stored as a string as globs are compiled together rather than on a per-filter basis.
	Glob(String),

	/// A set of exact strings.
	Set(HashSet<String>),
}
// Manual equality: `Regex` does not implement PartialEq, so compare its
// source pattern text instead; different variants are never equal.
impl PartialEq<Self> for Pattern {
	fn eq(&self, other: &Self) -> bool {
		match (self, other) {
			(Self::Exact(a), Self::Exact(b)) => a == b,
			(Self::Glob(a), Self::Glob(b)) => a == b,
			(Self::Regex(a), Self::Regex(b)) => a.as_str() == b.as_str(),
			(Self::Set(a), Self::Set(b)) => a == b,
			_ => false,
		}
	}
}

impl Eq for Pattern {}

View File

@ -1,537 +0,0 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::HashMap, convert::Into};
use futures::{stream::FuturesOrdered, TryStreamExt};
use ignore::{
gitignore::{Gitignore, GitignoreBuilder},
Match,
};
use ignore_files::{IgnoreFile, IgnoreFilter};
use tokio::fs::canonicalize;
use tracing::{debug, trace, trace_span};
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{Event, FileType, Priority, ProcessEnd, Tag};
use watchexec_filterer_ignore::IgnoreFilterer;
use watchexec_signals::Signal;
use crate::{swaplock::SwapLock, Filter, Matcher, Op, Pattern, TaggedFiltererError};
/// A complex filterer that can match any event tag and supports different matching operators.
///
/// See the crate-level documentation for more information.
#[derive(Debug)]
pub struct TaggedFilterer {
	/// The directory the project is in, its origin.
	///
	/// This is used to resolve absolute paths without an `in_path` context.
	origin: PathBuf,

	/// Where the program is running from.
	///
	/// This is used to resolve relative paths without an `in_path` context.
	workdir: PathBuf,

	/// All filters that are applied, in order, by matcher.
	filters: SwapLock<HashMap<Matcher, Vec<Filter>>>,

	/// Sub-filterer for ignore files.
	ignore_filterer: SwapLock<IgnoreFilterer>,

	/// Compiled matcher for Glob filters.
	///
	/// `None` until at least one Glob path filter is added; rebuilt by `recompile_globs`.
	glob_compiled: SwapLock<Option<Gitignore>>,

	/// Compiled matcher for NotGlob filters.
	///
	/// `None` until at least one NotGlob path filter is added; rebuilt by `recompile_globs`.
	not_glob_compiled: SwapLock<Option<Gitignore>>,
}
impl Filterer for TaggedFilterer {
	/// Checks an event against the tagged filters, converting any
	/// filterer-specific error into a [`RuntimeError`] for the runtime.
	fn check_event(&self, event: &Event, priority: Priority) -> Result<bool, RuntimeError> {
		match self.check(event, priority) {
			Ok(pass) => Ok(pass),
			Err(err) => Err(err.into()),
		}
	}
}
impl TaggedFilterer {
	// Core event check: returns Ok(true) when the event passes all filters.
	//
	// Order matters: priority filters first, then the internal ignore filterer,
	// then per-tag/per-matcher filters (compiled path globsets before the rest).
	// Any failing stage short-circuits the whole event to Ok(false).
	fn check(&self, event: &Event, priority: Priority) -> Result<bool, TaggedFiltererError> {
		let _span = trace_span!("filterer_check").entered();
		trace!(?event, ?priority, "checking event");

		{
			trace!("checking priority");
			if let Some(filters) = self.filters.borrow().get(&Matcher::Priority).cloned() {
				trace!(filters=%filters.len(), "found some filters for priority");

				// All non-negated filters must pass; a matching negated filter
				// overrides earlier failures (gitignore-like semantics).
				let mut pri_match = true;
				for filter in &filters {
					let _span = trace_span!("checking filter against priority", ?filter).entered();
					let applies = filter.matches(match priority {
						Priority::Low => "low",
						Priority::Normal => "normal",
						Priority::High => "high",
						Priority::Urgent => unreachable!("urgent by-passes filtering"),
					})?;
					if filter.negate {
						if applies {
							trace!(prev=%pri_match, now=%true, "negate filter passes, passing this priority");
							pri_match = true;
							break;
						}

						trace!(prev=%pri_match, now=%pri_match, "negate filter fails, ignoring");
					} else {
						trace!(prev=%pri_match, this=%applies, now=%(pri_match&applies), "filter applies to priority");
						pri_match &= applies;
					}
				}

				if !pri_match {
					trace!("priority fails check, failing entire event");
					return Ok(false);
				}
			} else {
				trace!("no filters for priority, skipping (pass)");
			}
		}

		{
			trace!("checking internal ignore filterer");
			let igf = self.ignore_filterer.borrow();
			if !igf
				.check_event(event, priority)
				.expect("IgnoreFilterer never errors")
			{
				trace!("internal ignore filterer matched (fail)");
				return Ok(false);
			}
		}

		if self.filters.borrow().is_empty() {
			trace!("no filters, skipping entire check (pass)");
			return Ok(true);
		}

		trace!(tags=%event.tags.len(), "checking all tags on the event");
		for tag in &event.tags {
			let _span = trace_span!("check_tag", ?tag).entered();

			trace!("checking tag");
			for matcher in Matcher::from_tag(tag) {
				let _span = trace_span!("check_matcher", ?matcher).entered();

				let filters = self.filters.borrow().get(matcher).cloned();
				if let Some(tag_filters) = filters {
					if tag_filters.is_empty() {
						trace!("no filters for this matcher, skipping (pass)");
						continue;
					}

					trace!(filters=%tag_filters.len(), "found some filters for this matcher");

					let mut tag_match = true;

					// Path glob filters are matched via the two pre-compiled
					// gitignore sets, not per-filter.
					if let (Matcher::Path, Tag::Path { path, file_type }) = (matcher, tag) {
						let is_dir = file_type.map_or(false, |ft| matches!(ft, FileType::Dir));

						{
							let gc = self.glob_compiled.borrow();
							if let Some(igs) = gc.as_ref() {
								let _span =
									trace_span!("checking_compiled_filters", compiled=%"Glob")
										.entered();
								// Paths inside the origin also check their parent dirs.
								match if path.strip_prefix(&self.origin).is_ok() {
									trace!("checking against path or parents");
									igs.matched_path_or_any_parents(path, is_dir)
								} else {
									trace!("checking against path only");
									igs.matched(path, is_dir)
								} {
									Match::None => {
										trace!("no match (fail)");
										tag_match &= false;
									}
									Match::Ignore(glob) => {
										// Only count the match if the glob's own file
										// scope (its directory) contains this path.
										if glob
											.from()
											.map_or(true, |f| path.strip_prefix(f).is_ok())
										{
											trace!(?glob, "positive match (pass)");
											tag_match &= true;
										} else {
											trace!(
												?glob,
												"positive match, but not in scope (ignore)"
											);
										}
									}
									Match::Whitelist(glob) => {
										trace!(?glob, "negative match (ignore)");
									}
								}
							}
						}

						{
							let ngc = self.not_glob_compiled.borrow();
							if let Some(ngs) = ngc.as_ref() {
								let _span =
									trace_span!("checking_compiled_filters", compiled=%"NotGlob")
										.entered();
								match if path.strip_prefix(&self.origin).is_ok() {
									trace!("checking against path or parents");
									ngs.matched_path_or_any_parents(path, is_dir)
								} else {
									trace!("checking against path only");
									ngs.matched(path, is_dir)
								} {
									Match::None => {
										trace!("no match (pass)");
										tag_match &= true;
									}
									Match::Ignore(glob) => {
										if glob
											.from()
											.map_or(true, |f| path.strip_prefix(f).is_ok())
										{
											trace!(?glob, "positive match (fail)");
											tag_match &= false;
										} else {
											trace!(
												?glob,
												"positive match, but not in scope (ignore)"
											);
										}
									}
									Match::Whitelist(glob) => {
										// A whitelist (`!`-prefixed) NotGlob overrides
										// earlier failures outright, hence `=` not `&=`.
										trace!(?glob, "negative match (pass)");
										tag_match = true;
									}
								}
							}
						}
					}

					// Path glob filters were handled by the compiled globsets
					// above; drop them before the per-filter loop.
					let tag_filters = tag_filters
						.into_iter()
						.filter(|f| {
							!matches!(
								(tag, matcher, f),
								(
									Tag::Path { .. },
									Matcher::Path,
									Filter {
										on: Matcher::Path,
										op: Op::Glob | Op::NotGlob,
										pat: Pattern::Glob(_),
										..
									}
								)
							)
						})
						.collect::<Vec<_>>();
					if tag_filters.is_empty() && tag_match {
						trace!("no more filters for this matcher, skipping (pass)");
						continue;
					}

					trace!(filters=%tag_filters.len(), "got some filters to check still");
					for filter in &tag_filters {
						let _span = trace_span!("checking filter against tag", ?filter).entered();
						// `None` from match_tag means the filter did not apply
						// (mismatched tag, out of context) and is skipped.
						if let Some(app) = self.match_tag(filter, tag)? {
							if filter.negate {
								if app {
									trace!(prev=%tag_match, now=%true, "negate filter passes, passing this matcher");
									tag_match = true;
									break;
								}

								trace!(prev=%tag_match, now=%tag_match, "negate filter fails, ignoring");
							} else {
								trace!(prev=%tag_match, this=%app, now=%(tag_match&app), "filter applies to this tag");
								tag_match &= app;
							}
						}
					}

					if !tag_match {
						trace!("matcher fails check, failing entire event");
						return Ok(false);
					}

					trace!("matcher passes check, continuing");
				} else {
					trace!("no filters for this matcher, skipping (pass)");
				}
			}
		}

		trace!("passing event");
		Ok(true)
	}
/// Initialise a new tagged filterer with no filters.
///
/// This takes two paths: the project origin, and the current directory. The current directory
/// is not obtained from the environment so you can customise it; generally you should use
/// [`std::env::current_dir()`] though.
///
/// The origin is the directory the main project that is being watched is in. This is used to
/// resolve absolute paths given in filters without an `in_path` field (e.g. all filters parsed
/// from text), and for ignore file based filtering.
///
/// The workdir is used to resolve relative paths given in filters without an `in_path` field.
///
/// So, if origin is `/path/to/project` and workdir is `/path/to/project/subtree`:
/// - `path=foo.bar` is resolved to `/path/to/project/subtree/foo.bar`
/// - `path=/foo.bar` is resolved to `/path/to/project/foo.bar`
pub async fn new(origin: PathBuf, workdir: PathBuf) -> Result<Arc<Self>, TaggedFiltererError> {
let origin = canonicalize(origin)
.await
.map_err(|err| TaggedFiltererError::IoError {
about: "canonicalise origin on new tagged filterer",
err,
})?;
Ok(Arc::new(Self {
filters: SwapLock::new(HashMap::new()),
ignore_filterer: SwapLock::new(IgnoreFilterer(IgnoreFilter::empty(&origin))),
glob_compiled: SwapLock::new(None),
not_glob_compiled: SwapLock::new(None),
workdir: canonicalize(workdir)
.await
.map_err(|err| TaggedFiltererError::IoError {
about: "canonicalise workdir on new tagged filterer",
err,
})?,
origin,
}))
}
	// Matches a single filter against a single tag.
	//
	// filter ctx event path filter outcome
	// /foo/bar /foo/bar/baz.txt baz.txt pass
	// /foo/bar /foo/bar/baz.txt /baz.txt pass
	// /foo/bar /foo/bar/baz.txt /baz.* pass
	// /foo/bar /foo/bar/baz.txt /blah fail
	// /foo/quz /foo/bar/baz.txt /baz.* skip
	//
	// Ok(Some(bool)) => the match was applied, bool is the result
	// Ok(None) => for some precondition, the match was not done (mismatched tag, out of context, …)
	fn match_tag(&self, filter: &Filter, tag: &Tag) -> Result<Option<bool>, TaggedFiltererError> {
		// Canonical (name, number) pair for a signal, so filters can match
		// either the textual or numeric form.
		const fn sig_match(sig: Signal) -> (&'static str, i32) {
			match sig {
				Signal::Hangup | Signal::Custom(1) => ("HUP", 1),
				Signal::ForceStop | Signal::Custom(9) => ("KILL", 9),
				Signal::Interrupt | Signal::Custom(2) => ("INT", 2),
				Signal::Quit | Signal::Custom(3) => ("QUIT", 3),
				Signal::Terminate | Signal::Custom(15) => ("TERM", 15),
				Signal::User1 | Signal::Custom(10) => ("USR1", 10),
				Signal::User2 | Signal::Custom(12) => ("USR2", 12),
				Signal::Custom(n) => ("UNK", n),
				_ => ("UNK", 0),
			}
		}

		trace!(matcher=?filter.on, "matching filter to tag");
		match (tag, filter.on) {
			(tag, Matcher::Tag) => filter.matches(tag.discriminant_name()),
			(Tag::Path { path, .. }, Matcher::Path) => {
				// Resolve the event path relative to (in order): the filter's
				// own context, the workdir, the origin, or the filesystem root.
				// A filter context that doesn't contain the path skips the filter.
				let resolved = if let Some(ctx) = &filter.in_path {
					if let Ok(suffix) = path.strip_prefix(ctx) {
						suffix.strip_prefix("/").unwrap_or(suffix)
					} else {
						return Ok(None);
					}
				} else if let Ok(suffix) = path.strip_prefix(&self.workdir) {
					suffix.strip_prefix("/").unwrap_or(suffix)
				} else if let Ok(suffix) = path.strip_prefix(&self.origin) {
					suffix.strip_prefix("/").unwrap_or(suffix)
				} else {
					path.strip_prefix("/").unwrap_or(path)
				};

				trace!(?resolved, "resolved path to match filter against");

				// Path globs go through the compiled globsets in check(), not here.
				if matches!(filter.op, Op::Glob | Op::NotGlob) {
					trace!("path glob match with match_tag is already handled");
					return Ok(None);
				}

				filter.matches(resolved.to_string_lossy())
			}
			(
				Tag::Path {
					file_type: Some(ft),
					..
				},
				Matcher::FileType,
			) => filter.matches(ft.to_string()),
			(Tag::FileEventKind(kind), Matcher::FileEventKind) => {
				// Matched against the Debug representation, e.g. `Modify(Data(Any))`.
				filter.matches(format!("{kind:?}"))
			}
			(Tag::Source(src), Matcher::Source) => filter.matches(src.to_string()),
			(Tag::Process(pid), Matcher::Process) => filter.matches(pid.to_string()),
			(Tag::Signal(sig), Matcher::Signal) => {
				// A signal filter passes if any of the name, SIG-prefixed name,
				// or number matches.
				let (text, int) = sig_match(*sig);
				Ok(filter.matches(text)?
					|| filter.matches(format!("SIG{text}"))?
					|| filter.matches(int.to_string())?)
			}
			(Tag::ProcessCompletion(ope), Matcher::ProcessCompletion) => match ope {
				None => filter.matches("_"),
				Some(ProcessEnd::Success) => filter.matches("success"),
				Some(ProcessEnd::ExitError(int)) => filter.matches(format!("error({int})")),
				Some(ProcessEnd::ExitSignal(sig)) => {
					let (text, int) = sig_match(*sig);
					Ok(filter.matches(format!("signal({text})"))?
						|| filter.matches(format!("signal(SIG{text})"))?
						|| filter.matches(format!("signal({int})"))?)
				}
				Some(ProcessEnd::ExitStop(int)) => filter.matches(format!("stop({int})")),
				Some(ProcessEnd::Exception(int)) => filter.matches(format!("exception({int:X})")),
				Some(ProcessEnd::Continued) => filter.matches("continued"),
			},
			(_, _) => {
				trace!("no match for tag, skipping");
				return Ok(None);
			}
		}
		.map(Some)
	}
	/// Add some filters to the filterer.
	///
	/// This is async as it submits the new filters to the live filterer, which may be holding a
	/// read lock. It takes a slice of filters so it can efficiently add a large number of filters
	/// with a single write, without needing to acquire the lock repeatedly.
	///
	/// If filters with glob operations are added, the filterer's glob matchers are recompiled after
	/// the new filters are added, in this method. This should not be used for inserting an
	/// [`IgnoreFile`]: use [`add_ignore_file()`](Self::add_ignore_file) instead.
	pub async fn add_filters(&self, filters: &[Filter]) -> Result<(), TaggedFiltererError> {
		debug!(?filters, "adding filters to filterer");

		let mut recompile_globs = false;
		let mut recompile_not_globs = false;

		// `from_iter` drains the iterator eagerly, so the `inspect` side effects
		// (setting the recompile flags above) complete before the await below —
		// the flags are safe to read afterwards.
		#[allow(clippy::from_iter_instead_of_collect)]
		let filters = FuturesOrdered::from_iter(
			filters
				.iter()
				.cloned()
				.inspect(|f| match f.op {
					Op::Glob => {
						recompile_globs = true;
					}
					Op::NotGlob => {
						recompile_not_globs = true;
					}
					_ => {}
				})
				.map(Filter::canonicalised),
		)
		.try_collect::<Vec<_>>()
		.await?;
		trace!(?filters, "canonicalised filters");
		// TODO: use miette's related and issue canonicalisation errors for all of them

		self.filters
			.change(|fs| {
				for filter in filters {
					fs.entry(filter.on).or_default().push(filter);
				}
			})
			.map_err(|err| TaggedFiltererError::FilterChange { action: "add", err })?;
		trace!("inserted filters into swaplock");

		if recompile_globs {
			self.recompile_globs(Op::Glob)?;
		}

		if recompile_not_globs {
			self.recompile_globs(Op::NotGlob)?;
		}

		Ok(())
	}
	// Rebuilds the compiled gitignore set for either the Glob or NotGlob op from
	// the current Path filters, and swaps it into the corresponding SwapLock.
	fn recompile_globs(&self, op_filter: Op) -> Result<(), TaggedFiltererError> {
		trace!(?op_filter, "recompiling globs");
		let target = match op_filter {
			Op::Glob => &self.glob_compiled,
			Op::NotGlob => &self.not_glob_compiled,
			_ => unreachable!("recompile_globs called with invalid op"),
		};

		let globs = {
			let filters = self.filters.borrow();
			if let Some(fs) = filters.get(&Matcher::Path) {
				trace!(?op_filter, "pulling filters from swaplock");
				// we want to hold the lock as little as possible, so we clone the filters
				fs.iter()
					.filter(|&f| f.op == op_filter)
					.cloned()
					.collect::<Vec<_>>()
			} else {
				trace!(?op_filter, "no filters, erasing compiled glob");
				return target
					.replace(None)
					.map_err(TaggedFiltererError::GlobsetChange);
			}
		};

		let mut builder = GitignoreBuilder::new(&self.origin);
		for filter in globs {
			if let Pattern::Glob(mut glob) = filter.pat {
				if filter.negate {
					// A negated filter becomes a `!` (whitelist) line in gitignore syntax.
					glob.insert(0, '!');
				}

				trace!(?op_filter, in_path=?filter.in_path, ?glob, "adding new glob line");
				builder
					.add_line(filter.in_path, &glob)
					.map_err(TaggedFiltererError::GlobParse)?;
			}
		}

		trace!(?op_filter, "finalising compiled glob");
		let compiled = builder.build().map_err(TaggedFiltererError::GlobParse)?;

		trace!(?op_filter, "swapping in new compiled glob");
		target
			.replace(Some(compiled))
			.map_err(TaggedFiltererError::GlobsetChange)
	}
/// Reads a gitignore-style [`IgnoreFile`] and adds it to the filterer.
pub async fn add_ignore_file(&self, file: &IgnoreFile) -> Result<(), TaggedFiltererError> {
let mut new = { self.ignore_filterer.borrow().clone() };
new.0
.add_file(file)
.await
.map_err(TaggedFiltererError::Ignore)?;
self.ignore_filterer
.replace(new)
.map_err(TaggedFiltererError::IgnoreSwap)?;
Ok(())
}
/// Clears all filters from the filterer.
///
/// This also recompiles the glob matchers, so essentially it resets the entire filterer state.
pub fn clear_filters(&self) -> Result<(), TaggedFiltererError> {
debug!("removing all filters from filterer");
self.filters.replace(Default::default()).map_err(|err| {
TaggedFiltererError::FilterChange {
action: "clear all",
err,
}
})?;
self.recompile_globs(Op::Glob)?;
self.recompile_globs(Op::NotGlob)?;
Ok(())
}
}

View File

@ -1,92 +0,0 @@
//! A filterer implementation that exposes the full capabilities of Watchexec.
//!
//! Filters match against [event tags][Tag]; can be exact matches, glob matches, regex matches, or
//! set matches; can reverse the match (equal/not equal, etc); and can be negated.
//!
//! [Filters][Filter] can be generated from your application and inserted directly, or they can be
//! parsed from a textual format:
//!
//! ```text
//! [!]{Matcher}{Op}{Value}
//! ```
//!
//! For example:
//!
//! ```text
//! path==/foo/bar
//! path*=**/bar
//! path~=bar$
//! !kind=file
//! ```
//!
//! There is a set of [operators][Op]:
//! - `==` and `!=`: exact match and exact not match (case insensitive)
//! - `~=` and `~!`: regex match and regex not match
//! - `*=` and `*!`: glob match and glob not match
//! - `:=` and `:!`: set match and set not match
//!
//! Sets are a list of values separated by `,`.
//!
//! In addition to the two-symbol operators, there is the `=` "auto" operator, which maps to the
//! most convenient operator for the given _matcher_. The current mapping is:
//!
//! | Matcher | Operator |
//! |---------------------------------------------------|---------------|
//! | [`Tag`](Matcher::Tag) | `:=` (in set) |
//! | [`Path`](Matcher::Path) | `*=` (glob) |
//! | [`FileType`](Matcher::FileType) | `:=` (in set) |
//! | [`FileEventKind`](Matcher::FileEventKind) | `*=` (glob) |
//! | [`Source`](Matcher::Source) | `:=` (in set) |
//! | [`Process`](Matcher::Process) | `:=` (in set) |
//! | [`Signal`](Matcher::Signal) | `:=` (in set) |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `*=` (glob) |
//! | [`Priority`](Matcher::Priority) | `:=` (in set) |
//!
//! [Matchers][Matcher] correspond to Tags, but are not one-to-one: the `path` matcher operates on
//! the `path` part of the `Path` tag, and the `type` matcher operates on the `file_type`, for
//! example.
//!
//! | Matcher | Syntax | Tag |
//! |-------------------------------------------|----------|----------------------------------------------|
//! | [`Tag`](Matcher::Tag) | `tag` | _the presence of a Tag on the event_ |
//! | [`Path`](Matcher::Path) | `path` | [`Path`](Tag::Path) (`path` field) |
//! | [`FileType`](Matcher::FileType) | `type` | [`Path`](Tag::Path) (`file_type` field, when Some) |
//! | [`FileEventKind`](Matcher::FileEventKind) | `kind` or `fek` | [`FileEventKind`](Tag::FileEventKind) |
//! | [`Source`](Matcher::Source) | `source` or `src` | [`Source`](Tag::Source) |
//! | [`Process`](Matcher::Process) | `process` or `pid` | [`Process`](Tag::Process) |
//! | [`Signal`](Matcher::Signal) | `signal` | [`Signal`](Tag::Signal) |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `complete` or `exit` | [`ProcessCompletion`](Tag::ProcessCompletion) |
//! | [`Priority`](Matcher::Priority) | `priority` | special: event [`Priority`] |
//!
//! Filters are checked in order, grouped per tag and per matcher. Filter groups may be checked in
//! any order, but the filters in the groups are checked in add order. Path glob filters are always
//! checked first, for internal reasons.
//!
//! The `negate` boolean field behaves specially: it is not operator negation, but rather the same
//! kind of behaviour that is applied to `!`-prefixed globs in gitignore files: if a negated filter
//! matches the event, the result of the event checking for that matcher is reverted to `true`, even
//! if a previous filter set it to `false`. Unmatched negated filters are ignored.
//!
//! Glob syntax is as supported by the [ignore] crate for Paths, and by [globset] otherwise. (As of
//! writing, the ignore crate uses globset internally). Regex syntax is the default syntax of the
//! [regex] crate.
#![doc(html_favicon_url = "https://watchexec.github.io/logo:watchexec.svg")]
#![doc(html_logo_url = "https://watchexec.github.io/logo:watchexec.svg")]
#![warn(clippy::unwrap_used, missing_docs)]
#![deny(rust_2018_idioms)]

// Re-exported so downstream users can construct `Pattern::Regex` filters
// without depending on the regex crate directly.
pub use regex::Regex;

pub use error::*;
pub use files::*;
pub use filter::*;
pub use filterer::*;

mod error;
mod files;
mod filter;
mod filterer;
mod parse;
mod swaplock;

View File

@ -1,139 +0,0 @@
use std::str::FromStr;
use nom::{
branch::alt,
bytes::complete::{is_not, tag, tag_no_case, take_while1},
character::complete::char,
combinator::{map_res, opt},
sequence::{delimited, tuple},
Finish, IResult,
};
use regex::Regex;
use tracing::trace;
use crate::{Filter, Matcher, Op, Pattern, TaggedFiltererError};
impl FromStr for Filter {
	type Err = TaggedFiltererError;

	/// Parses the textual filter syntax: `[!]{matcher}{op}{value}`.
	fn from_str(s: &str) -> Result<Self, Self::Err> {
		// Matcher keyword. Note the alternation order: longer keywords come
		// before their prefixes ("signal" before "sig", "source" before "src")
		// so `alt` doesn't stop at a partial match.
		fn matcher(i: &str) -> IResult<&str, Matcher> {
			map_res(
				alt((
					tag_no_case("tag"),
					tag_no_case("path"),
					tag_no_case("type"),
					tag_no_case("kind"),
					tag_no_case("fek"),
					tag_no_case("source"),
					tag_no_case("src"),
					tag_no_case("priority"),
					tag_no_case("process"),
					tag_no_case("pid"),
					tag_no_case("signal"),
					tag_no_case("sig"),
					tag_no_case("complete"),
					tag_no_case("exit"),
				)),
				|m: &str| match m.to_ascii_lowercase().as_str() {
					"tag" => Ok(Matcher::Tag),
					"path" => Ok(Matcher::Path),
					"type" => Ok(Matcher::FileType),
					"kind" | "fek" => Ok(Matcher::FileEventKind),
					"source" | "src" => Ok(Matcher::Source),
					"priority" => Ok(Matcher::Priority),
					"process" | "pid" => Ok(Matcher::Process),
					"signal" | "sig" => Ok(Matcher::Signal),
					"complete" | "exit" => Ok(Matcher::ProcessCompletion),
					m => Err(format!("unknown matcher: {m}")),
				},
			)(i)
		}

		// Operator. The bare "=" (auto) must come last so the two-character
		// operators are tried first.
		fn op(i: &str) -> IResult<&str, Op> {
			map_res(
				alt((
					tag("=="),
					tag("!="),
					tag("~="),
					tag("~!"),
					tag("*="),
					tag("*!"),
					tag(":="),
					tag(":!"),
					tag("="),
				)),
				|o: &str| match o {
					"==" => Ok(Op::Equal),
					"!=" => Ok(Op::NotEqual),
					"~=" => Ok(Op::Regex),
					"~!" => Ok(Op::NotRegex),
					"*=" => Ok(Op::Glob),
					"*!" => Ok(Op::NotGlob),
					":=" => Ok(Op::InSet),
					":!" => Ok(Op::NotInSet),
					"=" => Ok(Op::Auto),
					o => Err(format!("unknown op: `{o}`")),
				},
			)(i)
		}

		// Pattern value: quoted (single or double) or the raw remainder of the line.
		fn pattern(i: &str) -> IResult<&str, &str> {
			alt((
				// TODO: escapes
				delimited(char('"'), is_not("\""), char('"')),
				delimited(char('\''), is_not("'"), char('\'')),
				take_while1(|_| true),
			))(i)
		}

		fn filter(i: &str) -> IResult<&str, Filter> {
			map_res(
				tuple((opt(tag("!")), matcher, op, pattern)),
				|(n, m, o, p)| -> Result<_, ()> {
					Ok(Filter {
						in_path: None,
						on: m,
						// Resolve the auto op per the matcher, as documented in lib.rs.
						op: match o {
							Op::Auto => match m {
								Matcher::Path
								| Matcher::FileEventKind
								| Matcher::ProcessCompletion => Op::Glob,
								_ => Op::InSet,
							},
							o => o,
						},
						pat: match (o, m) {
							// TODO: carry regex/glob errors through
							(
								Op::Auto,
								Matcher::Path | Matcher::FileEventKind | Matcher::ProcessCompletion,
							)
							| (Op::Glob | Op::NotGlob, _) => Pattern::Glob(p.to_string()),
							(Op::Auto | Op::InSet | Op::NotInSet, _) => {
								Pattern::Set(p.split(',').map(|s| s.trim().to_string()).collect())
							}
							(Op::Regex | Op::NotRegex, _) => {
								Pattern::Regex(Regex::new(p).map_err(drop)?)
							}
							(Op::Equal | Op::NotEqual, _) => Pattern::Exact(p.to_string()),
						},
						negate: n.is_some(),
					})
				},
			)(i)
		}

		trace!(src=?s, "parsing tagged filter");
		filter(s)
			.finish()
			.map(|(_, f)| {
				trace!(src=?s, filter=?f, "parsed tagged filter");
				f
			})
			.map_err(|e| TaggedFiltererError::Parse {
				src: s.to_string(),
				err: e.code,
			})
	}
}

View File

@ -1,58 +0,0 @@
//! A value that is always available, but can be swapped out.
use std::fmt;
use tokio::sync::watch::{channel, error::SendError, Receiver, Ref, Sender};
/// A value that is always available, but can be swapped out.
///
/// This is a wrapper around a [Tokio `watch`][tokio::sync::watch] channel. The value can be read
/// without await; borrows should be held for as little as possible, as they keep a read lock.
pub struct SwapLock<T: Clone> {
	// Held permanently so there is always at least one receiver (`Sender::send`
	// fails when no receivers exist); also what `borrow` reads from.
	r: Receiver<T>,
	s: Sender<T>,
}
impl<T> SwapLock<T>
where
	T: Clone,
{
	/// Create a new `SwapLock` with the given value.
	pub fn new(inner: T) -> Self {
		let (s, r) = channel(inner);
		Self { r, s }
	}

	/// Get a reference to the value.
	///
	/// This holds the channel's read lock for as long as the returned `Ref`
	/// lives: keep it short-lived.
	pub fn borrow(&self) -> Ref<'_, T> {
		self.r.borrow()
	}

	/// Rewrite the value using a closure.
	///
	/// The closure is called with a mutable reference to the value, and the
	/// modified value is visible to readers once this returns.
	///
	/// Bug fix: this previously cloned the value, mutated the clone, and sent
	/// it back — a non-atomic read-modify-write, so two concurrent `change`
	/// calls could silently lose one of the updates. `send_modify` mutates the
	/// value in place under the channel's write lock instead, which makes the
	/// whole operation atomic; it also cannot fail, so this always returns
	/// `Ok(())` (the `Result` is kept for signature compatibility).
	pub fn change(&self, f: impl FnOnce(&mut T)) -> Result<(), SendError<T>> {
		self.s.send_modify(f);
		Ok(())
	}

	/// Replace the value with a new one.
	///
	/// Cannot fail in practice: `send` only errors when no receivers exist,
	/// and `self.r` is always held.
	pub fn replace(&self, new: T) -> Result<(), SendError<T>> {
		self.s.send(new)
	}
}
impl<T> fmt::Debug for SwapLock<T>
where
T: fmt::Debug + Clone,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.debug_struct("SwapLock")
.field("(watch)", &self.r)
.finish_non_exhaustive()
}
}

View File

@ -1,114 +0,0 @@
use watchexec_events::{filekind::*, ProcessEnd, Source};
use watchexec_signals::Signal;
mod helpers;
use helpers::tagged_ff::*;
#[tokio::test]
async fn empty_filter_passes_everything() {
	// An empty filter file contributes no filters, so every kind of event
	// tag (paths, sources, file event kinds, pids, signals, completions)
	// should pass through unimpeded.
	let filterer = filt("", &[], &[file("empty.wef").await]).await;

	filterer.file_does_pass("Cargo.toml");
	filterer.file_does_pass("Cargo.json");
	filterer.file_does_pass("Gemfile.toml");
	filterer.file_does_pass("FINAL-FINAL.docx");
	filterer.dir_does_pass("/test/Cargo.toml");
	filterer.dir_does_pass("/a/folder");
	filterer.file_does_pass("apples/carrots/oranges");
	filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
	filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	filterer.file_does_pass("apples/oranges/bananas");
	filterer.dir_does_pass("apples/carrots/oranges");
	filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
	filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	filterer.dir_does_pass("apples/oranges/bananas");
	filterer.source_does_pass(Source::Keyboard);
	filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
	filterer.pid_does_pass(1234);
	filterer.signal_does_pass(Signal::User1);
	filterer.complete_does_pass(None);
	filterer.complete_does_pass(Some(ProcessEnd::Success));
}
#[tokio::test]
async fn folder() {
	// The folder.wef fixture combines a dir type filter with a `prunes` path
	// glob, so only directories on a `prunes` path should pass; files (even
	// under `prunes`) and unrelated directories should not.
	let filterer = filt("", &[], &[file("folder.wef").await]).await;

	filterer.file_doesnt_pass("apples");
	filterer.file_doesnt_pass("apples/oranges/bananas");
	filterer.dir_doesnt_pass("apples");
	filterer.dir_doesnt_pass("apples/carrots");

	// `raw-prunes` must not match the `prunes` glob despite the substring.
	filterer.file_doesnt_pass("raw-prunes");
	filterer.dir_doesnt_pass("raw-prunes");

	filterer.file_doesnt_pass("prunes");
	filterer.file_doesnt_pass("prunes/oranges/bananas");

	filterer.dir_does_pass("prunes");
	filterer.dir_does_pass("prunes/carrots/cauliflowers/oranges");
}
#[tokio::test]
async fn patterns() {
	// Exercises the path-patterns.wef fixture: a mix of exact-name,
	// glob, and regex path exclusions. Each section below checks one
	// pattern from the fixture.
	let filterer = filt("", &[], &[file("path-patterns.wef").await]).await;

	// Unmatched
	filterer.file_does_pass("FINAL-FINAL.docx");
	filterer.dir_does_pass("/a/folder");
	filterer.file_does_pass("rat");
	filterer.file_does_pass("foo/bar/rat");
	filterer.file_does_pass("/foo/bar/rat");

	// Cargo.toml
	filterer.file_doesnt_pass("Cargo.toml");
	filterer.dir_doesnt_pass("Cargo.toml");
	filterer.file_does_pass("Cargo.json");

	// package.json
	filterer.file_doesnt_pass("package.json");
	filterer.dir_doesnt_pass("package.json");
	filterer.file_does_pass("package.toml");

	// *.gemspec
	filterer.file_doesnt_pass("pearl.gemspec");
	filterer.dir_doesnt_pass("sapphire.gemspec");
	filterer.file_doesnt_pass(".gemspec");
	filterer.file_does_pass("diamond.gemspecial");

	// test-[^u]+
	filterer.file_does_pass("test-unit");
	filterer.dir_doesnt_pass("test-integration");
	filterer.file_does_pass("tester-helper");

	// [.]sw[a-z]$
	filterer.file_doesnt_pass("source.swa");
	filterer.file_doesnt_pass(".source.swb");
	filterer.file_doesnt_pass("sub/source.swc");
	filterer.file_does_pass("sub/dir.swa/file");
	filterer.file_does_pass("source.sw1");
}
#[tokio::test]
async fn negate() {
	// The negate.wef fixture allowlists `nah` paths but has a negated
	// filter for `nah.yeah`, which re-admits that name despite the first
	// filter excluding everything else.
	let filterer = filt("", &[], &[file("negate.wef").await]).await;

	filterer.file_doesnt_pass("yeah");
	filterer.file_does_pass("nah");
	filterer.file_does_pass("nah.yeah");
}
#[tokio::test]
async fn ignores_and_filters() {
	// Ignore files and filter files compose: an event must survive the
	// ignores AND match the filters to pass.
	let filterer = filt("", &[file("globs").await.0], &[file("folder.wef").await]).await;

	// ignored
	filterer.dir_doesnt_pass("test-helper");

	// not filtered
	filterer.dir_doesnt_pass("tester-helper");

	// not ignored && filtered
	filterer.dir_does_pass("prunes/tester-helper");
}

View File

@ -1,349 +0,0 @@
#![allow(dead_code)]
use std::{
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use ignore_files::{IgnoreFile, IgnoreFilter};
use project_origins::ProjectType;
use tokio::fs::canonicalize;
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{
filekind::FileEventKind, Event, FileType, Priority, ProcessEnd, Source, Tag,
};
use watchexec_filterer_ignore::IgnoreFilterer;
use watchexec_filterer_tagged::{Filter, FilterFile, Matcher, Op, Pattern, TaggedFilterer};
use watchexec_signals::Signal;
/// Re-exports for tests that drive the tagged filterer with inline filters.
pub mod tagged {
	pub use super::ig_file as file;
	pub use super::tagged_filt as filt;
	pub use super::Applies;
	pub use super::FilterExt;
	pub use super::PathHarness;
	pub use super::TaggedHarness;
	pub use super::{filter, glob_filter, notglob_filter};
	pub use watchexec_events::Priority;
}
/// Re-exports for tests that drive the tagged filterer with filter files;
/// overrides `file`/`filt` from the `tagged` set with the filter-file variants.
pub mod tagged_ff {
	pub use super::ff_file as file;
	pub use super::tagged::*;
	pub use super::tagged_fffilt as filt;
}
/// Assertion harness for checking path-tagged events against a [`Filterer`].
pub trait PathHarness: Filterer {
	/// Build a one-path event and run it through the filterer at normal priority.
	fn check_path(
		&self,
		path: PathBuf,
		file_type: Option<FileType>,
	) -> std::result::Result<bool, RuntimeError> {
		self.check_event(
			&Event {
				tags: vec![Tag::Path { path, file_type }],
				metadata: Default::default(),
			},
			Priority::Normal,
		)
	}

	/// Assert that `path` passes (or fails) the filterer.
	///
	/// A `/test/` prefix is rewritten to the current working directory so
	/// fixtures can spell stable absolute-looking paths.
	fn path_pass(&self, path: &str, file_type: Option<FileType>, pass: bool) {
		let cwd = std::fs::canonicalize(".").unwrap();
		let resolved: PathBuf = match path.strip_prefix("/test/") {
			Some(rest) => cwd.join(rest),
			None if Path::new(path).has_root() => path.into(),
			None => cwd.join(path),
		};

		tracing::info!(?path, ?file_type, ?pass, "check");

		let kind = match file_type {
			Some(FileType::File) => "file",
			Some(FileType::Dir) => "dir",
			Some(FileType::Symlink) => "symlink",
			Some(FileType::Other) => "other",
			None => "path",
		};
		assert_eq!(
			self.check_path(resolved, file_type).unwrap(),
			pass,
			"{} {:?} (expected {})",
			kind,
			path,
			if pass { "pass" } else { "fail" }
		);
	}

	fn file_does_pass(&self, path: &str) {
		self.path_pass(path, Some(FileType::File), true);
	}

	fn file_doesnt_pass(&self, path: &str) {
		self.path_pass(path, Some(FileType::File), false);
	}

	fn dir_does_pass(&self, path: &str) {
		self.path_pass(path, Some(FileType::Dir), true);
	}

	fn dir_doesnt_pass(&self, path: &str) {
		self.path_pass(path, Some(FileType::Dir), false);
	}

	fn unk_does_pass(&self, path: &str) {
		self.path_pass(path, None, true);
	}

	fn unk_doesnt_pass(&self, path: &str) {
		self.path_pass(path, None, false);
	}
}
// Path checking applies to both filterer implementations under test.
impl PathHarness for TaggedFilterer {}
impl PathHarness for IgnoreFilterer {}
/// Assertion harness for checking non-path tags and priorities against a filterer.
pub trait TaggedHarness {
	fn check_tag(&self, tag: Tag, priority: Priority) -> std::result::Result<bool, RuntimeError>;

	/// Assert that an event at `priority` passes (or fails), using a
	/// filesystem-source tag as a neutral carrier.
	fn priority_pass(&self, priority: Priority, pass: bool) {
		tracing::info!(?priority, ?pass, "check");
		let actual = self
			.check_tag(Tag::Source(Source::Filesystem), priority)
			.unwrap();
		assert_eq!(
			actual,
			pass,
			"{priority:?} (expected {})",
			if pass { "pass" } else { "fail" }
		);
	}

	fn priority_does_pass(&self, priority: Priority) {
		self.priority_pass(priority, true);
	}

	fn priority_doesnt_pass(&self, priority: Priority) {
		self.priority_pass(priority, false);
	}

	/// Assert that a single-tag event passes (or fails) at normal priority.
	fn tag_pass(&self, tag: Tag, pass: bool) {
		tracing::info!(?tag, ?pass, "check");
		let actual = self.check_tag(tag.clone(), Priority::Normal).unwrap();
		assert_eq!(
			actual,
			pass,
			"{tag:?} (expected {})",
			if pass { "pass" } else { "fail" }
		);
	}

	fn fek_does_pass(&self, fek: FileEventKind) {
		self.tag_pass(Tag::FileEventKind(fek), true);
	}

	fn fek_doesnt_pass(&self, fek: FileEventKind) {
		self.tag_pass(Tag::FileEventKind(fek), false);
	}

	fn source_does_pass(&self, source: Source) {
		self.tag_pass(Tag::Source(source), true);
	}

	fn source_doesnt_pass(&self, source: Source) {
		self.tag_pass(Tag::Source(source), false);
	}

	fn pid_does_pass(&self, pid: u32) {
		self.tag_pass(Tag::Process(pid), true);
	}

	fn pid_doesnt_pass(&self, pid: u32) {
		self.tag_pass(Tag::Process(pid), false);
	}

	fn signal_does_pass(&self, sig: Signal) {
		self.tag_pass(Tag::Signal(sig), true);
	}

	fn signal_doesnt_pass(&self, sig: Signal) {
		self.tag_pass(Tag::Signal(sig), false);
	}

	fn complete_does_pass(&self, exit: Option<ProcessEnd>) {
		self.tag_pass(Tag::ProcessCompletion(exit), true);
	}

	fn complete_doesnt_pass(&self, exit: Option<ProcessEnd>) {
		self.tag_pass(Tag::ProcessCompletion(exit), false);
	}
}
impl TaggedHarness for TaggedFilterer {
	/// Wrap the tag into a single-tag event and delegate to the filterer.
	fn check_tag(&self, tag: Tag, priority: Priority) -> std::result::Result<bool, RuntimeError> {
		self.check_event(
			&Event {
				tags: vec![tag],
				metadata: Default::default(),
			},
			priority,
		)
	}
}
/// Install a pretty tracing subscriber driven by the default env filter.
///
/// Safe to call repeatedly: if a global subscriber is already installed,
/// the error from `try_init` is discarded.
fn tracing_init() {
	use tracing_subscriber::{
		fmt::{format::FmtSpan, Subscriber},
		util::SubscriberInitExt,
		EnvFilter,
	};

	let subscriber = Subscriber::builder()
		.pretty()
		.with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
		.with_env_filter(EnvFilter::from_default_env())
		.finish();
	subscriber.try_init().ok();
}
/// Build an [`IgnoreFilter`] rooted at `origin` (relative to the cwd).
pub async fn ignore_filt(origin: &str, ignore_files: &[IgnoreFile]) -> IgnoreFilter {
	tracing_init();
	let base = canonicalize(".").await.unwrap().join(origin);
	IgnoreFilter::new(base, ignore_files)
		.await
		.expect("making filterer")
}
/// Build a [`TaggedFilterer`] rooted at the current directory with the given filters.
///
/// Panics (via `expect`) if the filterer cannot be created or the filters rejected.
pub async fn tagged_filt(filters: &[Filter]) -> Arc<TaggedFilterer> {
	// Initialise tracing before any async fs work, consistent with the other
	// constructors in this module (`ignore_filt`, `tagged_igfilt`), so the
	// canonicalize call is covered by the subscriber too.
	tracing_init();
	let origin = canonicalize(".").await.unwrap();
	let filterer = TaggedFilterer::new(origin.clone(), origin)
		.await
		.expect("creating filterer");
	filterer.add_filters(filters).await.expect("adding filters");
	filterer
}
/// Build a [`TaggedFilterer`] rooted at `origin` with the given ignore files loaded.
pub async fn tagged_igfilt(origin: &str, ignore_files: &[IgnoreFile]) -> Arc<TaggedFilterer> {
	tracing_init();
	let base = canonicalize(".").await.unwrap().join(origin);
	let filterer = TaggedFilterer::new(base.clone(), base)
		.await
		.expect("creating filterer");

	for file in ignore_files {
		tracing::info!(?file, "loading ignore file");
		filterer
			.add_ignore_file(file)
			.await
			.expect("adding ignore file");
	}

	filterer
}
/// Build a [`TaggedFilterer`] from ignore files plus filters loaded out of filter files.
pub async fn tagged_fffilt(
	origin: &str,
	ignore_files: &[IgnoreFile],
	filter_files: &[FilterFile],
) -> Arc<TaggedFilterer> {
	let filterer = tagged_igfilt(origin, ignore_files).await;

	let mut filters = Vec::new();
	for file in filter_files {
		tracing::info!(?file, "loading filter file");
		let loaded = file.load().await.expect("loading filter file");
		filters.extend(loaded);
	}

	filterer
		.add_filters(&filters)
		.await
		.expect("adding filters");

	filterer
}
/// Reference a named fixture under `tests/ignores` as an unscoped [`IgnoreFile`].
pub async fn ig_file(name: &str) -> IgnoreFile {
	let root = canonicalize(".").await.unwrap();
	IgnoreFile {
		path: root.join("tests").join("ignores").join(name),
		applies_in: None,
		applies_to: None,
	}
}
/// Reference a named fixture under `tests/ignores`, wrapped as a [`FilterFile`].
pub async fn ff_file(name: &str) -> FilterFile {
	FilterFile(ig_file(name).await)
}
/// Builder-style helpers for scoping ignore/filter files in tests.
pub trait Applies {
	/// Scope the file to apply only within `origin` (joined onto the cwd).
	fn applies_in(self, origin: &str) -> Self;
	/// Scope the file to apply only to the given project type.
	fn applies_to(self, project_type: ProjectType) -> Self;
}
impl Applies for IgnoreFile {
	fn applies_in(mut self, origin: &str) -> Self {
		// Scope is anchored at the canonicalized cwd joined with `origin`.
		self.applies_in = Some(std::fs::canonicalize(".").unwrap().join(origin));
		self
	}

	fn applies_to(mut self, project_type: ProjectType) -> Self {
		self.applies_to = Some(project_type);
		self
	}
}
impl Applies for FilterFile {
	// FilterFile is a newtype over IgnoreFile; delegate to the inner impl.
	fn applies_in(self, origin: &str) -> Self {
		let inner = self.0.applies_in(origin);
		Self(inner)
	}

	fn applies_to(self, project_type: ProjectType) -> Self {
		let inner = self.0.applies_to(project_type);
		Self(inner)
	}
}
/// Parse a filter expression, panicking on parse failure.
pub fn filter(expr: &str) -> Filter {
	Filter::from_str(expr).expect("parse filter")
}
/// Construct an unscoped, non-negated path filter using glob matching.
pub fn glob_filter(pat: &str) -> Filter {
	Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Glob,
		pat: Pattern::Glob(pat.to_string()),
		negate: false,
	}
}
/// Construct an unscoped, non-negated path filter using negative glob matching.
pub fn notglob_filter(pat: &str) -> Filter {
	Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::NotGlob,
		pat: Pattern::Glob(pat.to_string()),
		negate: false,
	}
}
/// Extension helpers for scoping a filter to the test origin.
pub trait FilterExt {
	/// Scope the filter to the current working directory.
	fn in_path(self) -> Self
	where
		Self: Sized,
	{
		self.in_subpath("")
	}

	/// Scope the filter to `sub` under the current working directory.
	fn in_subpath(self, sub: impl AsRef<Path>) -> Self;
}
impl FilterExt for Filter {
	fn in_subpath(mut self, sub: impl AsRef<Path>) -> Self {
		let base = std::fs::canonicalize(".").unwrap();
		self.in_path = Some(base.join(sub.as_ref()));
		self
	}
}

View File

@ -1,3 +0,0 @@
# comment
# blank line

View File

@ -1,2 +0,0 @@
type==dir
path*=prunes

View File

@ -1,11 +0,0 @@
Cargo.toml
package.json
*.gemspec
test-*
*.sw*
sources.*/
/output.*
**/possum
zebra/**
elep/**/hant
song/**/bird/

View File

@ -1,2 +0,0 @@
path=nah
!path=nah.yeah

View File

@ -1,5 +0,0 @@
path*!Cargo.toml
path*!package.json
path*!*.gemspec
path~!test-[^u]+
path~![.]sw[a-z]$

View File

@ -1,453 +0,0 @@
use std::num::{NonZeroI32, NonZeroI64};
use watchexec_events::{filekind::*, ProcessEnd, Source};
use watchexec_filterer_tagged::TaggedFilterer;
use watchexec_signals::Signal;
mod helpers;
use helpers::tagged::*;
#[tokio::test]
async fn empty_filter_passes_everything() {
let filterer = filt(&[]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
filterer.pid_does_pass(1234);
filterer.signal_does_pass(Signal::User1);
filterer.complete_does_pass(None);
filterer.complete_does_pass(Some(ProcessEnd::Success));
}
// Source is used as a relatively simple test case for common text-based ops, so
// these aren't repeated for the other tags, which instead focus on their own
// special characteristics.
#[tokio::test]
async fn source_exact() {
let filterer = filt(&[filter("source==keyboard")]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.source_doesnt_pass(Source::Mouse);
}
#[tokio::test]
async fn source_glob() {
let filterer = filt(&[filter("source*=*i*m*")]).await;
filterer.source_does_pass(Source::Filesystem);
filterer.source_does_pass(Source::Time);
filterer.source_doesnt_pass(Source::Internal);
}
#[tokio::test]
async fn source_regex() {
let filterer = filt(&[filter("source~=(keyboard|mouse)")]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.source_does_pass(Source::Mouse);
filterer.source_doesnt_pass(Source::Internal);
}
#[tokio::test]
async fn source_two_filters() {
let filterer = filt(&[filter("source*=*s*"), filter("source!=mouse")]).await;
filterer.source_doesnt_pass(Source::Mouse);
filterer.source_does_pass(Source::Filesystem);
}
#[tokio::test]
async fn source_allowlisting() {
// allowlisting is vastly easier to achieve with e.g. `source==mouse`
// but this pattern is nonetheless useful for more complex cases.
let filterer = filt(&[filter("source*!*"), filter("!source==mouse")]).await;
filterer.source_does_pass(Source::Mouse);
filterer.source_doesnt_pass(Source::Filesystem);
}
#[tokio::test]
async fn source_set() {
let f = filter("source:=keyboard,mouse");
assert_eq!(f, filter("source=keyboard,mouse"));
let filterer = filt(&[f]).await;
filterer.source_does_pass(Source::Keyboard);
filterer.source_does_pass(Source::Mouse);
filterer.source_doesnt_pass(Source::Internal);
let filterer = filt(&[filter("source:!keyboard,mouse")]).await;
filterer.source_doesnt_pass(Source::Keyboard);
filterer.source_doesnt_pass(Source::Mouse);
filterer.source_does_pass(Source::Internal);
}
#[tokio::test]
async fn fek_glob_level_one() {
let f = filter("kind*=Create(*)");
assert_eq!(f, filter("fek*=Create(*)"));
assert_eq!(f, filter("kind=Create(*)"));
assert_eq!(f, filter("fek=Create(*)"));
let filterer = filt(&[f]).await;
filterer.fek_does_pass(FileEventKind::Create(CreateKind::Any));
filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
}
#[tokio::test]
async fn fek_glob_level_two() {
let filterer = filt(&[filter("fek=Modify(Data(*))")]).await;
filterer.fek_does_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Other));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Metadata(
MetadataKind::Permissions,
)));
filterer.fek_doesnt_pass(FileEventKind::Create(CreateKind::Any));
}
#[tokio::test]
async fn fek_level_three() {
fn suite(filterer: &TaggedFilterer) {
filterer.fek_does_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Size)));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Other));
filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Metadata(
MetadataKind::Permissions,
)));
filterer.fek_doesnt_pass(FileEventKind::Create(CreateKind::Any));
}
suite(filt(&[filter("fek=Modify(Data(Content))")]).await.as_ref());
suite(filt(&[filter("fek==Modify(Data(Content))")]).await.as_ref());
}
#[tokio::test]
async fn pid_set_single() {
let f = filter("process:=1234");
assert_eq!(f, filter("pid:=1234"));
assert_eq!(f, filter("process=1234"));
assert_eq!(f, filter("pid=1234"));
let filterer = filt(&[f]).await;
filterer.pid_does_pass(1234);
filterer.pid_doesnt_pass(5678);
filterer.pid_doesnt_pass(12345);
filterer.pid_doesnt_pass(123);
}
#[tokio::test]
async fn pid_set_multiple() {
let filterer = filt(&[filter("pid=123,456")]).await;
filterer.pid_does_pass(123);
filterer.pid_does_pass(456);
filterer.pid_doesnt_pass(123456);
filterer.pid_doesnt_pass(12);
filterer.pid_doesnt_pass(23);
filterer.pid_doesnt_pass(45);
filterer.pid_doesnt_pass(56);
filterer.pid_doesnt_pass(1234);
filterer.pid_doesnt_pass(3456);
filterer.pid_doesnt_pass(4567);
filterer.pid_doesnt_pass(34567);
filterer.pid_doesnt_pass(0);
}
#[tokio::test]
async fn pid_equals() {
let f = filter("process==1234");
assert_eq!(f, filter("pid==1234"));
let filterer = filt(&[f]).await;
filterer.pid_does_pass(1234);
filterer.pid_doesnt_pass(5678);
filterer.pid_doesnt_pass(12345);
filterer.pid_doesnt_pass(123);
}
#[tokio::test]
async fn signal_set_single_without_sig() {
let f = filter("signal=INT");
assert_eq!(f, filter("sig=INT"));
assert_eq!(f, filter("signal:=INT"));
assert_eq!(f, filter("sig:=INT"));
let filterer = filt(&[f]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_single_with_sig() {
let filterer = filt(&[filter("signal:=SIGINT")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_without_sig() {
let filterer = filt(&[filter("sig:=INT,TERM")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_with_sig() {
let filterer = filt(&[filter("signal:=SIGINT,SIGTERM")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_mixed_sig() {
let filterer = filt(&[filter("sig:=SIGINT,TERM")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_equals_without_sig() {
let filterer = filt(&[filter("sig==INT")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_equals_with_sig() {
let filterer = filt(&[filter("signal==SIGINT")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_single_numbers() {
let filterer = filt(&[filter("signal:=2")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_multiple_numbers() {
let filterer = filt(&[filter("sig:=2,15")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_equals_numbers() {
let filterer = filt(&[filter("sig==2")]).await;
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_doesnt_pass(Signal::Hangup);
}
#[tokio::test]
async fn signal_set_all_mixed() {
let filterer = filt(&[filter("signal:=SIGHUP,INT,15")]).await;
filterer.signal_does_pass(Signal::Hangup);
filterer.signal_does_pass(Signal::Interrupt);
filterer.signal_does_pass(Signal::Terminate);
filterer.signal_doesnt_pass(Signal::User1);
}
#[tokio::test]
async fn complete_empty() {
let f = filter("complete=_");
assert_eq!(f, filter("complete*=_"));
assert_eq!(f, filter("exit=_"));
assert_eq!(f, filter("exit*=_"));
let filterer = filt(&[f]).await;
filterer.complete_does_pass(None);
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
}
#[tokio::test]
async fn complete_any() {
let filterer = filt(&[filter("complete=*")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Success));
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_does_pass(None);
}
#[tokio::test]
async fn complete_with_success() {
let filterer = filt(&[filter("complete*=success")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_continued() {
let filterer = filt(&[filter("complete*=continued")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Continued));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_exit_error() {
let filterer = filt(&[filter("complete*=error(1)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_any_exit_error() {
let filterer = filt(&[filter("complete*=error(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitError(
NonZeroI64::new(-12823912738).unwrap(),
)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_stop() {
let filterer = filt(&[filter("complete*=stop(19)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_any_stop() {
let filterer = filt(&[filter("complete*=stop(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(1).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::ExitStop(
NonZeroI32::new(-128239127).unwrap(),
)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_exception() {
	// NOTE(review): 0x4B53 == 19283 decimal, so this test implies exception
	// codes in filters are written in hex while ProcessEnd carries the
	// decimal value — confirm against the filter parser.
	let filterer = filt(&[filter("complete*=exception(4B53)")]).await;

	filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(19283).unwrap())));
	filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
	filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_any_exception() {
let filterer = filt(&[filter("complete*=exception(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(1).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(63).unwrap())));
filterer.complete_does_pass(Some(ProcessEnd::Exception(
NonZeroI32::new(-128239127).unwrap(),
)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_signal_with_sig() {
let filterer = filt(&[filter("complete*=signal(SIGINT)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_signal_without_sig() {
let filterer = filt(&[filter("complete*=signal(INT)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_specific_signal_number() {
let filterer = filt(&[filter("complete*=signal(2)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_any_signal() {
let filterer = filt(&[filter("complete*=signal(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Terminate)));
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Custom(123))));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn priority_auto() {
let filterer = filt(&[filter("priority=normal")]).await;
filterer.priority_doesnt_pass(Priority::Low);
filterer.priority_does_pass(Priority::Normal);
filterer.priority_doesnt_pass(Priority::High);
}
#[tokio::test]
async fn priority_set() {
let filterer = filt(&[filter("priority:=normal,high")]).await;
filterer.priority_doesnt_pass(Priority::Low);
filterer.priority_does_pass(Priority::Normal);
filterer.priority_does_pass(Priority::High);
}
#[tokio::test]
async fn priority_none() {
let filterer = filt(&[]).await;
filterer.priority_does_pass(Priority::Low);
filterer.priority_does_pass(Priority::Normal);
filterer.priority_does_pass(Priority::High);
}

View File

@ -1,226 +0,0 @@
use std::{collections::HashSet, str::FromStr};
use watchexec_filterer_tagged::{Filter, Matcher, Op, Pattern, Regex, TaggedFiltererError};
mod helpers;
use helpers::tagged::*;
#[test]
fn empty_filter() {
assert!(matches!(
Filter::from_str(""),
Err(TaggedFiltererError::Parse { .. })
));
}
#[test]
fn only_bang() {
assert!(matches!(
Filter::from_str("!"),
Err(TaggedFiltererError::Parse { .. })
));
}
#[test]
fn no_op() {
assert!(matches!(
Filter::from_str("foobar"),
Err(TaggedFiltererError::Parse { .. })
));
}
#[test]
fn path_auto_op() {
assert_eq!(
filter("path=foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("foo".to_string()),
negate: false,
}
);
}
#[test]
fn fek_auto_op() {
assert_eq!(
filter("fek=foo"),
Filter {
in_path: None,
on: Matcher::FileEventKind,
op: Op::Glob,
pat: Pattern::Glob("foo".to_string()),
negate: false,
}
);
}
#[test]
fn other_auto_op() {
assert_eq!(
filter("type=foo"),
Filter {
in_path: None,
on: Matcher::FileType,
op: Op::InSet,
pat: Pattern::Set(HashSet::from(["foo".to_string()])),
negate: false,
}
);
}
#[test]
fn op_equal() {
assert_eq!(
filter("path==foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Equal,
pat: Pattern::Exact("foo".to_string()),
negate: false,
}
);
}
#[test]
fn op_not_equal() {
assert_eq!(
filter("path!=foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotEqual,
pat: Pattern::Exact("foo".to_string()),
negate: false,
}
);
}
#[test]
fn op_regex() {
assert_eq!(
filter("path~=^fo+$"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Regex,
pat: Pattern::Regex(Regex::new("^fo+$").unwrap()),
negate: false,
}
);
}
#[test]
fn op_not_regex() {
assert_eq!(
filter("path~!f(o|al)+"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotRegex,
pat: Pattern::Regex(Regex::new("f(o|al)+").unwrap()),
negate: false,
}
);
}
#[test]
fn op_glob() {
assert_eq!(
filter("path*=**/foo"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("**/foo".to_string()),
negate: false,
}
);
}
#[test]
fn op_not_glob() {
assert_eq!(
filter("path*!foo.*"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotGlob,
pat: Pattern::Glob("foo.*".to_string()),
negate: false,
}
);
}
#[test]
fn op_in_set() {
assert_eq!(
filter("path:=foo,bar"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::InSet,
pat: Pattern::Set(HashSet::from(["foo".to_string(), "bar".to_string()])),
negate: false,
}
);
}
#[test]
fn op_not_in_set() {
assert_eq!(
filter("path:!baz,qux"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::NotInSet,
pat: Pattern::Set(HashSet::from(["baz".to_string(), "qux".to_string()])),
negate: false,
}
);
}
#[test]
fn quoted_single() {
assert_eq!(
filter("path='blanche neige'"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("blanche neige".to_string()),
negate: false,
}
);
}
#[test]
fn quoted_double() {
assert_eq!(
filter("path=\"et les sept nains\""),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Glob,
pat: Pattern::Glob("et les sept nains".to_string()),
negate: false,
}
);
}
#[test]
fn negate() {
assert_eq!(
filter("!path~=^f[om]+$"),
Filter {
in_path: None,
on: Matcher::Path,
op: Op::Regex,
pat: Pattern::Regex(Regex::new("^f[om]+$").unwrap()),
negate: true,
}
);
}

View File

@ -1,454 +0,0 @@
use std::sync::Arc;
use watchexec_filterer_tagged::TaggedFilterer;
mod helpers;
use helpers::tagged::*;
#[tokio::test]
async fn empty_filter_passes_everything() {
let filterer = filt(&[]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/test/Cargo.toml");
filterer.dir_does_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples/carrots/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_does_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn exact_filename() {
let filterer = filt(&[glob_filter("Cargo.toml")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("/test/foo/bar/Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("/test/Cargo.toml");
}
#[tokio::test]
async fn exact_filenames_multiple() {
let filterer = filt(&[glob_filter("Cargo.toml"), glob_filter("package.json")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("/test/foo/bar/Cargo.toml");
filterer.file_does_pass("package.json");
filterer.file_does_pass("/test/foo/bar/package.json");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_doesnt_pass("package.toml");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("/test/Cargo.toml");
filterer.dir_does_pass("/test/package.json");
}
#[tokio::test]
async fn glob_single_final_ext_star() {
let filterer = filt(&[glob_filter("Cargo.*")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("Cargo.toml");
}
#[tokio::test]
async fn glob_star_trailing_slash() {
let filterer = filt(&[glob_filter("Cargo.*/")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_doesnt_pass("Gemfile.toml");
filterer.file_doesnt_pass("FINAL-FINAL.docx");
filterer.dir_doesnt_pass("/a/folder");
filterer.dir_does_pass("Cargo.toml");
filterer.unk_doesnt_pass("Cargo.toml");
}
#[tokio::test]
async fn glob_star_leading_slash() {
let filterer = filt(&[glob_filter("/Cargo.*")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.dir_does_pass("Cargo.toml");
filterer.unk_does_pass("Cargo.toml");
filterer.file_doesnt_pass("foo/Cargo.toml");
filterer.dir_doesnt_pass("foo/Cargo.toml");
}
#[tokio::test]
async fn glob_leading_double_star() {
let filterer = filt(&[glob_filter("**/possum")]).await;
filterer.file_does_pass("possum");
filterer.file_does_pass("foo/bar/possum");
filterer.file_does_pass("/foo/bar/possum");
filterer.dir_does_pass("possum");
filterer.dir_does_pass("foo/bar/possum");
filterer.dir_does_pass("/foo/bar/possum");
filterer.file_doesnt_pass("rat");
filterer.file_doesnt_pass("foo/bar/rat");
filterer.file_doesnt_pass("/foo/bar/rat");
}
#[tokio::test]
async fn glob_trailing_double_star() {
let filterer = filt(&[glob_filter("possum/**")]).await;
filterer.file_doesnt_pass("possum");
filterer.file_does_pass("possum/foo/bar");
filterer.file_doesnt_pass("/possum/foo/bar");
filterer.file_does_pass("/test/possum/foo/bar");
filterer.dir_doesnt_pass("possum");
filterer.dir_doesnt_pass("foo/bar/possum");
filterer.dir_doesnt_pass("/foo/bar/possum");
filterer.dir_does_pass("possum/foo/bar");
filterer.dir_doesnt_pass("/possum/foo/bar");
filterer.dir_does_pass("/test/possum/foo/bar");
filterer.file_doesnt_pass("rat");
filterer.file_doesnt_pass("foo/bar/rat");
filterer.file_doesnt_pass("/foo/bar/rat");
}
#[tokio::test]
async fn glob_middle_double_star() {
let filterer = filt(&[glob_filter("apples/**/oranges")]).await;
filterer.dir_doesnt_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_does_pass("apples/carrots/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn glob_double_star_trailing_slash() {
let filterer = filt(&[glob_filter("apples/**/oranges/")]).await;
filterer.dir_doesnt_pass("/a/folder");
filterer.file_doesnt_pass("apples/carrots/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_does_pass("apples/carrots/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.unk_doesnt_pass("apples/carrots/oranges");
filterer.unk_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.unk_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn ignore_exact_filename() {
let filterer = filt(&[notglob_filter("Cargo.toml")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("/test/Cargo.toml");
}
#[tokio::test]
async fn ignore_exact_filenames_multiple() {
let filterer = filt(&[notglob_filter("Cargo.toml"), notglob_filter("package.json")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
filterer.file_doesnt_pass("package.json");
filterer.file_doesnt_pass("/test/foo/bar/package.json");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("package.toml");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("/test/Cargo.toml");
filterer.dir_doesnt_pass("/test/package.json");
}
#[tokio::test]
async fn ignore_glob_single_final_ext_star() {
let filterer = filt(&[notglob_filter("Cargo.*")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("Cargo.toml");
}
#[tokio::test]
async fn ignore_glob_star_trailing_slash() {
let filterer = filt(&[notglob_filter("Cargo.*/")]).await;
filterer.file_does_pass("Cargo.toml");
filterer.file_does_pass("Cargo.json");
filterer.file_does_pass("Gemfile.toml");
filterer.file_does_pass("FINAL-FINAL.docx");
filterer.dir_does_pass("/a/folder");
filterer.dir_doesnt_pass("Cargo.toml");
filterer.unk_does_pass("Cargo.toml");
}
#[tokio::test]
async fn ignore_glob_star_leading_slash() {
let filterer = filt(&[notglob_filter("/Cargo.*")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("Cargo.json");
filterer.dir_doesnt_pass("Cargo.toml");
filterer.unk_doesnt_pass("Cargo.toml");
filterer.file_does_pass("foo/Cargo.toml");
filterer.dir_does_pass("foo/Cargo.toml");
}
#[tokio::test]
async fn ignore_glob_leading_double_star() {
let filterer = filt(&[notglob_filter("**/possum")]).await;
filterer.file_doesnt_pass("possum");
filterer.file_doesnt_pass("foo/bar/possum");
filterer.file_doesnt_pass("/foo/bar/possum");
filterer.dir_doesnt_pass("possum");
filterer.dir_doesnt_pass("foo/bar/possum");
filterer.dir_doesnt_pass("/foo/bar/possum");
filterer.file_does_pass("rat");
filterer.file_does_pass("foo/bar/rat");
filterer.file_does_pass("/foo/bar/rat");
}
#[tokio::test]
async fn ignore_glob_trailing_double_star() {
let filterer = filt(&[notglob_filter("possum/**")]).await;
filterer.file_does_pass("possum");
filterer.file_doesnt_pass("possum/foo/bar");
filterer.file_does_pass("/possum/foo/bar");
filterer.file_doesnt_pass("/test/possum/foo/bar");
filterer.dir_does_pass("possum");
filterer.dir_does_pass("foo/bar/possum");
filterer.dir_does_pass("/foo/bar/possum");
filterer.dir_doesnt_pass("possum/foo/bar");
filterer.dir_does_pass("/possum/foo/bar");
filterer.dir_doesnt_pass("/test/possum/foo/bar");
filterer.file_does_pass("rat");
filterer.file_does_pass("foo/bar/rat");
filterer.file_does_pass("/foo/bar/rat");
}
#[tokio::test]
async fn ignore_glob_middle_double_star() {
let filterer = filt(&[notglob_filter("apples/**/oranges")]).await;
filterer.dir_does_pass("/a/folder");
filterer.file_doesnt_pass("apples/carrots/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.file_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_doesnt_pass("apples/carrots/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_doesnt_pass("apples/oranges/bananas");
filterer.dir_doesnt_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn ignore_glob_double_star_trailing_slash() {
let filterer = filt(&[notglob_filter("apples/**/oranges/")]).await;
filterer.dir_does_pass("/a/folder");
filterer.file_does_pass("apples/carrots/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.dir_doesnt_pass("apples/carrots/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.unk_does_pass("apples/carrots/oranges");
filterer.unk_does_pass("apples/carrots/cauliflowers/oranges");
filterer.unk_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
// different from globset/v1 behaviour, but correct:
filterer.file_doesnt_pass("apples/oranges/bananas");
filterer.dir_doesnt_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn ignores_take_precedence() {
let filterer = filt(&[
glob_filter("*.docx"),
glob_filter("*.toml"),
glob_filter("*.json"),
notglob_filter("*.toml"),
notglob_filter("*.json"),
])
.await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/test/foo/bar/Cargo.toml");
filterer.file_doesnt_pass("package.json");
filterer.file_doesnt_pass("/test/foo/bar/package.json");
filterer.dir_doesnt_pass("/test/Cargo.toml");
filterer.dir_doesnt_pass("/test/package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
#[tokio::test]
async fn scopes_global() {
let filterer = filt(&[notglob_filter("*.toml")]).await;
filterer.file_doesnt_pass("Cargo.toml");
filterer.dir_doesnt_pass("Cargo.toml");
filterer.file_doesnt_pass("/outside/Cargo.toml");
filterer.dir_doesnt_pass("/outside/Cargo.toml");
filterer.file_does_pass("/outside/package.json");
filterer.dir_does_pass("/outside/package.json");
filterer.file_does_pass("package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
#[tokio::test]
async fn scopes_local() {
let filterer = filt(&[notglob_filter("*.toml").in_path()]).await;
filterer.file_doesnt_pass("/test/Cargo.toml");
filterer.dir_doesnt_pass("/test/Cargo.toml");
filterer.file_does_pass("/outside/Cargo.toml");
filterer.dir_does_pass("/outside/Cargo.toml");
filterer.file_does_pass("/outside/package.json");
filterer.dir_does_pass("/outside/package.json");
filterer.file_does_pass("package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
#[tokio::test]
async fn scopes_sublocal() {
let filterer = filt(&[notglob_filter("*.toml").in_subpath("src")]).await;
filterer.file_doesnt_pass("/test/src/Cargo.toml");
filterer.dir_doesnt_pass("/test/src/Cargo.toml");
filterer.file_does_pass("/test/Cargo.toml");
filterer.dir_does_pass("/test/Cargo.toml");
filterer.file_does_pass("/test/tests/Cargo.toml");
filterer.dir_does_pass("/test/tests/Cargo.toml");
filterer.file_does_pass("/outside/Cargo.toml");
filterer.dir_does_pass("/outside/Cargo.toml");
filterer.file_does_pass("/outside/package.json");
filterer.dir_does_pass("/outside/package.json");
filterer.file_does_pass("package.json");
filterer.file_does_pass("FINAL-FINAL.docx");
}
// The following tests check that the "buggy"/"confusing" watchexec v1 behaviour
// is no longer present.
fn watchexec_v1_confusing_suite(filterer: Arc<TaggedFilterer>) {
filterer.file_does_pass("apples");
filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("apples/oranges/bananas");
filterer.dir_does_pass("apples");
filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("raw-prunes");
filterer.dir_does_pass("raw-prunes");
filterer.file_does_pass("raw-prunes/carrots/cauliflowers/oranges");
filterer.file_does_pass("raw-prunes/carrots/cauliflowers/artichokes/oranges");
filterer.file_does_pass("raw-prunes/oranges/bananas");
filterer.dir_does_pass("raw-prunes/carrots/cauliflowers/oranges");
filterer.dir_does_pass("raw-prunes/carrots/cauliflowers/artichokes/oranges");
filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/oranges");
filterer.dir_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges");
filterer.file_doesnt_pass("prunes/carrots/cauliflowers/oranges");
filterer.file_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges");
filterer.file_doesnt_pass("prunes/oranges/bananas");
}
#[tokio::test]
async fn ignore_folder_with_bare_match() {
let filterer = filt(&[notglob_filter("prunes").in_path()]).await;
filterer.file_doesnt_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
#[tokio::test]
async fn ignore_folder_with_bare_and_leading_slash() {
let filterer = filt(&[notglob_filter("/prunes").in_path()]).await;
filterer.file_doesnt_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
#[tokio::test]
async fn ignore_folder_with_bare_and_trailing_slash() {
let filterer = filt(&[notglob_filter("prunes/").in_path()]).await;
filterer.file_does_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
#[tokio::test]
async fn ignore_folder_with_only_double_double_glob() {
let filterer = filt(&[notglob_filter("**/prunes/**").in_path()]).await;
filterer.file_does_pass("prunes");
filterer.dir_does_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}
#[tokio::test]
async fn ignore_folder_with_double_and_double_double_globs() {
let filterer = filt(&[
notglob_filter("**/prunes").in_path(),
notglob_filter("**/prunes/**").in_path(),
])
.await;
filterer.file_doesnt_pass("prunes");
filterer.dir_doesnt_pass("prunes");
watchexec_v1_confusing_suite(filterer);
}

View File

@ -2,6 +2,8 @@
## Next (YYYY-MM-DD)
## v3.0.0 (2024-04-20)
- Deps: gix-config 0.36
- Deps: miette 7

View File

@ -1,6 +1,6 @@
[package]
name = "ignore-files"
version = "2.1.0"
version = "3.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0"

View File

@ -2,6 +2,8 @@
## Next (YYYY-MM-DD)
## v4.0.0 (2024-04-20)
- Deps: replace command-group with process-wrap (in supervisor, but has flow-on effects)
- Deps: miette 7
- Deps: nix 0.28

View File

@ -1,6 +1,6 @@
[package]
name = "watchexec"
version = "3.0.1"
version = "4.0.0"
authors = ["Félix Saparelli <felix@passcod.name>", "Matt Green <mattgreenrocks@gmail.com>"]
license = "Apache-2.0"
@ -31,7 +31,7 @@ version = "8.0.0"
features = ["tokio1"]
[dependencies.watchexec-events]
version = "2.0.1"
version = "3.0.0"
path = "../events"
[dependencies.watchexec-signals]
@ -39,11 +39,11 @@ version = "3.0.0"
path = "../signals"
[dependencies.watchexec-supervisor]
version = "1.0.3"
version = "2.0.0"
path = "../supervisor"
[dependencies.ignore-files]
version = "2.1.0"
version = "3.0.0"
path = "../ignore-files"
[dependencies.project-origins]

View File

@ -1,6 +1,6 @@
pre-release-commit-message = "release: lib v{{version}}"
tag-prefix = "lib-"
tag-message = "watchexec-lib {{version}}"
tag-prefix = "watchexec-"
tag-message = "watchexec {{version}}"
[[pre-release-replacements]]
file = "CHANGELOG.md"

View File

@ -55,13 +55,14 @@ async fn imp_worker(
debug!("launching unix signal worker");
macro_rules! listen {
($sig:ident) => {{
trace!(kind=%stringify!($sig), "listening for unix signal");
signal(SignalKind::$sig()).map_err(|err| CriticalError::IoError {
about: concat!("setting ", stringify!($sig), " signal listener"), err
})?
}}
}
($sig:ident, $signum:expr) => {{
trace!(kind=%stringify!($sig), "listening for unix signal");
signal($signum).map_err(|err| CriticalError::IoError {
about: concat!("setting ", stringify!($sig), " signal listener"), err
})?
}};
($sig:ident) => (listen!($sig, SignalKind::$sig()));
}
let mut s_hangup = listen!(hangup);
let mut s_interrupt = listen!(interrupt);
@ -70,6 +71,30 @@ async fn imp_worker(
let mut s_user1 = listen!(user_defined1);
let mut s_user2 = listen!(user_defined2);
// TODO: option to customise set of signals being listened to, so we can safely listen to sigstop only when requested
let mut s_tstp = if let Some(signum) = Signal::TerminalSuspend.to_nix().map(|s| s as i32) {
listen!(terminal_suspend, SignalKind::from_raw(signum))
} else {
signal(SignalKind::from_raw(9)).map_err(|err| CriticalError::IoError {
about: concat!("setting unreceivable signal listener"), err
})?
};
let mut s_stop = if let Some(signum) = Signal::Suspend.to_nix().map(|s| s as i32) {
listen!(suspend, SignalKind::from_raw(signum))
} else {
signal(SignalKind::from_raw(9)).map_err(|err| CriticalError::IoError {
about: concat!("setting unreceivable signal listener"), err
})?
};
let mut s_cont = if let Some(signum) = Signal::Continue.to_nix().map(|s| s as i32) {
listen!(r#continue, SignalKind::from_raw(signum))
} else {
signal(SignalKind::from_raw(9)).map_err(|err| CriticalError::IoError {
about: concat!("setting unreceivable signal listener"), err
})?
};
loop {
let sig = select!(
_ = s_hangup.recv() => Signal::Hangup,
@ -78,6 +103,9 @@ async fn imp_worker(
_ = s_terminate.recv() => Signal::Terminate,
_ = s_user1.recv() => Signal::User1,
_ = s_user2.recv() => Signal::User2,
_ = s_tstp.recv() => Signal::TerminalSuspend,
_ = s_stop.recv() => Signal::Suspend,
_ = s_cont.recv() => Signal::Continue,
);
debug!(?sig, "received unix signal");

View File

@ -10,6 +10,7 @@
## v2.1.0 (2023-12-09)
- Derive `Hash` for `Signal`.
- Add `Continue`, `Suspend`, and `TerminalSuspend` as first-class signals.
## v2.0.0 (2023-11-29)

View File

@ -1,5 +1,5 @@
pre-release-commit-message = "release: signals v{{version}}"
tag-prefix = "signals-"
tag-prefix = "watchexec-signals-"
tag-message = "watchexec-signals {{version}}"
[[pre-release-replacements]]

View File

@ -85,6 +85,27 @@ pub enum Signal {
/// This signal is generally used to reload configuration.
User2,
/// Sent to a process to unsuspend it.
///
/// On Unix, this is `SIGCONT`. On Windows, it is ignored.
///
/// See also [`Suspend`](Signal::Suspend) and [`TerminalSuspend`][Signal::TerminalSuspend].
Continue,
/// Indicate that the process should suspend itself.
///
/// On Unix, this is `SIGSTOP`. On Windows, it is ignored.
///
/// See also [`TerminalSuspend`][Signal::TerminalSuspend].
Suspend,
/// Indicate that the process should suspend itself (issued from the terminal).
///
/// On Unix, this is `SIGTSTP`. On Windows, it is ignored.
///
/// See also [`Suspend`][Signal::Suspend].
TerminalSuspend,
/// Indicate using a custom signal.
///
/// Internally, this is converted to a [`nix::Signal`](https://docs.rs/nix/*/nix/sys/signal/enum.Signal.html)
@ -137,6 +158,9 @@ impl Signal {
Self::Terminate => Some(NixSignal::SIGTERM),
Self::User1 => Some(NixSignal::SIGUSR1),
Self::User2 => Some(NixSignal::SIGUSR2),
Self::Suspend => Some(NixSignal::SIGSTOP),
Self::TerminalSuspend => Some(NixSignal::SIGTSTP),
Self::Continue => Some(NixSignal::SIGCONT),
Self::Custom(sig) => NixSignal::try_from(sig).ok(),
}
}
@ -154,6 +178,9 @@ impl Signal {
NixSignal::SIGTERM => Self::Terminate,
NixSignal::SIGUSR1 => Self::User1,
NixSignal::SIGUSR2 => Self::User2,
NixSignal::SIGSTOP => Self::Suspend,
NixSignal::SIGTSTP => Self::TerminalSuspend,
NixSignal::SIGCONT => Self::Continue,
sig => Self::Custom(sig as _),
}
}
@ -172,6 +199,9 @@ impl From<i32> for Signal {
10 => Self::User1,
12 => Self::User2,
15 => Self::Terminate,
18 => Self::Continue,
19 => Self::Suspend,
20 => Self::TerminalSuspend,
_ => Self::Custom(raw),
}
}
@ -228,6 +258,9 @@ impl Signal {
"TERM" | "SIGTERM" | "15" => Ok(Self::Terminate),
"USR1" | "SIGUSR1" | "10" => Ok(Self::User1),
"USR2" | "SIGUSR2" | "12" => Ok(Self::User2),
"CONT" | "SIGCONT" | "18" => Ok(Self::Continue),
"STOP" | "SIGSTOP" | "19" => Ok(Self::Suspend),
"TSTP" | "SIGTSTP" | "20" => Ok(Self::TerminalSuspend),
number => match i32::from_str(number) {
Ok(int) => Ok(Self::Custom(int)),
Err(_) => Err(SignalParseError::new(s, "unsupported signal")),
@ -323,6 +356,9 @@ impl fmt::Display for Signal {
(Self::Terminate, true) => "CTRL-BREAK",
(Self::User1, _) => "SIGUSR1",
(Self::User2, _) => "SIGUSR2",
(Self::Continue, _) => "SIGCONT",
(Self::Suspend, _) => "SIGSTOP",
(Self::TerminalSuspend, _) => "SIGTSTP",
(Self::Custom(n), _) => {
return write!(f, "{n}");
}
@ -359,6 +395,12 @@ mod serde_support {
User1,
#[serde(rename = "SIGUSR2")]
User2,
#[serde(rename = "SIGCONT")]
Continue,
#[serde(rename = "SIGSTOP")]
Suspend,
#[serde(rename = "SIGTSTP")]
TerminalSuspend,
}
impl From<Signal> for SerdeSignal {
@ -371,6 +413,9 @@ mod serde_support {
Signal::User1 => Self::Named(NamedSignal::User1),
Signal::User2 => Self::Named(NamedSignal::User2),
Signal::ForceStop => Self::Named(NamedSignal::ForceStop),
Signal::Continue => Self::Named(NamedSignal::Continue),
Signal::Suspend => Self::Named(NamedSignal::Suspend),
Signal::TerminalSuspend => Self::Named(NamedSignal::TerminalSuspend),
Signal::Custom(number) => Self::Number(number),
}
}
@ -386,6 +431,9 @@ mod serde_support {
SerdeSignal::Named(NamedSignal::Terminate) => Self::Terminate,
SerdeSignal::Named(NamedSignal::User1) => Self::User1,
SerdeSignal::Named(NamedSignal::User2) => Self::User2,
SerdeSignal::Named(NamedSignal::Continue) => Self::Continue,
SerdeSignal::Named(NamedSignal::Suspend) => Self::Suspend,
SerdeSignal::Named(NamedSignal::TerminalSuspend) => Self::TerminalSuspend,
SerdeSignal::Number(number) => Self::Custom(number),
}
}

View File

@ -2,6 +2,8 @@
## Next (YYYY-MM-DD)
## v2.0.0 (2024-04-20)
- Deps: replace command-group with process-wrap
- Deps: nix 0.28

View File

@ -1,6 +1,6 @@
[package]
name = "watchexec-supervisor"
version = "1.0.3"
version = "2.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0 OR MIT"
@ -28,7 +28,7 @@ default-features = false
features = ["macros", "process", "rt", "sync", "time"]
[dependencies.watchexec-events]
version = "2.0.1"
version = "3.0.0"
default-features = false
path = "../events"

View File

@ -1,5 +1,5 @@
pre-release-commit-message = "release: supervisor v{{version}}"
tag-prefix = "supervisor-"
tag-prefix = "watchexec-supervisor-"
tag-message = "watchexec-supervisor {{version}}"
[[pre-release-replacements]]

View File

@ -4,7 +4,7 @@
.SH NAME
watchexec \- Execute commands when watched files change
.SH SYNOPSIS
\fBwatchexec\fR [\fB\-w\fR|\fB\-\-watch\fR] [\fB\-c\fR|\fB\-\-clear\fR] [\fB\-o\fR|\fB\-\-on\-busy\-update\fR] [\fB\-r\fR|\fB\-\-restart\fR] [\fB\-s\fR|\fB\-\-signal\fR] [\fB\-\-stop\-signal\fR] [\fB\-\-stop\-timeout\fR] [\fB\-\-map\-signal\fR] [\fB\-d\fR|\fB\-\-debounce\fR] [\fB\-\-stdin\-quit\fR] [\fB\-\-no\-vcs\-ignore\fR] [\fB\-\-no\-project\-ignore\fR] [\fB\-\-no\-global\-ignore\fR] [\fB\-\-no\-default\-ignore\fR] [\fB\-\-no\-discover\-ignore\fR] [\fB\-\-ignore\-nothing\fR] [\fB\-p\fR|\fB\-\-postpone\fR] [\fB\-\-delay\-run\fR] [\fB\-\-poll\fR] [\fB\-\-shell\fR] [\fB\-n \fR] [\fB\-\-emit\-events\-to\fR] [\fB\-\-only\-emit\-events\fR] [\fB\-E\fR|\fB\-\-env\fR] [\fB\-\-no\-process\-group\fR] [\fB\-N\fR|\fB\-\-notify\fR] [\fB\-\-color\fR] [\fB\-\-timings\fR] [\fB\-q\fR|\fB\-\-quiet\fR] [\fB\-\-bell\fR] [\fB\-\-project\-origin\fR] [\fB\-\-workdir\fR] [\fB\-e\fR|\fB\-\-exts\fR] [\fB\-f\fR|\fB\-\-filter\fR] [\fB\-\-filter\-file\fR] [\fB\-j\fR|\fB\-\-filter\-prog\fR] [\fB\-i\fR|\fB\-\-ignore\fR] [\fB\-\-ignore\-file\fR] [\fB\-\-fs\-events\fR] [\fB\-\-no\-meta\fR] [\fB\-\-print\-events\fR] [\fB\-v\fR|\fB\-\-verbose\fR]... [\fB\-\-log\-file\fR] [\fB\-\-manual\fR] [\fB\-\-completions\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fICOMMAND\fR]
\fBwatchexec\fR [\fB\-w\fR|\fB\-\-watch\fR] [\fB\-c\fR|\fB\-\-clear\fR] [\fB\-o\fR|\fB\-\-on\-busy\-update\fR] [\fB\-r\fR|\fB\-\-restart\fR] [\fB\-s\fR|\fB\-\-signal\fR] [\fB\-\-stop\-signal\fR] [\fB\-\-stop\-timeout\fR] [\fB\-\-map\-signal\fR] [\fB\-d\fR|\fB\-\-debounce\fR] [\fB\-\-stdin\-quit\fR] [\fB\-\-no\-vcs\-ignore\fR] [\fB\-\-no\-project\-ignore\fR] [\fB\-\-no\-global\-ignore\fR] [\fB\-\-no\-default\-ignore\fR] [\fB\-\-no\-discover\-ignore\fR] [\fB\-\-ignore\-nothing\fR] [\fB\-p\fR|\fB\-\-postpone\fR] [\fB\-\-delay\-run\fR] [\fB\-\-poll\fR] [\fB\-\-shell\fR] [\fB\-n \fR] [\fB\-\-emit\-events\-to\fR] [\fB\-\-only\-emit\-events\fR] [\fB\-E\fR|\fB\-\-env\fR] [\fB\-\-no\-process\-group\fR] [\fB\-\-wrap\-process\fR] [\fB\-N\fR|\fB\-\-notify\fR] [\fB\-\-color\fR] [\fB\-\-timings\fR] [\fB\-q\fR|\fB\-\-quiet\fR] [\fB\-\-bell\fR] [\fB\-\-project\-origin\fR] [\fB\-\-workdir\fR] [\fB\-e\fR|\fB\-\-exts\fR] [\fB\-f\fR|\fB\-\-filter\fR] [\fB\-\-filter\-file\fR] [\fB\-j\fR|\fB\-\-filter\-prog\fR] [\fB\-i\fR|\fB\-\-ignore\fR] [\fB\-\-ignore\-file\fR] [\fB\-\-fs\-events\fR] [\fB\-\-no\-meta\fR] [\fB\-\-print\-events\fR] [\fB\-v\fR|\fB\-\-verbose\fR]... [\fB\-\-log\-file\fR] [\fB\-\-manual\fR] [\fB\-\-completions\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fICOMMAND\fR]
.SH DESCRIPTION
Execute commands when watched files change.
.PP
@ -370,6 +370,17 @@ Use key=value syntax. Multiple variables can be set by repeating the option.
Don\*(Aqt use a process group
By default, Watchexec will run the command in a process group, so that signals and terminations are sent to all processes in the group. Sometimes that\*(Aqs not what you want, and you can disable the behaviour with this option.
Deprecated, use \*(Aq\-\-wrap\-process=none\*(Aq instead.
.TP
\fB\-\-wrap\-process\fR=\fIMODE\fR [default: group]
Configure how the process is wrapped
By default, Watchexec will run the command in a process group in Unix, and in a Job Object in Windows.
Some Unix programs prefer running in a session, while others do not work in a process group.
Use \*(Aqgroup\*(Aq to use a process group, \*(Aqsession\*(Aq to use a process session, and \*(Aqnone\*(Aq to run the command directly. On Windows, either of \*(Aqgroup\*(Aq or \*(Aqsession\*(Aq will use a Job Object.
.TP
\fB\-N\fR, \fB\-\-notify\fR
Alert when commands start and end

View File

@ -14,15 +14,16 @@ watchexec - Execute commands when watched files change
\[**-p**\|**\--postpone**\] \[**\--delay-run**\] \[**\--poll**\]
\[**\--shell**\] \[**-n **\] \[**\--emit-events-to**\]
\[**\--only-emit-events**\] \[**-E**\|**\--env**\]
\[**\--no-process-group**\] \[**-N**\|**\--notify**\] \[**\--color**\]
\[**\--timings**\] \[**-q**\|**\--quiet**\] \[**\--bell**\]
\[**\--project-origin**\] \[**\--workdir**\] \[**-e**\|**\--exts**\]
\[**-f**\|**\--filter**\] \[**\--filter-file**\]
\[**-j**\|**\--filter-prog**\] \[**-i**\|**\--ignore**\]
\[**\--ignore-file**\] \[**\--fs-events**\] \[**\--no-meta**\]
\[**\--print-events**\] \[**-v**\|**\--verbose**\]\...
\[**\--log-file**\] \[**\--manual**\] \[**\--completions**\]
\[**-h**\|**\--help**\] \[**-V**\|**\--version**\] \[*COMMAND*\]
\[**\--no-process-group**\] \[**\--wrap-process**\]
\[**-N**\|**\--notify**\] \[**\--color**\] \[**\--timings**\]
\[**-q**\|**\--quiet**\] \[**\--bell**\] \[**\--project-origin**\]
\[**\--workdir**\] \[**-e**\|**\--exts**\] \[**-f**\|**\--filter**\]
\[**\--filter-file**\] \[**-j**\|**\--filter-prog**\]
\[**-i**\|**\--ignore**\] \[**\--ignore-file**\] \[**\--fs-events**\]
\[**\--no-meta**\] \[**\--print-events**\]
\[**-v**\|**\--verbose**\]\... \[**\--log-file**\] \[**\--manual**\]
\[**\--completions**\] \[**-h**\|**\--help**\]
\[**-V**\|**\--version**\] \[*COMMAND*\]
# DESCRIPTION
@ -518,6 +519,22 @@ signals and terminations are sent to all processes in the group.
Sometimes thats not what you want, and you can disable the behaviour
with this option.
Deprecated, use \--wrap-process=none instead.
**\--wrap-process**=*MODE* \[default: group\]
: Configure how the process is wrapped
By default, Watchexec will run the command in a process group in Unix,
and in a Job Object in Windows.
Some Unix programs prefer running in a session, while others do not work
in a process group.
Use group to use a process group, session to use a process session, and
none to run the command directly. On Windows, either of group or session
will use a Job Object.
**-N**, **\--notify**
: Alert when commands start and end