Remove tagged filterer

This commit is contained in:
Félix Saparelli 2024-04-21 00:32:01 +12:00
parent 1c47ffbe1a
commit 22b58a66ab
No known key found for this signature in database
23 changed files with 0 additions and 3052 deletions

32
Cargo.lock generated
View File

@ -3800,15 +3800,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "unicase"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
dependencies = [
"version_check",
]
[[package]]
name = "unicode-bidi"
version = "0.3.15"
@ -4135,29 +4126,6 @@ dependencies = [
"watchexec-signals 3.0.0",
]
[[package]]
name = "watchexec-filterer-tagged"
version = "2.0.0"
dependencies = [
"futures",
"globset",
"ignore",
"ignore-files",
"miette",
"nom",
"project-origins",
"regex",
"thiserror",
"tokio",
"tracing",
"tracing-subscriber",
"unicase",
"watchexec",
"watchexec-events 3.0.0",
"watchexec-filterer-ignore",
"watchexec-signals 3.0.0",
]
[[package]]
name = "watchexec-signals"
version = "2.1.0"

View File

@ -8,7 +8,6 @@ members = [
"crates/supervisor",
"crates/filterer/globset",
"crates/filterer/ignore",
"crates/filterer/tagged",
"crates/bosion",
"crates/ignore-files",
"crates/project-origins",

View File

@ -1,32 +0,0 @@
# Changelog
## Next (YYYY-MM-DD)
- Deps: miette 7
## v2.0.0 (2024-01-01)
- Depend on `watchexec-events` instead of the `watchexec` re-export.
## v1.0.0 (2023-12-10)
- Officially deprecate (crate is now unmaintained).
- Depend on `watchexec-events` instead of the `watchexec` re-export.
- Remove error diagnostic codes.
- Deps: upgrade Tokio requirement to 1.32.
## v0.3.0 (2023-03-18)
- Ditch MSRV policy. The `rust-version` indication will remain, for the minimum estimated Rust version for the code features used in the crate's own code, but dependencies may have already moved on. From now on, only latest stable is assumed and tested for. ([#510](https://github.com/watchexec/watchexec/pull/510))
## v0.2.0 (2023-01-09)
- MSRV: bump to 1.61.0
## v0.1.1 (2022-09-07)
- Deps: update miette to 5.3.0
## v0.1.0 (2022-06-23)
- Initial release as a separate crate.

View File

@ -1,71 +0,0 @@
[package]
name = "watchexec-filterer-tagged"
version = "2.0.0"
authors = ["Félix Saparelli <felix@passcod.name>"]
license = "Apache-2.0"
description = "Watchexec filterer component using tagged filters"
keywords = ["watchexec", "filterer", "tags"]
documentation = "https://docs.rs/watchexec-filterer-tagged"
homepage = "https://watchexec.github.io"
repository = "https://github.com/watchexec/watchexec"
readme = "README.md"
rust-version = "1.61.0"
edition = "2021"
[badges.maintenance]
status = "deprecated"
[dependencies]
futures = "0.3.25"
globset = "0.4.8"
ignore = "0.4.18"
miette = "7.2.0"
nom = "7.0.0"
regex = "1.5.4"
thiserror = "1.0.26"
tracing = "0.1.26"
unicase = "2.6.0"
[dependencies.ignore-files]
version = "2.1.0"
path = "../../ignore-files"
[dependencies.tokio]
version = "1.32.0"
features = [
"fs",
]
[dependencies.watchexec]
version = "3.0.1"
path = "../../lib"
[dependencies.watchexec-events]
version = "3.0.0"
path = "../../events"
[dependencies.watchexec-filterer-ignore]
version = "3.0.1"
path = "../ignore"
[dependencies.watchexec-signals]
version = "3.0.0"
path = "../../signals"
[dev-dependencies]
tracing-subscriber = "0.3.6"
[dev-dependencies.project-origins]
version = "1.3.0"
path = "../../project-origins"
[dev-dependencies.tokio]
version = "1.32.0"
features = [
"fs",
"io-std",
"sync",
]

View File

@ -1,19 +0,0 @@
[![Crates.io page](https://badgen.net/crates/v/watchexec-filterer-tagged)](https://crates.io/crates/watchexec-filterer-tagged)
[![API Docs](https://docs.rs/watchexec-filterer-tagged/badge.svg)][docs]
[![Crate license: Apache 2.0](https://badgen.net/badge/license/Apache%202.0)][license]
[![CI status](https://github.com/watchexec/watchexec/actions/workflows/check.yml/badge.svg)](https://github.com/watchexec/watchexec/actions/workflows/check.yml)
# Watchexec filterer: tagged
_Experimental filterer using tagged filters._
- **[API documentation][docs]**.
- Licensed under [Apache 2.0][license].
- Status: soft-deprecated.
The tagged filterer is not in use in the Watchexec CLI, but this crate will continue being updated
until and unless it becomes too much of a pain to do so, for third-party users. It is expected that
some of the code will eventually be reused for a more generic filter crate without the tagged syntax.
[docs]: https://docs.rs/watchexec-filterer-tagged
[license]: ../../../LICENSE

View File

@ -1,10 +0,0 @@
pre-release-commit-message = "release: filterer-tagged v{{version}}"
tag-prefix = "filterer-tagged-"
tag-message = "watchexec-filterer-tagged {{version}}"
[[pre-release-replacements]]
file = "CHANGELOG.md"
search = "^## Next.*$"
replace = "## Next (YYYY-MM-DD)\n\n## v{{version}} ({{date}})"
prerelease = true
max = 1

View File

@ -1,73 +0,0 @@
use std::collections::HashMap;
use ignore::gitignore::Gitignore;
use miette::Diagnostic;
use thiserror::Error;
use tokio::sync::watch::error::SendError;
use watchexec::error::RuntimeError;
use watchexec_filterer_ignore::IgnoreFilterer;
use crate::{Filter, Matcher};
/// Errors emitted by the `TaggedFilterer`.
///
/// Each variant carries its cause as a `#[source]` where applicable, so the full
/// error chain is visible through thiserror/miette reporting.
#[derive(Debug, Diagnostic, Error)]
#[non_exhaustive]
pub enum TaggedFiltererError {
	/// Generic I/O error, with some context.
	#[error("io({about}): {err}")]
	IoError {
		/// What it was about.
		about: &'static str,

		/// The I/O error which occurred.
		#[source]
		err: std::io::Error,
	},

	/// Error received when a tagged filter cannot be parsed.
	#[error("cannot parse filter `{src}`: {err:?}")]
	Parse {
		/// The source of the filter.
		#[source_code]
		src: String,

		/// What went wrong.
		err: nom::error::ErrorKind,
	},

	/// Error received when a filter cannot be added or removed from a tagged filter list.
	#[error("cannot {action} filter: {err:?}")]
	FilterChange {
		/// The action that was attempted.
		action: &'static str,

		/// The underlying error.
		#[source]
		err: SendError<HashMap<Matcher, Vec<Filter>>>,
	},

	/// Error received when a glob cannot be parsed.
	#[error("cannot parse glob: {0}")]
	GlobParse(#[source] ignore::Error),

	/// Error received when a compiled globset cannot be changed.
	#[error("cannot change compiled globset: {0:?}")]
	GlobsetChange(#[source] SendError<Option<Gitignore>>),

	/// Error received about the internal ignore filterer.
	#[error("ignore filterer: {0}")]
	Ignore(#[source] ignore_files::Error),

	/// Error received when a new ignore filterer cannot be swapped in.
	#[error("cannot swap in new ignore filterer: {0:?}")]
	IgnoreSwap(#[source] SendError<IgnoreFilterer>),
}
impl From<TaggedFiltererError> for RuntimeError {
	/// Wraps a tagged-filterer error into the generic watchexec runtime error,
	/// labelling it with the `"tagged"` filterer kind so its origin stays identifiable.
	fn from(err: TaggedFiltererError) -> Self {
		let boxed = Box::new(err) as _;
		Self::Filterer {
			kind: "tagged",
			err: boxed,
		}
	}
}

View File

@ -1,93 +0,0 @@
use std::{
env,
io::Error,
path::{Path, PathBuf},
str::FromStr,
};
use ignore_files::{discover_file, IgnoreFile};
use tokio::fs::read_to_string;
use crate::{Filter, TaggedFiltererError};
/// A filter file.
///
/// This is merely a type wrapper around an [`IgnoreFile`], as the only difference is how the file
/// is parsed.
// Newtype: reuses IgnoreFile's discovery machinery and `applies_in` metadata unchanged.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct FilterFile(pub IgnoreFile);
/// Finds all filter files that apply to the current runtime.
///
/// This considers:
/// - `$XDG_CONFIG_HOME/watchexec/filter`, as well as other locations (APPDATA on Windows…)
/// - Files from the `WATCHEXEC_FILTER_FILES` environment variable (comma-separated)
///
/// All errors (permissions, etc) are collected and returned alongside the ignore files: you may
/// want to show them to the user while still using whatever ignores were successfully found. Errors
/// from files not being found are silently ignored (the files are just not returned).
pub async fn discover_files_from_environment() -> (Vec<FilterFile>, Vec<Error>) {
	let mut found = Vec::new();
	let mut errs = Vec::new();

	// Explicitly-listed files first, from the comma-separated environment variable.
	let listed = env::var("WATCHEXEC_FILTER_FILES").unwrap_or_default();
	for entry in listed.split(',') {
		discover_file(&mut found, &mut errs, None, None, PathBuf::from(entry)).await;
	}

	// Candidate per-user config locations, in priority order.
	let mut candidates: Vec<PathBuf> = Vec::with_capacity(5);
	if let Ok(dir) = env::var("XDG_CONFIG_HOME") {
		candidates.push(Path::new(&dir).join("watchexec/filter"));
	}
	if let Ok(dir) = env::var("APPDATA") {
		candidates.push(Path::new(&dir).join("watchexec/filter"));
	}
	if let Ok(dir) = env::var("USERPROFILE") {
		candidates.push(Path::new(&dir).join(".watchexec/filter"));
	}
	if let Ok(dir) = env::var("HOME") {
		candidates.push(Path::new(&dir).join(".watchexec/filter"));
	}

	// Only the first candidate that is actually discovered is used.
	for candidate in candidates {
		if discover_file(&mut found, &mut errs, None, None, candidate).await {
			break;
		}
	}

	(found.into_iter().map(FilterFile).collect(), errs)
}
impl FilterFile {
	/// Read and parse into [`Filter`]s.
	///
	/// Empty lines and lines starting with `#` are ignored. The `applies_in` field of the
	/// [`IgnoreFile`] is used for the `in_path` field of each [`Filter`].
	///
	/// This method reads the entire file into memory.
	pub async fn load(&self) -> Result<Vec<Filter>, TaggedFiltererError> {
		let text = read_to_string(&self.0.path)
			.await
			.map_err(|err| TaggedFiltererError::IoError {
				about: "filter file load",
				err,
			})?;

		let mut parsed = Vec::with_capacity(text.lines().size_hint().0);
		for raw in text.lines() {
			// Skip blank lines and comments.
			if raw.is_empty() || raw.starts_with('#') {
				continue;
			}

			let mut filter = Filter::from_str(raw)?;
			filter.in_path = self.0.applies_in.clone();
			parsed.push(filter);
		}

		Ok(parsed)
	}
}

View File

@ -1,276 +0,0 @@
use std::collections::HashSet;
use std::path::PathBuf;
use globset::Glob;
use regex::Regex;
use tokio::fs::canonicalize;
use tracing::{trace, warn};
use unicase::UniCase;
use watchexec_events::Tag;
use crate::TaggedFiltererError;
/// A tagged filter.
///
/// A filter is a (matcher, operation, pattern) triple, optionally scoped to a path
/// and optionally negated.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Filter {
	/// Path the filter applies from.
	pub in_path: Option<PathBuf>,

	/// Which tag the filter applies to.
	pub on: Matcher,

	/// The operation to perform on the tag's value.
	pub op: Op,

	/// The pattern to match against the tag's value.
	pub pat: Pattern,

	/// If true, a positive match with this filter will override negative matches from previous
	/// filters on the same tag, and negative matches will be ignored.
	pub negate: bool,
}
impl Filter {
	/// Matches the filter against a subject.
	///
	/// This is really an internal method to the tagged filterer machinery, exposed so you can build
	/// your own filterer using the same types or the textual syntax. As such its behaviour is not
	/// guaranteed to be stable (its signature is, though).
	pub fn matches(&self, subject: impl AsRef<str>) -> Result<bool, TaggedFiltererError> {
		let subject = subject.as_ref();

		trace!(op=?self.op, pat=?self.pat, ?subject, "performing filter match");
		Ok(match (self.op, &self.pat) {
			// Equality is case-insensitive via UniCase.
			(Op::Equal, Pattern::Exact(pat)) => UniCase::new(subject) == UniCase::new(pat),
			(Op::NotEqual, Pattern::Exact(pat)) => UniCase::new(subject) != UniCase::new(pat),
			(Op::Regex, Pattern::Regex(pat)) => pat.is_match(subject),
			(Op::NotRegex, Pattern::Regex(pat)) => !pat.is_match(subject),
			(Op::InSet, Pattern::Set(set)) => set.contains(subject),
			// Set ops against an exact pattern degrade to a case-sensitive string compare.
			(Op::InSet, Pattern::Exact(pat)) => subject == pat,
			(Op::NotInSet, Pattern::Set(set)) => !set.contains(subject),
			(Op::NotInSet, Pattern::Exact(pat)) => subject != pat,
			(op @ (Op::Glob | Op::NotGlob), Pattern::Glob(glob)) => {
				// FIXME: the glob is recompiled on every call; cache the compiled matcher.
				match Glob::new(glob) {
					Ok(glob) => {
						let matches = glob.compile_matcher().is_match(subject);
						match op {
							Op::Glob => matches,
							Op::NotGlob => !matches,
							_ => unreachable!(),
						}
					}
					Err(err) => {
						// An unparseable glob passes (rather than failing the event) so one
						// bad filter cannot suppress every event.
						warn!(
							"failed to compile glob for non-path match, skipping (pass): {}",
							err
						);
						true
					}
				}
			}
			(op, pat) => {
				// Mismatched op/pattern combinations (e.g. a regex op with a set pattern)
				// can never match.
				warn!(
					"trying to match pattern {:?} with op {:?}, that cannot work",
					pat, op
				);
				false
			}
		})
	}

	/// Create a filter from a gitignore-style glob pattern.
	///
	/// The optional path is for the `in_path` field of the filter. When parsing gitignore files, it
	/// should be set to the path of the _directory_ the ignore file is in.
	///
	/// The resulting filter matches on [`Path`][Matcher::Path], with the [`NotGlob`][Op::NotGlob]
	/// op, and a [`Glob`][Pattern::Glob] pattern. If it starts with a `!`, it is negated.
	#[must_use]
	pub fn from_glob_ignore(in_path: Option<PathBuf>, glob: &str) -> Self {
		// A leading `!` is a gitignore "whitelist" line, i.e. a negated filter.
		let (glob, negate) = glob.strip_prefix('!').map_or((glob, false), |g| (g, true));

		Self {
			in_path,
			on: Matcher::Path,
			op: Op::NotGlob,
			pat: Pattern::Glob(glob.to_string()),
			negate,
		}
	}

	/// Returns the filter with its `in_path` canonicalised.
	pub async fn canonicalised(mut self) -> Result<Self, TaggedFiltererError> {
		if let Some(ctx) = self.in_path {
			self.in_path =
				Some(
					canonicalize(&ctx)
						.await
						.map_err(|err| TaggedFiltererError::IoError {
							about: "canonicalise Filter in_path",
							err,
						})?,
				);
			// NOTE(review): this logs the pre-canonicalisation path, not the result.
			trace!(canon=?ctx, "canonicalised in_path");
		}

		Ok(self)
	}
}
/// What a filter matches on.
///
/// Each variant corresponds to one kind of information carried by an event tag;
/// see [`Matcher::from_tag`] for the tag-to-matcher mapping.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[non_exhaustive]
pub enum Matcher {
	/// The presence of a tag on an event.
	Tag,

	/// A path in a filesystem event. Paths are always canonicalised.
	///
	/// Note that there may be multiple paths in an event (e.g. both source and destination for renames), and filters
	/// will be matched on all of them.
	Path,

	/// The file type of an object in a filesystem event.
	///
	/// This is not guaranteed to be present for every filesystem event.
	///
	/// It can be any of these values: `file`, `dir`, `symlink`, `other`. That last one means
	/// "not any of the first three."
	FileType,

	/// The [`EventKind`][notify::event::EventKind] of a filesystem event.
	///
	/// This is the Debug representation of the event kind. Examples:
	/// - `Access(Close(Write))`
	/// - `Modify(Data(Any))`
	/// - `Modify(Metadata(Permissions))`
	/// - `Remove(Folder)`
	///
	/// You should probably use globs or regexes to match these, ex:
	/// - `Create(*)`
	/// - `Modify\(Name\(.+`
	FileEventKind,

	/// The [event source][crate::event::Source] the event came from.
	///
	/// These are the lowercase names of the variants.
	Source,

	/// The ID of the process which caused the event.
	///
	/// Note that it's rare for events to carry this information.
	Process,

	/// A signal sent to the main process.
	///
	/// This can be matched both on the signal number as an integer, and on the signal name as a
	/// string. On Windows, only `BREAK` is supported; `CTRL_C` parses but won't work. Matching is
	/// on both uppercase and lowercase forms.
	///
	/// Interrupt signals (`TERM` and `INT` on Unix, `CTRL_C` on Windows) are parsed, but these are
	/// marked Urgent internally to Watchexec, and thus bypass filtering entirely.
	Signal,

	/// The exit status of a subprocess.
	///
	/// This is only present for events issued when the subprocess exits. The value is matched on
	/// both the exit code as an integer, and either `success` or `fail`, whichever succeeds.
	ProcessCompletion,

	/// The [`Priority`] of the event.
	///
	/// This is never `urgent`, as urgent events bypass filtering.
	Priority,
}
impl Matcher {
	/// Maps an event tag to the set of matchers that can apply to it.
	///
	/// Unknown tag kinds are logged and map to no matchers at all.
	pub(crate) fn from_tag(tag: &Tag) -> &'static [Self] {
		match tag {
			// A path tag also exposes the file type matcher when a type is known.
			Tag::Path { file_type, .. } => {
				if file_type.is_some() {
					&[Self::Path, Self::FileType]
				} else {
					&[Self::Path]
				}
			}
			Tag::FileEventKind(_) => &[Self::FileEventKind],
			Tag::Source(_) => &[Self::Source],
			Tag::Process(_) => &[Self::Process],
			Tag::Signal(_) => &[Self::Signal],
			Tag::ProcessCompletion(_) => &[Self::ProcessCompletion],
			other => {
				warn!("unhandled tag: {:?}", other);
				&[]
			}
		}
	}
}
/// How a filter value is interpreted.
///
/// - `==` and `!=` match on the exact value as string equality (case-insensitively),
/// - `~=` and `~!` match using a [regex],
/// - `*=` and `*!` match using a glob, either via [globset] or [ignore]
/// - `:=` and `:!` match via exact string comparisons, but on any of the list of values separated
///   by `,`
/// - `=`, the "auto" operator, behaves as `*=` if the matcher is `Path`, and as `==` otherwise.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum Op {
	/// The auto operator, `=`, resolves to `*=` or `==` depending on the matcher.
	Auto,

	/// The `==` operator, matches on exact string equality.
	Equal,

	/// The `!=` operator, matches on exact string inequality.
	NotEqual,

	/// The `~=` operator, matches on a regex.
	Regex,

	/// The `~!` operator, matches on a regex (matches are fails).
	NotRegex,

	/// The `*=` operator, matches on a glob.
	Glob,

	/// The `*!` operator, matches on a glob (matches are fails).
	NotGlob,

	/// The `:=` operator, matches (with string compares) on a set of values (belongs are passes).
	InSet,

	/// The `:!` operator, matches on a set of values (belongs are fails).
	NotInSet,
}
/// A filter value (pattern to match with).
#[derive(Debug, Clone)]
#[non_exhaustive]
pub enum Pattern {
	/// An exact string.
	Exact(String),

	/// A regex.
	Regex(Regex),

	/// A glob.
	///
	/// This is stored as a string as globs are compiled together rather than on a per-filter basis.
	Glob(String),

	/// A set of exact strings.
	Set(HashSet<String>),
}
impl PartialEq<Self> for Pattern {
	/// Structural equality; regexes compare by their source pattern text
	/// (`Regex` itself does not implement `PartialEq`).
	fn eq(&self, other: &Self) -> bool {
		match (self, other) {
			(Self::Exact(a), Self::Exact(b)) => a == b,
			(Self::Glob(a), Self::Glob(b)) => a == b,
			(Self::Regex(a), Self::Regex(b)) => a.as_str() == b.as_str(),
			(Self::Set(a), Self::Set(b)) => a == b,
			_ => false,
		}
	}
}

impl Eq for Pattern {}

View File

@ -1,537 +0,0 @@
use std::path::PathBuf;
use std::sync::Arc;
use std::{collections::HashMap, convert::Into};
use futures::{stream::FuturesOrdered, TryStreamExt};
use ignore::{
gitignore::{Gitignore, GitignoreBuilder},
Match,
};
use ignore_files::{IgnoreFile, IgnoreFilter};
use tokio::fs::canonicalize;
use tracing::{debug, trace, trace_span};
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{Event, FileType, Priority, ProcessEnd, Tag};
use watchexec_filterer_ignore::IgnoreFilterer;
use watchexec_signals::Signal;
use crate::{swaplock::SwapLock, Filter, Matcher, Op, Pattern, TaggedFiltererError};
/// A complex filterer that can match any event tag and supports different matching operators.
///
/// See the crate-level documentation for more information.
// All mutable state lives in SwapLocks so checks read a consistent snapshot while
// updates swap whole values in.
#[derive(Debug)]
pub struct TaggedFilterer {
	/// The directory the project is in, its origin.
	///
	/// This is used to resolve absolute paths without an `in_path` context.
	origin: PathBuf,

	/// Where the program is running from.
	///
	/// This is used to resolve relative paths without an `in_path` context.
	workdir: PathBuf,

	/// All filters that are applied, in order, by matcher.
	filters: SwapLock<HashMap<Matcher, Vec<Filter>>>,

	/// Sub-filterer for ignore files.
	ignore_filterer: SwapLock<IgnoreFilterer>,

	/// Compiled matcher for Glob filters.
	glob_compiled: SwapLock<Option<Gitignore>>,

	/// Compiled matcher for NotGlob filters.
	not_glob_compiled: SwapLock<Option<Gitignore>>,
}
impl Filterer for TaggedFilterer {
	/// Delegates to the internal `check`, converting the crate error into a
	/// watchexec [`RuntimeError`] for the trait signature.
	fn check_event(&self, event: &Event, priority: Priority) -> Result<bool, RuntimeError> {
		match self.check(event, priority) {
			Ok(pass) => Ok(pass),
			Err(err) => Err(err.into()),
		}
	}
}
impl TaggedFilterer {
	/// Core check: decides whether `event` passes all filters at the given `priority`.
	///
	/// Layers, in order: priority filters, the internal ignore filterer, then per-tag
	/// filters grouped by [`Matcher`]. Any failing layer rejects the whole event.
	fn check(&self, event: &Event, priority: Priority) -> Result<bool, TaggedFiltererError> {
		let _span = trace_span!("filterer_check").entered();

		trace!(?event, ?priority, "checking event");

		{
			trace!("checking priority");
			if let Some(filters) = self.filters.borrow().get(&Matcher::Priority).cloned() {
				trace!(filters=%filters.len(), "found some filters for priority");

				// Within one matcher, filters AND together; a passing negate filter
				// short-circuits the whole matcher to a pass.
				let mut pri_match = true;
				for filter in &filters {
					let _span = trace_span!("checking filter against priority", ?filter).entered();
					let applies = filter.matches(match priority {
						Priority::Low => "low",
						Priority::Normal => "normal",
						Priority::High => "high",
						Priority::Urgent => unreachable!("urgent by-passes filtering"),
					})?;
					if filter.negate {
						if applies {
							trace!(prev=%pri_match, now=%true, "negate filter passes, passing this priority");
							pri_match = true;
							break;
						}
						trace!(prev=%pri_match, now=%pri_match, "negate filter fails, ignoring");
					} else {
						trace!(prev=%pri_match, this=%applies, now=%(pri_match&applies), "filter applies to priority");
						pri_match &= applies;
					}
				}

				if !pri_match {
					trace!("priority fails check, failing entire event");
					return Ok(false);
				}
			} else {
				trace!("no filters for priority, skipping (pass)");
			}
		}

		{
			trace!("checking internal ignore filterer");
			let igf = self.ignore_filterer.borrow();
			if !igf
				.check_event(event, priority)
				.expect("IgnoreFilterer never errors")
			{
				trace!("internal ignore filterer matched (fail)");
				return Ok(false);
			}
		}

		if self.filters.borrow().is_empty() {
			trace!("no filters, skipping entire check (pass)");
			return Ok(true);
		}

		trace!(tags=%event.tags.len(), "checking all tags on the event");
		for tag in &event.tags {
			let _span = trace_span!("check_tag", ?tag).entered();

			trace!("checking tag");
			for matcher in Matcher::from_tag(tag) {
				let _span = trace_span!("check_matcher", ?matcher).entered();

				let filters = self.filters.borrow().get(matcher).cloned();
				if let Some(tag_filters) = filters {
					if tag_filters.is_empty() {
						trace!("no filters for this matcher, skipping (pass)");
						continue;
					}

					trace!(filters=%tag_filters.len(), "found some filters for this matcher");

					let mut tag_match = true;

					// Path glob filters are pre-compiled into Gitignore sets; apply those first.
					if let (Matcher::Path, Tag::Path { path, file_type }) = (matcher, tag) {
						let is_dir = file_type.map_or(false, |ft| matches!(ft, FileType::Dir));

						{
							let gc = self.glob_compiled.borrow();
							if let Some(igs) = gc.as_ref() {
								let _span =
									trace_span!("checking_compiled_filters", compiled=%"Glob")
										.entered();
								// Paths under the origin are matched with their parents considered.
								match if path.strip_prefix(&self.origin).is_ok() {
									trace!("checking against path or parents");
									igs.matched_path_or_any_parents(path, is_dir)
								} else {
									trace!("checking against path only");
									igs.matched(path, is_dir)
								} {
									Match::None => {
										trace!("no match (fail)");
										tag_match &= false;
									}
									Match::Ignore(glob) => {
										// Only count matches whose source glob is in scope
										// for this path.
										if glob
											.from()
											.map_or(true, |f| path.strip_prefix(f).is_ok())
										{
											trace!(?glob, "positive match (pass)");
											tag_match &= true;
										} else {
											trace!(
												?glob,
												"positive match, but not in scope (ignore)"
											);
										}
									}
									Match::Whitelist(glob) => {
										trace!(?glob, "negative match (ignore)");
									}
								}
							}
						}

						{
							let ngc = self.not_glob_compiled.borrow();
							if let Some(ngs) = ngc.as_ref() {
								let _span =
									trace_span!("checking_compiled_filters", compiled=%"NotGlob")
										.entered();
								match if path.strip_prefix(&self.origin).is_ok() {
									trace!("checking against path or parents");
									ngs.matched_path_or_any_parents(path, is_dir)
								} else {
									trace!("checking against path only");
									ngs.matched(path, is_dir)
								} {
									Match::None => {
										trace!("no match (pass)");
										tag_match &= true;
									}
									Match::Ignore(glob) => {
										if glob
											.from()
											.map_or(true, |f| path.strip_prefix(f).is_ok())
										{
											trace!(?glob, "positive match (fail)");
											tag_match &= false;
										} else {
											trace!(
												?glob,
												"positive match, but not in scope (ignore)"
											);
										}
									}
									Match::Whitelist(glob) => {
										trace!(?glob, "negative match (pass)");
										tag_match = true;
									}
								}
							}
						}
					}

					// Path glob filters were handled by the compiled sets above, so drop
					// them from the per-filter pass below.
					let tag_filters = tag_filters
						.into_iter()
						.filter(|f| {
							!matches!(
								(tag, matcher, f),
								(
									Tag::Path { .. },
									Matcher::Path,
									Filter {
										on: Matcher::Path,
										op: Op::Glob | Op::NotGlob,
										pat: Pattern::Glob(_),
										..
									}
								)
							)
						})
						.collect::<Vec<_>>();

					if tag_filters.is_empty() && tag_match {
						trace!("no more filters for this matcher, skipping (pass)");
						continue;
					}

					trace!(filters=%tag_filters.len(), "got some filters to check still");

					for filter in &tag_filters {
						let _span = trace_span!("checking filter against tag", ?filter).entered();
						if let Some(app) = self.match_tag(filter, tag)? {
							if filter.negate {
								if app {
									trace!(prev=%tag_match, now=%true, "negate filter passes, passing this matcher");
									tag_match = true;
									break;
								}
								trace!(prev=%tag_match, now=%tag_match, "negate filter fails, ignoring");
							} else {
								trace!(prev=%tag_match, this=%app, now=%(tag_match&app), "filter applies to this tag");
								tag_match &= app;
							}
						}
					}

					if !tag_match {
						trace!("matcher fails check, failing entire event");
						return Ok(false);
					}

					trace!("matcher passes check, continuing");
				} else {
					trace!("no filters for this matcher, skipping (pass)");
				}
			}
		}

		trace!("passing event");
		Ok(true)
	}

	/// Initialise a new tagged filterer with no filters.
	///
	/// This takes two paths: the project origin, and the current directory. The current directory
	/// is not obtained from the environment so you can customise it; generally you should use
	/// [`std::env::current_dir()`] though.
	///
	/// The origin is the directory the main project that is being watched is in. This is used to
	/// resolve absolute paths given in filters without an `in_path` field (e.g. all filters parsed
	/// from text), and for ignore file based filtering.
	///
	/// The workdir is used to resolve relative paths given in filters without an `in_path` field.
	///
	/// So, if origin is `/path/to/project` and workdir is `/path/to/project/subtree`:
	/// - `path=foo.bar` is resolved to `/path/to/project/subtree/foo.bar`
	/// - `path=/foo.bar` is resolved to `/path/to/project/foo.bar`
	pub async fn new(origin: PathBuf, workdir: PathBuf) -> Result<Arc<Self>, TaggedFiltererError> {
		// Both roots are canonicalised up front so later strip_prefix checks are reliable.
		let origin = canonicalize(origin)
			.await
			.map_err(|err| TaggedFiltererError::IoError {
				about: "canonicalise origin on new tagged filterer",
				err,
			})?;
		Ok(Arc::new(Self {
			filters: SwapLock::new(HashMap::new()),
			ignore_filterer: SwapLock::new(IgnoreFilterer(IgnoreFilter::empty(&origin))),
			glob_compiled: SwapLock::new(None),
			not_glob_compiled: SwapLock::new(None),
			workdir: canonicalize(workdir)
				.await
				.map_err(|err| TaggedFiltererError::IoError {
					about: "canonicalise workdir on new tagged filterer",
					err,
				})?,
			origin,
		}))
	}

	// filter ctx    event path          filter    outcome
	// /foo/bar      /foo/bar/baz.txt    baz.txt   pass
	// /foo/bar      /foo/bar/baz.txt    /baz.txt  pass
	// /foo/bar      /foo/bar/baz.txt    /baz.*    pass
	// /foo/bar      /foo/bar/baz.txt    /blah     fail
	// /foo/quz      /foo/bar/baz.txt    /baz.*    skip

	/// Matches a single filter against a single tag.
	///
	/// Ok(Some(bool)) => the match was applied, bool is the result
	/// Ok(None) => for some precondition, the match was not done (mismatched tag, out of context, …)
	fn match_tag(&self, filter: &Filter, tag: &Tag) -> Result<Option<bool>, TaggedFiltererError> {
		// Maps a signal to its conventional (name, number) pair for textual/numeric matching.
		const fn sig_match(sig: Signal) -> (&'static str, i32) {
			match sig {
				Signal::Hangup | Signal::Custom(1) => ("HUP", 1),
				Signal::ForceStop | Signal::Custom(9) => ("KILL", 9),
				Signal::Interrupt | Signal::Custom(2) => ("INT", 2),
				Signal::Quit | Signal::Custom(3) => ("QUIT", 3),
				Signal::Terminate | Signal::Custom(15) => ("TERM", 15),
				Signal::User1 | Signal::Custom(10) => ("USR1", 10),
				Signal::User2 | Signal::Custom(12) => ("USR2", 12),
				Signal::Custom(n) => ("UNK", n),
				_ => ("UNK", 0),
			}
		}

		trace!(matcher=?filter.on, "matching filter to tag");
		match (tag, filter.on) {
			(tag, Matcher::Tag) => filter.matches(tag.discriminant_name()),
			(Tag::Path { path, .. }, Matcher::Path) => {
				// Resolve the event path relative to the filter context, workdir, or origin
				// (in that order), stripping any leading slash from the remainder.
				let resolved = if let Some(ctx) = &filter.in_path {
					if let Ok(suffix) = path.strip_prefix(ctx) {
						suffix.strip_prefix("/").unwrap_or(suffix)
					} else {
						// Out of this filter's context: skip rather than fail.
						return Ok(None);
					}
				} else if let Ok(suffix) = path.strip_prefix(&self.workdir) {
					suffix.strip_prefix("/").unwrap_or(suffix)
				} else if let Ok(suffix) = path.strip_prefix(&self.origin) {
					suffix.strip_prefix("/").unwrap_or(suffix)
				} else {
					path.strip_prefix("/").unwrap_or(path)
				};

				trace!(?resolved, "resolved path to match filter against");

				if matches!(filter.op, Op::Glob | Op::NotGlob) {
					trace!("path glob match with match_tag is already handled");
					return Ok(None);
				}

				filter.matches(resolved.to_string_lossy())
			}
			(
				Tag::Path {
					file_type: Some(ft),
					..
				},
				Matcher::FileType,
			) => filter.matches(ft.to_string()),
			(Tag::FileEventKind(kind), Matcher::FileEventKind) => {
				filter.matches(format!("{kind:?}"))
			}
			(Tag::Source(src), Matcher::Source) => filter.matches(src.to_string()),
			(Tag::Process(pid), Matcher::Process) => filter.matches(pid.to_string()),
			(Tag::Signal(sig), Matcher::Signal) => {
				// Match on bare name, SIG-prefixed name, or the signal number.
				let (text, int) = sig_match(*sig);
				Ok(filter.matches(text)?
					|| filter.matches(format!("SIG{text}"))?
					|| filter.matches(int.to_string())?)
			}
			(Tag::ProcessCompletion(ope), Matcher::ProcessCompletion) => match ope {
				None => filter.matches("_"),
				Some(ProcessEnd::Success) => filter.matches("success"),
				Some(ProcessEnd::ExitError(int)) => filter.matches(format!("error({int})")),
				Some(ProcessEnd::ExitSignal(sig)) => {
					let (text, int) = sig_match(*sig);
					Ok(filter.matches(format!("signal({text})"))?
						|| filter.matches(format!("signal(SIG{text})"))?
						|| filter.matches(format!("signal({int})"))?)
				}
				Some(ProcessEnd::ExitStop(int)) => filter.matches(format!("stop({int})")),
				Some(ProcessEnd::Exception(int)) => filter.matches(format!("exception({int:X})")),
				Some(ProcessEnd::Continued) => filter.matches("continued"),
			},
			(_, _) => {
				trace!("no match for tag, skipping");
				return Ok(None);
			}
		}
		.map(Some)
	}

	/// Add some filters to the filterer.
	///
	/// This is async as it submits the new filters to the live filterer, which may be holding a
	/// read lock. It takes a slice of filters so it can efficiently add a large number of filters
	/// with a single write, without needing to acquire the lock repeatedly.
	///
	/// If filters with glob operations are added, the filterer's glob matchers are recompiled after
	/// the new filters are added, in this method. This should not be used for inserting an
	/// [`IgnoreFile`]: use [`add_ignore_file()`](Self::add_ignore_file) instead.
	pub async fn add_filters(&self, filters: &[Filter]) -> Result<(), TaggedFiltererError> {
		debug!(?filters, "adding filters to filterer");

		// Note which glob kinds appear while canonicalising, to recompile only as needed.
		let mut recompile_globs = false;
		let mut recompile_not_globs = false;

		#[allow(clippy::from_iter_instead_of_collect)]
		let filters = FuturesOrdered::from_iter(
			filters
				.iter()
				.cloned()
				.inspect(|f| match f.op {
					Op::Glob => {
						recompile_globs = true;
					}
					Op::NotGlob => {
						recompile_not_globs = true;
					}
					_ => {}
				})
				.map(Filter::canonicalised),
		)
		.try_collect::<Vec<_>>()
		.await?;
		trace!(?filters, "canonicalised filters");
		// TODO: use miette's related and issue canonicalisation errors for all of them

		self.filters
			.change(|fs| {
				for filter in filters {
					fs.entry(filter.on).or_default().push(filter);
				}
			})
			.map_err(|err| TaggedFiltererError::FilterChange { action: "add", err })?;
		trace!("inserted filters into swaplock");

		if recompile_globs {
			self.recompile_globs(Op::Glob)?;
		}

		if recompile_not_globs {
			self.recompile_globs(Op::NotGlob)?;
		}

		Ok(())
	}

	/// Rebuilds the compiled Gitignore set for the given glob op (Glob or NotGlob).
	fn recompile_globs(&self, op_filter: Op) -> Result<(), TaggedFiltererError> {
		trace!(?op_filter, "recompiling globs");
		let target = match op_filter {
			Op::Glob => &self.glob_compiled,
			Op::NotGlob => &self.not_glob_compiled,
			_ => unreachable!("recompile_globs called with invalid op"),
		};

		let globs = {
			let filters = self.filters.borrow();
			if let Some(fs) = filters.get(&Matcher::Path) {
				trace!(?op_filter, "pulling filters from swaplock");
				// we want to hold the lock as little as possible, so we clone the filters
				fs.iter()
					.filter(|&f| f.op == op_filter)
					.cloned()
					.collect::<Vec<_>>()
			} else {
				trace!(?op_filter, "no filters, erasing compiled glob");
				return target
					.replace(None)
					.map_err(TaggedFiltererError::GlobsetChange);
			}
		};

		let mut builder = GitignoreBuilder::new(&self.origin);
		for filter in globs {
			if let Pattern::Glob(mut glob) = filter.pat {
				if filter.negate {
					// Negated filters become gitignore whitelist lines.
					glob.insert(0, '!');
				}

				trace!(?op_filter, in_path=?filter.in_path, ?glob, "adding new glob line");
				builder
					.add_line(filter.in_path, &glob)
					.map_err(TaggedFiltererError::GlobParse)?;
			}
		}

		trace!(?op_filter, "finalising compiled glob");
		let compiled = builder.build().map_err(TaggedFiltererError::GlobParse)?;

		trace!(?op_filter, "swapping in new compiled glob");
		target
			.replace(Some(compiled))
			.map_err(TaggedFiltererError::GlobsetChange)
	}

	/// Reads a gitignore-style [`IgnoreFile`] and adds it to the filterer.
	pub async fn add_ignore_file(&self, file: &IgnoreFile) -> Result<(), TaggedFiltererError> {
		// Clone-modify-swap: readers never observe a half-updated filterer.
		let mut new = { self.ignore_filterer.borrow().clone() };

		new.0
			.add_file(file)
			.await
			.map_err(TaggedFiltererError::Ignore)?;

		self.ignore_filterer
			.replace(new)
			.map_err(TaggedFiltererError::IgnoreSwap)?;

		Ok(())
	}

	/// Clears all filters from the filterer.
	///
	/// This also recompiles the glob matchers, so essentially it resets the entire filterer state.
	pub fn clear_filters(&self) -> Result<(), TaggedFiltererError> {
		debug!("removing all filters from filterer");
		self.filters.replace(Default::default()).map_err(|err| {
			TaggedFiltererError::FilterChange {
				action: "clear all",
				err,
			}
		})?;

		self.recompile_globs(Op::Glob)?;
		self.recompile_globs(Op::NotGlob)?;

		Ok(())
	}
}

View File

@ -1,92 +0,0 @@
//! A filterer implementation that exposes the full capabilities of Watchexec.
//!
//! Filters match against [event tags][Tag]; can be exact matches, glob matches, regex matches, or
//! set matches; can reverse the match (equal/not equal, etc); and can be negated.
//!
//! [Filters][Filter] can be generated from your application and inserted directly, or they can be
//! parsed from a textual format:
//!
//! ```text
//! [!]{Matcher}{Op}{Value}
//! ```
//!
//! For example:
//!
//! ```text
//! path==/foo/bar
//! path*=**/bar
//! path~=bar$
//! !kind=file
//! ```
//!
//! There is a set of [operators][Op]:
//! - `==` and `!=`: exact match and exact not match (case insensitive)
//! - `~=` and `~!`: regex match and regex not match
//! - `*=` and `*!`: glob match and glob not match
//! - `:=` and `:!`: set match and set not match
//!
//! Sets are a list of values separated by `,`.
//!
//! In addition to the two-symbol operators, there is the `=` "auto" operator, which maps to the
//! most convenient operator for the given _matcher_. The current mapping is:
//!
//! | Matcher                                           | Operator      |
//! |---------------------------------------------------|---------------|
//! | [`Tag`](Matcher::Tag)                             | `:=` (in set) |
//! | [`Path`](Matcher::Path)                           | `*=` (glob)   |
//! | [`FileType`](Matcher::FileType)                   | `:=` (in set) |
//! | [`FileEventKind`](Matcher::FileEventKind)         | `*=` (glob)   |
//! | [`Source`](Matcher::Source)                       | `:=` (in set) |
//! | [`Process`](Matcher::Process)                     | `:=` (in set) |
//! | [`Signal`](Matcher::Signal)                       | `:=` (in set) |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `*=` (glob)   |
//! | [`Priority`](Matcher::Priority)                   | `:=` (in set) |
//!
//! [Matchers][Matcher] correspond to Tags, but are not one-to-one: the `path` matcher operates on
//! the `path` part of the `Path` tag, and the `type` matcher operates on the `file_type`, for
//! example.
//!
//! | Matcher                                           | Syntax   | Tag                                          |
//! |-------------------------------------------|----------|----------------------------------------------|
//! | [`Tag`](Matcher::Tag)                     | `tag`    | _the presence of a Tag on the event_         |
//! | [`Path`](Matcher::Path)                   | `path`   | [`Path`](Tag::Path) (`path` field)           |
//! | [`FileType`](Matcher::FileType)           | `type`   | [`Path`](Tag::Path) (`file_type` field, when Some) |
//! | [`FileEventKind`](Matcher::FileEventKind) | `kind` or `fek` | [`FileEventKind`](Tag::FileEventKind) |
//! | [`Source`](Matcher::Source)               | `source` or `src`  | [`Source`](Tag::Source)           |
//! | [`Process`](Matcher::Process)             | `process` or `pid` | [`Process`](Tag::Process)         |
//! | [`Signal`](Matcher::Signal)               | `signal` or `sig`  | [`Signal`](Tag::Signal)           |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `complete` or `exit` | [`ProcessCompletion`](Tag::ProcessCompletion) |
//! | [`Priority`](Matcher::Priority)           | `priority` | special: event [`Priority`]                |
//!
//! Filters are checked in order, grouped per tag and per matcher. Filter groups may be checked in
//! any order, but the filters in the groups are checked in add order. Path glob filters are always
//! checked first, for internal reasons.
//!
//! The `negate` boolean field behaves specially: it is not operator negation, but rather the same
//! kind of behaviour that is applied to `!`-prefixed globs in gitignore files: if a negated filter
//! matches the event, the result of the event checking for that matcher is reverted to `true`, even
//! if a previous filter set it to `false`. Unmatched negated filters are ignored.
//!
//! Glob syntax is as supported by the [ignore] crate for Paths, and by [globset] otherwise. (As of
//! writing, the ignore crate uses globset internally). Regex syntax is the default syntax of the
//! [regex] crate.

#![doc(html_favicon_url = "https://watchexec.github.io/logo:watchexec.svg")]
#![doc(html_logo_url = "https://watchexec.github.io/logo:watchexec.svg")]
#![warn(clippy::unwrap_used, missing_docs)]
#![deny(rust_2018_idioms)]

// Re-exported so downstream code can build `Pattern::Regex` filters directly.
pub use regex::Regex;

pub use error::*;
pub use files::*;
pub use filter::*;
pub use filterer::*;

mod error;
mod files;
mod filter;
mod filterer;
mod parse;
mod swaplock;

View File

@ -1,139 +0,0 @@
use std::str::FromStr;
use nom::{
branch::alt,
bytes::complete::{is_not, tag, tag_no_case, take_while1},
character::complete::char,
combinator::{map_res, opt},
sequence::{delimited, tuple},
Finish, IResult,
};
use regex::Regex;
use tracing::trace;
use crate::{Filter, Matcher, Op, Pattern, TaggedFiltererError};
impl FromStr for Filter {
    type Err = TaggedFiltererError;

    /// Parses a filter from its textual form: `[!]{Matcher}{Op}{Value}`.
    ///
    /// Built from three nom sub-parsers applied in sequence (matcher keyword,
    /// operator, pattern), with an optional leading `!` marking the filter as
    /// negated. The `=` "auto" operator is resolved here to a concrete op
    /// based on the matcher.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Parses the matcher keyword, case-insensitively. Note the alt()
        // ordering: longer aliases come before their own prefixes
        // ("signal" before "sig") so the first match is the full keyword.
        fn matcher(i: &str) -> IResult<&str, Matcher> {
            map_res(
                alt((
                    tag_no_case("tag"),
                    tag_no_case("path"),
                    tag_no_case("type"),
                    tag_no_case("kind"),
                    tag_no_case("fek"),
                    tag_no_case("source"),
                    tag_no_case("src"),
                    tag_no_case("priority"),
                    tag_no_case("process"),
                    tag_no_case("pid"),
                    tag_no_case("signal"),
                    tag_no_case("sig"),
                    tag_no_case("complete"),
                    tag_no_case("exit"),
                )),
                |m: &str| match m.to_ascii_lowercase().as_str() {
                    "tag" => Ok(Matcher::Tag),
                    "path" => Ok(Matcher::Path),
                    "type" => Ok(Matcher::FileType),
                    "kind" | "fek" => Ok(Matcher::FileEventKind),
                    "source" | "src" => Ok(Matcher::Source),
                    "priority" => Ok(Matcher::Priority),
                    "process" | "pid" => Ok(Matcher::Process),
                    "signal" | "sig" => Ok(Matcher::Signal),
                    "complete" | "exit" => Ok(Matcher::ProcessCompletion),
                    m => Err(format!("unknown matcher: {m}")),
                },
            )(i)
        }

        // Parses the operator. Two-symbol operators are listed before the
        // single `=` so that e.g. `==` is never consumed as `=` + garbage.
        fn op(i: &str) -> IResult<&str, Op> {
            map_res(
                alt((
                    tag("=="),
                    tag("!="),
                    tag("~="),
                    tag("~!"),
                    tag("*="),
                    tag("*!"),
                    tag(":="),
                    tag(":!"),
                    tag("="),
                )),
                |o: &str| match o {
                    "==" => Ok(Op::Equal),
                    "!=" => Ok(Op::NotEqual),
                    "~=" => Ok(Op::Regex),
                    "~!" => Ok(Op::NotRegex),
                    "*=" => Ok(Op::Glob),
                    "*!" => Ok(Op::NotGlob),
                    ":=" => Ok(Op::InSet),
                    ":!" => Ok(Op::NotInSet),
                    "=" => Ok(Op::Auto),
                    o => Err(format!("unknown op: `{o}`")),
                },
            )(i)
        }

        // Parses the value: a single- or double-quoted string (no escape
        // support yet), or everything remaining in the input.
        fn pattern(i: &str) -> IResult<&str, &str> {
            alt((
                // TODO: escapes
                delimited(char('"'), is_not("\""), char('"')),
                delimited(char('\''), is_not("'"), char('\'')),
                take_while1(|_| true),
            ))(i)
        }

        // Assembles the full filter, resolving `Op::Auto` per matcher and
        // interpreting the raw pattern text according to the resolved op.
        fn filter(i: &str) -> IResult<&str, Filter> {
            map_res(
                tuple((opt(tag("!")), matcher, op, pattern)),
                |(n, m, o, p)| -> Result<_, ()> {
                    Ok(Filter {
                        in_path: None,
                        on: m,
                        op: match o {
                            // Auto resolves to glob for path-like matchers,
                            // and to set-membership for everything else.
                            Op::Auto => match m {
                                Matcher::Path
                                | Matcher::FileEventKind
                                | Matcher::ProcessCompletion => Op::Glob,
                                _ => Op::InSet,
                            },
                            o => o,
                        },
                        pat: match (o, m) {
                            // TODO: carry regex/glob errors through
                            (
                                Op::Auto,
                                Matcher::Path | Matcher::FileEventKind | Matcher::ProcessCompletion,
                            )
                            | (Op::Glob | Op::NotGlob, _) => Pattern::Glob(p.to_string()),
                            (Op::Auto | Op::InSet | Op::NotInSet, _) => {
                                Pattern::Set(p.split(',').map(|s| s.trim().to_string()).collect())
                            }
                            (Op::Regex | Op::NotRegex, _) => {
                                Pattern::Regex(Regex::new(p).map_err(drop)?)
                            }
                            (Op::Equal | Op::NotEqual, _) => Pattern::Exact(p.to_string()),
                        },
                        negate: n.is_some(),
                    })
                },
            )(i)
        }

        trace!(src=?s, "parsing tagged filter");
        filter(s)
            .finish()
            .map(|(_, f)| {
                trace!(src=?s, filter=?f, "parsed tagged filter");
                f
            })
            .map_err(|e| TaggedFiltererError::Parse {
                src: s.to_string(),
                err: e.code,
            })
    }
}

View File

@ -1,58 +0,0 @@
//! A value that is always available, but can be swapped out.
use std::fmt;
use tokio::sync::watch::{channel, error::SendError, Receiver, Ref, Sender};
/// A value that is always available, but can be swapped out.
///
/// This is a wrapper around a [Tokio `watch`][tokio::sync::watch]. The value can be read and
/// replaced without await (`change` and `replace` are synchronous). Borrows should be held for as
/// little as possible, as they keep a read lock.
pub struct SwapLock<T: Clone> {
    // Receiver half: serves all reads via `borrow()`.
    r: Receiver<T>,
    // Sender half: performs the swaps in `change()` and `replace()`.
    s: Sender<T>,
}
impl<T> SwapLock<T>
where
    T: Clone,
{
    /// Create a new `SwapLock` holding `inner`.
    pub fn new(inner: T) -> Self {
        let (writer, reader) = channel(inner);
        Self {
            r: reader,
            s: writer,
        }
    }

    /// Get a reference to the value.
    ///
    /// The guard holds a read lock on the watch channel; drop it promptly.
    pub fn borrow(&self) -> Ref<'_, T> {
        self.r.borrow()
    }

    /// Rewrite the value using a closure.
    ///
    /// Clones the current value, hands the clone to `f` by mutable
    /// reference, then swaps the modified clone in.
    pub fn change(&self, f: impl FnOnce(&mut T)) -> Result<(), SendError<T>> {
        let mut next = self.r.borrow().clone();
        f(&mut next);
        self.s.send(next)
    }

    /// Replace the value with a new one.
    pub fn replace(&self, new: T) -> Result<(), SendError<T>> {
        self.s.send(new)
    }
}
impl<T> fmt::Debug for SwapLock<T>
where
    T: fmt::Debug + Clone,
{
    // Renders as `SwapLock { (watch): …, .. }` — the parenthesised name
    // signals this is the underlying channel, not a plain field.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        f.debug_struct("SwapLock")
            .field("(watch)", &self.r)
            .finish_non_exhaustive()
    }
}

View File

@ -1,114 +0,0 @@
use watchexec_events::{filekind::*, ProcessEnd, Source};
use watchexec_signals::Signal;
mod helpers;
use helpers::tagged_ff::*;
// A filter file with only comments and blank lines must not filter anything out.
#[tokio::test]
async fn empty_filter_passes_everything() {
    let filterer = filt("", &[], &[file("empty.wef").await]).await;

    filterer.file_does_pass("Cargo.toml");
    filterer.file_does_pass("Cargo.json");
    filterer.file_does_pass("Gemfile.toml");
    filterer.file_does_pass("FINAL-FINAL.docx");
    filterer.dir_does_pass("/test/Cargo.toml");
    filterer.dir_does_pass("/a/folder");
    filterer.file_does_pass("apples/carrots/oranges");
    filterer.file_does_pass("apples/carrots/cauliflowers/oranges");
    filterer.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
    filterer.file_does_pass("apples/oranges/bananas");
    filterer.dir_does_pass("apples/carrots/oranges");
    filterer.dir_does_pass("apples/carrots/cauliflowers/oranges");
    filterer.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
    filterer.dir_does_pass("apples/oranges/bananas");

    filterer.source_does_pass(Source::Keyboard);
    filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
    filterer.pid_does_pass(1234);
    filterer.signal_does_pass(Signal::User1);
    filterer.complete_does_pass(None);
    filterer.complete_does_pass(Some(ProcessEnd::Success));
}

// folder.wef combines `type==dir` with `path*=prunes`: only directories
// under a "prunes" path may pass.
#[tokio::test]
async fn folder() {
    let filterer = filt("", &[], &[file("folder.wef").await]).await;

    filterer.file_doesnt_pass("apples");
    filterer.file_doesnt_pass("apples/oranges/bananas");
    filterer.dir_doesnt_pass("apples");
    filterer.dir_doesnt_pass("apples/carrots");

    filterer.file_doesnt_pass("raw-prunes");
    filterer.dir_doesnt_pass("raw-prunes");

    filterer.file_doesnt_pass("prunes");
    filterer.file_doesnt_pass("prunes/oranges/bananas");
    filterer.dir_does_pass("prunes");
    filterer.dir_does_pass("prunes/carrots/cauliflowers/oranges");
}

// path-patterns.wef holds negated glob and regex path filters; each section
// below exercises one line of the file.
#[tokio::test]
async fn patterns() {
    let filterer = filt("", &[], &[file("path-patterns.wef").await]).await;

    // Unmatched
    filterer.file_does_pass("FINAL-FINAL.docx");
    filterer.dir_does_pass("/a/folder");
    filterer.file_does_pass("rat");
    filterer.file_does_pass("foo/bar/rat");
    filterer.file_does_pass("/foo/bar/rat");

    // Cargo.toml
    filterer.file_doesnt_pass("Cargo.toml");
    filterer.dir_doesnt_pass("Cargo.toml");
    filterer.file_does_pass("Cargo.json");

    // package.json
    filterer.file_doesnt_pass("package.json");
    filterer.dir_doesnt_pass("package.json");
    filterer.file_does_pass("package.toml");

    // *.gemspec
    filterer.file_doesnt_pass("pearl.gemspec");
    filterer.dir_doesnt_pass("sapphire.gemspec");
    filterer.file_doesnt_pass(".gemspec");
    filterer.file_does_pass("diamond.gemspecial");

    // test-[^u]+
    filterer.file_does_pass("test-unit");
    filterer.dir_doesnt_pass("test-integration");
    filterer.file_does_pass("tester-helper");

    // [.]sw[a-z]$
    filterer.file_doesnt_pass("source.swa");
    filterer.file_doesnt_pass(".source.swb");
    filterer.file_doesnt_pass("sub/source.swc");
    filterer.file_does_pass("sub/dir.swa/file");
    filterer.file_does_pass("source.sw1");
}

// negate.wef: `path=nah` plus `!path=nah.yeah` — a matching negated filter
// flips the result back to pass.
#[tokio::test]
async fn negate() {
    let filterer = filt("", &[], &[file("negate.wef").await]).await;

    filterer.file_doesnt_pass("yeah");
    filterer.file_does_pass("nah");
    filterer.file_does_pass("nah.yeah");
}

// An ignore file and a filter file together: an event must survive both.
#[tokio::test]
async fn ignores_and_filters() {
    let filterer = filt("", &[file("globs").await.0], &[file("folder.wef").await]).await;

    // ignored
    filterer.dir_doesnt_pass("test-helper");

    // not filtered
    filterer.dir_doesnt_pass("tester-helper");

    // not ignored && filtered
    filterer.dir_does_pass("prunes/tester-helper");
}

View File

@ -1,349 +0,0 @@
#![allow(dead_code)]
use std::{
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use ignore_files::{IgnoreFile, IgnoreFilter};
use project_origins::ProjectType;
use tokio::fs::canonicalize;
use watchexec::{error::RuntimeError, filter::Filterer};
use watchexec_events::{
filekind::FileEventKind, Event, FileType, Priority, ProcessEnd, Source, Tag,
};
use watchexec_filterer_ignore::IgnoreFilterer;
use watchexec_filterer_tagged::{Filter, FilterFile, Matcher, Op, Pattern, TaggedFilterer};
use watchexec_signals::Signal;
/// Re-exports for tests that construct [`Filter`]s in code and use ignore
/// files directly.
pub mod tagged {
    pub use super::ig_file as file;
    pub use super::tagged_filt as filt;
    pub use super::Applies;
    pub use super::FilterExt;
    pub use super::PathHarness;
    pub use super::TaggedHarness;
    pub use super::{filter, glob_filter, notglob_filter};
    pub use watchexec_events::Priority;
}

/// Re-exports for tests that load filters from `.wef` filter files; shadows
/// `file`/`filt` from [`tagged`] with the filter-file variants.
pub mod tagged_ff {
    pub use super::ff_file as file;
    pub use super::tagged::*;
    pub use super::tagged_fffilt as filt;
}
/// Assertion helpers for checking path-tag events against a [`Filterer`].
///
/// Each `*_does_pass` / `*_doesnt_pass` method builds a one-tag event and
/// asserts on the filterer's verdict.
pub trait PathHarness: Filterer {
    // Builds an event with a single `Path` tag and runs it through the filterer.
    fn check_path(
        &self,
        path: PathBuf,
        file_type: Option<FileType>,
    ) -> std::result::Result<bool, RuntimeError> {
        let event = Event {
            tags: vec![Tag::Path { path, file_type }],
            metadata: Default::default(),
        };

        self.check_event(&event, Priority::Normal)
    }

    // Resolves `path` against the current directory (a `/test/` prefix maps
    // into the test origin; absolute paths pass through) and asserts the
    // filterer's verdict equals `pass`.
    fn path_pass(&self, path: &str, file_type: Option<FileType>, pass: bool) {
        let origin = std::fs::canonicalize(".").unwrap();
        let full_path = if let Some(suf) = path.strip_prefix("/test/") {
            origin.join(suf)
        } else if Path::new(path).has_root() {
            path.into()
        } else {
            origin.join(path)
        };

        tracing::info!(?path, ?file_type, ?pass, "check");

        assert_eq!(
            self.check_path(full_path, file_type).unwrap(),
            pass,
            "{} {:?} (expected {})",
            match file_type {
                Some(FileType::File) => "file",
                Some(FileType::Dir) => "dir",
                Some(FileType::Symlink) => "symlink",
                Some(FileType::Other) => "other",
                None => "path",
            },
            path,
            if pass { "pass" } else { "fail" }
        );
    }

    fn file_does_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::File), true);
    }

    fn file_doesnt_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::File), false);
    }

    fn dir_does_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::Dir), true);
    }

    fn dir_doesnt_pass(&self, path: &str) {
        self.path_pass(path, Some(FileType::Dir), false);
    }

    // "unk" = path with no file type information attached.
    fn unk_does_pass(&self, path: &str) {
        self.path_pass(path, None, true);
    }

    fn unk_doesnt_pass(&self, path: &str) {
        self.path_pass(path, None, false);
    }
}

// Both filterer types under test get the harness for free.
impl PathHarness for TaggedFilterer {}
impl PathHarness for IgnoreFilterer {}
/// Assertion helpers for checking non-path tags (source, pid, signal,
/// process completion, priority) against a [`TaggedFilterer`].
pub trait TaggedHarness {
    // Runs a single-tag event through the filterer at the given priority.
    fn check_tag(&self, tag: Tag, priority: Priority) -> std::result::Result<bool, RuntimeError>;

    // Priority checks use a fixed filesystem-source tag so only the
    // priority itself varies.
    fn priority_pass(&self, priority: Priority, pass: bool) {
        tracing::info!(?priority, ?pass, "check");

        assert_eq!(
            self.check_tag(Tag::Source(Source::Filesystem), priority)
                .unwrap(),
            pass,
            "{priority:?} (expected {})",
            if pass { "pass" } else { "fail" }
        );
    }

    fn priority_does_pass(&self, priority: Priority) {
        self.priority_pass(priority, true);
    }

    fn priority_doesnt_pass(&self, priority: Priority) {
        self.priority_pass(priority, false);
    }

    fn tag_pass(&self, tag: Tag, pass: bool) {
        tracing::info!(?tag, ?pass, "check");

        assert_eq!(
            self.check_tag(tag.clone(), Priority::Normal).unwrap(),
            pass,
            "{tag:?} (expected {})",
            if pass { "pass" } else { "fail" }
        );
    }

    fn fek_does_pass(&self, fek: FileEventKind) {
        self.tag_pass(Tag::FileEventKind(fek), true);
    }

    fn fek_doesnt_pass(&self, fek: FileEventKind) {
        self.tag_pass(Tag::FileEventKind(fek), false);
    }

    fn source_does_pass(&self, source: Source) {
        self.tag_pass(Tag::Source(source), true);
    }

    fn source_doesnt_pass(&self, source: Source) {
        self.tag_pass(Tag::Source(source), false);
    }

    fn pid_does_pass(&self, pid: u32) {
        self.tag_pass(Tag::Process(pid), true);
    }

    fn pid_doesnt_pass(&self, pid: u32) {
        self.tag_pass(Tag::Process(pid), false);
    }

    fn signal_does_pass(&self, sig: Signal) {
        self.tag_pass(Tag::Signal(sig), true);
    }

    fn signal_doesnt_pass(&self, sig: Signal) {
        self.tag_pass(Tag::Signal(sig), false);
    }

    fn complete_does_pass(&self, exit: Option<ProcessEnd>) {
        self.tag_pass(Tag::ProcessCompletion(exit), true);
    }

    fn complete_doesnt_pass(&self, exit: Option<ProcessEnd>) {
        self.tag_pass(Tag::ProcessCompletion(exit), false);
    }
}

impl TaggedHarness for TaggedFilterer {
    fn check_tag(&self, tag: Tag, priority: Priority) -> std::result::Result<bool, RuntimeError> {
        let event = Event {
            tags: vec![tag],
            metadata: Default::default(),
        };

        self.check_event(&event, priority)
    }
}
/// Installs a pretty-printing tracing subscriber driven by `RUST_LOG`.
///
/// Safe to call from every test: installation failure (a subscriber is
/// already set) is deliberately ignored.
fn tracing_init() {
    use tracing_subscriber::{
        fmt::{format::FmtSpan, Subscriber},
        util::SubscriberInitExt,
        EnvFilter,
    };

    let subscriber = Subscriber::builder()
        .pretty()
        .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
        .with_env_filter(EnvFilter::from_default_env())
        .finish();

    subscriber.try_init().ok();
}
/// Builds an [`IgnoreFilter`] rooted at `./{origin}` from the given ignore files.
pub async fn ignore_filt(origin: &str, ignore_files: &[IgnoreFile]) -> IgnoreFilter {
    tracing_init();
    let origin = canonicalize(".").await.unwrap().join(origin);
    IgnoreFilter::new(origin, ignore_files)
        .await
        .expect("making filterer")
}

/// Builds a [`TaggedFilterer`] rooted at the current directory with the given
/// filters pre-added.
pub async fn tagged_filt(filters: &[Filter]) -> Arc<TaggedFilterer> {
    let origin = canonicalize(".").await.unwrap();
    tracing_init();
    let filterer = TaggedFilterer::new(origin.clone(), origin)
        .await
        .expect("creating filterer");
    filterer.add_filters(filters).await.expect("adding filters");
    filterer
}

/// Builds a [`TaggedFilterer`] rooted at `./{origin}` with the given ignore
/// files loaded (but no filters).
pub async fn tagged_igfilt(origin: &str, ignore_files: &[IgnoreFile]) -> Arc<TaggedFilterer> {
    let origin = canonicalize(".").await.unwrap().join(origin);
    tracing_init();
    let filterer = TaggedFilterer::new(origin.clone(), origin)
        .await
        .expect("creating filterer");
    for file in ignore_files {
        tracing::info!(?file, "loading ignore file");
        filterer
            .add_ignore_file(file)
            .await
            .expect("adding ignore file");
    }
    filterer
}

/// Builds a [`TaggedFilterer`] with both ignore files and `.wef` filter files
/// loaded: the filter files are parsed and their filters added on top of
/// [`tagged_igfilt`]'s result.
pub async fn tagged_fffilt(
    origin: &str,
    ignore_files: &[IgnoreFile],
    filter_files: &[FilterFile],
) -> Arc<TaggedFilterer> {
    let filterer = tagged_igfilt(origin, ignore_files).await;
    let mut filters = Vec::new();
    for file in filter_files {
        tracing::info!(?file, "loading filter file");
        filters.extend(file.load().await.expect("loading filter file"));
    }

    filterer
        .add_filters(&filters)
        .await
        .expect("adding filters");

    filterer
}
/// Points an [`IgnoreFile`] at `tests/ignores/{name}`, with no scoping
/// (`applies_in`/`applies_to` unset).
pub async fn ig_file(name: &str) -> IgnoreFile {
    let path = canonicalize(".")
        .await
        .unwrap()
        .join("tests")
        .join("ignores")
        .join(name);
    IgnoreFile {
        path,
        applies_in: None,
        applies_to: None,
    }
}

/// Like [`ig_file`], but wrapped as a [`FilterFile`].
pub async fn ff_file(name: &str) -> FilterFile {
    FilterFile(ig_file(name).await)
}

/// Builder-style setters for scoping an ignore/filter file to a directory or
/// project type.
pub trait Applies {
    fn applies_in(self, origin: &str) -> Self;
    fn applies_to(self, project_type: ProjectType) -> Self;
}

impl Applies for IgnoreFile {
    fn applies_in(mut self, origin: &str) -> Self {
        // `origin` is relative to the current directory.
        let origin = std::fs::canonicalize(".").unwrap().join(origin);
        self.applies_in = Some(origin);
        self
    }

    fn applies_to(mut self, project_type: ProjectType) -> Self {
        self.applies_to = Some(project_type);
        self
    }
}

impl Applies for FilterFile {
    // Delegates to the inner IgnoreFile's implementations.
    fn applies_in(self, origin: &str) -> Self {
        Self(self.0.applies_in(origin))
    }

    fn applies_to(self, project_type: ProjectType) -> Self {
        Self(self.0.applies_to(project_type))
    }
}
/// Parses a filter from its textual form, panicking on invalid input.
pub fn filter(expr: &str) -> Filter {
    Filter::from_str(expr).expect("parse filter")
}

/// Builds a non-negated path glob filter for `pat`.
pub fn glob_filter(pat: &str) -> Filter {
    Filter {
        in_path: None,
        on: Matcher::Path,
        op: Op::Glob,
        pat: Pattern::Glob(pat.into()),
        negate: false,
    }
}

/// Builds a non-negated path not-glob filter for `pat`.
pub fn notglob_filter(pat: &str) -> Filter {
    Filter {
        in_path: None,
        on: Matcher::Path,
        op: Op::NotGlob,
        pat: Pattern::Glob(pat.into()),
        negate: false,
    }
}

/// Builder-style setter for a filter's `in_path` scope, relative to the
/// current directory.
pub trait FilterExt {
    // Scopes the filter to the current directory itself.
    fn in_path(self) -> Self
    where
        Self: Sized,
    {
        self.in_subpath("")
    }

    fn in_subpath(self, sub: impl AsRef<Path>) -> Self;
}

impl FilterExt for Filter {
    fn in_subpath(mut self, sub: impl AsRef<Path>) -> Self {
        let origin = std::fs::canonicalize(".").unwrap();
        self.in_path = Some(origin.join(sub));
        self
    }
}

View File

@ -1,3 +0,0 @@
# comment
# blank line

View File

@ -1,2 +0,0 @@
type==dir
path*=prunes

View File

@ -1,11 +0,0 @@
Cargo.toml
package.json
*.gemspec
test-*
*.sw*
sources.*/
/output.*
**/possum
zebra/**
elep/**/hant
song/**/bird/

View File

@ -1,2 +0,0 @@
path=nah
!path=nah.yeah

View File

@ -1,5 +0,0 @@
path*!Cargo.toml
path*!package.json
path*!*.gemspec
path~!test-[^u]+
path~![.]sw[a-z]$

View File

@ -1,453 +0,0 @@
use std::num::{NonZeroI32, NonZeroI64};
use watchexec_events::{filekind::*, ProcessEnd, Source};
use watchexec_filterer_tagged::TaggedFilterer;
use watchexec_signals::Signal;
mod helpers;
use helpers::tagged::*;
// With no filters added, every kind of tag must pass.
#[tokio::test]
async fn empty_filter_passes_everything() {
    let filterer = filt(&[]).await;

    filterer.source_does_pass(Source::Keyboard);
    filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
    filterer.pid_does_pass(1234);
    filterer.signal_does_pass(Signal::User1);
    filterer.complete_does_pass(None);
    filterer.complete_does_pass(Some(ProcessEnd::Success));
}

// Source is used as a relatively simple test case for common text-based ops, so
// these aren't repeated for the other tags, which instead focus on their own
// special characteristics.

#[tokio::test]
async fn source_exact() {
    let filterer = filt(&[filter("source==keyboard")]).await;

    filterer.source_does_pass(Source::Keyboard);
    filterer.source_doesnt_pass(Source::Mouse);
}

#[tokio::test]
async fn source_glob() {
    let filterer = filt(&[filter("source*=*i*m*")]).await;

    filterer.source_does_pass(Source::Filesystem);
    filterer.source_does_pass(Source::Time);
    filterer.source_doesnt_pass(Source::Internal);
}

#[tokio::test]
async fn source_regex() {
    let filterer = filt(&[filter("source~=(keyboard|mouse)")]).await;

    filterer.source_does_pass(Source::Keyboard);
    filterer.source_does_pass(Source::Mouse);
    filterer.source_doesnt_pass(Source::Internal);
}

// Two filters on the same matcher are ANDed together.
#[tokio::test]
async fn source_two_filters() {
    let filterer = filt(&[filter("source*=*s*"), filter("source!=mouse")]).await;

    filterer.source_doesnt_pass(Source::Mouse);
    filterer.source_does_pass(Source::Filesystem);
}

#[tokio::test]
async fn source_allowlisting() {
    // allowlisting is vastly easier to achieve with e.g. `source==mouse`
    // but this pattern is nonetheless useful for more complex cases.
    let filterer = filt(&[filter("source*!*"), filter("!source==mouse")]).await;

    filterer.source_does_pass(Source::Mouse);
    filterer.source_doesnt_pass(Source::Filesystem);
}

// `:=` set membership; the bare `=` auto op maps to `:=` for sources.
#[tokio::test]
async fn source_set() {
    let f = filter("source:=keyboard,mouse");
    assert_eq!(f, filter("source=keyboard,mouse"));

    let filterer = filt(&[f]).await;
    filterer.source_does_pass(Source::Keyboard);
    filterer.source_does_pass(Source::Mouse);
    filterer.source_doesnt_pass(Source::Internal);

    let filterer = filt(&[filter("source:!keyboard,mouse")]).await;
    filterer.source_doesnt_pass(Source::Keyboard);
    filterer.source_doesnt_pass(Source::Mouse);
    filterer.source_does_pass(Source::Internal);
}
// Globbing into the debug representation of the event kind, one level deep;
// `kind`/`fek` aliases and the `=` auto op (glob for this matcher) are equivalent.
#[tokio::test]
async fn fek_glob_level_one() {
    let f = filter("kind*=Create(*)");
    assert_eq!(f, filter("fek*=Create(*)"));
    assert_eq!(f, filter("kind=Create(*)"));
    assert_eq!(f, filter("fek=Create(*)"));

    let filterer = filt(&[f]).await;
    filterer.fek_does_pass(FileEventKind::Create(CreateKind::Any));
    filterer.fek_does_pass(FileEventKind::Create(CreateKind::File));
    filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
}

#[tokio::test]
async fn fek_glob_level_two() {
    let filterer = filt(&[filter("fek=Modify(Data(*))")]).await;

    filterer.fek_does_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
    filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Other));
    filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Metadata(
        MetadataKind::Permissions,
    )));
    filterer.fek_doesnt_pass(FileEventKind::Create(CreateKind::Any));
}

// A fully-specified kind matches identically via glob and exact-equality ops.
#[tokio::test]
async fn fek_level_three() {
    fn suite(filterer: &TaggedFilterer) {
        filterer.fek_does_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Content)));
        filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Data(DataChange::Size)));
        filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Other));
        filterer.fek_doesnt_pass(FileEventKind::Modify(ModifyKind::Metadata(
            MetadataKind::Permissions,
        )));
        filterer.fek_doesnt_pass(FileEventKind::Create(CreateKind::Any));
    }

    suite(filt(&[filter("fek=Modify(Data(Content))")]).await.as_ref());
    suite(filt(&[filter("fek==Modify(Data(Content))")]).await.as_ref());
}

// Pids match whole values, never substrings; `process`/`pid` and `:=`/auto
// spellings are equivalent.
#[tokio::test]
async fn pid_set_single() {
    let f = filter("process:=1234");
    assert_eq!(f, filter("pid:=1234"));
    assert_eq!(f, filter("process=1234"));
    assert_eq!(f, filter("pid=1234"));

    let filterer = filt(&[f]).await;

    filterer.pid_does_pass(1234);
    filterer.pid_doesnt_pass(5678);
    filterer.pid_doesnt_pass(12345);
    filterer.pid_doesnt_pass(123);
}

#[tokio::test]
async fn pid_set_multiple() {
    let filterer = filt(&[filter("pid=123,456")]).await;

    filterer.pid_does_pass(123);
    filterer.pid_does_pass(456);
    filterer.pid_doesnt_pass(123456);
    filterer.pid_doesnt_pass(12);
    filterer.pid_doesnt_pass(23);
    filterer.pid_doesnt_pass(45);
    filterer.pid_doesnt_pass(56);
    filterer.pid_doesnt_pass(1234);
    filterer.pid_doesnt_pass(3456);
    filterer.pid_doesnt_pass(4567);
    filterer.pid_doesnt_pass(34567);
    filterer.pid_doesnt_pass(0);
}

#[tokio::test]
async fn pid_equals() {
    let f = filter("process==1234");
    assert_eq!(f, filter("pid==1234"));

    let filterer = filt(&[f]).await;

    filterer.pid_does_pass(1234);
    filterer.pid_doesnt_pass(5678);
    filterer.pid_doesnt_pass(12345);
    filterer.pid_doesnt_pass(123);
}
// Signals are accepted by name with or without the SIG prefix, by number,
// and in any mix of those within a set.
#[tokio::test]
async fn signal_set_single_without_sig() {
    let f = filter("signal=INT");
    assert_eq!(f, filter("sig=INT"));
    assert_eq!(f, filter("signal:=INT"));
    assert_eq!(f, filter("sig:=INT"));

    let filterer = filt(&[f]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_set_single_with_sig() {
    let filterer = filt(&[filter("signal:=SIGINT")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_set_multiple_without_sig() {
    let filterer = filt(&[filter("sig:=INT,TERM")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_does_pass(Signal::Terminate);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_set_multiple_with_sig() {
    let filterer = filt(&[filter("signal:=SIGINT,SIGTERM")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_does_pass(Signal::Terminate);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_set_multiple_mixed_sig() {
    let filterer = filt(&[filter("sig:=SIGINT,TERM")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_does_pass(Signal::Terminate);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_equals_without_sig() {
    let filterer = filt(&[filter("sig==INT")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_equals_with_sig() {
    let filterer = filt(&[filter("signal==SIGINT")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

// 2 = SIGINT, 15 = SIGTERM, 1 = SIGHUP.
#[tokio::test]
async fn signal_set_single_numbers() {
    let filterer = filt(&[filter("signal:=2")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_set_multiple_numbers() {
    let filterer = filt(&[filter("sig:=2,15")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_does_pass(Signal::Terminate);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_equals_numbers() {
    let filterer = filt(&[filter("sig==2")]).await;

    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_doesnt_pass(Signal::Hangup);
}

#[tokio::test]
async fn signal_set_all_mixed() {
    let filterer = filt(&[filter("signal:=SIGHUP,INT,15")]).await;

    filterer.signal_does_pass(Signal::Hangup);
    filterer.signal_does_pass(Signal::Interrupt);
    filterer.signal_does_pass(Signal::Terminate);
    filterer.signal_doesnt_pass(Signal::User1);
}
// Process-completion values: `_` matches "no completion", `*` matches
// anything, and specific outcomes are matched by name, with optional
// parenthesised payloads globbed below.
#[tokio::test]
async fn complete_empty() {
    let f = filter("complete=_");
    assert_eq!(f, filter("complete*=_"));
    assert_eq!(f, filter("exit=_"));
    assert_eq!(f, filter("exit*=_"));

    let filterer = filt(&[f]).await;

    filterer.complete_does_pass(None);
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
}

#[tokio::test]
async fn complete_any() {
    let filterer = filt(&[filter("complete=*")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::Success));
    filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
    filterer.complete_does_pass(None);
}

#[tokio::test]
async fn complete_with_success() {
    let filterer = filt(&[filter("complete*=success")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_continued() {
    let filterer = filt(&[filter("complete*=continued")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::Continued));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_specific_exit_error() {
    let filterer = filt(&[filter("complete*=error(1)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_any_exit_error() {
    let filterer = filt(&[filter("complete*=error(*)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(1).unwrap())));
    filterer.complete_does_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
    filterer.complete_does_pass(Some(ProcessEnd::ExitError(
        NonZeroI64::new(-12823912738).unwrap(),
    )));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_specific_stop() {
    let filterer = filt(&[filter("complete*=stop(19)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_any_stop() {
    let filterer = filt(&[filter("complete*=stop(*)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(1).unwrap())));
    filterer.complete_does_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
    filterer.complete_does_pass(Some(ProcessEnd::ExitStop(
        NonZeroI32::new(-128239127).unwrap(),
    )));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

// NOTE(review): the filter names 0x4B53 but the passing value is 19283
// decimal (0x4B53) — the glob appears to match the hex rendering; confirm
// against the Matcher::ProcessCompletion serialisation.
#[tokio::test]
async fn complete_with_specific_exception() {
    let filterer = filt(&[filter("complete*=exception(4B53)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(19283).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_any_exception() {
    let filterer = filt(&[filter("complete*=exception(*)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(1).unwrap())));
    filterer.complete_does_pass(Some(ProcessEnd::Exception(NonZeroI32::new(63).unwrap())));
    filterer.complete_does_pass(Some(ProcessEnd::Exception(
        NonZeroI32::new(-128239127).unwrap(),
    )));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_specific_signal_with_sig() {
    let filterer = filt(&[filter("complete*=signal(SIGINT)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_specific_signal_without_sig() {
    let filterer = filt(&[filter("complete*=signal(INT)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}

#[tokio::test]
async fn complete_with_specific_signal_number() {
    let filterer = filt(&[filter("complete*=signal(2)")]).await;

    filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
    filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(19).unwrap())));
    filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
    filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn complete_with_any_signal() {
let filterer = filt(&[filter("complete*=signal(*)")]).await;
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Interrupt)));
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Terminate)));
filterer.complete_does_pass(Some(ProcessEnd::ExitSignal(Signal::Custom(123))));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitStop(NonZeroI32::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::ExitError(NonZeroI64::new(63).unwrap())));
filterer.complete_doesnt_pass(Some(ProcessEnd::Success));
filterer.complete_doesnt_pass(None);
}
#[tokio::test]
async fn priority_auto() {
	// The default operator on `priority` matches exactly one level.
	let f = filt(&[filter("priority=normal")]).await;
	f.priority_doesnt_pass(Priority::Low);
	f.priority_does_pass(Priority::Normal);
	f.priority_doesnt_pass(Priority::High);
}

#[tokio::test]
async fn priority_set() {
	// The in-set operator `:=` matches any of the listed levels.
	let f = filt(&[filter("priority:=normal,high")]).await;
	f.priority_doesnt_pass(Priority::Low);
	f.priority_does_pass(Priority::Normal);
	f.priority_does_pass(Priority::High);
}

#[tokio::test]
async fn priority_none() {
	// With no filters at all, every priority passes.
	let f = filt(&[]).await;
	f.priority_does_pass(Priority::Low);
	f.priority_does_pass(Priority::Normal);
	f.priority_does_pass(Priority::High);
}

View File

@ -1,226 +0,0 @@
use std::{collections::HashSet, str::FromStr};
use watchexec_filterer_tagged::{Filter, Matcher, Op, Pattern, Regex, TaggedFiltererError};
mod helpers;
use helpers::tagged::*;
#[test]
fn empty_filter() {
	// An empty string is not a valid filter expression.
	let parsed = Filter::from_str("");
	assert!(matches!(parsed, Err(TaggedFiltererError::Parse { .. })));
}

#[test]
fn only_bang() {
	// A lone negation marker has nothing to negate, so parsing fails.
	let parsed = Filter::from_str("!");
	assert!(matches!(parsed, Err(TaggedFiltererError::Parse { .. })));
}

#[test]
fn no_op() {
	// A matcher without any operator is rejected.
	let parsed = Filter::from_str("foobar");
	assert!(matches!(parsed, Err(TaggedFiltererError::Parse { .. })));
}
#[test]
fn path_auto_op() {
	// A bare `=` on the `path` matcher defaults to the glob operator.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Glob,
		pat: Pattern::Glob("foo".to_string()),
		negate: false,
	};
	assert_eq!(filter("path=foo"), expected);
}

#[test]
fn fek_auto_op() {
	// A bare `=` on the `fek` (file event kind) matcher also defaults to glob.
	let expected = Filter {
		in_path: None,
		on: Matcher::FileEventKind,
		op: Op::Glob,
		pat: Pattern::Glob("foo".to_string()),
		negate: false,
	};
	assert_eq!(filter("fek=foo"), expected);
}

#[test]
fn other_auto_op() {
	// For other matchers such as `type`, a bare `=` defaults to the in-set operator.
	let expected = Filter {
		in_path: None,
		on: Matcher::FileType,
		op: Op::InSet,
		pat: Pattern::Set(HashSet::from(["foo".to_string()])),
		negate: false,
	};
	assert_eq!(filter("type=foo"), expected);
}
#[test]
fn op_equal() {
	// `==` is exact string equality.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Equal,
		pat: Pattern::Exact("foo".to_string()),
		negate: false,
	};
	assert_eq!(filter("path==foo"), expected);
}

#[test]
fn op_not_equal() {
	// `!=` is exact string inequality.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::NotEqual,
		pat: Pattern::Exact("foo".to_string()),
		negate: false,
	};
	assert_eq!(filter("path!=foo"), expected);
}

#[test]
fn op_regex() {
	// `~=` parses the pattern as a regex.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Regex,
		pat: Pattern::Regex(Regex::new("^fo+$").unwrap()),
		negate: false,
	};
	assert_eq!(filter("path~=^fo+$"), expected);
}

#[test]
fn op_not_regex() {
	// `~!` parses the pattern as a regex to be negated.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::NotRegex,
		pat: Pattern::Regex(Regex::new("f(o|al)+").unwrap()),
		negate: false,
	};
	assert_eq!(filter("path~!f(o|al)+"), expected);
}
#[test]
fn op_glob() {
	// `*=` parses the pattern as a glob.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Glob,
		pat: Pattern::Glob("**/foo".to_string()),
		negate: false,
	};
	assert_eq!(filter("path*=**/foo"), expected);
}

#[test]
fn op_not_glob() {
	// `*!` parses the pattern as a glob to be negated.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::NotGlob,
		pat: Pattern::Glob("foo.*".to_string()),
		negate: false,
	};
	assert_eq!(filter("path*!foo.*"), expected);
}

#[test]
fn op_in_set() {
	// `:=` splits the pattern on commas into a membership set.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::InSet,
		pat: Pattern::Set(HashSet::from(["foo".to_string(), "bar".to_string()])),
		negate: false,
	};
	assert_eq!(filter("path:=foo,bar"), expected);
}

#[test]
fn op_not_in_set() {
	// `:!` splits the pattern on commas into a negated membership set.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::NotInSet,
		pat: Pattern::Set(HashSet::from(["baz".to_string(), "qux".to_string()])),
		negate: false,
	};
	assert_eq!(filter("path:!baz,qux"), expected);
}
#[test]
fn quoted_single() {
	// Single quotes let a pattern contain spaces; the quotes are stripped.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Glob,
		pat: Pattern::Glob("blanche neige".to_string()),
		negate: false,
	};
	assert_eq!(filter("path='blanche neige'"), expected);
}

#[test]
fn quoted_double() {
	// Double quotes behave the same as single quotes.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Glob,
		pat: Pattern::Glob("et les sept nains".to_string()),
		negate: false,
	};
	assert_eq!(filter("path=\"et les sept nains\""), expected);
}

#[test]
fn negate() {
	// A leading `!` sets the negate flag without changing the operator.
	let expected = Filter {
		in_path: None,
		on: Matcher::Path,
		op: Op::Regex,
		pat: Pattern::Regex(Regex::new("^f[om]+$").unwrap()),
		negate: true,
	};
	assert_eq!(filter("!path~=^f[om]+$"), expected);
}

View File

@ -1,454 +0,0 @@
use std::sync::Arc;
use watchexec_filterer_tagged::TaggedFilterer;
mod helpers;
use helpers::tagged::*;
#[tokio::test]
async fn empty_filter_passes_everything() {
	// With no filters configured, files and directories at any depth all pass.
	let f = filt(&[]).await;
	f.file_does_pass("Cargo.toml");
	f.file_does_pass("Cargo.json");
	f.file_does_pass("Gemfile.toml");
	f.file_does_pass("FINAL-FINAL.docx");
	f.dir_does_pass("/test/Cargo.toml");
	f.dir_does_pass("/a/folder");
	f.file_does_pass("apples/carrots/oranges");
	f.file_does_pass("apples/carrots/cauliflowers/oranges");
	f.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.file_does_pass("apples/oranges/bananas");
	f.dir_does_pass("apples/carrots/oranges");
	f.dir_does_pass("apples/carrots/cauliflowers/oranges");
	f.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.dir_does_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn exact_filename() {
	// A bare filename glob matches that name at any depth.
	let f = filt(&[glob_filter("Cargo.toml")]).await;
	f.file_does_pass("Cargo.toml");
	f.file_does_pass("/test/foo/bar/Cargo.toml");
	f.file_doesnt_pass("Cargo.json");
	f.file_doesnt_pass("Gemfile.toml");
	f.file_doesnt_pass("FINAL-FINAL.docx");
	f.dir_doesnt_pass("/a/folder");
	f.dir_does_pass("/test/Cargo.toml");
}

#[tokio::test]
async fn exact_filenames_multiple() {
	// Multiple filename globs are OR-ed together.
	let f = filt(&[glob_filter("Cargo.toml"), glob_filter("package.json")]).await;
	f.file_does_pass("Cargo.toml");
	f.file_does_pass("/test/foo/bar/Cargo.toml");
	f.file_does_pass("package.json");
	f.file_does_pass("/test/foo/bar/package.json");
	f.file_doesnt_pass("Cargo.json");
	f.file_doesnt_pass("package.toml");
	f.file_doesnt_pass("Gemfile.toml");
	f.file_doesnt_pass("FINAL-FINAL.docx");
	f.dir_doesnt_pass("/a/folder");
	f.dir_does_pass("/test/Cargo.toml");
	f.dir_does_pass("/test/package.json");
}
#[tokio::test]
async fn glob_single_final_ext_star() {
	// A trailing `*` in the extension matches any extension.
	let f = filt(&[glob_filter("Cargo.*")]).await;
	f.file_does_pass("Cargo.toml");
	f.file_does_pass("Cargo.json");
	f.file_doesnt_pass("Gemfile.toml");
	f.file_doesnt_pass("FINAL-FINAL.docx");
	f.dir_doesnt_pass("/a/folder");
	f.dir_does_pass("Cargo.toml");
}

#[tokio::test]
async fn glob_star_trailing_slash() {
	// A trailing slash restricts the glob to directories only.
	let f = filt(&[glob_filter("Cargo.*/")]).await;
	f.file_doesnt_pass("Cargo.toml");
	f.file_doesnt_pass("Cargo.json");
	f.file_doesnt_pass("Gemfile.toml");
	f.file_doesnt_pass("FINAL-FINAL.docx");
	f.dir_doesnt_pass("/a/folder");
	f.dir_does_pass("Cargo.toml");
	f.unk_doesnt_pass("Cargo.toml");
}

#[tokio::test]
async fn glob_star_leading_slash() {
	// A leading slash anchors the glob at the project root.
	let f = filt(&[glob_filter("/Cargo.*")]).await;
	f.file_does_pass("Cargo.toml");
	f.file_does_pass("Cargo.json");
	f.dir_does_pass("Cargo.toml");
	f.unk_does_pass("Cargo.toml");
	f.file_doesnt_pass("foo/Cargo.toml");
	f.dir_doesnt_pass("foo/Cargo.toml");
}
#[tokio::test]
async fn glob_leading_double_star() {
	// A leading `**/` matches the name at any depth, including the top level.
	let f = filt(&[glob_filter("**/possum")]).await;
	f.file_does_pass("possum");
	f.file_does_pass("foo/bar/possum");
	f.file_does_pass("/foo/bar/possum");
	f.dir_does_pass("possum");
	f.dir_does_pass("foo/bar/possum");
	f.dir_does_pass("/foo/bar/possum");
	f.file_doesnt_pass("rat");
	f.file_doesnt_pass("foo/bar/rat");
	f.file_doesnt_pass("/foo/bar/rat");
}

#[tokio::test]
async fn glob_trailing_double_star() {
	// A trailing `/**` matches contents below the name, not the name itself.
	let f = filt(&[glob_filter("possum/**")]).await;
	f.file_doesnt_pass("possum");
	f.file_does_pass("possum/foo/bar");
	f.file_doesnt_pass("/possum/foo/bar");
	f.file_does_pass("/test/possum/foo/bar");
	f.dir_doesnt_pass("possum");
	f.dir_doesnt_pass("foo/bar/possum");
	f.dir_doesnt_pass("/foo/bar/possum");
	f.dir_does_pass("possum/foo/bar");
	f.dir_doesnt_pass("/possum/foo/bar");
	f.dir_does_pass("/test/possum/foo/bar");
	f.file_doesnt_pass("rat");
	f.file_doesnt_pass("foo/bar/rat");
	f.file_doesnt_pass("/foo/bar/rat");
}
#[tokio::test]
async fn glob_middle_double_star() {
	// A middle `/**/` matches any number of intermediate components.
	let f = filt(&[glob_filter("apples/**/oranges")]).await;
	f.dir_doesnt_pass("/a/folder");
	f.file_does_pass("apples/carrots/oranges");
	f.file_does_pass("apples/carrots/cauliflowers/oranges");
	f.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.dir_does_pass("apples/carrots/oranges");
	f.dir_does_pass("apples/carrots/cauliflowers/oranges");
	f.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	// different from globset/v1 behaviour, but correct:
	f.file_does_pass("apples/oranges/bananas");
	f.dir_does_pass("apples/oranges/bananas");
}

#[tokio::test]
async fn glob_double_star_trailing_slash() {
	// Adding a trailing slash restricts the double-star glob to directories.
	let f = filt(&[glob_filter("apples/**/oranges/")]).await;
	f.dir_doesnt_pass("/a/folder");
	f.file_doesnt_pass("apples/carrots/oranges");
	f.file_doesnt_pass("apples/carrots/cauliflowers/oranges");
	f.file_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.dir_does_pass("apples/carrots/oranges");
	f.dir_does_pass("apples/carrots/cauliflowers/oranges");
	f.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.unk_doesnt_pass("apples/carrots/oranges");
	f.unk_doesnt_pass("apples/carrots/cauliflowers/oranges");
	f.unk_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
	// different from globset/v1 behaviour, but correct:
	f.file_does_pass("apples/oranges/bananas");
	f.dir_does_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn ignore_exact_filename() {
	// A negated filename glob excludes that name at any depth.
	let f = filt(&[notglob_filter("Cargo.toml")]).await;
	f.file_doesnt_pass("Cargo.toml");
	f.file_doesnt_pass("/test/foo/bar/Cargo.toml");
	f.file_does_pass("Cargo.json");
	f.file_does_pass("Gemfile.toml");
	f.file_does_pass("FINAL-FINAL.docx");
	f.dir_does_pass("/a/folder");
	f.dir_doesnt_pass("/test/Cargo.toml");
}

#[tokio::test]
async fn ignore_exact_filenames_multiple() {
	// Multiple negated globs each exclude their own name.
	let f = filt(&[notglob_filter("Cargo.toml"), notglob_filter("package.json")]).await;
	f.file_doesnt_pass("Cargo.toml");
	f.file_doesnt_pass("/test/foo/bar/Cargo.toml");
	f.file_doesnt_pass("package.json");
	f.file_doesnt_pass("/test/foo/bar/package.json");
	f.file_does_pass("Cargo.json");
	f.file_does_pass("package.toml");
	f.file_does_pass("Gemfile.toml");
	f.file_does_pass("FINAL-FINAL.docx");
	f.dir_does_pass("/a/folder");
	f.dir_doesnt_pass("/test/Cargo.toml");
	f.dir_doesnt_pass("/test/package.json");
}
#[tokio::test]
async fn ignore_glob_single_final_ext_star() {
	// Negated `Cargo.*` excludes all extensions of that stem.
	let f = filt(&[notglob_filter("Cargo.*")]).await;
	f.file_doesnt_pass("Cargo.toml");
	f.file_doesnt_pass("Cargo.json");
	f.file_does_pass("Gemfile.toml");
	f.file_does_pass("FINAL-FINAL.docx");
	f.dir_does_pass("/a/folder");
	f.dir_doesnt_pass("Cargo.toml");
}

#[tokio::test]
async fn ignore_glob_star_trailing_slash() {
	// A trailing slash on a negated glob excludes directories only.
	let f = filt(&[notglob_filter("Cargo.*/")]).await;
	f.file_does_pass("Cargo.toml");
	f.file_does_pass("Cargo.json");
	f.file_does_pass("Gemfile.toml");
	f.file_does_pass("FINAL-FINAL.docx");
	f.dir_does_pass("/a/folder");
	f.dir_doesnt_pass("Cargo.toml");
	f.unk_does_pass("Cargo.toml");
}

#[tokio::test]
async fn ignore_glob_star_leading_slash() {
	// A leading slash on a negated glob only excludes at the project root.
	let f = filt(&[notglob_filter("/Cargo.*")]).await;
	f.file_doesnt_pass("Cargo.toml");
	f.file_doesnt_pass("Cargo.json");
	f.dir_doesnt_pass("Cargo.toml");
	f.unk_doesnt_pass("Cargo.toml");
	f.file_does_pass("foo/Cargo.toml");
	f.dir_does_pass("foo/Cargo.toml");
}
#[tokio::test]
async fn ignore_glob_leading_double_star() {
	// Negated `**/name` excludes the name at every depth.
	let f = filt(&[notglob_filter("**/possum")]).await;
	f.file_doesnt_pass("possum");
	f.file_doesnt_pass("foo/bar/possum");
	f.file_doesnt_pass("/foo/bar/possum");
	f.dir_doesnt_pass("possum");
	f.dir_doesnt_pass("foo/bar/possum");
	f.dir_doesnt_pass("/foo/bar/possum");
	f.file_does_pass("rat");
	f.file_does_pass("foo/bar/rat");
	f.file_does_pass("/foo/bar/rat");
}

#[tokio::test]
async fn ignore_glob_trailing_double_star() {
	// Negated `name/**` excludes only the contents below the name.
	let f = filt(&[notglob_filter("possum/**")]).await;
	f.file_does_pass("possum");
	f.file_doesnt_pass("possum/foo/bar");
	f.file_does_pass("/possum/foo/bar");
	f.file_doesnt_pass("/test/possum/foo/bar");
	f.dir_does_pass("possum");
	f.dir_does_pass("foo/bar/possum");
	f.dir_does_pass("/foo/bar/possum");
	f.dir_doesnt_pass("possum/foo/bar");
	f.dir_does_pass("/possum/foo/bar");
	f.dir_doesnt_pass("/test/possum/foo/bar");
	f.file_does_pass("rat");
	f.file_does_pass("foo/bar/rat");
	f.file_does_pass("/foo/bar/rat");
}
#[tokio::test]
async fn ignore_glob_middle_double_star() {
	// Negated middle `/**/` excludes across any number of intermediate components.
	let f = filt(&[notglob_filter("apples/**/oranges")]).await;
	f.dir_does_pass("/a/folder");
	f.file_doesnt_pass("apples/carrots/oranges");
	f.file_doesnt_pass("apples/carrots/cauliflowers/oranges");
	f.file_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.dir_doesnt_pass("apples/carrots/oranges");
	f.dir_doesnt_pass("apples/carrots/cauliflowers/oranges");
	f.dir_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
	// different from globset/v1 behaviour, but correct:
	f.file_doesnt_pass("apples/oranges/bananas");
	f.dir_doesnt_pass("apples/oranges/bananas");
}

#[tokio::test]
async fn ignore_glob_double_star_trailing_slash() {
	// A trailing slash restricts the negated double-star glob to directories.
	let f = filt(&[notglob_filter("apples/**/oranges/")]).await;
	f.dir_does_pass("/a/folder");
	f.file_does_pass("apples/carrots/oranges");
	f.file_does_pass("apples/carrots/cauliflowers/oranges");
	f.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.dir_doesnt_pass("apples/carrots/oranges");
	f.dir_doesnt_pass("apples/carrots/cauliflowers/oranges");
	f.dir_doesnt_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.unk_does_pass("apples/carrots/oranges");
	f.unk_does_pass("apples/carrots/cauliflowers/oranges");
	f.unk_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	// different from globset/v1 behaviour, but correct:
	f.file_doesnt_pass("apples/oranges/bananas");
	f.dir_doesnt_pass("apples/oranges/bananas");
}
#[tokio::test]
async fn ignores_take_precedence() {
	// When a path matches both an allow glob and an ignore glob, the ignore wins.
	let f = filt(&[
		glob_filter("*.docx"),
		glob_filter("*.toml"),
		glob_filter("*.json"),
		notglob_filter("*.toml"),
		notglob_filter("*.json"),
	])
	.await;
	f.file_doesnt_pass("Cargo.toml");
	f.file_doesnt_pass("/test/foo/bar/Cargo.toml");
	f.file_doesnt_pass("package.json");
	f.file_doesnt_pass("/test/foo/bar/package.json");
	f.dir_doesnt_pass("/test/Cargo.toml");
	f.dir_doesnt_pass("/test/package.json");
	f.file_does_pass("FINAL-FINAL.docx");
}
#[tokio::test]
async fn scopes_global() {
	// An unscoped ignore applies everywhere, inside and outside the project.
	let f = filt(&[notglob_filter("*.toml")]).await;
	f.file_doesnt_pass("Cargo.toml");
	f.dir_doesnt_pass("Cargo.toml");
	f.file_doesnt_pass("/outside/Cargo.toml");
	f.dir_doesnt_pass("/outside/Cargo.toml");
	f.file_does_pass("/outside/package.json");
	f.dir_does_pass("/outside/package.json");
	f.file_does_pass("package.json");
	f.file_does_pass("FINAL-FINAL.docx");
}

#[tokio::test]
async fn scopes_local() {
	// An ignore scoped with `in_path()` only applies inside the project tree.
	let f = filt(&[notglob_filter("*.toml").in_path()]).await;
	f.file_doesnt_pass("/test/Cargo.toml");
	f.dir_doesnt_pass("/test/Cargo.toml");
	f.file_does_pass("/outside/Cargo.toml");
	f.dir_does_pass("/outside/Cargo.toml");
	f.file_does_pass("/outside/package.json");
	f.dir_does_pass("/outside/package.json");
	f.file_does_pass("package.json");
	f.file_does_pass("FINAL-FINAL.docx");
}

#[tokio::test]
async fn scopes_sublocal() {
	// An ignore scoped to a subpath only applies under that subdirectory.
	let f = filt(&[notglob_filter("*.toml").in_subpath("src")]).await;
	f.file_doesnt_pass("/test/src/Cargo.toml");
	f.dir_doesnt_pass("/test/src/Cargo.toml");
	f.file_does_pass("/test/Cargo.toml");
	f.dir_does_pass("/test/Cargo.toml");
	f.file_does_pass("/test/tests/Cargo.toml");
	f.dir_does_pass("/test/tests/Cargo.toml");
	f.file_does_pass("/outside/Cargo.toml");
	f.dir_does_pass("/outside/Cargo.toml");
	f.file_does_pass("/outside/package.json");
	f.dir_does_pass("/outside/package.json");
	f.file_does_pass("package.json");
	f.file_does_pass("FINAL-FINAL.docx");
}
// Shared assertion suite verifying that the "buggy"/"confusing" watchexec v1
// behaviour is gone: ignoring `prunes` must not catch look-alike names such as
// `raw-prunes`, and must not leak outside the `prunes` subtree.
fn watchexec_v1_confusing_suite(f: Arc<TaggedFilterer>) {
	// `apples` subtree is untouched by the ignore.
	f.file_does_pass("apples");
	f.file_does_pass("apples/carrots/cauliflowers/oranges");
	f.file_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	f.file_does_pass("apples/oranges/bananas");
	f.dir_does_pass("apples");
	f.dir_does_pass("apples/carrots/cauliflowers/oranges");
	f.dir_does_pass("apples/carrots/cauliflowers/artichokes/oranges");
	// `raw-prunes` merely contains "prunes" as a substring and must still pass.
	f.file_does_pass("raw-prunes");
	f.dir_does_pass("raw-prunes");
	f.file_does_pass("raw-prunes/carrots/cauliflowers/oranges");
	f.file_does_pass("raw-prunes/carrots/cauliflowers/artichokes/oranges");
	f.file_does_pass("raw-prunes/oranges/bananas");
	f.dir_does_pass("raw-prunes/carrots/cauliflowers/oranges");
	f.dir_does_pass("raw-prunes/carrots/cauliflowers/artichokes/oranges");
	// Everything under `prunes` itself is excluded.
	f.dir_doesnt_pass("prunes/carrots/cauliflowers/oranges");
	f.dir_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges");
	f.file_doesnt_pass("prunes/carrots/cauliflowers/oranges");
	f.file_doesnt_pass("prunes/carrots/cauliflowers/artichokes/oranges");
	f.file_doesnt_pass("prunes/oranges/bananas");
}
#[tokio::test]
async fn ignore_folder_with_bare_match() {
	// A bare name ignores both the file and the folder of that name.
	let f = filt(&[notglob_filter("prunes").in_path()]).await;
	f.file_doesnt_pass("prunes");
	f.dir_doesnt_pass("prunes");
	watchexec_v1_confusing_suite(f);
}

#[tokio::test]
async fn ignore_folder_with_bare_and_leading_slash() {
	// Anchoring with a leading slash behaves like the bare form at the root.
	let f = filt(&[notglob_filter("/prunes").in_path()]).await;
	f.file_doesnt_pass("prunes");
	f.dir_doesnt_pass("prunes");
	watchexec_v1_confusing_suite(f);
}

#[tokio::test]
async fn ignore_folder_with_bare_and_trailing_slash() {
	// A trailing slash restricts the ignore to the directory, not the file.
	let f = filt(&[notglob_filter("prunes/").in_path()]).await;
	f.file_does_pass("prunes");
	f.dir_doesnt_pass("prunes");
	watchexec_v1_confusing_suite(f);
}

#[tokio::test]
async fn ignore_folder_with_only_double_double_glob() {
	// `**/prunes/**` alone only ignores contents, not the entry itself.
	let f = filt(&[notglob_filter("**/prunes/**").in_path()]).await;
	f.file_does_pass("prunes");
	f.dir_does_pass("prunes");
	watchexec_v1_confusing_suite(f);
}

#[tokio::test]
async fn ignore_folder_with_double_and_double_double_globs() {
	// Combining `**/prunes` and `**/prunes/**` ignores the entry and its contents.
	let f = filt(&[
		notglob_filter("**/prunes").in_path(),
		notglob_filter("**/prunes/**").in_path(),
	])
	.await;
	f.file_doesnt_pass("prunes");
	f.dir_doesnt_pass("prunes");
	watchexec_v1_confusing_suite(f);
}