Optimise ignore file gathering (#663)

Co-authored-by: Félix Saparelli <felix@passcod.name>
This commit is contained in:
Victor Adossi ("vados") 2024-01-01 14:01:14 +09:00 committed by GitHub
parent bf9c85f598
commit cb1cfb6bf5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 649 additions and 121 deletions

124
Cargo.lock generated
View File

@ -41,6 +41,15 @@ dependencies = [
"libc",
]
[[package]]
name = "ansi_term"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi",
]
[[package]]
name = "anstream"
version = "0.6.5"
@ -664,6 +673,18 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "command-group"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5080df6b0f0ecb76cab30808f00d937ba725cebe266a3da8cd89dff92f2a9916"
dependencies = [
"async-trait",
"nix 0.26.4",
"tokio",
"winapi",
]
[[package]]
name = "command-group"
version = "5.0.1"
@ -719,7 +740,7 @@ dependencies = [
"tonic",
"tracing",
"tracing-core",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
]
[[package]]
@ -1936,7 +1957,7 @@ dependencies = [
"thiserror",
"tokio",
"tracing",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
]
[[package]]
@ -2158,6 +2179,15 @@ dependencies = [
"libc",
]
[[package]]
name = "matchers"
version = "0.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1"
dependencies = [
"regex-automata 0.1.10",
]
[[package]]
name = "matchers"
version = "0.1.0"
@ -2742,7 +2772,7 @@ dependencies = [
"miette",
"tokio",
"tokio-stream",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
]
[[package]]
@ -3626,6 +3656,17 @@ dependencies = [
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
@ -3647,13 +3688,35 @@ dependencies = [
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71"
dependencies = [
"ansi_term",
"chrono",
"lazy_static",
"matchers 0.0.1",
"regex",
"serde",
"serde_json",
"sharded-slab",
"smallvec",
"thread_local",
"tracing",
"tracing-core",
"tracing-log 0.1.4",
"tracing-serde",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
dependencies = [
"matchers",
"matchers 0.1.0",
"nu-ansi-term",
"once_cell",
"regex",
@ -3664,10 +3727,33 @@ dependencies = [
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
"tracing-log 0.2.0",
"tracing-serde",
]
[[package]]
name = "tracing-test"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3b48778c2d401c6a7fcf38a0e3c55dc8e8e753cbd381044a8cdb6fd69a29f53"
dependencies = [
"lazy_static",
"tracing-core",
"tracing-subscriber 0.2.25",
"tracing-test-macro",
]
[[package]]
name = "tracing-test-macro"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c49adbab879d2e0dd7f75edace5f0ac2156939ecb7e6a1e8fa14e53728328c48"
dependencies = [
"lazy_static",
"quote",
"syn 1.0.109",
]
[[package]]
name = "try-lock"
version = "0.2.5"
@ -3772,6 +3858,16 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "uuid"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560"
dependencies = [
"getrandom",
"rand",
]
[[package]]
name = "valuable"
version = "0.1.0"
@ -3896,7 +3992,7 @@ dependencies = [
"async-priority-channel",
"async-recursion",
"atomic-take",
"command-group",
"command-group 5.0.1",
"futures",
"ignore-files",
"miette",
@ -3908,7 +4004,7 @@ dependencies = [
"thiserror",
"tokio",
"tracing",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
"watchexec-events",
"watchexec-signals",
"watchexec-supervisor",
@ -3926,6 +4022,7 @@ dependencies = [
"clap_complete_nushell",
"clap_mangen",
"clearscreen",
"command-group 2.1.0",
"console-subscriber",
"dirs 5.0.1",
"embed-resource",
@ -3939,12 +4036,15 @@ dependencies = [
"notify-rust",
"pid1",
"project-origins",
"rand",
"serde_json",
"tempfile",
"termcolor",
"tokio",
"tracing",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
"tracing-test",
"uuid",
"watchexec",
"watchexec-events",
"watchexec-filterer-globset",
@ -3974,7 +4074,7 @@ dependencies = [
"project-origins",
"tokio",
"tracing",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
"watchexec",
"watchexec-events",
"watchexec-filterer-ignore",
@ -3990,7 +4090,7 @@ dependencies = [
"project-origins",
"tokio",
"tracing",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
"watchexec",
"watchexec-events",
"watchexec-signals",
@ -4011,7 +4111,7 @@ dependencies = [
"thiserror",
"tokio",
"tracing",
"tracing-subscriber",
"tracing-subscriber 0.3.18",
"unicase",
"watchexec",
"watchexec-events",
@ -4034,7 +4134,7 @@ name = "watchexec-supervisor"
version = "1.0.3"
dependencies = [
"boxcar",
"command-group",
"command-group 5.0.1",
"futures",
"nix 0.27.1",
"tokio",

View File

@ -14,6 +14,13 @@ members = [
"crates/project-origins",
]
[workspace.dependencies]
miette = "5.10.0"
tempfile = "3.8.0"
tracing-test = "0.2.4"
rand = "0.8"
uuid = "1.5.0"
[profile.release]
lto = true
debug = 1 # for stack traces

View File

@ -120,7 +120,7 @@ pub fn gather_to(filename: &str, structname: &str, public: bool) {
let mut output = Self::LONG_VERSION.to_string();
for (k, v) in extra {
output.push_str(&format!("\n{}: {}", k, v));
output.push_str(&format!("\n{k}: {v}"));
}
output

View File

@ -37,6 +37,15 @@ termcolor = "1.4.0"
tracing = "0.1.40"
which = "5.0.0"
[dev-dependencies]
tracing-test = "0.1"
uuid = { workspace = true, features = [ "v4", "fast-rng" ] }
rand = { workspace = true }
[dependencies.command-group]
version = "2.1.0"
features = ["with-tokio"]
[dependencies.clap]
version = "4.4.7"
features = ["cargo", "derive", "env", "wrap_help"]
@ -102,6 +111,7 @@ features = [
"fmt",
"json",
"tracing-log",
"ansi",
]
[target.'cfg(target_env = "musl")'.dependencies]

View File

@ -64,6 +64,7 @@ include!(env!("BOSION_PATH"));
feature = "dev-console",
command(before_help = "⚠ DEV CONSOLE ENABLED ⚠")
)]
#[allow(clippy::struct_excessive_bools)]
pub struct Args {
/// Command to run on changes
///

View File

@ -4,7 +4,7 @@ use std::{
path::{Path, PathBuf},
};
use ignore_files::IgnoreFile;
use ignore_files::{IgnoreFile, IgnoreFilesFromOriginArgs};
use miette::{miette, IntoDiagnostic, Result};
use project_origins::ProjectType;
use tokio::fs::canonicalize;
@ -13,7 +13,12 @@ use watchexec::paths::common_prefix;
use crate::args::Args;
pub async fn dirs(args: &Args) -> Result<(PathBuf, PathBuf)> {
type ProjectOriginPath = PathBuf;
type WorkDirPath = PathBuf;
/// Extract relevant directories (in particular the project origin and work directory)
/// given the command line arguments that were provided
pub async fn dirs(args: &Args) -> Result<(ProjectOriginPath, WorkDirPath)> {
let curdir = env::current_dir().into_diagnostic()?;
let curdir = canonicalize(curdir).await.into_diagnostic()?;
debug!(?curdir, "current directory");
@ -92,13 +97,49 @@ pub async fn vcs_types(origin: &Path) -> Vec<ProjectType> {
vcs_types
}
pub async fn ignores(args: &Args, vcs_types: &[ProjectType], origin: &Path) -> Vec<IgnoreFile> {
pub async fn ignores(
args: &Args,
vcs_types: &[ProjectType],
origin: &Path,
) -> Result<Vec<IgnoreFile>> {
let mut skip_git_global_excludes = false;
let mut ignores = if args.no_project_ignore {
Vec::new()
} else {
let (mut ignores, errors) = ignore_files::from_origin(origin).await;
// Build list of absolute explicitly included paths
let include_paths = args
.paths
.iter()
.map(|p| {
if p.is_absolute() {
p.clone()
} else {
origin.join(p)
}
})
.collect();
// Build list of absolute explicitly ignored paths
let ignore_files = args
.ignore_files
.iter()
.map(|p| {
if p.is_absolute() {
p.clone()
} else {
origin.join(p)
}
})
.collect();
let (mut ignores, errors) = ignore_files::from_origin(IgnoreFilesFromOriginArgs::new(
origin,
include_paths,
ignore_files,
)?)
.await;
for err in errors {
warn!("while discovering project-local ignore files: {}", err);
}
@ -214,5 +255,5 @@ pub async fn ignores(args: &Args, vcs_types: &[ProjectType], origin: &Path) -> V
}
info!(files=?ignores.iter().map(|ig| ig.path.as_path()).collect::<Vec<_>>(), "found some ignores");
ignores
Ok(ignores)
}

View File

@ -18,11 +18,12 @@ use crate::args::{Args, FsEvent};
pub async fn globset(args: &Args) -> Result<Arc<WatchexecFilterer>> {
let (project_origin, workdir) = super::common::dirs(args).await?;
let ignore_files = if args.no_discover_ignore {
Vec::new()
} else {
let vcs_types = super::common::vcs_types(&project_origin).await;
super::common::ignores(args, &vcs_types, &project_origin).await
super::common::ignores(args, &vcs_types, &project_origin).await?
};
let mut ignores = Vec::new();
@ -85,8 +86,18 @@ async fn read_filter_file(path: &Path) -> Result<Vec<(String, Option<PathBuf>)>>
let file = tokio::fs::File::open(path).await.into_diagnostic()?;
let mut filters =
Vec::with_capacity(file.metadata().await.map(|m| m.len() as usize).unwrap_or(0) / 20);
let metadata_len = file
.metadata()
.await
.map(|m| usize::try_from(m.len()))
.unwrap_or(Ok(0))
.into_diagnostic()?;
let filter_capacity = if metadata_len == 0 {
0
} else {
metadata_len / 20
};
let mut filters = Vec::with_capacity(filter_capacity);
let reader = BufReader::new(file);
let mut lines = reader.lines();

View File

@ -36,7 +36,7 @@ async fn init() -> Result<Args> {
if !log_on && var("RUST_LOG").is_ok() {
match tracing_subscriber::fmt::try_init() {
Ok(_) => {
Ok(()) => {
warn!(RUST_LOG=%var("RUST_LOG").unwrap(), "logging configured from RUST_LOG");
log_on = true;
}
@ -88,7 +88,7 @@ async fn init() -> Result<Args> {
} else {
builder.try_init()
} {
Ok(_) => info!("logging initialised"),
Ok(()) => info!("logging initialised"),
Err(e) => eprintln!("Failed to initialise logging, continuing with none\n{e}"),
}
}
@ -162,14 +162,15 @@ async fn run_manpage(_args: Args) -> Result<()> {
Ok(())
}
#[allow(clippy::unused_async)]
async fn run_completions(shell: ShellCompletion) -> Result<()> {
info!(version=%env!("CARGO_PKG_VERSION"), "constructing completions");
fn generate(generator: impl Generator) {
let mut cmd = Args::command();
clap_complete::generate(generator, &mut cmd, "watchexec", &mut std::io::stdout());
}
info!(version=%env!("CARGO_PKG_VERSION"), "constructing completions");
match shell {
ShellCompletion::Bash => generate(Shell::Bash),
ShellCompletion::Elvish => generate(Shell::Elvish),

View File

@ -0,0 +1,121 @@
use std::path::PathBuf;
use std::{fs, sync::OnceLock};
use miette::{Context, IntoDiagnostic, Result};
use rand::Rng;
/// Lazily-initialised contents shared by every generated stub file.
static PLACEHOLDER_DATA: OnceLock<String> = OnceLock::new();

/// Returns the shared stub-file body (the line `PLACEHOLDER` repeated 500
/// times), building it on first use and handing out the same allocation on
/// every subsequent call.
fn get_placeholder_data() -> &'static str {
	PLACEHOLDER_DATA
		.get_or_init(|| {
			let line = "PLACEHOLDER\n";
			let mut data = String::with_capacity(line.len() * 500);
			for _ in 0..500 {
				data.push_str(line);
			}
			data
		})
		.as_str()
}
/// The amount of nesting that will be used for generated files
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) enum GeneratedFileNesting {
	/// All files at a single level, with no subdirectories
	Flat,
	/// A random nesting depth per file, up to a certain maximum depth
	RandomToMax(usize),
}
/// Configuration for creating testing subfolders
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct TestSubfolderConfiguration {
	/// The amount of nesting that will be used when folders are generated
	pub(crate) nesting: GeneratedFileNesting,
	/// Number of stub files the subfolder should contain
	pub(crate) file_count: usize,
	/// Subfolder name (joined onto the generation root directory)
	pub(crate) name: String,
}
/// Options for generating test files
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub(crate) struct GenerateTestFilesArgs {
	/// The path where the files should be generated;
	/// if `None`, a fresh temporary directory will be created and used.
	pub(crate) path: Option<PathBuf>,
	/// Configurations for subfolders to generate
	pub(crate) subfolder_configs: Vec<TestSubfolderConfiguration>,
}
/// Generate test files for use in testing.
///
/// Creates (or reuses, when `args.path` is set) a root directory, then creates
/// one subfolder per entry in `args.subfolder_configs`, filling each with
/// `file_count` stub files laid out according to its [`GeneratedFileNesting`].
///
/// Returns the root path followed by one path per subfolder configuration, in
/// the same order as `subfolder_configs` — i.e. `subfolder_configs.len() + 1`
/// paths in total.
///
/// # Errors
///
/// Fails if the temporary directory, a subfolder, a nested directory, or a
/// stub file cannot be created.
pub(crate) fn generate_test_files(args: GenerateTestFilesArgs) -> Result<Vec<PathBuf>> {
	// Use or create a temporary directory for the test files.
	// `into_path` persists the directory (no automatic cleanup on drop);
	// callers own the cleanup.
	let tmpdir = if let Some(p) = args.path {
		p
	} else {
		tempfile::tempdir()
			.into_diagnostic()
			.wrap_err("failed to build tempdir")?
			.into_path()
	};

	// Root path always comes first in the returned list
	let mut paths = vec![tmpdir.clone()];

	// Generate subfolders matching each config
	for subfolder_config in &args.subfolder_configs {
		// Create the subfolder path
		let subfolder_path = tmpdir.join(&subfolder_config.name);
		fs::create_dir(&subfolder_path)
			.into_diagnostic()
			.wrap_err(format!(
				"failed to create path for dir [{}]",
				subfolder_path.display()
			))?;
		paths.push(subfolder_path.clone());

		// Fill the subfolder with files
		match subfolder_config.nesting {
			GeneratedFileNesting::Flat => {
				for idx in 0..subfolder_config.file_count {
					// Write stub file contents
					fs::write(
						subfolder_path.join(format!("stub-file-{idx}")),
						get_placeholder_data(),
					)
					.into_diagnostic()
					.wrap_err(format!(
						"failed to write temporary file in subfolder {} @ idx {idx}",
						subfolder_path.display()
					))?;
				}
			}
			GeneratedFileNesting::RandomToMax(max_depth) => {
				let mut generator = rand::thread_rng();
				for idx in 0..subfolder_config.file_count {
					// Build a randomized path up to max depth.
					// Guard max_depth == 0: `gen_range` panics on an empty
					// range, so treat zero as "no nesting".
					let mut generated_path = subfolder_path.clone();
					let depth = if max_depth == 0 {
						0
					} else {
						generator.gen_range(0..max_depth)
					};
					for _ in 0..depth {
						generated_path.push("stub-dir");
					}
					// Create the (possibly nested) directory chain
					fs::create_dir_all(&generated_path)
						.into_diagnostic()
						.wrap_err(format!(
							"failed to create randomly generated path [{}]",
							generated_path.display()
						))?;
					// Write stub file contents @ the new randomized path
					fs::write(
						generated_path.join(format!("stub-file-{idx}")),
						get_placeholder_data(),
					)
					.into_diagnostic()
					.wrap_err(format!(
						"failed to write temporary file in subfolder {} @ idx {idx}",
						subfolder_path.display()
					))?;
				}
			}
		}
	}

	Ok(paths)
}

146
crates/cli/tests/ignore.rs Normal file
View File

@ -0,0 +1,146 @@
use std::{
path::{Path, PathBuf},
process::Stdio,
time::Duration,
};
use miette::{IntoDiagnostic, Result, WrapErr};
use tokio::{process::Command, time::Instant};
use tracing_test::traced_test;
use uuid::Uuid;
mod common;
use common::{generate_test_files, GenerateTestFilesArgs};
use crate::common::{GeneratedFileNesting, TestSubfolderConfiguration};
/// Directory name that will be used for the dir that *should* be watched
const WATCH_DIR_NAME: &str = "watch";
/// The token that watch will echo every time a match is found
const WATCH_TOKEN: &str = "updated";
/// Ensure that watchexec runtime does not increase with the
/// number of *ignored* files in a given folder
///
/// This test creates two separate folders, one small and the other large
///
/// Each folder has two subfolders:
/// - a shallow one to be watched, with a few files of single depth (20 files)
/// - a deep one to be ignored, with many files at varying depths (small case 200 files, large case 200,000 files)
///
/// watchexec, when executed on *either* folder should *not* experience a more
/// than 10x degradation in performance, because the vast majority of the files
/// are supposed to be ignored to begin with.
///
/// When running the CLI on the root folders, it should *not* take a long time to start.
#[tokio::test]
#[traced_test]
async fn e2e_ignore_many_files_200_000() -> Result<()> {
	// Create a tempdir handle so that drop will clean it up
	let small_test_dir = tempfile::tempdir()
		.into_diagnostic()
		.wrap_err("failed to create tempdir for test use")?;

	// Determine the watchexec bin to use & build arguments;
	// TEST_WATCHEXEC_BIN overrides the cargo-provided binary path
	let wexec_bin = std::env::var("TEST_WATCHEXEC_BIN").unwrap_or(
		option_env!("CARGO_BIN_EXE_watchexec")
			.map(std::string::ToString::to_string)
			.unwrap_or("watchexec".into()),
	);
	// Unique token so output from this test run cannot be confused with another
	let token = format!("{WATCH_TOKEN}-{}", Uuid::new_v4());
	let args: Vec<String> = vec![
		"-1".into(), // exit as soon as watch completes
		"--watch".into(),
		WATCH_DIR_NAME.into(),
		"echo".into(),
		token.clone(),
	];

	// Generate a small directory of files containing dirs that *will* and will *not* be watched
	let [ref root_dir_path, _, _] = generate_test_files(GenerateTestFilesArgs {
		path: Some(PathBuf::from(small_test_dir.path())),
		subfolder_configs: vec![
			// Shallow "watch" folder: a few flat files; this is the folder that *is* watched
			TestSubfolderConfiguration {
				name: "watch".into(),
				nesting: GeneratedFileNesting::Flat,
				file_count: 5,
			},
			// Deep "unrelated" folder: *many* small nested files; *not* watched
			TestSubfolderConfiguration {
				name: "unrelated".into(),
				nesting: GeneratedFileNesting::RandomToMax(42),
				file_count: 200,
			},
		],
	})?[..] else {
		panic!("unexpected number of paths returned from generate_test_files");
	};

	// Measure elapsed time for the small (200 unwatched files) case
	let small_elapsed = run_watchexec_cmd(&wexec_bin, root_dir_path, args.clone()).await?;

	// Create a tempdir handle so that drop will clean it up
	let large_test_dir = tempfile::tempdir()
		.into_diagnostic()
		.wrap_err("failed to create tempdir for test use")?;

	// Generate a *large* directory of files
	let [ref root_dir_path, _, _] = generate_test_files(GenerateTestFilesArgs {
		path: Some(PathBuf::from(large_test_dir.path())),
		subfolder_configs: vec![
			// Shallow "watch" folder: a few flat files; this is the folder that *is* watched
			TestSubfolderConfiguration {
				name: "watch".into(),
				nesting: GeneratedFileNesting::Flat,
				file_count: 5,
			},
			// Deep "unrelated" folder: *many* small nested files; *not* watched
			TestSubfolderConfiguration {
				name: "unrelated".into(),
				nesting: GeneratedFileNesting::RandomToMax(42),
				file_count: 200_000,
			},
		],
	})?[..] else {
		panic!("unexpected number of paths returned from generate_test_files");
	};

	// Measure elapsed time for the large (200k unwatched files) case
	let large_elapsed = run_watchexec_cmd(&wexec_bin, root_dir_path, args.clone()).await?;

	// We expect the ignores to not impact watchexec startup time at all
	// whether there are 200 files in there or 200k
	assert!(
		large_elapsed < small_elapsed * 10,
		"200k ignore folder ({:?}) took more than 10x more time ({:?}) than 200 ignore folder ({:?})",
		large_elapsed,
		small_elapsed * 10,
		small_elapsed,
	);

	Ok(())
}
/// Run a watchexec command once and return how long it took to complete.
///
/// Spawns `wexec_bin` with the given `args` in the working directory `dir`,
/// with stdout and stderr captured, and waits for the process to exit.
///
/// # Errors
///
/// Fails if the process cannot be spawned or its output cannot be collected.
async fn run_watchexec_cmd(
	wexec_bin: impl AsRef<str>,
	dir: impl AsRef<Path>,
	args: impl Into<Vec<String>>,
) -> Result<Duration> {
	// Build the subprocess command
	let mut cmd = Command::new(wexec_bin.as_ref());
	cmd.args(args.into());
	cmd.current_dir(dir);
	cmd.stdout(Stdio::piped());
	cmd.stderr(Stdio::piped());

	let start = Instant::now();
	// kill_on_drop ensures the child does not outlive this future if it is
	// dropped before completion
	cmd.kill_on_drop(true)
		.output()
		.await
		.into_diagnostic()
		.wrap_err("failed to run watchexec command")?;
	Ok(start.elapsed())
}

View File

@ -97,11 +97,14 @@ impl ProcessEnd {
/// status union is platform-specific. Exit codes and signals are implemented, other variants
/// are not.
#[cfg(unix)]
#[must_use]
pub fn into_exitstatus(self) -> ExitStatus {
use std::os::unix::process::ExitStatusExt;
match self {
Self::Success => ExitStatus::from_raw(0),
Self::ExitError(code) => ExitStatus::from_raw((code.get() as u8 as i32) << 8),
Self::ExitError(code) => {
ExitStatus::from_raw(i32::from(u8::try_from(code.get()).unwrap_or_default()) << 8)
}
Self::ExitSignal(signal) => {
ExitStatus::from_raw(signal.to_nix().map_or(0, |sig| sig as i32))
}
@ -115,6 +118,7 @@ impl ProcessEnd {
/// This is a testing function only! **It will panic** if the `ProcessEnd` is not representable
/// as an `ExitStatus` on Windows.
#[cfg(windows)]
#[must_use]
pub fn into_exitstatus(self) -> ExitStatus {
use std::os::windows::process::ExitStatusExt;
match self {
@ -126,6 +130,7 @@ impl ProcessEnd {
/// Unimplemented on this platform.
#[cfg(not(any(unix, windows)))]
#[must_use]
pub fn into_exitstatus(self) -> ExitStatus {
unimplemented!()
}

View File

@ -113,7 +113,7 @@ impl From<Tag> for SerdeTag {
},
Tag::FileEventKind(fek) => Self {
kind: TagKind::Fs,
full: Some(format!("{:?}", fek)),
full: Some(format!("{fek:?}")),
simple: Some(fek.into()),
..Default::default()
},
@ -145,12 +145,10 @@ impl From<Tag> for SerdeTag {
Tag::ProcessCompletion(Some(end)) => Self {
kind: TagKind::Completion,
code: match &end {
ProcessEnd::Success => None,
ProcessEnd::ExitSignal(_) => None,
ProcessEnd::Success | ProcessEnd::Continued | ProcessEnd::ExitSignal(_) => None,
ProcessEnd::ExitError(err) => Some(err.get()),
ProcessEnd::ExitStop(code) => Some(code.get().into()),
ProcessEnd::Exception(exc) => Some(exc.get().into()),
ProcessEnd::Continued => None,
},
signal: if let ProcessEnd::ExitSignal(sig) = &end {
Some(*sig)

View File

@ -317,8 +317,6 @@ impl TaggedFilterer {
// Ok(Some(bool)) => the match was applied, bool is the result
// Ok(None) => for some precondition, the match was not done (mismatched tag, out of context, …)
fn match_tag(&self, filter: &Filter, tag: &Tag) -> Result<Option<bool>, TaggedFiltererError> {
trace!(matcher=?filter.on, "matching filter to tag");
const fn sig_match(sig: Signal) -> (&'static str, i32) {
match sig {
Signal::Hangup | Signal::Custom(1) => ("HUP", 1),
@ -333,6 +331,8 @@ impl TaggedFilterer {
}
}
trace!(matcher=?filter.on, "matching filter to tag");
match (tag, filter.on) {
(tag, Matcher::Tag) => filter.matches(tag.discriminant_name()),
(Tag::Path { path, .. }, Matcher::Path) => {

View File

@ -30,33 +30,33 @@
//! In addition to the two-symbol operators, there is the `=` "auto" operator, which maps to the
//! most convenient operator for the given _matcher_. The current mapping is:
//!
//! | Matcher | Operator |
//! |-------------------------------------------------|---------------|
//! | [Tag](Matcher::Tag) | `:=` (in set) |
//! | [Path](Matcher::Path) | `*=` (glob) |
//! | [FileType](Matcher::FileType) | `:=` (in set) |
//! | [FileEventKind](Matcher::FileEventKind) | `*=` (glob) |
//! | [Source](Matcher::Source) | `:=` (in set) |
//! | [Process](Matcher::Process) | `:=` (in set) |
//! | [Signal](Matcher::Signal) | `:=` (in set) |
//! | [ProcessCompletion](Matcher::ProcessCompletion) | `*=` (glob) |
//! | [Priority](Matcher::Priority) | `:=` (in set) |
//! | Matcher | Operator |
//! |---------------------------------------------------|---------------|
//! | [`Tag`](Matcher::Tag) | `:=` (in set) |
//! | [`Path`](Matcher::Path) | `*=` (glob) |
//! | [`FileType`](Matcher::FileType) | `:=` (in set) |
//! | [`FileEventKind`](Matcher::FileEventKind) | `*=` (glob) |
//! | [`Source`](Matcher::Source) | `:=` (in set) |
//! | [`Process`](Matcher::Process) | `:=` (in set) |
//! | [`Signal`](Matcher::Signal) | `:=` (in set) |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `*=` (glob) |
//! | [`Priority`](Matcher::Priority) | `:=` (in set) |
//!
//! [Matchers][Matcher] correspond to Tags, but are not one-to-one: the `path` matcher operates on
//! the `path` part of the `Path` tag, and the `type` matcher operates on the `file_type`, for
//! example.
//!
//! | Matcher | Syntax | Tag |
//! |------------------------------------|----------|----------------------------------------------|
//! | [Tag](Matcher::Tag) | `tag` | _the presence of a Tag on the event_ |
//! | [Path](Matcher::Path) | `path` | [Path](Tag::Path) (`path` field) |
//! | [FileType](Matcher::FileType) | `type` | [Path](Tag::Path) (`file_type` field, when Some) |
//! | [FileEventKind](Matcher::FileEventKind) | `kind` or `fek` | [FileEventKind](Tag::FileEventKind) |
//! | [Source](Matcher::Source) | `source` or `src` | [Source](Tag::Source) |
//! | [Process](Matcher::Process) | `process` or `pid` | [Process](Tag::Process) |
//! | [Signal](Matcher::Signal) | `signal` | [Signal](Tag::Signal) |
//! | [ProcessCompletion](Matcher::ProcessCompletion) | `complete` or `exit` | [ProcessCompletion](Tag::ProcessCompletion) |
//! | [Priority](Matcher::Priority) | `priority` | special: event [Priority] |
//! | Matcher | Syntax | Tag |
//! |-------------------------------------------|----------|----------------------------------------------|
//! | [`Tag`](Matcher::Tag) | `tag` | _the presence of a Tag on the event_ |
//! | [`Path`](Matcher::Path) | `path` | [`Path`](Tag::Path) (`path` field) |
//! | [`FileType`](Matcher::FileType) | `type` | [`Path`](Tag::Path) (`file_type` field, when Some) |
//! | [`FileEventKind`](Matcher::FileEventKind) | `kind` or `fek` | [`FileEventKind`](Tag::FileEventKind) |
//! | [`Source`](Matcher::Source) | `source` or `src` | [`Source`](Tag::Source) |
//! | [`Process`](Matcher::Process) | `process` or `pid` | [`Process`](Tag::Process) |
//! | [`Signal`](Matcher::Signal) | `signal` | [`Signal`](Tag::Signal) |
//! | [`ProcessCompletion`](Matcher::ProcessCompletion) | `complete` or `exit` | [`ProcessCompletion`](Tag::ProcessCompletion) |
//! | [`Priority`](Matcher::Priority) | `priority` | special: event [`Priority`] |
//!
//! Filters are checked in order, grouped per tag and per matcher. Filter groups may be checked in
//! any order, but the filters in the groups are checked in add order. Path glob filters are always

View File

@ -53,6 +53,6 @@ where
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
f.debug_struct("SwapLock")
.field("(watch)", &self.r)
.finish()
.finish_non_exhaustive()
}
}

View File

@ -6,12 +6,53 @@ use std::{
};
use gix_config::{path::interpolate::Context as InterpolateContext, File, Path as GitPath};
use miette::{bail, Result};
use project_origins::ProjectType;
use tokio::fs::{canonicalize, metadata, read_dir};
use tracing::{trace, trace_span};
use crate::{IgnoreFile, IgnoreFilter};
/// Arguments for finding ignored files in a given directory and subdirectories
pub struct IgnoreFilesFromOriginArgs {
	/// Origin (root directory) from which the search for ignore files starts
	origin: PathBuf,

	/// Paths that have been explicitly selected to be watched.
	///
	/// If this list is non-empty, all paths not on this list will be ignored.
	///
	/// These paths *must* be absolute; this is checked upon creation
	/// (see [`IgnoreFilesFromOriginArgs::new`])
	explicit_watches: Vec<PathBuf>,

	/// Paths that have been explicitly ignored.
	///
	/// If this list is non-empty, all paths on this list will be ignored.
	///
	/// These paths *must* be absolute; this is checked upon creation
	/// (see [`IgnoreFilesFromOriginArgs::new`])
	explicit_ignores: Vec<PathBuf>,
}
impl IgnoreFilesFromOriginArgs {
	/// Create a new set of arguments, validating that every explicitly
	/// watched and explicitly ignored path is absolute.
	///
	/// # Errors
	///
	/// Fails if any path in `explicit_watches` or `explicit_ignores` is
	/// relative.
	pub fn new(
		origin: impl AsRef<Path>,
		explicit_watches: Vec<PathBuf>,
		explicit_ignores: Vec<PathBuf>,
	) -> Result<Self> {
		if explicit_watches.iter().any(|p| !p.is_absolute()) {
			bail!("explicit watch dir contains non-absolute directories");
		}

		// the message previously (incorrectly) referred to the watch dirs
		if explicit_ignores.iter().any(|p| !p.is_absolute()) {
			bail!("explicit ignore dir contains non-absolute directories");
		}

		Ok(Self {
			origin: PathBuf::from(origin.as_ref()),
			explicit_watches,
			explicit_ignores,
		})
	}
}
/// Finds all ignore files in the given directory and subdirectories.
///
/// This considers:
@ -43,12 +84,23 @@ use crate::{IgnoreFile, IgnoreFilter};
///
/// This future is not `Send` due to [`gix_config`] internals.
#[allow(clippy::future_not_send)]
pub async fn from_origin(path: impl AsRef<Path> + Send) -> (Vec<IgnoreFile>, Vec<Error>) {
let base = path.as_ref().to_owned();
let mut files = Vec::new();
pub async fn from_origin(
args: impl Into<IgnoreFilesFromOriginArgs>,
) -> (Vec<IgnoreFile>, Vec<Error>) {
let args = args.into();
let origin = &args.origin;
let mut ignore_files = args
.explicit_ignores
.iter()
.map(|p| IgnoreFile {
path: p.clone(),
applies_in: Some(origin.clone()),
applies_to: None,
})
.collect();
let mut errors = Vec::new();
match find_file(base.join(".git/config")).await {
match find_file(origin.join(".git/config")).await {
Err(err) => errors.push(err),
Ok(None) => {}
Ok(Some(path)) => match path.parent().map(|path| File::from_git_dir(path.into())) {
@ -66,7 +118,7 @@ pub async fn from_origin(path: impl AsRef<Path> + Send) -> (Vec<IgnoreFile>, Vec
}) {
Ok(e) => {
discover_file(
&mut files,
&mut ignore_files,
&mut errors,
None,
Some(ProjectType::Git),
@ -84,51 +136,52 @@ pub async fn from_origin(path: impl AsRef<Path> + Send) -> (Vec<IgnoreFile>, Vec
}
discover_file(
&mut files,
&mut ignore_files,
&mut errors,
Some(base.clone()),
Some(origin.clone()),
Some(ProjectType::Bazaar),
base.join(".bzrignore"),
origin.join(".bzrignore"),
)
.await;
discover_file(
&mut files,
&mut ignore_files,
&mut errors,
Some(base.clone()),
Some(origin.clone()),
Some(ProjectType::Darcs),
base.join("_darcs/prefs/boring"),
origin.join("_darcs/prefs/boring"),
)
.await;
discover_file(
&mut files,
&mut ignore_files,
&mut errors,
Some(base.clone()),
Some(origin.clone()),
Some(ProjectType::Fossil),
base.join(".fossil-settings/ignore-glob"),
origin.join(".fossil-settings/ignore-glob"),
)
.await;
discover_file(
&mut files,
&mut ignore_files,
&mut errors,
Some(base.clone()),
Some(origin.clone()),
Some(ProjectType::Git),
base.join(".git/info/exclude"),
origin.join(".git/info/exclude"),
)
.await;
trace!("visiting child directories for ignore files");
match DirTourist::new(&base, &files).await {
match DirTourist::new(origin, &ignore_files, &args.explicit_watches).await {
Ok(mut dirs) => {
loop {
match dirs.next().await {
Visit::Done => break,
Visit::Skip => continue,
Visit::Find(dir) => {
// Attempt to find a .ignore file in the directory
if discover_file(
&mut files,
&mut ignore_files,
&mut errors,
Some(dir.clone()),
None,
@ -136,11 +189,13 @@ pub async fn from_origin(path: impl AsRef<Path> + Send) -> (Vec<IgnoreFile>, Vec
)
.await
{
dirs.add_last_file_to_filter(&files, &mut errors).await;
dirs.add_last_file_to_filter(&ignore_files, &mut errors)
.await;
}
// Attempt to find a .gitignore file in the directory
if discover_file(
&mut files,
&mut ignore_files,
&mut errors,
Some(dir.clone()),
Some(ProjectType::Git),
@ -148,11 +203,13 @@ pub async fn from_origin(path: impl AsRef<Path> + Send) -> (Vec<IgnoreFile>, Vec
)
.await
{
dirs.add_last_file_to_filter(&files, &mut errors).await;
dirs.add_last_file_to_filter(&ignore_files, &mut errors)
.await;
}
// Attempt to find a .hgignore file in the directory
if discover_file(
&mut files,
&mut ignore_files,
&mut errors,
Some(dir.clone()),
Some(ProjectType::Mercurial),
@ -160,7 +217,8 @@ pub async fn from_origin(path: impl AsRef<Path> + Send) -> (Vec<IgnoreFile>, Vec
)
.await
{
dirs.add_last_file_to_filter(&files, &mut errors).await;
dirs.add_last_file_to_filter(&ignore_files, &mut errors)
.await;
}
}
}
@ -172,7 +230,7 @@ pub async fn from_origin(path: impl AsRef<Path> + Send) -> (Vec<IgnoreFile>, Vec
}
}
(files, errors)
(ignore_files, errors)
}
/// Finds all ignore files that apply to the current runtime.
@ -354,6 +412,7 @@ struct DirTourist {
base: PathBuf,
to_visit: Vec<PathBuf>,
to_skip: HashSet<PathBuf>,
to_explicitly_watch: HashSet<PathBuf>,
pub errors: Vec<std::io::Error>,
filter: IgnoreFilter,
}
@ -366,10 +425,14 @@ enum Visit {
}
impl DirTourist {
pub async fn new(base: &Path, files: &[IgnoreFile]) -> Result<Self, Error> {
pub async fn new(
base: &Path,
ignore_files: &[IgnoreFile],
watch_files: &[PathBuf],
) -> Result<Self, Error> {
let base = canonicalize(base).await?;
trace!("create IgnoreFilterer for visiting directories");
let mut filter = IgnoreFilter::new(&base, files)
let mut filter = IgnoreFilter::new(&base, ignore_files)
.await
.map_err(|err| Error::new(ErrorKind::Other, err))?;
@ -392,6 +455,10 @@ impl DirTourist {
to_visit: vec![base.clone()],
base,
to_skip: HashSet::new(),
to_explicitly_watch: watch_files.iter().fold(HashSet::new(), |mut acc, path| {
acc.insert(path.clone());
acc
}),
errors: Vec::new(),
filter,
})
@ -415,7 +482,25 @@ impl DirTourist {
}
if !self.filter.check_dir(&path) {
trace!("path is ignored, adding to skip list");
trace!(?path, "path is ignored, adding to skip list");
self.skip(path);
return Visit::Skip;
}
// If explicitly watched paths were not specified, we can include any path
//
// If explicitly watched paths *were* specified, then to include the path, either:
// - the path in question starts with an explicitly included path (/a/b starting with /a)
// - the path in question is *above* the explicitly included path (/a is above /a/b)
if self.to_explicitly_watch.is_empty()
|| self
.to_explicitly_watch
.iter()
.any(|p| path.starts_with(p) || p.starts_with(&path))
{
trace!(?path, ?self.to_explicitly_watch, "including path; it starts with one of the explicitly watched paths");
} else {
trace!(?path, ?self.to_explicitly_watch, "excluding path; it did not start with any of explicitly watched paths");
self.skip(path);
return Visit::Skip;
}

View File

@ -127,7 +127,7 @@ impl IgnoreFilter {
trace!(?line, "adding ignore line");
builder
.add_line(Some(applies_in.clone().to_owned()), line)
.add_line(Some(applies_in.clone().clone()), line)
.map_err(|err| Error::Glob {
file: Some(file.path.clone()),
err,
@ -388,8 +388,7 @@ fn get_applies_in_path(origin: &Path, ignore_file: &IgnoreFile) -> PathBuf {
ignore_file
.applies_in
.as_ref()
.map(|p| PathBuf::from(dunce::simplified(p)))
.unwrap_or(root_path)
.map_or(root_path, |p| PathBuf::from(dunce::simplified(p)))
}
/// Gets the root component of a given path.

View File

@ -98,6 +98,7 @@ impl Handler {
/// Get a job given its Id.
///
/// This returns a job handle, if it existed when this handler was called.
#[must_use]
pub fn get_job(&self, id: Id) -> Option<Job> {
// Borrow the entry out of the map, then clone the handle so the
// returned `Job` is independent of the map's lifetime.
let found = self.extant.get(&id);
found.map(Clone::clone)
}

View File

@ -93,7 +93,7 @@ impl LateJoinSet {
/// This does not remove the tasks from the `LateJoinSet`. To wait for the tasks to complete
/// cancellation, use `join_all` or call `join_next` in a loop until the `LateJoinSet` is empty.
pub fn abort_all(&self) {
// NOTE(review): the two lines below are the before/after pair from the
// surrounding diff hunk (closure form replaced by the path-to-method form);
// in the applied source only the `JoinHandle::abort` line remains. Both
// forms call `abort()` on every held task handle without consuming them.
self.tasks.iter().for_each(|jh| jh.abort());
self.tasks.iter().for_each(JoinHandle::abort);
}
}

View File

@ -42,7 +42,7 @@
//! ```
//!
//! Alternatively, you can use the modules exposed by the crate and the external crates such as
//! [ClearScreen][clearscreen] and [Command Group][command_group] to build something more advanced,
//! [`ClearScreen`][clearscreen] and [Command Group][command_group] to build something more advanced,
//! at the cost of reimplementing the glue code.
//!
//! Note that the library generates a _lot_ of debug messaging with [tracing]. **You should not

View File

@ -153,15 +153,15 @@ impl Watchexec {
let mut tasks = JoinSet::new();
tasks.spawn(action::worker(config.clone(), er_s.clone(), ev_r).map_ok(|_| "action"));
tasks.spawn(fs::worker(config.clone(), er_s.clone(), ev_s.clone()).map_ok(|_| "fs"));
tasks.spawn(action::worker(config.clone(), er_s.clone(), ev_r).map_ok(|()| "action"));
tasks.spawn(fs::worker(config.clone(), er_s.clone(), ev_s.clone()).map_ok(|()| "fs"));
tasks.spawn(
signal::worker(config.clone(), er_s.clone(), ev_s.clone()).map_ok(|_| "signal"),
signal::worker(config.clone(), er_s.clone(), ev_s.clone()).map_ok(|()| "signal"),
);
tasks.spawn(
keyboard::worker(config.clone(), er_s.clone(), ev_s.clone()).map_ok(|_| "keyboard"),
keyboard::worker(config.clone(), er_s.clone(), ev_s.clone()).map_ok(|()| "keyboard"),
);
tasks.spawn(error_hook(er_r, config.error_handler.clone()).map_ok(|_| "error"));
tasks.spawn(error_hook(er_r, config.error_handler.clone()).map_ok(|()| "error"));
while let Some(Ok(res)) = tasks.join_next().await {
match res {

View File

@ -296,6 +296,7 @@ pub struct SignalParseError {
#[cfg(feature = "fromstr")]
impl SignalParseError {
#[must_use]
pub fn new(src: &str, err: &str) -> Self {
Self {
src: src.to_owned(),
@ -323,7 +324,7 @@ impl fmt::Display for Signal {
(Self::User1, _) => "SIGUSR1",
(Self::User2, _) => "SIGUSR2",
(Self::Custom(n), _) => {
return write!(f, "{}", n);
return write!(f, "{n}");
}
}
)
@ -332,7 +333,7 @@ impl fmt::Display for Signal {
#[cfg(feature = "serde")]
mod serde_support {
use super::*;
use super::Signal;
#[derive(Clone, Copy, Debug, serde::Serialize, serde::Deserialize)]
#[serde(untagged)]

View File

@ -29,6 +29,7 @@ impl Shell {
}
#[cfg(windows)]
#[must_use]
/// Shorthand for the CMD.EXE shell.
pub fn cmd() -> Self {
Self {

View File

@ -9,6 +9,7 @@ use std::{
pub type SyncIoError = Arc<OnceLock<Error>>;
/// Make a [`SyncIoError`] from a [`std::io::Error`].
#[must_use]
pub fn sync_io_error(err: Error) -> SyncIoError {
let lock = OnceLock::new();
lock.set(err).expect("unreachable: lock was just created");

View File

@ -65,7 +65,7 @@ impl PriorityReceiver {
if let Some(timer) = stop_timer.clone() {
select! {
_ = timer.to_sleep() => {
() = timer.to_sleep() => {
*stop_timer = None;
Some(timer.to_control())
}

View File

@ -54,22 +54,25 @@ pub enum CommandState {
impl CommandState {
/// Whether the command is pending, i.e. not running or finished.
#[must_use]
pub const fn is_pending(&self) -> bool {
// Explicit match: only the `Pending` variant counts as pending.
match self {
Self::Pending => true,
_ => false,
}
}
/// Whether the command is running.
#[must_use]
pub const fn is_running(&self) -> bool {
// Explicit match: any `Running { .. }` payload counts as running.
match self {
Self::Running { .. } => true,
_ => false,
}
}
/// Whether the command is finished.
#[must_use]
pub const fn is_finished(&self) -> bool {
// Explicit match: any `Finished { .. }` payload counts as finished.
match self {
Self::Finished { .. } => true,
_ => false,
}
}
#[cfg_attr(test, allow(unused_mut, unused_variables))]
pub(crate) async fn spawn(
pub(crate) fn spawn(
&mut self,
command: Arc<Command>,
mut spawnable: TokioCommand,

View File

@ -24,8 +24,13 @@ use super::{
#[must_use]
#[instrument(level = "trace")]
pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
let (sender, mut receiver) = priority::new();
enum Loop {
Normally,
Skip,
Break,
}
let (sender, mut receiver) = priority::new();
let gone = Flag::default();
let done = gone.clone();
@ -44,12 +49,6 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
let mut on_end: Vec<Flag> = Vec::new();
let mut on_end_restart: Option<Flag> = None;
enum Loop {
Normally,
Skip,
Break,
}
'main: loop {
select! {
result = command_state.wait(), if command_state.is_running() => {
@ -67,7 +66,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
trace!(existing=?stop_timer, "erasing stop timer");
stop_timer = None;
trace!(count=%on_end.len(), "raising all pending end flags");
for done in take(&mut on_end).into_iter() {
for done in take(&mut on_end) {
done.raise();
}
@ -86,7 +85,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
},
)
.await;
if let Err(err) = command_state.spawn(command.clone(), spawnable).await {
if let Err(err) = command_state.spawn(command.clone(), spawnable) {
let fut = error_handler.call(sync_io_error(err));
fut.await;
return Loop::Skip;
@ -151,7 +150,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
},
)
.await;
try_with_handler!(command_state.spawn(command.clone(), spawnable).await);
try_with_handler!(command_state.spawn(command.clone(), spawnable));
}
}
Control::Stop => {
@ -169,7 +168,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
};
trace!(count=%on_end.len(), "raising all pending end flags");
for done in take(&mut on_end).into_iter() {
for done in take(&mut on_end) {
done.raise();
}
} else {
@ -183,9 +182,8 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
trace!(?grace, "setting up graceful stop timer");
stop_timer.replace(Timer::stop(grace, done));
return Loop::Skip;
} else {
trace!("child isn't running, skip");
}
trace!("child isn't running, skip");
}
Control::TryRestart => {
if let CommandState::Running { child, started, .. } = &mut command_state {
@ -203,7 +201,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
previous_run = Some(command_state.reset());
trace!(count=%on_end.len(), "raising all pending end flags");
for done in take(&mut on_end).into_iter() {
for done in take(&mut on_end) {
done.raise();
}
@ -218,7 +216,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
},
)
.await;
try_with_handler!(command_state.spawn(command.clone(), spawnable).await);
try_with_handler!(command_state.spawn(command.clone(), spawnable));
} else {
trace!("child isn't running, skip");
}
@ -232,9 +230,8 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
trace!("setting up graceful restart flag");
on_end_restart = Some(done);
return Loop::Skip;
} else {
trace!("child isn't running, skip");
}
trace!("child isn't running, skip");
}
Control::ContinueTryGracefulRestart => {
trace!("continuing a graceful try-restart");
@ -253,7 +250,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
};
trace!(count=%on_end.len(), "raising all pending end flags");
for done in take(&mut on_end).into_iter() {
for done in take(&mut on_end) {
done.raise();
}
}
@ -270,7 +267,7 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
},
)
.await;
try_with_handler!(command_state.spawn(command.clone(), spawnable).await);
try_with_handler!(command_state.spawn(command.clone(), spawnable));
}
Control::Signal(signal) => {
if let CommandState::Running { child, .. } = &mut command_state {
@ -286,15 +283,14 @@ pub fn start_job(command: Arc<Command>) -> (Job, JoinHandle<()>) {
}
Control::NextEnding => {
if !matches!(command_state, CommandState::Finished { .. }) {
trace!("queue end flag");
on_end.push(done);
return Loop::Skip;
} else {
if matches!(command_state, CommandState::Finished { .. }) {
trace!("child is finished, raise done flag immediately");
done.raise();
return Loop::Normally;
}
trace!("queue end flag");
on_end.push(done);
return Loop::Skip;
}
Control::SyncFunc(f) => {