Mirror of https://github.com/sharkdp/fd.git
Apply cargo fmt
parent fb1cd3a322
commit 884bd41cae
8 changed files with 75 additions and 57 deletions
src/app.rs: 16 changed lines
@@ -95,12 +95,7 @@ pub fn build_app() -> App<'static, 'static> {
                 .takes_value(true)
                 .hidden(true),
         )
-        .arg(
-            arg("exec")
-                .long("exec")
-                .short("x")
-                .takes_value(true)
-        )
+        .arg(arg("exec").long("exec").short("x").takes_value(true))
         .arg(arg("pattern"))
         .arg(arg("path"))
 }
@@ -151,8 +146,10 @@ fn usage() -> HashMap<&'static str, Help> {
              'l' or 'symlink': symbolic links");
     doc!(h, "exec"
         , "Execute each discovered path using the argument that follows as the command expression."
-        , "Execute each discovered path using the argument that follows as the command expression.\n \
-           The following are valid tokens that can be used within the expression for generating commands:\n \
+        , "Execute each discovered path using the argument that follows as the command \
+           expression.\n \
+           The following are valid tokens that can be used within the expression for generating \
+           commands:\n \
            '{}': places the input in the location of this token\n \
            '{.}': removes the extension from the input\n \
            '{/}': places the basename of the input\n \
@@ -169,7 +166,8 @@ fn usage() -> HashMap<&'static str, Help> {
              'always': always use colorized output");
     doc!(h, "threads"
         , "Set number of threads to use for searching & executing"
-        , "Set number of threads to use for searching & executing (default: number of available CPU cores)");
+        , "Set number of threads to use for searching & executing (default: number of available \
+           CPU cores)");
     doc!(h, "max-buffer-time"
         , "the time (in ms) to buffer, before streaming to the console"
         , "Amount of time in milliseconds to buffer, before streaming the search results to\
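
For context on the placeholder tokens documented in the --exec help text above: the sketch below is illustrative only and is not part of this commit. The expand helper is hypothetical; it simply mirrors what the help text says '{}', '{.}', and '{/}' should produce for a sample path.

    // Hypothetical helper, for illustration of the documented token semantics only.
    // It is not fd's implementation.
    fn expand(token: &str, input: &str) -> String {
        let basename = input.rsplit('/').next().unwrap_or(input);
        let no_ext = match input.rfind('.') {
            // Ignore a leading dot in the last path component (hidden files).
            Some(i) if i > input.rfind('/').map_or(0, |s| s + 1) => &input[..i],
            _ => input,
        };
        match token {
            "{}" => input.to_string(),   // the full input path
            "{.}" => no_ext.to_string(), // the input without its extension
            "{/}" => basename.to_string(), // the basename of the input
            _ => token.to_string(),
        }
    }

    fn main() {
        let input = "/home/user/photo.jpg";
        assert_eq!(expand("{}", input), "/home/user/photo.jpg");
        assert_eq!(expand("{.}", input), "/home/user/photo");
        assert_eq!(expand("{/}", input), "photo.jpg");
    }
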
@@ -36,7 +36,9 @@ impl<'a> CommandTicket<'a> {

         // Then wait for the command to exit, if it was spawned.
         match cmd {
-            Ok(mut child) => { let _ = child.wait(); },
+            Ok(mut child) => {
+                let _ = child.wait();
+            }
             Err(why) => eprintln!("fd: exec error: {}", why),
         }

@@ -19,7 +19,7 @@ pub fn job(rx: Arc<Mutex<Receiver<PathBuf>>>, cmd: Arc<TokenizedCommand>) {
         // has closed, exit from the loop
         let value = match lock.recv() {
             Ok(value) => value,
-            Err(_) => break
+            Err(_) => break,
         };

         // Drop the lock so that other threads can read from the the receiver.
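
The hunk above touches the worker loop that pulls paths out of a shared channel receiver. As background, a minimal standalone sketch of that pattern (not this commit's code, and not fd's module layout) looks roughly like this: each worker locks the receiver only long enough to take one item, then releases the lock before doing any work, so the other workers can receive in parallel.

    use std::path::PathBuf;
    use std::sync::mpsc::{channel, Receiver};
    use std::sync::{Arc, Mutex};
    use std::thread;

    fn main() {
        let (tx, rx) = channel::<PathBuf>();
        let rx: Arc<Mutex<Receiver<PathBuf>>> = Arc::new(Mutex::new(rx));

        // Spawn a few workers that share a single receiver behind a mutex.
        let handles: Vec<_> = (0..4)
            .map(|_| {
                let rx = Arc::clone(&rx);
                thread::spawn(move || loop {
                    // Hold the lock only for the duration of this statement.
                    let value = match rx.lock().unwrap().recv() {
                        Ok(value) => value,
                        Err(_) => break, // sender dropped: no more work
                    };
                    // The guard is dropped here, so other workers can receive
                    // while this one processes the path.
                    println!("processing {}", value.display());
                })
            })
            .collect();

        for p in ["a.txt", "b.txt", "c.txt"] {
            tx.send(PathBuf::from(p)).unwrap();
        }
        drop(tx); // close the channel so the workers exit their loops

        for h in handles {
            h.join().unwrap();
        }
    }
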
@@ -52,12 +52,14 @@ impl TokenizedCommand {
             match character {
                 // Backslashes are useful in cases where we want to use the '{' character
                 // without having all occurrences of it to collect placeholder tokens.
-                '\\' => if let Some((_, nchar)) = chars.next() {
+                '\\' => {
+                    if let Some((_, nchar)) = chars.next() {
                         if nchar != '{' {
                             text.push(character);
                         }
                         text.push(nchar);
-                },
+                    }
+                }
                 // When a raw '{' is discovered, we will note it's position, and use that for a
                 // later comparison against valid placeholder tokens.
                 '{' if flags & OPEN == 0 => {
@@ -67,7 +69,7 @@ impl TokenizedCommand {
                         append(&mut tokens, &text);
                         text.clear();
                     }
-                },
+                }
                 // If the `OPEN` bit is set, we will compare the contents between the discovered
                 // '{' and '}' characters against a list of valid tokens, then pushing the
                 // corresponding token onto the `tokens` vector.
@@ -81,20 +83,22 @@ impl TokenizedCommand {
                         "/." => tokens.push(Token::BasenameNoExt),
                         _ => {
                             append(&mut tokens, &input[start..id + 1]);
-                            continue
-                        },
+                            continue;
+                        }
                     }
                     flags |= PLACE;
-                },
+                }
                 // We aren't collecting characters for a text string if the `OPEN` bit is set.
                 _ if flags & OPEN != 0 => (),
                 // Push the character onto the text buffer
-                _ => text.push(character)
+                _ => text.push(character),
             }
         }

         // Take care of any stragglers left behind.
-        if !text.is_empty() { append(&mut tokens, &text); }
+        if !text.is_empty() {
+            append(&mut tokens, &text);
+        }

         // If a placeholder token was not supplied, append one at the end of the command.
         if flags & PLACE == 0 {
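
For readers following the tokenizer hunks above: the code walks the command string character by character, tracking an OPEN bit while inside a '{...}' group and a PLACE bit once a placeholder has been emitted; if no placeholder was supplied, one is appended at the end. The following much-reduced standalone sketch shows only that idea. It is illustrative, uses a simplified Token enum that is not fd's, and recognises only the bare "{}" placeholder.

    // Simplified token type for illustration; fd's real `Token` enum has more variants.
    #[derive(Debug, PartialEq)]
    enum Token {
        Text(String),
        Placeholder,
    }

    const OPEN: u8 = 1;  // currently inside a '{...}' group
    const PLACE: u8 = 2; // at least one placeholder was emitted

    fn tokenize(input: &str) -> Vec<Token> {
        let mut tokens = Vec::new();
        let mut text = String::new();
        let mut flags = 0u8;

        let mut chars = input.chars();
        while let Some(c) = chars.next() {
            match c {
                // A backslash escapes the next character (e.g. "\{" keeps a literal '{').
                '\\' => {
                    if let Some(nc) = chars.next() {
                        if nc != '{' {
                            text.push(c);
                        }
                        text.push(nc);
                    }
                }
                // Start of a possible placeholder group.
                '{' if flags & OPEN == 0 => {
                    flags |= OPEN;
                    if !text.is_empty() {
                        tokens.push(Token::Text(std::mem::take(&mut text)));
                    }
                }
                // End of the group: only "{}" is recognised in this sketch.
                '}' if flags & OPEN != 0 => {
                    flags &= !OPEN;
                    flags |= PLACE;
                    tokens.push(Token::Placeholder);
                }
                // Ignore characters while inside '{...}' (the real code inspects them).
                _ if flags & OPEN != 0 => (),
                _ => text.push(c),
            }
        }
        if !text.is_empty() {
            tokens.push(Token::Text(text));
        }
        // If no placeholder was written, append one at the end, as fd does.
        if flags & PLACE == 0 {
            tokens.push(Token::Placeholder);
        }
        tokens
    }

    fn main() {
        assert_eq!(
            tokenize("echo {}"),
            vec![Token::Text("echo ".into()), Token::Placeholder]
        );
        assert_eq!(
            tokenize("echo hello"),
            vec![Token::Text("echo hello".into()), Token::Placeholder]
        );
    }
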
@@ -115,7 +119,9 @@ impl TokenizedCommand {
         for token in &self.tokens {
             match *token {
                 Token::Basename => *command += basename(&input.to_string_lossy()),
-                Token::BasenameNoExt => *command += remove_extension(basename(&input.to_string_lossy())),
+                Token::BasenameNoExt => {
+                    *command += remove_extension(basename(&input.to_string_lossy()))
+                }
                 Token::NoExt => *command += remove_extension(&input.to_string_lossy()),
                 Token::Parent => *command += dirname(&input.to_string_lossy()),
                 Token::Placeholder => *command += &input.to_string_lossy(),
@@ -167,19 +173,14 @@ mod tests {
     #[test]
     fn tokens() {
         let expected = TokenizedCommand {
-            tokens: vec![
-                Token::Text("echo ${SHELL}: ".into()),
-                Token::Placeholder,
-            ],
+            tokens: vec![Token::Text("echo ${SHELL}: ".into()), Token::Placeholder],
         };

         assert_eq!(TokenizedCommand::new("echo $\\{SHELL}: {}"), expected);
         assert_eq!(TokenizedCommand::new("echo ${SHELL}:"), expected);
-        assert_eq!(TokenizedCommand::new("echo {.}"), TokenizedCommand {
-            tokens: vec![
-                Token::Text("echo ".into()),
-                Token::NoExt,
-            ],
-        });
+        assert_eq!(
+            TokenizedCommand::new("echo {.}"),
+            TokenizedCommand { tokens: vec![Token::Text("echo ".into()), Token::NoExt] }
+        );
     }
 }
@@ -1,9 +1,15 @@
 pub fn basename(input: &str) -> &str {
     let mut index = 0;
     for (id, character) in input.bytes().enumerate() {
-        if character == b'/' { index = id; }
+        if character == b'/' {
+            index = id;
+        }
+    }
+    if index == 0 {
+        input
+    } else {
+        &input[index + 1..]
     }
-    if index == 0 { input } else { &input[index+1..] }
 }

 /// Removes the extension of a given input
@@ -12,18 +18,28 @@ pub fn remove_extension(input: &str) -> &str {
     let mut ext_index = 0;

     for (id, character) in input.bytes().enumerate() {
-        if character == b'/' { dir_index = id; }
-        if character == b'.' { ext_index = id; }
+        if character == b'/' {
+            dir_index = id;
+        }
+        if character == b'.' {
+            ext_index = id;
+        }
     }

     // Account for hidden files and directories
-    if ext_index == 0 || dir_index + 2 > ext_index { input } else { &input[0..ext_index] }
+    if ext_index == 0 || dir_index + 2 > ext_index {
+        input
+    } else {
+        &input[0..ext_index]
+    }
 }

 pub fn dirname(input: &str) -> &str {
     let mut index = 0;
     for (id, character) in input.bytes().enumerate() {
-        if character == b'/' { index = id; }
+        if character == b'/' {
+            index = id;
+        }
     }
     if index == 0 { "." } else { &input[0..index] }
 }

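
The helpers reformatted above (basename, remove_extension, dirname) are plain byte scans over the path string. The following standalone sketch copies the post-format bodies from the diff so their behaviour can be checked in isolation; the initialisation of dir_index sits outside the hunk shown above and is assumed here.

    pub fn basename(input: &str) -> &str {
        let mut index = 0;
        for (id, character) in input.bytes().enumerate() {
            if character == b'/' {
                index = id;
            }
        }
        if index == 0 {
            input
        } else {
            &input[index + 1..]
        }
    }

    pub fn remove_extension(input: &str) -> &str {
        let mut dir_index = 0; // assumed: declared just above the visible hunk
        let mut ext_index = 0;
        for (id, character) in input.bytes().enumerate() {
            if character == b'/' {
                dir_index = id;
            }
            if character == b'.' {
                ext_index = id;
            }
        }
        // Account for hidden files and directories
        if ext_index == 0 || dir_index + 2 > ext_index {
            input
        } else {
            &input[0..ext_index]
        }
    }

    pub fn dirname(input: &str) -> &str {
        let mut index = 0;
        for (id, character) in input.bytes().enumerate() {
            if character == b'/' {
                index = id;
            }
        }
        if index == 0 { "." } else { &input[0..index] }
    }

    fn main() {
        assert_eq!(basename("/usr/local/bin/fd"), "fd");
        assert_eq!(dirname("/usr/local/bin/fd"), "/usr/local/bin");
        assert_eq!(remove_extension("/home/user/photo.jpg"), "/home/user/photo");
        // Hidden files keep their name; inputs without a slash map to themselves / ".".
        assert_eq!(remove_extension("/home/user/.bashrc"), "/home/user/.bashrc");
        assert_eq!(basename("fd"), "fd");
        assert_eq!(dirname("fd"), ".");
    }
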
@@ -75,7 +75,7 @@ pub struct FdOptions {
     pub extension: Option<String>,

     /// If a value is supplied, each item found will be used to generate and execute commands.
-    pub command: Option<TokenizedCommand>
+    pub command: Option<TokenizedCommand>,
 }

 /// Print error message to stderr and exit with status `1`.
@@ -90,8 +90,7 @@ fn main() {
         None
     };

-    let command = matches.value_of("exec")
-        .map(|x| TokenizedCommand::new(&x));
+    let command = matches.value_of("exec").map(|x| TokenizedCommand::new(&x));

     let config = FdOptions {
         case_sensitive,
@@ -132,7 +131,7 @@ fn main() {
         extension: matches.value_of("extension").map(|e| {
             e.trim_left_matches('.').to_lowercase()
         }),
-        command
+        command,
     };

     let root = Path::new(ROOT_DIR);
@@ -78,7 +78,9 @@ pub fn scan(root: &Path, pattern: Arc<Regex>, base: &Path, config: Arc<FdOptions
         }

         // Wait for all threads to exit before exiting the program.
-        for h in handles { h.join().unwrap(); }
+        for h in handles {
+            h.join().unwrap();
+        }
     } else {
         let start = time::Instant::now();
