Apply cargo fmt

parent fb1cd3a322, commit 884bd41cae
8 changed files with 75 additions and 57 deletions
src/app.rs | 16
@@ -95,12 +95,7 @@ pub fn build_app() -> App<'static, 'static> {
                 .takes_value(true)
                 .hidden(true),
         )
-        .arg(
-            arg("exec")
-                .long("exec")
-                .short("x")
-                .takes_value(true)
-        )
+        .arg(arg("exec").long("exec").short("x").takes_value(true))
         .arg(arg("pattern"))
         .arg(arg("path"))
 }

@@ -151,8 +146,10 @@ fn usage() -> HashMap<&'static str, Help> {
            'l' or 'symlink': symbolic links");
     doc!(h, "exec"
         , "Execute each discovered path using the argument that follows as the command expression."
-        , "Execute each discovered path using the argument that follows as the command expression.\n \
-           The following are valid tokens that can be used within the expression for generating commands:\n \
+        , "Execute each discovered path using the argument that follows as the command \
+           expression.\n \
+           The following are valid tokens that can be used within the expression for generating \
+           commands:\n \
            '{}': places the input in the location of this token\n \
            '{.}': removes the extension from the input\n \
            '{/}': places the basename of the input\n \

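For context: given a search result like foo/bar.txt, the '{}' token documented above stands for foo/bar.txt, '{.}' for foo/bar, and '{/}' for bar.txt; the '{//}' and '{/.}' tokens handled later in this diff (Token::Parent and Token::BasenameNoExt) would yield foo and bar respectively.
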
@@ -169,7 +166,8 @@ fn usage() -> HashMap<&'static str, Help> {
            'always': always use colorized output");
     doc!(h, "threads"
         , "Set number of threads to use for searching & executing"
-        , "Set number of threads to use for searching & executing (default: number of available CPU cores)");
+        , "Set number of threads to use for searching & executing (default: number of available \
+           CPU cores)");
     doc!(h, "max-buffer-time"
         , "the time (in ms) to buffer, before streaming to the console"
         , "Amount of time in milliseconds to buffer, before streaming the search results to\

@@ -36,7 +36,9 @@ impl<'a> CommandTicket<'a> {

         // Then wait for the command to exit, if it was spawned.
         match cmd {
-            Ok(mut child) => { let _ = child.wait(); },
+            Ok(mut child) => {
+                let _ = child.wait();
+            }
             Err(why) => eprintln!("fd: exec error: {}", why),
         }

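For context, a minimal standalone sketch of the spawn-then-wait pattern formatted above, using only std::process; the command and argument are made up for illustration and are not part of this commit:

    use std::process::Command;

    fn main() {
        // Spawn the child process; `spawn` returns io::Result<Child>.
        let cmd = Command::new("echo").arg("hello").spawn();

        // Then wait for the command to exit, if it was spawned.
        match cmd {
            Ok(mut child) => {
                let _ = child.wait();
            }
            Err(why) => eprintln!("exec error: {}", why),
        }
    }
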
@@ -19,7 +19,7 @@ pub fn job(rx: Arc<Mutex<Receiver<PathBuf>>>, cmd: Arc<TokenizedCommand>) {
         // has closed, exit from the loop
         let value = match lock.recv() {
             Ok(value) => value,
-            Err(_) => break
+            Err(_) => break,
         };

         // Drop the lock so that other threads can read from the the receiver.

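For context, a minimal standalone sketch (std only, illustrative names) of the shared-receiver pattern that `job` uses: workers lock the receiver just long enough to pull one path, and break once the sending side is dropped.

    use std::path::PathBuf;
    use std::sync::mpsc::{channel, Receiver};
    use std::sync::{Arc, Mutex};
    use std::thread;

    fn worker(rx: Arc<Mutex<Receiver<PathBuf>>>) {
        loop {
            // Lock the receiver only for the duration of this statement,
            // so other workers can receive as soon as we have a value.
            let value = match rx.lock().unwrap().recv() {
                Ok(value) => value,
                Err(_) => break, // sending side dropped: no more work
            };
            println!("got {}", value.display());
        }
    }

    fn main() {
        let (tx, rx) = channel();
        let rx = Arc::new(Mutex::new(rx));

        let handles: Vec<_> = (0..2)
            .map(|_| {
                let rx = Arc::clone(&rx);
                thread::spawn(move || worker(rx))
            })
            .collect();

        tx.send(PathBuf::from("a.txt")).unwrap();
        tx.send(PathBuf::from("b.txt")).unwrap();
        drop(tx); // closing the channel lets the workers exit their loops

        for h in handles {
            h.join().unwrap();
        }
    }
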
@@ -52,12 +52,14 @@ impl TokenizedCommand {
             match character {
                 // Backslashes are useful in cases where we want to use the '{' character
                 // without having all occurrences of it to collect placeholder tokens.
-                '\\' => if let Some((_, nchar)) = chars.next() {
+                '\\' => {
+                    if let Some((_, nchar)) = chars.next() {
                         if nchar != '{' {
                             text.push(character);
                         }
                         text.push(nchar);
-                },
+                    }
+                }
                 // When a raw '{' is discovered, we will note it's position, and use that for a
                 // later comparison against valid placeholder tokens.
                 '{' if flags & OPEN == 0 => {

@@ -67,34 +69,36 @@ impl TokenizedCommand {
                         append(&mut tokens, &text);
                         text.clear();
                     }
-                },
+                }
                 // If the `OPEN` bit is set, we will compare the contents between the discovered
                 // '{' and '}' characters against a list of valid tokens, then pushing the
                 // corresponding token onto the `tokens` vector.
                 '}' if flags & OPEN != 0 => {
                     flags ^= OPEN;
-                    match &input[start+1..id] {
+                    match &input[start + 1..id] {
                         "" => tokens.push(Token::Placeholder),
                         "." => tokens.push(Token::NoExt),
                         "/" => tokens.push(Token::Basename),
                         "//" => tokens.push(Token::Parent),
                         "/." => tokens.push(Token::BasenameNoExt),
                         _ => {
-                            append(&mut tokens, &input[start..id+1]);
-                            continue
-                        },
+                            append(&mut tokens, &input[start..id + 1]);
+                            continue;
+                        }
                     }
                     flags |= PLACE;
-                },
+                }
                 // We aren't collecting characters for a text string if the `OPEN` bit is set.
                 _ if flags & OPEN != 0 => (),
                 // Push the character onto the text buffer
-                _ => text.push(character)
+                _ => text.push(character),
             }
         }

         // Take care of any stragglers left behind.
-        if !text.is_empty() { append(&mut tokens, &text); }
+        if !text.is_empty() {
+            append(&mut tokens, &text);
+        }

         // If a placeholder token was not supplied, append one at the end of the command.
         if flags & PLACE == 0 {

@@ -115,7 +119,9 @@ impl TokenizedCommand {
         for token in &self.tokens {
             match *token {
                 Token::Basename => *command += basename(&input.to_string_lossy()),
-                Token::BasenameNoExt => *command += remove_extension(basename(&input.to_string_lossy())),
+                Token::BasenameNoExt => {
+                    *command += remove_extension(basename(&input.to_string_lossy()))
+                }
                 Token::NoExt => *command += remove_extension(&input.to_string_lossy()),
                 Token::Parent => *command += dirname(&input.to_string_lossy()),
                 Token::Placeholder => *command += &input.to_string_lossy(),

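For context, a deliberately simplified standalone sketch of what the placeholder tokens evaluate to for one input path. This is not fd's implementation: it uses std::path instead of the basename/remove_extension/dirname helpers in this diff, and it ignores the hidden-file handling that remove_extension deals with.

    use std::path::Path;

    fn expand(template: &str, input: &str) -> String {
        let p = Path::new(input);
        // Rough equivalents of the token expansions.
        let no_ext = p.with_extension("").to_string_lossy().into_owned();
        let base = p.file_name().map(|s| s.to_string_lossy().into_owned()).unwrap_or_default();
        let parent = p.parent().map(|s| s.to_string_lossy().into_owned()).unwrap_or_default();
        let base_no_ext = p.file_stem().map(|s| s.to_string_lossy().into_owned()).unwrap_or_default();

        template
            .replace("{//}", &parent)
            .replace("{/.}", &base_no_ext)
            .replace("{/}", &base)
            .replace("{.}", &no_ext)
            .replace("{}", input)
    }

    fn main() {
        assert_eq!(
            expand("convert {} {.}.png", "img/photo.jpg"),
            "convert img/photo.jpg img/photo.png"
        );
        assert_eq!(expand("mv {/} {//}", "img/photo.jpg"), "mv photo.jpg img");
        println!("ok");
    }
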
@@ -167,19 +173,14 @@ mod tests {
     #[test]
     fn tokens() {
         let expected = TokenizedCommand {
-            tokens: vec![
-                Token::Text("echo ${SHELL}: ".into()),
-                Token::Placeholder,
-            ],
+            tokens: vec![Token::Text("echo ${SHELL}: ".into()), Token::Placeholder],
         };

         assert_eq!(TokenizedCommand::new("echo $\\{SHELL}: {}"), expected);
         assert_eq!(TokenizedCommand::new("echo ${SHELL}:"), expected);
-        assert_eq!(TokenizedCommand::new("echo {.}"), TokenizedCommand {
-            tokens: vec![
-                Token::Text("echo ".into()),
-                Token::NoExt,
-            ],
-        });
+        assert_eq!(
+            TokenizedCommand::new("echo {.}"),
+            TokenizedCommand { tokens: vec![Token::Text("echo ".into()), Token::NoExt] }
+        );
     }
 }

@@ -1,9 +1,15 @@
 pub fn basename(input: &str) -> &str {
     let mut index = 0;
     for (id, character) in input.bytes().enumerate() {
-        if character == b'/' { index = id; }
+        if character == b'/' {
+            index = id;
+        }
     }
-    if index == 0 { input } else { &input[index+1..] }
+    if index == 0 {
+        input
+    } else {
+        &input[index + 1..]
+    }
 }

 /// Removes the extension of a given input

@@ -12,18 +18,28 @@ pub fn remove_extension(input: &str) -> &str {
     let mut ext_index = 0;

     for (id, character) in input.bytes().enumerate() {
-        if character == b'/' { dir_index = id; }
-        if character == b'.' { ext_index = id; }
+        if character == b'/' {
+            dir_index = id;
+        }
+        if character == b'.' {
+            ext_index = id;
+        }
     }

     // Account for hidden files and directories
-    if ext_index == 0 || dir_index + 2 > ext_index { input } else { &input[0..ext_index] }
+    if ext_index == 0 || dir_index + 2 > ext_index {
+        input
+    } else {
+        &input[0..ext_index]
+    }
 }

 pub fn dirname(input: &str) -> &str {
     let mut index = 0;
     for (id, character) in input.bytes().enumerate() {
-        if character == b'/' { index = id; }
+        if character == b'/' {
+            index = id;
+        }
     }
     if index == 0 { "." } else { &input[0..index] }
 }

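For comparison (an aside, not part of this commit): the helpers above roughly correspond to std::path::Path accessors, apart from the explicit hidden-file check in remove_extension. The example path is made up.

    use std::path::Path;

    fn main() {
        let p = Path::new("some/dir/file.txt");
        assert_eq!(p.file_name().unwrap(), "file.txt");               // like basename
        assert_eq!(p.file_stem().unwrap(), "file");                   // basename without extension
        assert_eq!(p.parent().unwrap(), Path::new("some/dir"));       // like dirname
        assert_eq!(p.with_extension(""), Path::new("some/dir/file")); // like remove_extension
        println!("ok");
    }
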
@@ -75,7 +75,7 @@ pub struct FdOptions {
     pub extension: Option<String>,

     /// If a value is supplied, each item found will be used to generate and execute commands.
-    pub command: Option<TokenizedCommand>
+    pub command: Option<TokenizedCommand>,
 }

 /// Print error message to stderr and exit with status `1`.

@@ -90,8 +90,7 @@ fn main() {
         None
     };

-    let command = matches.value_of("exec")
-        .map(|x| TokenizedCommand::new(&x));
+    let command = matches.value_of("exec").map(|x| TokenizedCommand::new(&x));

     let config = FdOptions {
         case_sensitive,

@@ -132,7 +131,7 @@ fn main() {
         extension: matches.value_of("extension").map(|e| {
             e.trim_left_matches('.').to_lowercase()
         }),
-        command
+        command,
     };

     let root = Path::new(ROOT_DIR);

@@ -78,7 +78,9 @@ pub fn scan(root: &Path, pattern: Arc<Regex>, base: &Path, config: Arc<FdOptions
         }

         // Wait for all threads to exit before exiting the program.
-        for h in handles { h.join().unwrap(); }
+        for h in handles {
+            h.join().unwrap();
+        }
     } else {
         let start = time::Instant::now();

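For context, a minimal standalone sketch of the join pattern formatted above: spawn a few threads, then wait for every handle before returning. The thread bodies are placeholders.

    use std::thread;

    fn main() {
        let handles: Vec<_> = (0..4)
            .map(|i| thread::spawn(move || println!("worker {} done", i)))
            .collect();

        // Wait for all threads to exit before exiting the program.
        for h in handles {
            h.join().unwrap();
        }
    }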