mirror of https://github.com/sharkdp/fd.git
Compare commits
851 Commits
Author | SHA1 | Date |
---|---|---|
David Peter | 29936f0fba | |
David Peter | bfc16a1dee | |
Thayne McCombs | 289a68bac3 | |
Thayne McCombs | fcaff0f385 | |
Thayne McCombs | 36163f9c3a | |
Thayne McCombs | d90ec1758e | |
Thayne McCombs | ea22cbd712 | |
Thayne McCombs | d44badc190 | |
Thayne McCombs | 6becb66185 | |
Thayne McCombs | 1a1f057e5d | |
Thayne McCombs | 10a269bd3f | |
Thayne McCombs | 90d3381814 | |
Thayne McCombs | b1f83a0bb0 | |
Thayne McCombs | 3bc70925a9 | |
Thayne McCombs | f287f08b9f | |
Tavian Barnes | 0e4488e9dc | |
Tavian Barnes | d7d63eddbe | |
Thayne McCombs | 8acd7722f0 | |
Thayne McCombs | 92fab6e058 | |
Thayne McCombs | a0ee0856db | |
Thayne McCombs | b8df500a70 | |
Thayne McCombs | cd96ca071d | |
Tavian Barnes | 216472ff9f | |
Thayne McCombs | 3680d10e5c | |
dependabot[bot] | abe3b9cd78 | |
Thayne McCombs | 7aad6c9edf | |
Thayne McCombs | ddd3aae249 | |
dependabot[bot] | 6d3bb68faf | |
dependabot[bot] | 21d50dae8c | |
David Peter | 9279b1f0af | |
Thayne McCombs | 6647085015 | |
Thayne McCombs | 6af8f092ee | |
Thayne McCombs | c4094c7a05 | |
AlbydS | 6d58df5f0c | |
AlbydS | ffecccf209 | |
Thayne McCombs | 31f2839751 | |
Thayne McCombs | e10a4eab2b | |
Thayne McCombs | 8eb047945e | |
Tavian Barnes | 1031325cca | |
Tavian Barnes | 9fc2167cf9 | |
Tavian Barnes | ae1de4de24 | |
Thayne McCombs | 7e5d14b733 | |
Thayne McCombs | 85cbea8dcb | |
Thayne McCombs | bc6782624e | |
Thayne McCombs | cf6ff87c7d | |
Thayne McCombs | 3cd73d7927 | |
binlingyu | 7794c4aae5 | |
Thayne McCombs | 8c7a84ea30 | |
Thayne McCombs | e262ade74e | |
Thayne McCombs | 11069e284a | |
Thayne McCombs | 6e2e86decb | |
Thayne McCombs | 15d3b63ccc | |
dependabot[bot] | 453577651e | |
dependabot[bot] | 39c07b7b4c | |
dependabot[bot] | 5910285db0 | |
Thayne McCombs | 68fe31da3f | |
Jian Wang | f875ea9a52 | |
one230six | 138919907b | |
Thayne McCombs | b8744626e7 | |
dependabot[bot] | b08d78f6fc | |
Tavian Barnes | 4efc05ef27 | |
garlic-hub | 0788c43c3f | |
Thayne McCombs | 3b2fd158b5 | |
Thayne McCombs | c38dbacbd0 | |
Thayne McCombs | 728b3200c0 | |
dependabot[bot] | 7f74cd9e56 | |
dependabot[bot] | 6ae8da6a39 | |
dependabot[bot] | f699c8bb6a | |
Nathan Bellows | ffde94c10e | |
Nathan Bellows | b0a8848f68 | |
AlbydS | d651a595d4 | |
David Peter | 969316cc0e | |
David Peter | 5b46867507 | |
Thayne McCombs | e117a373a7 | |
dependabot[bot] | a4aed14337 | |
Thayne McCombs | 9cde3c12a2 | |
Thayne McCombs | 906e7a933e | |
dependabot[bot] | 077d28d13a | |
dependabot[bot] | b55bb1e9be | |
Thayne McCombs | 7a6cc92d6d | |
Thayne McCombs | b694c6e673 | |
dependabot[bot] | 17895538a0 | |
dependabot[bot] | 72ff1f9a87 | |
Thayne McCombs | ef3194a510 | |
Maksim Bondarenkov | 8773402246 | |
Rob | ff3fc81db4 | |
Tavian Barnes | 0dc3342c33 | |
Thayne McCombs | c66fc812ac | |
Thayne McCombs | 14ed023875 | |
Tavian Barnes | 58284b8dbe | |
Tavian Barnes | 60889d0b99 | |
dependabot[bot] | 7e19bad0a4 | |
Thayne McCombs | 4b1d73d39d | |
dependabot[bot] | 03e19a1ad2 | |
Thayne McCombs | 8fb9499c20 | |
Thayne McCombs | 38fb6a5958 | |
dependabot[bot] | 49cd62d65e | |
dependabot[bot] | 24bb5216bb | |
dependabot[bot] | 7f8760fd1f | |
Alexandru-Constantin Atomei | 3cb6b9d93a | |
Atomei Alexandru | c591106b86 | |
Alexandru-Constantin Atomei | 9f096737db | |
Alexandru-Constantin Atomei | 1bda165b25 | |
Thayne McCombs | f48372624d | |
Roshan Jossy | 5cd15536b6 | |
Sayan Goswami | aeb4a5fdad | |
Thayne McCombs | 9529f30129 | |
Thayne McCombs | 266311ca33 | |
Tavian Barnes | 954a3900b9 | |
David Peter | 07343b5baf | |
David Peter | a03ed8b300 | |
David Peter | 13a93e5cbe | |
David Peter | d9c4e6239f | |
David Peter | 61ebd9be6a | |
David Peter | e3b40208d5 | |
Tavian Barnes | 16c2d1e1d0 | |
Tavian Barnes | fea1622724 | |
David Peter | 00b64f3ccb | |
Thayne McCombs | 74b850a642 | |
dependabot[bot] | 4202f3939e | |
dependabot[bot] | e1ecba2ce4 | |
dependabot[bot] | 0853e35e1f | |
dependabot[bot] | 4b4a74c988 | |
Tavian Barnes | 84f032eba8 | |
Tavian Barnes | b8a5f95cf2 | |
Tavian Barnes | 73260c0e35 | |
Tavian Barnes | 5903dec289 | |
Tavian Barnes | 571ebb349b | |
Tavian Barnes | d62bbbbcd1 | |
Lena | ad5fb44ddc | |
Tavian Barnes | 8bbbd7679b | |
David Peter | cd32a3827d | |
Tavian Barnes | 66c0637c90 | |
Tavian Barnes | c9df4296f9 | |
Tavian Barnes | 7c5cf28ace | |
Tavian Barnes | 51002c842d | |
Tavian Barnes | 8e582971fa | |
Tavian Barnes | 6daa72f929 | |
dependabot[bot] | 8355d78359 | |
dependabot[bot] | dbc1818073 | |
dependabot[bot] | e57ce7f2a4 | |
dependabot[bot] | d8f89fa59e | |
dependabot[bot] | 350003d8da | |
tkb-github | 15329f9cfa | |
Thayne McCombs | 95b4dff379 | |
Thayne McCombs | c96b1af3be | |
Thayne McCombs | 5ee6365510 | |
Thayne McCombs | 1d57b3a064 | |
Thayne McCombs | 325d419e39 | |
Thayne McCombs | 8b5532d8dd | |
João Marcos P. Bezerra | 7263b5e01d | |
Thayne McCombs | c6fcdbe000 | |
Thayne McCombs | 306dacd0b4 | |
Thayne McCombs | 08910e4e3f | |
Thayne McCombs | 8897659607 | |
Thayne McCombs | 53fd416c47 | |
Thayne McCombs | 5e0018fb1f | |
Thayne McCombs | 054bae01ef | |
David Peter | 8f32a758a4 | |
David Peter | 0fc8facfb7 | |
Thayne McCombs | 069b181625 | |
Thayne McCombs | d9b69c8405 | |
Thayne McCombs | a11f8426d4 | |
Thayne McCombs | e6aa8e82f6 | |
sitiom | 978866d983 | |
Christian Göttsche | 36bc84041b | |
Thayne McCombs | 3ed4ea7538 | |
Thayne McCombs | 6b5fe1c634 | |
Thayne McCombs | 7c39fff969 | |
Thayne McCombs | b922ca18f0 | |
Thayne McCombs | b8e7cbd5e3 | |
Karthik Prakash | 9df9a489f0 | |
Thayne McCombs | fa01a280ed | |
Thayne McCombs | e6b5a4ef9d | |
Thayne McCombs | 19832fcbd3 | |
sonke | d371b10039 | |
skoriop | 8c50bc733d | |
skoriop | 3f9794cd1a | |
Thayne McCombs | fc240f7b2a | |
Eden Mikitas | dea9110b90 | |
Thayne McCombs | 93cdb2628e | |
Thayne McCombs | 817c0bc512 | |
Thayne McCombs | e97dec777c | |
Thayne McCombs | 5f494b0925 | |
Thayne McCombs | 59feb7d6ab | |
dependabot[bot] | 97f5326393 | |
dependabot[bot] | e2a298a84f | |
dependabot[bot] | 3317362e78 | |
dependabot[bot] | 39d0a3ff3c | |
dependabot[bot] | d36c59920d | |
Tavian Barnes | 995d2f5e44 | |
Tavian Barnes | 3884f054f1 | |
Collin Styles | 32504fa3d5 | |
Josh Taylor | afd0efa291 | |
Thayne McCombs | 737b5bc42e | |
dependabot[bot] | 601d2bb13e | |
Thayne McCombs | 917c56b120 | |
Thayne McCombs | 9ffd57f4ef | |
dependabot[bot] | 08a8723ee7 | |
Thayne McCombs | efdba804ac | |
Thayne McCombs | 6f0632273b | |
Thayne McCombs | c848af33d5 | |
dependabot[bot] | f33de6544f | |
dependabot[bot] | d7e5dcf9d2 | |
dependabot[bot] | b38ba68ccc | |
dependabot[bot] | e55907dc8b | |
Thayne McCombs | a248607bee | |
dependabot[bot] | ed23fb9054 | |
Thayne McCombs | 7d357a6cec | |
dependabot[bot] | 1feed8816a | |
Tavian Barnes | 9ce43b2d7b | |
dependabot[bot] | a6a78e1c65 | |
Thayne McCombs | cd14bb8a2c | |
Thayne McCombs | 7162f28a5b | |
dependabot[bot] | 2328e9cd17 | |
dependabot[bot] | 2a6026b25d | |
tkb-github | c62224d2c3 | |
Thayne McCombs | 9a40d21ceb | |
Andrea Frigido | d019b02829 | |
Thayne McCombs | 2f813601aa | |
Thayne McCombs | aae8519a1d | |
Thayne McCombs | 4bfb903b22 | |
dependabot[bot] | d91b2a202e | |
dependabot[bot] | a74a43987a | |
dependabot[bot] | 2a588a0171 | |
Utkarsh Gupta | 3ae04546ea | |
Thayne McCombs | a0370aaf25 | |
Thayne McCombs | 740edeb73f | |
Thayne McCombs | 91e3c3cba5 | |
Thayne McCombs | d6e9cbfff3 | |
dependabot[bot] | 8d3172f987 | |
Thayne McCombs | 5be58f0f76 | |
Thayne McCombs | 8d30d6a4fe | |
Thayne McCombs | 5ff866aa26 | |
dependabot[bot] | 4ecf013527 | |
dependabot[bot] | 1c3a38b423 | |
dependabot[bot] | a3a4912ced | |
Thayne McCombs | 0d32bebcc2 | |
Thayne McCombs | 0884b837b2 | |
dependabot[bot] | 11199079c3 | |
Thayne McCombs | 69521a1057 | |
Thayne McCombs | 59a487b524 | |
dependabot[bot] | 0e2a4bac72 | |
dependabot[bot] | 35aa52538c | |
dependabot[bot] | b680a9de9f | |
Nathan Houghton | 42244e5f32 | |
dependabot[bot] | 072c9e56e1 | |
dependabot[bot] | f7bb60aba5 | |
dependabot[bot] | b019d8f1bf | |
dependabot[bot] | 15c795d2e1 | |
dependabot[bot] | a428f7eb13 | |
dependabot[bot] | 02c9efba28 | |
dependabot[bot] | aebe7537c3 | |
Ryan Caezar Itang | 4356ba3c43 | |
Ryan Caezar Itang | c9afbc5b70 | |
Ryan Caezar Itang | e38e3078ac | |
David Peter | 5439326aa4 | |
Thayne McCombs | 35bc1f95fb | |
Thayne McCombs | 161ee64399 | |
Thayne McCombs | 399bf3a931 | |
Tavian Barnes | bae0a1bfa6 | |
Tavian Barnes | e4bca1033c | |
Thayne McCombs | da40e76aae | |
Thayne McCombs | 31ac4a3f5c | |
Thayne McCombs | 424d6efcc0 | |
Thayne McCombs | ccf8e69650 | |
Thayne McCombs | ee44c1ed90 | |
David Peter | 3ac2e13a25 | |
Thayne McCombs | 06a6a118a1 | |
Thayne McCombs | c095867154 | |
Tavian Barnes | 324005fb3a | |
cyqsimon | d8166907e6 | |
cyqsimon | 7cbfb8e29c | |
dependabot[bot] | 1c5ce0a661 | |
dependabot[bot] | f98496abcd | |
David Peter | 82aa17f9fb | |
David Peter | 9b8457aeb3 | |
dependabot[bot] | 535b34e48a | |
dependabot[bot] | 0909d413d0 | |
Thayne McCombs | 284ee3d0c6 | |
John Purnell | f3e6536d59 | |
Tavian Barnes | 002645d7ac | |
Thayne McCombs | 9f6abded0e | |
Thayne McCombs | 840a565d3a | |
David Peter | 3cf5ac0b9a | |
sitiom | a217823510 | |
Frank_Shek | f867c28a2c | |
David Peter | 73a693ef28 | |
David Peter | 9955e20d01 | |
David Peter | 03052757a7 | |
dependabot[bot] | bdcc24ed04 | |
dependabot[bot] | 8478a2c7eb | |
Thayne McCombs | c34bfa30fe | |
David Peter | af9daff4ee | |
Thayne McCombs | 10ba34f78b | |
Thayne McCombs | 503ede7535 | |
Max 👨🏽💻 Coplan | 08c0d427bf | |
Thayne McCombs | ab7d5eff87 | |
Thayne McCombs | 686318c005 | |
Thayne McCombs | c04ab74744 | |
Thayne McCombs | 8fdfc6c2ef | |
dependabot[bot] | 71393fa1be | |
Thayne McCombs | 5e50825af2 | |
dependabot[bot] | 8fed650de9 | |
dependabot[bot] | 4d8569ad6b | |
dependabot[bot] | 2f0677b556 | |
dependabot[bot] | 0a8a72d4f3 | |
David Peter | de611c8835 | |
Thayne McCombs | a36f2cf61c | |
Thayne McCombs | b6c7ebc4f1 | |
Thayne McCombs | fd707b42c2 | |
Thayne McCombs | 7c86c7d585 | |
Thayne McCombs | 27013537c9 | |
David Peter | addf00cb16 | |
Thayne McCombs | 1964e434e6 | |
Thayne McCombs | d5bca085dd | |
David Peter | 8ecfdfee43 | |
Thayne McCombs | b7a2f68d59 | |
Thayne McCombs | e98a6c6755 | |
David Peter | 614e637dbc | |
Thayne McCombs | 7ec795cd57 | |
Tavian Barnes | 8f510265fc | |
Thayne McCombs | 39d80a59b6 | |
Tavian Barnes | 6e3eb26af3 | |
Thayne McCombs | 4a66d8fcd8 | |
Thayne McCombs | daa986ea35 | |
Thayne McCombs | 0a575763a1 | |
dependabot[bot] | 547d08c1ef | |
Thayne McCombs | bbd66b3240 | |
dependabot[bot] | 2ddc2f6c18 | |
dependabot[bot] | 58a9dde73f | |
Thayne McCombs | d441516c9d | |
Thayne McCombs | d991beb942 | |
Thayne McCombs | 650a511fa4 | |
Thayne McCombs | 2aa966cb3c | |
Ptipiak | cd5fad3cf3 | |
David Peter | c9d3968475 | |
David Peter | 36e60223eb | |
David Peter | 781bd4bcf2 | |
David Peter | 0d9926de40 | |
David Peter | e147ba901b | |
Kasper Gałkowski | 7e26925933 | |
David Peter | 8dda499830 | |
Thayne McCombs | 317a0c3cd3 | |
Thayne McCombs | 67cf524287 | |
David Peter | db2590dca5 | |
Thayne McCombs | 64e642403e | |
David Peter | 38cdeb0413 | |
David Peter | 5c87ff524e | |
Ptipiak | 88bebb8aac | |
Thayne McCombs | d89b5755d9 | |
Miles Liu | a3489674bf | |
Thayne McCombs | f6e74407e8 | |
Thayne McCombs | 41d775bedd | |
David Peter | 45cb15d60f | |
Thayne McCombs | ce4e8675ed | |
Bjoern Hiller | 2fcfe7a5b7 | |
Miles Liu | fdab5da795 | |
Thayne McCombs | 85e3adaf18 | |
Thayne McCombs | 38d406876d | |
Thayne McCombs | 84bf65e023 | |
Thayne McCombs | 2c3e40c9d9 | |
Thayne McCombs | 9e88f91c22 | |
Thayne McCombs | 0773b3067e | |
Thayne McCombs | bba7e0acd8 | |
Dominik Hassler | 6ebc366146 | |
Miles Liu | b389f41cb2 | |
David Peter | 56c405d8e6 | |
David Peter | 99d1db8cb3 | |
David Peter | fbef976b92 | |
Thayne McCombs | 55aae3a8c0 | |
Thayne McCombs | b04cae2ca0 | |
Thayne McCombs | c159ea2042 | |
David Peter | 567ce2640f | |
David Peter | 527fe9e262 | |
David Peter | 0a7b51ad42 | |
David Peter | f15be89bff | |
David Peter | cbd11d8a45 | |
Miles Liu | f52eaa92e4 | |
Miles Liu | f4c6d55b9d | |
Miles Liu | 5e7a870af6 | |
Thayne McCombs | 6b7f8da381 | |
Miles Liu | 8d1c3631cf | |
David Peter | c85929045e | |
David Peter | 715f0a9e44 | |
David Peter | bab77275ec | |
David Peter | 12165c73f8 | |
David Peter | 70c4865191 | |
David Peter | 7f5bc9884f | |
David Peter | ac35b11dc8 | |
David Peter | f8e832f212 | |
David Peter | bc94fcc90f | |
David Peter | b57ed11f65 | |
David Peter | 831fe666fa | |
David Peter | 13a47c3a2c | |
David Peter | 5771e74b95 | |
David Peter | 55029e889e | |
David Peter | ebd48d406e | |
David Peter | 3f72ef4cdd | |
David Peter | 169d22fde2 | |
David Peter | c6f9595a02 | |
Tavian Barnes | 5278405263 | |
Tavian Barnes | 5bb7a52704 | |
Tavian Barnes | 93e5488420 | |
Thayne McCombs | 36ee44a3f6 | |
Thayne McCombs | 8500c3193d | |
Tavian Barnes | cd8ec44abf | |
Tavian Barnes | 17d849df6c | |
Tavian Barnes | b2c8888a50 | |
Tavian Barnes | f0c50befce | |
Tavian Barnes | 7917c00887 | |
Vlad Kooklev | c9f4dec2ed | |
Frieder Bluemle | e9121ee515 | |
Thayne McCombs | 3782278f02 | |
Thayne McCombs | a3622ba294 | |
Thayne McCombs | aec125637b | |
amesgen | cb95f1dcd5 | |
David Peter | 4257034209 | |
Thayne McCombs | b6f0088b68 | |
Thayne McCombs | c2115884de | |
Thayne McCombs | c0b14705cd | |
Thayne McCombs | bf1a6f6680 | |
Thayne McCombs | 10ecb64ff3 | |
Thayne McCombs | aca64c09f8 | |
Thayne McCombs | 86c33492a7 | |
Thayne McCombs | c41d61eeda | |
Thayne McCombs | 5b9e302e9b | |
Thayne McCombs | f4c34b81c7 | |
Thayne McCombs | a50e417c67 | |
Thayne McCombs | ff7336b202 | |
Thayne McCombs | e6f4805bae | |
Thayne McCombs | 066ce41299 | |
Thayne McCombs | b7f5f4ac7d | |
Thayne McCombs | 4e7b403c1f | |
Tavian Barnes | 0984ed91ea | |
Tavian Barnes | 4ffc34956f | |
Tavian Barnes | 5039d2db99 | |
Thayne McCombs | ee2396b57a | |
Thayne McCombs | 5376676d07 | |
dependabot[bot] | 3f33b15e52 | |
dependabot[bot] | b1ee6e3046 | |
dependabot[bot] | 46c38a827e | |
David Peter | 4f1bcfedf3 | |
David Peter | e3e6057434 | |
Scott Baker | 12142008df | |
Kamil Aronowski | 8ccf21437b | |
Tavian Barnes | 4419401b6c | |
Tavian Barnes | e46d402ed1 | |
David Peter | 9ea882d7cc | |
Thayne McCombs | f63c463471 | |
David Peter | bd44c82404 | |
David Peter | 2569317ee8 | |
Scott Baker | cb91a5c848 | |
Scott Baker | fcf8099587 | |
Scott Baker | 97dfd8b428 | |
Scott Baker | c127dc89a5 | |
David Peter | c48586de08 | |
Marie Katrine Ekeberg | d6b1d16bfc | |
Marie Katrine Ekeberg | 90e860c7a5 | |
Marie Katrine Ekeberg | c7370ca82c | |
Tavian Barnes | cbc6ddeefc | |
Thayne McCombs | 45d6f55d3a | |
Thayne McCombs | 6655356ce3 | |
Tom Eichlersmith | 3e68733c94 | |
Tom Eichlersmith | c9db74bfd4 | |
David Peter | ff77a2e29e | |
David Peter | 19f5de51af | |
David Peter | 9dc1b03905 | |
dependabot[bot] | b1296d8557 | |
dependabot[bot] | 1550ec3ffc | |
dependabot[bot] | 448ed6dfe5 | |
dependabot[bot] | a552b57d36 | |
Thayne McCombs | e17c592dec | |
Thayne McCombs | 441c726d47 | |
Thayne McCombs | 9ced5675d2 | |
dependabot[bot] | b984596e3c | |
dependabot[bot] | 9c1b025695 | |
dependabot[bot] | 65837a573e | |
Sijmen | a0062b9a1b | |
Sijmen | 922f127aca | |
Sijmen | cb6295d025 | |
Sijmen | a5f69579cf | |
Sijmen | fdcbb2f008 | |
Tavian Barnes | ac934bd703 | |
Tavian Barnes | 3a4bb4f344 | |
Tavian Barnes | aed3bac71b | |
dependabot[bot] | e76402374f | |
dependabot[bot] | 41237e73a5 | |
dependabot[bot] | bfba33a230 | |
Thayne McCombs | e8615b88f9 | |
dependabot[bot] | b1763ba7db | |
dependabot[bot] | 6219aa3b26 | |
Thayne McCombs | 218d475cb2 | |
dependabot[bot] | 00a1abbf43 | |
dependabot[bot] | f189d99b98 | |
SukkaW | dc1b4ea720 | |
Tavian Barnes | 1b71425419 | |
Tavian Barnes | 791a66893e | |
Tavian Barnes | d298ce15cc | |
dependabot[bot] | 7c32932ada | |
dependabot[bot] | 777b8a031b | |
dependabot[bot] | 4af4c8df44 | |
Thayne McCombs | d6e5ee2f63 | |
allandowney | a263b2e58e | |
Jonathan Goren | f227bb291a | |
Jonathan Goren | b82e48f6da | |
David Peter | c47501ef7c | |
David Peter | 1d2d06ea25 | |
David Peter | 33beb7fc64 | |
David Peter | 8b96a1e99c | |
David Peter | f52dec11a4 | |
David Peter | 941f712975 | |
David Peter | 74d6990b4e | |
David Peter | 42e7e9ec64 | |
Tavian Barnes | 40b368e761 | |
Amir Zolfaghari | 16acdeb6ce | |
Tavian Barnes | 6782c21d52 | |
Ryan Zoeller | d00c8ba0d8 | |
David Peter | de27835264 | |
David Peter | 41affe18c4 | |
David Peter | dcde6d358f | |
David Peter | f57206a3a1 | |
Jackson Theel | f823eac672 | |
Jackson Theel | cbf3f11cf8 | |
Thayne McCombs | 306cd99273 | |
Thayne McCombs | 6fed29eda2 | |
dependabot[bot] | 4fecbe4ede | |
dependabot[bot] | 3ad15721e0 | |
Thayne McCombs | 7ec7c56a01 | |
Thayne McCombs | d3dc18ba65 | |
Thayne McCombs | 8ab6dd3e45 | |
dependabot[bot] | b311b01336 | |
Thayne McCombs | ecfcfe448d | |
dependabot[bot] | aa5c03834f | |
dependabot[bot] | 4269217e28 | |
dependabot[bot] | 4e82809624 | |
Bost | cf4964d0f8 | |
Hang Qian | 3f6ff2ca32 | |
ethsol | bbd8c78861 | |
ethsol | 9d93807512 | |
ethsol | 8a17ca25d6 | |
ethsol | 6faf9ff1e0 | |
Tavian Barnes | 37697f4ad3 | |
Thayne McCombs | 36361c39d2 | |
Thayne McCombs | 4349bfe985 | |
Thayne McCombs | 2f9a858b2f | |
Thayne McCombs | d64fca0a32 | |
dependabot[bot] | 803ea0a0b1 | |
dependabot[bot] | cfa768755f | |
dependabot[bot] | 9dcfa4a662 | |
dependabot[bot] | 7ab29e17a3 | |
dependabot[bot] | 7d2740d6cd | |
Jonathan Goren | 60c14b1af2 | |
Jonathan Goren | 47e30d3d4a | |
Thayne McCombs | 38b84d08d7 | |
Thayne McCombs | 50c0fa812f | |
Thayne McCombs | 3e201de9b0 | |
Max Coplan | db2fd00c4a | |
Thayne McCombs | c577b0838b | |
Thayne McCombs | 5a12a5e421 | |
Thayne McCombs | 9f39f1d75b | |
Thayne McCombs | 9fb0c5d372 | |
Thayne McCombs | e54e352035 | |
Thayne McCombs | f27332ee8d | |
Thayne McCombs | 0aee9b0fd9 | |
Jacob Chapman | 45f490a407 | |
David Peter | 0fd7ec5c2a | |
dependabot[bot] | ba473fc925 | |
Thayne McCombs | 6e5c8d9c20 | |
Thayne McCombs | b29e87ec30 | |
Tavian Barnes | 7db25a7b23 | |
Thayne McCombs | bbdb8b9d9e | |
Thayne McCombs | 6b8056ca86 | |
Thayne McCombs | 5ec55eed96 | |
dependabot[bot] | efedd9c4e0 | |
dependabot[bot] | 4e0b193ab6 | |
dependabot[bot] | 1ac2c38b6b | |
Thayne McCombs | eb111aa835 | |
Thayne McCombs | 25b6ac8b3e | |
Thayne McCombs | cdd771a018 | |
Thayne McCombs | ae861f7e4b | |
dependabot[bot] | 43f683469b | |
dependabot[bot] | 2395e7cac5 | |
dependabot[bot] | abaed6686d | |
dependabot[bot] | 1844ed6b8c | |
David Peter | 9f5ed8534e | |
David Peter | c48d02e0a9 | |
David Peter | b3399c239b | |
David Peter | 4377954cf9 | |
Thayne McCombs | 125cb81a5b | |
Thayne McCombs | 2e9be3e3f5 | |
Thayne McCombs | 43f276e073 | |
David Peter | ec38e23d58 | |
Tavian Barnes | 56060a5b6b | |
Tavian Barnes | 03548a847f | |
Thayne McCombs | 06cd391845 | |
Gabriel Barta | ef4abfc1d2 | |
David Peter | 72895675ef | |
David Peter | ccc6cc54b2 | |
David Peter | aa1c0250b7 | |
Thayne McCombs | d5f740c9d3 | |
David Peter | aa6933d9e3 | |
dependabot[bot] | c800c79976 | |
dependabot[bot] | 075c1af7d8 | |
dependabot[bot] | fbebaa299d | |
dependabot[bot] | f6bbeb35f5 | |
dependabot[bot] | 0dc49b07a0 | |
dependabot[bot] | c2e5075316 | |
Tavian Barnes | e7f192f1c6 | |
Yujia Qiao | e5145ffb98 | |
Ryan Lue | 30add71233 | |
Thayne McCombs | dade9736ac | |
eatradish | 639ed34791 | |
David Peter | da7ea79034 | |
Thayne McCombs | b7e077320d | |
Jonathan Goren | 22dbed0545 | |
Jonathan Goren | 3dc61b5f28 | |
Jonathan Goren | 1153e3e155 | |
Jonathan Goren | a26bd3232c | |
Jonathan Goren | 813a802b2c | |
Tavian Barnes | 81669f4c10 | |
Thayne McCombs | 5ea7cb7a05 | |
Tavian Barnes | 4baefb2f5a | |
Tavian Barnes | 97a8825b00 | |
Tavian Barnes | 7fe4bfaacb | |
dependabot[bot] | 1a6638ba23 | |
dependabot[bot] | 8772708aa7 | |
Tavian Barnes | a4bb734482 | |
Alexander Sieg | 5fbfdfadb8 | |
Tavian Barnes | 16ae03c3b4 | |
David Peter | 52f22557a5 | |
David Peter | 72cc9a876c | |
David Peter | 5d06491370 | |
David Peter | 67aaf82d37 | |
David Peter | f219da4b3a | |
Thayne McCombs | e990a13405 | |
Thayne McCombs | 7b7876e701 | |
Thayne McCombs | feac240eb5 | |
David Peter | fba6db5096 | |
tacoda | f2eb1be678 | |
David Peter | b211ded5de | |
David Peter | 92bd7850d0 | |
David Peter | 06c58b99f5 | |
David Peter | 776f8d0f11 | |
David Peter | 47421a49b8 | |
David Peter | f347379fb0 | |
David Peter | 4d7e34ad59 | |
David Peter | 359c0269ba | |
David Peter | 13cdfb32f8 | |
Shun Sakai | 0484486f3f | |
Jacob Mischka | f32060b0de | |
Tavian Barnes | fe992706ae | |
Tavian Barnes | 2b1bf471b1 | |
David Peter | cab31e280b | |
David Peter | 088e42deef | |
David Peter | ecdaf58e7f | |
David Peter | fc2a972082 | |
Tavian Barnes | 66e3ccc5e1 | |
David Peter | 21fd013073 | |
David Peter | a539181f1f | |
David Peter | cdc6a37ed6 | |
David Peter | 2570fbd04e | |
David Peter | 690976380d | |
David Peter | 828649a30d | |
Tavian Barnes | 20dc7a984d | |
Joseph Lee | 653bc0e55d | |
Tavian Barnes | e8fe1508e2 | |
Thayne McCombs | fd493eb709 | |
Tavian Barnes | f1b39d49c1 | |
Thayne McCombs | 1236b1dbcf | |
Devon Hollowood | 17dd2a6dfe | |
Jonah Caplan | bf9e6fd36e | |
Jonah Caplan | 46db1c4ef3 | |
Jonah Caplan | b9cb5d54a4 | |
Jonah Caplan | a41021b0d2 | |
Jonah Caplan | 82f04755bf | |
Jonah Caplan | c42bf4b904 | |
Jonah Caplan | fbc836b553 | |
Jonah Caplan | 91860bf682 | |
Jonah Caplan | a4a4709320 | |
Jonah Caplan | b6f8bc8ff8 | |
Jonah Caplan | 782fc278aa | |
Jonah Caplan | 12650a0913 | |
Jonah Caplan | 2e115df9e7 | |
Jonah Caplan | 953f586f26 | |
Jonah Caplan | 08d913f167 | |
Jonah Caplan | 6b92c5db90 | |
Tavian Barnes | 7b5b3ec47b | |
Niklas Mohrin | 02e9850112 | |
Niklas Mohrin | b8c575cc8f | |
Niklas Mohrin | 3de948ae0d | |
Niklas Mohrin | 45a86459b2 | |
Niklas Mohrin | a64a607fd8 | |
Thayne McCombs | 00eb6461cb | |
dependabot[bot] | feb969881b | |
dependabot[bot] | c06efe1317 | |
Tavian Barnes | 04fac46c5c | |
Tavian Barnes | e0d03d170e | |
Frieder Bluemle | fd1c3d376e | |
exploide | c3f786db43 | |
Niklas Mohrin | a5f17db53a | |
Tavian Barnes | f1a5aa0887 | |
Thayne McCombs | 3c619afe30 | |
Tavian Barnes | 476d404938 | |
William Correia | 43f5c8adc9 | |
Vukašin Stepanović | c749c95136 | |
Vukašin Stepanović | 3ebd78cf02 | |
Vukašin Stepanović | 8da936abd8 | |
Vukašin Stepanović | 668af05dca | |
David Peter | 3ba90dd768 | |
David Peter | 87caef9513 | |
Thayne McCombs | 78e21395b8 | |
Thayne McCombs | 1b52948e42 | |
Asha20 | 334488cab7 | |
a1346054 | 1c72f80ff5 | |
a1346054 | 5b4869a940 | |
Thayne McCombs | 9bc8fabba0 | |
Thayne McCombs | 8984e57dd6 | |
Thayne McCombs | 049232439a | |
Thayne McCombs | e4730603d0 | |
Max Triano | 154cd6e329 | |
Max Triano | 394e967b5f | |
Max Triano | 0f0fa76c81 | |
Thayne McCombs | 1795e0e9dd | |
Thayne McCombs | 500f404c4f | |
Klim Tsoutsman | 6a18b36037 | |
Thayne McCombs | 115ae93df9 | |
David Peter | c06c9952b6 | |
Peter Newman | 42dce35dd7 | |
Peter Newman | c2e4be3d12 | |
Vukašin Stepanović | 37852aa388 | |
Sean Joseph | 25d7c4801f | |
Sean Joseph | d34e18a58c | |
Jacob Mischka | ba60a163fe | |
himself65 | 4e04d13215 | |
David Peter | c37592b0b7 | |
David Peter | 224b7f2354 | |
William Correia | f8ae334ca9 | |
David Peter | b5344dac30 | |
David Peter | 018556a367 | |
David Peter | aeff525c30 | |
David Peter | 2d398dc4a7 | |
David Peter | 6e44828cc8 | |
David Peter | b507449146 | |
David Peter | d9697d1486 | |
David Peter | 515e0ee469 | |
David Peter | a0ca460901 | |
Kaleb Olson | 693325bcf2 | |
dependabot[bot] | ab81cad3b5 | |
dependabot[bot] | 70850629e9 | |
dependabot[bot] | 8b0db189a1 | |
dependabot[bot] | ad1b267601 | |
dependabot[bot] | 04829c287d | |
dependabot[bot] | ca92bea0d7 | |
dependabot[bot] | 252beb8df7 | |
dependabot[bot] | b80764b19a | |
Daniel Bast | 148fa64a73 | |
Daniel Bast | 6ce55624f7 | |
David Peter | 708efaa812 | |
David Peter | a7a4499d1d | |
David Peter | 2a2126c40a | |
David Peter | db30bfc2b7 | |
David Peter | 81a051f07c | |
David Peter | 95d6470956 | |
David Peter | e59a2001cc | |
David Peter | c60338a28b | |
David Peter | 16464a35b1 | |
David Peter | 1d85e31260 | |
Thayne McCombs | 024309f378 | |
Luca Poldelmengo | ac4ded5b27 | |
Sudeshna Sur | c9873b4b82 | |
Kaspar | 07ebce9419 | |
Delapouite | f60b7687a2 | |
Andrew Mitchell | cb385a4822 | |
David Peter | efc71bc00b | |
Allen Wild | 1a3615df9c | |
David Peter | cf7dd43f80 | |
David Peter | 4ebb3bbc81 | |
David Peter | d283aba52e | |
David Peter | 5423c45660 | |
David Peter | 98738d2ac1 | |
David Peter | 371794047a | |
David Peter | c43c931985 | |
David Peter | 41dc622fb5 | |
David Peter | 266ba7893a | |
David Peter | 194dbe2033 | |
David Peter | 97f1558daf | |
David Peter | f00af363d1 | |
David Peter | 2155d19be0 | |
David Peter | 3e728f3edd | |
David Peter | 6645131234 | |
Travis Stewart | 189d6a907a | |
David Peter | 8a83ad41c8 | |
Chinmay | 5feb89aa99 | |
Allen Wild | 8a532e5196 | |
johnwdjiang | b8089f8d58 | |
David Peter | 85fb7389af | |
Archer Stéphane | eb28030f87 | |
Archer Stéphane | 5d2e011e90 | |
Archer Stéphane | 64bde11887 | |
Richard Taityr | b928af7d9c | |
Chathika Weerasuriya | 9bc20e38b1 | |
sharkdp | 6a3aae52fd | |
Travis Stewart | 7dc81381b0 | |
sharkdp | c7cf2d1104 | |
sharkdp | 91eaff056f | |
sharkdp | cadaef3f07 | |
sharkdp | 17bd256ae6 | |
sharkdp | ecfd8cea41 | |
sharkdp | d2659de782 | |
sharkdp | d205a7ff9e | |
sharkdp | 296300ce46 | |
sharkdp | ea48a96945 | |
sharkdp | f2fec45099 | |
sharkdp | 4faa201c89 | |
sharkdp | 54603501ec | |
sharkdp | 45c66e7f1b | |
xanonid | 441a235a0f | |
xanonid | 2cd3de8b94 | |
Marco Sirabella | 6830b3dee9 | |
Platon Pronko | f3edaf0e4d | |
Platon Pronko | e97a1e90cb | |
Andreas Lappe | 2338b3bc81 | |
sharkdp | 9412648ee9 | |
sharkdp | 0a22dcc044 | |
Nick Flueckiger | 24402dda7c | |
Nicholas Bailey | 4f8697c407 | |
sharkdp | b2fa188029 | |
David Peter | e0adb45d08 | |
Matthias Reitinger | ec4cc981fc | |
Amrit Rathie | 06eb231fbd | |
Amrit Rathie | fd1ddc9d08 | |
Matthias Reitinger | e852009876 | |
Allen Wild | 99934ab98c | |
Craig Hills | 633d486ac1 | |
Craig Hills | 4d2472bfab | |
Fredrik Fornwall | a851570b15 | |
sharkdp | f064e41bf1 | |
Sebastián Mancilla | ef1bfc7508 | |
Matthias Reitinger | a38da63a34 | |
Matthias Reitinger | 9a49d0d6d0 | |
gorogoroumaru | 74c3431a2b | |
yuuji.yaginuma | 1b4e1e9c1a | |
David Peter | 6f2c8cdf91 | |
David Peter | e2f90dba44 | |
sharkdp | 5648597a61 | |
sharkdp | b3258e00df | |
sharkdp | 41d7cc33ca | |
sharkdp | a81fef9992 | |
sharkdp | 53c338d71f | |
Dmitry Marakasov | d77daa89a2 |
|
@ -0,0 +1 @@
|
|||
github: [sharkdp, tavianator]
|
|
@ -1,23 +0,0 @@
|
|||
---
|
||||
name: Bug Report
|
||||
about: Report a bug.
|
||||
title: ""
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug you encountered:**
|
||||
|
||||
|
||||
**Describe what you expected to happen:**
|
||||
|
||||
|
||||
**What version of `fd` are you using?**
|
||||
<!-- paste the output of `fd --version` here -->
|
||||
|
||||
**Which operating system / distribution are you on?**
|
||||
<!--
|
||||
Unix: paste the output of `uname -srm` and `lsb_release -a` here.
|
||||
Windows: please tell us your Windows version
|
||||
-->
|
|
@ -0,0 +1,42 @@
|
|||
name: Bug Report
|
||||
description: Report a bug.
|
||||
title: "[BUG] "
|
||||
labels: bug
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Please check out the [troubleshooting section](https://github.com/sharkdp/fd#troubleshooting) first.
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Checks
|
||||
options:
|
||||
- label: I have read the troubleshooting section and still think this is a bug.
|
||||
required: true
|
||||
- type: textarea
|
||||
id: bug
|
||||
attributes:
|
||||
label: "Describe the bug you encountered:"
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: "Describe what you expected to happen:"
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: "What version of `fd` are you using?"
|
||||
placeholder: "paste the output of `fd --version` here"
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: os
|
||||
attributes:
|
||||
label: Which operating system / distribution are you on?
|
||||
placeholder: |
|
||||
Unix: paste the output of `uname -srm` and `lsb_release -a` here.
|
||||
Windows: please tell us your Windows version
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
|
@ -1,2 +1 @@
|
|||
blank_issues_enabled: true
|
||||
|
||||
|
|
|
@ -6,5 +6,3 @@ labels: feature-request
|
|||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,10 @@
|
|||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "cargo"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "monthly"
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
|
@ -0,0 +1,364 @@
|
|||
name: CICD
|
||||
|
||||
env:
|
||||
CICD_INTERMEDIATES_DIR: "_cicd-intermediates"
|
||||
MSRV_FEATURES: "--all-features"
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
crate_metadata:
|
||||
name: Extract crate metadata
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Extract crate information
|
||||
id: crate_metadata
|
||||
run: |
|
||||
echo "name=fd" | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"version=" + .packages[0].version' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"maintainer=" + .packages[0].authors[0]' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"homepage=" + .packages[0].homepage' | tee -a $GITHUB_OUTPUT
|
||||
cargo metadata --no-deps --format-version 1 | jq -r '"msrv=" + .packages[0].rust_version' | tee -a $GITHUB_OUTPUT
|
||||
outputs:
|
||||
name: ${{ steps.crate_metadata.outputs.name }}
|
||||
version: ${{ steps.crate_metadata.outputs.version }}
|
||||
maintainer: ${{ steps.crate_metadata.outputs.maintainer }}
|
||||
homepage: ${{ steps.crate_metadata.outputs.homepage }}
|
||||
msrv: ${{ steps.crate_metadata.outputs.msrv }}
|
||||
|
||||
ensure_cargo_fmt:
|
||||
name: Ensure 'cargo fmt' has been run
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt
|
||||
- uses: actions/checkout@v4
|
||||
- run: cargo fmt -- --check
|
||||
|
||||
lint_check:
|
||||
name: Ensure 'cargo clippy' has no warnings
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: clippy
|
||||
- uses: actions/checkout@v4
|
||||
- run: cargo clippy --all-targets --all-features -- -Dwarnings
|
||||
|
||||
min_version:
|
||||
name: Minimum supported rust version
|
||||
runs-on: ubuntu-20.04
|
||||
needs: crate_metadata
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install rust toolchain (v${{ needs.crate_metadata.outputs.msrv }})
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: ${{ needs.crate_metadata.outputs.msrv }}
|
||||
components: clippy
|
||||
- name: Run clippy (on minimum supported rust version to prevent warnings we can't fix)
|
||||
run: cargo clippy --locked --all-targets ${{ env.MSRV_FEATURES }}
|
||||
- name: Run tests
|
||||
run: cargo test --locked ${{ env.MSRV_FEATURES }}
|
||||
|
||||
build:
|
||||
name: ${{ matrix.job.target }} (${{ matrix.job.os }})
|
||||
runs-on: ${{ matrix.job.os }}
|
||||
needs: crate_metadata
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- { target: aarch64-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: aarch64-unknown-linux-musl , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-gnueabihf , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: arm-unknown-linux-musleabihf, os: ubuntu-20.04, use-cross: true }
|
||||
- { target: i686-pc-windows-msvc , os: windows-2019 }
|
||||
- { target: i686-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: i686-unknown-linux-musl , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: x86_64-apple-darwin , os: macos-12 }
|
||||
- { target: aarch64-apple-darwin , os: macos-14 }
|
||||
- { target: x86_64-pc-windows-gnu , os: windows-2019 }
|
||||
- { target: x86_64-pc-windows-msvc , os: windows-2019 }
|
||||
- { target: x86_64-unknown-linux-gnu , os: ubuntu-20.04, use-cross: true }
|
||||
- { target: x86_64-unknown-linux-musl , os: ubuntu-20.04, use-cross: true }
|
||||
env:
|
||||
BUILD_CMD: cargo
|
||||
steps:
|
||||
- name: Checkout source code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install prerequisites
|
||||
shell: bash
|
||||
run: |
|
||||
case ${{ matrix.job.target }} in
|
||||
arm-unknown-linux-*) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
|
||||
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install gcc-aarch64-linux-gnu ;;
|
||||
esac
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.job.target }}
|
||||
|
||||
- name: Install cross
|
||||
if: matrix.job.use-cross
|
||||
uses: taiki-e/install-action@v2
|
||||
with:
|
||||
tool: cross
|
||||
|
||||
- name: Overwrite build command env variable
|
||||
if: matrix.job.use-cross
|
||||
shell: bash
|
||||
run: echo "BUILD_CMD=cross" >> $GITHUB_ENV
|
||||
|
||||
- name: Show version information (Rust, cargo, GCC)
|
||||
shell: bash
|
||||
run: |
|
||||
gcc --version || true
|
||||
rustup -V
|
||||
rustup toolchain list
|
||||
rustup default
|
||||
cargo -V
|
||||
rustc -V
|
||||
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: $BUILD_CMD build --locked --release --target=${{ matrix.job.target }}
|
||||
|
||||
- name: Set binary name & path
|
||||
id: bin
|
||||
shell: bash
|
||||
run: |
|
||||
# Figure out suffix of binary
|
||||
EXE_suffix=""
|
||||
case ${{ matrix.job.target }} in
|
||||
*-pc-windows-*) EXE_suffix=".exe" ;;
|
||||
esac;
|
||||
|
||||
# Setup paths
|
||||
BIN_NAME="${{ needs.crate_metadata.outputs.name }}${EXE_suffix}"
|
||||
BIN_PATH="target/${{ matrix.job.target }}/release/${BIN_NAME}"
|
||||
|
||||
# Let subsequent steps know where to find the binary
|
||||
echo "BIN_PATH=${BIN_PATH}" >> $GITHUB_OUTPUT
|
||||
echo "BIN_NAME=${BIN_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set testing options
|
||||
id: test-options
|
||||
shell: bash
|
||||
run: |
|
||||
# test only library unit tests and binary for arm-type targets
|
||||
unset CARGO_TEST_OPTIONS
|
||||
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-* | aarch64-*) CARGO_TEST_OPTIONS="--bin ${{ needs.crate_metadata.outputs.name }}" ;; esac;
|
||||
echo "CARGO_TEST_OPTIONS=${CARGO_TEST_OPTIONS}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Run tests
|
||||
shell: bash
|
||||
run: $BUILD_CMD test --locked --target=${{ matrix.job.target }} ${{ steps.test-options.outputs.CARGO_TEST_OPTIONS}}
|
||||
|
||||
- name: Generate completions
|
||||
id: completions
|
||||
shell: bash
|
||||
run: make completions
|
||||
|
||||
- name: Create tarball
|
||||
id: package
|
||||
shell: bash
|
||||
run: |
|
||||
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
|
||||
PKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-v${{ needs.crate_metadata.outputs.version }}-${{ matrix.job.target }}
|
||||
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
|
||||
echo "PKG_NAME=${PKG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
PKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/package"
|
||||
ARCHIVE_DIR="${PKG_STAGING}/${PKG_BASENAME}/"
|
||||
mkdir -p "${ARCHIVE_DIR}"
|
||||
|
||||
# Binary
|
||||
cp "${{ steps.bin.outputs.BIN_PATH }}" "$ARCHIVE_DIR"
|
||||
|
||||
# README, LICENSE and CHANGELOG files
|
||||
cp "README.md" "LICENSE-MIT" "LICENSE-APACHE" "CHANGELOG.md" "$ARCHIVE_DIR"
|
||||
|
||||
# Man page
|
||||
cp 'doc/${{ needs.crate_metadata.outputs.name }}.1' "$ARCHIVE_DIR"
|
||||
|
||||
# Autocompletion files
|
||||
cp -r autocomplete "${ARCHIVE_DIR}"
|
||||
|
||||
# base compressed package
|
||||
pushd "${PKG_STAGING}/" >/dev/null
|
||||
case ${{ matrix.job.target }} in
|
||||
*-pc-windows-*) 7z -y a "${PKG_NAME}" "${PKG_BASENAME}"/* | tail -2 ;;
|
||||
*) tar czf "${PKG_NAME}" "${PKG_BASENAME}"/* ;;
|
||||
esac;
|
||||
popd >/dev/null
|
||||
|
||||
# Let subsequent steps know where to find the compressed package
|
||||
echo "PKG_PATH=${PKG_STAGING}/${PKG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create Debian package
|
||||
id: debian-package
|
||||
shell: bash
|
||||
if: startsWith(matrix.job.os, 'ubuntu')
|
||||
run: |
|
||||
COPYRIGHT_YEARS="2018 - "$(date "+%Y")
|
||||
DPKG_STAGING="${{ env.CICD_INTERMEDIATES_DIR }}/debian-package"
|
||||
DPKG_DIR="${DPKG_STAGING}/dpkg"
|
||||
mkdir -p "${DPKG_DIR}"
|
||||
|
||||
DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}
|
||||
DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }}-musl
|
||||
case ${{ matrix.job.target }} in *-musl*) DPKG_BASENAME=${{ needs.crate_metadata.outputs.name }}-musl ; DPKG_CONFLICTS=${{ needs.crate_metadata.outputs.name }} ;; esac;
|
||||
DPKG_VERSION=${{ needs.crate_metadata.outputs.version }}
|
||||
|
||||
unset DPKG_ARCH
|
||||
case ${{ matrix.job.target }} in
|
||||
aarch64-*-linux-*) DPKG_ARCH=arm64 ;;
|
||||
arm-*-linux-*hf) DPKG_ARCH=armhf ;;
|
||||
i686-*-linux-*) DPKG_ARCH=i686 ;;
|
||||
x86_64-*-linux-*) DPKG_ARCH=amd64 ;;
|
||||
*) DPKG_ARCH=notset ;;
|
||||
esac;
|
||||
|
||||
DPKG_NAME="${DPKG_BASENAME}_${DPKG_VERSION}_${DPKG_ARCH}.deb"
|
||||
echo "DPKG_NAME=${DPKG_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
# Binary
|
||||
install -Dm755 "${{ steps.bin.outputs.BIN_PATH }}" "${DPKG_DIR}/usr/bin/${{ steps.bin.outputs.BIN_NAME }}"
|
||||
|
||||
# Man page
|
||||
install -Dm644 'doc/${{ needs.crate_metadata.outputs.name }}.1' "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/man/man1/${{ needs.crate_metadata.outputs.name }}.1"
|
||||
|
||||
# Autocompletion files
|
||||
install -Dm644 'autocomplete/fd.bash' "${DPKG_DIR}/usr/share/bash-completion/completions/${{ needs.crate_metadata.outputs.name }}"
|
||||
install -Dm644 'autocomplete/fd.fish' "${DPKG_DIR}/usr/share/fish/vendor_completions.d/${{ needs.crate_metadata.outputs.name }}.fish"
|
||||
install -Dm644 'autocomplete/_fd' "${DPKG_DIR}/usr/share/zsh/vendor-completions/_${{ needs.crate_metadata.outputs.name }}"
|
||||
|
||||
# README and LICENSE
|
||||
install -Dm644 "README.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/README.md"
|
||||
install -Dm644 "LICENSE-MIT" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-MIT"
|
||||
install -Dm644 "LICENSE-APACHE" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/LICENSE-APACHE"
|
||||
install -Dm644 "CHANGELOG.md" "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
|
||||
gzip -n --best "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/changelog"
|
||||
|
||||
cat > "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright" <<EOF
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: ${{ needs.crate_metadata.outputs.name }}
|
||||
Source: ${{ needs.crate_metadata.outputs.homepage }}
|
||||
|
||||
Files: *
|
||||
Copyright: ${{ needs.crate_metadata.outputs.maintainer }}
|
||||
Copyright: $COPYRIGHT_YEARS ${{ needs.crate_metadata.outputs.maintainer }}
|
||||
License: Apache-2.0 or MIT
|
||||
|
||||
License: Apache-2.0
|
||||
On Debian systems, the complete text of the Apache-2.0 can be found in the
|
||||
file /usr/share/common-licenses/Apache-2.0.
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
EOF
|
||||
chmod 644 "${DPKG_DIR}/usr/share/doc/${DPKG_BASENAME}/copyright"
|
||||
|
||||
# control file
|
||||
mkdir -p "${DPKG_DIR}/DEBIAN"
|
||||
cat > "${DPKG_DIR}/DEBIAN/control" <<EOF
|
||||
Package: ${DPKG_BASENAME}
|
||||
Version: ${DPKG_VERSION}
|
||||
Section: utils
|
||||
Priority: optional
|
||||
Maintainer: ${{ needs.crate_metadata.outputs.maintainer }}
|
||||
Homepage: ${{ needs.crate_metadata.outputs.homepage }}
|
||||
Architecture: ${DPKG_ARCH}
|
||||
Provides: ${{ needs.crate_metadata.outputs.name }}
|
||||
Conflicts: ${DPKG_CONFLICTS}
|
||||
Description: simple, fast and user-friendly alternative to find
|
||||
fd is a program to find entries in your filesystem.
|
||||
It is a simple, fast and user-friendly alternative to find.
|
||||
While it does not aim to support all of find's powerful functionality, it provides
|
||||
sensible (opinionated) defaults for a majority of use cases.
|
||||
EOF
|
||||
|
||||
DPKG_PATH="${DPKG_STAGING}/${DPKG_NAME}"
|
||||
echo "DPKG_PATH=${DPKG_PATH}" >> $GITHUB_OUTPUT
|
||||
|
||||
# build dpkg
|
||||
fakeroot dpkg-deb --build "${DPKG_DIR}" "${DPKG_PATH}"
|
||||
|
||||
- name: "Artifact upload: tarball"
|
||||
uses: actions/upload-artifact@master
|
||||
with:
|
||||
name: ${{ steps.package.outputs.PKG_NAME }}
|
||||
path: ${{ steps.package.outputs.PKG_PATH }}
|
||||
|
||||
- name: "Artifact upload: Debian package"
|
||||
uses: actions/upload-artifact@master
|
||||
if: steps.debian-package.outputs.DPKG_NAME
|
||||
with:
|
||||
name: ${{ steps.debian-package.outputs.DPKG_NAME }}
|
||||
path: ${{ steps.debian-package.outputs.DPKG_PATH }}
|
||||
|
||||
- name: Check for release
|
||||
id: is-release
|
||||
shell: bash
|
||||
run: |
|
||||
unset IS_RELEASE ; if [[ $GITHUB_REF =~ ^refs/tags/v[0-9].* ]]; then IS_RELEASE='true' ; fi
|
||||
echo "IS_RELEASE=${IS_RELEASE}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Publish archives and packages
|
||||
uses: softprops/action-gh-release@v2
|
||||
if: steps.is-release.outputs.IS_RELEASE
|
||||
with:
|
||||
files: |
|
||||
${{ steps.package.outputs.PKG_PATH }}
|
||||
${{ steps.debian-package.outputs.DPKG_PATH }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
winget:
|
||||
name: Publish to Winget
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
steps:
|
||||
- uses: vedantmgoyal2009/winget-releaser@v2
|
||||
with:
|
||||
identifier: sharkdp.fd
|
||||
installers-regex: '-pc-windows-msvc\.zip$'
|
||||
token: ${{ secrets.WINGET_TOKEN }}
|
|
@ -1,2 +1,3 @@
|
|||
target/
|
||||
/autocomplete/
|
||||
**/*.rs.bk
|
||||
|
|
106
.travis.yml
106
.travis.yml
|
@ -1,106 +0,0 @@
|
|||
language: rust
|
||||
matrix:
|
||||
include:
|
||||
# Stable channel.
|
||||
- os: linux
|
||||
rust: stable
|
||||
env: TARGET=x86_64-unknown-linux-gnu
|
||||
- os: linux
|
||||
rust: stable
|
||||
env: TARGET=x86_64-unknown-linux-musl
|
||||
- os: linux
|
||||
rust: stable
|
||||
env: TARGET=i686-unknown-linux-gnu
|
||||
- os: linux
|
||||
rust: stable
|
||||
env: TARGET=i686-unknown-linux-musl
|
||||
- os: osx
|
||||
osx_image: xcode11.4 # Catalina
|
||||
rust: stable
|
||||
env: TARGET=x86_64-apple-darwin
|
||||
- os: linux
|
||||
rust: stable
|
||||
env:
|
||||
- TARGET=arm-unknown-linux-gnueabihf
|
||||
- CC_arm_unknown_linux_gnueabihf=/usr/bin/arm-linux-gnueabihf-gcc-4.8
|
||||
- CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc-4.8
|
||||
- os: linux
|
||||
rust: stable
|
||||
env:
|
||||
- TARGET=arm-unknown-linux-musleabihf
|
||||
- CARGO_TARGET_ARM_UNKNOWN_LINUX_MUSLEABIHF_LINKER=arm-linux-gnueabihf-gcc-4.8
|
||||
|
||||
# Beta channel.
|
||||
- os: linux
|
||||
rust: beta
|
||||
env: TARGET=x86_64-unknown-linux-gnu
|
||||
|
||||
# Minimum Rust supported channel.
|
||||
- os: linux
|
||||
rust: 1.36.0
|
||||
env: TARGET=x86_64-unknown-linux-gnu
|
||||
- os: linux
|
||||
rust: 1.36.0
|
||||
env: TARGET=x86_64-unknown-linux-musl
|
||||
- os: linux
|
||||
rust: 1.36.0
|
||||
env: TARGET=i686-unknown-linux-gnu
|
||||
- os: linux
|
||||
rust: 1.36.0
|
||||
env: TARGET=i686-unknown-linux-musl
|
||||
- os: osx
|
||||
rust: 1.36.0
|
||||
env: TARGET=x86_64-apple-darwin
|
||||
|
||||
sudo: required
|
||||
|
||||
before_install:
|
||||
- ci/before_install.bash
|
||||
|
||||
env:
|
||||
global:
|
||||
# Default target on travis-ci.
|
||||
# Used as conditional check in the install stage
|
||||
- HOST=x86_64-unknown-linux-gnu
|
||||
# Used on the deployment script
|
||||
- PROJECT_NAME=fd
|
||||
|
||||
install:
|
||||
# prevent target re-add error from rustup
|
||||
- if [[ $TRAVIS_OS_NAME = linux && $HOST != $TARGET ]]; then rustup target add $TARGET; fi
|
||||
|
||||
script:
|
||||
- ci/script.bash
|
||||
|
||||
before_deploy:
|
||||
- bash ci/before_deploy.bash
|
||||
|
||||
deploy:
|
||||
provider: releases
|
||||
# NOTE updating the `api_key.secure`
|
||||
# - go to: https://github.com/settings/tokens/new
|
||||
# - generate new token using `public_repo` scope
|
||||
# - encrypt it using: `travis encrypt API_KEY_HERE`
|
||||
# - paste the output below
|
||||
api_key:
|
||||
secure: "RyFdh2lpDmaNhPar7ezsb18Xz+6XFM40y7cZCDRML+Sk+eYK1xtDNfEhDRJU5Qo1ReVsByds/QJTSXr2KmZPk3lXwG3SiN7UtrLUxCxFr6qrcM/iujlKTf5UxeRklkzPXxnH95DEyEgxvgbVhWTGVDWoyMnrVQXZKDy6z1iAiYB5h2Zl1rs+MRb/Enlt5q6XIKAlG0ppGtl8CfYudq5ZiqfJaMWTt9SWm2YskC8FeMc0S3IM6/EhTvaNYLdaarFqVWQEVql+6oCuL3ayPzmGyxLdxM37tIMNQ0f97zxqWodacXTG5ULdRD8if1l/SmTujrtjbZ0KWRjsjOq4vBtxBJKGdprcSiB0xH/hToqqtTSO0z5FPXi5cB8UlK6YLDDHcP3kXNer8CYMLI1VPaUDLTF57/0/RPi2DZiiGfZsIAS6PsICbHdTQVzxQckM4lN1vnAGgkhXIMbztml21pv+QrGy98OZJ0ubf5ztgQhpT0WPH4JXT8M6htsoo8dZf8lQ5aLfmW9RKePJDqixQwPqmimPIkrlxRDTDGII0ZAZws7l779eOLmEcM2tH2HbsUKUCZIG/pRHLSlP45Jn2bULGzuXZ2daq70z6zvIbom0CUzSXIvdTXEZI2AM5RBvPYGGaKI8YlxgRdQvJp3h0BzPdFOXI3RAxscCY7PJpa/RdIg="
|
||||
# for uploading multiple files
|
||||
file_glob: true
|
||||
# NOTE explanation on each env variable
|
||||
# - PROJECT_NAME: name of the project, set on the `env.global` above
|
||||
# - TRAVIS_TAG: tag name that the build is being deployed for, usually the version number
|
||||
# - TARGET: target triple of the build
|
||||
file:
|
||||
- $PROJECT_NAME-$TRAVIS_TAG-$TARGET.*
|
||||
- $PROJECT_NAME*.deb
|
||||
# don't delete artifacts from previous stage
|
||||
skip_cleanup: true
|
||||
on:
|
||||
# deploy only if we push a tag
|
||||
tags: true
|
||||
# deploy only on stable channel that has TARGET env variable sets
|
||||
condition: $TRAVIS_RUST_VERSION = stable && $TARGET != ""
|
||||
|
||||
notifications:
|
||||
email:
|
||||
on_success: never
|
325
CHANGELOG.md
325
CHANGELOG.md
|
@ -1,10 +1,305 @@
|
|||
# Upcoming release
|
||||
# 10.1.0
|
||||
|
||||
## Features
|
||||
|
||||
- Allow passing an optional argument to `--strip-cwd-prefix` of "always", "never", or "auto". to force whether the cwd prefix is stripped or not.
|
||||
- Add a `--format` option which allows using a format template for direct output similar to the template used for `--exec`. (#1043)
|
||||
|
||||
## Bugfixes
|
||||
## Changes
|
||||
- Fix aarch64 page size again. This time it should actually work. (#1085, #1549) (@tavianator)
|
||||
|
||||
|
||||
## Other
|
||||
|
||||
- aarch64-apple-darwin target added to builds on the release page. Note that this is a tier 2 rust target.
|
||||
|
||||
# v10.0.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add `dir` as an alias to `directory` when using `-t` / `--type`, see #1460 and #1464 (@Ato2207).
|
||||
- Add support for @%s date format in time filters similar to GNU date (seconds since Unix epoch for --older/--newer), see #1493 (@nabellows)
|
||||
- Breaking: No longer automatically ignore `.git` when using `--hidden` with vcs ignore enabled. This reverts the change in v9.0.0. While this feature
|
||||
was often useful, it also broke some existing workflows, and there wasn't a good way to opt out of it. And there isn't really a good way for us to add
|
||||
a way to opt out of it. And you can easily get similar behavior by adding `.git/` to your global fdignore file.
|
||||
See #1457.
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Respect NO_COLOR environment variable with `--list-details` option. (#1455)
|
||||
- Fix bug that would cause hidden files to be included despite gitignore rules
|
||||
if search path is "." (#1461, BurntSushi/ripgrep#2711).
|
||||
- aarch64 builds now use 64k page sizes with jemalloc. This fixes issues on some systems, such as ARM Macs that
|
||||
have a larger system page size than the system that the binary was built on. (#1547)
|
||||
- Address [CVE-2024-24576](https://blog.rust-lang.org/2024/04/09/cve-2024-24576.html), by increasing minimum rust version.
|
||||
|
||||
|
||||
## Changes
|
||||
- Minimum supported rust version is now 1.77.2
|
||||
|
||||
|
||||
# v9.0.0
|
||||
|
||||
## Performance
|
||||
|
||||
- Performance has been *significantly improved*, both due to optimizations in the underlying `ignore`
|
||||
crate (#1429), and in `fd` itself (#1422, #1408, #1362) - @tavianator.
|
||||
[Benchmarks results](https://gist.github.com/tavianator/32edbe052f33ef60570cf5456b59de81) show gains
|
||||
of 6-8x for full traversals of smaller directories (100k files) and up to 13x for larger directories (1M files).
|
||||
|
||||
- The default number of threads is now constrained to be at most 64. This should improve startup time on
|
||||
systems with many CPU cores. (#1203, #1410, #1412, #1431) - @tmccombs and @tavianator
|
||||
|
||||
- New flushing behavior when writing output to stdout, providing better performance for TTY and non-TTY
|
||||
use cases, see #1452 and #1313 (@tavianator).
|
||||
|
||||
## Features
|
||||
|
||||
- Support character and block device file types, see #1213 and #1336 (@cgzones)
|
||||
- Breaking: `.git/` is now ignored by default when using `--hidden` / `-H`, use `--no-ignore` / `-I` or
|
||||
`--no-ignore-vcs` to override, see #1387 and #1396 (@skoriop)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix `NO_COLOR` support, see #1421 (@acuteenvy)
|
||||
|
||||
## Other
|
||||
|
||||
- Fixed documentation typos, see #1409 (@marcospb19)
|
||||
|
||||
## Thanks
|
||||
|
||||
Special thanks to @tavianator for his incredible work on performance in the `ignore` crate and `fd` itself.
|
||||
|
||||
|
||||
|
||||
# v8.7.1
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- `-1` properly conflicts with the exec family of options.
|
||||
- `--max-results` overrides `-1`
|
||||
- `--quiet` properly conflicts with the exec family of options. This used to be the case, but broke during the switch to clap-derive
|
||||
- `--changed-within` now accepts a space as well as a "T" as the separator between date and time (due to update of chrono dependency)
|
||||
|
||||
## Other
|
||||
- Many dependencies were updated
|
||||
- Some documentation was updated and fixed
|
||||
|
||||
# v8.7.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add flag --no-require-git to always respect gitignore files, see #1216 (@vegerot)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix logic for when to use global ignore file. There was a bug where the only case where the
|
||||
global ignore file wasn't processed was if `--no-ignore` was passed, but neither `--unrestricted`
|
||||
nor `--no-global-ignore-file` is passed. See #1209
|
||||
|
||||
# v8.6.0
|
||||
|
||||
## Features
|
||||
|
||||
- New `--and <pattern>` option to add additional patterns that must also be matched. See #315
|
||||
and #1139 (@Uthar)
|
||||
- Added `--changed-after` as alias for `--changed-within`, to have a name consistent with `--changed-before`.
|
||||
|
||||
|
||||
## Changes
|
||||
|
||||
- Breaking: On Unix-like systems, `--type executable` now additionally checks if
|
||||
the file is executable by the current user, see #1106 and #1169 (@ptipiak)
|
||||
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Use fd instead of fd.exe for Powershell completions (when completions are generated on windows)
|
||||
|
||||
|
||||
## Other
|
||||
|
||||
|
||||
# v8.5.3
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix completion generation to not include full path of fd command
|
||||
- Fix build error if completions feature is disabled
|
||||
|
||||
# v8.5.2
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix --owner option value parsing, see #1163 and #1164 (@tmccombs)
|
||||
|
||||
|
||||
# v8.5.1
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fix --threads/-j option value parsing, see #1160 and #1162 (@sharkdp)
|
||||
|
||||
|
||||
# v8.5.0
|
||||
|
||||
## Features
|
||||
|
||||
- `--type executable`/`-t` now works on Windows, see #1051 and #1061 (@tavianator)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Fixed differences between piped / non-piped output. This changes `fd`s behavior back to what we
|
||||
had before 8.3.0, i.e. there will be no leading `./` prefixes, unless `--exec`/`-x`,
|
||||
`--exec-batch`/`-X`, or `--print0`/`-0` are used. `--strip-cwd-prefix` can be used to strip that
|
||||
prefix in those cases. See #1046, #1115, and #1121 (@tavianator)
|
||||
- `fd` could previously crash with a panic due to a race condition in Rust's standard library
|
||||
(see https://github.com/rust-lang/rust/issues/39364). This has been fixed by switching to a different
|
||||
message passing implementation, see #1060 and #1146 (@tavianator)
|
||||
- `fd`s memory usage will not grow unboundedly on huge directory trees, see #1146 (@tavianator)
|
||||
- fd returns an error when current working directory does not exist while a search path is
|
||||
specified, see #1072 (@vijfhoek)
|
||||
- Improved "command not found" error message, see #1083 and #1109 (@themkat)
|
||||
- Preserve command exit codes when using `--exec-batch`, see #1136 and #1137 (@amesgen)
|
||||
|
||||
## Changes
|
||||
|
||||
- No leading `./` prefix for non-interactive results, see above.
|
||||
- fd now colorizes paths in parallel, significantly improving performance, see #1148 (@tavianator)
|
||||
- fd can now avoid `stat` syscalls even when colorizing paths, as long as the color scheme doesn't
|
||||
require metadata, see #1148 (@tavianator)
|
||||
- The statically linked `musl` versions of `fd` now use `jemalloc`, leading to a significant performance
|
||||
improvement, see #1062 (@tavianator)
|
||||
|
||||
## Other
|
||||
|
||||
- Added link back to GitHub in man page and `--help` text, see #1086 (@scottchiefbaker)
|
||||
- Major update in how `fd` handles command line options internally, see #1067 (@tmccombs)
|
||||
|
||||
# v8.4.0
|
||||
|
||||
## Features
|
||||
|
||||
- Support multiple `--exec <cmd>` instances, see #406 and #960 (@tmccombs)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- "Argument list too long" errors can not appear anymore when using `--exec-batch`/`-X`, as the command invocations are automatically batched at the maximum possible size, even if `--batch-size` is not given. See #410 and #1020 (@tavianator)
|
||||
|
||||
## Changes
|
||||
|
||||
- Directories are now printed with an additional path separator at the end: `foo/bar/`, see #436 and #812 (@yyogo)
|
||||
- The `-u` flag was changed to be equivalent to `-HI` (previously, a single `-u` was only equivalent to `-I`). Additional `-u` flags are still allowed, but ignored. See #840 and #986 (@jacksontheel)
|
||||
|
||||
## Other
|
||||
|
||||
- Added installation instructions for RHEL8, see #989 (@ethsol)
|
||||
|
||||
|
||||
# v8.3.2
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Invalid absolute path on windows when searching from the drive root, see #931 and #936 (@gbarta)
|
||||
|
||||
|
||||
# v8.3.1
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Stop implying `--no-ignore-parent` when `--no-vcs-ignore` is supplied, see #907, #901, #908 (@tmccombs)
|
||||
- fd no longer waits for the whole traversal if the only matches arrive within max_buffer_time, see #868 and #895 (@tavianator)
|
||||
- `--max-results=1` now immediately quits after the first result, see #867
|
||||
- `fd -h` does not panic anymore when stdout is closed, see #897
|
||||
|
||||
## Changes
|
||||
|
||||
- Disable jemalloc on FreeBSD, see #896 (@xanderio)
|
||||
- Updated man page, see #912 (@rlue)
|
||||
- Updated zsh completions, see #932 (@tmccombs)
|
||||
|
||||
|
||||
# v8.3.0
|
||||
|
||||
## Performance improvements
|
||||
|
||||
- Colorized output is now significantly faster, see #720 and #853 (@tavianator)
|
||||
- Writing to stdout is now buffered if the output does not go to a TTY. This increases performance
|
||||
when the output of `fd` is piped to another program or to a file, see #885 (@tmccombs, original
|
||||
implementation by @sourlemon207)
|
||||
- File metadata is now cached between the different filters that require it (e.g. `--owner`,
|
||||
`--size`), reducing the number of `stat` syscalls when multiple filters are used; see #863
|
||||
(@tavianator, original implementation by @alexmaco)
|
||||
|
||||
## Features
|
||||
|
||||
- Don't buffer command output from `--exec` when using a single thread. See #522
|
||||
- Add new `-q, --quiet` flag, see #303 (@Asha20)
|
||||
- Add new `--no-ignore-parent` flag, see #787 (@will459)
|
||||
- Add new `--batch-size` flag, see #410 (@devonhollowood)
|
||||
- Add opposing command-line options, see #595 (@Asha20)
|
||||
- Add support for more filesystem indicators in `LS_COLORS`, see
|
||||
https://github.com/sharkdp/lscolors/pull/35 (@tavianator)
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Always show the `./` prefix for search results unless the output is a TTY or `--strip-cwd-prefix` is set, see #760 and #861 (@jcaplan)
|
||||
- Set default path separator to `/` in MSYS, see #537 and #730 (@aswild)
|
||||
- fd cannot search files under a RAM disk, see #752
|
||||
- fd doesn't show substituted drive on Windows, see #365
|
||||
- Properly handle write errors to devices that are full, see #737
|
||||
- Use local time zone for time functions (`--change-newer-than`, `--change-older-than`), see #631 (@jacobmischka)
|
||||
- Support `--list-details` on more platforms (like BusyBox), see #783
|
||||
- The filters `--owner`, `--size`, and `--changed-{within,before}` now apply to symbolic links
|
||||
themselves, rather than the link target, except when `--follow` is specified; see #863
|
||||
- Change time comparisons to be exclusive, see #794 (@jacobmischka)
|
||||
|
||||
## Changes
|
||||
|
||||
- Apply custom `--path-separator` to commands run with `--exec(-batch)` and `--list-details`, see #697 (@aswild)
|
||||
|
||||
## Other
|
||||
|
||||
- Many documentation updates
|
||||
|
||||
|
||||
# v8.2.1
|
||||
|
||||
No functional changes with respect to v8.2.0. Bugfix in the release process.
|
||||
|
||||
# v8.2.0
|
||||
|
||||
## Features
|
||||
|
||||
- Add new `--prune` flag, see #535 (@reima)
|
||||
- Improved the usability of the time-based options, see #624 and #645 (@gorogoroumaru)
|
||||
- Add support for exact file sizes in the `--size` filter, see #669 and #696 (@Rogach)
|
||||
- `fd` now prints an error message if the search pattern requires a leading dot but
|
||||
`--hidden` is not enabled (Unix only), see #615
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Avoid panic when performing limited searches in directories with restricted permissions, see #678
|
||||
- Invalid numeric command-line arguments are silently ignored, see #675
|
||||
- Disable jemalloc on Android, see #662
|
||||
- The `--help` text will be colorless if `NO_COLOR` has been set, see #600 (@xanonid)
|
||||
|
||||
## Changes
|
||||
|
||||
- If `LS_COLORS` is not set (e.g. on Windows), we now provide a more comprehensive default which
|
||||
includes much more filetypes, see #604 and #682 (mjsir911).
|
||||
|
||||
## Other
|
||||
|
||||
- Added `zsh` completion files, see #654 and #189 (@smancill)
|
||||
|
||||
# v8.1.1
|
||||
|
||||
## Bugfixes
|
||||
|
||||
- Support colored output on older Windows versions if either (1) `--color=always` is set or (2) the `TERM` environment variable is set. See #469
|
||||
|
||||
# v8.1.0
|
||||
|
||||
## Features
|
||||
|
@ -407,7 +702,7 @@ I'd also like to take this chance to say a special Thank You to a few people tha
|
|||
* Add option to force colored output: `--color always`, see #49 (@Detegr)
|
||||
* Generate Shell completions for Bash, ZSH, Fish and Powershell, see #64 (@ImbaKnugel)
|
||||
* Better & extended `--help` text (@abaez and @Detegr)
|
||||
* Proper Windows support, see #70
|
||||
* Proper Windows support, see #70
|
||||
|
||||
## Changes
|
||||
|
||||
|
@ -435,9 +730,9 @@ I'd also like to take this chance to say a special Thank You to a few people tha
|
|||
|
||||
* Changed `--sensitive` to `--case-sensitive`
|
||||
* Changed `--absolute` to `--absolute-path`
|
||||
* Throw an error if root directory is not existent, see #39
|
||||
* Use absolute paths if the root dir is an absolute path, see #40
|
||||
* Handle invalid UTF-8, see #34 #38
|
||||
* Throw an error if root directory is not existent, see #39
|
||||
* Use absolute paths if the root dir is an absolute path, see #40
|
||||
* Handle invalid UTF-8, see #34 #38
|
||||
* Support `-V`, `--version` by switching from `getopts` to `clap`.
|
||||
|
||||
Misc:
|
||||
|
@ -445,8 +740,8 @@ Misc:
|
|||
|
||||
# v1.1.0
|
||||
|
||||
- Windows compatibility (@sebasv), see #29 #35
|
||||
- Safely exit on broken output pipes (e.g.: usage with `head`, `tail`, ..), see #24
|
||||
- Windows compatibility (@sebasv), see #29 #35
|
||||
- Safely exit on broken output pipes (e.g.: usage with `head`, `tail`, ..), see #24
|
||||
- Backport for rust 1.16, see #23
|
||||
|
||||
# v1.0.0
|
||||
|
@ -458,17 +753,17 @@ Misc:
|
|||
|
||||
# v0.3.0
|
||||
|
||||
- Parse dircolors files, closes #20
|
||||
- Colorize each path component, closes #19
|
||||
- Add short command line option for --hidden, see #18
|
||||
- Parse dircolors files, closes #20
|
||||
- Colorize each path component, closes #19
|
||||
- Add short command line option for --hidden, see #18
|
||||
|
||||
# v0.2.0
|
||||
|
||||
- Option to follow symlinks, disable colors, closes #16, closes #17
|
||||
- Option to follow symlinks, disable colors, closes #16, closes #17
|
||||
- `--filename` instead of `--full-path`
|
||||
- Option to search hidden directories, closes #12
|
||||
- Configurable search depth, closes #13
|
||||
- Detect interactive terminal, closes #11
|
||||
- Option to search hidden directories, closes #12
|
||||
- Configurable search depth, closes #13
|
||||
- Detect interactive terminal, closes #11
|
||||
|
||||
# v0.1.0
|
||||
|
||||
|
|
|
@ -10,6 +10,24 @@ We welcome any form of contribution:
|
|||
**Note**: Before you take the time to open a pull request, please open a ticket first. This will
|
||||
give us the chance to discuss any potential changes first.
|
||||
|
||||
## Add an entry to the changelog
|
||||
|
||||
If your contribution changes the behavior of `fd` (as opposed to a typo-fix
|
||||
in the documentation), please update the [`CHANGELOG.md`](CHANGELOG.md#upcoming-release) file
|
||||
and describe your changes. This makes the release process much easier and
|
||||
therefore helps to get your changes into a new `fd` release faster.
|
||||
|
||||
The top of the `CHANGELOG` contains an *"Upcoming release"* section with a few
|
||||
subsections (Features, Bugfixes, …). Please add your entry to the subsection
|
||||
that best describes your change.
|
||||
|
||||
Entries follow this format:
|
||||
```
|
||||
- Short description of what has been changed, see #123 (@user)
|
||||
```
|
||||
Here, `#123` is the number of the original issue and/or your pull request.
|
||||
Please replace `@user` by your GitHub username.
|
||||
|
||||
## Important links
|
||||
|
||||
* [Open issues](https://github.com/sharkdp/fd/issues)
|
||||
|
|
File diff suppressed because it is too large
Load Diff
64
Cargo.toml
64
Cargo.toml
|
@ -12,12 +12,13 @@ keywords = [
|
|||
"filesystem",
|
||||
"tool",
|
||||
]
|
||||
license = "MIT/Apache-2.0"
|
||||
license = "MIT OR Apache-2.0"
|
||||
name = "fd-find"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/sharkdp/fd"
|
||||
version = "8.1.0"
|
||||
edition= "2018"
|
||||
version = "10.1.0"
|
||||
edition= "2021"
|
||||
rust-version = "1.77.2"
|
||||
|
||||
[badges.appveyor]
|
||||
repository = "sharkdp/fd"
|
||||
|
@ -30,30 +31,41 @@ name = "fd"
|
|||
path = "src/main.rs"
|
||||
|
||||
[build-dependencies]
|
||||
clap = "2.31.2"
|
||||
version_check = "0.9"
|
||||
|
||||
[dependencies]
|
||||
ansi_term = "0.12"
|
||||
atty = "0.2"
|
||||
ignore = "0.4.3"
|
||||
lazy_static = "1.1.0"
|
||||
num_cpus = "1.8"
|
||||
regex = "1.0.0"
|
||||
regex-syntax = "0.6"
|
||||
ctrlc = "3.1"
|
||||
humantime = "2.0"
|
||||
lscolors = "0.7"
|
||||
aho-corasick = "1.1"
|
||||
nu-ansi-term = "0.50"
|
||||
argmax = "0.3.1"
|
||||
ignore = "0.4.22"
|
||||
regex = "1.10.3"
|
||||
regex-syntax = "0.8"
|
||||
ctrlc = "3.2"
|
||||
humantime = "2.1"
|
||||
globset = "0.4"
|
||||
anyhow = "1.0"
|
||||
dirs = "2.0"
|
||||
etcetera = "0.8"
|
||||
normpath = "1.1.1"
|
||||
crossbeam-channel = "0.5.12"
|
||||
clap_complete = {version = "4.4.9", optional = true}
|
||||
faccess = "0.2.4"
|
||||
|
||||
[dependencies.clap]
|
||||
version = "2.31.2"
|
||||
features = ["suggestions", "color", "wrap_help"]
|
||||
version = "4.4.13"
|
||||
features = ["suggestions", "color", "wrap_help", "cargo", "derive"]
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "0.4.38"
|
||||
default-features = false
|
||||
features = ["std", "clock"]
|
||||
|
||||
[dependencies.lscolors]
|
||||
version = "0.17"
|
||||
default-features = false
|
||||
features = ["nu-ansi-term"]
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
users = "0.10.0"
|
||||
nix = { version = "0.28.0", default-features = false, features = ["signal", "user"] }
|
||||
|
||||
[target.'cfg(all(unix, not(target_os = "redox")))'.dependencies]
|
||||
libc = "0.2"
|
||||
|
@ -61,14 +73,22 @@ libc = "0.2"
|
|||
# FIXME: Re-enable jemalloc on macOS
|
||||
# jemalloc is currently disabled on macOS due to a bug in jemalloc in combination with macOS
|
||||
# Catalina. See https://github.com/sharkdp/fd/issues/498 for details.
|
||||
[target.'cfg(all(not(windows), not(target_os = "macos"), not(target_env = "musl")))'.dependencies]
|
||||
jemallocator = "0.3.0"
|
||||
[target.'cfg(all(not(windows), not(target_os = "android"), not(target_os = "macos"), not(target_os = "freebsd"), not(target_os = "openbsd"), not(all(target_env = "musl", target_pointer_width = "32")), not(target_arch = "riscv64")))'.dependencies]
|
||||
jemallocator = {version = "0.5.4", optional = true}
|
||||
|
||||
[dev-dependencies]
|
||||
diff = "0.1"
|
||||
tempdir = "0.3"
|
||||
filetime = "0.2.1"
|
||||
tempfile = "3.10"
|
||||
filetime = "0.2"
|
||||
test-case = "3.3"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
strip = true
|
||||
codegen-units = 1
|
||||
|
||||
[features]
|
||||
use-jemalloc = ["jemallocator"]
|
||||
completions = ["clap_complete"]
|
||||
base = ["use-jemalloc"]
|
||||
default = ["use-jemalloc", "completions"]
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
# https://github.com/sharkdp/fd/issues/1085
|
||||
[target.aarch64-unknown-linux-gnu.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
||||
|
||||
[target.aarch64-unknown-linux-musl.env]
|
||||
passthrough = ["JEMALLOC_SYS_WITH_LG_PAGE=16"]
|
|
@ -1,6 +1,6 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2017-2020 The fd developers
|
||||
Copyright (c) 2017-present The fd developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
|
@ -0,0 +1,37 @@
|
|||
PROFILE=release
|
||||
EXE=target/$(PROFILE)/fd
|
||||
prefix=/usr/local
|
||||
bindir=$(prefix)/bin
|
||||
datadir=$(prefix)/share
|
||||
exe_name=fd
|
||||
|
||||
$(EXE): Cargo.toml src/**/*.rs
|
||||
cargo build --profile $(PROFILE) --locked
|
||||
|
||||
.PHONY: completions
|
||||
completions: autocomplete/fd.bash autocomplete/fd.fish autocomplete/fd.ps1 autocomplete/_fd
|
||||
|
||||
comp_dir=@mkdir -p autocomplete
|
||||
|
||||
autocomplete/fd.bash: $(EXE)
|
||||
$(comp_dir)
|
||||
$(EXE) --gen-completions bash > $@
|
||||
|
||||
autocomplete/fd.fish: $(EXE)
|
||||
$(comp_dir)
|
||||
$(EXE) --gen-completions fish > $@
|
||||
|
||||
autocomplete/fd.ps1: $(EXE)
|
||||
$(comp_dir)
|
||||
$(EXE) --gen-completions powershell > $@
|
||||
|
||||
autocomplete/_fd: contrib/completion/_fd
|
||||
$(comp_dir)
|
||||
cp $< $@
|
||||
|
||||
install: $(EXE) completions
|
||||
install -Dm755 $(EXE) $(DESTDIR)$(bindir)/fd
|
||||
install -Dm644 autocomplete/fd.bash $(DESTDIR)/$(datadir)/bash-completion/completions/$(exe_name)
|
||||
install -Dm644 autocomplete/fd.fish $(DESTDIR)/$(datadir)/fish/vendor_completions.d/$(exe_name).fish
|
||||
install -Dm644 autocomplete/_fd $(DESTDIR)/$(datadir)/zsh/site-functions/_$(exe_name)
|
||||
install -Dm644 doc/fd.1 $(DESTDIR)/$(datadir)/man/man1/$(exe_name).1
|
818
README.md
818
README.md
|
@ -1,338 +1,40 @@
|
|||
# fd
|
||||
[![Build Status](https://travis-ci.org/sharkdp/fd.svg?branch=master)](https://travis-ci.org/sharkdp/fd)
|
||||
[![Build status](https://ci.appveyor.com/api/projects/status/21c4p5fwggc5gy3j/branch/master?svg=true)](https://ci.appveyor.com/project/sharkdp/fd/branch/master)
|
||||
|
||||
[![CICD](https://github.com/sharkdp/fd/actions/workflows/CICD.yml/badge.svg)](https://github.com/sharkdp/fd/actions/workflows/CICD.yml)
|
||||
[![Version info](https://img.shields.io/crates/v/fd-find.svg)](https://crates.io/crates/fd-find)
|
||||
[中文](https://github.com/chinanf-boy/fd-zh)
|
||||
[한국어](https://github.com/spearkkk/fd-kor)
|
||||
[[中文](https://github.com/cha0ran/fd-zh)]
|
||||
[[한국어](https://github.com/spearkkk/fd-kor)]
|
||||
|
||||
*fd* is a simple, fast and user-friendly alternative to
|
||||
[*find*](https://www.gnu.org/software/findutils/).
|
||||
`fd` is a program to find entries in your filesystem.
|
||||
It is a simple, fast and user-friendly alternative to [`find`](https://www.gnu.org/software/findutils/).
|
||||
While it does not aim to support all of `find`'s powerful functionality, it provides sensible
|
||||
(opinionated) defaults for a majority of use cases.
|
||||
|
||||
While it does not seek to mirror all of *find*'s powerful functionality, it provides sensible
|
||||
(opinionated) defaults for [80%](https://en.wikipedia.org/wiki/Pareto_principle) of the use cases.
|
||||
[Installation](#installation) • [How to use](#how-to-use) • [Troubleshooting](#troubleshooting)
|
||||
|
||||
## Features
|
||||
* Convenient syntax: `fd PATTERN` instead of `find -iname '*PATTERN*'`.
|
||||
* Colorized terminal output (similar to *ls*).
|
||||
* It's *fast* (see [benchmarks](#benchmark) below).
|
||||
|
||||
* Intuitive syntax: `fd PATTERN` instead of `find -iname '*PATTERN*'`.
|
||||
* Regular expression (default) and glob-based patterns.
|
||||
* [Very fast](#benchmark) due to parallelized directory traversal.
|
||||
* Uses colors to highlight different file types (same as `ls`).
|
||||
* Supports [parallel command execution](#command-execution)
|
||||
* Smart case: the search is case-insensitive by default. It switches to
|
||||
case-sensitive if the pattern contains an uppercase
|
||||
character[\*](http://vimdoc.sourceforge.net/htmldoc/options.html#'smartcase').
|
||||
* Ignores hidden directories and files, by default.
|
||||
* Ignores patterns from your `.gitignore`, by default.
|
||||
* Regular expressions.
|
||||
* Unicode-awareness.
|
||||
* The command name is *50%* shorter[\*](https://github.com/ggreer/the_silver_searcher) than
|
||||
`find` :-).
|
||||
* Parallel command execution with a syntax similar to GNU Parallel.
|
||||
|
||||
## Demo
|
||||
|
||||
![Demo](doc/screencast.svg)
|
||||
|
||||
## Benchmark
|
||||
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~190.000
|
||||
subdirectories and about a million files. For averaging and statistical analysis, I'm using
|
||||
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
|
||||
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
|
||||
|
||||
Let's start with `find`:
|
||||
```
|
||||
Benchmark #1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
|
||||
Time (mean ± σ): 7.236 s ± 0.090 s
|
||||
|
||||
Range (min … max): 7.133 s … 7.385 s
|
||||
```
|
||||
|
||||
`find` is much faster if it does not need to perform a regular-expression search:
|
||||
```
|
||||
Benchmark #2: find ~ -iname '*[0-9].jpg'
|
||||
|
||||
Time (mean ± σ): 3.914 s ± 0.027 s
|
||||
|
||||
Range (min … max): 3.876 s … 3.964 s
|
||||
```
|
||||
|
||||
Now let's try the same for `fd`. Note that `fd` *always* performs a regular expression
|
||||
search. The options `--hidden` and `--no-ignore` are needed for a fair comparison,
|
||||
otherwise `fd` does not have to traverse hidden folders and ignored paths (see below):
|
||||
```
|
||||
Benchmark #3: fd -HI '.*[0-9]\.jpg$' ~
|
||||
|
||||
Time (mean ± σ): 811.6 ms ± 26.9 ms
|
||||
|
||||
Range (min … max): 786.0 ms … 870.7 ms
|
||||
```
|
||||
For this particular example, `fd` is approximately nine times faster than `find -iregex`
|
||||
and about five times faster than `find -iname`. By the way, both tools found the exact
|
||||
same 20880 files :smile:.
|
||||
|
||||
Finally, let's run `fd` without `--hidden` and `--no-ignore` (this can lead to different
|
||||
search results, of course). If *fd* does not have to traverse the hidden and git-ignored
|
||||
folders, it is almost an order of magnitude faster:
|
||||
```
|
||||
Benchmark #4: fd '[0-9]\.jpg$' ~
|
||||
|
||||
Time (mean ± σ): 123.7 ms ± 6.0 ms
|
||||
|
||||
Range (min … max): 118.8 ms … 140.0 ms
|
||||
```
|
||||
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While I have
|
||||
performed quite a lot of different tests (and found consistent results), things might
|
||||
be different for you! I encourage everyone to try it out on their own. See
|
||||
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
|
||||
|
||||
Concerning *fd*'s speed, the main credit goes to the `regex` and `ignore` crates that are also used
|
||||
in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
|
||||
## Colorized output
|
||||
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
|
||||
variable [`LS_COLORS`](https://linux.die.net/man/5/dir_colors) has to be set. Typically, the value
|
||||
of this variable is set by the `dircolors` command which provides a convenient configuration format
|
||||
to define colors for different file formats.
|
||||
On most distributions, `LS_COLORS` should be set already. If you are looking for alternative, more
|
||||
complete (and more colorful) variants, see
|
||||
[here](https://github.com/seebi/dircolors-solarized) or
|
||||
[here](https://github.com/trapd00r/LS_COLORS).
|
||||
|
||||
`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.
|
||||
|
||||
## Parallel command execution
|
||||
If the `-x`/`--exec` option is specified alongside a command template, a job pool will be created
|
||||
for executing commands in parallel for each discovered path as the input. The syntax for generating
|
||||
commands is similar to that of GNU Parallel:
|
||||
|
||||
- `{}`: A placeholder token that will be replaced with the path of the search result
|
||||
(`documents/images/party.jpg`).
|
||||
- `{.}`: Like `{}`, but without the file extension (`documents/images/party`).
|
||||
- `{/}`: A placeholder that will be replaced by the basename of the search result (`party.jpg`).
|
||||
- `{//}`: Uses the parent of the discovered path (`documents/images`).
|
||||
- `{/.}`: Uses the basename, with the extension removed (`party`).
|
||||
|
||||
``` bash
|
||||
# Convert all jpg files to png files:
|
||||
fd -e jpg -x convert {} {.}.png
|
||||
|
||||
# Unpack all zip files (if no placeholder is given, the path is appended):
|
||||
fd -e zip -x unzip
|
||||
|
||||
# Convert all flac files into opus files:
|
||||
fd -e flac -x ffmpeg -i {} -c:a libopus {.}.opus
|
||||
|
||||
# Count the number of lines in Rust files (the command template can be terminated with ';'):
|
||||
fd -x wc -l \; -e rs
|
||||
```
|
||||
|
||||
The number of threads used for command execution can be set with the `--threads`/`-j` option.
|
||||
|
||||
## Installation
|
||||
|
||||
[![Packaging status](https://repology.org/badge/vertical-allrepos/fd.svg)](https://repology.org/project/fd/versions)
|
||||
|
||||
[![Packaging status](https://repology.org/badge/vertical-allrepos/rust:fd-find.svg)](https://repology.org/project/rust:fd-find/versions)
|
||||
|
||||
|
||||
[![Packaging status](https://repology.org/badge/vertical-allrepos/fd-find.svg)](https://repology.org/project/fd-find/versions)
|
||||
|
||||
### On Ubuntu
|
||||
*... and other Debian-based Linux distributions.*
|
||||
|
||||
If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the
|
||||
[officially maintained package](https://packages.ubuntu.com/disco/fd-find):
|
||||
```
|
||||
sudo apt install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that you add an `alias fd=fdfind` to your shell's initialization file, in order to
|
||||
use `fd` in the same way as in this documentation.
|
||||
|
||||
If you use an older version of Ubuntu, you can download the latest `.deb` package from the
|
||||
[release page](https://github.com/sharkdp/fd/releases) and install it via:
|
||||
``` bash
|
||||
sudo dpkg -i fd_8.1.0_amd64.deb # adapt version number and architecture
|
||||
```
|
||||
|
||||
### On Debian
|
||||
|
||||
If you run Debian Buster or newer, you can install the
|
||||
[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):
|
||||
```
|
||||
sudo apt-get install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that you add an `alias fd=fdfind` to your shell's initialization file, in order to
|
||||
use `fd` in the same way as in this documentation.
|
||||
|
||||
### On Fedora
|
||||
|
||||
Starting with Fedora 28, you can install `fd` from the official package sources:
|
||||
``` bash
|
||||
dnf install fd-find
|
||||
```
|
||||
|
||||
For older versions, you can use this [Fedora copr](https://copr.fedorainfracloud.org/coprs/keefle/fd/) to install `fd`:
|
||||
``` bash
|
||||
dnf copr enable keefle/fd
|
||||
dnf install fd
|
||||
```
|
||||
|
||||
### On Alpine Linux
|
||||
|
||||
You can install [the fd package](https://pkgs.alpinelinux.org/packages?name=fd)
|
||||
from the official sources, provided you have the appropriate repository enabled:
|
||||
```
|
||||
apk add fd
|
||||
```
|
||||
|
||||
### On Arch Linux
|
||||
|
||||
You can install [the fd package](https://www.archlinux.org/packages/community/x86_64/fd/) from the official repos:
|
||||
```
|
||||
pacman -S fd
|
||||
```
|
||||
### On Gentoo Linux
|
||||
|
||||
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
|
||||
```
|
||||
emerge -av fd
|
||||
```
|
||||
|
||||
### On openSUSE Linux
|
||||
|
||||
You can install [the fd package](https://software.opensuse.org/package/fd) from the official repo:
|
||||
```
|
||||
zypper in fd
|
||||
```
|
||||
|
||||
### On Void Linux
|
||||
|
||||
You can install `fd` via xbps-install:
|
||||
```
|
||||
xbps-install -S fd
|
||||
```
|
||||
|
||||
### On macOS
|
||||
|
||||
You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd):
|
||||
```
|
||||
brew install fd
|
||||
```
|
||||
|
||||
… or with MacPorts:
|
||||
```
|
||||
sudo port install fd
|
||||
```
|
||||
|
||||
### On Windows
|
||||
|
||||
You can download pre-built binaries from the [release page](https://github.com/sharkdp/fd/releases).
|
||||
|
||||
Alternatively, you can install `fd` via [Scoop](http://scoop.sh):
|
||||
```
|
||||
scoop install fd
|
||||
```
|
||||
|
||||
Or via [Chocolatey](https://chocolatey.org):
|
||||
```
|
||||
choco install fd
|
||||
```
|
||||
|
||||
### On NixOS / via Nix
|
||||
|
||||
You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:
|
||||
```
|
||||
nix-env -i fd
|
||||
```
|
||||
|
||||
### On FreeBSD
|
||||
|
||||
You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo:
|
||||
```
|
||||
pkg install fd-find
|
||||
```
|
||||
|
||||
### From NPM
|
||||
|
||||
On Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
|
||||
```
|
||||
npm install -g fd-find
|
||||
```
|
||||
|
||||
### From source
|
||||
|
||||
With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can install *fd* via:
|
||||
```
|
||||
cargo install fd-find
|
||||
```
|
||||
Note that rust version *1.36.0* or later is required.
|
||||
|
||||
### From binaries
|
||||
|
||||
The [release page](https://github.com/sharkdp/fd/releases) includes precompiled binaries for Linux, macOS and Windows.
|
||||
|
||||
## Development
|
||||
```bash
|
||||
git clone https://github.com/sharkdp/fd
|
||||
|
||||
# Build
|
||||
cd fd
|
||||
cargo build
|
||||
|
||||
# Run unit tests and integration tests
|
||||
cargo test
|
||||
|
||||
# Install
|
||||
cargo install
|
||||
```
|
||||
|
||||
## Command-line options
|
||||
```
|
||||
USAGE:
|
||||
fd [FLAGS/OPTIONS] [<pattern>] [<path>...]
|
||||
|
||||
FLAGS:
|
||||
-H, --hidden Search hidden files and directories
|
||||
-I, --no-ignore Do not respect .(git|fd)ignore files
|
||||
-s, --case-sensitive Case-sensitive search (default: smart case)
|
||||
-i, --ignore-case Case-insensitive search (default: smart case)
|
||||
-g, --glob Glob-based search (default: regular expression)
|
||||
-a, --absolute-path Show absolute instead of relative paths
|
||||
-l, --list-details Use a long listing format with file metadata
|
||||
-L, --follow Follow symbolic links
|
||||
-p, --full-path Search full path (default: file-/dirname only)
|
||||
-0, --print0 Separate results by the null character
|
||||
-h, --help Prints help information
|
||||
-V, --version Prints version information
|
||||
|
||||
OPTIONS:
|
||||
-d, --max-depth <depth> Set maximum search depth (default: none)
|
||||
-t, --type <filetype>... Filter by type: file (f), directory (d), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p)
|
||||
-e, --extension <ext>... Filter by file extension
|
||||
-x, --exec <cmd> Execute a command for each search result
|
||||
-X, --exec-batch <cmd> Execute a command with all search results at once
|
||||
-E, --exclude <pattern>... Exclude entries that match the given glob pattern
|
||||
-c, --color <when> When to use colors: never, *auto*, always
|
||||
-S, --size <size>... Limit results based on the size of files.
|
||||
--changed-within <date|dur> Filter by file modification time (newer than)
|
||||
--changed-before <date|dur> Filter by file modification time (older than)
|
||||
|
||||
ARGS:
|
||||
<pattern> the search pattern - a regular expression unless '--glob' is used (optional)
|
||||
<path>... the root directory for the filesystem search (optional)
|
||||
```
|
||||
|
||||
This is the output of `fd -h`. To see the full set of command-line options, use `fd --help` which
|
||||
also includes a much more detailed help text.
|
||||
|
||||
## Tutorial
|
||||
## How to use
|
||||
|
||||
First, to get an overview of all available command line options, you can either run
|
||||
`fd -h` for a concise help message (see above) or `fd --help` for a more detailed
|
||||
[`fd -h`](#command-line-options) for a concise help message or `fd --help` for a more detailed
|
||||
version.
|
||||
|
||||
### Simple search
|
||||
|
@ -358,6 +60,8 @@ X11/xinit/xinitrc
|
|||
X11/xinit/xserverrc
|
||||
```
|
||||
|
||||
The regular expression syntax used by `fd` is [documented here](https://docs.rs/regex/1.0.0/regex/#syntax).
|
||||
|
||||
### Specifying the root directory
|
||||
|
||||
If we want to search a specific directory, it can be given as a second argument to *fd*:
|
||||
|
@ -368,7 +72,7 @@ If we want to search a specific directory, it can be given as a second argument
|
|||
/etc/passwd
|
||||
```
|
||||
|
||||
### Running *fd* without any arguments
|
||||
### List all files, recursively
|
||||
|
||||
*fd* can be called with no arguments. This is very useful to get a quick overview of all entries
|
||||
in the current directory, recursively (similar to `ls -R`):
|
||||
|
@ -408,6 +112,15 @@ src/lscolors/mod.rs
|
|||
tests/testenv/mod.rs
|
||||
```
|
||||
|
||||
### Searching for a particular file name
|
||||
|
||||
To find files with exactly the provided search pattern, use the `-g` (or `--glob`) option:
|
||||
``` bash
|
||||
> fd -g libc.so /usr
|
||||
/usr/lib32/libc.so
|
||||
/usr/lib/libc.so
|
||||
```
|
||||
|
||||
### Hidden and ignored files
|
||||
By default, *fd* does not search hidden directories and does not show hidden files in the
|
||||
search results. To disable this behavior, we can use the `-H` (or `--hidden`) option:
|
||||
|
@ -427,7 +140,96 @@ target/debug/deps/libnum_cpus-f5ce7ef99006aa05.rlib
|
|||
```
|
||||
|
||||
To really search *all* files and directories, simply combine the hidden and ignore features to show
|
||||
everything (`-HI`).
|
||||
everything (`-HI`) or use `-u`/`--unrestricted`.
|
||||
|
||||
### Matching the full path
|
||||
By default, *fd* only matches the filename of each file. However, using the `--full-path` or `-p` option,
|
||||
you can match against the full path.
|
||||
|
||||
```bash
|
||||
> fd -p -g '**/.git/config'
|
||||
> fd -p '.*/lesson-\d+/[a-z]+\.(jpg|png)'
|
||||
```
|
||||
|
||||
### Command execution
|
||||
|
||||
Instead of just showing the search results, you often want to *do something* with them. `fd`
|
||||
provides two ways to execute external commands for each of your search results:
|
||||
|
||||
* The `-x`/`--exec` option runs an external command *for each of the search results* (in parallel).
|
||||
* The `-X`/`--exec-batch` option launches the external command once, with *all search results as arguments*.
|
||||
|
||||
#### Examples
|
||||
|
||||
Recursively find all zip archives and unpack them:
|
||||
``` bash
|
||||
fd -e zip -x unzip
|
||||
```
|
||||
If there are two such files, `file1.zip` and `backup/file2.zip`, this would execute
|
||||
`unzip file1.zip` and `unzip backup/file2.zip`. The two `unzip` processes run in parallel
|
||||
(if the files are found fast enough).
|
||||
|
||||
Find all `*.h` and `*.cpp` files and auto-format them inplace with `clang-format -i`:
|
||||
``` bash
|
||||
fd -e h -e cpp -x clang-format -i
|
||||
```
|
||||
Note how the `-i` option to `clang-format` can be passed as a separate argument. This is why
|
||||
we put the `-x` option last.
|
||||
|
||||
Find all `test_*.py` files and open them in your favorite editor:
|
||||
``` bash
|
||||
fd -g 'test_*.py' -X vim
|
||||
```
|
||||
Note that we use capital `-X` here to open a single `vim` instance. If there are two such files,
|
||||
`test_basic.py` and `lib/test_advanced.py`, this will run `vim test_basic.py lib/test_advanced.py`.
|
||||
|
||||
To see details like file permissions, owners, file sizes etc., you can tell `fd` to show them
|
||||
by running `ls` for each result:
|
||||
``` bash
|
||||
fd … -X ls -lhd --color=always
|
||||
```
|
||||
This pattern is so useful that `fd` provides a shortcut. You can use the `-l`/`--list-details`
|
||||
option to execute `ls` in this way: `fd … -l`.
|
||||
|
||||
The `-X` option is also useful when combining `fd` with [ripgrep](https://github.com/BurntSushi/ripgrep/) (`rg`) in order to search within a certain class of files, like all C++ source files:
|
||||
```bash
|
||||
fd -e cpp -e cxx -e h -e hpp -X rg 'std::cout'
|
||||
```
|
||||
|
||||
Convert all `*.jpg` files to `*.png` files:
|
||||
``` bash
|
||||
fd -e jpg -x convert {} {.}.png
|
||||
```
|
||||
Here, `{}` is a placeholder for the search result. `{.}` is the same, without the file extension.
|
||||
See below for more details on the placeholder syntax.
|
||||
|
||||
The terminal output of commands run from parallel threads using `-x` will not be interlaced or garbled,
|
||||
so `fd -x` can be used to rudimentarily parallelize a task run over many files.
|
||||
An example of this is calculating the checksum of each individual file within a directory.
|
||||
```
|
||||
fd -tf -x md5sum > file_checksums.txt
|
||||
```
|
||||
|
||||
#### Placeholder syntax
|
||||
|
||||
The `-x` and `-X` options take a *command template* as a series of arguments (instead of a single string).
|
||||
If you want to add additional options to `fd` after the command template, you can terminate it with a `\;`.
|
||||
|
||||
The syntax for generating commands is similar to that of [GNU Parallel](https://www.gnu.org/software/parallel/):
|
||||
|
||||
- `{}`: A placeholder token that will be replaced with the path of the search result
|
||||
(`documents/images/party.jpg`).
|
||||
- `{.}`: Like `{}`, but without the file extension (`documents/images/party`).
|
||||
- `{/}`: A placeholder that will be replaced by the basename of the search result (`party.jpg`).
|
||||
- `{//}`: The parent of the discovered path (`documents/images`).
|
||||
- `{/.}`: The basename, with the extension removed (`party`).
|
||||
|
||||
If you do not include a placeholder, *fd* automatically adds a `{}` at the end.
|
||||
|
||||
#### Parallel vs. serial execution
|
||||
|
||||
For `-x`/`--exec`, you can control the number of parallel jobs by using the `-j`/`--threads` option.
|
||||
Use `--threads=1` for serial execution.
|
||||
|
||||
### Excluding specific files or directories
|
||||
|
||||
|
@ -456,20 +258,16 @@ To make exclude-patterns like these permanent, you can create a `.fdignore` file
|
|||
/mnt/external-drive
|
||||
*.bak
|
||||
```
|
||||
Note: `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
|
||||
> [!NOTE]
|
||||
> `fd` also supports `.ignore` files that are used by other programs such as `rg` or `ag`.
|
||||
|
||||
If you want `fd` to ignore these patterns globally, you can put them in `fd`'s global ignore file.
|
||||
This is usually located in `~/.config/fd/ignore` on macOS or Linux, and `%APPDATA%\fd\ignore` in
|
||||
Windows.
|
||||
|
||||
### Using fd with `xargs` or `parallel`
|
||||
|
||||
If we want to run a command on all search results, we can pipe the output to `xargs`:
|
||||
``` bash
|
||||
> fd -0 -e rs | xargs -0 wc -l
|
||||
```
|
||||
Here, the `-0` option tells *fd* to separate search results by the NULL character (instead of
|
||||
newlines). In the same way, the `-0` option of `xargs` tells it to read the input in this way.
|
||||
You may wish to include `.git/` in your `fd/ignore` file so that `.git` directories, and their contents
|
||||
are not included in output if you use the `--hidden` option.
|
||||
|
||||
### Deleting files
|
||||
|
||||
|
@ -488,23 +286,120 @@ option:
|
|||
If you also want to remove a certain class of directories, you can use the same technique. You will
|
||||
have to use `rm`s `--recursive`/`-r` flag to remove directories.
|
||||
|
||||
Note: there are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
> [!NOTE]
|
||||
> There are scenarios where using `fd … -X rm -r` can cause race conditions: if you have a
|
||||
path like `…/foo/bar/foo/…` and want to remove all directories named `foo`, you can end up in a
|
||||
situation where the outer `foo` directory is removed first, leading to (harmless) *"'foo/bar/foo':
|
||||
No such file or directory"* errors in the `rm` call.
|
||||
|
||||
### Troubleshooting
|
||||
### Command-line options
|
||||
|
||||
#### `fd` does not find my file!
|
||||
This is the output of `fd -h`. To see the full set of command-line options, use `fd --help` which
|
||||
also includes a much more detailed help text.
|
||||
|
||||
```
|
||||
Usage: fd [OPTIONS] [pattern] [path]...
|
||||
|
||||
Arguments:
|
||||
[pattern] the search pattern (a regular expression, unless '--glob' is used; optional)
|
||||
[path]... the root directories for the filesystem search (optional)
|
||||
|
||||
Options:
|
||||
-H, --hidden Search hidden files and directories
|
||||
-I, --no-ignore Do not respect .(git|fd)ignore files
|
||||
-s, --case-sensitive Case-sensitive search (default: smart case)
|
||||
-i, --ignore-case Case-insensitive search (default: smart case)
|
||||
-g, --glob Glob-based search (default: regular expression)
|
||||
-a, --absolute-path Show absolute instead of relative paths
|
||||
-l, --list-details Use a long listing format with file metadata
|
||||
-L, --follow Follow symbolic links
|
||||
-p, --full-path Search full abs. path (default: filename only)
|
||||
-d, --max-depth <depth> Set maximum search depth (default: none)
|
||||
-E, --exclude <pattern> Exclude entries that match the given glob pattern
|
||||
-t, --type <filetype> Filter by type: file (f), directory (d/dir), symlink (l),
|
||||
executable (x), empty (e), socket (s), pipe (p), char-device
|
||||
(c), block-device (b)
|
||||
-e, --extension <ext> Filter by file extension
|
||||
-S, --size <size> Limit results based on the size of files
|
||||
--changed-within <date|dur> Filter by file modification time (newer than)
|
||||
--changed-before <date|dur> Filter by file modification time (older than)
|
||||
-o, --owner <user:group> Filter by owning user and/or group
|
||||
--format <fmt> Print results according to template
|
||||
-x, --exec <cmd>... Execute a command for each search result
|
||||
-X, --exec-batch <cmd>... Execute a command with all search results at once
|
||||
-c, --color <when> When to use colors [default: auto] [possible values: auto,
|
||||
always, never]
|
||||
-h, --help Print help (see more with '--help')
|
||||
-V, --version Print version
|
||||
```
|
||||
|
||||
## Benchmark
|
||||
|
||||
Let's search my home folder for files that end in `[0-9].jpg`. It contains ~750,000
|
||||
subdirectories and about 4 million files. For averaging and statistical analysis, I'm using
|
||||
[hyperfine](https://github.com/sharkdp/hyperfine). The following benchmarks are performed
|
||||
with a "warm"/pre-filled disk-cache (results for a "cold" disk-cache show the same trends).
|
||||
|
||||
Let's start with `find`:
|
||||
```
|
||||
Benchmark 1: find ~ -iregex '.*[0-9]\.jpg$'
|
||||
Time (mean ± σ): 19.922 s ± 0.109 s
|
||||
Range (min … max): 19.765 s … 20.065 s
|
||||
```
|
||||
|
||||
`find` is much faster if it does not need to perform a regular-expression search:
|
||||
```
|
||||
Benchmark 2: find ~ -iname '*[0-9].jpg'
|
||||
Time (mean ± σ): 11.226 s ± 0.104 s
|
||||
Range (min … max): 11.119 s … 11.466 s
|
||||
```
|
||||
|
||||
Now let's try the same for `fd`. Note that `fd` performs a regular expression
|
||||
search by default. The `-u`/`--unrestricted` option is needed here for
|
||||
a fair comparison. Otherwise `fd` does not have to traverse hidden folders and
|
||||
ignored paths (see below):
|
||||
```
|
||||
Benchmark 3: fd -u '[0-9]\.jpg$' ~
|
||||
Time (mean ± σ): 854.8 ms ± 10.0 ms
|
||||
Range (min … max): 839.2 ms … 868.9 ms
|
||||
```
|
||||
For this particular example, `fd` is approximately **23 times faster** than `find -iregex`
|
||||
and about **13 times faster** than `find -iname`. By the way, both tools found the exact
|
||||
same 546 files :smile:.
|
||||
|
||||
**Note**: This is *one particular* benchmark on *one particular* machine. While we have
|
||||
performed a lot of different tests (and found consistent results), things might
|
||||
be different for you! We encourage everyone to try it out on their own. See
|
||||
[this repository](https://github.com/sharkdp/fd-benchmarks) for all necessary scripts.
|
||||
|
||||
Concerning *fd*'s speed, a lot of credit goes to the `regex` and `ignore` crates that are
|
||||
also used in [ripgrep](https://github.com/BurntSushi/ripgrep) (check it out!).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `fd` does not find my file!
|
||||
|
||||
Remember that `fd` ignores hidden directories and files by default. It also ignores patterns
|
||||
from `.gitignore` files. If you want to make sure to find absolutely every possible file, always
|
||||
use the options `-H` and `-I` to disable these two features:
|
||||
use the `-u`/`--unrestricted` option (or `-HI` to enable hidden and ignored files):
|
||||
``` bash
|
||||
> fd -HI …
|
||||
> fd -u …
|
||||
```
|
||||
|
||||
#### `fd` doesn't seem to interpret my regex pattern correctly
|
||||
### Colorized output
|
||||
|
||||
`fd` can colorize files by extension, just like `ls`. In order for this to work, the environment
|
||||
variable [`LS_COLORS`](https://linux.die.net/man/5/dir_colors) has to be set. Typically, the value
|
||||
of this variable is set by the `dircolors` command which provides a convenient configuration format
|
||||
to define colors for different file formats.
|
||||
On most distributions, `LS_COLORS` should be set already. If you are on Windows or if you are looking
|
||||
for alternative, more complete (or more colorful) variants, see [here](https://github.com/sharkdp/vivid),
|
||||
[here](https://github.com/seebi/dircolors-solarized) or
|
||||
[here](https://github.com/trapd00r/LS_COLORS).
|
||||
|
||||
`fd` also honors the [`NO_COLOR`](https://no-color.org/) environment variable.
|
||||
|
||||
### `fd` doesn't seem to interpret my regex pattern correctly
|
||||
|
||||
A lot of special regex characters (like `[]`, `^`, `$`, ..) are also special characters in your
|
||||
shell. If in doubt, always make sure to put single quotes around the regex pattern:
|
||||
|
@ -522,9 +417,17 @@ use a character class with a single hyphen character:
|
|||
> fd '[-]pattern'
|
||||
```
|
||||
|
||||
### Integration with other programs
|
||||
### "Command not found" for `alias`es or shell functions
|
||||
|
||||
#### Using fd with `fzf`
|
||||
Shell `alias`es and shell functions can not be used for command execution via `fd -x` or
|
||||
`fd -X`. In `zsh`, you can make the alias global via `alias -g myalias="…"`. In `bash`,
|
||||
you can use `export -f my_function` to make it available to child processes. You would still
|
||||
need to call `fd -x bash -c 'my_function "$1"' bash`. For other use cases or shells, use
|
||||
a (temporary) shell script.
|
||||
|
||||
## Integration with other programs
|
||||
|
||||
### Using fd with `fzf`
|
||||
|
||||
You can use *fd* to generate input for the command-line fuzzy finder [fzf](https://github.com/junegunn/fzf):
|
||||
``` bash
|
||||
|
@ -547,7 +450,21 @@ export FZF_DEFAULT_OPTS="--ansi"
|
|||
|
||||
For more details, see the [Tips section](https://github.com/junegunn/fzf#tips) of the fzf README.
|
||||
|
||||
#### Using fd with `emacs`
|
||||
### Using fd with `rofi`
|
||||
|
||||
[*rofi*](https://github.com/davatorium/rofi) is a graphical launch menu application that is able to create menus by reading from *stdin*. Piping `fd` output into `rofi`s `-dmenu` mode creates fuzzy-searchable lists of files and directories.
|
||||
|
||||
#### Example
|
||||
|
||||
Create a case-insensitive searchable multi-select list of *PDF* files under your `$HOME` directory and open the selection with your configured PDF viewer. To list all file types, drop the `-e pdf` argument.
|
||||
|
||||
``` bash
|
||||
fd --type f -e pdf . $HOME | rofi -keep-right -dmenu -i -p FILES -multi-select | xargs -I {} xdg-open {}
|
||||
```
|
||||
|
||||
To modify the list that is presented by rofi, add arguments to the `fd` command. To modify the search behaviour of rofi, add arguments to the `rofi` command.
|
||||
|
||||
### Using fd with `emacs`
|
||||
|
||||
The emacs package [find-file-in-project](https://github.com/technomancy/find-file-in-project) can
|
||||
use *fd* to find files.
|
||||
|
@ -558,18 +475,19 @@ After installing `find-file-in-project`, add the line `(setq ffip-use-rust-fd t)
|
|||
In emacs, run `M-x find-file-in-project-by-selected` to find matching files. Alternatively, run
|
||||
`M-x find-file-in-project` to list all available files in the project.
|
||||
|
||||
#### Printing fd's output as a tree
|
||||
### Printing the output as a tree
|
||||
|
||||
To format the output of `fd` similar to the `tree` command, install [`as-tree`] and pipe the output
|
||||
of `fd` to `as-tree`:
|
||||
To format the output of `fd` as a file-tree you can use the `tree` command with
|
||||
`--fromfile`:
|
||||
```bash
|
||||
fd | as-tree
|
||||
❯ fd | tree --fromfile
|
||||
```
|
||||
|
||||
This can be more useful than running `tree` by itself because `tree` does not ignore any files by
|
||||
default, nor does it support as rich a set of options as `fd` does to control what to print:
|
||||
This can be more useful than running `tree` by itself because `tree` does not
|
||||
ignore any files by default, nor does it support as rich a set of options as
|
||||
`fd` does to control what to print:
|
||||
```bash
|
||||
❯ fd --extension rs | as-tree
|
||||
❯ fd --extension rs | tree --fromfile
|
||||
.
|
||||
├── build.rs
|
||||
└── src
|
||||
|
@ -577,12 +495,222 @@ default, nor does it support as rich a set of options as `fd` does to control wh
|
|||
└── error.rs
|
||||
```
|
||||
|
||||
For more information about `as-tree`, see [the `as-tree` README][`as-tree`].
|
||||
On bash and similar you can simply create an alias:
|
||||
```bash
|
||||
❯ alias as-tree='tree --fromfile'
|
||||
```
|
||||
|
||||
[`as-tree`]: https://github.com/jez/as-tree
|
||||
### Using fd with `xargs` or `parallel`
|
||||
|
||||
Note that `fd` has a builtin feature for [command execution](#command-execution) with
|
||||
its `-x`/`--exec` and `-X`/`--exec-batch` options. If you prefer, you can still use
|
||||
it in combination with `xargs`:
|
||||
``` bash
|
||||
> fd -0 -e rs | xargs -0 wc -l
|
||||
```
|
||||
Here, the `-0` option tells *fd* to separate search results by the NULL character (instead of
|
||||
newlines). In the same way, the `-0` option of `xargs` tells it to read the input in this way.
|
||||
|
||||
## Installation
|
||||
|
||||
[![Packaging status](https://repology.org/badge/vertical-allrepos/fd-find.svg)](https://repology.org/project/fd-find/versions)
|
||||
|
||||
### On Ubuntu
|
||||
*... and other Debian-based Linux distributions.*
|
||||
|
||||
If you run Ubuntu 19.04 (Disco Dingo) or newer, you can install the
|
||||
[officially maintained package](https://packages.ubuntu.com/fd-find):
|
||||
```
|
||||
apt install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing command
|
||||
`ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation.
|
||||
Make sure that `$HOME/.local/bin` is in your `$PATH`.
|
||||
|
||||
If you use an older version of Ubuntu, you can download the latest `.deb` package from the
|
||||
[release page](https://github.com/sharkdp/fd/releases) and install it via:
|
||||
``` bash
|
||||
dpkg -i fd_9.0.0_amd64.deb # adapt version number and architecture
|
||||
```
|
||||
|
||||
### On Debian
|
||||
|
||||
If you run Debian Buster or newer, you can install the
|
||||
[officially maintained Debian package](https://tracker.debian.org/pkg/rust-fd-find):
|
||||
```
|
||||
apt-get install fd-find
|
||||
```
|
||||
Note that the binary is called `fdfind` as the binary name `fd` is already used by another package.
|
||||
It is recommended that after installation, you add a link to `fd` by executing command
|
||||
`ln -s $(which fdfind) ~/.local/bin/fd`, in order to use `fd` in the same way as in this documentation.
|
||||
Make sure that `$HOME/.local/bin` is in your `$PATH`.
|
||||
|
||||
### On Fedora
|
||||
|
||||
Starting with Fedora 28, you can install `fd` from the official package sources:
|
||||
``` bash
|
||||
dnf install fd-find
|
||||
```
|
||||
|
||||
### On Alpine Linux
|
||||
|
||||
You can install [the fd package](https://pkgs.alpinelinux.org/packages?name=fd)
|
||||
from the official sources, provided you have the appropriate repository enabled:
|
||||
```
|
||||
apk add fd
|
||||
```
|
||||
|
||||
### On Arch Linux
|
||||
|
||||
You can install [the fd package](https://www.archlinux.org/packages/community/x86_64/fd/) from the official repos:
|
||||
```
|
||||
pacman -S fd
|
||||
```
|
||||
You can also install fd [from the AUR](https://aur.archlinux.org/packages/fd-git).
|
||||
|
||||
### On Gentoo Linux
|
||||
|
||||
You can use [the fd ebuild](https://packages.gentoo.org/packages/sys-apps/fd) from the official repo:
|
||||
```
|
||||
emerge -av fd
|
||||
```
|
||||
|
||||
### On openSUSE Linux
|
||||
|
||||
You can install [the fd package](https://software.opensuse.org/package/fd) from the official repo:
|
||||
```
|
||||
zypper in fd
|
||||
```
|
||||
|
||||
### On Void Linux
|
||||
|
||||
You can install `fd` via xbps-install:
|
||||
```
|
||||
xbps-install -S fd
|
||||
```
|
||||
|
||||
### On ALT Linux
|
||||
|
||||
You can install [the fd package](https://packages.altlinux.org/en/sisyphus/srpms/fd/) from the official repo:
|
||||
```
|
||||
apt-get install fd
|
||||
```
|
||||
|
||||
### On Solus
|
||||
|
||||
You can install [the fd package](https://github.com/getsolus/packages/tree/main/packages/f/fd) from the official repo:
|
||||
```
|
||||
eopkg install fd
|
||||
```
|
||||
|
||||
### On RedHat Enterprise Linux 8/9 (RHEL8/9), Almalinux 8/9, EuroLinux 8/9 or Rocky Linux 8/9
|
||||
|
||||
You can install [the `fd` package](https://copr.fedorainfracloud.org/coprs/tkbcopr/fd/) from Fedora Copr.
|
||||
|
||||
```bash
|
||||
dnf copr enable tkbcopr/fd
|
||||
dnf install fd
|
||||
```
|
||||
|
||||
A different version using the [slower](https://github.com/sharkdp/fd/pull/481#issuecomment-534494592) malloc [instead of jemalloc](https://bugzilla.redhat.com/show_bug.cgi?id=2216193#c1) is also available from the EPEL8/9 repo as the package `fd-find`.
|
||||
|
||||
### On macOS
|
||||
|
||||
You can install `fd` with [Homebrew](https://formulae.brew.sh/formula/fd):
|
||||
```
|
||||
brew install fd
|
||||
```
|
||||
|
||||
… or with MacPorts:
|
||||
```
|
||||
port install fd
|
||||
```
|
||||
|
||||
### On Windows
|
||||
|
||||
You can download pre-built binaries from the [release page](https://github.com/sharkdp/fd/releases).
|
||||
|
||||
Alternatively, you can install `fd` via [Scoop](http://scoop.sh):
|
||||
```
|
||||
scoop install fd
|
||||
```
|
||||
|
||||
Or via [Chocolatey](https://chocolatey.org):
|
||||
```
|
||||
choco install fd
|
||||
```
|
||||
|
||||
Or via [Winget](https://learn.microsoft.com/en-us/windows/package-manager/):
|
||||
```
|
||||
winget install sharkdp.fd
|
||||
```
|
||||
|
||||
### On GuixOS
|
||||
|
||||
You can install [the fd package](https://guix.gnu.org/en/packages/fd-8.1.1/) from the official repo:
|
||||
```
|
||||
guix install fd
|
||||
```
|
||||
|
||||
### On NixOS / via Nix
|
||||
|
||||
You can use the [Nix package manager](https://nixos.org/nix/) to install `fd`:
|
||||
```
|
||||
nix-env -i fd
|
||||
```
|
||||
|
||||
### On FreeBSD
|
||||
|
||||
You can install [the fd-find package](https://www.freshports.org/sysutils/fd) from the official repo:
|
||||
```
|
||||
pkg install fd-find
|
||||
```
|
||||
|
||||
### From npm
|
||||
|
||||
On Linux and macOS, you can install the [fd-find](https://npm.im/fd-find) package:
|
||||
|
||||
```
|
||||
npm install -g fd-find
|
||||
```
|
||||
|
||||
### From source
|
||||
|
||||
With Rust's package manager [cargo](https://github.com/rust-lang/cargo), you can install *fd* via:
|
||||
```
|
||||
cargo install fd-find
|
||||
```
|
||||
Note that Rust version *1.77.2* or later is required.
|
||||
|
||||
`make` is also needed for the build.
|
||||
|
||||
### From binaries
|
||||
|
||||
The [release page](https://github.com/sharkdp/fd/releases) includes precompiled binaries for Linux, macOS and Windows. Statically-linked binaries are also available: look for archives with `musl` in the file name.
|
||||
|
||||
## Development
|
||||
```bash
|
||||
git clone https://github.com/sharkdp/fd
|
||||
|
||||
# Build
|
||||
cd fd
|
||||
cargo build
|
||||
|
||||
# Run unit tests and integration tests
|
||||
cargo test
|
||||
|
||||
# Install
|
||||
cargo install --path .
|
||||
```
|
||||
|
||||
## Maintainers
|
||||
|
||||
- [sharkdp](https://github.com/sharkdp)
|
||||
- [tmccombs](https://github.com/tmccombs)
|
||||
- [tavianator](https://github.com/tavianator)
|
||||
|
||||
## License
|
||||
Copyright (c) 2017-2020 The fd developers
|
||||
|
||||
`fd` is distributed under the terms of both the MIT License and the Apache License 2.0.
|
||||
|
||||
|
|
71
appveyor.yml
71
appveyor.yml
|
@ -1,71 +0,0 @@
|
|||
# Source: https://github.com/starkat99/appveyor-rust/
|
||||
|
||||
environment:
|
||||
global:
|
||||
PROJECT_NAME: fd
|
||||
matrix:
|
||||
# Stable channel
|
||||
- TARGET: i686-pc-windows-gnu
|
||||
CHANNEL: stable
|
||||
- TARGET: i686-pc-windows-msvc
|
||||
CHANNEL: stable
|
||||
- TARGET: x86_64-pc-windows-gnu
|
||||
CHANNEL: stable
|
||||
- TARGET: x86_64-pc-windows-msvc
|
||||
CHANNEL: stable
|
||||
|
||||
# Install Rust and Cargo
|
||||
# (Based on from https://github.com/rust-lang/libc/blob/master/appveyor.yml)
|
||||
install:
|
||||
- curl -sSf -o rustup-init.exe https://win.rustup.rs
|
||||
- rustup-init.exe --default-host %TARGET% --default-toolchain %CHANNEL% -y
|
||||
- set PATH=%PATH%;C:\Users\appveyor\.cargo\bin
|
||||
- rustc -Vv
|
||||
- cargo -V
|
||||
|
||||
# 'cargo test' takes care of building for us, so disable Appveyor's build stage. This prevents
|
||||
# the "directory does not contain a project or solution file" error.
|
||||
# source: https://github.com/starkat99/appveyor-rust/blob/master/appveyor.yml#L113
|
||||
build: false
|
||||
|
||||
# Equivalent to Travis' `script` phase
|
||||
test_script:
|
||||
- cargo build --verbose
|
||||
- cargo run
|
||||
- cargo test
|
||||
|
||||
before_deploy:
|
||||
# Generate artifacts for release
|
||||
- cargo build --release
|
||||
- mkdir staging
|
||||
- copy target\release\fd.exe staging
|
||||
- ps: copy target\release\build\fd-find*\out\_fd.ps1 staging
|
||||
- cd staging
|
||||
# release zipfile will look like 'rust-everywhere-v1.2.3-x86_64-pc-windows-msvc'
|
||||
- 7z a ../%PROJECT_NAME%-%APPVEYOR_REPO_TAG_NAME%-%TARGET%.zip *
|
||||
- appveyor PushArtifact ../%PROJECT_NAME%-%APPVEYOR_REPO_TAG_NAME%-%TARGET%.zip
|
||||
|
||||
deploy:
|
||||
description: 'Windows release'
|
||||
# All the zipped artifacts will be deployed
|
||||
artifact: /.*\.zip/
|
||||
# Here's how:
|
||||
# - Go to 'https://github.com/settings/tokens/new' and generate a Token with only the
|
||||
# `public_repo` scope enabled
|
||||
# - Then go to 'https://ci.appveyor.com/tools/encrypt' and enter the newly generated token.
|
||||
# - Enter the "encrypted value" below
|
||||
auth_token:
|
||||
secure: mWJ8ieZdGEgHf232fdMyzb9T1rKbkJivYbp/REMo8ax3X5vkQJDHhFjWeuWA3wIC
|
||||
provider: GitHub
|
||||
# deploy when a new tag is pushed and only on the stable channel
|
||||
on:
|
||||
# channel to use to produce the release artifacts
|
||||
CHANNEL: stable
|
||||
appveyor_repo_tag: true
|
||||
|
||||
branches:
|
||||
only:
|
||||
- master
|
||||
# IMPORTANT Regex to match tags. Required, or appveyor may not trigger deploys when a new tag
|
||||
# is pushed. This regex matches semantic versions like v1.2.3-rc4+2016.02.22
|
||||
- /^v\d+\.\d+\.\d+.*$/
|
21
build.rs
21
build.rs
|
@ -1,11 +1,5 @@
|
|||
use std::fs;
|
||||
|
||||
use clap::Shell;
|
||||
|
||||
include!("src/app.rs");
|
||||
|
||||
fn main() {
|
||||
let min_version = "1.36";
|
||||
let min_version = "1.64";
|
||||
|
||||
match version_check::is_min_version(min_version) {
|
||||
Some(true) => {}
|
||||
|
@ -15,17 +9,4 @@ fn main() {
|
|||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
let var = std::env::var_os("SHELL_COMPLETIONS_DIR").or(std::env::var_os("OUT_DIR"));
|
||||
let outdir = match var {
|
||||
None => return,
|
||||
Some(outdir) => outdir,
|
||||
};
|
||||
fs::create_dir_all(&outdir).unwrap();
|
||||
|
||||
let mut app = build_app();
|
||||
app.gen_completions("fd", Shell::Bash, &outdir);
|
||||
app.gen_completions("fd", Shell::Fish, &outdir);
|
||||
app.gen_completions("fd", Shell::Zsh, &outdir);
|
||||
app.gen_completions("fd", Shell::PowerShell, &outdir);
|
||||
}
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
* linguist-vendored
|
|
@ -1,179 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
# Building and packaging for release
|
||||
|
||||
set -ex
|
||||
|
||||
build() {
|
||||
cargo build --target "$TARGET" --release --verbose
|
||||
}
|
||||
|
||||
pack() {
|
||||
local tempdir
|
||||
local out_dir
|
||||
local package_name
|
||||
local gcc_prefix
|
||||
|
||||
tempdir=$(mktemp -d 2>/dev/null || mktemp -d -t tmp)
|
||||
out_dir=$(pwd)
|
||||
package_name="$PROJECT_NAME-$TRAVIS_TAG-$TARGET"
|
||||
|
||||
if [[ $TARGET == arm-unknown-linux-* ]]; then
|
||||
gcc_prefix="arm-linux-gnueabihf-"
|
||||
else
|
||||
gcc_prefix=""
|
||||
fi
|
||||
|
||||
# create a "staging" directory
|
||||
mkdir "$tempdir/$package_name"
|
||||
mkdir "$tempdir/$package_name/autocomplete"
|
||||
|
||||
# copying the main binary
|
||||
cp "target/$TARGET/release/$PROJECT_NAME" "$tempdir/$package_name/"
|
||||
"${gcc_prefix}"strip "$tempdir/$package_name/$PROJECT_NAME"
|
||||
|
||||
# manpage, readme and license
|
||||
cp "doc/$PROJECT_NAME.1" "$tempdir/$package_name"
|
||||
cp README.md "$tempdir/$package_name"
|
||||
cp LICENSE-MIT "$tempdir/$package_name"
|
||||
cp LICENSE-APACHE "$tempdir/$package_name"
|
||||
|
||||
# various autocomplete
|
||||
cp target/"$TARGET"/release/build/"$PROJECT_NAME"-*/out/"$PROJECT_NAME".bash "$tempdir/$package_name/autocomplete/${PROJECT_NAME}.bash-completion"
|
||||
cp target/"$TARGET"/release/build/"$PROJECT_NAME"-*/out/"$PROJECT_NAME".fish "$tempdir/$package_name/autocomplete"
|
||||
cp target/"$TARGET"/release/build/"$PROJECT_NAME"-*/out/_"$PROJECT_NAME" "$tempdir/$package_name/autocomplete"
|
||||
|
||||
# archiving
|
||||
pushd "$tempdir"
|
||||
tar czf "$out_dir/$package_name.tar.gz" "$package_name"/*
|
||||
popd
|
||||
rm -r "$tempdir"
|
||||
}
|
||||
|
||||
make_deb() {
|
||||
local tempdir
|
||||
local architecture
|
||||
local version
|
||||
local dpkgname
|
||||
local conflictname
|
||||
local homepage
|
||||
local maintainer
|
||||
local gcc_prefix
|
||||
|
||||
homepage="https://github.com/sharkdp/fd"
|
||||
maintainer="David Peter <mail@david-peter.de>"
|
||||
|
||||
case $TARGET in
|
||||
x86_64*)
|
||||
architecture=amd64
|
||||
gcc_prefix=""
|
||||
;;
|
||||
i686*)
|
||||
architecture=i386
|
||||
gcc_prefix=""
|
||||
;;
|
||||
arm*hf)
|
||||
architecture=armhf
|
||||
gcc_prefix="arm-linux-gnueabihf-"
|
||||
;;
|
||||
*)
|
||||
echo "make_deb: skipping target '${TARGET}'" >&2
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
version=${TRAVIS_TAG#v}
|
||||
if [[ $TARGET = *musl* ]]; then
|
||||
dpkgname=$PROJECT_NAME-musl
|
||||
conflictname=$PROJECT_NAME
|
||||
else
|
||||
dpkgname=$PROJECT_NAME
|
||||
conflictname=$PROJECT_NAME-musl
|
||||
fi
|
||||
|
||||
tempdir=$(mktemp -d 2>/dev/null || mktemp -d -t tmp)
|
||||
|
||||
# copy the main binary
|
||||
install -Dm755 "target/$TARGET/release/$PROJECT_NAME" "$tempdir/usr/bin/$PROJECT_NAME"
|
||||
"${gcc_prefix}"strip "$tempdir/usr/bin/$PROJECT_NAME"
|
||||
|
||||
# manpage
|
||||
install -Dm644 "doc/$PROJECT_NAME.1" "$tempdir/usr/share/man/man1/$PROJECT_NAME.1"
|
||||
gzip --best "$tempdir/usr/share/man/man1/$PROJECT_NAME.1"
|
||||
|
||||
# readme and license
|
||||
install -Dm644 README.md "$tempdir/usr/share/doc/$PROJECT_NAME/README.md"
|
||||
install -Dm644 LICENSE-MIT "$tempdir/usr/share/doc/$PROJECT_NAME/LICENSE-MIT"
|
||||
install -Dm644 LICENSE-APACHE "$tempdir/usr/share/doc/$PROJECT_NAME/LICENSE-APACHE"
|
||||
cat > "$tempdir/usr/share/doc/$PROJECT_NAME/copyright" <<EOF
|
||||
Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: $PROJECT_NAME
|
||||
Source: $homepage
|
||||
|
||||
Files: *
|
||||
Copyright: $maintainer
|
||||
License: Apache-2.0 or MIT
|
||||
|
||||
License: Apache-2.0
|
||||
On Debian systems, the complete text of the Apache-2.0 can be found in the
|
||||
file /usr/share/common-licenses/Apache-2.0.
|
||||
|
||||
License: MIT
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
.
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
.
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
EOF
|
||||
|
||||
# completions
|
||||
install -Dm644 target/$TARGET/release/build/$PROJECT_NAME-*/out/$PROJECT_NAME.bash "$tempdir/usr/share/bash-completion/completions/${PROJECT_NAME}"
|
||||
install -Dm644 target/$TARGET/release/build/$PROJECT_NAME-*/out/$PROJECT_NAME.fish "$tempdir/usr/share/fish/completions/$PROJECT_NAME.fish"
|
||||
install -Dm644 target/$TARGET/release/build/$PROJECT_NAME-*/out/_$PROJECT_NAME "$tempdir/usr/share/zsh/vendor-completions/_$PROJECT_NAME"
|
||||
|
||||
# Control file
|
||||
mkdir "$tempdir/DEBIAN"
|
||||
cat > "$tempdir/DEBIAN/control" <<EOF
|
||||
Package: $dpkgname
|
||||
Version: $version
|
||||
Section: utils
|
||||
Priority: optional
|
||||
Maintainer: $maintainer
|
||||
Architecture: $architecture
|
||||
Provides: $PROJECT_NAME
|
||||
Conflicts: $conflictname
|
||||
Homepage: $homepage
|
||||
Description: Simple, fast and user-friendly alternative to find
|
||||
While fd does not seek to mirror all of find's powerful functionality, it
|
||||
provides sensible (opinionated) defaults for 80% of the use cases.
|
||||
EOF
|
||||
|
||||
fakeroot dpkg-deb --build "$tempdir" "${dpkgname}_${version}_${architecture}.deb"
|
||||
}
|
||||
|
||||
|
||||
main() {
|
||||
build
|
||||
pack
|
||||
if [[ $TARGET = *linux* ]]; then
|
||||
make_deb
|
||||
fi
|
||||
}
|
||||
|
||||
main
|
|
@ -1,26 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -ex
|
||||
|
||||
if [ "$TRAVIS_OS_NAME" != linux ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
sudo apt-get update
|
||||
|
||||
# needed to build deb packages
|
||||
sudo apt-get install -y fakeroot
|
||||
|
||||
# needed for i686 linux gnu target
|
||||
if [[ $TARGET == i686-unknown-linux-gnu ]]; then
|
||||
sudo apt-get install -y gcc-multilib
|
||||
fi
|
||||
|
||||
# needed for cross-compiling for arm
|
||||
if [[ $TARGET == arm-unknown-linux-* ]]; then
|
||||
sudo apt-get install -y \
|
||||
gcc-4.8-arm-linux-gnueabihf \
|
||||
binutils-arm-linux-gnueabihf \
|
||||
libc6-armhf-cross \
|
||||
libc6-dev-armhf-cross
|
||||
fi
|
|
@ -1,11 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -ex
|
||||
|
||||
# Incorporate TARGET env var to the build and test process
|
||||
cargo build --target "$TARGET" --verbose
|
||||
|
||||
# We cannot run arm executables on linux
|
||||
if [[ $TARGET != arm-unknown-linux-* ]]; then
|
||||
cargo test --target "$TARGET" --verbose
|
||||
fi
|
|
@ -0,0 +1,283 @@
|
|||
#compdef fd
|
||||
|
||||
##
|
||||
# zsh completion function for fd
|
||||
#
|
||||
# Based on ripgrep completion function.
|
||||
# Originally based on code from the zsh-users project — see copyright notice
|
||||
# below.
|
||||
|
||||
autoload -U is-at-least
|
||||
|
||||
_fd() {
|
||||
local curcontext="$curcontext" no='!' ret=1
|
||||
local -a context line state state_descr _arguments_options fd_types fd_args
|
||||
local -A opt_args
|
||||
|
||||
if is-at-least 5.2; then
|
||||
_arguments_options=( -s -S )
|
||||
else
|
||||
_arguments_options=( -s )
|
||||
fi
|
||||
|
||||
fd_types=(
|
||||
{f,file}'\:"regular files"'
|
||||
{d,directory}'\:"directories"'
|
||||
{l,symlink}'\:"symbolic links"'
|
||||
{e,empty}'\:"empty files or directories"'
|
||||
{x,executable}'\:"executable (files)"'
|
||||
{b,block-device}'\:"block devices"'
|
||||
{c,char-device}'\:"character devices"'
|
||||
{s,socket}'\:"sockets"'
|
||||
{p,pipe}'\:"named pipes (FIFOs)"'
|
||||
)
|
||||
|
||||
# Do not complete rare options unless either the current prefix
|
||||
# matches one of those options or the user has the `complete-all`
|
||||
# style set. Note that this prefix check has to be updated manually to account
|
||||
# for all of the potential negation options listed below!
|
||||
if
|
||||
# (--[bpsu]* => match all options marked with '$no')
|
||||
[[ $PREFIX$SUFFIX == --[bopsun]* ]] ||
|
||||
zstyle -t ":complete:$curcontext:*" complete-all
|
||||
then
|
||||
no=
|
||||
fi
|
||||
|
||||
# We make heavy use of argument groups here to prevent the option specs from
|
||||
# growing unwieldy. These aren't supported in zsh <5.4, though, so we'll strip
|
||||
# them out below if necessary. This makes the exclusions inaccurate on those
|
||||
# older versions, but oh well — it's not that big a deal
|
||||
fd_args=(
|
||||
+ '(hidden)' # hidden files
|
||||
{-H,--hidden}'[search hidden files/directories]'
|
||||
|
||||
+ '(no-ignore-full)' # all ignore files
|
||||
'(no-ignore-partial)'{-I,--no-ignore}"[don't respect .(git|fd)ignore and global ignore files]"
|
||||
$no'(no-ignore-partial)*'{-u,--unrestricted}'[alias for --no-ignore, when repeated also alias for --hidden]'
|
||||
|
||||
+ no-ignore-partial # some ignore files
|
||||
"(no-ignore-full --no-ignore-vcs)--no-ignore-vcs[don't respect .gitignore files]"
|
||||
"!(no-ignore-full --no-global-ignore-file)--no-global-ignore-file[don't respect the global ignore file]"
|
||||
$no'(no-ignore-full --no-ignore-parent)--no-ignore-parent[]'
|
||||
|
||||
+ '(case)' # case-sensitivity
|
||||
{-s,--case-sensitive}'[perform a case-sensitive search]'
|
||||
{-i,--ignore-case}'[perform a case-insensitive search]'
|
||||
|
||||
+ '(regex-pattern)' # regex-based search pattern
|
||||
'(no-regex-pattern)--regex[perform a regex-based search (default)]'
|
||||
|
||||
+ '(no-regex-pattern)' # non-regex-based search pattern
|
||||
{-g,--glob}'[perform a glob-based search]'
|
||||
{-F,--fixed-strings}'[treat pattern as literal string instead of a regex]'
|
||||
|
||||
+ '(no-require-git)'
|
||||
"$no(no-ignore-full --no-ignore-vcs --no-require-git)--no-require-git[don't require git repo to respect gitignores]"
|
||||
|
||||
+ '(match-full)' # match against full path
|
||||
{-p,--full-path}'[match the pattern against the full path instead of the basename]'
|
||||
|
||||
+ '(follow)' # follow symlinks
|
||||
{-L,--follow}'[follow symbolic links to directories]'
|
||||
|
||||
+ '(abs-path)' # show absolute paths
|
||||
'(long-listing)'{-a,--absolute-path}'[show absolute paths instead of relative paths]'
|
||||
|
||||
+ '(null-sep)' # use null separator for output
|
||||
'(long-listing)'{-0,--print0}'[separate search results by the null character]'
|
||||
|
||||
+ '(long-listing)' # long-listing output
|
||||
'(abs-path null-sep max-results exec-cmds)'{-l,--list-details}'[use a long listing format with file metadata]'
|
||||
|
||||
+ '(max-results)' # max number of results
|
||||
'(long-listing exec-cmds)--max-results=[limit number of search results to given count and quit]:count'
|
||||
'(long-listing exec-cmds)-1[limit to a single search result and quit]'
|
||||
|
||||
+ '(fs-errors)' # file-system errors
|
||||
$no'--show-errors[enable the display of filesystem errors]'
|
||||
|
||||
+ '(fs-traversal)' # file-system traversal
|
||||
$no"--one-file-system[don't descend into directories on other file systems]"
|
||||
'!--mount'
|
||||
'!--xdev'
|
||||
|
||||
+ dir-depth # directory depth
|
||||
'(--exact-depth -d --max-depth)'{-d+,--max-depth=}'[set max directory depth to descend when searching]:depth'
|
||||
'!(--exact-depth -d --max-depth)--maxdepth:depth'
|
||||
'(--exact-depth --min-depth)--min-depth=[set directory depth to descend before start searching]:depth'
|
||||
'(--exact-depth -d --max-depth --maxdepth --min-depth)--exact-depth=[only search at the exact given directory depth]:depth'
|
||||
|
||||
+ prune # pruning
|
||||
"--prune[don't traverse into matching directories]"
|
||||
|
||||
+ filter-misc # filter search
|
||||
'*'{-t+,--type=}"[filter search by type]:type:(($fd_types))"
|
||||
'*'{-e+,--extension=}'[filter search by file extension]:extension'
|
||||
'*'{-E+,--exclude=}'[exclude files/directories that match the given glob pattern]:glob pattern'
|
||||
'*'{-S+,--size=}'[limit search by file size]:size limit:->size'
|
||||
'(-o --owner)'{-o+,--owner=}'[filter by owning user and/or group]:owner and/or group:->owner'
|
||||
|
||||
+ ignore-file # extra ignore files
|
||||
'*--ignore-file=[add a custom, low-precedence ignore-file with .gitignore format]: :_files'
|
||||
|
||||
+ '(filter-mtime-newer)' # filter by files modified after than
|
||||
'--changed-within=[limit search to files/directories modified within the given date/duration]:date or duration'
|
||||
'--changed-after=[alias for --changed-within]:date/duration'
|
||||
'!--change-newer-than=:date/duration'
|
||||
'!--newer=:date/duration'
|
||||
|
||||
+ '(filter-mtime-older)' # filter by files modified before than
|
||||
'--changed-before=[limit search to files/directories modified before the given date/duration]:date or duration'
|
||||
'!--change-older-than=:date/duration'
|
||||
'!--older=:date/duration'
|
||||
|
||||
+ '(color)' # colorize output
|
||||
{-c+,--color=}'[declare when to colorize search results]:when to colorize:((
|
||||
auto\:"show colors if the output goes to an interactive console (default)"
|
||||
never\:"do not use colorized output"
|
||||
always\:"always use colorized output"
|
||||
))'
|
||||
|
||||
+ '(threads)'
|
||||
{-j+,--threads=}'[set the number of threads for searching and executing]:number of threads'
|
||||
|
||||
+ '(exec-cmds)' # execute command
|
||||
'(long-listing max-results)'{-x+,--exec=}'[execute command for each search result]:command: _command_names -e:*\;::program arguments: _normal'
|
||||
'(long-listing max-results)'{-X+,--exec-batch=}'[execute command for all search results at once]:command: _command_names -e:*\;::program arguments: _normal'
|
||||
'(long-listing max-results)--batch-size=[max number of args for each -X call]:size'
|
||||
|
||||
+ other
|
||||
'!(--max-buffer-time)--max-buffer-time=[set amount of time to buffer before showing output]:time (ms)'
|
||||
|
||||
+ '(about)' # about flags
|
||||
'(: * -)'{-h,--help}'[display help message]'
|
||||
'(: * -)'{-V,--version}'[display version information]'
|
||||
|
||||
+ path-sep # set path separator for output
|
||||
$no'(--path-separator)--path-separator=[set the path separator to use when printing file paths]:path separator'
|
||||
|
||||
+ search-path
|
||||
$no'(--base-directory)--base-directory=[change the current working directory to the given path]:directory:_files -/'
|
||||
$no'(*)*--search-path=[set search path (instead of positional <path> arguments)]:directory:_files -/'
|
||||
|
||||
+ strip-cwd-prefix
|
||||
$no'(strip-cwd-prefix exec-cmds)--strip-cwd-prefix=[When to strip ./]:when:(always never auto)'
|
||||
|
||||
+ and
|
||||
'--and=[additional required search path]:pattern'
|
||||
|
||||
|
||||
+ args # positional arguments
|
||||
'1: :_guard "^-*" pattern'
|
||||
'(--search-path)*:directory:_files -/'
|
||||
)
|
||||
|
||||
# Strip out argument groups where unsupported (see above)
|
||||
is-at-least 5.4 ||
|
||||
fd_args=( ${(@)args:#(#i)(+|[a-z0-9][a-z0-9_-]#|\([a-z0-9][a-z0-9_-]#\))} )
|
||||
|
||||
_arguments $_arguments_options : $fd_args && ret=0
|
||||
|
||||
case ${state} in
|
||||
owner)
|
||||
compset -P '(\\|)\!'
|
||||
if compset -P '*:'; then
|
||||
_groups && ret=0
|
||||
else
|
||||
if
|
||||
compset -S ':*' ||
|
||||
# Do not add the colon suffix when completing "!user<TAB>
|
||||
# (with a starting double-quote) otherwise pressing tab again
|
||||
# after the inserted colon "!user:<TAB> will complete history modifiers
|
||||
[[ $IPREFIX == (\\|\!)* && ($QIPREFIX == \"* && -z $QISUFFIX) ]]
|
||||
then
|
||||
_users && ret=0
|
||||
else
|
||||
local q
|
||||
# Since quotes are needed when using the negation prefix !,
|
||||
# automatically remove the colon suffix also when closing the quote
|
||||
if [[ $QIPREFIX == [\'\"]* ]]; then
|
||||
q=${QIPREFIX:0:1}
|
||||
fi
|
||||
_users -r ": \t\n\-$q" -S : && ret=0
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
|
||||
size)
|
||||
if compset -P '[-+][0-9]##'; then
|
||||
local -a suff=(
|
||||
'B:bytes'
|
||||
'K:kilobytes (10^3 = 1000 bytes)'
|
||||
'M:megabytes (10^6 = 1000^2 bytes)'
|
||||
'G:gigabytes (10^9 = 1000^3 bytes)'
|
||||
'T:terabytes (10^12 = 1000^4 bytes)'
|
||||
'Ki:kibibytes ( 2^10 = 1024 bytes)'
|
||||
'Mi:mebibytes ( 2^20 = 1024^2 bytes)'
|
||||
'Gi:gigibytes ( 2^30 = 1024^3 bytes)'
|
||||
'Ti:tebibytes ( 2^40 = 1024^4 bytes)'
|
||||
)
|
||||
_describe -t units 'size limit units' suff -V 'units'
|
||||
elif compset -P '[-+]'; then
|
||||
_message -e 'size limit number (full format: <+-><number><unit>)'
|
||||
else
|
||||
_values 'size limit prefix (full format: <prefix><number><unit>)' \
|
||||
'\+[file size must be greater or equal to]'\
|
||||
'-[file size must be less than or equal to]' && ret=0
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
_fd "$@"
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
# Copyright (c) 2011 GitHub zsh-users - http://github.com/zsh-users
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above copyright
|
||||
# notice, this list of conditions and the following disclaimer in the
|
||||
# documentation and/or other materials provided with the distribution.
|
||||
# * Neither the name of the zsh-users nor the
|
||||
# names of its contributors may be used to endorse or promote products
|
||||
# derived from this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL ZSH-USERS BE LIABLE FOR ANY
|
||||
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
# ------------------------------------------------------------------------------
|
||||
# Description
|
||||
# -----------
|
||||
#
|
||||
# Completion script for fd
|
||||
#
|
||||
# ------------------------------------------------------------------------------
|
||||
# Authors
|
||||
# -------
|
||||
#
|
||||
# * smancill (https://github.com/smancill)
|
||||
#
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
# Local Variables:
|
||||
# mode: shell-script
|
||||
# coding: utf-8-unix
|
||||
# indent-tabs-mode: nil
|
||||
# sh-indentation: 2
|
||||
# sh-basic-offset: 2
|
||||
# End:
|
||||
# vim: ft=zsh sw=2 ts=2 et
|
|
@ -24,26 +24,72 @@ fd \- find entries in the filesystem
|
|||
.B fd
|
||||
is a simple, fast and user-friendly alternative to
|
||||
.BR find (1).
|
||||
.P
|
||||
By default
|
||||
.B fd
|
||||
uses regular expressions for the pattern. However, this can be changed to use simple glob patterns
|
||||
with the '\-\-glob' option.
|
||||
.P
|
||||
By default
|
||||
.B fd
|
||||
will exclude hidden files and directories, as well as any files that match gitignore rules
|
||||
or ignore rules in .ignore or .fdignore files.
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B \-H, \-\-hidden
|
||||
Include hidden files and directories in the search results
|
||||
(default: hidden files and directories are skipped).
|
||||
(default: hidden files and directories are skipped). The flag can be overridden with '--no-hidden'.
|
||||
.IP
|
||||
Ignored files are still excluded unless \-\-no\-ignore or \-\-no\-ignore\-vcs
|
||||
is also used.
|
||||
.TP
|
||||
.B \-I, \-\-no\-ignore
|
||||
Show search results from files and directories that would otherwise be ignored by
|
||||
.IR .gitignore ,
|
||||
.IR .ignore ,
|
||||
.IR .fdignore ,
|
||||
or the global ignore file.
|
||||
.RS
|
||||
.IP \[bu] 2
|
||||
.I .gitignore
|
||||
.IP \[bu]
|
||||
.I .git/info/exclude
|
||||
.IP \[bu]
|
||||
The global gitignore configuration (by default
|
||||
.IR $HOME/.config/git/ignore )
|
||||
.IP \[bu]
|
||||
.I .ignore
|
||||
.IP \[bu]
|
||||
.I .fdignore
|
||||
.IP \[bu]
|
||||
The global fd ignore file (usually
|
||||
.I $HOME/.config/fd/ignore
|
||||
)
|
||||
.RE
|
||||
.IP
|
||||
The flag can be overridden with '--ignore'.
|
||||
.TP
|
||||
.B \-u, \-\-unrestricted
|
||||
Alias for '--no-ignore'. Can be repeated; '-uu' is an alias for '--no-ignore --hidden'.
|
||||
Perform an unrestricted search, including ignored and hidden files. This is an alias for '--hidden --no-ignore'.
|
||||
.TP
|
||||
.B \-\-no\-ignore\-vcs
|
||||
Show search results from files and directories that would otherwise be ignored by
|
||||
.I .gitignore
|
||||
files.
|
||||
Show search results from files and directories that would otherwise be ignored by gitignore files
|
||||
including
|
||||
.IR .gitignore ,
|
||||
.IR .git/info/exclude ,
|
||||
and the global gitignore configuration
|
||||
.RI ( core.excludesFile
|
||||
git setting, which defaults to
|
||||
.IR $HOME/.config/git/ignore ).
|
||||
The flag can be overridden with '--ignore-vcs'.
|
||||
.TP
|
||||
.B \-\-no\-require\-git
|
||||
Do not require a git repository to respect gitignores. By default, fd will only
|
||||
respect global gitignore rules, .gitignore rules and local exclude rules if fd
|
||||
detects that you are searching inside a git repository. This flag allows you to
|
||||
relax this restriction such that fd will respect all git related ignore rules
|
||||
regardless of whether you’re searching in a git repository or not. The flag can
|
||||
be overridden with '--require-git'.
|
||||
.TP
|
||||
.B \-\-no\-ignore\-parent
|
||||
Show search results from files and directories that would otherwise be ignored by gitignore files in
|
||||
parent directories.
|
||||
.TP
|
||||
.B \-s, \-\-case\-sensitive
|
||||
Perform a case-sensitive search. By default, fd uses case-insensitive searches, unless the
|
||||
|
@ -55,15 +101,23 @@ pattern contains an uppercase character (smart case).
|
|||
.TP
|
||||
.B \-g, \-\-glob
|
||||
Perform a glob-based search instead of a regular expression search.
|
||||
If combined with the '\-\-full-path' option, '**' can be used to match multiple path components.
|
||||
.TP
|
||||
.B \-\-regex
|
||||
Perform a regular-expression based seach (default). This can be used to override --glob.
|
||||
Perform a regular-expression based search (default). This can be used to override --glob.
|
||||
.TP
|
||||
.B \-F, \-\-fixed\-strings
|
||||
Treat the pattern as a literal string instead of a regular expression.
|
||||
Treat the pattern as a literal string instead of a regular expression. Note that this also
|
||||
performs substring comparison. If you want to match on an exact filename, consider using '\-\-glob'.
|
||||
.TP
|
||||
.BI "\-\-and " pattern
|
||||
Add additional required search patterns, all of which must be matched. Multiple additional
|
||||
patterns can be specified. The patterns are regular expressions, unless '\-\-glob'
|
||||
or '\-\-fixed\-strings' is used.
|
||||
.TP
|
||||
.B \-a, \-\-absolute\-path
|
||||
Shows the full path starting from the root as opposed to relative paths.
|
||||
The flag can be overridden with '--relative-path'.
|
||||
.TP
|
||||
.B \-l, \-\-list\-details
|
||||
Use a detailed listing format like 'ls -l'. This is basically an alias
|
||||
|
@ -73,7 +127,7 @@ sort order.
|
|||
.TP
|
||||
.B \-L, \-\-follow
|
||||
By default, fd does not descend into symlinked directories. Using this flag, symbolic links are
|
||||
also traversed.
|
||||
also traversed. The flag can be overridden with '--no-follow'.
|
||||
.TP
|
||||
.B \-p, \-\-full\-path
|
||||
By default, the search pattern is only matched against the filename (or directory name). Using
|
||||
|
@ -91,10 +145,32 @@ Limit the number of search results to 'count' and quit immediately.
|
|||
.B \-1
|
||||
Limit the search to a single result and quit immediately. This is an alias for '--max-results=1'.
|
||||
.TP
|
||||
.B \-q, \-\-quiet
|
||||
When the flag is present, the program does not print anything and will instead exit with a code of 0 if there is at least one search result.
|
||||
Otherwise, the exit code will be 1.
|
||||
This is mainly for usage in scripts and can be faster than checking for output because the search can be stopped early after the first match.
|
||||
.B \-\-has\-results
|
||||
can be used as an alias.
|
||||
.TP
|
||||
.B \-\-show-errors
|
||||
Enable the display of filesystem errors for situations such as insufficient
|
||||
permissions or dead symlinks.
|
||||
.TP
|
||||
.B \-\-strip-cwd-prefix [when]
|
||||
By default, relative paths are prefixed with './' when -x/--exec,
|
||||
-X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
path starting with '-' being treated as a command line option. Use
|
||||
this flag to change this behavior. If this flag is used without a value,
|
||||
it is equivalent to passing "always". Possible values are:
|
||||
.RS
|
||||
.IP never
|
||||
Never strip the ./ at the beginning of paths
|
||||
.IP always
|
||||
Always strip the ./ at the beginning of paths
|
||||
.IP auto
|
||||
Only strip if used with --exec, --exec-batch, or --print0. That is, it resets to the default behavior.
|
||||
.RE
|
||||
.TP
|
||||
.B \-\-one\-file\-system, \-\-mount, \-\-xdev
|
||||
By default, fd will traverse the file system tree as far as other options dictate. With this flag, fd ensures that it does not descend into a different file system than the one it started in. Comparable to the -mount or -xdev filters of find(1).
|
||||
.TP
|
||||
|
@ -115,38 +191,73 @@ Only show search results starting at the given depth. See also: '--max-depth' an
|
|||
.BI "\-\-exact\-depth " d
|
||||
Only show search results at the exact given depth. This is an alias for '--min-depth <depth> --max-depth <depth>'.
|
||||
.TP
|
||||
.B \-\-prune
|
||||
Do not traverse into matching directories.
|
||||
.TP
|
||||
.BI "\-t, \-\-type " filetype
|
||||
Filter search by type:
|
||||
.RS
|
||||
.IP "f, file"
|
||||
regular files
|
||||
.IP "d, directory"
|
||||
.IP "d, dir, directory"
|
||||
directories
|
||||
.IP "l, symlink"
|
||||
symbolic links
|
||||
.IP "x, executable"
|
||||
executable (files)
|
||||
.IP "e, empty"
|
||||
empty files or directories
|
||||
.IP "b, block-device"
|
||||
block devices
|
||||
.IP "c, char-device"
|
||||
character devices
|
||||
.IP "s, socket"
|
||||
sockets
|
||||
.IP "p, pipe"
|
||||
named pipes (FIFOs)
|
||||
.IP "x, executable"
|
||||
executable (files)
|
||||
.IP "e, empty"
|
||||
empty files or directories
|
||||
.RE
|
||||
|
||||
.RS
|
||||
This option can be used repeatedly to allow for multiple file types.
|
||||
This option can be specified more than once to include multiple file types.
|
||||
Searching for '--type file --type symlink' will show both regular files as well as
|
||||
symlinks. Note that the 'executable' and 'empty' filters work differently: '--type
|
||||
executable' implies '--type file' by default. And '--type empty' searches for
|
||||
empty files and directories, unless either '--type file' or '--type directory' is
|
||||
specified in addition.
|
||||
|
||||
Examples:
|
||||
- Only search for files:
|
||||
fd --type file …
|
||||
fd -tf …
|
||||
- Find both files and symlinks
|
||||
fd --type file --type symlink …
|
||||
fd -tf -tl …
|
||||
- Find executable files:
|
||||
fd --type executable
|
||||
fd -tx
|
||||
- Find empty files:
|
||||
fd --type empty --type file
|
||||
fd -te -tf
|
||||
- Find empty directories:
|
||||
fd --type empty --type directory
|
||||
fd -te -td
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-e, \-\-extension " ext
|
||||
Filter search results by file extension
|
||||
.IR ext .
|
||||
This option can be used repeatedly to allow for multiple possible file extensions.
|
||||
|
||||
If you want to search for files without extension, you can use the regex '^[^.]+$'
|
||||
as a normal search pattern.
|
||||
.TP
|
||||
.BI "\-E, \-\-exclude " pattern
|
||||
Exclude files/directories that match the given glob pattern.
|
||||
This overrides any other ignore logic.
|
||||
Multiple exclude patterns can be specified.
|
||||
Examples:
|
||||
\-\-exclude '*.pyc'
|
||||
\-\-exclude node_modules
|
||||
.TP
|
||||
.BI "\-\-ignore-file " path
|
||||
Add a custom ignore-file in '.gitignore' format.
|
||||
|
@ -176,6 +287,8 @@ Limit results based on the size of files using the format
|
|||
file size must be greater than or equal to this
|
||||
.IP '-'
|
||||
file size must be less than or equal to this
|
||||
.P
|
||||
If neither '+' nor '-' is specified, file size must be exactly equal to this.
|
||||
.IP 'NUM'
|
||||
The numeric size (e.g. 500)
|
||||
.IP 'UNIT'
|
||||
|
@ -204,24 +317,40 @@ tebibytes
|
|||
.RE
|
||||
.TP
|
||||
.BI "\-\-changed-within " date|duration
|
||||
Filter results based on the file modification time. The argument can be provided as a specific
|
||||
point in time (\fIYYYY-MM-DD HH:MM:SS\fR) or as a duration (\fI10h, 1d, 35min\fR).
|
||||
.B --change-newer-than
|
||||
can be used as an alias.
|
||||
Filter results based on the file modification time.
|
||||
Files with modification times greater than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
\fB\-\-change-newer-than\fR,
|
||||
.B --newer
|
||||
or
|
||||
.B --changed-after
|
||||
can be used as aliases.
|
||||
|
||||
Examples:
|
||||
\-\-changed-within 2weeks
|
||||
\-\-change-newer-than "2018-10-27 10:00:00"
|
||||
\-\-newer 2018-10-27
|
||||
\-\-changed-after @1704067200
|
||||
.TP
|
||||
.BI "\-\-changed-before " date|duration
|
||||
Filter results based on the file modification time. The argument can be provided as a specific
|
||||
point in time (\fIYYYY-MM-DD HH:MM:SS\fR) or as a duration (\fI10h, 1d, 35min\fR).
|
||||
Filter results based on the file modification time.
|
||||
Files with modification times less than the argument will be returned.
|
||||
The argument can be provided as a duration (\fI10h, 1d, 35min\fR) or as a specific point
|
||||
in time as full RFC3339 format with time zone, as a date or datetime in the
|
||||
local time zone (\fIYYYY-MM-DD\fR or \fIYYYY-MM-DD HH:MM:SS\fR), or as the prefix '@'
|
||||
followed by the number of seconds since the Unix epoch (@[0-9]+).
|
||||
.B --change-older-than
|
||||
can be used as an alias.
|
||||
or
|
||||
.B --older
|
||||
can be used as aliases.
|
||||
|
||||
Examples:
|
||||
\-\-changed-before "2018-10-27 10:00:00"
|
||||
\-\-change-older-than 2weeks
|
||||
\-\-older @1704067200
|
||||
.TP
|
||||
.BI "-o, \-\-owner " [user][:group]
|
||||
Filter files by their user and/or group. Format: [(user|uid)][:(group|gid)]. Either side
|
||||
|
@ -232,13 +361,27 @@ Examples:
|
|||
\-\-owner :students
|
||||
\-\-owner "!john:students"
|
||||
.TP
|
||||
.BI "\-x, \-\-exec " command
|
||||
Execute
|
||||
.I command
|
||||
for each search result. The following placeholders are substituted by a path derived from the current search result:
|
||||
.BI "\-\-base\-directory " path
|
||||
Change the current working directory of fd to the provided path. This means that search results will
|
||||
be shown with respect to the given base path. Note that relative paths which are passed to fd via the
|
||||
positional \fIpath\fR argument or the \fB\-\-search\-path\fR option will also be resolved relative to
|
||||
this directory.
|
||||
.TP
|
||||
.BI "\-\-path\-separator " separator
|
||||
Set the path separator to use when printing file paths. The default is the OS-specific separator
|
||||
('/' on Unix, '\\' on Windows).
|
||||
.TP
|
||||
.BI "\-\-search\-path " search\-path
|
||||
Provide paths to search as an alternative to the positional \fIpath\fR argument. Changes the usage to
|
||||
\'fd [FLAGS/OPTIONS] \-\-search\-path PATH \-\-search\-path PATH2 [PATTERN]\'
|
||||
.TP
|
||||
.BI "\-\-format " fmt
|
||||
Specify a template string that is used for printing a line for each file found.
|
||||
|
||||
The following placeholders are substituted into the string for each file before printing:
|
||||
.RS
|
||||
.IP {}
|
||||
path
|
||||
path (of the current search result)
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
|
@ -247,41 +390,108 @@ parent directory
|
|||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
.IP {{
|
||||
literal '{' (an escape sequence)
|
||||
.IP }}
|
||||
literal '}' (an escape sequence)
|
||||
.P
|
||||
Notice that you can use "{{" and "}}" to escape "{" and "}" respectively, which is especially
|
||||
useful if you need to include the literal text of one of the above placeholders.
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-x, \-\-exec " command
|
||||
.RS
|
||||
Execute
|
||||
.I command
|
||||
for each search result in parallel (use --threads=1 for sequential command execution).
|
||||
|
||||
Note that all subsequent positional arguments are considered to be arguments to the
|
||||
.I command
|
||||
- not to fd.
|
||||
It is therefore recommended to place the \-x/\-\-exec option last. Alternatively, you can supply
|
||||
a ';' argument to end the argument list and continue with more fd options.
|
||||
Most shells require ';' to be escaped: '\\;'.
|
||||
This option can be specified multiple times, in which case all commands are run for each
|
||||
file found, in the order they are provided. In that case, you must supply a ';' argument for
|
||||
all but the last commands.
|
||||
|
||||
If parallelism is enabled, the order commands will be executed in is non-deterministic. And even with
|
||||
--threads=1, the order is determined by the operating system and may not be what you expect. Thus, it is
|
||||
recommended that you don't rely on any ordering of the results.
|
||||
|
||||
Before executing the command, any placeholder patterns in the command are replaced with the
|
||||
corresponding values for the current file. The same placeholders are used as in the "\-\-format"
|
||||
option.
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
Examples:
|
||||
|
||||
- find all *.zip files and unzip them:
|
||||
|
||||
fd -e zip -x unzip
|
||||
|
||||
- find *.h and *.cpp files and run "clang-format -i .." for each of them:
|
||||
|
||||
fd -e h -e cpp -x clang-format -i
|
||||
|
||||
- Convert all *.jpg files to *.png files:
|
||||
|
||||
fd -e jpg -x convert {} {.}.png
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-X, \-\-exec-batch " command
|
||||
.RS
|
||||
Execute
|
||||
.I command
|
||||
with all search results at once.
|
||||
A single occurence of the following placeholders is authorized and substituted by the paths derived from the search results before the command is executed:
|
||||
.RS
|
||||
.IP {}
|
||||
path
|
||||
.IP {/}
|
||||
basename
|
||||
.IP {//}
|
||||
parent directory
|
||||
.IP {.}
|
||||
path without file extension
|
||||
.IP {/.}
|
||||
basename without file extension
|
||||
once, with all search results as arguments.
|
||||
|
||||
The order of the arguments is non-deterministic and should not be relied upon.
|
||||
|
||||
This uses the same placeholders as "\-\-format" and "\-\-exec", but instead of expanding
|
||||
once per command invocation each argument containing a placeholder is expanding for every
|
||||
file in a batch and passed as separate arguments.
|
||||
|
||||
If no placeholder is present, an implicit "{}" at the end is assumed.
|
||||
|
||||
Like \-\-exec, this can be used multiple times, in which case each command will be run in
|
||||
the order given.
|
||||
|
||||
Examples:
|
||||
|
||||
- Find all test_*.py files and open them in your favorite editor:
|
||||
|
||||
fd -g 'test_*.py' -X vim
|
||||
|
||||
Note that this executes a single "vim" process with all search results as arguments.
|
||||
|
||||
- Find all *.rs files and count the lines with "wc -l ...":
|
||||
|
||||
fd -e rs -X wc -l
|
||||
.RE
|
||||
.TP
|
||||
.BI "\-\-batch-size " size
|
||||
Maximum number of arguments to pass to the command given with -X. If the number of results is
|
||||
greater than the given size, the command given with -X is run again with remaining arguments. A
|
||||
batch size of zero means there is no limit (default), but note that batching might still happen
|
||||
due to OS restrictions on the maximum length of command lines.
|
||||
.SH PATTERN SYNTAX
|
||||
The regular expression syntax used by fd is documented here:
|
||||
|
||||
.UR https://docs.rs/regex/1.0.0/regex/#syntax
|
||||
.UE
|
||||
https://docs.rs/regex/1.0.0/regex/#syntax
|
||||
|
||||
The glob syntax is documented here:
|
||||
|
||||
.UR https://docs.rs/globset/#syntax
|
||||
.UE
|
||||
https://docs.rs/globset/#syntax
|
||||
.SH ENVIRONMENT
|
||||
.TP
|
||||
.B LS_COLORS
|
||||
Determines how to colorize search results, see
|
||||
.BR dircolors (1) .
|
||||
.TP
|
||||
.B NO_COLOR
|
||||
Disables colorized output.
|
||||
.TP
|
||||
.B XDG_CONFIG_HOME, HOME
|
||||
Used to locate the global ignore file. If
|
||||
.B XDG_CONFIG_HOME
|
||||
|
@ -289,6 +499,17 @@ is set, use
|
|||
.IR $XDG_CONFIG_HOME/fd/ignore .
|
||||
Otherwise, use
|
||||
.IR $HOME/.config/fd/ignore .
|
||||
.SH FILES
|
||||
.TP
|
||||
.B .fdignore
|
||||
This file works similarly to a .gitignore file anywhere in the searched tree and specifies patterns
|
||||
that should be excluded from the search. However, this file is specific to fd, and will be used even
|
||||
if the --no-ignore-vcs option is used.
|
||||
.TP
|
||||
.B $XDG_CONFIG_HOME/fd/ignore
|
||||
Global ignore file. Unless ignore mode is turned off (such as with --no-ignore)
|
||||
ignore entries in this file will be ignored, as if it was an .fdignore file in the
|
||||
current directory.
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
.RI "Find files and directories that match the pattern '" needle "':"
|
||||
|
@ -297,10 +518,22 @@ $ fd needle
|
|||
.RI "Start a search in a given directory (" /var/log "):"
|
||||
$ fd nginx /var/log
|
||||
.TP
|
||||
.RI "Find all Python files (all files with the extention " .py ") in the current directory:"
|
||||
.RI "Find all Python files (all files with the extension " .py ") in the current directory:"
|
||||
$ fd -e py
|
||||
.TP
|
||||
.RI "Open all search results with vim:"
|
||||
$ fd pattern -X vim
|
||||
.SH Tips and Tricks
|
||||
.IP \[bu]
|
||||
If you add ".git/" to your global ignore file ($XDG_CONFIG_HOME/fd/ignore), then
|
||||
".git" folders will be ignored by default, even when the --hidden option is used.
|
||||
.IP \[bu]
|
||||
You can use a shell alias or a wrapper script in order to pass desired flags to fd
|
||||
by default. For example if you do not like the default behavior of respecting gitignore,
|
||||
you can use
|
||||
`alias fd="/usr/bin/fd --no-ignore-vcs"`
|
||||
in your .bashrc to create an alias for fd that doesn't ignore git files by default.
|
||||
.SH BUGS
|
||||
Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues
|
||||
.SH SEE ALSO
|
||||
.BR find (1)
|
||||
|
|
Binary file not shown.
After Width: | Height: | Size: 9.9 KiB |
|
@ -0,0 +1,161 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<!-- Created with Inkscape (http://www.inkscape.org/) -->
|
||||
|
||||
<svg
|
||||
width="66mm"
|
||||
height="66mm"
|
||||
viewBox="0 0 66 66.000001"
|
||||
version="1.1"
|
||||
id="svg5"
|
||||
inkscape:version="1.1 (c4e8f9ed74, 2021-05-24)"
|
||||
sodipodi:docname="logo.svg"
|
||||
inkscape:export-filename="/home/shark/Informatik/rust/fd/doc/logo.png"
|
||||
inkscape:export-xdpi="192.42"
|
||||
inkscape:export-ydpi="192.42"
|
||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlns:svg="http://www.w3.org/2000/svg">
|
||||
<sodipodi:namedview
|
||||
id="namedview7"
|
||||
pagecolor="#ffffff"
|
||||
bordercolor="#999999"
|
||||
borderopacity="1"
|
||||
inkscape:pageshadow="0"
|
||||
inkscape:pageopacity="1"
|
||||
inkscape:pagecheckerboard="0"
|
||||
inkscape:document-units="mm"
|
||||
showgrid="false"
|
||||
inkscape:zoom="2.1795515"
|
||||
inkscape:cx="114.47309"
|
||||
inkscape:cy="176.18304"
|
||||
inkscape:window-width="1920"
|
||||
inkscape:window-height="1175"
|
||||
inkscape:window-x="1920"
|
||||
inkscape:window-y="0"
|
||||
inkscape:window-maximized="1"
|
||||
inkscape:current-layer="layer1"
|
||||
showguides="false"
|
||||
inkscape:guide-bbox="true"
|
||||
inkscape:snap-global="false"
|
||||
fit-margin-top="2"
|
||||
fit-margin-left="2"
|
||||
fit-margin-right="2"
|
||||
fit-margin-bottom="2"
|
||||
lock-margins="true">
|
||||
<sodipodi:guide
|
||||
position="26.228232,26.126763"
|
||||
orientation="0,-1"
|
||||
id="guide47826" />
|
||||
<sodipodi:guide
|
||||
position="25.799494,2.3628924"
|
||||
orientation="0,-1"
|
||||
id="guide47828" />
|
||||
</sodipodi:namedview>
|
||||
<defs
|
||||
id="defs2" />
|
||||
<g
|
||||
inkscape:label="Ebene 1"
|
||||
inkscape:groupmode="layer"
|
||||
id="layer1"
|
||||
transform="translate(-21.358009,-148.28012)">
|
||||
<g
|
||||
id="g66267"
|
||||
transform="matrix(0.84959471,0,0,0.84959471,7.9920783,43.351816)">
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:29.7126px;line-height:0;font-family:'Fira Sans Condensed';-inkscape-font-specification:'Fira Sans Condensed, ';white-space:pre;inline-size:37.3715;fill:#e5e5e5;fill-opacity:1;stroke-width:0.742816"
|
||||
x="50.647034"
|
||||
y="173.19841"
|
||||
id="text50653"
|
||||
transform="matrix(1.0604862,0,0,1.0604862,-3.3101428,-10.150043)"><tspan
|
||||
x="50.647034"
|
||||
y="173.19841"
|
||||
id="tspan66635"><tspan
|
||||
style="font-family:'Source Code Pro';-inkscape-font-specification:'Source Code Pro'"
|
||||
id="tspan66633">fd</tspan></tspan></text>
|
||||
<text
|
||||
xml:space="preserve"
|
||||
style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-size:29.7126px;line-height:0;font-family:'Fira Sans Condensed';-inkscape-font-specification:'Fira Sans Condensed, ';white-space:pre;inline-size:37.3715;fill:#00ccff;fill-opacity:0.996078;stroke-width:0.742816"
|
||||
x="50.647034"
|
||||
y="173.19841"
|
||||
id="text1244"
|
||||
transform="matrix(1.0604862,0,0,1.0604862,-2.8008599,-9.6407599)"><tspan
|
||||
x="50.647034"
|
||||
y="173.19841"
|
||||
id="tspan66639"><tspan
|
||||
style="font-family:'Source Code Pro';-inkscape-font-specification:'Source Code Pro'"
|
||||
id="tspan66637">fd</tspan></tspan></text>
|
||||
<g
|
||||
id="g47824"
|
||||
transform="translate(0.1724878,-0.35338542)">
|
||||
<g
|
||||
id="g42041">
|
||||
<path
|
||||
style="fill:none;stroke:#939dac;stroke-width:1.065;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 28.209616,155.51329 v 15.68758 H 40.83001"
|
||||
id="path39763"
|
||||
sodipodi:nodetypes="ccc" />
|
||||
<path
|
||||
style="fill:#b7bec8;stroke:#939dac;stroke-width:1.065;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="M 27.869464,161.83767 H 40.261291"
|
||||
id="path39765" />
|
||||
</g>
|
||||
<g
|
||||
id="g41945"
|
||||
transform="translate(-1.0583333)">
|
||||
<path
|
||||
style="fill:#0088aa;fill-opacity:0.993797;stroke:none;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 42.164961,159.42939 -1.554274,-1.89462 -1.975227,-0.005 c -0.941841,0.014 -1.165466,0.27232 -1.14085,2.88812 z"
|
||||
id="path40006"
|
||||
sodipodi:nodetypes="ccccc" />
|
||||
<rect
|
||||
style="fill:#01ccff;fill-opacity:1;stroke:none;stroke-width:2.3;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0.80126;paint-order:stroke fill markers"
|
||||
id="rect39949"
|
||||
width="9.4925022"
|
||||
height="6.2080379"
|
||||
x="37.492516"
|
||||
y="158.82776"
|
||||
ry="0.90871465" />
|
||||
</g>
|
||||
<g
|
||||
id="g41951"
|
||||
transform="translate(-1.0583334,9.3665773)">
|
||||
<path
|
||||
style="fill:#373e48;fill-opacity:0.993797;stroke:none;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 42.164961,159.42939 -1.554274,-1.89462 -1.975227,-0.005 c -0.941841,0.014 -1.165466,0.27232 -1.14085,2.88812 z"
|
||||
id="path41947"
|
||||
sodipodi:nodetypes="ccccc" />
|
||||
<rect
|
||||
style="fill:#535d6c;fill-opacity:0.993797;stroke:none;stroke-width:2.3;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0.80126;paint-order:stroke fill markers"
|
||||
id="rect41949"
|
||||
width="9.4925022"
|
||||
height="6.2080379"
|
||||
x="37.492516"
|
||||
y="158.82776"
|
||||
ry="0.90871465" />
|
||||
</g>
|
||||
<g
|
||||
id="g41957"
|
||||
transform="translate(-14.306994,-6.8962642)">
|
||||
<path
|
||||
style="fill:#373e48;fill-opacity:0.993797;stroke:none;stroke-width:0.265;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
|
||||
d="m 42.164961,159.42939 -1.554274,-1.89462 -1.975227,-0.005 c -0.941841,0.014 -1.165466,0.27232 -1.14085,2.88812 z"
|
||||
id="path41953"
|
||||
sodipodi:nodetypes="ccccc" />
|
||||
<rect
|
||||
style="fill:#535d6c;fill-opacity:0.993797;stroke:none;stroke-width:2.3;stroke-linejoin:round;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0.80126;paint-order:stroke fill markers"
|
||||
id="rect41955"
|
||||
width="9.4925022"
|
||||
height="6.2080379"
|
||||
x="37.492516"
|
||||
y="158.82776"
|
||||
ry="0.90871465" />
|
||||
</g>
|
||||
</g>
|
||||
<g
|
||||
id="g65006"
|
||||
transform="matrix(0.55302761,0,0,0.55302761,66.463548,117.45819)" />
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
After Width: | Height: | Size: 6.8 KiB |
|
@ -0,0 +1,66 @@
|
|||
# Release checklist
|
||||
|
||||
This file can be used as-is, or copied into the GitHub PR description which includes
|
||||
necessary changes for the upcoming release.
|
||||
|
||||
## Version bump
|
||||
|
||||
- [ ] Create a new branch for the required changes for this release.
|
||||
- [ ] Update version in `Cargo.toml`. Run `cargo build` to update `Cargo.lock`.
|
||||
Make sure to `git add` the `Cargo.lock` changes as well.
|
||||
- [ ] Find the current min. supported Rust version by running
|
||||
`grep rust-version Cargo.toml`.
|
||||
- [ ] Update the `fd` version and the min. supported Rust version in `README.md`.
|
||||
- [ ] Update `CHANGELOG.md`. Change the heading of the *"Upcoming release"* section
|
||||
to the version of this release.
|
||||
|
||||
## Pre-release checks and updates
|
||||
|
||||
- [ ] Install the latest version (`cargo install --locked -f --path .`) and make
|
||||
sure that it is available on the `PATH` (`fd --version` should show the
|
||||
new version).
|
||||
- [ ] Review `-h`, `--help`, and the `man` page.
|
||||
- [ ] Run `fd -h` and copy the output to the *"Command-line options"* section in
|
||||
the README
|
||||
- [ ] Push all changes and wait for CI to succeed (before continuing with the
|
||||
next section).
|
||||
- [ ] Optional: manually test the new features and command-line options described
|
||||
in the `CHANGELOG.md`.
|
||||
- [ ] Run `cargo publish --dry-run` to make sure that it will succeed later
|
||||
(after creating the GitHub release).
|
||||
|
||||
## Release
|
||||
|
||||
- [ ] Merge your release branch (should be a fast-forward merge).
|
||||
- [ ] Create a tag and push it: `git tag vX.Y.Z; git push origin tag vX.Y.Z`.
|
||||
This will trigger the deployment via GitHub Actions.
|
||||
REMINDER: If your `origin` is a fork, don't forget to push to e.g. `upstream`
|
||||
instead.
|
||||
- [ ] Go to https://github.com/sharkdp/fd/releases/new to create the new
|
||||
release. Select the new tag and also use it as the release title. For the
|
||||
release notes, copy the corresponding section from `CHANGELOG.md` and
|
||||
possibly add additional remarks for package maintainers.
|
||||
Publish the release.
|
||||
- [ ] Check if the binary deployment works (archives and Debian packages should
|
||||
appear when the CI run *for the Git tag* has finished).
|
||||
- [ ] Publish to crates.io by running `cargo publish` in a *clean* repository.
|
||||
One way to do this is to clone a fresh copy.
|
||||
|
||||
## Post-release
|
||||
|
||||
- [ ] Prepare a new *"Upcoming release"* section at the top of `CHANGELOG.md`.
|
||||
Put this at the top:
|
||||
|
||||
# Upcoming release
|
||||
|
||||
## Features
|
||||
|
||||
|
||||
## Bugfixes
|
||||
|
||||
|
||||
## Changes
|
||||
|
||||
|
||||
## Other
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
#!/bin/bash
|
||||
# Designed to be executed via svg-term from the fd root directory:
|
||||
# svg-term --command="bash doc/screencast.sh" --out doc/screencast.svg --padding=10
|
||||
# Then run this (workaround for #1003):
|
||||
# sed -i '' 's/<text/<text font-size="1.67"/g' doc/screencast.svg
|
||||
set -e
|
||||
set -u
|
||||
|
||||
|
@ -20,7 +22,7 @@ enter() {
|
|||
}
|
||||
|
||||
prompt() {
|
||||
printf '%b ' $PROMPT | pv -q
|
||||
printf '%b ' "$PROMPT" | pv -q
|
||||
}
|
||||
|
||||
type() {
|
||||
|
@ -34,9 +36,11 @@ main() {
|
|||
|
||||
enter "fd app"
|
||||
|
||||
enter "fd sh"
|
||||
enter "fd fi"
|
||||
|
||||
enter "fd sh --type f"
|
||||
enter "fd fi --type f"
|
||||
|
||||
enter "fd --type d"
|
||||
|
||||
enter "fd -e md"
|
||||
|
||||
|
|
File diff suppressed because one or more lines are too long
Before Width: | Height: | Size: 115 KiB After Width: | Height: | Size: 124 KiB |
|
@ -0,0 +1,12 @@
|
|||
## Sponsors
|
||||
|
||||
`fd` development is sponsored by many individuals and companies. Thank you very much!
|
||||
|
||||
Please note, that being sponsored does not affect the individuality of the `fd`
|
||||
project or affect the maintainers' actions in any way.
|
||||
We remain impartial and continue to assess pull requests solely on merit - the
|
||||
features added, bugs solved, and effect on the overall complexity of the code.
|
||||
No issue will have a different priority based on sponsorship status of the
|
||||
reporter.
|
||||
|
||||
Contributions from anybody are most welcomed, please see our [`CONTRIBUTING.md`](../CONTRIBUTING.md) guide.
|
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 7.2 KiB |
|
@ -0,0 +1 @@
|
|||
# Defaults are used
|
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/bash
|
||||
|
||||
set -eu
|
||||
|
||||
# This script automates the "Version bump" section
|
||||
|
||||
version="$1"
|
||||
|
||||
if [[ -z $version ]]; then
|
||||
echo "Usage: must supply version as first argument" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
git switch -C "release-$version"
|
||||
sed -i -e "0,/^\[badges/{s/^version =.*/version = \"$version\"/}" Cargo.toml
|
||||
|
||||
msrv="$(grep -F rust-version Cargo.toml | sed -e 's/^rust-version= "\(.*\)"/\1/')"
|
||||
|
||||
sed -i -e "s/Note that rust version \*[0-9.]+\* or later/Note that rust version *$msrv* or later/" README.md
|
||||
|
||||
sed -i -e "s/^# Upcoming release/# $version/" CHANGELOG.md
|
||||
|
572
src/app.rs
572
src/app.rs
|
@ -1,572 +0,0 @@
|
|||
use clap::{crate_version, App, AppSettings, Arg};
|
||||
|
||||
pub fn build_app() -> App<'static, 'static> {
|
||||
let mut app = App::new("fd")
|
||||
.version(crate_version!())
|
||||
.usage("fd [FLAGS/OPTIONS] [<pattern>] [<path>...]")
|
||||
.setting(AppSettings::ColoredHelp)
|
||||
.setting(AppSettings::DeriveDisplayOrder)
|
||||
.after_help(
|
||||
"Note: `fd -h` prints a short and concise overview while `fd --help` gives all \
|
||||
details.",
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("hidden")
|
||||
.long("hidden")
|
||||
.short("H")
|
||||
.overrides_with("hidden")
|
||||
.help("Search hidden files and directories")
|
||||
.long_help(
|
||||
"Include hidden directories and files in the search results (default: \
|
||||
hidden files and directories are skipped). Files and directories are \
|
||||
considered to be hidden if their name starts with a `.` sign (dot).",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("no-ignore")
|
||||
.long("no-ignore")
|
||||
.short("I")
|
||||
.overrides_with("no-ignore")
|
||||
.help("Do not respect .(git|fd)ignore files")
|
||||
.long_help(
|
||||
"Show search results from files and directories that would otherwise be \
|
||||
ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("no-ignore-vcs")
|
||||
.long("no-ignore-vcs")
|
||||
.overrides_with("no-ignore-vcs")
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Show search results from files and directories that would otherwise be \
|
||||
ignored by '.gitignore' files.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("no-global-ignore-file")
|
||||
.long("no-global-ignore-file")
|
||||
.hidden(true)
|
||||
.long_help("Do not respect the global ignore file."),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("rg-alias-hidden-ignore")
|
||||
.short("u")
|
||||
.long("unrestricted")
|
||||
.multiple(true)
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Alias for '--no-ignore'. Can be repeated. '-uu' is an alias for \
|
||||
'--no-ignore --hidden'.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("case-sensitive")
|
||||
.long("case-sensitive")
|
||||
.short("s")
|
||||
.overrides_with_all(&["ignore-case", "case-sensitive"])
|
||||
.help("Case-sensitive search (default: smart case)")
|
||||
.long_help(
|
||||
"Perform a case-sensitive search. By default, fd uses case-insensitive \
|
||||
searches, unless the pattern contains an uppercase character (smart \
|
||||
case).",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("ignore-case")
|
||||
.long("ignore-case")
|
||||
.short("i")
|
||||
.overrides_with_all(&["case-sensitive", "ignore-case"])
|
||||
.help("Case-insensitive search (default: smart case)")
|
||||
.long_help(
|
||||
"Perform a case-insensitive search. By default, fd uses case-insensitive \
|
||||
searches, unless the pattern contains an uppercase character (smart \
|
||||
case).",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("glob")
|
||||
.long("glob")
|
||||
.short("g")
|
||||
.conflicts_with("fixed-strings")
|
||||
.overrides_with("glob")
|
||||
.help("Glob-based search (default: regular expression)")
|
||||
.long_help("Perform a glob-based search instead of a regular expression search."),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("regex")
|
||||
.long("regex")
|
||||
.overrides_with_all(&["glob", "regex"])
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Perform a regular-expression based search (default). This can be used to \
|
||||
override --glob.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("fixed-strings")
|
||||
.long("fixed-strings")
|
||||
.short("F")
|
||||
.alias("literal")
|
||||
.overrides_with("fixed-strings")
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Treat the pattern as a literal string instead of a regular expression.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("absolute-path")
|
||||
.long("absolute-path")
|
||||
.short("a")
|
||||
.overrides_with("absolute-path")
|
||||
.help("Show absolute instead of relative paths")
|
||||
.long_help(
|
||||
"Shows the full path starting from the root as opposed to relative paths.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("list-details")
|
||||
.long("list-details")
|
||||
.short("l")
|
||||
.conflicts_with("absolute-path")
|
||||
.help("Use a long listing format with file metadata")
|
||||
.long_help(
|
||||
"Use a detailed listing format like 'ls -l'. This is basically an alias \
|
||||
for '--exec-batch ls -l' with some additional 'ls' options. This can be \
|
||||
used to see more metadata, to show symlink targets and to achieve a \
|
||||
deterministic sort order.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("follow")
|
||||
.long("follow")
|
||||
.short("L")
|
||||
.alias("dereference")
|
||||
.overrides_with("follow")
|
||||
.help("Follow symbolic links")
|
||||
.long_help(
|
||||
"By default, fd does not descend into symlinked directories. Using this \
|
||||
flag, symbolic links are also traversed.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("full-path")
|
||||
.long("full-path")
|
||||
.short("p")
|
||||
.overrides_with("full-path")
|
||||
.help("Search full path (default: file-/dirname only)")
|
||||
.long_help(
|
||||
"By default, the search pattern is only matched against the filename (or \
|
||||
directory name). Using this flag, the pattern is matched against the \
|
||||
full path.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("null_separator")
|
||||
.long("print0")
|
||||
.short("0")
|
||||
.overrides_with("print0")
|
||||
.conflicts_with("list-details")
|
||||
.help("Separate results by the null character")
|
||||
.long_help(
|
||||
"Separate search results by the null character (instead of newlines). \
|
||||
Useful for piping results to 'xargs'.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("max-depth")
|
||||
.long("max-depth")
|
||||
.short("d")
|
||||
.takes_value(true)
|
||||
.value_name("depth")
|
||||
.help("Set maximum search depth (default: none)")
|
||||
.long_help(
|
||||
"Limit the directory traversal to a given depth. By default, there is no \
|
||||
limit on the search depth.",
|
||||
),
|
||||
)
|
||||
// support --maxdepth as well, for compatibility with rg
|
||||
.arg(
|
||||
Arg::with_name("rg-depth")
|
||||
.long("maxdepth")
|
||||
.hidden(true)
|
||||
.takes_value(true)
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("min-depth")
|
||||
.long("min-depth")
|
||||
.takes_value(true)
|
||||
.value_name("depth")
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Only show search results starting at the given depth. \
|
||||
See also: '--max-depth' and '--exact-depth'",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("exact-depth")
|
||||
.long("exact-depth")
|
||||
.takes_value(true)
|
||||
.value_name("depth")
|
||||
.hidden_short_help(true)
|
||||
.conflicts_with_all(&["max-depth", "min-depth"])
|
||||
.long_help(
|
||||
"Only show search results at the exact given depth. This is an alias for \
|
||||
'--min-depth <depth> --max-depth <depth>'.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("file-type")
|
||||
.long("type")
|
||||
.short("t")
|
||||
.multiple(true)
|
||||
.number_of_values(1)
|
||||
.takes_value(true)
|
||||
.value_name("filetype")
|
||||
.possible_values(&[
|
||||
"f",
|
||||
"file",
|
||||
"d",
|
||||
"directory",
|
||||
"l",
|
||||
"symlink",
|
||||
"x",
|
||||
"executable",
|
||||
"e",
|
||||
"empty",
|
||||
"s",
|
||||
"socket",
|
||||
"p",
|
||||
"pipe",
|
||||
])
|
||||
.hide_possible_values(true)
|
||||
.help(
|
||||
"Filter by type: file (f), directory (d), symlink (l),\nexecutable (x), \
|
||||
empty (e), socket (s), pipe (p)",
|
||||
)
|
||||
.long_help(
|
||||
"Filter the search by type (multiple allowable filetypes can be specified):\n \
|
||||
'f' or 'file': regular files\n \
|
||||
'd' or 'directory': directories\n \
|
||||
'l' or 'symlink': symbolic links\n \
|
||||
'x' or 'executable': executables\n \
|
||||
'e' or 'empty': empty files or directories\n \
|
||||
's' or 'socket': socket\n \
|
||||
'p' or 'pipe': named pipe (FIFO)",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("extension")
|
||||
.long("extension")
|
||||
.short("e")
|
||||
.multiple(true)
|
||||
.number_of_values(1)
|
||||
.takes_value(true)
|
||||
.value_name("ext")
|
||||
.help("Filter by file extension")
|
||||
.long_help(
|
||||
"(Additionally) filter search results by their file extension. Multiple \
|
||||
allowable file extensions can be specified.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("exec")
|
||||
.long("exec")
|
||||
.short("x")
|
||||
.min_values(1)
|
||||
.allow_hyphen_values(true)
|
||||
.value_terminator(";")
|
||||
.value_name("cmd")
|
||||
.conflicts_with("list-details")
|
||||
.help("Execute a command for each search result")
|
||||
.long_help(
|
||||
"Execute a command for each search result.\n\
|
||||
All arguments following --exec are taken to be arguments to the command until the \
|
||||
argument ';' is encountered.\n\
|
||||
Each occurrence of the following placeholders is substituted by a path derived from the \
|
||||
current search result before the command is executed:\n \
|
||||
'{}': path\n \
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("exec-batch")
|
||||
.long("exec-batch")
|
||||
.short("X")
|
||||
.min_values(1)
|
||||
.allow_hyphen_values(true)
|
||||
.value_terminator(";")
|
||||
.value_name("cmd")
|
||||
.conflicts_with_all(&["exec", "list-details"])
|
||||
.help("Execute a command with all search results at once")
|
||||
.long_help(
|
||||
"Execute a command with all search results at once.\n\
|
||||
All arguments following --exec-batch are taken to be arguments to the command until the \
|
||||
argument ';' is encountered.\n\
|
||||
A single occurrence of the following placeholders is authorized and substituted by the paths derived from the \
|
||||
search results before the command is executed:\n \
|
||||
'{}': path\n \
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("exclude")
|
||||
.long("exclude")
|
||||
.short("E")
|
||||
.takes_value(true)
|
||||
.value_name("pattern")
|
||||
.number_of_values(1)
|
||||
.multiple(true)
|
||||
.help("Exclude entries that match the given glob pattern")
|
||||
.long_help(
|
||||
"Exclude files/directories that match the given glob pattern. This \
|
||||
overrides any other ignore logic. Multiple exclude patterns can be \
|
||||
specified.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("ignore-file")
|
||||
.long("ignore-file")
|
||||
.takes_value(true)
|
||||
.value_name("path")
|
||||
.number_of_values(1)
|
||||
.multiple(true)
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Add a custom ignore-file in '.gitignore' format. These files have a low \
|
||||
precedence.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("color")
|
||||
.long("color")
|
||||
.short("c")
|
||||
.takes_value(true)
|
||||
.value_name("when")
|
||||
.possible_values(&["never", "auto", "always"])
|
||||
.hide_possible_values(true)
|
||||
.help("When to use colors: never, *auto*, always")
|
||||
.long_help(
|
||||
"Declare when to use color for the pattern match output:\n \
|
||||
'auto': show colors if the output goes to an interactive console (default)\n \
|
||||
'never': do not use colorized output\n \
|
||||
'always': always use colorized output",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("threads")
|
||||
.long("threads")
|
||||
.short("j")
|
||||
.takes_value(true)
|
||||
.value_name("num")
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Set number of threads to use for searching & executing (default: number \
|
||||
of available CPU cores)",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("size")
|
||||
.long("size")
|
||||
.short("S")
|
||||
.takes_value(true)
|
||||
.number_of_values(1)
|
||||
.allow_hyphen_values(true)
|
||||
.multiple(true)
|
||||
.help("Limit results based on the size of files.")
|
||||
.long_help(
|
||||
"Limit results based on the size of files using the format <+-><NUM><UNIT>.\n \
|
||||
'+': file size must be greater than or equal to this\n \
|
||||
'-': file size must be less than or equal to this\n \
|
||||
'NUM': The numeric size (e.g. 500)\n \
|
||||
'UNIT': The units for NUM. They are not case-sensitive.\n\
|
||||
Allowed unit values:\n \
|
||||
'b': bytes\n \
|
||||
'k': kilobytes (base ten, 10^3 = 1000 bytes)\n \
|
||||
'm': megabytes\n \
|
||||
'g': gigabytes\n \
|
||||
't': terabytes\n \
|
||||
'ki': kibibytes (base two, 2^10 = 1024 bytes)\n \
|
||||
'mi': mebibytes\n \
|
||||
'gi': gibibytes\n \
|
||||
'ti': tebibytes",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("max-buffer-time")
|
||||
.long("max-buffer-time")
|
||||
.takes_value(true)
|
||||
.hidden(true)
|
||||
.long_help(
|
||||
"Amount of time in milliseconds to buffer, before streaming the search \
|
||||
results to the console.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("changed-within")
|
||||
.long("changed-within")
|
||||
.alias("change-newer-than")
|
||||
.takes_value(true)
|
||||
.value_name("date|dur")
|
||||
.number_of_values(1)
|
||||
.help("Filter by file modification time (newer than)")
|
||||
.long_help(
|
||||
"Filter results based on the file modification time. The argument can be provided \
|
||||
as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min). \
|
||||
'--change-newer-than' can be used as an alias.\n\
|
||||
Examples:\n \
|
||||
--changed-within 2weeks\n \
|
||||
--change-newer-than '2018-10-27 10:00:00'",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("changed-before")
|
||||
.long("changed-before")
|
||||
.alias("change-older-than")
|
||||
.takes_value(true)
|
||||
.value_name("date|dur")
|
||||
.number_of_values(1)
|
||||
.help("Filter by file modification time (older than)")
|
||||
.long_help(
|
||||
"Filter results based on the file modification time. The argument can be provided \
|
||||
as a specific point in time (YYYY-MM-DD HH:MM:SS) or as a duration (10h, 1d, 35min). \
|
||||
'--change-older-than' can be used as an alias.\n\
|
||||
Examples:\n \
|
||||
--changed-before '2018-10-27 10:00:00'\n \
|
||||
--change-older-than 2weeks",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("max-results")
|
||||
.long("max-results")
|
||||
.takes_value(true)
|
||||
.value_name("count")
|
||||
// We currently do not support --max-results in combination with
|
||||
// program execution because the results that come up in a --max-results
|
||||
// search are non-deterministic. Users might think that they can run the
|
||||
// same search with `--exec rm` attached and get a reliable removal of
|
||||
// the files they saw in the previous search.
|
||||
.conflicts_with_all(&["exec", "exec-batch", "list-details"])
|
||||
.hidden_short_help(true)
|
||||
.long_help("Limit the number of search results to 'count' and quit immediately."),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("max-one-result")
|
||||
.short("1")
|
||||
.hidden_short_help(true)
|
||||
.overrides_with("max-results")
|
||||
.conflicts_with_all(&["exec", "exec-batch", "list-details"])
|
||||
.long_help("Limit the search to a single result and quit immediately. \
|
||||
This is an alias for '--max-results=1'.")
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("show-errors")
|
||||
.long("show-errors")
|
||||
.hidden_short_help(true)
|
||||
.overrides_with("show-errors")
|
||||
.long_help(
|
||||
"Enable the display of filesystem errors for situations such as \
|
||||
insufficient permissions or dead symlinks.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("base-directory")
|
||||
.long("base-directory")
|
||||
.takes_value(true)
|
||||
.value_name("path")
|
||||
.number_of_values(1)
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Change the current working directory of fd to the provided path. The \
|
||||
means that search results will be shown with respect to the given base \
|
||||
path. Note that relative paths which are passed to fd via the positional \
|
||||
<path> argument or the '--search-path' option will also be resolved \
|
||||
relative to this directory.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("pattern").help(
|
||||
"the search pattern - a regular expression unless '--glob' is used (optional)",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("path-separator")
|
||||
.takes_value(true)
|
||||
.value_name("separator")
|
||||
.long("path-separator")
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"Set the path separator to use when printing file paths. The default is \
|
||||
the OS-specific separator ('/' on Unix, '\\' on Windows).",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("path")
|
||||
.multiple(true)
|
||||
.help("the root directory for the filesystem search (optional)")
|
||||
.long_help(
|
||||
"The directory where the filesystem search is rooted (optional). If \
|
||||
omitted, search the current working directory.",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("search-path")
|
||||
.long("search-path")
|
||||
.takes_value(true)
|
||||
.conflicts_with("path")
|
||||
.multiple(true)
|
||||
.hidden_short_help(true)
|
||||
.number_of_values(1)
|
||||
.long_help(
|
||||
"Provide paths to search as an alternative to the positional <path> \
|
||||
argument. Changes the usage to `fd [FLAGS/OPTIONS] --search-path <path> \
|
||||
--search-path <path2> [<pattern>]`",
|
||||
),
|
||||
);
|
||||
|
||||
if cfg!(unix) {
|
||||
app = app.arg(
|
||||
Arg::with_name("owner")
|
||||
.long("owner")
|
||||
.short("o")
|
||||
.takes_value(true)
|
||||
.value_name("user:group")
|
||||
.help("Filter by owning user and/or group")
|
||||
.long_help(
|
||||
"Filter files by their user and/or group. \
|
||||
Format: [(user|uid)][:(group|gid)]. Either side is optional. \
|
||||
Precede either side with a '!' to exclude files instead.\n\
|
||||
Examples:\n \
|
||||
--owner john\n \
|
||||
--owner :students\n \
|
||||
--owner '!john:students'",
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// Make `--one-file-system` available only on Unix and Windows platforms, as per the
|
||||
// restrictions on the corresponding option in the `ignore` crate.
|
||||
// Provide aliases `mount` and `xdev` for people coming from `find`.
|
||||
if cfg!(any(unix, windows)) {
|
||||
app = app.arg(
|
||||
Arg::with_name("one-file-system")
|
||||
.long("one-file-system")
|
||||
.aliases(&["mount", "xdev"])
|
||||
.hidden_short_help(true)
|
||||
.long_help(
|
||||
"By default, fd will traverse the file system tree as far as other options \
|
||||
dictate. With this flag, fd ensures that it does not descend into a \
|
||||
different file system than the one it started in. Comparable to the -mount \
|
||||
or -xdev filters of find(1).",
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
app
|
||||
}
|
|
@ -0,0 +1,911 @@
|
|||
use std::num::NonZeroUsize;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use clap::{
|
||||
error::ErrorKind, value_parser, Arg, ArgAction, ArgGroup, ArgMatches, Command, Parser,
|
||||
ValueEnum,
|
||||
};
|
||||
#[cfg(feature = "completions")]
|
||||
use clap_complete::Shell;
|
||||
use normpath::PathExt;
|
||||
|
||||
use crate::error::print_error;
|
||||
use crate::exec::CommandSet;
|
||||
use crate::filesystem;
|
||||
#[cfg(unix)]
|
||||
use crate::filter::OwnerFilter;
|
||||
use crate::filter::SizeFilter;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(
|
||||
name = "fd",
|
||||
version,
|
||||
about = "A program to find entries in your filesystem",
|
||||
after_long_help = "Bugs can be reported on GitHub: https://github.com/sharkdp/fd/issues",
|
||||
max_term_width = 98,
|
||||
args_override_self = true,
|
||||
group(ArgGroup::new("execs").args(&["exec", "exec_batch", "list_details"]).conflicts_with_all(&[
|
||||
"max_results", "quiet", "max_one_result"])),
|
||||
)]
|
||||
pub struct Opts {
|
||||
/// Include hidden directories and files in the search results (default:
|
||||
/// hidden files and directories are skipped). Files and directories are
|
||||
/// considered to be hidden if their name starts with a `.` sign (dot).
|
||||
/// Any files or directories that are ignored due to the rules described by
|
||||
/// --no-ignore are still ignored unless otherwise specified.
|
||||
/// The flag can be overridden with --no-hidden.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'H',
|
||||
help = "Search hidden files and directories",
|
||||
long_help
|
||||
)]
|
||||
pub hidden: bool,
|
||||
|
||||
/// Overrides --hidden
|
||||
#[arg(long, overrides_with = "hidden", hide = true, action = ArgAction::SetTrue)]
|
||||
no_hidden: (),
|
||||
|
||||
/// Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore', '.ignore', '.fdignore', or the global ignore file,
|
||||
/// The flag can be overridden with --ignore.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'I',
|
||||
help = "Do not respect .(git|fd)ignore files",
|
||||
long_help
|
||||
)]
|
||||
pub no_ignore: bool,
|
||||
|
||||
/// Overrides --no-ignore
|
||||
#[arg(long, overrides_with = "no_ignore", hide = true, action = ArgAction::SetTrue)]
|
||||
ignore: (),
|
||||
|
||||
///Show search results from files and directories that
|
||||
///would otherwise be ignored by '.gitignore' files.
|
||||
///The flag can be overridden with --ignore-vcs.
|
||||
#[arg(
|
||||
long,
|
||||
hide_short_help = true,
|
||||
help = "Do not respect .gitignore files",
|
||||
long_help
|
||||
)]
|
||||
pub no_ignore_vcs: bool,
|
||||
|
||||
/// Overrides --no-ignore-vcs
|
||||
#[arg(long, overrides_with = "no_ignore_vcs", hide = true, action = ArgAction::SetTrue)]
|
||||
ignore_vcs: (),
|
||||
|
||||
/// Do not require a git repository to respect gitignores.
|
||||
/// By default, fd will only respect global gitignore rules, .gitignore rules,
|
||||
/// and local exclude rules if fd detects that you are searching inside a
|
||||
/// git repository. This flag allows you to relax this restriction such that
|
||||
/// fd will respect all git related ignore rules regardless of whether you're
|
||||
/// searching in a git repository or not.
|
||||
///
|
||||
///
|
||||
/// This flag can be disabled with --require-git.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with = "require_git",
|
||||
hide_short_help = true,
|
||||
// same description as ripgrep's flag: ripgrep/crates/core/app.rs
|
||||
long_help
|
||||
)]
|
||||
pub no_require_git: bool,
|
||||
|
||||
/// Overrides --no-require-git
|
||||
#[arg(long, overrides_with = "no_require_git", hide = true, action = ArgAction::SetTrue)]
|
||||
require_git: (),
|
||||
|
||||
/// Show search results from files and directories that would otherwise be
|
||||
/// ignored by '.gitignore', '.ignore', or '.fdignore' files in parent directories.
|
||||
#[arg(
|
||||
long,
|
||||
hide_short_help = true,
|
||||
help = "Do not respect .(git|fd)ignore files in parent directories",
|
||||
long_help
|
||||
)]
|
||||
pub no_ignore_parent: bool,
|
||||
|
||||
/// Do not respect the global ignore file
|
||||
#[arg(long, hide = true)]
|
||||
pub no_global_ignore_file: bool,
|
||||
|
||||
/// Perform an unrestricted search, including ignored and hidden files. This is
|
||||
/// an alias for '--no-ignore --hidden'.
|
||||
#[arg(long = "unrestricted", short = 'u', overrides_with_all(&["ignore", "no_hidden"]), action(ArgAction::Count), hide_short_help = true,
|
||||
help = "Unrestricted search, alias for '--no-ignore --hidden'",
|
||||
long_help,
|
||||
)]
|
||||
rg_alias_hidden_ignore: u8,
|
||||
|
||||
/// Case-sensitive search (default: smart case)
|
||||
#[arg(
|
||||
long,
|
||||
short = 's',
|
||||
overrides_with("ignore_case"),
|
||||
long_help = "Perform a case-sensitive search. By default, fd uses case-insensitive \
|
||||
searches, unless the pattern contains an uppercase character (smart \
|
||||
case)."
|
||||
)]
|
||||
pub case_sensitive: bool,
|
||||
|
||||
/// Perform a case-insensitive search. By default, fd uses case-insensitive
|
||||
/// searches, unless the pattern contains an uppercase character (smart
|
||||
/// case).
|
||||
#[arg(
|
||||
long,
|
||||
short = 'i',
|
||||
overrides_with("case_sensitive"),
|
||||
help = "Case-insensitive search (default: smart case)",
|
||||
long_help
|
||||
)]
|
||||
pub ignore_case: bool,
|
||||
|
||||
/// Perform a glob-based search instead of a regular expression search.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'g',
|
||||
conflicts_with("fixed_strings"),
|
||||
help = "Glob-based search (default: regular expression)",
|
||||
long_help
|
||||
)]
|
||||
pub glob: bool,
|
||||
|
||||
/// Perform a regular-expression based search (default). This can be used to
|
||||
/// override --glob.
|
||||
#[arg(
|
||||
long,
|
||||
overrides_with("glob"),
|
||||
hide_short_help = true,
|
||||
help = "Regular-expression based search (default)",
|
||||
long_help
|
||||
)]
|
||||
pub regex: bool,
|
||||
|
||||
/// Treat the pattern as a literal string instead of a regular expression. Note
|
||||
/// that this also performs substring comparison. If you want to match on an
|
||||
/// exact filename, consider using '--glob'.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'F',
|
||||
alias = "literal",
|
||||
hide_short_help = true,
|
||||
help = "Treat pattern as literal string stead of regex",
|
||||
long_help
|
||||
)]
|
||||
pub fixed_strings: bool,
|
||||
|
||||
/// Add additional required search patterns, all of which must be matched. Multiple
|
||||
/// additional patterns can be specified. The patterns are regular
|
||||
/// expressions, unless '--glob' or '--fixed-strings' is used.
|
||||
#[arg(
|
||||
long = "and",
|
||||
value_name = "pattern",
|
||||
help = "Additional search patterns that need to be matched",
|
||||
long_help,
|
||||
hide_short_help = true,
|
||||
allow_hyphen_values = true
|
||||
)]
|
||||
pub exprs: Option<Vec<String>>,
|
||||
|
||||
/// Shows the full path starting from the root as opposed to relative paths.
|
||||
/// The flag can be overridden with --relative-path.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'a',
|
||||
help = "Show absolute instead of relative paths",
|
||||
long_help
|
||||
)]
|
||||
pub absolute_path: bool,
|
||||
|
||||
/// Overrides --absolute-path
|
||||
#[arg(long, overrides_with = "absolute_path", hide = true, action = ArgAction::SetTrue)]
|
||||
relative_path: (),
|
||||
|
||||
/// Use a detailed listing format like 'ls -l'. This is basically an alias
|
||||
/// for '--exec-batch ls -l' with some additional 'ls' options. This can be
|
||||
/// used to see more metadata, to show symlink targets and to achieve a
|
||||
/// deterministic sort order.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'l',
|
||||
conflicts_with("absolute_path"),
|
||||
help = "Use a long listing format with file metadata",
|
||||
long_help
|
||||
)]
|
||||
pub list_details: bool,
|
||||
|
||||
/// Follow symbolic links
|
||||
#[arg(
|
||||
long,
|
||||
short = 'L',
|
||||
alias = "dereference",
|
||||
long_help = "By default, fd does not descend into symlinked directories. Using this \
|
||||
flag, symbolic links are also traversed. \
|
||||
Flag can be overridden with --no-follow."
|
||||
)]
|
||||
pub follow: bool,
|
||||
|
||||
/// Overrides --follow
|
||||
#[arg(long, overrides_with = "follow", hide = true, action = ArgAction::SetTrue)]
|
||||
no_follow: (),
|
||||
|
||||
/// By default, the search pattern is only matched against the filename (or directory name). Using this flag, the pattern is matched against the full (absolute) path. Example:
|
||||
/// fd --glob -p '**/.git/config'
|
||||
#[arg(
|
||||
long,
|
||||
short = 'p',
|
||||
help = "Search full abs. path (default: filename only)",
|
||||
long_help,
|
||||
verbatim_doc_comment
|
||||
)]
|
||||
pub full_path: bool,
|
||||
|
||||
/// Separate search results by the null character (instead of newlines).
|
||||
/// Useful for piping results to 'xargs'.
|
||||
#[arg(
|
||||
long = "print0",
|
||||
short = '0',
|
||||
conflicts_with("list_details"),
|
||||
hide_short_help = true,
|
||||
help = "Separate search results by the null character",
|
||||
long_help
|
||||
)]
|
||||
pub null_separator: bool,
|
||||
|
||||
/// Limit the directory traversal to a given depth. By default, there is no
|
||||
/// limit on the search depth.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'd',
|
||||
value_name = "depth",
|
||||
alias("maxdepth"),
|
||||
help = "Set maximum search depth (default: none)",
|
||||
long_help
|
||||
)]
|
||||
max_depth: Option<usize>,
|
||||
|
||||
/// Only show search results starting at the given depth.
|
||||
/// See also: '--max-depth' and '--exact-depth'
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "depth",
|
||||
hide_short_help = true,
|
||||
help = "Only show search results starting at the given depth.",
|
||||
long_help
|
||||
)]
|
||||
min_depth: Option<usize>,
|
||||
|
||||
/// Only show search results at the exact given depth. This is an alias for
|
||||
/// '--min-depth <depth> --max-depth <depth>'.
|
||||
#[arg(long, value_name = "depth", hide_short_help = true, conflicts_with_all(&["max_depth", "min_depth"]),
|
||||
help = "Only show search results at the exact given depth",
|
||||
long_help,
|
||||
)]
|
||||
exact_depth: Option<usize>,
|
||||
|
||||
/// Exclude files/directories that match the given glob pattern. This
|
||||
/// overrides any other ignore logic. Multiple exclude patterns can be
|
||||
/// specified.
|
||||
///
|
||||
/// Examples:
|
||||
/// {n} --exclude '*.pyc'
|
||||
/// {n} --exclude node_modules
|
||||
#[arg(
|
||||
long,
|
||||
short = 'E',
|
||||
value_name = "pattern",
|
||||
help = "Exclude entries that match the given glob pattern",
|
||||
long_help
|
||||
)]
|
||||
pub exclude: Vec<String>,
|
||||
|
||||
/// Do not traverse into directories that match the search criteria. If
|
||||
/// you want to exclude specific directories, use the '--exclude=…' option.
|
||||
#[arg(long, hide_short_help = true, conflicts_with_all(&["size", "exact_depth"]),
|
||||
long_help,
|
||||
)]
|
||||
pub prune: bool,
|
||||
|
||||
/// Filter the search by type:
|
||||
/// {n} 'f' or 'file': regular files
|
||||
/// {n} 'd' or 'dir' or 'directory': directories
|
||||
/// {n} 'l' or 'symlink': symbolic links
|
||||
/// {n} 's' or 'socket': socket
|
||||
/// {n} 'p' or 'pipe': named pipe (FIFO)
|
||||
/// {n} 'b' or 'block-device': block device
|
||||
/// {n} 'c' or 'char-device': character device
|
||||
/// {n}{n} 'x' or 'executable': executables
|
||||
/// {n} 'e' or 'empty': empty files or directories
|
||||
///
|
||||
/// This option can be specified more than once to include multiple file types.
|
||||
/// Searching for '--type file --type symlink' will show both regular files as
|
||||
/// well as symlinks. Note that the 'executable' and 'empty' filters work differently:
|
||||
/// '--type executable' implies '--type file' by default. And '--type empty' searches
|
||||
/// for empty files and directories, unless either '--type file' or '--type directory'
|
||||
/// is specified in addition.
|
||||
///
|
||||
/// Examples:
|
||||
/// {n} - Only search for files:
|
||||
/// {n} fd --type file …
|
||||
/// {n} fd -tf …
|
||||
/// {n} - Find both files and symlinks
|
||||
/// {n} fd --type file --type symlink …
|
||||
/// {n} fd -tf -tl …
|
||||
/// {n} - Find executable files:
|
||||
/// {n} fd --type executable
|
||||
/// {n} fd -tx
|
||||
/// {n} - Find empty files:
|
||||
/// {n} fd --type empty --type file
|
||||
/// {n} fd -te -tf
|
||||
/// {n} - Find empty directories:
|
||||
/// {n} fd --type empty --type directory
|
||||
/// {n} fd -te -td
|
||||
#[arg(
|
||||
long = "type",
|
||||
short = 't',
|
||||
value_name = "filetype",
|
||||
hide_possible_values = true,
|
||||
value_enum,
|
||||
help = "Filter by type: file (f), directory (d/dir), symlink (l), \
|
||||
executable (x), empty (e), socket (s), pipe (p), \
|
||||
char-device (c), block-device (b)",
|
||||
long_help
|
||||
)]
|
||||
pub filetype: Option<Vec<FileType>>,
|
||||
|
||||
/// (Additionally) filter search results by their file extension. Multiple
|
||||
/// allowable file extensions can be specified.
|
||||
///
|
||||
/// If you want to search for files without extension,
|
||||
/// you can use the regex '^[^.]+$' as a normal search pattern.
|
||||
#[arg(
|
||||
long = "extension",
|
||||
short = 'e',
|
||||
value_name = "ext",
|
||||
help = "Filter by file extension",
|
||||
long_help
|
||||
)]
|
||||
pub extensions: Option<Vec<String>>,
|
||||
|
||||
/// Limit results based on the size of files using the format <+-><NUM><UNIT>.
|
||||
/// '+': file size must be greater than or equal to this
|
||||
/// '-': file size must be less than or equal to this
|
||||
///
|
||||
/// If neither '+' nor '-' is specified, file size must be exactly equal to this.
|
||||
/// 'NUM': The numeric size (e.g. 500)
|
||||
/// 'UNIT': The units for NUM. They are not case-sensitive.
|
||||
/// Allowed unit values:
|
||||
/// 'b': bytes
|
||||
/// 'k': kilobytes (base ten, 10^3 = 1000 bytes)
|
||||
/// 'm': megabytes
|
||||
/// 'g': gigabytes
|
||||
/// 't': terabytes
|
||||
/// 'ki': kibibytes (base two, 2^10 = 1024 bytes)
|
||||
/// 'mi': mebibytes
|
||||
/// 'gi': gibibytes
|
||||
/// 'ti': tebibytes
|
||||
#[arg(long, short = 'S', value_parser = SizeFilter::from_string, allow_hyphen_values = true, verbatim_doc_comment, value_name = "size",
|
||||
help = "Limit results based on the size of files",
|
||||
long_help,
|
||||
verbatim_doc_comment,
|
||||
)]
|
||||
pub size: Vec<SizeFilter>,
|
||||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// greater than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// If the time is not specified, it defaults to 00:00:00.
|
||||
/// '--change-newer-than', '--newer', or '--changed-after' can be used as aliases.
|
||||
///
|
||||
/// Examples:
|
||||
/// {n} --changed-within 2weeks
|
||||
/// {n} --change-newer-than '2018-10-27 10:00:00'
|
||||
/// {n} --newer 2018-10-27
|
||||
/// {n} --changed-after 1day
|
||||
#[arg(
|
||||
long,
|
||||
alias("change-newer-than"),
|
||||
alias("newer"),
|
||||
alias("changed-after"),
|
||||
value_name = "date|dur",
|
||||
help = "Filter by file modification time (newer than)",
|
||||
long_help
|
||||
)]
|
||||
pub changed_within: Option<String>,
|
||||
|
||||
/// Filter results based on the file modification time. Files with modification times
|
||||
/// less than the argument are returned. The argument can be provided
|
||||
/// as a specific point in time (YYYY-MM-DD HH:MM:SS or @timestamp) or as a duration (10h, 1d, 35min).
|
||||
/// '--change-older-than' or '--older' can be used as aliases.
|
||||
///
|
||||
/// Examples:
|
||||
/// {n} --changed-before '2018-10-27 10:00:00'
|
||||
/// {n} --change-older-than 2weeks
|
||||
/// {n} --older 2018-10-27
|
||||
#[arg(
|
||||
long,
|
||||
alias("change-older-than"),
|
||||
alias("older"),
|
||||
value_name = "date|dur",
|
||||
help = "Filter by file modification time (older than)",
|
||||
long_help
|
||||
)]
|
||||
pub changed_before: Option<String>,
|
||||
|
||||
/// Filter files by their user and/or group.
|
||||
/// Format: [(user|uid)][:(group|gid)]. Either side is optional.
|
||||
/// Precede either side with a '!' to exclude files instead.
|
||||
///
|
||||
/// Examples:
|
||||
/// {n} --owner john
|
||||
/// {n} --owner :students
|
||||
/// {n} --owner '!john:students'
|
||||
#[cfg(unix)]
|
||||
#[arg(long, short = 'o', value_parser = OwnerFilter::from_string, value_name = "user:group",
|
||||
help = "Filter by owning user and/or group",
|
||||
long_help,
|
||||
)]
|
||||
pub owner: Option<OwnerFilter>,
|
||||
|
||||
/// Instead of printing the file normally, print the format string with the following placeholders replaced:
|
||||
/// '{}': path (of the current search result)
|
||||
/// '{/}': basename
|
||||
/// '{//}': parent directory
|
||||
/// '{.}': path without file extension
|
||||
/// '{/.}': basename without file extension
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "fmt",
|
||||
help = "Print results according to template",
|
||||
conflicts_with = "list_details"
|
||||
)]
|
||||
pub format: Option<String>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub exec: Exec,
|
||||
|
||||
/// Maximum number of arguments to pass to the command given with -X.
|
||||
/// If the number of results is greater than the given size,
|
||||
/// the command given with -X is run again with remaining arguments.
|
||||
/// A batch size of zero means there is no limit (default), but note
|
||||
/// that batching might still happen due to OS restrictions on the
|
||||
/// maximum length of command lines.
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "size",
|
||||
hide_short_help = true,
|
||||
requires("exec_batch"),
|
||||
value_parser = value_parser!(usize),
|
||||
default_value_t,
|
||||
help = "Max number of arguments to run as a batch size with -X",
|
||||
long_help,
|
||||
)]
|
||||
pub batch_size: usize,
|
||||
|
||||
/// Add a custom ignore-file in '.gitignore' format. These files have a low precedence.
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "path",
|
||||
hide_short_help = true,
|
||||
help = "Add a custom ignore-file in '.gitignore' format",
|
||||
long_help
|
||||
)]
|
||||
pub ignore_file: Vec<PathBuf>,
|
||||
|
||||
/// Declare when to use color for the pattern match output
|
||||
#[arg(
|
||||
long,
|
||||
short = 'c',
|
||||
value_enum,
|
||||
default_value_t = ColorWhen::Auto,
|
||||
value_name = "when",
|
||||
help = "When to use colors",
|
||||
long_help,
|
||||
)]
|
||||
pub color: ColorWhen,
|
||||
|
||||
/// Set number of threads to use for searching & executing (default: number
|
||||
/// of available CPU cores)
|
||||
#[arg(long, short = 'j', value_name = "num", hide_short_help = true, value_parser = str::parse::<NonZeroUsize>)]
|
||||
pub threads: Option<NonZeroUsize>,
|
||||
|
||||
/// Milliseconds to buffer before streaming search results to console
|
||||
///
|
||||
/// Amount of time in milliseconds to buffer, before streaming the search
|
||||
/// results to the console.
|
||||
#[arg(long, hide = true, value_parser = parse_millis)]
|
||||
pub max_buffer_time: Option<Duration>,
|
||||
|
||||
///Limit the number of search results to 'count' and quit immediately.
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "count",
|
||||
hide_short_help = true,
|
||||
overrides_with("max_one_result"),
|
||||
help = "Limit the number of search results",
|
||||
long_help
|
||||
)]
|
||||
max_results: Option<usize>,
|
||||
|
||||
/// Limit the search to a single result and quit immediately.
|
||||
/// This is an alias for '--max-results=1'.
|
||||
#[arg(
|
||||
short = '1',
|
||||
hide_short_help = true,
|
||||
overrides_with("max_results"),
|
||||
help = "Limit search to a single result",
|
||||
long_help
|
||||
)]
|
||||
max_one_result: bool,
|
||||
|
||||
/// When the flag is present, the program does not print anything and will
|
||||
/// return with an exit code of 0 if there is at least one match. Otherwise, the
|
||||
/// exit code will be 1.
|
||||
/// '--has-results' can be used as an alias.
|
||||
#[arg(
|
||||
long,
|
||||
short = 'q',
|
||||
alias = "has-results",
|
||||
hide_short_help = true,
|
||||
conflicts_with("max_results"),
|
||||
help = "Print nothing, exit code 0 if match found, 1 otherwise",
|
||||
long_help
|
||||
)]
|
||||
pub quiet: bool,
|
||||
|
||||
/// Enable the display of filesystem errors for situations such as
|
||||
/// insufficient permissions or dead symlinks.
|
||||
#[arg(
|
||||
long,
|
||||
hide_short_help = true,
|
||||
help = "Show filesystem errors",
|
||||
long_help
|
||||
)]
|
||||
pub show_errors: bool,
|
||||
|
||||
/// Change the current working directory of fd to the provided path. This
|
||||
/// means that search results will be shown with respect to the given base
|
||||
/// path. Note that relative paths which are passed to fd via the positional
|
||||
/// <path> argument or the '--search-path' option will also be resolved
|
||||
/// relative to this directory.
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "path",
|
||||
hide_short_help = true,
|
||||
help = "Change current working directory",
|
||||
long_help
|
||||
)]
|
||||
pub base_directory: Option<PathBuf>,
|
||||
|
||||
/// the search pattern which is either a regular expression (default) or a glob
|
||||
/// pattern (if --glob is used). If no pattern has been specified, every entry
|
||||
/// is considered a match. If your pattern starts with a dash (-), make sure to
|
||||
/// pass '--' first, or it will be considered as a flag (fd -- '-foo').
|
||||
#[arg(
|
||||
default_value = "",
|
||||
hide_default_value = true,
|
||||
value_name = "pattern",
|
||||
help = "the search pattern (a regular expression, unless '--glob' is used; optional)",
|
||||
long_help
|
||||
)]
|
||||
pub pattern: String,
|
||||
|
||||
/// Set the path separator to use when printing file paths. The default is
|
||||
/// the OS-specific separator ('/' on Unix, '\' on Windows).
|
||||
#[arg(
|
||||
long,
|
||||
value_name = "separator",
|
||||
hide_short_help = true,
|
||||
help = "Set path separator when printing file paths",
|
||||
long_help
|
||||
)]
|
||||
pub path_separator: Option<String>,
|
||||
|
||||
/// The directory where the filesystem search is rooted (optional). If
|
||||
/// omitted, search the current working directory.
|
||||
#[arg(action = ArgAction::Append,
|
||||
value_name = "path",
|
||||
help = "the root directories for the filesystem search (optional)",
|
||||
long_help,
|
||||
)]
|
||||
path: Vec<PathBuf>,
|
||||
|
||||
/// Provide paths to search as an alternative to the positional <path>
|
||||
/// argument. Changes the usage to `fd [OPTIONS] --search-path <path>
|
||||
/// --search-path <path2> [<pattern>]`
|
||||
#[arg(
|
||||
long,
|
||||
conflicts_with("path"),
|
||||
value_name = "search-path",
|
||||
hide_short_help = true,
|
||||
help = "Provides paths to search as an alternative to the positional <path> argument",
|
||||
long_help
|
||||
)]
|
||||
search_path: Vec<PathBuf>,
|
||||
|
||||
/// By default, relative paths are prefixed with './' when -x/--exec,
|
||||
/// -X/--exec-batch, or -0/--print0 are given, to reduce the risk of a
|
||||
/// path starting with '-' being treated as a command line option. Use
|
||||
/// this flag to change this behavior. If this flag is used without a value,
|
||||
/// it is equivalent to passing "always".
|
||||
#[arg(long, conflicts_with_all(&["path", "search_path"]), value_name = "when", hide_short_help = true, require_equals = true, long_help)]
|
||||
strip_cwd_prefix: Option<Option<StripCwdWhen>>,
|
||||
|
||||
/// By default, fd will traverse the file system tree as far as other options
|
||||
/// dictate. With this flag, fd ensures that it does not descend into a
|
||||
/// different file system than the one it started in. Comparable to the -mount
|
||||
/// or -xdev filters of find(1).
|
||||
#[cfg(any(unix, windows))]
|
||||
#[arg(long, aliases(&["mount", "xdev"]), hide_short_help = true, long_help)]
|
||||
pub one_file_system: bool,
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
#[arg(long, hide = true, exclusive = true)]
|
||||
gen_completions: Option<Option<Shell>>,
|
||||
}
|
||||
|
||||
impl Opts {
|
||||
pub fn search_paths(&self) -> anyhow::Result<Vec<PathBuf>> {
|
||||
// would it make sense to concatenate these?
|
||||
let paths = if !self.path.is_empty() {
|
||||
&self.path
|
||||
} else if !self.search_path.is_empty() {
|
||||
&self.search_path
|
||||
} else {
|
||||
let current_directory = Path::new("./");
|
||||
ensure_current_directory_exists(current_directory)?;
|
||||
return Ok(vec![self.normalize_path(current_directory)]);
|
||||
};
|
||||
Ok(paths
|
||||
.iter()
|
||||
.filter_map(|path| {
|
||||
if filesystem::is_existing_directory(path) {
|
||||
Some(self.normalize_path(path))
|
||||
} else {
|
||||
print_error(format!(
|
||||
"Search path '{}' is not a directory.",
|
||||
path.to_string_lossy()
|
||||
));
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
fn normalize_path(&self, path: &Path) -> PathBuf {
|
||||
if self.absolute_path {
|
||||
filesystem::absolute_path(path.normalize().unwrap().as_path()).unwrap()
|
||||
} else if path == Path::new(".") {
|
||||
// Change "." to "./" as a workaround for https://github.com/BurntSushi/ripgrep/pull/2711
|
||||
PathBuf::from("./")
|
||||
} else {
|
||||
path.to_path_buf()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn no_search_paths(&self) -> bool {
|
||||
self.path.is_empty() && self.search_path.is_empty()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn rg_alias_ignore(&self) -> bool {
|
||||
self.rg_alias_hidden_ignore > 0
|
||||
}
|
||||
|
||||
pub fn max_depth(&self) -> Option<usize> {
|
||||
self.max_depth.or(self.exact_depth)
|
||||
}
|
||||
|
||||
pub fn min_depth(&self) -> Option<usize> {
|
||||
self.min_depth.or(self.exact_depth)
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> NonZeroUsize {
|
||||
self.threads.unwrap_or_else(default_num_threads)
|
||||
}
|
||||
|
||||
pub fn max_results(&self) -> Option<usize> {
|
||||
self.max_results
|
||||
.filter(|&m| m > 0)
|
||||
.or_else(|| self.max_one_result.then_some(1))
|
||||
}
|
||||
|
||||
pub fn strip_cwd_prefix<P: FnOnce() -> bool>(&self, auto_pred: P) -> bool {
|
||||
use self::StripCwdWhen::*;
|
||||
self.no_search_paths()
|
||||
&& match self.strip_cwd_prefix.map_or(Auto, |o| o.unwrap_or(Always)) {
|
||||
Auto => auto_pred(),
|
||||
Always => true,
|
||||
Never => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
pub fn gen_completions(&self) -> anyhow::Result<Option<Shell>> {
|
||||
self.gen_completions
|
||||
.map(|maybe_shell| match maybe_shell {
|
||||
Some(sh) => Ok(sh),
|
||||
None => {
|
||||
Shell::from_env().ok_or_else(|| anyhow!("Unable to get shell from environment"))
|
||||
}
|
||||
})
|
||||
.transpose()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the default number of threads to use, if not explicitly specified.
fn default_num_threads() -> NonZeroUsize {
    // To limit startup overhead on massively parallel machines, don't use more
    // than 64 threads.
    let cap = NonZeroUsize::new(64).unwrap();
    match std::thread::available_parallelism() {
        Ok(cores) => cores.min(cap),
        // If we can't get the amount of parallelism for some reason, then
        // default to a single thread, because that is safe.
        Err(_) => NonZeroUsize::MIN,
    }
}
|
||||
|
||||
// Filter values accepted by `--type`/`-t`; each variant carries the
// single-letter alias documented on `Opts::filetype`. Plain `//` comments are
// used here on purpose: `///` docs on `ValueEnum` variants can surface in
// clap output — TODO confirm before adding any.
#[derive(Copy, Clone, PartialEq, Eq, ValueEnum)]
pub enum FileType {
    // 'f': regular files
    #[value(alias = "f")]
    File,
    // 'd'/'dir': directories
    #[value(alias = "d", alias = "dir")]
    Directory,
    // 'l': symbolic links
    #[value(alias = "l")]
    Symlink,
    // 'b': block devices
    #[value(alias = "b")]
    BlockDevice,
    // 'c': character devices
    #[value(alias = "c")]
    CharDevice,
    /// A file which is executable by the current effective user
    #[value(alias = "x")]
    Executable,
    // 'e': empty files or directories
    #[value(alias = "e")]
    Empty,
    // 's': sockets
    #[value(alias = "s")]
    Socket,
    // 'p': named pipes (FIFOs)
    #[value(alias = "p")]
    Pipe,
}
|
||||
|
||||
// Argument values for `--color <when>`. The `///` variant docs below are left
// untouched: with `ValueEnum` they may double as clap's per-value help text.
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
pub enum ColorWhen {
    /// show colors if the output goes to an interactive console (default)
    Auto,
    /// always use colorized output
    Always,
    /// do not use colorized output
    Never,
}
|
||||
|
||||
// Argument values for `--strip-cwd-prefix[=<when>]`; consumed by
// `Opts::strip_cwd_prefix`. Variant docs are left untouched for the same
// reason as `ColorWhen`.
#[derive(Copy, Clone, PartialEq, Eq, Debug, ValueEnum)]
pub enum StripCwdWhen {
    /// Use the default behavior
    Auto,
    /// Always strip the ./ at the beginning of paths
    Always,
    /// Never strip the ./
    Never,
}
|
||||
|
||||
// there isn't a derive api for getting grouped values yet,
// so we have to use hand-rolled parsing for exec and exec-batch
/// Holds the command template supplied via `-x/--exec` or `-X/--exec-batch`,
/// populated by the hand-written `FromArgMatches` implementation below.
pub struct Exec {
    // `None` when neither option was given on the command line.
    pub command: Option<CommandSet>,
}
|
||||
|
||||
impl clap::FromArgMatches for Exec {
|
||||
fn from_arg_matches(matches: &ArgMatches) -> clap::error::Result<Self> {
|
||||
let command = matches
|
||||
.get_occurrences::<String>("exec")
|
||||
.map(CommandSet::new)
|
||||
.or_else(|| {
|
||||
matches
|
||||
.get_occurrences::<String>("exec_batch")
|
||||
.map(CommandSet::new_batch)
|
||||
})
|
||||
.transpose()
|
||||
.map_err(|e| clap::Error::raw(ErrorKind::InvalidValue, e))?;
|
||||
Ok(Exec { command })
|
||||
}
|
||||
|
||||
fn update_from_arg_matches(&mut self, matches: &ArgMatches) -> clap::error::Result<()> {
|
||||
*self = Self::from_arg_matches(matches)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl clap::Args for Exec {
|
||||
fn augment_args(cmd: Command) -> Command {
|
||||
cmd.arg(Arg::new("exec")
|
||||
.action(ArgAction::Append)
|
||||
.long("exec")
|
||||
.short('x')
|
||||
.num_args(1..)
|
||||
.allow_hyphen_values(true)
|
||||
.value_terminator(";")
|
||||
.value_name("cmd")
|
||||
.conflicts_with("list_details")
|
||||
.help("Execute a command for each search result")
|
||||
.long_help(
|
||||
"Execute a command for each search result in parallel (use --threads=1 for sequential command execution). \
|
||||
There is no guarantee of the order commands are executed in, and the order should not be depended upon. \
|
||||
All positional arguments following --exec are considered to be arguments to the command - not to fd. \
|
||||
It is therefore recommended to place the '-x'/'--exec' option last.\n\
|
||||
The following placeholders are substituted before the command is executed:\n \
|
||||
'{}': path (of the current search result)\n \
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- find all *.zip files and unzip them:\n\n \
|
||||
fd -e zip -x unzip\n\n \
|
||||
- find *.h and *.cpp files and run \"clang-format -i ..\" for each of them:\n\n \
|
||||
fd -e h -e cpp -x clang-format -i\n\n \
|
||||
- Convert all *.jpg files to *.png files:\n\n \
|
||||
fd -e jpg -x convert {} {.}.png\
|
||||
",
|
||||
),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("exec_batch")
|
||||
.action(ArgAction::Append)
|
||||
.long("exec-batch")
|
||||
.short('X')
|
||||
.num_args(1..)
|
||||
.allow_hyphen_values(true)
|
||||
.value_terminator(";")
|
||||
.value_name("cmd")
|
||||
.conflicts_with_all(["exec", "list_details"])
|
||||
.help("Execute a command with all search results at once")
|
||||
.long_help(
|
||||
"Execute the given command once, with all search results as arguments.\n\
|
||||
The order of the arguments is non-deterministic, and should not be relied upon.\n\
|
||||
One of the following placeholders is substituted before the command is executed:\n \
|
||||
'{}': path (of all search results)\n \
|
||||
'{/}': basename\n \
|
||||
'{//}': parent directory\n \
|
||||
'{.}': path without file extension\n \
|
||||
'{/.}': basename without file extension\n \
|
||||
'{{': literal '{' (for escaping)\n \
|
||||
'}}': literal '}' (for escaping)\n\n\
|
||||
If no placeholder is present, an implicit \"{}\" at the end is assumed.\n\n\
|
||||
Examples:\n\n \
|
||||
- Find all test_*.py files and open them in your favorite editor:\n\n \
|
||||
fd -g 'test_*.py' -X vim\n\n \
|
||||
- Find all *.rs files and count the lines with \"wc -l ...\":\n\n \
|
||||
fd -e rs -X wc -l\
|
||||
"
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn augment_args_for_update(cmd: Command) -> Command {
|
||||
Self::augment_args(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_millis(arg: &str) -> Result<Duration, std::num::ParseIntError> {
|
||||
Ok(Duration::from_millis(arg.parse()?))
|
||||
}
|
||||
|
||||
fn ensure_current_directory_exists(current_directory: &Path) -> anyhow::Result<()> {
|
||||
if filesystem::is_existing_directory(current_directory) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Could not retrieve current directory (has it been deleted?)."
|
||||
))
|
||||
}
|
||||
}
|
|
@ -3,14 +3,15 @@ use std::{path::PathBuf, sync::Arc, time::Duration};
|
|||
use lscolors::LsColors;
|
||||
use regex::bytes::RegexSet;
|
||||
|
||||
use crate::exec::CommandTemplate;
|
||||
use crate::exec::CommandSet;
|
||||
use crate::filetypes::FileTypes;
|
||||
#[cfg(unix)]
|
||||
use crate::filter::OwnerFilter;
|
||||
use crate::filter::{SizeFilter, TimeFilter};
|
||||
use crate::fmt::FormatTemplate;
|
||||
|
||||
/// Configuration options for *fd*.
|
||||
pub struct Options {
|
||||
pub struct Config {
|
||||
/// Whether the search is case-sensitive or case-insensitive.
|
||||
pub case_sensitive: bool,
|
||||
|
||||
|
@ -24,9 +25,15 @@ pub struct Options {
|
|||
/// Whether to respect `.fdignore` files or not.
|
||||
pub read_fdignore: bool,
|
||||
|
||||
/// Whether to respect ignore files in parent directories or not.
|
||||
pub read_parent_ignore: bool,
|
||||
|
||||
/// Whether to respect VCS ignore files (`.gitignore`, ..) or not.
|
||||
pub read_vcsignore: bool,
|
||||
|
||||
/// Whether to require a `.git` directory to respect gitignore files.
|
||||
pub require_git_to_read_vcsignore: bool,
|
||||
|
||||
/// Whether to respect the global ignore file or not.
|
||||
pub read_global_ignore: bool,
|
||||
|
||||
|
@ -48,9 +55,16 @@ pub struct Options {
|
|||
/// The minimum depth for reported entries, or `None`.
|
||||
pub min_depth: Option<usize>,
|
||||
|
||||
/// Whether to stop traversing into matching directories.
|
||||
pub prune: bool,
|
||||
|
||||
/// The number of threads to use.
|
||||
pub threads: usize,
|
||||
|
||||
/// If true, the program doesn't print anything and will instead return an exit code of 0
|
||||
/// if there's at least one match. Otherwise, the exit code will be 1.
|
||||
pub quiet: bool,
|
||||
|
||||
/// Time to buffer results internally before streaming to the console. This is useful to
|
||||
/// provide a sorted output, in case the total execution time is shorter than
|
||||
/// `max_buffer_time`.
|
||||
|
@ -72,8 +86,15 @@ pub struct Options {
|
|||
/// The value (if present) will be a lowercase string without leading dots.
|
||||
pub extensions: Option<RegexSet>,
|
||||
|
||||
/// A format string to use to format results, similarly to exec
|
||||
pub format: Option<FormatTemplate>,
|
||||
|
||||
/// If a value is supplied, each item found will be used to generate and execute commands.
|
||||
pub command: Option<Arc<CommandTemplate>>,
|
||||
pub command: Option<Arc<CommandSet>>,
|
||||
|
||||
/// Maximum number of search results to pass to each `command`. If zero, the number is
|
||||
/// unlimited.
|
||||
pub batch_size: usize,
|
||||
|
||||
/// A list of glob patterns that should be excluded from the search.
|
||||
pub exclude_patterns: Vec<String>,
|
||||
|
@ -97,6 +118,19 @@ pub struct Options {
|
|||
/// The separator used to print file paths.
|
||||
pub path_separator: Option<String>,
|
||||
|
||||
/// The actual separator, either the system default separator or `path_separator`
|
||||
pub actual_path_separator: String,
|
||||
|
||||
/// The maximum number of search results
|
||||
pub max_results: Option<usize>,
|
||||
|
||||
/// Whether or not to strip the './' prefix for search results
|
||||
pub strip_cwd_prefix: bool,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
/// Check whether results are being printed.
|
||||
pub fn is_printing(&self) -> bool {
|
||||
self.command.is_none()
|
||||
}
|
||||
}
|
|
@ -0,0 +1,155 @@
|
|||
use std::cell::OnceCell;
|
||||
use std::ffi::OsString;
|
||||
use std::fs::{FileType, Metadata};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use lscolors::{Colorable, LsColors, Style};
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::filesystem::strip_current_dir;
|
||||
|
||||
#[derive(Debug)]
|
||||
enum DirEntryInner {
|
||||
Normal(ignore::DirEntry),
|
||||
BrokenSymlink(PathBuf),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DirEntry {
|
||||
inner: DirEntryInner,
|
||||
metadata: OnceCell<Option<Metadata>>,
|
||||
style: OnceCell<Option<Style>>,
|
||||
}
|
||||
|
||||
impl DirEntry {
|
||||
#[inline]
|
||||
pub fn normal(e: ignore::DirEntry) -> Self {
|
||||
Self {
|
||||
inner: DirEntryInner::Normal(e),
|
||||
metadata: OnceCell::new(),
|
||||
style: OnceCell::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn broken_symlink(path: PathBuf) -> Self {
|
||||
Self {
|
||||
inner: DirEntryInner::BrokenSymlink(path),
|
||||
metadata: OnceCell::new(),
|
||||
style: OnceCell::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path(&self) -> &Path {
|
||||
match &self.inner {
|
||||
DirEntryInner::Normal(e) => e.path(),
|
||||
DirEntryInner::BrokenSymlink(pathbuf) => pathbuf.as_path(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_path(self) -> PathBuf {
|
||||
match self.inner {
|
||||
DirEntryInner::Normal(e) => e.into_path(),
|
||||
DirEntryInner::BrokenSymlink(p) => p,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the path as it should be presented to the user.
|
||||
pub fn stripped_path(&self, config: &Config) -> &Path {
|
||||
if config.strip_cwd_prefix {
|
||||
strip_current_dir(self.path())
|
||||
} else {
|
||||
self.path()
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the path as it should be presented to the user.
|
||||
pub fn into_stripped_path(self, config: &Config) -> PathBuf {
|
||||
if config.strip_cwd_prefix {
|
||||
self.stripped_path(config).to_path_buf()
|
||||
} else {
|
||||
self.into_path()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn file_type(&self) -> Option<FileType> {
|
||||
match &self.inner {
|
||||
DirEntryInner::Normal(e) => e.file_type(),
|
||||
DirEntryInner::BrokenSymlink(_) => self.metadata().map(|m| m.file_type()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn metadata(&self) -> Option<&Metadata> {
|
||||
self.metadata
|
||||
.get_or_init(|| match &self.inner {
|
||||
DirEntryInner::Normal(e) => e.metadata().ok(),
|
||||
DirEntryInner::BrokenSymlink(path) => path.symlink_metadata().ok(),
|
||||
})
|
||||
.as_ref()
|
||||
}
|
||||
|
||||
pub fn depth(&self) -> Option<usize> {
|
||||
match &self.inner {
|
||||
DirEntryInner::Normal(e) => Some(e.depth()),
|
||||
DirEntryInner::BrokenSymlink(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn style(&self, ls_colors: &LsColors) -> Option<&Style> {
|
||||
self.style
|
||||
.get_or_init(|| ls_colors.style_for(self).cloned())
|
||||
.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DirEntry {
|
||||
#[inline]
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.path() == other.path()
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DirEntry {}
|
||||
|
||||
impl PartialOrd for DirEntry {
|
||||
#[inline]
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for DirEntry {
|
||||
#[inline]
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.path().cmp(other.path())
|
||||
}
|
||||
}
|
||||
|
||||
impl Colorable for DirEntry {
|
||||
fn path(&self) -> PathBuf {
|
||||
self.path().to_owned()
|
||||
}
|
||||
|
||||
fn file_name(&self) -> OsString {
|
||||
let name = match &self.inner {
|
||||
DirEntryInner::Normal(e) => e.file_name(),
|
||||
DirEntryInner::BrokenSymlink(path) => {
|
||||
// Path::file_name() only works if the last component is Normal,
|
||||
// but we want it for all component types, so we open code it.
|
||||
// Copied from LsColors::style_for_path_with_metadata().
|
||||
path.components()
|
||||
.last()
|
||||
.map(|c| c.as_os_str())
|
||||
.unwrap_or_else(|| path.as_os_str())
|
||||
}
|
||||
};
|
||||
name.to_owned()
|
||||
}
|
||||
|
||||
fn file_type(&self) -> Option<FileType> {
|
||||
self.file_type()
|
||||
}
|
||||
|
||||
fn metadata(&self) -> Option<Metadata> {
|
||||
self.metadata().cloned()
|
||||
}
|
||||
}
|
|
@ -1,41 +1,109 @@
|
|||
use std::io;
|
||||
use std::io::Write;
|
||||
use std::process::Command;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use argmax::Command;
|
||||
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::ExitCode;
|
||||
|
||||
struct Outputs {
|
||||
stdout: Vec<u8>,
|
||||
stderr: Vec<u8>,
|
||||
}
|
||||
struct OutputBuffer<'a> {
|
||||
output_permission: &'a Mutex<()>,
|
||||
outputs: Vec<Outputs>,
|
||||
}
|
||||
|
||||
impl<'a> OutputBuffer<'a> {
|
||||
fn new(output_permission: &'a Mutex<()>) -> Self {
|
||||
Self {
|
||||
output_permission,
|
||||
outputs: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn push(&mut self, stdout: Vec<u8>, stderr: Vec<u8>) {
|
||||
self.outputs.push(Outputs { stdout, stderr });
|
||||
}
|
||||
|
||||
fn write(self) {
|
||||
// avoid taking the lock if there is nothing to do
|
||||
if self.outputs.is_empty() {
|
||||
return;
|
||||
}
|
||||
// While this lock is active, this thread will be the only thread allowed
|
||||
// to write its outputs.
|
||||
let _lock = self.output_permission.lock().unwrap();
|
||||
|
||||
let stdout = io::stdout();
|
||||
let stderr = io::stderr();
|
||||
|
||||
let mut stdout = stdout.lock();
|
||||
let mut stderr = stderr.lock();
|
||||
|
||||
for output in self.outputs.iter() {
|
||||
let _ = stdout.write_all(&output.stdout);
|
||||
let _ = stderr.write_all(&output.stderr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Executes a command.
|
||||
pub fn execute_command(mut cmd: Command, out_perm: &Mutex<()>) -> ExitCode {
|
||||
// Spawn the supplied command.
|
||||
let output = cmd.output();
|
||||
pub fn execute_commands<I: Iterator<Item = io::Result<Command>>>(
|
||||
cmds: I,
|
||||
out_perm: &Mutex<()>,
|
||||
enable_output_buffering: bool,
|
||||
) -> ExitCode {
|
||||
let mut output_buffer = OutputBuffer::new(out_perm);
|
||||
for result in cmds {
|
||||
let mut cmd = match result {
|
||||
Ok(cmd) => cmd,
|
||||
Err(e) => return handle_cmd_error(None, e),
|
||||
};
|
||||
|
||||
// Then wait for the command to exit, if it was spawned.
|
||||
match output {
|
||||
Ok(output) => {
|
||||
// While this lock is active, this thread will be the only thread allowed
|
||||
// to write its outputs.
|
||||
let _lock = out_perm.lock().unwrap();
|
||||
// Spawn the supplied command.
|
||||
let output = if enable_output_buffering {
|
||||
cmd.output()
|
||||
} else {
|
||||
// If running on only one thread, don't buffer output
|
||||
// Allows for viewing and interacting with intermediate command output
|
||||
cmd.spawn().and_then(|c| c.wait_with_output())
|
||||
};
|
||||
|
||||
let stdout = io::stdout();
|
||||
let stderr = io::stderr();
|
||||
|
||||
let _ = stdout.lock().write_all(&output.stdout);
|
||||
let _ = stderr.lock().write_all(&output.stderr);
|
||||
|
||||
if output.status.code() == Some(0) {
|
||||
ExitCode::Success
|
||||
} else {
|
||||
ExitCode::GeneralError
|
||||
// Then wait for the command to exit, if it was spawned.
|
||||
match output {
|
||||
Ok(output) => {
|
||||
if enable_output_buffering {
|
||||
output_buffer.push(output.stdout, output.stderr);
|
||||
}
|
||||
if output.status.code() != Some(0) {
|
||||
output_buffer.write();
|
||||
return ExitCode::GeneralError;
|
||||
}
|
||||
}
|
||||
Err(why) => {
|
||||
output_buffer.write();
|
||||
return handle_cmd_error(Some(&cmd), why);
|
||||
}
|
||||
}
|
||||
Err(ref why) if why.kind() == io::ErrorKind::NotFound => {
|
||||
print_error(format!("Command not found: {:?}", cmd));
|
||||
}
|
||||
output_buffer.write();
|
||||
ExitCode::Success
|
||||
}
|
||||
|
||||
pub fn handle_cmd_error(cmd: Option<&Command>, err: io::Error) -> ExitCode {
|
||||
match (cmd, err) {
|
||||
(Some(cmd), err) if err.kind() == io::ErrorKind::NotFound => {
|
||||
print_error(format!(
|
||||
"Command not found: {}",
|
||||
cmd.get_program().to_string_lossy()
|
||||
));
|
||||
ExitCode::GeneralError
|
||||
}
|
||||
Err(why) => {
|
||||
print_error(format!("Problem while executing command: {}", why));
|
||||
(_, err) => {
|
||||
print_error(format!("Problem while executing command: {}", err));
|
||||
ExitCode::GeneralError
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,62 +1,67 @@
|
|||
use std::path::PathBuf;
|
||||
use std::sync::mpsc::Receiver;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::walk::WorkerResult;
|
||||
|
||||
use super::CommandTemplate;
|
||||
use super::CommandSet;
|
||||
|
||||
/// An event loop that listens for inputs from the `rx` receiver. Each received input will
|
||||
/// generate a command with the supplied command template. The generated command will then
|
||||
/// be executed, and this process will continue until the receiver's sender has closed.
|
||||
pub fn job(
|
||||
rx: Arc<Mutex<Receiver<WorkerResult>>>,
|
||||
cmd: Arc<CommandTemplate>,
|
||||
out_perm: Arc<Mutex<()>>,
|
||||
show_filesystem_errors: bool,
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
out_perm: &Mutex<()>,
|
||||
config: &Config,
|
||||
) -> ExitCode {
|
||||
let mut results: Vec<ExitCode> = Vec::new();
|
||||
loop {
|
||||
// Create a lock on the shared receiver for this thread.
|
||||
let lock = rx.lock().unwrap();
|
||||
// Output should be buffered when only running a single thread
|
||||
let buffer_output: bool = config.threads > 1;
|
||||
|
||||
let mut ret = ExitCode::Success;
|
||||
for result in results {
|
||||
// Obtain the next result from the receiver, else if the channel
|
||||
// has closed, exit from the loop
|
||||
let value: PathBuf = match lock.recv() {
|
||||
Ok(WorkerResult::Entry(val)) => val,
|
||||
Ok(WorkerResult::Error(err)) => {
|
||||
if show_filesystem_errors {
|
||||
let dir_entry = match result {
|
||||
WorkerResult::Entry(dir_entry) => dir_entry,
|
||||
WorkerResult::Error(err) => {
|
||||
if config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Err(_) => break,
|
||||
};
|
||||
|
||||
// Drop the lock so that other threads can read from the receiver.
|
||||
drop(lock);
|
||||
// Generate a command, execute it and store its exit code.
|
||||
results.push(cmd.generate_and_execute(&value, Arc::clone(&out_perm)))
|
||||
let code = cmd.execute(
|
||||
dir_entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
out_perm,
|
||||
buffer_output,
|
||||
);
|
||||
ret = merge_exitcodes([ret, code]);
|
||||
}
|
||||
// Returns error in case of any error.
|
||||
merge_exitcodes(&results)
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn batch(
|
||||
rx: Receiver<WorkerResult>,
|
||||
cmd: &CommandTemplate,
|
||||
show_filesystem_errors: bool,
|
||||
results: impl IntoIterator<Item = WorkerResult>,
|
||||
cmd: &CommandSet,
|
||||
config: &Config,
|
||||
) -> ExitCode {
|
||||
let paths = rx.iter().filter_map(|value| match value {
|
||||
WorkerResult::Entry(val) => Some(val),
|
||||
WorkerResult::Error(err) => {
|
||||
if show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
let paths = results
|
||||
.into_iter()
|
||||
.filter_map(|worker_result| match worker_result {
|
||||
WorkerResult::Entry(dir_entry) => Some(dir_entry.into_stripped_path(config)),
|
||||
WorkerResult::Error(err) => {
|
||||
if config.show_filesystem_errors {
|
||||
print_error(err.to_string());
|
||||
}
|
||||
None
|
||||
}
|
||||
None
|
||||
}
|
||||
});
|
||||
cmd.generate_and_execute_batch(paths)
|
||||
});
|
||||
|
||||
cmd.execute_batch(paths, config.batch_size, config.path_separator.as_deref())
|
||||
}
|
||||
|
|
571
src/exec/mod.rs
571
src/exec/mod.rs
|
@ -1,27 +1,24 @@
|
|||
mod command;
|
||||
mod input;
|
||||
mod job;
|
||||
mod token;
|
||||
|
||||
use std::ffi::OsString;
|
||||
use std::io;
|
||||
use std::iter;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Stdio};
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::process::Stdio;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use lazy_static::lazy_static;
|
||||
use regex::Regex;
|
||||
use anyhow::{bail, Result};
|
||||
use argmax::Command;
|
||||
|
||||
use crate::exit_codes::ExitCode;
|
||||
use crate::filesystem::strip_current_dir;
|
||||
use crate::exit_codes::{merge_exitcodes, ExitCode};
|
||||
use crate::fmt::{FormatTemplate, Token};
|
||||
|
||||
use self::command::execute_command;
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
use self::command::{execute_commands, handle_cmd_error};
|
||||
pub use self::job::{batch, job};
|
||||
use self::token::Token;
|
||||
|
||||
/// Execution mode of the command
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ExecutionMode {
|
||||
/// Command is executed for each search result
|
||||
OneByOne,
|
||||
|
@ -29,100 +26,231 @@ pub enum ExecutionMode {
|
|||
Batch,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct CommandSet {
|
||||
mode: ExecutionMode,
|
||||
commands: Vec<CommandTemplate>,
|
||||
}
|
||||
|
||||
impl CommandSet {
|
||||
pub fn new<I, T, S>(input: I) -> Result<CommandSet>
|
||||
where
|
||||
I: IntoIterator<Item = T>,
|
||||
T: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
Ok(CommandSet {
|
||||
mode: ExecutionMode::OneByOne,
|
||||
commands: input
|
||||
.into_iter()
|
||||
.map(CommandTemplate::new)
|
||||
.collect::<Result<_>>()?,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn new_batch<I, T, S>(input: I) -> Result<CommandSet>
|
||||
where
|
||||
I: IntoIterator<Item = T>,
|
||||
T: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
Ok(CommandSet {
|
||||
mode: ExecutionMode::Batch,
|
||||
commands: input
|
||||
.into_iter()
|
||||
.map(|args| {
|
||||
let cmd = CommandTemplate::new(args)?;
|
||||
if cmd.number_of_tokens() > 1 {
|
||||
bail!("Only one placeholder allowed for batch commands");
|
||||
}
|
||||
if cmd.args[0].has_tokens() {
|
||||
bail!("First argument of exec-batch is expected to be a fixed executable");
|
||||
}
|
||||
Ok(cmd)
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn in_batch_mode(&self) -> bool {
|
||||
self.mode == ExecutionMode::Batch
|
||||
}
|
||||
|
||||
pub fn execute(
|
||||
&self,
|
||||
input: &Path,
|
||||
path_separator: Option<&str>,
|
||||
out_perm: &Mutex<()>,
|
||||
buffer_output: bool,
|
||||
) -> ExitCode {
|
||||
let commands = self
|
||||
.commands
|
||||
.iter()
|
||||
.map(|c| c.generate(input, path_separator));
|
||||
execute_commands(commands, out_perm, buffer_output)
|
||||
}
|
||||
|
||||
pub fn execute_batch<I>(&self, paths: I, limit: usize, path_separator: Option<&str>) -> ExitCode
|
||||
where
|
||||
I: Iterator<Item = PathBuf>,
|
||||
{
|
||||
let builders: io::Result<Vec<_>> = self
|
||||
.commands
|
||||
.iter()
|
||||
.map(|c| CommandBuilder::new(c, limit))
|
||||
.collect();
|
||||
|
||||
match builders {
|
||||
Ok(mut builders) => {
|
||||
for path in paths {
|
||||
for builder in &mut builders {
|
||||
if let Err(e) = builder.push(&path, path_separator) {
|
||||
return handle_cmd_error(Some(&builder.cmd), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for builder in &mut builders {
|
||||
if let Err(e) = builder.finish() {
|
||||
return handle_cmd_error(Some(&builder.cmd), e);
|
||||
}
|
||||
}
|
||||
|
||||
merge_exitcodes(builders.iter().map(|b| b.exit_code()))
|
||||
}
|
||||
Err(e) => handle_cmd_error(None, e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a multi-exec command as it is built.
|
||||
#[derive(Debug)]
|
||||
struct CommandBuilder {
|
||||
pre_args: Vec<OsString>,
|
||||
path_arg: FormatTemplate,
|
||||
post_args: Vec<OsString>,
|
||||
cmd: Command,
|
||||
count: usize,
|
||||
limit: usize,
|
||||
exit_code: ExitCode,
|
||||
}
|
||||
|
||||
impl CommandBuilder {
|
||||
fn new(template: &CommandTemplate, limit: usize) -> io::Result<Self> {
|
||||
let mut pre_args = vec![];
|
||||
let mut path_arg = None;
|
||||
let mut post_args = vec![];
|
||||
|
||||
for arg in &template.args {
|
||||
if arg.has_tokens() {
|
||||
path_arg = Some(arg.clone());
|
||||
} else if path_arg.is_none() {
|
||||
pre_args.push(arg.generate("", None));
|
||||
} else {
|
||||
post_args.push(arg.generate("", None));
|
||||
}
|
||||
}
|
||||
|
||||
let cmd = Self::new_command(&pre_args)?;
|
||||
|
||||
Ok(Self {
|
||||
pre_args,
|
||||
path_arg: path_arg.unwrap(),
|
||||
post_args,
|
||||
cmd,
|
||||
count: 0,
|
||||
limit,
|
||||
exit_code: ExitCode::Success,
|
||||
})
|
||||
}
|
||||
|
||||
fn new_command(pre_args: &[OsString]) -> io::Result<Command> {
|
||||
let mut cmd = Command::new(&pre_args[0]);
|
||||
cmd.stdin(Stdio::inherit());
|
||||
cmd.stdout(Stdio::inherit());
|
||||
cmd.stderr(Stdio::inherit());
|
||||
cmd.try_args(&pre_args[1..])?;
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
fn push(&mut self, path: &Path, separator: Option<&str>) -> io::Result<()> {
|
||||
if self.limit > 0 && self.count >= self.limit {
|
||||
self.finish()?;
|
||||
}
|
||||
|
||||
let arg = self.path_arg.generate(path, separator);
|
||||
if !self
|
||||
.cmd
|
||||
.args_would_fit(iter::once(&arg).chain(&self.post_args))
|
||||
{
|
||||
self.finish()?;
|
||||
}
|
||||
|
||||
self.cmd.try_arg(arg)?;
|
||||
self.count += 1;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn finish(&mut self) -> io::Result<()> {
|
||||
if self.count > 0 {
|
||||
self.cmd.try_args(&self.post_args)?;
|
||||
if !self.cmd.status()?.success() {
|
||||
self.exit_code = ExitCode::GeneralError;
|
||||
}
|
||||
|
||||
self.cmd = Self::new_command(&self.pre_args)?;
|
||||
self.count = 0;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn exit_code(&self) -> ExitCode {
|
||||
self.exit_code
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a template that is utilized to generate command strings.
|
||||
///
|
||||
/// The template is meant to be coupled with an input in order to generate a command. The
|
||||
/// `generate_and_execute()` method will be used to generate a command and execute it.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct CommandTemplate {
|
||||
args: Vec<ArgumentTemplate>,
|
||||
mode: ExecutionMode,
|
||||
struct CommandTemplate {
|
||||
args: Vec<FormatTemplate>,
|
||||
}
|
||||
|
||||
impl CommandTemplate {
|
||||
pub fn new<I, S>(input: I) -> CommandTemplate
|
||||
fn new<I, S>(input: I) -> Result<CommandTemplate>
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
Self::build(input, ExecutionMode::OneByOne)
|
||||
}
|
||||
|
||||
pub fn new_batch<I, S>(input: I) -> Result<CommandTemplate>
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
let cmd = Self::build(input, ExecutionMode::Batch);
|
||||
if cmd.number_of_tokens() > 1 {
|
||||
return Err(anyhow!("Only one placeholder allowed for batch commands"));
|
||||
}
|
||||
if cmd.args[0].has_tokens() {
|
||||
return Err(anyhow!(
|
||||
"First argument of exec-batch is expected to be a fixed executable"
|
||||
));
|
||||
}
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
fn build<I, S>(input: I, mode: ExecutionMode) -> CommandTemplate
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: AsRef<str>,
|
||||
{
|
||||
lazy_static! {
|
||||
static ref PLACEHOLDER_PATTERN: Regex = Regex::new(r"\{(/?\.?|//)\}").unwrap();
|
||||
}
|
||||
|
||||
let mut args = Vec::new();
|
||||
let mut has_placeholder = false;
|
||||
|
||||
for arg in input {
|
||||
let arg = arg.as_ref();
|
||||
|
||||
let mut tokens = Vec::new();
|
||||
let mut start = 0;
|
||||
let tmpl = FormatTemplate::parse(arg);
|
||||
has_placeholder |= tmpl.has_tokens();
|
||||
args.push(tmpl);
|
||||
}
|
||||
|
||||
for placeholder in PLACEHOLDER_PATTERN.find_iter(arg) {
|
||||
// Leading text before the placeholder.
|
||||
if placeholder.start() > start {
|
||||
tokens.push(Token::Text(arg[start..placeholder.start()].to_owned()));
|
||||
}
|
||||
|
||||
start = placeholder.end();
|
||||
|
||||
match placeholder.as_str() {
|
||||
"{}" => tokens.push(Token::Placeholder),
|
||||
"{.}" => tokens.push(Token::NoExt),
|
||||
"{/}" => tokens.push(Token::Basename),
|
||||
"{//}" => tokens.push(Token::Parent),
|
||||
"{/.}" => tokens.push(Token::BasenameNoExt),
|
||||
_ => unreachable!("Unhandled placeholder"),
|
||||
}
|
||||
|
||||
has_placeholder = true;
|
||||
}
|
||||
|
||||
// Without a placeholder, the argument is just fixed text.
|
||||
if tokens.is_empty() {
|
||||
args.push(ArgumentTemplate::Text(arg.to_owned()));
|
||||
continue;
|
||||
}
|
||||
|
||||
if start < arg.len() {
|
||||
// Trailing text after last placeholder.
|
||||
tokens.push(Token::Text(arg[start..].to_owned()));
|
||||
}
|
||||
|
||||
args.push(ArgumentTemplate::Tokens(tokens));
|
||||
// We need to check that we have at least one argument, because if not
|
||||
// it will try to execute each file and directory it finds.
|
||||
//
|
||||
// Sadly, clap can't currently handle this for us, see
|
||||
// https://github.com/clap-rs/clap/issues/3542
|
||||
if args.is_empty() {
|
||||
bail!("No executable provided for --exec or --exec-batch");
|
||||
}
|
||||
|
||||
// If a placeholder token was not supplied, append one at the end of the command.
|
||||
if !has_placeholder {
|
||||
args.push(ArgumentTemplate::Tokens(vec![Token::Placeholder]));
|
||||
args.push(FormatTemplate::Tokens(vec![Token::Placeholder]));
|
||||
}
|
||||
|
||||
CommandTemplate { args, mode }
|
||||
Ok(CommandTemplate { args })
|
||||
}
|
||||
|
||||
fn number_of_tokens(&self) -> usize {
|
||||
|
@ -132,97 +260,13 @@ impl CommandTemplate {
|
|||
/// Generates and executes a command.
|
||||
///
|
||||
/// Using the internal `args` field, and a supplied `input` variable, a `Command` will be
|
||||
/// build. Once all arguments have been processed, the command is executed.
|
||||
pub fn generate_and_execute(&self, input: &Path, out_perm: Arc<Mutex<()>>) -> ExitCode {
|
||||
let input = strip_current_dir(input);
|
||||
|
||||
let mut cmd = Command::new(self.args[0].generate(&input));
|
||||
/// build.
|
||||
fn generate(&self, input: &Path, path_separator: Option<&str>) -> io::Result<Command> {
|
||||
let mut cmd = Command::new(self.args[0].generate(input, path_separator));
|
||||
for arg in &self.args[1..] {
|
||||
cmd.arg(arg.generate(&input));
|
||||
}
|
||||
|
||||
execute_command(cmd, &out_perm)
|
||||
}
|
||||
|
||||
pub fn in_batch_mode(&self) -> bool {
|
||||
self.mode == ExecutionMode::Batch
|
||||
}
|
||||
|
||||
pub fn generate_and_execute_batch<I>(&self, paths: I) -> ExitCode
|
||||
where
|
||||
I: Iterator<Item = PathBuf>,
|
||||
{
|
||||
let mut cmd = Command::new(self.args[0].generate(""));
|
||||
cmd.stdin(Stdio::inherit());
|
||||
cmd.stdout(Stdio::inherit());
|
||||
cmd.stderr(Stdio::inherit());
|
||||
|
||||
let mut paths: Vec<_> = paths.collect();
|
||||
let mut has_path = false;
|
||||
|
||||
for arg in &self.args[1..] {
|
||||
if arg.has_tokens() {
|
||||
paths.sort();
|
||||
|
||||
// A single `Tokens` is expected
|
||||
// So we can directly consume the iterator once and for all
|
||||
for path in &mut paths {
|
||||
cmd.arg(arg.generate(strip_current_dir(path)));
|
||||
has_path = true;
|
||||
}
|
||||
} else {
|
||||
cmd.arg(arg.generate(""));
|
||||
}
|
||||
}
|
||||
|
||||
if has_path {
|
||||
execute_command(cmd, &Mutex::new(()))
|
||||
} else {
|
||||
ExitCode::Success
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents a template for a single command argument.
|
||||
///
|
||||
/// The argument is either a collection of `Token`s including at least one placeholder variant, or
|
||||
/// a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
enum ArgumentTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl ArgumentTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
match self {
|
||||
ArgumentTemplate::Tokens(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate(&self, path: impl AsRef<Path>) -> OsString {
|
||||
use self::Token::*;
|
||||
|
||||
match *self {
|
||||
ArgumentTemplate::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match *token {
|
||||
Basename => s.push(basename(path.as_ref())),
|
||||
BasenameNoExt => {
|
||||
s.push(remove_extension(&PathBuf::from(basename(path.as_ref()))))
|
||||
}
|
||||
NoExt => s.push(remove_extension(path.as_ref())),
|
||||
Parent => s.push(dirname(path.as_ref())),
|
||||
Placeholder => s.push(path.as_ref()),
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
ArgumentTemplate::Text(ref text) => OsString::from(text),
|
||||
cmd.try_arg(arg.generate(input, path_separator))?;
|
||||
}
|
||||
Ok(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -230,16 +274,26 @@ impl ArgumentTemplate {
|
|||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn generate_str(template: &CommandTemplate, input: &str) -> Vec<String> {
|
||||
template
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| arg.generate(input, None).into_string().unwrap())
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_placeholder() {
|
||||
assert_eq!(
|
||||
CommandTemplate::new(&[&"echo", &"${SHELL}:"]),
|
||||
CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Text("${SHELL}:".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
],
|
||||
CommandSet::new(vec![vec![&"echo", &"${SHELL}:"]]).unwrap(),
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Text("${SHELL}:".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
]
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
}
|
||||
);
|
||||
|
@ -248,12 +302,14 @@ mod tests {
|
|||
#[test]
|
||||
fn tokens_with_no_extension() {
|
||||
assert_eq!(
|
||||
CommandTemplate::new(&["echo", "{.}"]),
|
||||
CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
CommandSet::new(vec![vec!["echo", "{.}"]]).unwrap(),
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
}
|
||||
);
|
||||
|
@ -262,12 +318,14 @@ mod tests {
|
|||
#[test]
|
||||
fn tokens_with_basename() {
|
||||
assert_eq!(
|
||||
CommandTemplate::new(&["echo", "{/}"]),
|
||||
CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Basename]),
|
||||
],
|
||||
CommandSet::new(vec![vec!["echo", "{/}"]]).unwrap(),
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Basename]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
}
|
||||
);
|
||||
|
@ -276,12 +334,14 @@ mod tests {
|
|||
#[test]
|
||||
fn tokens_with_parent() {
|
||||
assert_eq!(
|
||||
CommandTemplate::new(&["echo", "{//}"]),
|
||||
CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Parent]),
|
||||
],
|
||||
CommandSet::new(vec![vec!["echo", "{//}"]]).unwrap(),
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Parent]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
}
|
||||
);
|
||||
|
@ -290,30 +350,49 @@ mod tests {
|
|||
#[test]
|
||||
fn tokens_with_basename_no_extension() {
|
||||
assert_eq!(
|
||||
CommandTemplate::new(&["echo", "{/.}"]),
|
||||
CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
],
|
||||
CommandSet::new(vec![vec!["echo", "{/.}"]]).unwrap(),
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::BasenameNoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces() {
|
||||
let template = CommandTemplate::new(vec!["{{}}", "{{", "{.}}"]).unwrap();
|
||||
assert_eq!(
|
||||
generate_str(&template, "foo"),
|
||||
vec!["{}", "{", "{.}", "foo"]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_with_literal_braces_and_placeholder() {
|
||||
let template = CommandTemplate::new(vec!["{{{},end}"]).unwrap();
|
||||
assert_eq!(generate_str(&template, "foo"), vec!["{foo,end}"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tokens_multiple() {
|
||||
assert_eq!(
|
||||
CommandTemplate::new(&["cp", "{}", "{/.}.ext"]),
|
||||
CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("cp".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::Placeholder]),
|
||||
ArgumentTemplate::Tokens(vec![
|
||||
Token::BasenameNoExt,
|
||||
Token::Text(".ext".into())
|
||||
]),
|
||||
],
|
||||
CommandSet::new(vec![vec!["cp", "{}", "{/.}.ext"]]).unwrap(),
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("cp".into()),
|
||||
FormatTemplate::Tokens(vec![Token::Placeholder]),
|
||||
FormatTemplate::Tokens(vec![
|
||||
Token::BasenameNoExt,
|
||||
Token::Text(".ext".into())
|
||||
]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::OneByOne,
|
||||
}
|
||||
);
|
||||
|
@ -322,12 +401,14 @@ mod tests {
|
|||
#[test]
|
||||
fn tokens_single_batch() {
|
||||
assert_eq!(
|
||||
CommandTemplate::new_batch(&["echo", "{.}"]).unwrap(),
|
||||
CommandTemplate {
|
||||
args: vec![
|
||||
ArgumentTemplate::Text("echo".into()),
|
||||
ArgumentTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
CommandSet::new_batch(vec![vec!["echo", "{.}"]]).unwrap(),
|
||||
CommandSet {
|
||||
commands: vec![CommandTemplate {
|
||||
args: vec![
|
||||
FormatTemplate::Text("echo".into()),
|
||||
FormatTemplate::Tokens(vec![Token::NoExt]),
|
||||
],
|
||||
}],
|
||||
mode: ExecutionMode::Batch,
|
||||
}
|
||||
);
|
||||
|
@ -335,6 +416,58 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn tokens_multiple_batch() {
|
||||
assert!(CommandTemplate::new_batch(&["echo", "{.}", "{}"]).is_err());
|
||||
assert!(CommandSet::new_batch(vec![vec!["echo", "{.}", "{}"]]).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn template_no_args() {
|
||||
assert!(CommandTemplate::new::<Vec<_>, &'static str>(vec![]).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn command_set_no_args() {
|
||||
assert!(CommandSet::new(vec![vec!["echo"], vec![]]).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generate_custom_path_separator() {
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
};
|
||||
}
|
||||
|
||||
check!("foo", "foo");
|
||||
check!("foo/bar", "foo#bar");
|
||||
check!("/foo/bar/baz", "#foo#bar#baz");
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn generate_custom_path_separator_windows() {
|
||||
let arg = FormatTemplate::Tokens(vec![Token::Placeholder]);
|
||||
macro_rules! check {
|
||||
($input:expr, $expected:expr) => {
|
||||
assert_eq!(arg.generate($input, Some("#")), OsString::from($expected));
|
||||
};
|
||||
}
|
||||
|
||||
// path starting with a drive letter
|
||||
check!(r"C:\foo\bar", "C:#foo#bar");
|
||||
// UNC path
|
||||
check!(r"\\server\share\path", "##server#share#path");
|
||||
// Drive Relative path - no separator after the colon omits the RootDir path component.
|
||||
// This is uncommon, but valid
|
||||
check!(r"C:foo\bar", "C:foo#bar");
|
||||
|
||||
// forward slashes should get normalized and interpreted as separators
|
||||
check!("C:/foo/bar", "C:#foo#bar");
|
||||
check!("C:foo/bar", "C:foo#bar");
|
||||
|
||||
// Rust does not interpret "//server/share" as a UNC path, but rather as a normal
|
||||
// absolute path that begins with RootDir, and the two slashes get combined together as
|
||||
// a single path separator during normalization.
|
||||
//check!("//server/share/path", "##server#share#path");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,29 +0,0 @@
|
|||
use std::fmt::{self, Display, Formatter};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
|
@ -1,14 +1,21 @@
|
|||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
use std::process;
|
||||
|
||||
#[cfg(unix)]
|
||||
use nix::sys::signal::{raise, signal, SigHandler, Signal};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum ExitCode {
|
||||
Success,
|
||||
HasResults(bool),
|
||||
GeneralError,
|
||||
KilledBySigint,
|
||||
}
|
||||
|
||||
impl Into<i32> for ExitCode {
|
||||
fn into(self) -> i32 {
|
||||
match self {
|
||||
impl From<ExitCode> for i32 {
|
||||
fn from(code: ExitCode) -> Self {
|
||||
match code {
|
||||
ExitCode::Success => 0,
|
||||
ExitCode::HasResults(has_results) => !has_results as i32,
|
||||
ExitCode::GeneralError => 1,
|
||||
ExitCode::KilledBySigint => 130,
|
||||
}
|
||||
|
@ -16,13 +23,28 @@ impl Into<i32> for ExitCode {
|
|||
}
|
||||
|
||||
impl ExitCode {
|
||||
fn is_error(&self) -> bool {
|
||||
*self != ExitCode::Success
|
||||
fn is_error(self) -> bool {
|
||||
i32::from(self) != 0
|
||||
}
|
||||
|
||||
/// Exit the process with the appropriate code.
|
||||
pub fn exit(self) -> ! {
|
||||
#[cfg(unix)]
|
||||
if self == ExitCode::KilledBySigint {
|
||||
// Get rid of the SIGINT handler, if present, and raise SIGINT
|
||||
unsafe {
|
||||
if signal(Signal::SIGINT, SigHandler::SigDfl).is_ok() {
|
||||
let _ = raise(Signal::SIGINT);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
process::exit(self.into())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn merge_exitcodes(results: &[ExitCode]) -> ExitCode {
|
||||
if results.iter().any(ExitCode::is_error) {
|
||||
pub fn merge_exitcodes(results: impl IntoIterator<Item = ExitCode>) -> ExitCode {
|
||||
if results.into_iter().any(ExitCode::is_error) {
|
||||
return ExitCode::GeneralError;
|
||||
}
|
||||
ExitCode::Success
|
||||
|
@ -34,38 +56,38 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn success_when_no_results() {
|
||||
assert_eq!(merge_exitcodes(&[]), ExitCode::Success);
|
||||
assert_eq!(merge_exitcodes([]), ExitCode::Success);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn general_error_if_at_least_one_error() {
|
||||
assert_eq!(
|
||||
merge_exitcodes(&[ExitCode::GeneralError]),
|
||||
merge_exitcodes([ExitCode::GeneralError]),
|
||||
ExitCode::GeneralError
|
||||
);
|
||||
assert_eq!(
|
||||
merge_exitcodes(&[ExitCode::KilledBySigint]),
|
||||
merge_exitcodes([ExitCode::KilledBySigint]),
|
||||
ExitCode::GeneralError
|
||||
);
|
||||
assert_eq!(
|
||||
merge_exitcodes(&[ExitCode::KilledBySigint, ExitCode::Success]),
|
||||
merge_exitcodes([ExitCode::KilledBySigint, ExitCode::Success]),
|
||||
ExitCode::GeneralError
|
||||
);
|
||||
assert_eq!(
|
||||
merge_exitcodes(&[ExitCode::Success, ExitCode::GeneralError]),
|
||||
merge_exitcodes([ExitCode::Success, ExitCode::GeneralError]),
|
||||
ExitCode::GeneralError
|
||||
);
|
||||
assert_eq!(
|
||||
merge_exitcodes(&[ExitCode::GeneralError, ExitCode::KilledBySigint]),
|
||||
merge_exitcodes([ExitCode::GeneralError, ExitCode::KilledBySigint]),
|
||||
ExitCode::GeneralError
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn success_if_no_error() {
|
||||
assert_eq!(merge_exitcodes(&[ExitCode::Success]), ExitCode::Success);
|
||||
assert_eq!(merge_exitcodes([ExitCode::Success]), ExitCode::Success);
|
||||
assert_eq!(
|
||||
merge_exitcodes(&[ExitCode::Success, ExitCode::Success]),
|
||||
merge_exitcodes([ExitCode::Success, ExitCode::Success]),
|
||||
ExitCode::Success
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,13 +1,15 @@
|
|||
use std::borrow::Cow;
|
||||
use std::env::current_dir;
|
||||
use std::env;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
use std::os::unix::fs::{FileTypeExt, PermissionsExt};
|
||||
use std::os::unix::fs::FileTypeExt;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::walk;
|
||||
use normpath::PathExt;
|
||||
|
||||
use crate::dir_entry;
|
||||
|
||||
pub fn path_absolute_form(path: &Path) -> io::Result<PathBuf> {
|
||||
if path.is_absolute() {
|
||||
|
@ -15,7 +17,7 @@ pub fn path_absolute_form(path: &Path) -> io::Result<PathBuf> {
|
|||
}
|
||||
|
||||
let path = path.strip_prefix(".").unwrap_or(path);
|
||||
current_dir().map(|path_buf| path_buf.join(path))
|
||||
env::current_dir().map(|path_buf| path_buf.join(path))
|
||||
}
|
||||
|
||||
pub fn absolute_path(path: &Path) -> io::Result<PathBuf> {
|
||||
|
@ -33,23 +35,13 @@ pub fn absolute_path(path: &Path) -> io::Result<PathBuf> {
|
|||
Ok(path_buf)
|
||||
}
|
||||
|
||||
// Path::is_dir() is not guaranteed to be intuitively correct for "." and ".."
|
||||
// See: https://github.com/rust-lang/rust/issues/45302
|
||||
pub fn is_dir(path: &Path) -> bool {
|
||||
path.is_dir() && (path.file_name().is_some() || path.canonicalize().is_ok())
|
||||
pub fn is_existing_directory(path: &Path) -> bool {
|
||||
// Note: we do not use `.exists()` here, as `.` always exists, even if
|
||||
// the CWD has been deleted.
|
||||
path.is_dir() && (path.file_name().is_some() || path.normalize().is_ok())
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_executable(md: &fs::Metadata) -> bool {
|
||||
md.permissions().mode() & 0o111 != 0
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_executable(_: &fs::Metadata) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
pub fn is_empty(entry: &walk::DirEntry) -> bool {
|
||||
pub fn is_empty(entry: &dir_entry::DirEntry) -> bool {
|
||||
if let Some(file_type) = entry.file_type() {
|
||||
if file_type.is_dir() {
|
||||
if let Ok(mut entries) = fs::read_dir(entry.path()) {
|
||||
|
@ -68,22 +60,42 @@ pub fn is_empty(entry: &walk::DirEntry) -> bool {
|
|||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_socket(ft: &fs::FileType) -> bool {
|
||||
ft.is_socket()
|
||||
pub fn is_block_device(ft: fs::FileType) -> bool {
|
||||
ft.is_block_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_socket(_: &fs::FileType) -> bool {
|
||||
pub fn is_block_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_pipe(ft: &fs::FileType) -> bool {
|
||||
pub fn is_char_device(ft: fs::FileType) -> bool {
|
||||
ft.is_char_device()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_char_device(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_socket(ft: fs::FileType) -> bool {
|
||||
ft.is_socket()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_socket(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(any(unix, target_os = "redox"))]
|
||||
pub fn is_pipe(ft: fs::FileType) -> bool {
|
||||
ft.is_fifo()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
pub fn is_pipe(_: &fs::FileType) -> bool {
|
||||
pub fn is_pipe(_: fs::FileType) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
|
@ -108,6 +120,21 @@ pub fn strip_current_dir(path: &Path) -> &Path {
|
|||
path.strip_prefix(".").unwrap_or(path)
|
||||
}
|
||||
|
||||
/// Default value for the path_separator, mainly for MSYS/MSYS2, which set the MSYSTEM
|
||||
/// environment variable, and we set fd's path separator to '/' rather than Rust's default of '\'.
|
||||
///
|
||||
/// Returns Some to use a nonstandard path separator, or None to use rust's default on the target
|
||||
/// platform.
|
||||
pub fn default_path_separator() -> Option<String> {
|
||||
if cfg!(windows) {
|
||||
let msystem = env::var("MSYSTEM").ok()?;
|
||||
if !msystem.is_empty() {
|
||||
return Some("/".to_owned());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::strip_current_dir;
|
||||
|
|
|
@ -1,24 +1,43 @@
|
|||
use crate::dir_entry;
|
||||
use crate::filesystem;
|
||||
|
||||
use faccess::PathExt;
|
||||
|
||||
/// Whether or not to show
|
||||
#[derive(Default)]
|
||||
pub struct FileTypes {
|
||||
pub files: bool,
|
||||
pub directories: bool,
|
||||
pub symlinks: bool,
|
||||
pub block_devices: bool,
|
||||
pub char_devices: bool,
|
||||
pub sockets: bool,
|
||||
pub pipes: bool,
|
||||
pub executables_only: bool,
|
||||
pub empty_only: bool,
|
||||
}
|
||||
|
||||
impl Default for FileTypes {
|
||||
fn default() -> FileTypes {
|
||||
FileTypes {
|
||||
files: false,
|
||||
directories: false,
|
||||
symlinks: false,
|
||||
sockets: false,
|
||||
pipes: false,
|
||||
executables_only: false,
|
||||
empty_only: false,
|
||||
impl FileTypes {
|
||||
pub fn should_ignore(&self, entry: &dir_entry::DirEntry) -> bool {
|
||||
if let Some(ref entry_type) = entry.file_type() {
|
||||
(!self.files && entry_type.is_file())
|
||||
|| (!self.directories && entry_type.is_dir())
|
||||
|| (!self.symlinks && entry_type.is_symlink())
|
||||
|| (!self.block_devices && filesystem::is_block_device(*entry_type))
|
||||
|| (!self.char_devices && filesystem::is_char_device(*entry_type))
|
||||
|| (!self.sockets && filesystem::is_socket(*entry_type))
|
||||
|| (!self.pipes && filesystem::is_pipe(*entry_type))
|
||||
|| (self.executables_only && !entry.path().executable())
|
||||
|| (self.empty_only && !filesystem::is_empty(entry))
|
||||
|| !(entry_type.is_file()
|
||||
|| entry_type.is_dir()
|
||||
|| entry_type.is_symlink()
|
||||
|| filesystem::is_block_device(*entry_type)
|
||||
|| filesystem::is_char_device(*entry_type)
|
||||
|| filesystem::is_socket(*entry_type)
|
||||
|| filesystem::is_pipe(*entry_type))
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,13 +1,14 @@
|
|||
use anyhow::{anyhow, Result};
|
||||
use nix::unistd::{Group, User};
|
||||
use std::fs;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub struct OwnerFilter {
|
||||
uid: Check<u32>,
|
||||
gid: Check<u32>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
enum Check<T> {
|
||||
Equal(T),
|
||||
NotEq(T),
|
||||
|
@ -15,10 +16,15 @@ enum Check<T> {
|
|||
}
|
||||
|
||||
impl OwnerFilter {
|
||||
const IGNORE: Self = OwnerFilter {
|
||||
uid: Check::Ignore,
|
||||
gid: Check::Ignore,
|
||||
};
|
||||
|
||||
/// Parses an owner constraint
|
||||
/// Returns an error if the string is invalid
|
||||
/// Returns Ok(None) when string is acceptable but a noop (such as "" or ":")
|
||||
pub fn from_string(input: &str) -> Result<Option<Self>> {
|
||||
pub fn from_string(input: &str) -> Result<Self> {
|
||||
let mut it = input.split(':');
|
||||
let (fst, snd) = (it.next(), it.next());
|
||||
|
||||
|
@ -30,22 +36,33 @@ impl OwnerFilter {
|
|||
}
|
||||
|
||||
let uid = Check::parse(fst, |s| {
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_user_by_name(s).map(|user| user.uid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
|
||||
if let Ok(uid) = s.parse() {
|
||||
Ok(uid)
|
||||
} else {
|
||||
User::from_name(s)?
|
||||
.map(|user| user.uid.as_raw())
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized user name", s))
|
||||
}
|
||||
})?;
|
||||
let gid = Check::parse(snd, |s| {
|
||||
s.parse()
|
||||
.ok()
|
||||
.or_else(|| users::get_group_by_name(s).map(|group| group.gid()))
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
|
||||
if let Ok(gid) = s.parse() {
|
||||
Ok(gid)
|
||||
} else {
|
||||
Group::from_name(s)?
|
||||
.map(|group| group.gid.as_raw())
|
||||
.ok_or_else(|| anyhow!("'{}' is not a recognized group name", s))
|
||||
}
|
||||
})?;
|
||||
|
||||
if let (Check::Ignore, Check::Ignore) = (uid, gid) {
|
||||
Ok(None)
|
||||
Ok(OwnerFilter { uid, gid })
|
||||
}
|
||||
|
||||
/// If self is a no-op (ignore both uid and gid) then return `None`, otherwise wrap in a `Some`
|
||||
pub fn filter_ignore(self) -> Option<Self> {
|
||||
if self == Self::IGNORE {
|
||||
None
|
||||
} else {
|
||||
Ok(Some(OwnerFilter { uid, gid }))
|
||||
Some(self)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -106,16 +123,16 @@ mod owner_parsing {
|
|||
|
||||
use super::Check::*;
|
||||
owner_tests! {
|
||||
empty: "" => Ok(None),
|
||||
uid_only: "5" => Ok(Some(OwnerFilter { uid: Equal(5), gid: Ignore })),
|
||||
uid_gid: "9:3" => Ok(Some(OwnerFilter { uid: Equal(9), gid: Equal(3) })),
|
||||
gid_only: ":8" => Ok(Some(OwnerFilter { uid: Ignore, gid: Equal(8) })),
|
||||
colon_only: ":" => Ok(None),
|
||||
trailing: "5:" => Ok(Some(OwnerFilter { uid: Equal(5), gid: Ignore })),
|
||||
empty: "" => Ok(OwnerFilter::IGNORE),
|
||||
uid_only: "5" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }),
|
||||
uid_gid: "9:3" => Ok(OwnerFilter { uid: Equal(9), gid: Equal(3) }),
|
||||
gid_only: ":8" => Ok(OwnerFilter { uid: Ignore, gid: Equal(8) }),
|
||||
colon_only: ":" => Ok(OwnerFilter::IGNORE),
|
||||
trailing: "5:" => Ok(OwnerFilter { uid: Equal(5), gid: Ignore }),
|
||||
|
||||
uid_negate: "!5" => Ok(Some(OwnerFilter { uid: NotEq(5), gid: Ignore })),
|
||||
both_negate:"!4:!3" => Ok(Some(OwnerFilter { uid: NotEq(4), gid: NotEq(3) })),
|
||||
uid_not_gid:"6:!8" => Ok(Some(OwnerFilter { uid: Equal(6), gid: NotEq(8) })),
|
||||
uid_negate: "!5" => Ok(OwnerFilter { uid: NotEq(5), gid: Ignore }),
|
||||
both_negate:"!4:!3" => Ok(OwnerFilter { uid: NotEq(4), gid: NotEq(3) }),
|
||||
uid_not_gid:"6:!8" => Ok(OwnerFilter { uid: Equal(6), gid: NotEq(8) }),
|
||||
|
||||
more_colons:"3:5:" => Err(_),
|
||||
only_colons:"::" => Err(_),
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
use lazy_static::lazy_static;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use regex::Regex;
|
||||
|
||||
lazy_static! {
|
||||
static ref SIZE_CAPTURES: Regex = Regex::new(r"(?i)^([+-])(\d+)(b|[kmgt]i?b?)$").unwrap();
|
||||
}
|
||||
static SIZE_CAPTURES: OnceLock<Regex> = OnceLock::new();
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum SizeFilter {
|
||||
Max(u64),
|
||||
Min(u64),
|
||||
Equals(u64),
|
||||
}
|
||||
|
||||
// SI prefixes (powers of 10)
|
||||
|
@ -24,12 +25,19 @@ const GIBI: u64 = MEBI * 1024;
|
|||
const TEBI: u64 = GIBI * 1024;
|
||||
|
||||
impl SizeFilter {
|
||||
pub fn from_string(s: &str) -> Option<Self> {
|
||||
if !SIZE_CAPTURES.is_match(s) {
|
||||
pub fn from_string(s: &str) -> anyhow::Result<Self> {
|
||||
SizeFilter::parse_opt(s)
|
||||
.ok_or_else(|| anyhow!("'{}' is not a valid size constraint. See 'fd --help'.", s))
|
||||
}
|
||||
|
||||
fn parse_opt(s: &str) -> Option<Self> {
|
||||
let pattern =
|
||||
SIZE_CAPTURES.get_or_init(|| Regex::new(r"(?i)^([+-]?)(\d+)(b|[kmgt]i?b?)$").unwrap());
|
||||
if !pattern.is_match(s) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let captures = SIZE_CAPTURES.captures(s)?;
|
||||
let captures = pattern.captures(s)?;
|
||||
let limit_kind = captures.get(1).map_or("+", |m| m.as_str());
|
||||
let quantity = captures
|
||||
.get(2)
|
||||
|
@ -49,16 +57,19 @@ impl SizeFilter {
|
|||
};
|
||||
|
||||
let size = quantity * multiplier;
|
||||
Some(match limit_kind {
|
||||
"+" => SizeFilter::Min(size),
|
||||
_ => SizeFilter::Max(size),
|
||||
})
|
||||
match limit_kind {
|
||||
"+" => Some(SizeFilter::Min(size)),
|
||||
"-" => Some(SizeFilter::Max(size)),
|
||||
"" => Some(SizeFilter::Equals(size)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_within(&self, size: u64) -> bool {
|
||||
match *self {
|
||||
SizeFilter::Max(limit) => size <= limit,
|
||||
SizeFilter::Min(limit) => size >= limit,
|
||||
SizeFilter::Equals(limit) => size == limit,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -89,70 +100,70 @@ mod tests {
|
|||
kilo_plus: ("+1k", SizeFilter::Min(1000)),
|
||||
kilo_plus_suffix: ("+1kb", SizeFilter::Min(1000)),
|
||||
kilo_minus: ("-1k", SizeFilter::Max(1000)),
|
||||
kilo_minus_multiplier: ("-100k", SizeFilter::Max(100000)),
|
||||
kilo_minus_multiplier: ("-100k", SizeFilter::Max(100_000)),
|
||||
kilo_minus_suffix: ("-1kb", SizeFilter::Max(1000)),
|
||||
kilo_plus_upper: ("+1K", SizeFilter::Min(1000)),
|
||||
kilo_plus_suffix_upper: ("+1KB", SizeFilter::Min(1000)),
|
||||
kilo_minus_upper: ("-1K", SizeFilter::Max(1000)),
|
||||
kilo_minus_suffix_upper: ("-1Kb", SizeFilter::Max(1000)),
|
||||
kibi_plus: ("+1ki", SizeFilter::Min(1024)),
|
||||
kibi_plus_multiplier: ("+10ki", SizeFilter::Min(10240)),
|
||||
kibi_plus_multiplier: ("+10ki", SizeFilter::Min(10_240)),
|
||||
kibi_plus_suffix: ("+1kib", SizeFilter::Min(1024)),
|
||||
kibi_minus: ("-1ki", SizeFilter::Max(1024)),
|
||||
kibi_minus_multiplier: ("-100ki", SizeFilter::Max(102400)),
|
||||
kibi_minus_multiplier: ("-100ki", SizeFilter::Max(102_400)),
|
||||
kibi_minus_suffix: ("-1kib", SizeFilter::Max(1024)),
|
||||
kibi_plus_upper: ("+1KI", SizeFilter::Min(1024)),
|
||||
kibi_plus_suffix_upper: ("+1KiB", SizeFilter::Min(1024)),
|
||||
kibi_minus_upper: ("-1Ki", SizeFilter::Max(1024)),
|
||||
kibi_minus_suffix_upper: ("-1KIB", SizeFilter::Max(1024)),
|
||||
mega_plus: ("+1m", SizeFilter::Min(1000000)),
|
||||
mega_plus_suffix: ("+1mb", SizeFilter::Min(1000000)),
|
||||
mega_minus: ("-1m", SizeFilter::Max(1000000)),
|
||||
mega_minus_suffix: ("-1mb", SizeFilter::Max(1000000)),
|
||||
mega_plus_upper: ("+1M", SizeFilter::Min(1000000)),
|
||||
mega_plus_suffix_upper: ("+1MB", SizeFilter::Min(1000000)),
|
||||
mega_minus_upper: ("-1M", SizeFilter::Max(1000000)),
|
||||
mega_minus_suffix_upper: ("-1Mb", SizeFilter::Max(1000000)),
|
||||
mebi_plus: ("+1mi", SizeFilter::Min(1048576)),
|
||||
mebi_plus_suffix: ("+1mib", SizeFilter::Min(1048576)),
|
||||
mebi_minus: ("-1mi", SizeFilter::Max(1048576)),
|
||||
mebi_minus_suffix: ("-1mib", SizeFilter::Max(1048576)),
|
||||
mebi_plus_upper: ("+1MI", SizeFilter::Min(1048576)),
|
||||
mebi_plus_suffix_upper: ("+1MiB", SizeFilter::Min(1048576)),
|
||||
mebi_minus_upper: ("-1Mi", SizeFilter::Max(1048576)),
|
||||
mebi_minus_suffix_upper: ("-1MIB", SizeFilter::Max(1048576)),
|
||||
giga_plus: ("+1g", SizeFilter::Min(1000000000)),
|
||||
giga_plus_suffix: ("+1gb", SizeFilter::Min(1000000000)),
|
||||
giga_minus: ("-1g", SizeFilter::Max(1000000000)),
|
||||
giga_minus_suffix: ("-1gb", SizeFilter::Max(1000000000)),
|
||||
giga_plus_upper: ("+1G", SizeFilter::Min(1000000000)),
|
||||
giga_plus_suffix_upper: ("+1GB", SizeFilter::Min(1000000000)),
|
||||
giga_minus_upper: ("-1G", SizeFilter::Max(1000000000)),
|
||||
giga_minus_suffix_upper: ("-1Gb", SizeFilter::Max(1000000000)),
|
||||
gibi_plus: ("+1gi", SizeFilter::Min(1073741824)),
|
||||
gibi_plus_suffix: ("+1gib", SizeFilter::Min(1073741824)),
|
||||
gibi_minus: ("-1gi", SizeFilter::Max(1073741824)),
|
||||
gibi_minus_suffix: ("-1gib", SizeFilter::Max(1073741824)),
|
||||
gibi_plus_upper: ("+1GI", SizeFilter::Min(1073741824)),
|
||||
gibi_plus_suffix_upper: ("+1GiB", SizeFilter::Min(1073741824)),
|
||||
gibi_minus_upper: ("-1Gi", SizeFilter::Max(1073741824)),
|
||||
gibi_minus_suffix_upper: ("-1GIB", SizeFilter::Max(1073741824)),
|
||||
tera_plus: ("+1t", SizeFilter::Min(1000000000000)),
|
||||
tera_plus_suffix: ("+1tb", SizeFilter::Min(1000000000000)),
|
||||
tera_minus: ("-1t", SizeFilter::Max(1000000000000)),
|
||||
tera_minus_suffix: ("-1tb", SizeFilter::Max(1000000000000)),
|
||||
tera_plus_upper: ("+1T", SizeFilter::Min(1000000000000)),
|
||||
tera_plus_suffix_upper: ("+1TB", SizeFilter::Min(1000000000000)),
|
||||
tera_minus_upper: ("-1T", SizeFilter::Max(1000000000000)),
|
||||
tera_minus_suffix_upper: ("-1Tb", SizeFilter::Max(1000000000000)),
|
||||
tebi_plus: ("+1ti", SizeFilter::Min(1099511627776)),
|
||||
tebi_plus_suffix: ("+1tib", SizeFilter::Min(1099511627776)),
|
||||
tebi_minus: ("-1ti", SizeFilter::Max(1099511627776)),
|
||||
tebi_minus_suffix: ("-1tib", SizeFilter::Max(1099511627776)),
|
||||
tebi_plus_upper: ("+1TI", SizeFilter::Min(1099511627776)),
|
||||
tebi_plus_suffix_upper: ("+1TiB", SizeFilter::Min(1099511627776)),
|
||||
tebi_minus_upper: ("-1Ti", SizeFilter::Max(1099511627776)),
|
||||
tebi_minus_suffix_upper: ("-1TIB", SizeFilter::Max(1099511627776)),
|
||||
mega_plus: ("+1m", SizeFilter::Min(1_000_000)),
|
||||
mega_plus_suffix: ("+1mb", SizeFilter::Min(1_000_000)),
|
||||
mega_minus: ("-1m", SizeFilter::Max(1_000_000)),
|
||||
mega_minus_suffix: ("-1mb", SizeFilter::Max(1_000_000)),
|
||||
mega_plus_upper: ("+1M", SizeFilter::Min(1_000_000)),
|
||||
mega_plus_suffix_upper: ("+1MB", SizeFilter::Min(1_000_000)),
|
||||
mega_minus_upper: ("-1M", SizeFilter::Max(1_000_000)),
|
||||
mega_minus_suffix_upper: ("-1Mb", SizeFilter::Max(1_000_000)),
|
||||
mebi_plus: ("+1mi", SizeFilter::Min(1_048_576)),
|
||||
mebi_plus_suffix: ("+1mib", SizeFilter::Min(1_048_576)),
|
||||
mebi_minus: ("-1mi", SizeFilter::Max(1_048_576)),
|
||||
mebi_minus_suffix: ("-1mib", SizeFilter::Max(1_048_576)),
|
||||
mebi_plus_upper: ("+1MI", SizeFilter::Min(1_048_576)),
|
||||
mebi_plus_suffix_upper: ("+1MiB", SizeFilter::Min(1_048_576)),
|
||||
mebi_minus_upper: ("-1Mi", SizeFilter::Max(1_048_576)),
|
||||
mebi_minus_suffix_upper: ("-1MIB", SizeFilter::Max(1_048_576)),
|
||||
giga_plus: ("+1g", SizeFilter::Min(1_000_000_000)),
|
||||
giga_plus_suffix: ("+1gb", SizeFilter::Min(1_000_000_000)),
|
||||
giga_minus: ("-1g", SizeFilter::Max(1_000_000_000)),
|
||||
giga_minus_suffix: ("-1gb", SizeFilter::Max(1_000_000_000)),
|
||||
giga_plus_upper: ("+1G", SizeFilter::Min(1_000_000_000)),
|
||||
giga_plus_suffix_upper: ("+1GB", SizeFilter::Min(1_000_000_000)),
|
||||
giga_minus_upper: ("-1G", SizeFilter::Max(1_000_000_000)),
|
||||
giga_minus_suffix_upper: ("-1Gb", SizeFilter::Max(1_000_000_000)),
|
||||
gibi_plus: ("+1gi", SizeFilter::Min(1_073_741_824)),
|
||||
gibi_plus_suffix: ("+1gib", SizeFilter::Min(1_073_741_824)),
|
||||
gibi_minus: ("-1gi", SizeFilter::Max(1_073_741_824)),
|
||||
gibi_minus_suffix: ("-1gib", SizeFilter::Max(1_073_741_824)),
|
||||
gibi_plus_upper: ("+1GI", SizeFilter::Min(1_073_741_824)),
|
||||
gibi_plus_suffix_upper: ("+1GiB", SizeFilter::Min(1_073_741_824)),
|
||||
gibi_minus_upper: ("-1Gi", SizeFilter::Max(1_073_741_824)),
|
||||
gibi_minus_suffix_upper: ("-1GIB", SizeFilter::Max(1_073_741_824)),
|
||||
tera_plus: ("+1t", SizeFilter::Min(1_000_000_000_000)),
|
||||
tera_plus_suffix: ("+1tb", SizeFilter::Min(1_000_000_000_000)),
|
||||
tera_minus: ("-1t", SizeFilter::Max(1_000_000_000_000)),
|
||||
tera_minus_suffix: ("-1tb", SizeFilter::Max(1_000_000_000_000)),
|
||||
tera_plus_upper: ("+1T", SizeFilter::Min(1_000_000_000_000)),
|
||||
tera_plus_suffix_upper: ("+1TB", SizeFilter::Min(1_000_000_000_000)),
|
||||
tera_minus_upper: ("-1T", SizeFilter::Max(1_000_000_000_000)),
|
||||
tera_minus_suffix_upper: ("-1Tb", SizeFilter::Max(1_000_000_000_000)),
|
||||
tebi_plus: ("+1ti", SizeFilter::Min(1_099_511_627_776)),
|
||||
tebi_plus_suffix: ("+1tib", SizeFilter::Min(1_099_511_627_776)),
|
||||
tebi_minus: ("-1ti", SizeFilter::Max(1_099_511_627_776)),
|
||||
tebi_minus_suffix: ("-1tib", SizeFilter::Max(1_099_511_627_776)),
|
||||
tebi_plus_upper: ("+1TI", SizeFilter::Min(1_099_511_627_776)),
|
||||
tebi_plus_suffix_upper: ("+1TiB", SizeFilter::Min(1_099_511_627_776)),
|
||||
tebi_minus_upper: ("-1Ti", SizeFilter::Max(1_099_511_627_776)),
|
||||
tebi_minus_suffix_upper: ("-1TIB", SizeFilter::Max(1_099_511_627_776)),
|
||||
}
|
||||
|
||||
/// Invalid parse testing
|
||||
|
@ -162,7 +173,7 @@ mod tests {
|
|||
#[test]
|
||||
fn $name() {
|
||||
let i = SizeFilter::from_string($value);
|
||||
assert!(i.is_none());
|
||||
assert!(i.is_err());
|
||||
}
|
||||
)*
|
||||
};
|
||||
|
@ -170,7 +181,6 @@ mod tests {
|
|||
|
||||
// Invalid parse data
|
||||
gen_size_filter_failure! {
|
||||
ensure_missing_symbol_returns_none: "10M",
|
||||
ensure_missing_number_returns_none: "+g",
|
||||
ensure_missing_unit_returns_none: "+18",
|
||||
ensure_bad_format_returns_none_1: "$10M",
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
use chrono::{DateTime, Local, NaiveDate, NaiveDateTime};
|
||||
|
||||
use std::time::SystemTime;
|
||||
|
||||
/// Filter based on time ranges.
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum TimeFilter {
|
||||
Before(SystemTime),
|
||||
After(SystemTime),
|
||||
|
@ -11,8 +13,30 @@ impl TimeFilter {
|
|||
fn from_str(ref_time: &SystemTime, s: &str) -> Option<SystemTime> {
|
||||
humantime::parse_duration(s)
|
||||
.map(|duration| *ref_time - duration)
|
||||
.or_else(|_| humantime::parse_rfc3339_weak(s))
|
||||
.ok()
|
||||
.or_else(|| {
|
||||
DateTime::parse_from_rfc3339(s)
|
||||
.map(|dt| dt.into())
|
||||
.ok()
|
||||
.or_else(|| {
|
||||
NaiveDate::parse_from_str(s, "%F")
|
||||
.ok()?
|
||||
.and_hms_opt(0, 0, 0)?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
NaiveDateTime::parse_from_str(s, "%F %T")
|
||||
.ok()?
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
})
|
||||
.or_else(|| {
|
||||
let timestamp_secs = s.strip_prefix('@')?.parse().ok()?;
|
||||
DateTime::from_timestamp(timestamp_secs, 0).map(Into::into)
|
||||
})
|
||||
.map(|dt| dt.into())
|
||||
})
|
||||
}
|
||||
|
||||
pub fn before(ref_time: &SystemTime, s: &str) -> Option<TimeFilter> {
|
||||
|
@ -25,8 +49,8 @@ impl TimeFilter {
|
|||
|
||||
pub fn applies_to(&self, t: &SystemTime) -> bool {
|
||||
match self {
|
||||
TimeFilter::Before(limit) => t <= limit,
|
||||
TimeFilter::After(limit) => t >= limit,
|
||||
TimeFilter::Before(limit) => t < limit,
|
||||
TimeFilter::After(limit) => t > limit,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -38,7 +62,13 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn is_time_filter_applicable() {
|
||||
let ref_time = humantime::parse_rfc3339("2010-10-10T10:10:10Z").unwrap();
|
||||
let ref_time = NaiveDateTime::parse_from_str("2010-10-10 10:10:10", "%F %T")
|
||||
.unwrap()
|
||||
.and_local_timezone(Local)
|
||||
.latest()
|
||||
.unwrap()
|
||||
.into();
|
||||
|
||||
assert!(TimeFilter::after(&ref_time, "1min")
|
||||
.unwrap()
|
||||
.applies_to(&ref_time));
|
||||
|
@ -75,5 +105,66 @@ mod tests {
|
|||
assert!(!TimeFilter::after(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
let same_day = "2010-10-10";
|
||||
assert!(!TimeFilter::before(&ref_time, same_day)
|
||||
.unwrap()
|
||||
.applies_to(&ref_time));
|
||||
assert!(!TimeFilter::before(&ref_time, same_day)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
assert!(TimeFilter::after(&ref_time, same_day)
|
||||
.unwrap()
|
||||
.applies_to(&ref_time));
|
||||
assert!(TimeFilter::after(&ref_time, same_day)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
let ref_time = DateTime::parse_from_rfc3339("2010-10-10T10:10:10+00:00")
|
||||
.unwrap()
|
||||
.into();
|
||||
let t1m_ago = ref_time - Duration::from_secs(60);
|
||||
let t10s_before = "2010-10-10T10:10:00+00:00";
|
||||
assert!(!TimeFilter::before(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&ref_time));
|
||||
assert!(TimeFilter::before(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
assert!(TimeFilter::after(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&ref_time));
|
||||
assert!(!TimeFilter::after(&ref_time, t10s_before)
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago));
|
||||
|
||||
let ref_timestamp = 1707723412u64; // Mon Feb 12 07:36:52 UTC 2024
|
||||
let ref_time = DateTime::parse_from_rfc3339("2024-02-12T07:36:52+00:00")
|
||||
.unwrap()
|
||||
.into();
|
||||
let t1m_ago = ref_time - Duration::from_secs(60);
|
||||
let t1s_later = ref_time + Duration::from_secs(1);
|
||||
// Timestamp only supported via '@' prefix
|
||||
assert!(TimeFilter::before(&ref_time, &ref_timestamp.to_string()).is_none());
|
||||
assert!(
|
||||
TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::before(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later)
|
||||
);
|
||||
assert!(
|
||||
!TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1m_ago)
|
||||
);
|
||||
assert!(TimeFilter::after(&ref_time, &format!("@{}", ref_timestamp))
|
||||
.unwrap()
|
||||
.applies_to(&t1s_later));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,13 +5,13 @@ use crate::filesystem::strip_current_dir;
|
|||
|
||||
/// Removes the parent component of the path
|
||||
pub fn basename(path: &Path) -> &OsStr {
|
||||
path.file_name().unwrap_or_else(|| path.as_os_str())
|
||||
path.file_name().unwrap_or(path.as_os_str())
|
||||
}
|
||||
|
||||
/// Removes the extension from the path
|
||||
pub fn remove_extension(path: &Path) -> OsString {
|
||||
let dirname = dirname(path);
|
||||
let stem = path.file_stem().unwrap_or_else(|| path.as_os_str());
|
||||
let stem = path.file_stem().unwrap_or(path.as_os_str());
|
||||
|
||||
let path = PathBuf::from(dirname).join(stem);
|
||||
|
||||
|
@ -34,10 +34,10 @@ pub fn dirname(path: &Path) -> OsString {
|
|||
#[cfg(test)]
|
||||
mod path_tests {
|
||||
use super::*;
|
||||
use std::path::MAIN_SEPARATOR;
|
||||
use std::path::MAIN_SEPARATOR_STR;
|
||||
|
||||
fn correct(input: &str) -> String {
|
||||
input.replace('/', &MAIN_SEPARATOR.to_string())
|
||||
input.replace('/', MAIN_SEPARATOR_STR)
|
||||
}
|
||||
|
||||
macro_rules! func_tests {
|
|
@ -0,0 +1,281 @@
|
|||
mod input;
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::fmt::{self, Display, Formatter};
|
||||
use std::path::{Component, Path, Prefix};
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use aho_corasick::AhoCorasick;
|
||||
|
||||
use self::input::{basename, dirname, remove_extension};
|
||||
|
||||
/// Designates what should be written to a buffer
|
||||
///
|
||||
/// Each `Token` contains either text, or a placeholder variant, which will be used to generate
|
||||
/// commands after all tokens for a given command template have been collected.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum Token {
|
||||
Placeholder,
|
||||
Basename,
|
||||
Parent,
|
||||
NoExt,
|
||||
BasenameNoExt,
|
||||
Text(String),
|
||||
}
|
||||
|
||||
impl Display for Token {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Token::Placeholder => f.write_str("{}")?,
|
||||
Token::Basename => f.write_str("{/}")?,
|
||||
Token::Parent => f.write_str("{//}")?,
|
||||
Token::NoExt => f.write_str("{.}")?,
|
||||
Token::BasenameNoExt => f.write_str("{/.}")?,
|
||||
Token::Text(ref string) => f.write_str(string)?,
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed format string
|
||||
///
|
||||
/// This is either a collection of `Token`s including at least one placeholder variant,
|
||||
/// or a fixed text.
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum FormatTemplate {
|
||||
Tokens(Vec<Token>),
|
||||
Text(String),
|
||||
}
|
||||
|
||||
static PLACEHOLDERS: OnceLock<AhoCorasick> = OnceLock::new();
|
||||
|
||||
impl FormatTemplate {
|
||||
pub fn has_tokens(&self) -> bool {
|
||||
matches!(self, FormatTemplate::Tokens(_))
|
||||
}
|
||||
|
||||
pub fn parse(fmt: &str) -> Self {
|
||||
// NOTE: we assume that { and } have the same length
|
||||
const BRACE_LEN: usize = '{'.len_utf8();
|
||||
let mut tokens = Vec::new();
|
||||
let mut remaining = fmt;
|
||||
let mut buf = String::new();
|
||||
let placeholders = PLACEHOLDERS.get_or_init(|| {
|
||||
AhoCorasick::new(["{{", "}}", "{}", "{/}", "{//}", "{.}", "{/.}"]).unwrap()
|
||||
});
|
||||
while let Some(m) = placeholders.find(remaining) {
|
||||
match m.pattern().as_u32() {
|
||||
0 | 1 => {
|
||||
// we found an escaped {{ or }}, so add
|
||||
// everything up to the first char to the buffer
|
||||
// then skip the second one.
|
||||
buf += &remaining[..m.start() + BRACE_LEN];
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
id if !remaining[m.end()..].starts_with('}') => {
|
||||
buf += &remaining[..m.start()];
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(std::mem::take(&mut buf)));
|
||||
}
|
||||
tokens.push(token_from_pattern_id(id));
|
||||
remaining = &remaining[m.end()..];
|
||||
}
|
||||
_ => {
|
||||
// We got a normal pattern, but the final "}"
|
||||
// is escaped, so add up to that to the buffer, then
|
||||
// skip the final }
|
||||
buf += &remaining[..m.end()];
|
||||
remaining = &remaining[m.end() + BRACE_LEN..];
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add the rest of the string to the buffer, and add the final buffer to the tokens
|
||||
if !remaining.is_empty() {
|
||||
buf += remaining;
|
||||
}
|
||||
if tokens.is_empty() {
|
||||
// No placeholders were found, so just return the text
|
||||
return FormatTemplate::Text(buf);
|
||||
}
|
||||
// Add final text segment
|
||||
if !buf.is_empty() {
|
||||
tokens.push(Token::Text(buf));
|
||||
}
|
||||
debug_assert!(!tokens.is_empty());
|
||||
FormatTemplate::Tokens(tokens)
|
||||
}
|
||||
|
||||
/// Generate a result string from this template. If path_separator is Some, then it will replace
|
||||
/// the path separator in all placeholder tokens. Fixed text and tokens are not affected by
|
||||
/// path separator substitution.
|
||||
pub fn generate(&self, path: impl AsRef<Path>, path_separator: Option<&str>) -> OsString {
|
||||
use Token::*;
|
||||
let path = path.as_ref();
|
||||
|
||||
match *self {
|
||||
Self::Tokens(ref tokens) => {
|
||||
let mut s = OsString::new();
|
||||
for token in tokens {
|
||||
match token {
|
||||
Basename => s.push(Self::replace_separator(basename(path), path_separator)),
|
||||
BasenameNoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(basename(path).as_ref()),
|
||||
path_separator,
|
||||
)),
|
||||
NoExt => s.push(Self::replace_separator(
|
||||
&remove_extension(path),
|
||||
path_separator,
|
||||
)),
|
||||
Parent => s.push(Self::replace_separator(&dirname(path), path_separator)),
|
||||
Placeholder => {
|
||||
s.push(Self::replace_separator(path.as_ref(), path_separator))
|
||||
}
|
||||
Text(ref string) => s.push(string),
|
||||
}
|
||||
}
|
||||
s
|
||||
}
|
||||
Self::Text(ref text) => OsString::from(text),
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the path separator in the input with the custom separator string. If path_separator
|
||||
/// is None, simply return a borrowed Cow<OsStr> of the input. Otherwise, the input is
|
||||
/// interpreted as a Path and its components are iterated through and re-joined into a new
|
||||
/// OsString.
|
||||
fn replace_separator<'a>(path: &'a OsStr, path_separator: Option<&str>) -> Cow<'a, OsStr> {
|
||||
// fast-path - no replacement necessary
|
||||
if path_separator.is_none() {
|
||||
return Cow::Borrowed(path);
|
||||
}
|
||||
|
||||
let path_separator = path_separator.unwrap();
|
||||
let mut out = OsString::with_capacity(path.len());
|
||||
let mut components = Path::new(path).components().peekable();
|
||||
|
||||
while let Some(comp) = components.next() {
|
||||
match comp {
|
||||
// Absolute paths on Windows are tricky. A Prefix component is usually a drive
|
||||
// letter or UNC path, and is usually followed by RootDir. There are also
|
||||
// "verbatim" prefixes beginning with "\\?\" that skip normalization. We choose to
|
||||
// ignore verbatim path prefixes here because they're very rare, might be
|
||||
// impossible to reach here, and there's no good way to deal with them. If users
|
||||
// are doing something advanced involving verbatim windows paths, they can do their
|
||||
// own output filtering with a tool like sed.
|
||||
Component::Prefix(prefix) => {
|
||||
if let Prefix::UNC(server, share) = prefix.kind() {
|
||||
// Prefix::UNC is a parsed version of '\\server\share'
|
||||
out.push(path_separator);
|
||||
out.push(path_separator);
|
||||
out.push(server);
|
||||
out.push(path_separator);
|
||||
out.push(share);
|
||||
} else {
|
||||
// All other Windows prefix types are rendered as-is. This results in e.g. "C:" for
|
||||
// drive letters. DeviceNS and Verbatim* prefixes won't have backslashes converted,
|
||||
// but they're not returned by directories fd can search anyway so we don't worry
|
||||
// about them.
|
||||
out.push(comp.as_os_str());
|
||||
}
|
||||
}
|
||||
|
||||
// Root directory is always replaced with the custom separator.
|
||||
Component::RootDir => out.push(path_separator),
|
||||
|
||||
// Everything else is joined normally, with a trailing separator if we're not last
|
||||
_ => {
|
||||
out.push(comp.as_os_str());
|
||||
if components.peek().is_some() {
|
||||
out.push(path_separator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Cow::Owned(out)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the id from an aho-corasick match to the
|
||||
// appropriate token
|
||||
fn token_from_pattern_id(id: u32) -> Token {
|
||||
use Token::*;
|
||||
match id {
|
||||
2 => Placeholder,
|
||||
3 => Basename,
|
||||
4 => Parent,
|
||||
5 => NoExt,
|
||||
6 => BasenameNoExt,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod fmt_tests {
|
||||
use super::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn parse_no_placeholders() {
|
||||
let templ = FormatTemplate::parse("This string has no placeholders");
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Text("This string has no placeholders".into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_only_brace_escapes() {
|
||||
let templ = FormatTemplate::parse("This string only has escapes like {{ and }}");
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Text("This string only has escapes like { and }".into())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn all_placeholders() {
|
||||
use Token::*;
|
||||
|
||||
let templ = FormatTemplate::parse(
|
||||
"{{path={} \
|
||||
basename={/} \
|
||||
parent={//} \
|
||||
noExt={.} \
|
||||
basenameNoExt={/.} \
|
||||
}}",
|
||||
);
|
||||
assert_eq!(
|
||||
templ,
|
||||
FormatTemplate::Tokens(vec![
|
||||
Text("{path=".into()),
|
||||
Placeholder,
|
||||
Text(" basename=".into()),
|
||||
Basename,
|
||||
Text(" parent=".into()),
|
||||
Parent,
|
||||
Text(" noExt=".into()),
|
||||
NoExt,
|
||||
Text(" basenameNoExt=".into()),
|
||||
BasenameNoExt,
|
||||
Text(" }".into()),
|
||||
])
|
||||
);
|
||||
|
||||
let mut path = PathBuf::new();
|
||||
path.push("a");
|
||||
path.push("folder");
|
||||
path.push("file.txt");
|
||||
|
||||
let expanded = templ.generate(&path, Some("/")).into_string().unwrap();
|
||||
|
||||
assert_eq!(
|
||||
expanded,
|
||||
"{path=a/folder/file.txt \
|
||||
basename=file.txt \
|
||||
parent=a/folder \
|
||||
noExt=a/folder/file \
|
||||
basenameNoExt=file }"
|
||||
);
|
||||
}
|
||||
}
|
680
src/main.rs
680
src/main.rs
|
@ -1,50 +1,132 @@
|
|||
mod app;
|
||||
mod cli;
|
||||
mod config;
|
||||
mod dir_entry;
|
||||
mod error;
|
||||
mod exec;
|
||||
mod exit_codes;
|
||||
mod filesystem;
|
||||
mod filetypes;
|
||||
mod filter;
|
||||
mod options;
|
||||
mod fmt;
|
||||
mod output;
|
||||
mod regex_helper;
|
||||
mod walk;
|
||||
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process;
|
||||
use std::io::IsTerminal;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::time;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use atty::Stream;
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use clap::{CommandFactory, Parser};
|
||||
use globset::GlobBuilder;
|
||||
use lscolors::LsColors;
|
||||
use regex::bytes::{RegexBuilder, RegexSetBuilder};
|
||||
use regex::bytes::{Regex, RegexBuilder, RegexSetBuilder};
|
||||
|
||||
use crate::error::print_error;
|
||||
use crate::exec::CommandTemplate;
|
||||
use crate::cli::{ColorWhen, Opts};
|
||||
use crate::config::Config;
|
||||
use crate::exec::CommandSet;
|
||||
use crate::exit_codes::ExitCode;
|
||||
use crate::filetypes::FileTypes;
|
||||
#[cfg(unix)]
|
||||
use crate::filter::OwnerFilter;
|
||||
use crate::filter::{SizeFilter, TimeFilter};
|
||||
use crate::options::Options;
|
||||
use crate::regex_helper::pattern_has_uppercase_char;
|
||||
use crate::filter::TimeFilter;
|
||||
use crate::regex_helper::{pattern_has_uppercase_char, pattern_matches_strings_with_leading_dot};
|
||||
|
||||
// We use jemalloc for performance reasons, see https://github.com/sharkdp/fd/pull/481
|
||||
// FIXME: re-enable jemalloc on macOS, see comment in Cargo.toml file for more infos
|
||||
#[cfg(all(not(windows), not(target_os = "macos"), not(target_env = "musl")))]
|
||||
#[cfg(all(
|
||||
not(windows),
|
||||
not(target_os = "android"),
|
||||
not(target_os = "macos"),
|
||||
not(target_os = "freebsd"),
|
||||
not(target_os = "openbsd"),
|
||||
not(all(target_env = "musl", target_pointer_width = "32")),
|
||||
not(target_arch = "riscv64"),
|
||||
feature = "use-jemalloc"
|
||||
))]
|
||||
#[global_allocator]
|
||||
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
|
||||
|
||||
fn run() -> Result<ExitCode> {
|
||||
let matches = app::build_app().get_matches_from(env::args_os());
|
||||
// vivid --color-mode 8-bit generate molokai
|
||||
const DEFAULT_LS_COLORS: &str = "
|
||||
ow=0:or=0;38;5;16;48;5;203:no=0:ex=1;38;5;203:cd=0;38;5;203;48;5;236:mi=0;38;5;16;48;5;203:*~=0;38;5;243:st=0:pi=0;38;5;16;48;5;81:fi=0:di=0;38;5;81:so=0;38;5;16;48;5;203:bd=0;38;5;81;48;5;236:tw=0:ln=0;38;5;203:*.m=0;38;5;48:*.o=0;38;5;243:*.z=4;38;5;203:*.a=1;38;5;203:*.r=0;38;5;48:*.c=0;38;5;48:*.d=0;38;5;48:*.t=0;38;5;48:*.h=0;38;5;48:*.p=0;38;5;48:*.cc=0;38;5;48:*.ll=0;38;5;48:*.jl=0;38;5;48:*css=0;38;5;48:*.md=0;38;5;185:*.gz=4;38;5;203:*.nb=0;38;5;48:*.mn=0;38;5;48:*.go=0;38;5;48:*.xz=4;38;5;203:*.so=1;38;5;203:*.rb=0;38;5;48:*.pm=0;38;5;48:*.bc=0;38;5;243:*.py=0;38;5;48:*.as=0;38;5;48:*.pl=0;38;5;48:*.rs=0;38;5;48:*.sh=0;38;5;48:*.7z=4;38;5;203:*.ps=0;38;5;186:*.cs=0;38;5;48:*.el=0;38;5;48:*.rm=0;38;5;208:*.hs=0;38;5;48:*.td=0;38;5;48:*.ui=0;38;5;149:*.ex=0;38;5;48:*.js=0;38;5;48:*.cp=0;38;5;48:*.cr=0;38;5;48:*.la=0;38;5;243:*.kt=0;38;5;48:*.ml=0;38;5;48:*.vb=0;38;5;48:*.gv=0;38;5;48:*.lo=0;38;5;243:*.hi=0;38;5;243:*.ts=0;38;5;48:*.ko=1;38;5;203:*.hh=0;38;5;48:*.pp=0;38;5;48:*.di=0;38;5;48:*.bz=4;38;5;203:*.fs=0;38;5;48:*.png=0;38;5;208:*.zsh=0;38;5;48:*.mpg=0;38;5;208:*.pid=0;38;5;243:*.xmp=0;38;5;149:*.iso=4;38;5;203:*.m4v=0;38;5;208:*.dot=0;38;5;48:*.ods=0;38;5;186:*.inc=0;38;5;48:*.sxw=0;38;5;186:*.aif=0;38;5;208:*.git=0;38;5;243:*.gvy=0;38;5;48:*.tbz=4;38;5;203:*.log=0;38;5;243:*.txt=0;38;5;185:*.ico=0;38;5;208:*.csx=0;38;5;48:*.vob=0;38;5;208:*.pgm=0;38;5;208:*.pps=0;38;5;186:*.ics=0;38;5;186:*.img=4;38;5;203:*.fon=0;38;5;208:*.hpp=0;38;5;48:*.bsh=0;38;5;48:*.sql=0;38;5;48:*TODO=1:*.php=0;38;5;48:*.pkg=4;38;5;203:*.ps1=0;38;5;48:*.csv=0;38;5;185:*.ilg=0;38;5;243:*.ini=0;38;5;149:*.pyc=0;38;5;243:*.psd=0;38;5;208:*.htc=0;38;5;48:*.swp=0;38;5;243:*.mli=0;38;5;48:*hgrc=0;38;5;149:*.bst=0;38;5;149:*.ipp=0;38;5;48:*.fsi=0;38;5;48:*.tcl=0;38;5;48:*.exs=0;38;5;48:*.out=0;38;5;243:*.jar=4;38;5;203:*.xls=0;38;5;186:*.ppm=0;38;5;208:*.apk=4;38;5;203:*.aux=0;38;5;243:*.rpm=4;38;5;203:*.dll=1;38;5;203:*.eps=0;38;5;208:*.exe=1;38;5;203:*.doc=0;38;5;186:*.wma=0;38;5
;208:*.deb=4;38;5;203:*.pod=0;38;5;48:*.ind=0;38;5;243:*.nix=0;38;5;149:*.lua=0;38;5;48:*.epp=0;38;5;48:*.dpr=0;38;5;48:*.htm=0;38;5;185:*.ogg=0;38;5;208:*.bin=4;38;5;203:*.otf=0;38;5;208:*.yml=0;38;5;149:*.pro=0;38;5;149:*.cxx=0;38;5;48:*.tex=0;38;5;48:*.fnt=0;38;5;208:*.erl=0;38;5;48:*.sty=0;38;5;243:*.bag=4;38;5;203:*.rst=0;38;5;185:*.pdf=0;38;5;186:*.pbm=0;38;5;208:*.xcf=0;38;5;208:*.clj=0;38;5;48:*.gif=0;38;5;208:*.rar=4;38;5;203:*.elm=0;38;5;48:*.bib=0;38;5;149:*.tsx=0;38;5;48:*.dmg=4;38;5;203:*.tmp=0;38;5;243:*.bcf=0;38;5;243:*.mkv=0;38;5;208:*.svg=0;38;5;208:*.cpp=0;38;5;48:*.vim=0;38;5;48:*.bmp=0;38;5;208:*.ltx=0;38;5;48:*.fls=0;38;5;243:*.flv=0;38;5;208:*.wav=0;38;5;208:*.m4a=0;38;5;208:*.mid=0;38;5;208:*.hxx=0;38;5;48:*.pas=0;38;5;48:*.wmv=0;38;5;208:*.tif=0;38;5;208:*.kex=0;38;5;186:*.mp4=0;38;5;208:*.bak=0;38;5;243:*.xlr=0;38;5;186:*.dox=0;38;5;149:*.swf=0;38;5;208:*.tar=4;38;5;203:*.tgz=4;38;5;203:*.cfg=0;38;5;149:*.xml=0;
|
||||
38;5;185:*.jpg=0;38;5;208:*.mir=0;38;5;48:*.sxi=0;38;5;186:*.bz2=4;38;5;203:*.odt=0;38;5;186:*.mov=0;38;5;208:*.toc=0;38;5;243:*.bat=1;38;5;203:*.asa=0;38;5;48:*.awk=0;38;5;48:*.sbt=0;38;5;48:*.vcd=4;38;5;203:*.kts=0;38;5;48:*.arj=4;38;5;203:*.blg=0;38;5;243:*.c++=0;38;5;48:*.odp=0;38;5;186:*.bbl=0;38;5;243:*.idx=0;38;5;243:*.com=1;38;5;203:*.mp3=0;38;5;208:*.avi=0;38;5;208:*.def=0;38;5;48:*.cgi=0;38;5;48:*.zip=4;38;5;203:*.ttf=0;38;5;208:*.ppt=0;38;5;186:*.tml=0;38;5;149:*.fsx=0;38;5;48:*.h++=0;38;5;48:*.rtf=0;38;5;186:*.inl=0;38;5;48:*.yaml=0;38;5;149:*.html=0;38;5;185:*.mpeg=0;38;5;208:*.java=0;38;5;48:*.hgrc=0;38;5;149:*.orig=0;38;5;243:*.conf=0;38;5;149:*.dart=0;38;5;48:*.psm1=0;38;5;48:*.rlib=0;38;5;243:*.fish=0;38;5;48:*.bash=0;38;5;48:*.make=0;38;5;149:*.docx=0;38;5;186:*.json=0;38;5;149:*.psd1=0;38;5;48:*.lisp=0;38;5;48:*.tbz2=4;38;5;203:*.diff=0;38;5;48:*.epub=0;38;5;186:*.xlsx=0;38;5;186:*.pptx=0;38;5;186:*.toml=0;38;5;149:*.h264=0;38;5;208:*.purs=0;38;5;48:*.flac=0;38;5;208:*.tiff=0;38;5;208:*.jpeg=0;38;5;208:*.lock=0;38;5;243:*.less=0;38;5;48:*.dyn_o=0;38;5;243:*.scala=0;38;5;48:*.mdown=0;38;5;185:*.shtml=0;38;5;185:*.class=0;38;5;243:*.cache=0;38;5;243:*.cmake=0;38;5;149:*passwd=0;38;5;149:*.swift=0;38;5;48:*shadow=0;38;5;149:*.xhtml=0;38;5;185:*.patch=0;38;5;48:*.cabal=0;38;5;48:*README=0;38;5;16;48;5;186:*.toast=4;38;5;203:*.ipynb=0;38;5;48:*COPYING=0;38;5;249:*.gradle=0;38;5;48:*.matlab=0;38;5;48:*.config=0;38;5;149:*LICENSE=0;38;5;249:*.dyn_hi=0;38;5;243:*.flake8=0;38;5;149:*.groovy=0;38;5;48:*INSTALL=0;38;5;16;48;5;186:*TODO.md=1:*.ignore=0;38;5;149:*Doxyfile=0;38;5;149:*TODO.txt=1:*setup.py=0;38;5;149:*Makefile=0;38;5;149:*.gemspec=0;38;5;149:*.desktop=0;38;5;149:*.rgignore=0;38;5;149:*.markdown=0;38;5;185:*COPYRIGHT=0;38;5;249:*configure=0;38;5;149:*.DS_Store=0;38;5;243:*.kdevelop=0;38;5;149:*.fdignore=0;38;5;149:*README.md=0;38;5;16;48;5;186:*.cmake.in=0;38;5;149:*SConscript=0;38;5;149:*CODEOWNERS=0;38;5;149:*.localized=0;38;5;243:*.gitignore=0
;38;5;149:*Dockerfile=0;38;5;149:*.gitconfig=0;38;5;149:*INSTALL.md=0;38;5;16;48;5;186:*README.txt=0;38;5;16;48;5;186:*SConstruct=0;38;5;149:*.scons_opt=0;38;5;243:*.travis.yml=0;38;5;186:*.gitmodules=0;38;5;149:*.synctex.gz=0;38;5;243:*LICENSE-MIT=0;38;5;249:*MANIFEST.in=0;38;5;149:*Makefile.in=0;38;5;243:*Makefile.am=0;38;5;149:*INSTALL.txt=0;38;5;16;48;5;186:*configure.ac=0;38;5;149:*.applescript=0;38;5;48:*appveyor.yml=0;38;5;186:*.fdb_latexmk=0;38;5;243:*CONTRIBUTORS=0;38;5;16;48;5;186:*.clang-format=0;38;5;149:*LICENSE-APACHE=0;38;5;249:*CMakeLists.txt=0;38;5;149:*CMakeCache.txt=0;38;5;243:*.gitattributes=0;38;5;149:*CONTRIBUTORS.md=0;38;5;16;48;5;186:*.sconsign.dblite=0;38;5;243:*requirements.txt=0;38;5;149:*CONTRIBUTORS.txt=0;38;5;16;48;5;186:*package-lock.json=0;38;5;243:*.CFUserTextEncoding=0;38;5;243
|
||||
";
|
||||
|
||||
// Set the current working directory of the process
|
||||
if let Some(base_directory) = matches.value_of_os("base-directory") {
|
||||
let base_directory = Path::new(base_directory);
|
||||
if !filesystem::is_dir(base_directory) {
|
||||
fn main() {
|
||||
let result = run();
|
||||
match result {
|
||||
Ok(exit_code) => {
|
||||
exit_code.exit();
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("[fd error]: {:#}", err);
|
||||
ExitCode::GeneralError.exit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn run() -> Result<ExitCode> {
|
||||
let opts = Opts::parse();
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
if let Some(shell) = opts.gen_completions()? {
|
||||
return print_completions(shell);
|
||||
}
|
||||
|
||||
set_working_dir(&opts)?;
|
||||
let search_paths = opts.search_paths()?;
|
||||
if search_paths.is_empty() {
|
||||
bail!("No valid search paths given.");
|
||||
}
|
||||
|
||||
ensure_search_pattern_is_not_a_path(&opts)?;
|
||||
let pattern = &opts.pattern;
|
||||
let exprs = &opts.exprs;
|
||||
let empty = Vec::new();
|
||||
|
||||
let pattern_regexps = exprs
|
||||
.as_ref()
|
||||
.unwrap_or(&empty)
|
||||
.iter()
|
||||
.chain([pattern])
|
||||
.map(|pat| build_pattern_regex(pat, &opts))
|
||||
.collect::<Result<Vec<String>>>()?;
|
||||
|
||||
let config = construct_config(opts, &pattern_regexps)?;
|
||||
|
||||
ensure_use_hidden_option_for_leading_dot_pattern(&config, &pattern_regexps)?;
|
||||
|
||||
let regexps = pattern_regexps
|
||||
.into_iter()
|
||||
.map(|pat| build_regex(pat, &config))
|
||||
.collect::<Result<Vec<Regex>>>()?;
|
||||
|
||||
walk::scan(&search_paths, regexps, config)
|
||||
}
|
||||
|
||||
#[cfg(feature = "completions")]
|
||||
#[cold]
|
||||
fn print_completions(shell: clap_complete::Shell) -> Result<ExitCode> {
|
||||
// The program name is the first argument.
|
||||
let first_arg = env::args().next();
|
||||
let program_name = first_arg
|
||||
.as_ref()
|
||||
.map(Path::new)
|
||||
.and_then(|path| path.file_stem())
|
||||
.and_then(|file| file.to_str())
|
||||
.unwrap_or("fd");
|
||||
let mut cmd = Opts::command();
|
||||
cmd.build();
|
||||
clap_complete::generate(shell, &mut cmd, program_name, &mut std::io::stdout());
|
||||
Ok(ExitCode::Success)
|
||||
}
|
||||
|
||||
fn set_working_dir(opts: &Opts) -> Result<()> {
|
||||
if let Some(ref base_directory) = opts.base_directory {
|
||||
if !filesystem::is_existing_directory(base_directory) {
|
||||
return Err(anyhow!(
|
||||
"The '--base-directory' path '{}' is not a directory.",
|
||||
base_directory.to_string_lossy()
|
||||
|
@ -52,301 +134,148 @@ fn run() -> Result<ExitCode> {
|
|||
}
|
||||
env::set_current_dir(base_directory).with_context(|| {
|
||||
format!(
|
||||
"Could not set '{}' as the current working directory.",
|
||||
"Could not set '{}' as the current working directory",
|
||||
base_directory.to_string_lossy()
|
||||
)
|
||||
})?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
let current_directory = Path::new(".");
|
||||
if !filesystem::is_dir(current_directory) {
|
||||
return Err(anyhow!(
|
||||
"Could not retrieve current directory (has it been deleted?)."
|
||||
));
|
||||
}
|
||||
|
||||
// Get the search pattern
|
||||
let pattern = matches
|
||||
.value_of_os("pattern")
|
||||
.map(|p| {
|
||||
p.to_str()
|
||||
.ok_or_else(|| anyhow!("The search pattern includes invalid UTF-8 sequences."))
|
||||
})
|
||||
.transpose()?
|
||||
.unwrap_or("");
|
||||
|
||||
// Get one or more root directories to search.
|
||||
let passed_arguments = matches
|
||||
.values_of_os("path")
|
||||
.or_else(|| matches.values_of_os("search-path"));
|
||||
|
||||
let mut search_paths = if let Some(paths) = passed_arguments {
|
||||
let mut directories = vec![];
|
||||
for path in paths {
|
||||
let path_buffer = PathBuf::from(path);
|
||||
if filesystem::is_dir(&path_buffer) {
|
||||
directories.push(path_buffer);
|
||||
} else {
|
||||
print_error(format!(
|
||||
"Search path '{}' is not a directory.",
|
||||
path_buffer.to_string_lossy()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
directories
|
||||
} else {
|
||||
vec![current_directory.to_path_buf()]
|
||||
};
|
||||
|
||||
// Check if we have no valid search paths.
|
||||
if search_paths.is_empty() {
|
||||
return Err(anyhow!("No valid search paths given."));
|
||||
}
|
||||
|
||||
if matches.is_present("absolute-path") {
|
||||
search_paths = search_paths
|
||||
.iter()
|
||||
.map(|path_buffer| {
|
||||
path_buffer
|
||||
.canonicalize()
|
||||
.and_then(|pb| filesystem::absolute_path(pb.as_path()))
|
||||
.unwrap()
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
|
||||
// Detect if the user accidentally supplied a path instead of a search pattern
|
||||
if !matches.is_present("full-path")
|
||||
&& pattern.contains(std::path::MAIN_SEPARATOR)
|
||||
&& filesystem::is_dir(Path::new(pattern))
|
||||
/// Detect if the user accidentally supplied a path instead of a search pattern
|
||||
fn ensure_search_pattern_is_not_a_path(opts: &Opts) -> Result<()> {
|
||||
if !opts.full_path
|
||||
&& opts.pattern.contains(std::path::MAIN_SEPARATOR)
|
||||
&& Path::new(&opts.pattern).is_dir()
|
||||
{
|
||||
return Err(anyhow!(
|
||||
Err(anyhow!(
|
||||
"The search pattern '{pattern}' contains a path-separation character ('{sep}') \
|
||||
and will not lead to any search results.\n\n\
|
||||
If you want to search for all files inside the '{pattern}' directory, use a match-all pattern:\n\n \
|
||||
fd . '{pattern}'\n\n\
|
||||
Instead, if you want your pattern to match the full file path, use:\n\n \
|
||||
fd --full-path '{pattern}'",
|
||||
pattern = pattern,
|
||||
pattern = &opts.pattern,
|
||||
sep = std::path::MAIN_SEPARATOR,
|
||||
));
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
let pattern_regex = if matches.is_present("glob") && !pattern.is_empty() {
|
||||
fn build_pattern_regex(pattern: &str, opts: &Opts) -> Result<String> {
|
||||
Ok(if opts.glob && !pattern.is_empty() {
|
||||
let glob = GlobBuilder::new(pattern).literal_separator(true).build()?;
|
||||
glob.regex().to_owned()
|
||||
} else if matches.is_present("fixed-strings") {
|
||||
} else if opts.fixed_strings {
|
||||
// Treat pattern as literal string if '--fixed-strings' is used
|
||||
regex::escape(pattern)
|
||||
} else {
|
||||
String::from(pattern)
|
||||
};
|
||||
})
|
||||
}
|
||||
|
||||
fn check_path_separator_length(path_separator: Option<&str>) -> Result<()> {
|
||||
match (cfg!(windows), path_separator) {
|
||||
(true, Some(sep)) if sep.len() > 1 => Err(anyhow!(
|
||||
"A path separator must be exactly one byte, but \
|
||||
the given separator is {} bytes: '{}'.\n\
|
||||
In some shells on Windows, '/' is automatically \
|
||||
expanded. Try to use '//' instead.",
|
||||
sep.len(),
|
||||
sep
|
||||
)),
|
||||
_ => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
fn construct_config(mut opts: Opts, pattern_regexps: &[String]) -> Result<Config> {
|
||||
// The search will be case-sensitive if the command line flag is set or
|
||||
// if the pattern has an uppercase character (smart case).
|
||||
let case_sensitive = !matches.is_present("ignore-case")
|
||||
&& (matches.is_present("case-sensitive") || pattern_has_uppercase_char(&pattern_regex));
|
||||
// if any of the patterns has an uppercase character (smart case).
|
||||
let case_sensitive = !opts.ignore_case
|
||||
&& (opts.case_sensitive
|
||||
|| pattern_regexps
|
||||
.iter()
|
||||
.any(|pat| pattern_has_uppercase_char(pat)));
|
||||
|
||||
let interactive_terminal = atty::is(Stream::Stdout);
|
||||
let colored_output = match matches.value_of("color") {
|
||||
Some("always") => true,
|
||||
Some("never") => false,
|
||||
_ => env::var_os("NO_COLOR").is_none() && interactive_terminal,
|
||||
};
|
||||
let path_separator = opts
|
||||
.path_separator
|
||||
.take()
|
||||
.or_else(filesystem::default_path_separator);
|
||||
let actual_path_separator = path_separator
|
||||
.clone()
|
||||
.unwrap_or_else(|| std::path::MAIN_SEPARATOR.to_string());
|
||||
check_path_separator_length(path_separator.as_deref())?;
|
||||
|
||||
let path_separator = matches.value_of("path-separator").map(|str| str.to_owned());
|
||||
let size_limits = std::mem::take(&mut opts.size);
|
||||
let time_constraints = extract_time_constraints(&opts)?;
|
||||
#[cfg(unix)]
|
||||
let owner_constraint: Option<OwnerFilter> = opts.owner.and_then(OwnerFilter::filter_ignore);
|
||||
|
||||
#[cfg(windows)]
|
||||
let colored_output = colored_output && ansi_term::enable_ansi_support().is_ok();
|
||||
let ansi_colors_support =
|
||||
nu_ansi_term::enable_ansi_support().is_ok() || std::env::var_os("TERM").is_some();
|
||||
#[cfg(not(windows))]
|
||||
let ansi_colors_support = true;
|
||||
|
||||
let interactive_terminal = std::io::stdout().is_terminal();
|
||||
|
||||
let colored_output = match opts.color {
|
||||
ColorWhen::Always => true,
|
||||
ColorWhen::Never => false,
|
||||
ColorWhen::Auto => {
|
||||
let no_color = env::var_os("NO_COLOR").is_some_and(|x| !x.is_empty());
|
||||
ansi_colors_support && !no_color && interactive_terminal
|
||||
}
|
||||
};
|
||||
|
||||
let ls_colors = if colored_output {
|
||||
Some(LsColors::from_env().unwrap_or_default())
|
||||
Some(LsColors::from_env().unwrap_or_else(|| LsColors::from_string(DEFAULT_LS_COLORS)))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let command = extract_command(&mut opts, colored_output)?;
|
||||
let has_command = command.is_some();
|
||||
|
||||
let command = if let Some(args) = matches.values_of("exec") {
|
||||
Some(CommandTemplate::new(args))
|
||||
} else if let Some(args) = matches.values_of("exec-batch") {
|
||||
Some(CommandTemplate::new_batch(args)?)
|
||||
} else if matches.is_present("list-details") {
|
||||
let color = matches.value_of("color").unwrap_or("auto");
|
||||
let color_arg = ["--color=", color].concat();
|
||||
|
||||
#[allow(unused)]
|
||||
let gnu_ls = |command_name| {
|
||||
vec![
|
||||
command_name,
|
||||
"-l", // long listing format
|
||||
"--human-readable", // human readable file sizes
|
||||
"--directory", // list directories themselves, not their contents
|
||||
&color_arg,
|
||||
]
|
||||
};
|
||||
|
||||
let cmd: Vec<&str> = if cfg!(unix) {
|
||||
if !cfg!(any(
|
||||
target_os = "macos",
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd"
|
||||
)) {
|
||||
// Assume ls is GNU ls
|
||||
gnu_ls("ls")
|
||||
} else {
|
||||
// MacOS, DragonFlyBSD, FreeBSD
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
// Use GNU ls, if available (support for --color=auto, better LS_COLORS support)
|
||||
let gnu_ls_exists = Command::new("gls")
|
||||
.arg("--version")
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.status()
|
||||
.is_ok();
|
||||
|
||||
if gnu_ls_exists {
|
||||
gnu_ls("gls")
|
||||
} else {
|
||||
let mut cmd = vec![
|
||||
"ls", // BSD version of ls
|
||||
"-l", // long listing format
|
||||
"-h", // '--human-readable' is not available, '-h' is
|
||||
"-d", // '--directory' is not available, but '-d' is
|
||||
];
|
||||
|
||||
if !cfg!(any(target_os = "netbsd", target_os = "openbsd")) && colored_output {
|
||||
// -G is not available in NetBSD's and OpenBSD's ls
|
||||
cmd.push("-G");
|
||||
}
|
||||
|
||||
cmd
|
||||
}
|
||||
}
|
||||
} else if cfg!(windows) {
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
// Use GNU ls, if available
|
||||
let gnu_ls_exists = Command::new("ls")
|
||||
.arg("--version")
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.status()
|
||||
.is_ok();
|
||||
|
||||
if gnu_ls_exists {
|
||||
gnu_ls("ls")
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'fd --list-details' is not supported on Windows unless GNU 'ls' is installed."
|
||||
));
|
||||
}
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'fd --list-details' is not supported on this platform."
|
||||
));
|
||||
};
|
||||
|
||||
Some(CommandTemplate::new_batch(&cmd).unwrap())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let size_limits = if let Some(vs) = matches.values_of("size") {
|
||||
vs.map(|sf| {
|
||||
SizeFilter::from_string(sf)
|
||||
.ok_or_else(|| anyhow!("'{}' is not a valid size constraint. See 'fd --help'.", sf))
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
let now = time::SystemTime::now();
|
||||
let mut time_constraints: Vec<TimeFilter> = Vec::new();
|
||||
if let Some(t) = matches.value_of("changed-within") {
|
||||
if let Some(f) = TimeFilter::after(&now, t) {
|
||||
time_constraints.push(f);
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'{}' is not a valid date or duration. See 'fd --help'.",
|
||||
t
|
||||
));
|
||||
}
|
||||
}
|
||||
if let Some(t) = matches.value_of("changed-before") {
|
||||
if let Some(f) = TimeFilter::before(&now, t) {
|
||||
time_constraints.push(f);
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'{}' is not a valid date or duration. See 'fd --help'.",
|
||||
t
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
let owner_constraint = if let Some(s) = matches.value_of("owner") {
|
||||
OwnerFilter::from_string(s)?
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let config = Options {
|
||||
Ok(Config {
|
||||
case_sensitive,
|
||||
search_full_path: matches.is_present("full-path"),
|
||||
ignore_hidden: !(matches.is_present("hidden")
|
||||
|| matches.occurrences_of("rg-alias-hidden-ignore") >= 2),
|
||||
read_fdignore: !(matches.is_present("no-ignore")
|
||||
|| matches.is_present("rg-alias-hidden-ignore")),
|
||||
read_vcsignore: !(matches.is_present("no-ignore")
|
||||
|| matches.is_present("rg-alias-hidden-ignore")
|
||||
|| matches.is_present("no-ignore-vcs")),
|
||||
read_global_ignore: !(matches.is_present("no-ignore")
|
||||
|| matches.is_present("rg-alias-hidden-ignore")
|
||||
|| matches.is_present("no-global-ignore-file")),
|
||||
follow_links: matches.is_present("follow"),
|
||||
one_file_system: matches.is_present("one-file-system"),
|
||||
null_separator: matches.is_present("null_separator"),
|
||||
max_depth: matches
|
||||
.value_of("max-depth")
|
||||
.or_else(|| matches.value_of("rg-depth"))
|
||||
.or_else(|| matches.value_of("exact-depth"))
|
||||
.and_then(|n| usize::from_str_radix(n, 10).ok()),
|
||||
min_depth: matches
|
||||
.value_of("min-depth")
|
||||
.or_else(|| matches.value_of("exact-depth"))
|
||||
.and_then(|n| usize::from_str_radix(n, 10).ok()),
|
||||
threads: std::cmp::max(
|
||||
matches
|
||||
.value_of("threads")
|
||||
.and_then(|n| usize::from_str_radix(n, 10).ok())
|
||||
.unwrap_or_else(num_cpus::get),
|
||||
1,
|
||||
),
|
||||
max_buffer_time: matches
|
||||
.value_of("max-buffer-time")
|
||||
.and_then(|n| u64::from_str_radix(n, 10).ok())
|
||||
.map(time::Duration::from_millis),
|
||||
search_full_path: opts.full_path,
|
||||
ignore_hidden: !(opts.hidden || opts.rg_alias_ignore()),
|
||||
read_fdignore: !(opts.no_ignore || opts.rg_alias_ignore()),
|
||||
read_vcsignore: !(opts.no_ignore || opts.rg_alias_ignore() || opts.no_ignore_vcs),
|
||||
require_git_to_read_vcsignore: !opts.no_require_git,
|
||||
read_parent_ignore: !opts.no_ignore_parent,
|
||||
read_global_ignore: !(opts.no_ignore
|
||||
|| opts.rg_alias_ignore()
|
||||
|| opts.no_global_ignore_file),
|
||||
follow_links: opts.follow,
|
||||
one_file_system: opts.one_file_system,
|
||||
null_separator: opts.null_separator,
|
||||
quiet: opts.quiet,
|
||||
max_depth: opts.max_depth(),
|
||||
min_depth: opts.min_depth(),
|
||||
prune: opts.prune,
|
||||
threads: opts.threads().get(),
|
||||
max_buffer_time: opts.max_buffer_time,
|
||||
ls_colors,
|
||||
interactive_terminal,
|
||||
file_types: matches.values_of("file-type").map(|values| {
|
||||
file_types: opts.filetype.as_ref().map(|values| {
|
||||
use crate::cli::FileType::*;
|
||||
let mut file_types = FileTypes::default();
|
||||
for value in values {
|
||||
match value {
|
||||
"f" | "file" => file_types.files = true,
|
||||
"d" | "directory" => file_types.directories = true,
|
||||
"l" | "symlink" => file_types.symlinks = true,
|
||||
"x" | "executable" => {
|
||||
File => file_types.files = true,
|
||||
Directory => file_types.directories = true,
|
||||
Symlink => file_types.symlinks = true,
|
||||
Executable => {
|
||||
file_types.executables_only = true;
|
||||
file_types.files = true;
|
||||
}
|
||||
"e" | "empty" => file_types.empty_only = true,
|
||||
"s" | "socket" => file_types.sockets = true,
|
||||
"p" | "pipe" => file_types.pipes = true,
|
||||
_ => unreachable!(),
|
||||
Empty => file_types.empty_only = true,
|
||||
BlockDevice => file_types.block_devices = true,
|
||||
CharDevice => file_types.char_devices = true,
|
||||
Socket => file_types.sockets = true,
|
||||
Pipe => file_types.pipes = true,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -358,10 +287,12 @@ fn run() -> Result<ExitCode> {
|
|||
|
||||
file_types
|
||||
}),
|
||||
extensions: matches
|
||||
.values_of("extension")
|
||||
extensions: opts
|
||||
.extensions
|
||||
.as_ref()
|
||||
.map(|exts| {
|
||||
let patterns = exts
|
||||
.iter()
|
||||
.map(|e| e.trim_start_matches('.'))
|
||||
.map(|e| format!(r".\.{}$", regex::escape(e)));
|
||||
RegexSetBuilder::new(patterns)
|
||||
|
@ -369,35 +300,175 @@ fn run() -> Result<ExitCode> {
|
|||
.build()
|
||||
})
|
||||
.transpose()?,
|
||||
format: opts
|
||||
.format
|
||||
.as_deref()
|
||||
.map(crate::fmt::FormatTemplate::parse),
|
||||
command: command.map(Arc::new),
|
||||
exclude_patterns: matches
|
||||
.values_of("exclude")
|
||||
.map(|v| v.map(|p| String::from("!") + p).collect())
|
||||
.unwrap_or_else(|| vec![]),
|
||||
ignore_files: matches
|
||||
.values_of("ignore-file")
|
||||
.map(|vs| vs.map(PathBuf::from).collect())
|
||||
.unwrap_or_else(|| vec![]),
|
||||
batch_size: opts.batch_size,
|
||||
exclude_patterns: opts.exclude.iter().map(|p| String::from("!") + p).collect(),
|
||||
ignore_files: std::mem::take(&mut opts.ignore_file),
|
||||
size_constraints: size_limits,
|
||||
time_constraints,
|
||||
#[cfg(unix)]
|
||||
owner_constraint,
|
||||
show_filesystem_errors: matches.is_present("show-errors"),
|
||||
show_filesystem_errors: opts.show_errors,
|
||||
path_separator,
|
||||
max_results: matches
|
||||
.value_of("max-results")
|
||||
.and_then(|n| usize::from_str_radix(n, 10).ok())
|
||||
.filter(|&n| n != 0)
|
||||
.or_else(|| {
|
||||
if matches.is_present("max-one-result") {
|
||||
Some(1)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}),
|
||||
};
|
||||
actual_path_separator,
|
||||
max_results: opts.max_results(),
|
||||
strip_cwd_prefix: opts.strip_cwd_prefix(|| !(opts.null_separator || has_command)),
|
||||
})
|
||||
}
|
||||
|
||||
let re = RegexBuilder::new(&pattern_regex)
|
||||
fn extract_command(opts: &mut Opts, colored_output: bool) -> Result<Option<CommandSet>> {
|
||||
opts.exec
|
||||
.command
|
||||
.take()
|
||||
.map(Ok)
|
||||
.or_else(|| {
|
||||
if !opts.list_details {
|
||||
return None;
|
||||
}
|
||||
|
||||
let res = determine_ls_command(colored_output)
|
||||
.map(|cmd| CommandSet::new_batch([cmd]).unwrap());
|
||||
Some(res)
|
||||
})
|
||||
.transpose()
|
||||
}
|
||||
|
||||
fn determine_ls_command(colored_output: bool) -> Result<Vec<&'static str>> {
|
||||
#[allow(unused)]
|
||||
let gnu_ls = |command_name| {
|
||||
let color_arg = if colored_output {
|
||||
"--color=always"
|
||||
} else {
|
||||
"--color=never"
|
||||
};
|
||||
// Note: we use short options here (instead of --long-options) to support more
|
||||
// platforms (like BusyBox).
|
||||
vec![
|
||||
command_name,
|
||||
"-l", // long listing format
|
||||
"-h", // human readable file sizes
|
||||
"-d", // list directories themselves, not their contents
|
||||
color_arg,
|
||||
]
|
||||
};
|
||||
let cmd: Vec<&str> = if cfg!(unix) {
|
||||
if !cfg!(any(
|
||||
target_os = "macos",
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd"
|
||||
)) {
|
||||
// Assume ls is GNU ls
|
||||
gnu_ls("ls")
|
||||
} else {
|
||||
// MacOS, DragonFlyBSD, FreeBSD
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
// Use GNU ls, if available (support for --color=auto, better LS_COLORS support)
|
||||
let gnu_ls_exists = Command::new("gls")
|
||||
.arg("--version")
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.status()
|
||||
.is_ok();
|
||||
|
||||
if gnu_ls_exists {
|
||||
gnu_ls("gls")
|
||||
} else {
|
||||
let mut cmd = vec![
|
||||
"ls", // BSD version of ls
|
||||
"-l", // long listing format
|
||||
"-h", // '--human-readable' is not available, '-h' is
|
||||
"-d", // '--directory' is not available, but '-d' is
|
||||
];
|
||||
|
||||
if !cfg!(any(target_os = "netbsd", target_os = "openbsd")) && colored_output {
|
||||
// -G is not available in NetBSD's and OpenBSD's ls
|
||||
cmd.push("-G");
|
||||
}
|
||||
|
||||
cmd
|
||||
}
|
||||
}
|
||||
} else if cfg!(windows) {
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
// Use GNU ls, if available
|
||||
let gnu_ls_exists = Command::new("ls")
|
||||
.arg("--version")
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null())
|
||||
.status()
|
||||
.is_ok();
|
||||
|
||||
if gnu_ls_exists {
|
||||
gnu_ls("ls")
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'fd --list-details' is not supported on Windows unless GNU 'ls' is installed."
|
||||
));
|
||||
}
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'fd --list-details' is not supported on this platform."
|
||||
));
|
||||
};
|
||||
Ok(cmd)
|
||||
}
|
||||
|
||||
fn extract_time_constraints(opts: &Opts) -> Result<Vec<TimeFilter>> {
|
||||
let now = time::SystemTime::now();
|
||||
let mut time_constraints: Vec<TimeFilter> = Vec::new();
|
||||
if let Some(ref t) = opts.changed_within {
|
||||
if let Some(f) = TimeFilter::after(&now, t) {
|
||||
time_constraints.push(f);
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'{}' is not a valid date or duration. See 'fd --help'.",
|
||||
t
|
||||
));
|
||||
}
|
||||
}
|
||||
if let Some(ref t) = opts.changed_before {
|
||||
if let Some(f) = TimeFilter::before(&now, t) {
|
||||
time_constraints.push(f);
|
||||
} else {
|
||||
return Err(anyhow!(
|
||||
"'{}' is not a valid date or duration. See 'fd --help'.",
|
||||
t
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(time_constraints)
|
||||
}
|
||||
|
||||
fn ensure_use_hidden_option_for_leading_dot_pattern(
|
||||
config: &Config,
|
||||
pattern_regexps: &[String],
|
||||
) -> Result<()> {
|
||||
if cfg!(unix)
|
||||
&& config.ignore_hidden
|
||||
&& pattern_regexps
|
||||
.iter()
|
||||
.any(|pat| pattern_matches_strings_with_leading_dot(pat))
|
||||
{
|
||||
Err(anyhow!(
|
||||
"The pattern(s) seems to only match files with a leading dot, but hidden files are \
|
||||
filtered by default. Consider adding -H/--hidden to search hidden files as well \
|
||||
or adjust your search pattern(s)."
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn build_regex(pattern_regex: String, config: &Config) -> Result<regex::bytes::Regex> {
|
||||
RegexBuilder::new(&pattern_regex)
|
||||
.case_insensitive(!config.case_sensitive)
|
||||
.dot_matches_new_line(true)
|
||||
.build()
|
||||
|
@ -408,20 +479,5 @@ fn run() -> Result<ExitCode> {
|
|||
also use the '--glob' option to match on a glob pattern.",
|
||||
e.to_string()
|
||||
)
|
||||
})?;
|
||||
|
||||
walk::scan(&search_paths, Arc::new(re), Arc::new(config))
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let result = run();
|
||||
match result {
|
||||
Ok(exit_code) => {
|
||||
process::exit(exit_code.into());
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("[fd error]: {}", err);
|
||||
process::exit(ExitCode::GeneralError.into());
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
216
src/output.rs
216
src/output.rs
|
@ -1,120 +1,180 @@
|
|||
use std::io::{self, StdoutLock, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::borrow::Cow;
|
||||
use std::io::{self, Write};
|
||||
|
||||
use ansi_term;
|
||||
use lscolors::{LsColors, Style};
|
||||
use lscolors::{Indicator, LsColors, Style};
|
||||
|
||||
use crate::config::Config;
|
||||
use crate::dir_entry::DirEntry;
|
||||
use crate::error::print_error;
|
||||
use crate::exit_codes::ExitCode;
|
||||
use crate::filesystem::strip_current_dir;
|
||||
use crate::options::Options;
|
||||
use crate::fmt::FormatTemplate;
|
||||
|
||||
pub fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
|
||||
path.replace(std::path::MAIN_SEPARATOR, &new_path_separator)
|
||||
fn replace_path_separator(path: &str, new_path_separator: &str) -> String {
|
||||
path.replace(std::path::MAIN_SEPARATOR, new_path_separator)
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
pub fn print_entry(
|
||||
stdout: &mut StdoutLock,
|
||||
entry: &PathBuf,
|
||||
config: &Options,
|
||||
wants_to_quit: &Arc<AtomicBool>,
|
||||
) {
|
||||
let path = if entry.is_absolute() {
|
||||
entry.as_path()
|
||||
pub fn print_entry<W: Write>(stdout: &mut W, entry: &DirEntry, config: &Config) {
|
||||
// TODO: use format if supplied
|
||||
let r = if let Some(ref format) = config.format {
|
||||
print_entry_format(stdout, entry, config, format)
|
||||
} else if let Some(ref ls_colors) = config.ls_colors {
|
||||
print_entry_colorized(stdout, entry, config, ls_colors)
|
||||
} else {
|
||||
strip_current_dir(entry)
|
||||
print_entry_uncolorized(stdout, entry, config)
|
||||
};
|
||||
|
||||
let r = if let Some(ref ls_colors) = config.ls_colors {
|
||||
print_entry_colorized(stdout, path, config, ls_colors, &wants_to_quit)
|
||||
} else {
|
||||
print_entry_uncolorized(stdout, path, config)
|
||||
};
|
||||
|
||||
if r.is_err() {
|
||||
// Probably a broken pipe. Exit gracefully.
|
||||
process::exit(ExitCode::GeneralError.into());
|
||||
if let Err(e) = r {
|
||||
if e.kind() == ::std::io::ErrorKind::BrokenPipe {
|
||||
// Exit gracefully in case of a broken pipe (e.g. 'fd ... | head -n 3').
|
||||
ExitCode::Success.exit();
|
||||
} else {
|
||||
print_error(format!("Could not write to output: {}", e));
|
||||
ExitCode::GeneralError.exit();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_colorized(
|
||||
stdout: &mut StdoutLock,
|
||||
path: &Path,
|
||||
config: &Options,
|
||||
ls_colors: &LsColors,
|
||||
wants_to_quit: &Arc<AtomicBool>,
|
||||
// Display a trailing slash if the path is a directory and the config option is enabled.
|
||||
// If the path_separator option is set, display that instead.
|
||||
// The trailing slash will not be colored.
|
||||
#[inline]
|
||||
fn print_trailing_slash<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
style: Option<&Style>,
|
||||
) -> io::Result<()> {
|
||||
let default_style = ansi_term::Style::default();
|
||||
|
||||
// Traverse the path and colorize each component
|
||||
for (component, style) in ls_colors.style_for_path_components(path) {
|
||||
let style = style
|
||||
.map(Style::to_ansi_term_style)
|
||||
.unwrap_or(default_style);
|
||||
|
||||
let mut path_string = component.to_string_lossy();
|
||||
if let Some(ref separator) = config.path_separator {
|
||||
*path_string.to_mut() = replace_path_separator(&path_string, &separator);
|
||||
}
|
||||
write!(stdout, "{}", style.paint(path_string))?;
|
||||
|
||||
// TODO: can we move this out of the if-statement? Why do we call it that often?
|
||||
if wants_to_quit.load(Ordering::Relaxed) {
|
||||
writeln!(stdout)?;
|
||||
process::exit(ExitCode::KilledBySigint.into());
|
||||
}
|
||||
}
|
||||
|
||||
if config.null_separator {
|
||||
write!(stdout, "\0")
|
||||
} else {
|
||||
writeln!(stdout)
|
||||
if entry.file_type().map_or(false, |ft| ft.is_dir()) {
|
||||
write!(
|
||||
stdout,
|
||||
"{}",
|
||||
style
|
||||
.map(Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default()
|
||||
.paint(&config.actual_path_separator)
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_uncolorized_base(
|
||||
stdout: &mut StdoutLock,
|
||||
path: &Path,
|
||||
config: &Options,
|
||||
fn print_entry_format<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
format: &FormatTemplate,
|
||||
) -> io::Result<()> {
|
||||
let separator = if config.null_separator { "\0" } else { "\n" };
|
||||
let output = format.generate(
|
||||
entry.stripped_path(config),
|
||||
config.path_separator.as_deref(),
|
||||
);
|
||||
// TODO: support writing raw bytes on unix?
|
||||
write!(stdout, "{}{}", output.to_string_lossy(), separator)
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_colorized<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
ls_colors: &LsColors,
|
||||
) -> io::Result<()> {
|
||||
// Split the path between the parent and the last component
|
||||
let mut offset = 0;
|
||||
let path = entry.stripped_path(config);
|
||||
let path_str = path.to_string_lossy();
|
||||
|
||||
if let Some(parent) = path.parent() {
|
||||
offset = parent.to_string_lossy().len();
|
||||
for c in path_str[offset..].chars() {
|
||||
if std::path::is_separator(c) {
|
||||
offset += c.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if offset > 0 {
|
||||
let mut parent_str = Cow::from(&path_str[..offset]);
|
||||
if let Some(ref separator) = config.path_separator {
|
||||
*parent_str.to_mut() = replace_path_separator(&parent_str, separator);
|
||||
}
|
||||
|
||||
let style = ls_colors
|
||||
.style_for_indicator(Indicator::Directory)
|
||||
.map(Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default();
|
||||
write!(stdout, "{}", style.paint(parent_str))?;
|
||||
}
|
||||
|
||||
let style = entry
|
||||
.style(ls_colors)
|
||||
.map(Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default();
|
||||
write!(stdout, "{}", style.paint(&path_str[offset..]))?;
|
||||
|
||||
print_trailing_slash(
|
||||
stdout,
|
||||
entry,
|
||||
config,
|
||||
ls_colors.style_for_indicator(Indicator::Directory),
|
||||
)?;
|
||||
|
||||
if config.null_separator {
|
||||
write!(stdout, "\0")?;
|
||||
} else {
|
||||
writeln!(stdout)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: this function is performance critical and can probably be optimized
|
||||
fn print_entry_uncolorized_base<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
) -> io::Result<()> {
|
||||
let separator = if config.null_separator { "\0" } else { "\n" };
|
||||
let path = entry.stripped_path(config);
|
||||
|
||||
let mut path_string = path.to_string_lossy();
|
||||
if let Some(ref separator) = config.path_separator {
|
||||
*path_string.to_mut() = replace_path_separator(&path_string, &separator);
|
||||
*path_string.to_mut() = replace_path_separator(&path_string, separator);
|
||||
}
|
||||
write!(stdout, "{}{}", path_string, separator)
|
||||
write!(stdout, "{}", path_string)?;
|
||||
print_trailing_slash(stdout, entry, config, None)?;
|
||||
write!(stdout, "{}", separator)
|
||||
}
|
||||
|
||||
#[cfg(not(unix))]
|
||||
fn print_entry_uncolorized(
|
||||
stdout: &mut StdoutLock,
|
||||
path: &Path,
|
||||
config: &Options,
|
||||
fn print_entry_uncolorized<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
) -> io::Result<()> {
|
||||
print_entry_uncolorized_base(stdout, path, config)
|
||||
print_entry_uncolorized_base(stdout, entry, config)
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn print_entry_uncolorized(
|
||||
stdout: &mut StdoutLock,
|
||||
path: &Path,
|
||||
config: &Options,
|
||||
fn print_entry_uncolorized<W: Write>(
|
||||
stdout: &mut W,
|
||||
entry: &DirEntry,
|
||||
config: &Config,
|
||||
) -> io::Result<()> {
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
|
||||
if config.interactive_terminal || config.path_separator.is_some() {
|
||||
// Fall back to the base implementation
|
||||
print_entry_uncolorized_base(stdout, path, config)
|
||||
print_entry_uncolorized_base(stdout, entry, config)
|
||||
} else {
|
||||
// Print path as raw bytes, allowing invalid UTF-8 filenames to be passed to other processes
|
||||
let separator = if config.null_separator { b"\0" } else { b"\n" };
|
||||
stdout.write_all(path.as_os_str().as_bytes())?;
|
||||
stdout.write_all(entry.stripped_path(config).as_os_str().as_bytes())?;
|
||||
print_trailing_slash(stdout, entry, config, None)?;
|
||||
stdout.write_all(separator)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,7 +3,7 @@ use regex_syntax::ParserBuilder;
|
|||
|
||||
/// Determine if a regex pattern contains a literal uppercase character.
|
||||
pub fn pattern_has_uppercase_char(pattern: &str) -> bool {
|
||||
let mut parser = ParserBuilder::new().allow_invalid_utf8(true).build();
|
||||
let mut parser = ParserBuilder::new().utf8(false).build();
|
||||
|
||||
parser
|
||||
.parse(pattern)
|
||||
|
@ -15,25 +15,69 @@ pub fn pattern_has_uppercase_char(pattern: &str) -> bool {
|
|||
fn hir_has_uppercase_char(hir: &Hir) -> bool {
|
||||
use regex_syntax::hir::*;
|
||||
|
||||
match *hir.kind() {
|
||||
HirKind::Literal(Literal::Unicode(c)) => c.is_uppercase(),
|
||||
HirKind::Literal(Literal::Byte(b)) => char::from(b).is_uppercase(),
|
||||
HirKind::Class(Class::Unicode(ref ranges)) => ranges
|
||||
match hir.kind() {
|
||||
HirKind::Literal(Literal(bytes)) => match std::str::from_utf8(bytes) {
|
||||
Ok(s) => s.chars().any(|c| c.is_uppercase()),
|
||||
Err(_) => bytes.iter().any(|b| char::from(*b).is_uppercase()),
|
||||
},
|
||||
HirKind::Class(Class::Unicode(ranges)) => ranges
|
||||
.iter()
|
||||
.any(|r| r.start().is_uppercase() || r.end().is_uppercase()),
|
||||
HirKind::Class(Class::Bytes(ref ranges)) => ranges
|
||||
HirKind::Class(Class::Bytes(ranges)) => ranges
|
||||
.iter()
|
||||
.any(|r| char::from(r.start()).is_uppercase() || char::from(r.end()).is_uppercase()),
|
||||
HirKind::Group(Group { ref hir, .. }) | HirKind::Repetition(Repetition { ref hir, .. }) => {
|
||||
hir_has_uppercase_char(hir)
|
||||
HirKind::Capture(Capture { sub, .. }) | HirKind::Repetition(Repetition { sub, .. }) => {
|
||||
hir_has_uppercase_char(sub)
|
||||
}
|
||||
HirKind::Concat(ref hirs) | HirKind::Alternation(ref hirs) => {
|
||||
HirKind::Concat(hirs) | HirKind::Alternation(hirs) => {
|
||||
hirs.iter().any(hir_has_uppercase_char)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Determine if a regex pattern only matches strings starting with a literal dot (hidden files)
|
||||
pub fn pattern_matches_strings_with_leading_dot(pattern: &str) -> bool {
|
||||
let mut parser = ParserBuilder::new().utf8(false).build();
|
||||
|
||||
parser
|
||||
.parse(pattern)
|
||||
.map(|hir| hir_matches_strings_with_leading_dot(&hir))
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
/// See above.
|
||||
fn hir_matches_strings_with_leading_dot(hir: &Hir) -> bool {
|
||||
use regex_syntax::hir::*;
|
||||
|
||||
// Note: this only really detects the simplest case where a regex starts with
|
||||
// "^\\.", i.e. a start text anchor and a literal dot character. There are a lot
|
||||
// of other patterns that ONLY match hidden files, e.g. ^(\\.foo|\\.bar) which are
|
||||
// not (yet) detected by this algorithm.
|
||||
match hir.kind() {
|
||||
HirKind::Concat(hirs) => {
|
||||
let mut hirs = hirs.iter();
|
||||
if let Some(hir) = hirs.next() {
|
||||
if hir.kind() != &HirKind::Look(Look::Start) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Some(hir) = hirs.next() {
|
||||
match hir.kind() {
|
||||
HirKind::Literal(Literal(bytes)) => bytes.starts_with(&[b'.']),
|
||||
_ => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pattern_has_uppercase_char_simple() {
|
||||
assert!(pattern_has_uppercase_char("A"));
|
||||
|
@ -50,3 +94,12 @@ fn pattern_has_uppercase_char_advanced() {
|
|||
assert!(!pattern_has_uppercase_char(r"\Acargo"));
|
||||
assert!(!pattern_has_uppercase_char(r"carg\x6F"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn matches_strings_with_leading_dot_simple() {
|
||||
assert!(pattern_matches_strings_with_leading_dot("^\\.gitignore"));
|
||||
|
||||
assert!(!pattern_matches_strings_with_leading_dot("^.gitignore"));
|
||||
assert!(!pattern_matches_strings_with_leading_dot("\\.gitignore"));
|
||||
assert!(!pattern_matches_strings_with_leading_dot("^gitignore"));
|
||||
}
|
||||
|
|
996
src/walk.rs
996
src/walk.rs
File diff suppressed because it is too large
Load Diff
|
@ -8,7 +8,7 @@ use std::os::windows;
|
|||
use std::path::{Path, PathBuf};
|
||||
use std::process;
|
||||
|
||||
use tempdir::TempDir;
|
||||
use tempfile::TempDir;
|
||||
|
||||
/// Environment for the integration tests.
|
||||
pub struct TestEnv {
|
||||
|
@ -20,6 +20,9 @@ pub struct TestEnv {
|
|||
|
||||
/// Normalize each line by sorting the whitespace-separated words
|
||||
normalize_line: bool,
|
||||
|
||||
/// Temporary directory for storing test config (global ignore file)
|
||||
config_dir: Option<TempDir>,
|
||||
}
|
||||
|
||||
/// Create the working directory and the test files.
|
||||
|
@ -27,7 +30,7 @@ fn create_working_directory(
|
|||
directories: &[&'static str],
|
||||
files: &[&'static str],
|
||||
) -> Result<TempDir, io::Error> {
|
||||
let temp_dir = TempDir::new("fd-tests")?;
|
||||
let temp_dir = tempfile::Builder::new().prefix("fd-tests").tempdir()?;
|
||||
|
||||
{
|
||||
let root = temp_dir.path();
|
||||
|
@ -59,6 +62,16 @@ fn create_working_directory(
|
|||
Ok(temp_dir)
|
||||
}
|
||||
|
||||
fn create_config_directory_with_global_ignore(ignore_file_content: &str) -> io::Result<TempDir> {
|
||||
let config_dir = tempfile::Builder::new().prefix("fd-config").tempdir()?;
|
||||
let fd_dir = config_dir.path().join("fd");
|
||||
fs::create_dir(&fd_dir)?;
|
||||
let mut ignore_file = fs::File::create(fd_dir.join("ignore"))?;
|
||||
ignore_file.write_all(ignore_file_content.as_bytes())?;
|
||||
|
||||
Ok(config_dir)
|
||||
}
|
||||
|
||||
/// Find the *fd* executable.
|
||||
fn find_fd_exe() -> PathBuf {
|
||||
// Tests exe is in target/debug/deps, the *fd* exe is in target/debug
|
||||
|
@ -116,10 +129,10 @@ fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {
|
|||
.lines()
|
||||
.map(|line| {
|
||||
let line = if trim_start { line.trim_start() } else { line };
|
||||
let line = line.replace('/', &std::path::MAIN_SEPARATOR.to_string());
|
||||
let line = line.replace('/', std::path::MAIN_SEPARATOR_STR);
|
||||
if normalize_line {
|
||||
let mut words: Vec<_> = line.split_whitespace().collect();
|
||||
words.sort();
|
||||
words.sort_unstable();
|
||||
return words.join(" ");
|
||||
}
|
||||
line
|
||||
|
@ -130,6 +143,17 @@ fn normalize_output(s: &str, trim_start: bool, normalize_line: bool) -> String {
|
|||
lines.join("\n")
|
||||
}
|
||||
|
||||
/// Trim whitespace from the beginning of each line.
|
||||
fn trim_lines(s: &str) -> String {
|
||||
s.lines()
|
||||
.map(|line| line.trim_start())
|
||||
.fold(String::new(), |mut str, line| {
|
||||
str.push_str(line);
|
||||
str.push('\n');
|
||||
str
|
||||
})
|
||||
}
|
||||
|
||||
impl TestEnv {
|
||||
pub fn new(directories: &[&'static str], files: &[&'static str]) -> TestEnv {
|
||||
let temp_dir = create_working_directory(directories, files).expect("working directory");
|
||||
|
@ -139,6 +163,7 @@ impl TestEnv {
|
|||
temp_dir,
|
||||
fd_exe,
|
||||
normalize_line: false,
|
||||
config_dir: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -147,6 +172,16 @@ impl TestEnv {
|
|||
temp_dir: self.temp_dir,
|
||||
fd_exe: self.fd_exe,
|
||||
normalize_line: normalize,
|
||||
config_dir: self.config_dir,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn global_ignore_file(self, content: &str) -> TestEnv {
|
||||
let config_dir =
|
||||
create_config_directory_with_global_ignore(content).expect("config directory");
|
||||
TestEnv {
|
||||
config_dir: Some(config_dir),
|
||||
..self
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -158,7 +193,9 @@ impl TestEnv {
|
|||
let root = self.test_root();
|
||||
let broken_symlink_link = root.join(link_path);
|
||||
{
|
||||
let temp_target_dir = TempDir::new("fd-tests-broken-symlink")?;
|
||||
let temp_target_dir = tempfile::Builder::new()
|
||||
.prefix("fd-tests-broken-symlink")
|
||||
.tempdir()?;
|
||||
let broken_symlink_target = temp_target_dir.path().join("broken_symlink_target");
|
||||
fs::File::create(&broken_symlink_target)?;
|
||||
#[cfg(unix)]
|
||||
|
@ -174,6 +211,12 @@ impl TestEnv {
|
|||
self.temp_dir.path().to_path_buf()
|
||||
}
|
||||
|
||||
/// Get the path of the fd executable.
|
||||
#[cfg_attr(windows, allow(unused))]
|
||||
pub fn test_exe(&self) -> &PathBuf {
|
||||
&self.fd_exe
|
||||
}
|
||||
|
||||
/// Get the root directory of the file system.
|
||||
pub fn system_root(&self) -> PathBuf {
|
||||
let mut components = self.temp_dir.path().components();
|
||||
|
@ -187,22 +230,30 @@ impl TestEnv {
|
|||
path: P,
|
||||
args: &[&str],
|
||||
) -> process::Output {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
cmd.arg("--no-global-ignore-file").args(args);
|
||||
|
||||
// Run *fd*.
|
||||
let output = cmd.output().expect("fd output");
|
||||
let output = self.run_command(path.as_ref(), args);
|
||||
|
||||
// Check for exit status.
|
||||
if !output.status.success() {
|
||||
panic!(format_exit_error(args, &output));
|
||||
panic!("{}", format_exit_error(args, &output));
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
pub fn assert_success_and_get_normalized_output<P: AsRef<Path>>(
|
||||
&self,
|
||||
path: P,
|
||||
args: &[&str],
|
||||
) -> String {
|
||||
let output = self.assert_success_and_get_output(path, args);
|
||||
normalize_output(
|
||||
&String::from_utf8_lossy(&output.stdout),
|
||||
false,
|
||||
self.normalize_line,
|
||||
)
|
||||
}
|
||||
|
||||
/// Assert that calling *fd* with the specified arguments produces the expected output.
|
||||
pub fn assert_output(&self, args: &[&str], expected: &str) {
|
||||
self.assert_output_subdirectory(".", args, expected)
|
||||
|
@ -224,34 +275,51 @@ impl TestEnv {
|
|||
args: &[&str],
|
||||
expected: &str,
|
||||
) {
|
||||
let output = self.assert_success_and_get_output(path, args);
|
||||
|
||||
// Normalize both expected and actual output.
|
||||
let expected = normalize_output(expected, true, self.normalize_line);
|
||||
let actual = normalize_output(
|
||||
&String::from_utf8_lossy(&output.stdout),
|
||||
false,
|
||||
self.normalize_line,
|
||||
);
|
||||
let actual = self.assert_success_and_get_normalized_output(path, args);
|
||||
|
||||
// Compare actual output to expected output.
|
||||
if expected != actual {
|
||||
panic!(format_output_error(args, &expected, &actual));
|
||||
panic!("{}", format_output_error(args, &expected, &actual));
|
||||
}
|
||||
}
|
||||
|
||||
/// Assert that calling *fd* with the specified arguments produces the expected error,
|
||||
/// and does not succeed.
|
||||
pub fn assert_failure_with_error(&self, args: &[&str], expected: &str) {
|
||||
let status = self.assert_error_subdirectory(".", args, expected);
|
||||
let status = self.assert_error_subdirectory(".", args, Some(expected));
|
||||
if status.success() {
|
||||
panic!("error '{}' did not occur.", expected);
|
||||
}
|
||||
}
|
||||
|
||||
/// Assert that calling *fd* with the specified arguments does not succeed.
|
||||
pub fn assert_failure(&self, args: &[&str]) {
|
||||
let status = self.assert_error_subdirectory(".", args, None);
|
||||
if status.success() {
|
||||
panic!("Failure did not occur as expected.");
|
||||
}
|
||||
}
|
||||
|
||||
/// Assert that calling *fd* with the specified arguments produces the expected error.
|
||||
pub fn assert_error(&self, args: &[&str], expected: &str) -> process::ExitStatus {
|
||||
self.assert_error_subdirectory(".", args, expected)
|
||||
self.assert_error_subdirectory(".", args, Some(expected))
|
||||
}
|
||||
|
||||
fn run_command(&self, path: &Path, args: &[&str]) -> process::Output {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
if let Some(config_dir) = &self.config_dir {
|
||||
cmd.env("XDG_CONFIG_HOME", config_dir.path());
|
||||
} else {
|
||||
cmd.arg("--no-global-ignore-file");
|
||||
}
|
||||
cmd.args(args);
|
||||
|
||||
// Run *fd*.
|
||||
cmd.output().expect("fd output")
|
||||
}
|
||||
|
||||
/// Assert that calling *fd* in the specified path under the root working directory,
|
||||
|
@ -260,29 +328,24 @@ impl TestEnv {
|
|||
&self,
|
||||
path: P,
|
||||
args: &[&str],
|
||||
expected: &str,
|
||||
expected: Option<&str>,
|
||||
) -> process::ExitStatus {
|
||||
// Setup *fd* command.
|
||||
let mut cmd = process::Command::new(&self.fd_exe);
|
||||
cmd.current_dir(self.temp_dir.path().join(path));
|
||||
cmd.arg("--no-global-ignore-file").args(args);
|
||||
let output = self.run_command(path.as_ref(), args);
|
||||
|
||||
// Run *fd*.
|
||||
let output = cmd.output().expect("fd output");
|
||||
if let Some(expected) = expected {
|
||||
// Normalize both expected and actual output.
|
||||
let expected_error = trim_lines(expected);
|
||||
let actual_err = trim_lines(&String::from_utf8_lossy(&output.stderr));
|
||||
|
||||
// Normalize both expected and actual output.
|
||||
let expected_error = normalize_output(expected, true, self.normalize_line);
|
||||
let actual_err = normalize_output(
|
||||
&String::from_utf8_lossy(&output.stderr),
|
||||
false,
|
||||
self.normalize_line,
|
||||
);
|
||||
|
||||
// Compare actual output to expected output.
|
||||
if !actual_err.trim_start().starts_with(&expected_error) {
|
||||
panic!(format_output_error(args, &expected_error, &actual_err));
|
||||
// Compare actual output to expected output.
|
||||
if !actual_err.trim_start().starts_with(&expected_error) {
|
||||
panic!(
|
||||
"{}",
|
||||
format_output_error(args, &expected_error, &actual_err)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return output.status;
|
||||
output.status
|
||||
}
|
||||
}
|
||||
|
|
1305
tests/tests.rs
1305
tests/tests.rs
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue