Compare commits


1 Commit

Author: andy.boot
SHA1: 7f93c90f5e
Message: tests: Cleanup test_exact_ouput
Date: 2024-06-19 22:39:20 +01:00
29 changed files with 372 additions and 1314 deletions


@@ -45,11 +45,6 @@ jobs:
override: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy
- name: Install wget for Windows
if: matrix.job.os == 'windows-latest'
run: choco install wget --no-progress
- name: typos-action
uses: crate-ci/typos@v1.28.4
- name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1


@@ -1,11 +0,0 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: cargo-check
stages: [commit]
- id: fmt
stages: [commit]
- id: clippy
args: [--all-targets, --all-features]
stages: [commit]

Cargo.lock (generated): 309 changed lines

@@ -37,48 +37,47 @@ dependencies = [
[[package]]
name = "anstream"
version = "0.6.15"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526"
checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is_terminal_polyfill",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.8"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc"
[[package]]
name = "anstyle-parse"
version = "0.2.5"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb"
checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.1.1"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a"
checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648"
dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.4"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8"
checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7"
dependencies = [
"anstyle",
"windows-sys 0.52.0",
@@ -86,14 +85,13 @@ dependencies = [
[[package]]
name = "assert_cmd"
version = "2.0.16"
version = "2.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc1835b7f27878de8525dc71410b5a31cdcc5f230aed5ba5df968e09c201b23d"
checksum = "ed72493ac66d5804837f480ab3766c72bdfab91a65e565fc54fa9e42db0073a8"
dependencies = [
"anstyle",
"bstr",
"doc-comment",
"libc",
"predicates",
"predicates-core",
"predicates-tree",
@@ -102,9 +100,9 @@ dependencies = [
[[package]]
name = "autocfg"
version = "1.3.0"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80"
[[package]]
name = "bitflags"
@@ -114,15 +112,15 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.6.0"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
[[package]]
name = "bstr"
version = "1.10.0"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c"
checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706"
dependencies = [
"memchr",
"regex-automata",
@@ -137,9 +135,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "cc"
version = "1.1.8"
version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "504bdec147f2cc13c8b57ed9401fd8a147cc66b67ad5cb241394244f2c947549"
checksum = "d32a725bc159af97c3e629873bb9f88fb8cf8a4867175f76dc987815ea07c83b"
[[package]]
name = "cfg-if"
@@ -164,23 +162,23 @@ dependencies = [
"js-sys",
"num-traits",
"wasm-bindgen",
"windows-targets 0.52.6",
"windows-targets 0.52.4",
]
[[package]]
name = "clap"
version = "4.5.14"
version = "4.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c937d4061031a6d0c8da4b9a4f98a172fc2976dfb1c19213a9cf7d0d3c837e36"
checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.5.14"
version = "4.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85379ba512b21a328adf887e85f7742d12e96eb31f3ef077df4ffc26b506ffed"
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4"
dependencies = [
"anstream",
"anstyle",
@@ -190,24 +188,24 @@ dependencies = [
[[package]]
name = "clap_complete"
version = "4.5.13"
version = "4.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa3c596da3cf0983427b0df0dba359df9182c13bd5b519b585a482b0c351f4e8"
checksum = "885e4d7d5af40bfb99ae6f9433e292feac98d452dcb3ec3d25dfe7552b77da8c"
dependencies = [
"clap",
]
[[package]]
name = "clap_lex"
version = "0.7.2"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
[[package]]
name = "clap_mangen"
version = "0.2.23"
version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17415fd4dfbea46e3274fcd8d368284519b358654772afb700dc2e8d2b24eeb"
checksum = "e1dd95b5ebb5c1c54581dd6346f3ed6a79a3eef95dd372fc2ac13d535535300e"
dependencies = [
"clap",
"roff",
@@ -215,9 +213,9 @@ dependencies = [
[[package]]
name = "colorchoice"
version = "1.0.2"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "config-file"
@@ -257,9 +255,9 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.8.20"
version = "0.8.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
[[package]]
name = "ctrlc"
@@ -305,7 +303,7 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"
[[package]]
name = "du-dust"
version = "1.1.1"
version = "1.0.0"
dependencies = [
"ansi_term",
"assert_cmd",
@@ -318,7 +316,6 @@ dependencies = [
"directories",
"filesize",
"lscolors",
"portable-atomic",
"rayon",
"regex",
"serde",
@@ -334,15 +331,15 @@ dependencies = [
[[package]]
name = "either"
version = "1.13.0"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a"
[[package]]
name = "errno"
version = "0.3.9"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
dependencies = [
"libc",
"windows-sys 0.52.0",
@@ -350,9 +347,9 @@ dependencies = [
[[package]]
name = "fastrand"
version = "2.1.0"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a"
checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984"
[[package]]
name = "filesize"
@@ -365,9 +362,9 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.15"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5"
dependencies = [
"cfg-if",
"libc",
@@ -414,12 +411,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
[[package]]
name = "itoa"
version = "1.0.11"
@@ -443,12 +434,13 @@ checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]]
name = "libredox"
version = "0.1.3"
version = "0.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.5.0",
"libc",
"redox_syscall",
]
[[package]]
@@ -459,15 +451,15 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
[[package]]
name = "linux-raw-sys"
version = "0.4.14"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "log"
version = "0.4.22"
version = "0.4.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
[[package]]
name = "lscolors"
@@ -481,9 +473,9 @@ dependencies = [
[[package]]
name = "memchr"
version = "2.7.4"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149"
[[package]]
name = "nix"
@@ -491,7 +483,7 @@ version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.5.0",
"cfg-if",
"cfg_aliases",
"libc",
@@ -518,9 +510,9 @@ dependencies = [
[[package]]
name = "num-traits"
version = "0.2.19"
version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
dependencies = [
"autocfg",
]
@@ -537,17 +529,11 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
[[package]]
name = "portable-atomic"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265"
[[package]]
name = "predicates"
version = "3.1.2"
version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e9086cc7640c29a356d1a29fd134380bee9d8f79a17410aa76e7ad295f42c97"
checksum = "68b87bfd4605926cdfefc1c3b5f8fe560e3feca9d5552cf68c466d3d8236c7e8"
dependencies = [
"anstyle",
"difflib",
@@ -556,15 +542,15 @@ dependencies = [
[[package]]
name = "predicates-core"
version = "1.0.8"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae8177bee8e75d6846599c6b9ff679ed51e882816914eec639944d7c9aa11931"
checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174"
[[package]]
name = "predicates-tree"
version = "1.0.11"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41b740d195ed3166cd147c8047ec98db0e22ec019eb8eeb76d343b795304fb13"
checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf"
dependencies = [
"predicates-core",
"termtree",
@@ -572,18 +558,18 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.86"
version = "1.0.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.36"
version = "1.0.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef"
dependencies = [
"proc-macro2",
]
@@ -609,10 +595,19 @@ dependencies = [
]
[[package]]
name = "redox_users"
version = "0.4.5"
name = "redox_syscall"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_users"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4"
dependencies = [
"getrandom",
"libredox",
@@ -621,9 +616,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.10.6"
version = "1.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
dependencies = [
"aho-corasick",
"memchr",
@@ -633,9 +628,9 @@ dependencies = [
[[package]]
name = "regex-automata"
version = "0.4.7"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df"
checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
dependencies = [
"aho-corasick",
"memchr",
@@ -644,15 +639,15 @@ dependencies = [
[[package]]
name = "regex-syntax"
version = "0.8.4"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "roff"
version = "0.2.2"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88f8660c1ff60292143c98d08fc6e2f654d722db50410e3f3797d40baaf9d8f3"
checksum = "b833d8d034ea094b1ea68aa6d5c740e0d04bad9d16568d08ba6f76823a114316"
[[package]]
name = "rustix"
@@ -670,37 +665,37 @@ dependencies = [
[[package]]
name = "rustix"
version = "0.38.34"
version = "0.38.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f"
checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
dependencies = [
"bitflags 2.6.0",
"bitflags 2.5.0",
"errno",
"libc",
"linux-raw-sys 0.4.14",
"linux-raw-sys 0.4.13",
"windows-sys 0.52.0",
]
[[package]]
name = "ryu"
version = "1.0.18"
version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "serde"
version = "1.0.205"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e33aedb1a7135da52b7c21791455563facbbcc43d0f0f66165b42c21b3dfb150"
checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.205"
version = "1.0.197"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "692d6f5ac90220161d6774db30c662202721e64aed9058d2c394f451261420c1"
checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b"
dependencies = [
"proc-macro2",
"quote",
@@ -709,12 +704,11 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.122"
version = "1.0.116"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784b6203951c57ff748476b126ccb5e8e2959a5c19e5c617ab1956be3dbc68da"
checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813"
dependencies = [
"itoa",
"memchr",
"ryu",
"serde",
]
@@ -727,15 +721,15 @@ checksum = "e51f1e89f093f99e7432c491c382b88a6860a5adbe6bf02574bf0a08efff1978"
[[package]]
name = "strsim"
version = "0.11.1"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01"
[[package]]
name = "syn"
version = "2.0.72"
version = "2.0.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af"
checksum = "002a1b3dbf967edfafc32655d0f377ab0bb7b994aa1d32c8cc7e9b8bf3ebb8f0"
dependencies = [
"proc-macro2",
"quote",
@@ -759,15 +753,14 @@ dependencies = [
[[package]]
name = "tempfile"
version = "3.12.0"
version = "3.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
dependencies = [
"cfg-if",
"fastrand",
"once_cell",
"rustix 0.38.34",
"windows-sys 0.59.0",
"rustix 0.38.32",
"windows-sys 0.52.0",
]
[[package]]
@@ -788,18 +781,18 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"
[[package]]
name = "thiserror"
version = "1.0.63"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.63"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7"
dependencies = [
"proc-macro2",
"quote",
@@ -829,15 +822,15 @@ checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
[[package]]
name = "unicode-width"
version = "0.1.13"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
[[package]]
name = "utf8parse"
version = "0.2.2"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "wait-timeout"
@@ -926,11 +919,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
[[package]]
name = "winapi-util"
version = "0.1.9"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596"
dependencies = [
"windows-sys 0.59.0",
"winapi",
]
[[package]]
@@ -945,7 +938,7 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
dependencies = [
"windows-targets 0.52.6",
"windows-targets 0.52.4",
]
[[package]]
@@ -963,16 +956,7 @@ version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets 0.52.6",
"windows-targets 0.52.4",
]
[[package]]
@@ -992,18 +976,17 @@ dependencies = [
[[package]]
name = "windows-targets"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
dependencies = [
"windows_aarch64_gnullvm 0.52.6",
"windows_aarch64_msvc 0.52.6",
"windows_i686_gnu 0.52.6",
"windows_i686_gnullvm",
"windows_i686_msvc 0.52.6",
"windows_x86_64_gnu 0.52.6",
"windows_x86_64_gnullvm 0.52.6",
"windows_x86_64_msvc 0.52.6",
"windows_aarch64_gnullvm 0.52.4",
"windows_aarch64_msvc 0.52.4",
"windows_i686_gnu 0.52.4",
"windows_i686_msvc 0.52.4",
"windows_x86_64_gnu 0.52.4",
"windows_x86_64_gnullvm 0.52.4",
"windows_x86_64_msvc 0.52.4",
]
[[package]]
@@ -1014,9 +997,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
[[package]]
name = "windows_aarch64_msvc"
@@ -1026,9 +1009,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
[[package]]
name = "windows_i686_gnu"
@@ -1038,15 +1021,9 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
[[package]]
name = "windows_i686_msvc"
@@ -1056,9 +1033,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
[[package]]
name = "windows_x86_64_gnu"
@@ -1068,9 +1045,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
[[package]]
name = "windows_x86_64_gnullvm"
@@ -1080,9 +1057,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
[[package]]
name = "windows_x86_64_msvc"
@@ -1092,6 +1069,6 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"


@@ -1,7 +1,7 @@
[package]
name = "du-dust"
description = "A more intuitive version of du"
version = "1.1.1"
version = "1.0.0"
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
edition = "2021"
readme = "README.md"
@@ -44,9 +44,6 @@ sysinfo = "0.27"
ctrlc = "3.4"
chrono = "0.4"
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
portable-atomic = "1.4"
[target.'cfg(windows)'.dependencies]
winapi-util = "0.1"
filesize = "0.2.0"
@@ -86,16 +83,6 @@ assets = [
"usr/share/doc/du-dust/README",
"644",
],
[
"man-page/dust.1",
"usr/share/man/man1/dust.1",
"644",
],
[
"completions/dust.bash",
"usr/share/bash-completion/completions/dust",
"644",
],
]
extended-description = """\
Dust is meant to give you an instant overview of which directories are using


@@ -27,17 +27,11 @@ Because I want an easy way to see where my disk is being used.
- `brew install dust`
#### [Snap](https://ubuntu.com/core/services/guide/snaps-intro) Ubuntu and [supported systems](https://snapcraft.io/docs/installing-snapd)
- `snap install dust`
Note: `dust` installed through `snap` can only access files stored in the `/home` directory. See daniejstriata/dust-snap#2 for more information.
#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)
- `pacstall -I dust-bin`
#### Anaconda (conda-forge)
### Anaconda (conda-forge)
- `conda install -c conda-forge dust`
@@ -45,10 +39,6 @@ Note: `dust` installed through `snap` can only access files stored in the `/home
- `deb-get install du-dust`
#### [x-cmd](https://www.x-cmd.com/pkg/#VPContent)
- `x env use dust`
#### Windows:
- `scoop install dust`
@@ -102,8 +92,6 @@ Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack
Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
```
## Config file


@@ -1,21 +1,14 @@
# ----------- To do a release ---------
# ----------- Pre release ---------
# Compare times of runs to check no drastic slow down:
# hyperfine 'target/release/dust /home/andy'
# hyperfine 'dust /home/andy'
# ----------- Release ---------
# inc version in cargo.toml
# cargo build --release
# commit changed files
# merge to master in github
# time target/release/dust ~/dev
# time dust ~dev
# edit version in cargo.toml
# tag a commit and push (increment version in Cargo.toml first):
# git tag v0.4.5
# git push origin v0.4.5
# cargo publish to put it in crates.io
# Optional: To install locally
# To install locally [Do before pushing it]
#cargo install --path .


@@ -14,16 +14,15 @@ _dust() {
fi
local context curcontext="$curcontext" state line
_arguments "${_arguments_options[@]}" : \
_arguments "${_arguments_options[@]}" \
'-d+[Depth to show]:DEPTH: ' \
'--depth=[Depth to show]:DEPTH: ' \
'-T+[Number of threads to use]: : ' \
'--threads=[Number of threads to use]: : ' \
'--config=[Specify a config file to use]:FILE:_files' \
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
'*-X+[Exclude any file or directory with this path]:PATH:_files' \
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \
'*-X+[Exclude any file or directory with this name]:PATH:_files' \
'*--ignore-directory=[Exclude any file or directory with this name]:PATH:_files' \
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'-z+[Minimum size file to include in output]:MIN_SIZE: ' \
@@ -38,16 +37,6 @@ _dust() {
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: : ' \
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: : ' \
'-A+[just like -mtime, but based on file access time]: : ' \
'--atime=[just like -mtime, but based on file access time]: : ' \
'-y+[just like -mtime, but based on file change time]: : ' \
'--ctime=[just like -mtime, but based on file change time]: : ' \
'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]: :_files' \
'*--collapse=[Keep these directories collapsed]: :_files' \
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
'--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
'-p[Subdirectories will not have their path shortened]' \
'--full-paths[Subdirectories will not have their path shortened]' \
'-L[dereference sym links - Treat sym links as directories and go into them]' \


@@ -25,11 +25,10 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
[CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('-T', 'T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--threads', 'threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'Specify a config file to use')
[CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--number-of-lines', 'number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('-X', 'X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('-X', 'X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
[CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
[CompletionResult]::new('-I', 'I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('--ignore-all-in-file', 'ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('-z', 'z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
@@ -44,16 +43,6 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
[CompletionResult]::new('--output-format', 'output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('-S', 'S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('--stack-size', 'stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('-M', 'M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('--mtime', 'mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('-A', 'A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('--atime', 'atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('-y', 'y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--ctime', 'ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--files0-from', 'files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
[CompletionResult]::new('--collapse', 'collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
[CompletionResult]::new('-m', 'm', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('--filetime', 'filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('--full-paths', 'full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('-L', 'L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')


@@ -19,7 +19,7 @@ _dust() {
case "${cmd}" in
dust)
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -h -V --depth --threads --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --help --version [PATH]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
@@ -41,21 +41,6 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--config)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--number-of-lines)
COMPREPLY=($(compgen -f "${cur}"))
return 0
@@ -74,12 +59,12 @@ _dust() {
;;
--ignore-all-in-file)
local oldifs
if [ -n "${IFS+x}" ]; then
if [[ -v IFS ]]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
if [[ -v oldifs ]]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
@@ -89,12 +74,12 @@ _dust() {
;;
-I)
local oldifs
if [ -n "${IFS+x}" ]; then
if [[ -v IFS ]]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
if [[ -v oldifs ]]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
@@ -150,46 +135,6 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--mtime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-M)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--atime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-A)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ctime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-y)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files0-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--collapse)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--filetime)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
-m)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
*)
COMPREPLY=()
;;


@@ -22,11 +22,10 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --depth 'Depth to show'
cand -T 'Number of threads to use'
cand --threads 'Number of threads to use'
cand --config 'Specify a config file to use'
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
cand -X 'Exclude any file or directory with this path'
cand --ignore-directory 'Exclude any file or directory with this path'
cand -X 'Exclude any file or directory with this name'
cand --ignore-directory 'Exclude any file or directory with this name'
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand -z 'Minimum size file to include in output'
@@ -41,16 +40,6 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.'
cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand -M '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)'
cand --mtime '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)'
cand -A 'just like -mtime, but based on file access time'
cand --atime 'just like -mtime, but based on file access time'
cand -y 'just like -mtime, but based on file change time'
cand --ctime 'just like -mtime, but based on file change time'
cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
cand --collapse 'Keep these directories collapsed'
cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand -p 'Subdirectories will not have their path shortened'
cand --full-paths 'Subdirectories will not have their path shortened'
cand -L 'dereference sym links - Treat sym links as directories and go into them'


@@ -1,21 +1,14 @@
complete -c dust -s d -l depth -d 'Depth to show' -r
complete -c dust -s T -l threads -d 'Number of threads to use' -r
complete -c dust -l config -d 'Specify a config file to use' -r -F
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this name' -r -F
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ' -r
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$" ' -r
complete -c dust -s w -l terminal_width -d 'Specify width of output overriding the auto detection of terminal width' -r
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "{si\t'',b\t'',k\t'',m\t'',g\t'',t\t'',kb\t'',mb\t'',gb\t'',tb\t''}"
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "{si '',b '',k '',m '',g '',t '',kb '',mb '',gb '',tb ''}"
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)' -r
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "{a\t'',c\t'',m\t''}"
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'


@@ -25,4 +25,4 @@ skip-total=true
ignore-hidden=true
# print sizes in powers of 1000 (e.g., 1.1G)
output-format="si"
iso=true


@@ -1,10 +1,10 @@
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.TH Dust 1 "Dust 1.1.1"
.TH Dust 1 "Dust 1.0.0"
.SH NAME
Dust \- Like du but more intuitive
.SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
.SH DESCRIPTION
Like du but more intuitive
.SH OPTIONS
@@ -15,9 +15,6 @@ Depth to show
\fB\-T\fR, \fB\-\-threads\fR
Number of threads to use
.TP
\fB\-\-config\fR=\fIFILE\fR
Specify a config file to use
.TP
\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
Number of lines of output to show. (Default is terminal_height \- 10)
.TP
@@ -25,7 +22,7 @@ Number of lines of output to show. (Default is terminal_height \- 10)
Subdirectories will not have their path shortened
.TP
\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
Exclude any file or directory with this path
Exclude any file or directory with this name
.TP
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
@@ -106,28 +103,6 @@ Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error:
\fB\-j\fR, \fB\-\-output\-json\fR
Output the directory tree as json to the current directory
.TP
\fB\-M\fR, \fB\-\-mtime\fR
+/\-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and \-n => (𝑐𝑢𝑟𝑟𝑛, +∞)
.TP
\fB\-A\fR, \fB\-\-atime\fR
just like \-mtime, but based on file access time
.TP
\fB\-y\fR, \fB\-\-ctime\fR
just like \-mtime, but based on file change time
.TP
\fB\-\-files0\-from\fR
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
.TP
\fB\-\-collapse\fR
Keep these directories collapsed
.TP
\fB\-m\fR, \fB\-\-filetime\fR
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
.br
.br
[\fIpossible values: \fRa, c, m]
.TP
\fB\-h\fR, \fB\-\-help\fR
Print help
.TP
@@ -137,4 +112,4 @@ Print version
[\fIPATH\fR]
.SH VERSION
v1.1.1
v1.0.0


@@ -24,15 +24,6 @@ pub fn build_cli() -> Command {
.help("Number of threads to use")
.num_args(1)
)
.arg(
Arg::new("config")
.long("config")
.help("Specify a config file to use")
.value_name("FILE")
.value_hint(clap::ValueHint::FilePath)
.value_parser(value_parser!(String))
.num_args(1)
)
.arg(
Arg::new("number_of_lines")
.short('n')
@@ -56,7 +47,7 @@ pub fn build_cli() -> Command {
.value_name("PATH")
.value_hint(clap::ValueHint::AnyPath)
.action(clap::ArgAction::Append)
.help("Exclude any file or directory with this path"),
.help("Exclude any file or directory with this name"),
)
.arg(
Arg::new("ignore_all_in_file")
@@ -268,59 +259,4 @@ pub fn build_cli() -> Command {
.action(clap::ArgAction::SetTrue)
.help("Output the directory tree as json to the current directory"),
)
.arg(
Arg::new("mtime")
.short('M')
.long("mtime")
.num_args(1)
.allow_hyphen_values(true)
.value_parser(value_parser!(String))
.help("+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)")
)
.arg(
Arg::new("atime")
.short('A')
.long("atime")
.num_args(1)
.allow_hyphen_values(true)
.value_parser(value_parser!(String))
.help("just like -mtime, but based on file access time")
)
.arg(
Arg::new("ctime")
.short('y')
.long("ctime")
.num_args(1)
.allow_hyphen_values(true)
.value_parser(value_parser!(String))
.help("just like -mtime, but based on file change time")
)
.arg(
Arg::new("files0_from")
.long("files0-from")
.value_hint(clap::ValueHint::AnyPath)
.value_parser(value_parser!(String))
.num_args(1)
.help("run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input"),
)
.arg(
Arg::new("collapse")
.long("collapse")
.value_hint(clap::ValueHint::AnyPath)
.value_parser(value_parser!(String))
.action(clap::ArgAction::Append)
.help("Keep these directories collapsed"),
)
.arg(
Arg::new("filetime")
.short('m')
.long("filetime")
.num_args(1)
.value_parser([
PossibleValue::new("a").alias("accessed"),
PossibleValue::new("c").alias("changed"),
PossibleValue::new("m").alias("modified"),
])
.help("Directory 'size' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time"),
)
}


@@ -1,5 +1,3 @@
use crate::node::FileTime;
use chrono::{Local, TimeZone};
use clap::ArgMatches;
use config_file::FromConfigFile;
use regex::Regex;
@@ -8,13 +6,11 @@ use std::io::IsTerminal;
use std::path::Path;
use std::path::PathBuf;
use crate::dir_walker::Operator;
use crate::display::get_number_format;
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
#[derive(Deserialize, Default)]
#[serde(rename_all = "kebab-case")]
#[serde(deny_unknown_fields)]
pub struct Config {
pub display_full_paths: Option<bool>,
pub display_apparent_size: Option<bool>,
@@ -36,17 +32,9 @@ pub struct Config {
pub threads: Option<usize>,
pub output_json: Option<bool>,
pub print_errors: Option<bool>,
pub files0_from: Option<String>,
}
impl Config {
pub fn get_files_from(&self, options: &ArgMatches) -> Option<String> {
let from_file = options.get_one::<String>("files0_from");
match from_file {
None => self.files0_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
}
pub fn get_no_colors(&self, options: &ArgMatches) -> bool {
Some(true) == self.no_colors || options.get_flag("no_colors")
}
@@ -84,20 +72,6 @@ impl Config {
})
.to_lowercase()
}
pub fn get_filetime(&self, options: &ArgMatches) -> Option<FileTime> {
let out_fmt = options.get_one::<String>("filetime");
match out_fmt {
None => None,
Some(x) => match x.as_str() {
"m" | "modified" => Some(FileTime::Modified),
"a" | "accessed" => Some(FileTime::Accessed),
"c" | "changed" => Some(FileTime::Changed),
_ => unreachable!(),
},
}
}
pub fn get_skip_total(&self, options: &ArgMatches) -> bool {
Some(true) == self.skip_total || options.get_flag("skip_total")
}
@@ -158,61 +132,6 @@ impl Config {
pub fn get_output_json(&self, options: &ArgMatches) -> bool {
Some(true) == self.output_json || options.get_flag("output_json")
}
pub fn get_modified_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("mtime"),
get_current_date_epoch_seconds(),
)
}
pub fn get_accessed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("atime"),
get_current_date_epoch_seconds(),
)
}
pub fn get_changed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("ctime"),
get_current_date_epoch_seconds(),
)
}
}
fn get_current_date_epoch_seconds() -> i64 {
// calculate current date epoch seconds
let now = Local::now();
let current_date = now.date_naive();
let current_date_time = current_date.and_hms_opt(0, 0, 0).unwrap();
Local
.from_local_datetime(&current_date_time)
.unwrap()
.timestamp()
}
fn get_filter_time_operator(
option_value: Option<&String>,
current_date_epoch_seconds: i64,
) -> Option<(Operator, i64)> {
match option_value {
Some(val) => {
let time = current_date_epoch_seconds
- val
.parse::<i64>()
.unwrap_or_else(|_| panic!("invalid data format"))
.abs()
* DAY_SECONDS;
match val.chars().next().expect("Value should not be empty") {
'+' => Some((Operator::LessThan, time - DAY_SECONDS)),
'-' => Some((Operator::GreaterThan, time)),
_ => Some((Operator::Equal, time - DAY_SECONDS)),
}
}
None => None,
}
}
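The helpers removed above convert an `--mtime`/`--atime`/`--ctime` argument such as `+3`, `-3`, or `3` into a comparison operator plus an epoch-second threshold measured from today's local midnight. A self-contained sketch of that mapping (hypothetical free function; `Operator` mirrors the enum shown in the `dir_walker.rs` hunk below, and invalid input returns `None` here instead of panicking as in the code above):

```rust
const DAY_SECONDS: i64 = 24 * 60 * 60;

#[derive(Debug, PartialEq)]
enum Operator {
    Equal,
    LessThan,
    GreaterThan,
}

/// `now_midnight` is the epoch second of today's local 00:00,
/// `val` is the raw CLI value ("+3", "-3" or "3").
fn filter_time_operator(val: &str, now_midnight: i64) -> Option<(Operator, i64)> {
    let days = val.parse::<i64>().ok()?.abs();
    let time = now_midnight - days * DAY_SECONDS;
    match val.chars().next()? {
        // +n: strictly older than n whole days
        '+' => Some((Operator::LessThan, time - DAY_SECONDS)),
        // -n: modified within the last n days
        '-' => Some((Operator::GreaterThan, time)),
        // n: exactly n days ago, rounded down to whole days
        _ => Some((Operator::Equal, time - DAY_SECONDS)),
    }
}

fn main() {
    let midnight = 1_700_000_000; // hypothetical "today 00:00" epoch second
    assert_eq!(
        filter_time_operator("+3", midnight),
        Some((Operator::LessThan, midnight - 4 * DAY_SECONDS))
    );
    assert_eq!(
        filter_time_operator("-3", midnight),
        Some((Operator::GreaterThan, midnight - 3 * DAY_SECONDS))
    );
}
```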
fn convert_min_size(input: &str) -> Option<usize> {
@@ -230,7 +149,7 @@ fn convert_min_size(input: &str) -> Option<usize> {
match number_format {
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
None => {
if letters.is_empty() {
if letters.eq("") {
Some(parsed_digits)
} else {
eprintln!("Ignoring invalid min-size: {input}");
@@ -253,29 +172,12 @@ fn get_config_locations(base: &Path) -> Vec<PathBuf> {
]
}
pub fn get_config(conf_path: Option<String>) -> Config {
match conf_path {
Some(path_str) => {
let path = Path::new(&path_str);
pub fn get_config() -> Config {
if let Some(home) = directories::BaseDirs::new() {
for path in get_config_locations(home.home_dir()) {
if path.exists() {
match Config::from_config_file(path) {
Ok(config) => return config,
Err(e) => {
eprintln!("Ignoring invalid config file '{}': {}", &path.display(), e)
}
}
} else {
eprintln!("Config file {:?} doesn't exists", &path.display());
}
}
None => {
if let Some(home) = directories::BaseDirs::new() {
for path in get_config_locations(home.home_dir()) {
if path.exists() {
if let Ok(config) = Config::from_config_file(&path) {
return config;
}
}
if let Ok(config) = Config::from_config_file(path) {
return config;
}
}
}
@@ -289,23 +191,8 @@ pub fn get_config(conf_path: Option<String>) -> Config {
mod tests {
#[allow(unused_imports)]
use super::*;
use chrono::{Datelike, Timelike};
use clap::builder::PossibleValue;
use clap::{value_parser, Arg, ArgMatches, Command};
#[test]
fn test_get_current_date_epoch_seconds() {
let epoch_seconds = get_current_date_epoch_seconds();
let dt = Local.timestamp_opt(epoch_seconds, 0).unwrap();
assert_eq!(dt.hour(), 0);
assert_eq!(dt.minute(), 0);
assert_eq!(dt.second(), 0);
assert_eq!(dt.date_naive().day(), Local::now().date_naive().day());
assert_eq!(dt.date_naive().month(), Local::now().date_naive().month());
assert_eq!(dt.date_naive().year(), Local::now().date_naive().year());
}
#[test]
fn test_conversion() {
assert_eq!(convert_min_size("55"), Some(55));
@@ -370,56 +257,4 @@ mod tests {
)
.get_matches_from(args)
}
#[test]
fn test_get_filetime() {
// No config and no flag.
let c = Config::default();
let args = get_filetime_args(vec!["dust"]);
assert_eq!(c.get_filetime(&args), None);
// Config is not defined and flag is defined as access time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "a"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "accessed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
// Config is not defined and flag is defined as modified time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "m"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "modified"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
// Config is not defined and flag is defined as changed time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "c"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "changed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
}
fn get_filetime_args(args: Vec<&str>) -> ArgMatches {
Command::new("Dust")
.arg(
Arg::new("filetime")
.short('m')
.long("filetime")
.num_args(1)
.value_parser([
PossibleValue::new("a").alias("accessed"),
PossibleValue::new("c").alias("changed"),
PossibleValue::new("m").alias("modified"),
])
.help("Directory 'size' is the maximum filetime of child files instead of disk size; a/c/m select accessed/changed/modified time"),
)
.get_matches_from(args)
}
}

View File

@@ -1,6 +1,4 @@
use std::cmp::Ordering;
use std::fs;
use std::io::Error;
use std::sync::Arc;
use std::sync::Mutex;
@@ -9,13 +7,11 @@ use crate::progress::Operation;
use crate::progress::PAtomicInfo;
use crate::progress::RuntimeErrors;
use crate::progress::ORDERING;
use crate::utils::is_filtered_out_due_to_file_time;
use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex;
use rayon::iter::ParallelBridge;
use rayon::prelude::ParallelIterator;
use regex::Regex;
use std::path::Path;
use std::path::PathBuf;
use std::collections::HashSet;
@@ -23,27 +19,14 @@ use std::collections::HashSet;
use crate::node::build_node;
use std::fs::DirEntry;
use crate::node::FileTime;
use crate::platform::get_metadata;
#[derive(Debug)]
pub enum Operator {
Equal = 0,
LessThan = 1,
GreaterThan = 2,
}
pub struct WalkData<'a> {
pub ignore_directories: HashSet<PathBuf>,
pub filter_regex: &'a [Regex],
pub invert_filter_regex: &'a [Regex],
pub allowed_filesystems: HashSet<u64>,
pub filter_modified_time: Option<(Operator, i64)>,
pub filter_accessed_time: Option<(Operator, i64)>,
pub filter_changed_time: Option<(Operator, i64)>,
pub use_apparent_size: bool,
pub by_filecount: bool,
pub by_filetime: &'a Option<FileTime>,
pub ignore_hidden: bool,
pub follow_links: bool,
pub progress_data: Arc<PAtomicInfo>,
@@ -61,15 +44,19 @@ pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
prog_data.state.store(Operation::PREPARING, ORDERING);
clean_inodes(node, &mut inodes, walk_data)
clean_inodes(node, &mut inodes, walk_data.use_apparent_size)
})
.collect();
top_level_nodes
}
// Remove files which have the same inode, so we don't double count them.
fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
if !walk_data.use_apparent_size {
fn clean_inodes(
x: Node,
inodes: &mut HashSet<(u64, u64)>,
use_apparent_size: bool,
) -> Option<Node> {
if !use_apparent_size {
if let Some(id) = x.inode_device {
if !inodes.insert(id) {
return None;
@@ -82,25 +69,12 @@ fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData)
tmp.sort_by(sort_by_inode);
let new_children: Vec<_> = tmp
.into_iter()
.filter_map(|c| clean_inodes(c, inodes, walk_data))
.filter_map(|c| clean_inodes(c, inodes, use_apparent_size))
.collect();
let actual_size = if walk_data.by_filetime.is_some() {
// If by_filetime is Some, directory 'size' is the maximum filetime among child files instead of disk size
new_children
.iter()
.map(|c| c.size)
.chain(std::iter::once(x.size))
.max()
.unwrap_or(0)
} else {
// If by_filetime is None, directory 'size' is the sum of disk sizes or file counts of child files
x.size + new_children.iter().map(|c| c.size).sum::<u64>()
};
Some(Node {
name: x.name,
size: actual_size,
size: x.size + new_children.iter().map(|c| c.size).sum::<u64>(),
children: new_children,
inode_device: x.inode_device,
depth: x.depth,
@@ -109,56 +83,31 @@ fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData)
fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
// Sorting by inode is quicker than sorting by name/size
match (a.inode_device, b.inode_device) {
(Some(x), Some(y)) => {
if let Some(x) = a.inode_device {
if let Some(y) = b.inode_device {
if x.0 != y.0 {
x.0.cmp(&y.0)
return x.0.cmp(&y.0);
} else if x.1 != y.1 {
x.1.cmp(&y.1)
} else {
a.name.cmp(&b.name)
return x.1.cmp(&y.1);
}
}
(Some(_), None) => Ordering::Greater,
(None, Some(_)) => Ordering::Less,
(None, None) => a.name.cmp(&b.name),
}
a.name.cmp(&b.name)
}
fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.');
let is_ignored_path = walk_data.ignore_directories.contains(&entry.path());
let follow_links = walk_data.follow_links && entry.file_type().is_ok_and(|ft| ft.is_symlink());
if !walk_data.allowed_filesystems.is_empty() {
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
let size_inode_device = get_metadata(&entry.path(), false);
if let Some((_size, Some((_id, dev)))) = size_inode_device {
if !walk_data.allowed_filesystems.contains(&dev) {
return true;
}
}
}
if walk_data.filter_accessed_time.is_some()
|| walk_data.filter_modified_time.is_some()
|| walk_data.filter_changed_time.is_some()
{
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
if entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
}
}
}
// Keeping `walk_data.filter_regex.is_empty()` is important for performance; it avoids unnecessary work
if !walk_data.filter_regex.is_empty()
@@ -181,7 +130,6 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
let prog_data = &walk_data.progress_data;
let errors = &walk_data.errors;
if errors.lock().unwrap().abort {
return None;
}
@@ -213,10 +161,13 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
let node = build_node(
entry.path(),
vec![],
walk_data.filter_regex,
walk_data.invert_filter_regex,
walk_data.use_apparent_size,
data.is_symlink(),
data.is_file(),
walk_data.by_filecount,
depth,
walk_data,
);
prog_data.num_files.fetch_add(1, ORDERING);
@@ -231,9 +182,8 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
}
}
Err(ref failed) => {
if handle_error_and_retry(failed, &dir, walk_data) {
return walk(dir.clone(), walk_data, depth);
}
let mut editable_error = errors.lock().unwrap();
editable_error.no_permissions.insert(failed.to_string());
}
}
None
@@ -241,11 +191,21 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
.collect()
}
Err(failed) => {
if handle_error_and_retry(&failed, &dir, walk_data) {
return walk(dir, walk_data, depth);
} else {
vec![]
let mut editable_error = errors.lock().unwrap();
match failed.kind() {
std::io::ErrorKind::PermissionDenied => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
}
vec![]
}
}
} else {
@@ -256,51 +216,20 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
}
vec![]
};
let is_symlink = if walk_data.follow_links {
match fs::symlink_metadata(&dir) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
} else {
false
};
build_node(dir, children, is_symlink, false, depth, walk_data)
}
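// Records the I/O error in walk_data.errors and returns true only for an
// Interrupted error (retried up to three times before aborting), in which case
// the caller re-runs the failed read.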
fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
let mut editable_error = walk_data.errors.lock().unwrap();
match failed.kind() {
std::io::ErrorKind::PermissionDenied => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::InvalidInput => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
std::io::ErrorKind::Interrupted => {
editable_error.interrupted_error += 1;
if editable_error.interrupted_error > 3 {
panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
} else {
return true;
}
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
}
false
build_node(
dir,
children,
walk_data.filter_regex,
walk_data.invert_filter_regex,
walk_data.use_apparent_size,
false,
false,
walk_data.by_filecount,
depth,
)
}
mod tests {
#[allow(unused_imports)]
use super::*;
@@ -315,43 +244,17 @@ mod tests {
}
}
#[cfg(test)]
fn create_walker<'a>(use_apparent_size: bool) -> WalkData<'a> {
use crate::PIndicator;
let indicator = PIndicator::build_me();
WalkData {
ignore_directories: HashSet::new(),
filter_regex: &[],
invert_filter_regex: &[],
allowed_filesystems: HashSet::new(),
filter_modified_time: Some((Operator::GreaterThan, 0)),
filter_accessed_time: Some((Operator::GreaterThan, 0)),
filter_changed_time: Some((Operator::GreaterThan, 0)),
use_apparent_size,
by_filecount: false,
by_filetime: &None,
ignore_hidden: false,
follow_links: false,
progress_data: indicator.data.clone(),
errors: Arc::new(Mutex::new(RuntimeErrors::default())),
}
}
#[test]
#[allow(clippy::redundant_clone)]
fn test_should_ignore_file() {
let mut inodes = HashSet::new();
let n = create_node();
let walkdata = create_walker(false);
// First time we insert the node
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), Some(n.clone()));
// Second time is a duplicate - we ignore it
assert_eq!(clean_inodes(n.clone(), &mut inodes, &walkdata), None);
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), None);
}
#[test]
@@ -359,53 +262,9 @@ mod tests {
fn test_should_not_ignore_files_if_using_apparent_size() {
let mut inodes = HashSet::new();
let n = create_node();
let walkdata = create_walker(true);
// If using apparent size we include Nodes, even if duplicate inodes
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
}
#[test]
fn test_total_ordering_of_sort_by_inode() {
use std::str::FromStr;
let a = Node {
name: PathBuf::from_str("a").unwrap(),
size: 0,
children: vec![],
inode_device: Some((3, 66310)),
depth: 0,
};
let b = Node {
name: PathBuf::from_str("b").unwrap(),
size: 0,
children: vec![],
inode_device: None,
depth: 0,
};
let c = Node {
name: PathBuf::from_str("c").unwrap(),
size: 0,
children: vec![],
inode_device: Some((1, 66310)),
depth: 0,
};
assert_eq!(sort_by_inode(&a, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&a, &c), Ordering::Greater);
assert_eq!(sort_by_inode(&c, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&b, &a), Ordering::Less);
assert_eq!(sort_by_inode(&c, &a), Ordering::Less);
assert_eq!(sort_by_inode(&b, &c), Ordering::Less);
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
}
}

View File

@@ -1,5 +1,4 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use ansi_term::Colour::Red;
use lscolors::{LsColors, Style};
@@ -8,29 +7,21 @@ use unicode_width::UnicodeWidthStr;
use stfu8::encode_u8;
use chrono::{DateTime, Local, TimeZone, Utc};
use std::cmp::max;
use std::cmp::min;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::fs;
use std::hash::Hash;
use std::iter::repeat;
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use thousands::Separable;
pub static UNITS: [char; 5] = ['P', 'T', 'G', 'M', 'K'];
pub static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
const FILETIME_SHOW_LENGTH: usize = 19;
pub struct InitialDisplayData {
pub short_paths: bool,
pub is_reversed: bool,
pub colors_on: bool,
pub by_filecount: bool,
pub by_filetime: Option<FileTime>,
pub is_screen_reader: bool,
pub output_format: String,
pub bars_on_right: bool,
@@ -42,7 +33,6 @@ pub struct DisplayData {
pub base_size: u64,
pub longest_string_length: usize,
pub ls_colors: LsColors,
pub duplicate_names: HashMap<String, u32>,
}
impl DisplayData {
@@ -140,14 +130,17 @@ pub fn draw_it(
root_node: &DisplayNode,
skip_total: bool,
) {
let duplicate_names = check_for_dup_names(&root_node);
let biggest = match skip_total {
false => root_node,
true => root_node
.get_children_from_node(false)
.next()
.unwrap_or(root_node),
};
let num_chars_needed_on_left_most = if idd.by_filecount {
let max_size = root_node.size;
let max_size = biggest.size;
max_size.separate_with_commas().chars().count()
} else if idd.by_filetime.is_some() {
FILETIME_SHOW_LENGTH
} else {
find_biggest_size_str(root_node, &idd.output_format)
};
@@ -157,12 +150,10 @@ pub fn draw_it(
"Not enough terminal width"
);
// let duplicate_dir_names = find_duplicate_names(root_node, idd.short_paths);
let allowed_width = terminal_width - num_chars_needed_on_left_most - 2;
let num_indent_chars = 3;
let longest_string_length =
find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd, &duplicate_names);
find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd);
let max_bar_length = if no_percent_bars || longest_string_length + 7 >= allowed_width {
0
@@ -175,10 +166,9 @@ pub fn draw_it(
let display_data = DisplayData {
initial: idd,
num_chars_needed_on_left_most,
base_size: root_node.size,
base_size: biggest.size,
longest_string_length,
ls_colors: LsColors::from_env().unwrap_or_default(),
duplicate_names
};
let draw_data = DrawData {
indent: "".to_string(),
@@ -199,82 +189,6 @@ pub fn draw_it(
}
}
}
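// Work-in-progress helper: breadth-first walk of the display tree collecting
// names that occur more than once, mapping each duplicate to the depth at which
// it was seen so get_printable_name can append a parent-folder hint.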
fn check_for_dup_names(result:&DisplayNode) -> HashMap<String, u32> {
let mut names = HashMap::new();
let mut dup_names = HashMap::new();
// let empty = HashSet::new();
let mut results = VecDeque::new();
results.push_back((result, 0));
while !results.is_empty() {
let (current, level) = results.pop_front().unwrap();
let mut folders = current.name.iter().rev();
let mut s = String::new();
// Look at parent folder names - if they differ and we are printing them,
// we don't need the helper
for _ in 0..level {
s.push_str( &encode_u8(folders.next().unwrap().as_bytes()));
}
if names.contains_key(&s){
// TODO: compare s with names[s]
// and walk back until you find a difference.
dup_names.insert(s, level);
} else {
names.insert(s, vec![&current.name]);
}
current.children.iter().for_each(|node| {results.push_back((node, level+1));});
}
println!("{:?}", names);
println!("{:?}", dup_names);
dup_names
}
pub fn get_printable_name(node: &DisplayNode, short_paths: bool, dup_names: &HashMap<String, u32>) -> String {
let dir_name = &node.name;
let printable_name = {
if short_paths {
match dir_name.parent() {
Some(prefix) => match dir_name.strip_prefix(prefix) {
Ok(base) => base,
Err(_) => dir_name,
},
None => dir_name,
}
} else {
dir_name
}
};
let core = encode_u8(printable_name.display().to_string().as_bytes());
if dup_names.contains_key(&core) {
let level = dup_names[&core];
let mut folders = node.name.iter().rev();
folders.next();
let mut extra = VecDeque::new();
for _ in 0..level {
extra.push_back( encode_u8(folders.next().unwrap().as_bytes()) );
}
let h = extra.iter().fold(String::new(), |acc, entry| {
acc + entry
});
// let helper = extra.make_contiguous().iter().collect::<Vec<&String>>();
// let h = helper.join("/");
// let mut folders = dir_name.iter().rev(); //.next().next().unwrap();
// folders.next();
// let par = encode_u8(folders.next().unwrap().as_bytes());
format!("{core} ({h})")
} else {
core
}
}
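A hedged sketch of the disambiguation this enables (the path, the map contents and the expected string are illustrative only, and assume `encode_u8` passes plain ASCII through unchanged):

// If "src" was flagged as a duplicate at level 1, its printable name gains the
// parent folder as a suffix.
let node = DisplayNode {
    name: std::path::PathBuf::from("repo/backend/src"),
    size: 0,
    children: vec![],
};
let mut dups = std::collections::HashMap::new();
dups.insert("src".to_string(), 1u32);
assert_eq!(get_printable_name(&node, true, &dups), "src (backend)");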
fn find_biggest_size_str(node: &DisplayNode, output_format: &str) -> usize {
let mut mx = human_readable_number(node.size, output_format)
@@ -291,9 +205,8 @@ fn find_longest_dir_name(
indent: usize,
terminal: usize,
idd: &InitialDisplayData,
dup_names: &HashMap<String, u32>,
) -> usize {
let printable_name = get_printable_name(&node, idd.short_paths, dup_names);
let printable_name = get_printable_name(&node.name, idd.short_paths);
let longest = if idd.is_screen_reader {
UnicodeWidthStr::width(&*printable_name) + 1
@@ -307,7 +220,7 @@ fn find_longest_dir_name(
// each non-root tree drawing is 2 more chars, hence we increment indent by 2
node.children
.iter()
.map(|c| find_longest_dir_name(c, indent + 2, terminal, idd, dup_names))
.map(|c| find_longest_dir_name(c, indent + 2, terminal, idd))
.fold(longest, max)
}
@@ -362,8 +275,26 @@ fn clean_indentation_string(s: &str) -> String {
is
}
fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
let dir_name = dir_name.as_ref();
let printable_name = {
if short_paths {
match dir_name.parent() {
Some(prefix) => match dir_name.strip_prefix(prefix) {
Ok(base) => base,
Err(_) => dir_name,
},
None => dir_name,
}
} else {
dir_name
}
};
encode_u8(printable_name.display().to_string().as_bytes())
}
fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &DisplayData) -> String {
let name = get_printable_name(&node, display_data.initial.short_paths, &display_data.duplicate_names);
let name = get_printable_name(&node.name, display_data.initial.short_paths);
let indent_and_name = format!("{indent} {name}");
let width = UnicodeWidthStr::width(&*indent_and_name);
@@ -411,8 +342,6 @@ pub fn format_string(
if display_data.initial.is_screen_reader {
// if screen_reader then bars is 'depth'
format!("{pretty_name} {bars} {pretty_size}{percent}")
} else if display_data.initial.by_filetime.is_some() {
format!("{pretty_size} {indent}{pretty_name}")
} else {
format!("{pretty_size} {indent} {pretty_name}{percent}")
}
@@ -438,7 +367,7 @@ fn get_name_percent(
let name_and_padding = pad_or_trim_filename(node, indent, display_data);
(percents, name_and_padding)
} else {
let n = get_printable_name(&node, display_data.initial.short_paths, &display_data.duplicate_names);
let n = get_printable_name(&node.name, display_data.initial.short_paths);
let name = maybe_trim_filename(n, indent, display_data);
("".into(), name)
}
@@ -447,8 +376,6 @@ fn get_name_percent(
fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
let output = if display_data.initial.by_filecount {
node.size.separate_with_commas()
} else if display_data.initial.by_filetime.is_some() {
get_pretty_file_modified_time(node.size as i64)
} else {
human_readable_number(node.size, &display_data.initial.output_format)
};
@@ -462,14 +389,6 @@ fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayD
}
}
fn get_pretty_file_modified_time(timestamp: i64) -> String {
let datetime: DateTime<Utc> = Utc.timestamp_opt(timestamp, 0).unwrap();
let local_datetime = datetime.with_timezone(&Local);
local_datetime.format("%Y-%m-%dT%H:%M:%S").to_string()
}
fn get_pretty_name(
node: &DisplayNode,
name_and_padding: String,
@@ -550,7 +469,6 @@ mod tests {
is_reversed: false,
colors_on: false,
by_filecount: false,
by_filetime: None,
is_screen_reader: false,
output_format: "".into(),
bars_on_right: false,
@@ -629,14 +547,6 @@ mod tests {
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 234, ""),
"234T"
);
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 1024, ""),
"1.0P"
);
}
#[test]
@@ -715,37 +625,4 @@ mod tests {
let bar = dd.generate_bar(&n, 5);
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
}
#[test]
fn test_get_pretty_file_modified_time() {
// Create a timestamp for 2023-07-12 00:00:00 in local time
let local_dt = Local.with_ymd_and_hms(2023, 7, 12, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
// Format expected output
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test another timestamp
let local_dt = Local.with_ymd_and_hms(2020, 1, 1, 12, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test timestamp for epoch start (1970-01-01T00:00:00)
let local_dt = Local.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test a future timestamp
let local_dt = Local.with_ymd_and_hms(2030, 12, 25, 6, 30, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
}
}

View File

@@ -1,12 +1,8 @@
use std::cell::RefCell;
use std::path::PathBuf;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
use serde::Serialize;
use crate::display::human_readable_number;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Serialize)]
pub struct DisplayNode {
// Note: the order of fields is important here, for PartialEq and PartialOrd
pub size: u64,
@@ -29,30 +25,3 @@ impl DisplayNode {
out
}
}
// Only used for -j 'json' flag combined with -o 'output_type' flag
// Used to pass the output_type into the custom Serde serializer
thread_local! {
pub static OUTPUT_TYPE: RefCell<String> = const { RefCell::new(String::new()) };
}
/*
We need the custom Serialize in case someone uses the -o flag to pass in a custom output type
(show size in Mb / Gb etc.).
Sadly this also necessitates the global OUTPUT_TYPE variable, as we cannot pass the output_type flag
into the serialize method.
*/
impl Serialize for DisplayNode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let readable_size = OUTPUT_TYPE
.with(|output_type| human_readable_number(self.size, output_type.borrow().as_str()));
let mut state = serializer.serialize_struct("DisplayNode", 2)?;
state.serialize_field("size", &(readable_size))?;
state.serialize_field("name", &self.name)?;
state.serialize_field("children", &self.children)?;
state.end()
}
}
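A minimal usage sketch of this serializer, mirroring how main.rs sets the thread-local before serializing (the node and the printed output are illustrative; an empty format string is the default binary-unit format used in the tests):

// Choose the size format first, then serialize; the Serialize impl above reads
// OUTPUT_TYPE and renders `size` through human_readable_number.
OUTPUT_TYPE.with(|wrapped| {
    wrapped.replace(String::new()); // a value from the -o flag would go here
});
let node = DisplayNode {
    name: std::path::PathBuf::from("src"),
    size: 20 * 1024 * 1024 * 1024,
    children: vec![],
};
// Prints something like: {"size":"20G","name":"src","children":[]}
println!("{}", serde_json::to_string(&node).unwrap());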

View File

@@ -1,9 +1,7 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node;
use std::collections::BinaryHeap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
@@ -16,12 +14,7 @@ pub struct AggregateData {
pub using_a_filter: bool,
}
pub fn get_biggest(
mut top_level_nodes: Vec<Node>,
display_data: AggregateData,
by_filetime: &Option<FileTime>,
keep_collapsed: HashSet<PathBuf>,
) -> Option<DisplayNode> {
pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> Option<DisplayNode> {
if top_level_nodes.is_empty() {
// perhaps change this, bring back Error object?
return None;
@@ -31,16 +24,7 @@ pub fn get_biggest(
let root;
if number_top_level_nodes > 1 {
let size = if by_filetime.is_some() {
top_level_nodes
.iter()
.map(|node| node.size)
.max()
.unwrap_or(0)
} else {
top_level_nodes.iter().map(|node| node.size).sum()
};
let size = top_level_nodes.iter().map(|node| node.size).sum();
root = Node {
name: PathBuf::from("(total)"),
size,
@@ -48,7 +32,6 @@ pub fn get_biggest(
inode_device: None,
depth: 0,
};
// Always include the base nodes if we add a 'parent' (total) node
heap = always_add_children(&display_data, &root, heap);
} else {
@@ -56,20 +39,13 @@ pub fn get_biggest(
heap = add_children(&display_data, &root, heap);
}
let result = fill_remaining_lines(
heap,
&root,
display_data,
keep_collapsed,
);
Some(result)
Some(fill_remaining_lines(heap, &root, display_data))
}
pub fn fill_remaining_lines<'a>(
mut heap: BinaryHeap<&'a Node>,
root: &'a Node,
display_data: AggregateData,
keep_collapsed: HashSet<PathBuf>,
) -> DisplayNode {
let mut allowed_nodes = HashMap::new();
@@ -80,9 +56,7 @@ pub fn fill_remaining_lines<'a>(
if !display_data.only_file || line.children.is_empty() {
allowed_nodes.insert(line.name.as_path(), line);
}
if !keep_collapsed.contains(&line.name) {
heap = add_children(&display_data, line, heap);
}
heap = add_children(&display_data, line, heap);
}
None => break,
}
@@ -140,7 +114,7 @@ fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) ->
.map(|c| recursive_rebuilder(allowed_nodes, c))
.collect();
build_display_node(new_children, current)
build_node(new_children, current)
}
// Applies all allowed nodes as children to current node
@@ -153,14 +127,13 @@ fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> Displ
children: vec![],
})
.collect::<Vec<DisplayNode>>();
build_display_node(new_children, current)
build_node(new_children, current)
}
fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
fn build_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
// println!("{:?}", current.name);
DisplayNode {
name: PathBuf::from(current.name.display().to_string()),
name: current.name.clone(),
size: current.size,
children: new_children,
}

View File

@@ -1,5 +1,4 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node;
use std::collections::HashMap;
use std::ffi::OsStr;
@@ -11,11 +10,7 @@ struct ExtensionNode<'a> {
extension: Option<&'a OsStr>,
}
pub fn get_all_file_types(
top_level_nodes: &[Node],
n: usize,
by_filetime: &Option<FileTime>,
) -> Option<DisplayNode> {
pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayNode> {
let ext_nodes = {
let mut extension_cumulative_sizes = HashMap::new();
build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
@@ -49,27 +44,16 @@ pub fn get_all_file_types(
// ...then, aggregate the remaining nodes (if any) into a single "(others)" node
if ext_nodes_iter.len() > 0 {
let actual_size = if by_filetime.is_some() {
ext_nodes_iter.map(|node| node.size).max().unwrap_or(0)
} else {
ext_nodes_iter.map(|node| node.size).sum()
};
displayed.push(DisplayNode {
name: PathBuf::from("(others)"),
size: actual_size,
size: ext_nodes_iter.map(|node| node.size).sum(),
children: vec![],
});
}
let actual_size: u64 = if by_filetime.is_some() {
displayed.iter().map(|node| node.size).max().unwrap_or(0)
} else {
displayed.iter().map(|node| node.size).sum()
};
let result = DisplayNode {
name: PathBuf::from("(total)"),
size: actual_size,
size: displayed.iter().map(|node| node.size).sum(),
children: displayed,
};

View File

@@ -21,11 +21,8 @@ use regex::Error;
use std::collections::HashSet;
use std::env;
use std::fs::read_to_string;
use std::io;
use std::panic;
use std::process;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::sync::Mutex;
use sysinfo::{System, SystemExt};
@@ -33,7 +30,6 @@ use sysinfo::{System, SystemExt};
use self::display::draw_it;
use config::get_config;
use dir_walker::walk_it;
use display_node::OUTPUT_TYPE;
use filter::get_biggest;
use filter_type::get_all_file_types;
use regex::Regex;
@@ -82,19 +78,23 @@ fn should_init_color(no_color: bool, force_color: bool) -> bool {
}
fn get_height_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size()
// Windows CI runners detect a terminal height of 0
.map(|(_, Height(h))| max(h.into(), DEFAULT_NUMBER_OF_LINES))
.map(|(_, Height(h))| max(h as usize, DEFAULT_NUMBER_OF_LINES))
.unwrap_or(DEFAULT_NUMBER_OF_LINES)
- 10
}
fn get_width_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size()
.map(|(Width(w), _)| match cfg!(windows) {
// Windows CI runners detect a very low terminal width
true => max(w.into(), DEFAULT_TERMINAL_WIDTH),
false => w.into(),
true => max(w as usize, DEFAULT_TERMINAL_WIDTH),
false => w as usize,
})
.unwrap_or(DEFAULT_TERMINAL_WIDTH)
}
@@ -113,55 +113,23 @@ fn get_regex_value(maybe_value: Option<ValuesRef<String>>) -> Vec<Regex> {
fn main() {
let options = build_cli().get_matches();
let config = get_config(options.get_one::<String>("config").cloned());
let config = get_config();
let errors = RuntimeErrors::default();
let error_listen_for_ctrlc = Arc::new(Mutex::new(errors));
let errors_for_rayon = error_listen_for_ctrlc.clone();
let errors_final = error_listen_for_ctrlc.clone();
let is_in_listing = Arc::new(AtomicBool::new(false));
let cloned_is_in_listing = Arc::clone(&is_in_listing);
ctrlc::set_handler(move || {
error_listen_for_ctrlc.lock().unwrap().abort = true;
println!("\nAborting");
if cloned_is_in_listing.load(Ordering::Relaxed) {
process::exit(1);
}
})
.expect("Error setting Ctrl-C handler");
is_in_listing.store(true, Ordering::Relaxed);
let target_dirs = match config.get_files_from(&options) {
Some(path) => {
if path == "-" {
let mut targets_to_add = io::stdin()
.lines()
.map_while(Result::ok)
.collect::<Vec<String>>();
if targets_to_add.is_empty() {
eprintln!("No input provided, defaulting to current directory");
targets_to_add.push(".".to_owned());
}
targets_to_add
} else {
// read file
match read_to_string(path) {
Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
Err(e) => {
eprintln!("Error reading file: {e}");
vec![".".to_owned()]
}
}
}
}
None => match options.get_many::<String>("params") {
Some(values) => values.cloned().collect(),
None => vec![".".to_owned()],
},
let target_dirs = match options.get_many::<String>("params") {
Some(values) => values.map(|v| v.as_str()).collect::<Vec<&str>>(),
None => vec!["."],
};
is_in_listing.store(false, Ordering::Relaxed);
let summarize_file_types = options.get_flag("types");
@@ -221,14 +189,13 @@ fn main() {
.collect::<Vec<Regex>>();
let by_filecount = options.get_flag("by_filecount");
let by_filetime = config.get_filetime(&options);
let limit_filesystem = options.get_flag("limit_filesystem");
let follow_links = options.get_flag("dereference_links");
let simplified_dirs = simplify_dir_names(target_dirs);
let allowed_filesystems = limit_filesystem
.then(|| get_filesystem_devices(&target_dirs, follow_links))
.then(|| get_filesystem_devices(simplified_dirs.iter()))
.unwrap_or_default();
let simplified_dirs = simplify_dir_names(&target_dirs);
let ignored_full_path: HashSet<PathBuf> = ignore_directories
.into_iter()
@@ -244,34 +211,13 @@ fn main() {
indicator.spawn(output_format.clone())
}
let keep_collapsed: HashSet<PathBuf> = match options.get_many::<String>("collapse") {
Some(collapse) => {
let mut combined_dirs = HashSet::new();
for collapse_dir in collapse {
for target_dir in target_dirs.iter() {
combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
}
}
combined_dirs
}
None => HashSet::new(),
};
let filter_modified_time = config.get_modified_time_operator(&options);
let filter_accessed_time = config.get_accessed_time_operator(&options);
let filter_changed_time = config.get_changed_time_operator(&options);
let walk_data = WalkData {
ignore_directories: ignored_full_path,
filter_regex: &filter_regexs,
invert_filter_regex: &invert_filter_regexs,
allowed_filesystems,
filter_modified_time,
filter_accessed_time,
filter_changed_time,
use_apparent_size: config.get_apparent_size(&options),
by_filecount,
by_filetime: &by_filetime,
ignore_hidden,
follow_links,
progress_data: indicator.data.clone(),
@@ -284,7 +230,7 @@ fn main() {
let top_level_nodes = walk_it(simplified_dirs, &walk_data);
let tree = match summarize_file_types {
true => get_all_file_types(&top_level_nodes, number_of_lines, &by_filetime),
true => get_all_file_types(&top_level_nodes, number_of_lines),
false => {
let agg_data = AggregateData {
min_size: config.get_min_size(&options),
@@ -294,7 +240,7 @@ fn main() {
depth,
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
};
get_biggest(top_level_nodes, agg_data, &by_filetime, keep_collapsed)
get_biggest(top_level_nodes, agg_data)
}
};
@@ -341,23 +287,19 @@ fn main() {
}
if let Some(root_node) = tree {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
if config.get_output_json(&options) {
OUTPUT_TYPE.with(|wrapped| {
wrapped.replace(output_format);
});
println!("{}", serde_json::to_string(&root_node).unwrap());
} else {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
by_filetime,
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
draw_it(
idd,
config.get_no_bars(&options),

View File

@@ -1,9 +1,8 @@
use crate::dir_walker::WalkData;
use crate::platform::get_metadata;
use crate::utils::is_filtered_out_due_to_file_time;
use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex;
use regex::Regex;
use std::cmp::Ordering;
use std::path::PathBuf;
@@ -16,56 +15,33 @@ pub struct Node {
pub depth: usize,
}
#[derive(Debug, PartialEq)]
pub enum FileTime {
Modified,
Accessed,
Changed,
}
#[allow(clippy::too_many_arguments)]
pub fn build_node(
dir: PathBuf,
children: Vec<Node>,
filter_regex: &[Regex],
invert_filter_regex: &[Regex],
use_apparent_size: bool,
is_symlink: bool,
is_file: bool,
by_filecount: bool,
depth: usize,
walk_data: &WalkData,
) -> Option<Node> {
let use_apparent_size = walk_data.use_apparent_size;
let by_filecount = walk_data.by_filecount;
let by_filetime = &walk_data.by_filetime;
get_metadata(&dir, use_apparent_size).map(|data| {
let inode_device = if is_symlink && !use_apparent_size {
None
} else {
data.1
};
get_metadata(
&dir,
use_apparent_size,
walk_data.follow_links && is_symlink,
)
.map(|data| {
let inode_device = data.1;
let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
|| is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
let size = if is_filtered_out_due_to_regex(filter_regex, &dir)
|| is_filtered_out_due_to_invert_regex(invert_filter_regex, &dir)
|| (is_symlink && !use_apparent_size)
|| by_filecount && !is_file
|| [
(&walk_data.filter_modified_time, data.2 .0),
(&walk_data.filter_accessed_time, data.2 .1),
(&walk_data.filter_changed_time, data.2 .2),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
}) {
{
0
} else if by_filecount {
1
} else if by_filetime.is_some() {
match by_filetime {
Some(FileTime::Modified) => data.2 .0.unsigned_abs(),
Some(FileTime::Accessed) => data.2 .1.unsigned_abs(),
Some(FileTime::Changed) => data.2 .2.unsigned_abs(),
None => unreachable!(),
}
} else {
data.0
};

View File

@@ -10,35 +10,15 @@ fn get_block_size() -> u64 {
512
}
type InodeAndDevice = (u64, u64);
type FileTime = (i64, i64, i64);
#[cfg(target_family = "unix")]
pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
use std::os::unix::fs::MetadataExt;
let metadata = if follow_links {
path.as_ref().metadata()
} else {
path.as_ref().symlink_metadata()
};
match metadata {
match d.metadata() {
Ok(md) => {
if use_apparent_size {
Some((
md.len(),
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
Some((md.len(), Some((md.ino(), md.dev()))))
} else {
Some((
md.blocks() * get_block_size(),
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
Some((md.blocks() * get_block_size(), Some((md.ino(), md.dev()))))
}
}
Err(_e) => None,
@@ -46,11 +26,7 @@ pub fn get_metadata<P: AsRef<Path>>(
}
#[cfg(target_family = "windows")]
pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
// On Windows, opening the file to get size, file ID and volume can be very
// expensive because 1) it causes a few system calls, and more importantly 2) it can cause
// Windows Defender to scan the file.
@@ -89,7 +65,7 @@ pub fn get_metadata<P: AsRef<Path>>(
use std::io;
use winapi_util::Handle;
fn handle_from_path_limited(path: &Path) -> io::Result<Handle> {
fn handle_from_path_limited<P: AsRef<Path>>(path: P) -> io::Result<Handle> {
use std::fs::OpenOptions;
use std::os::windows::fs::OpenOptionsExt;
const FILE_READ_ATTRIBUTES: u32 = 0x0080;
@@ -115,46 +91,30 @@ pub fn get_metadata<P: AsRef<Path>>(
}
fn get_metadata_expensive(
path: &Path,
d: &Path,
use_apparent_size: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
) -> Option<(u64, Option<(u64, u64)>)> {
use winapi_util::file::information;
let h = handle_from_path_limited(path).ok()?;
let h = handle_from_path_limited(d).ok()?;
let info = information(&h).ok()?;
if use_apparent_size {
use filesize::PathExt;
Some((
path.size_on_disk().ok()?,
d.size_on_disk().ok()?,
Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
))
} else {
Some((
info.file_size(),
Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
))
}
}
use std::os::windows::fs::MetadataExt;
let path = path.as_ref();
let metadata = if follow_links {
path.metadata()
} else {
path.symlink_metadata()
};
match metadata {
match d.metadata() {
Ok(ref md) => {
const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
@@ -182,19 +142,11 @@ pub fn get_metadata<P: AsRef<Path>>(
|| md.file_attributes() == FILE_ATTRIBUTE_NORMAL)
&& !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size)
{
Some((
md.len(),
None,
(
md.last_write_time() as i64,
md.last_access_time() as i64,
md.creation_time() as i64,
),
))
Some((md.len(), None))
} else {
get_metadata_expensive(path, use_apparent_size)
get_metadata_expensive(d, use_apparent_size)
}
}
_ => get_metadata_expensive(path, use_apparent_size),
_ => get_metadata_expensive(d, use_apparent_size),
}
}
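A small hedged sketch of how the tuple returned by get_metadata is typically consumed (the path literal is illustrative; on success the tuple is (size, Some((inode, device)), (mtime, atime, ctime))):

// None is returned if the metadata could not be read.
if let Some((size, inode_device, (mtime, _atime, _ctime))) =
    get_metadata("Cargo.toml", false, false)
{
    println!("{size} bytes, inode/device {inode_device:?}, modified at {mtime}");
}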

View File

@@ -3,7 +3,7 @@ use std::{
io::Write,
path::Path,
sync::{
atomic::{AtomicU8, AtomicUsize, Ordering},
atomic::{AtomicU64, AtomicU8, AtomicUsize, Ordering},
mpsc::{self, RecvTimeoutError, Sender},
Arc, RwLock,
},
@@ -11,11 +11,6 @@ use std::{
time::Duration,
};
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
use crate::display::human_readable_number;
/* -------------------------------------------------------------------------- */
@@ -78,7 +73,6 @@ pub struct RuntimeErrors {
pub no_permissions: HashSet<String>,
pub file_not_found: HashSet<String>,
pub unknown_error: HashSet<String>,
pub interrupted_error: i32,
pub abort: bool,
}

View File

@@ -2,16 +2,13 @@ use platform::get_metadata;
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use crate::config::DAY_SECONDS;
use crate::dir_walker::Operator;
use crate::platform;
use regex::Regex;
pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(dirs.len());
pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf> {
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(filenames.len());
for t in dirs {
for t in filenames {
let top_level_name = normalize_path(t);
let mut can_add = true;
let mut to_remove: Vec<PathBuf> = Vec::new();
@@ -34,25 +31,13 @@ pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
top_level_names
}
pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P], follow_links: bool) -> HashSet<u64> {
use std::fs;
pub fn get_filesystem_devices<'a, P: IntoIterator<Item = &'a PathBuf>>(paths: P) -> HashSet<u64> {
// Gets the device ids for the filesystems which are used by the argument paths
paths
.iter()
.filter_map(|p| {
let follow_links = if follow_links {
// slow path: If dereference-links is set, then we check if the file is a symbolic link
match fs::symlink_metadata(p) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
} else {
false
};
match get_metadata(p, false, follow_links) {
Some((_size, Some((_id, dev)), _time)) => Some(dev),
_ => None,
}
.into_iter()
.filter_map(|p| match get_metadata(p, false) {
Some((_size, Some((_id, dev)))) => Some(dev),
_ => None,
})
.collect()
}
@@ -77,20 +62,6 @@ pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool
}
}
pub fn is_filtered_out_due_to_file_time(
filter_time: &Option<(Operator, i64)>,
actual_time: i64,
) -> bool {
match filter_time {
None => false,
Some((Operator::Equal, bound_time)) => {
!(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
}
Some((Operator::GreaterThan, bound_time)) => actual_time < *bound_time,
Some((Operator::LessThan, bound_time)) => actual_time > *bound_time,
}
}
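A brief hedged illustration of the Equal window above (assuming `DAY_SECONDS` is 86 400; the bound is an arbitrary epoch second):

// Equal keeps timestamps inside [bound, bound + DAY_SECONDS) and rejects the rest.
let same_day = Some((Operator::Equal, 1_000_000));
assert!(!is_filtered_out_due_to_file_time(&same_day, 1_000_000)); // start of that day: kept
assert!(!is_filtered_out_due_to_file_time(&same_day, 1_000_000 + 86_399)); // still that day: kept
assert!(is_filtered_out_due_to_file_time(&same_day, 1_000_000 + 86_400)); // next day: filtered out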
pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool {
filter_regex
.iter()
@@ -111,15 +82,15 @@ mod tests {
fn test_simplify_dir() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("a"));
assert_eq!(simplify_dir_names(&["a"]), correct);
assert_eq!(simplify_dir_names(vec!["a"]), correct);
}
#[test]
fn test_simplify_dir_rm_subdir() {
let mut correct = HashSet::new();
correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(&["a/b/c", "a/b", "a/b/d/f"]), correct);
assert_eq!(simplify_dir_names(&["a/b", "a/b/c", "a/b/d/f"]), correct);
assert_eq!(simplify_dir_names(vec!["a/b/c", "a/b", "a/b/d/f"]), correct);
assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct);
}
#[test]
@@ -128,7 +99,7 @@ mod tests {
correct.insert(["a", "b"].iter().collect::<PathBuf>());
correct.insert(PathBuf::from("c"));
assert_eq!(
simplify_dir_names(&[
simplify_dir_names(vec![
"a/b",
"a/b//",
"a/././b///",
@@ -147,14 +118,14 @@ mod tests {
correct.insert(PathBuf::from("b"));
correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(&["a/b", "c/a/b/", "b"]), correct);
assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct);
}
#[test]
fn test_simplify_dir_dots() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("src"));
assert_eq!(simplify_dir_names(&["src/."]), correct);
assert_eq!(simplify_dir_names(vec!["src/."]), correct);
}
#[test]
@@ -162,7 +133,7 @@ mod tests {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("src"));
correct.insert(PathBuf::from("src_v2"));
assert_eq!(simplify_dir_names(&["src/", "src_v2"]), correct);
assert_eq!(simplify_dir_names(vec!["src/", "src_v2"]), correct);
}
#[test]

View File

@@ -1,11 +1,10 @@
use assert_cmd::Command;
use std::ffi::OsStr;
use std::process::Output;
use std::str;
use std::sync::Once;
use std::{io, str};
static INIT: Once = Once::new();
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
/**
* This file contains tests that verify the exact output of the command.
@@ -35,31 +34,21 @@ fn copy_test_data(dir: &str) {
.map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err));
}
fn create_unreadable_directory() -> io::Result<()> {
#[cfg(unix)]
{
use std::fs;
use std::fs::Permissions;
use std::os::unix::fs::PermissionsExt;
fs::create_dir_all(UNREADABLE_DIR_PATH)?;
fs::set_permissions(UNREADABLE_DIR_PATH, Permissions::from_mode(0))?;
}
Ok(())
}
fn initialize() {
INIT.call_once(|| {
copy_test_data("tests/test_dir");
copy_test_data("tests/test_dir2");
copy_test_data("tests/test_dir_unicode");
if let Err(e) = create_unreadable_directory() {
panic!("Failed to create unreadable directory: {}", e);
}
Command::new("sh")
.arg("-c")
.arg("mkdir -p /tmp/unreadable_folder && chmod 000 /tmp/unreadable_folder")
.output()
.unwrap();
});
}
fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
fn run_cmd<T: AsRef<OsStr>>(command_args: Vec<T>) -> Output {
initialize();
let mut to_run = &mut Command::cargo_bin("dust").unwrap();
for p in command_args {
@@ -68,7 +57,7 @@ fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
to_run.unwrap()
}
fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<String>) {
fn exact_stdout_test<T: AsRef<OsStr>>(command_args: Vec<T>, valid_stdout: Vec<String>) {
let to_run = run_cmd(command_args);
let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned();
@@ -83,7 +72,7 @@ fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<Stri
assert!(will_fail);
}
fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String) {
fn exact_stderr_test<T: AsRef<OsStr>>(command_args: Vec<T>, valid_stderr: String) {
let to_run = run_cmd(command_args);
let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim();
@@ -95,20 +84,20 @@ fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String)
#[test]
pub fn test_main_basic() {
// -c is no color mode - This makes testing much simpler
exact_stdout_test(&["-c", "-B", "/tmp/test_dir/"], main_output());
exact_stdout_test(vec!["-c", "-B", "/tmp/test_dir/"], main_output());
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_main_multi_arg() {
let command_args = [
let command_args = vec![
"-c",
"-B",
"/tmp/test_dir/many/",
"/tmp/test_dir",
"/tmp/test_dir",
];
exact_stdout_test(&command_args, main_output());
exact_stdout_test(command_args, main_output());
}
fn main_output() -> Vec<String> {
@@ -138,8 +127,8 @@ fn main_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_main_long_paths() {
let command_args = ["-c", "-p", "-B", "/tmp/test_dir/"];
exact_stdout_test(&command_args, main_output_long_paths());
let command_args = vec!["-c", "-p", "-B", "/tmp/test_dir/"];
exact_stdout_test(command_args, main_output_long_paths());
}
fn main_output_long_paths() -> Vec<String> {
@@ -166,8 +155,8 @@ fn main_output_long_paths() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_substring_of_names_and_long_names() {
let command_args = ["-c", "-B", "/tmp/test_dir2"];
exact_stdout_test(&command_args, no_substring_of_names_output());
let command_args = vec!["-c", "-B", "/tmp/test_dir2"];
exact_stdout_test(command_args, no_substring_of_names_output());
}
fn no_substring_of_names_output() -> Vec<String> {
@@ -200,8 +189,8 @@ fn no_substring_of_names_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_unicode_directories() {
let command_args = ["-c", "-B", "/tmp/test_dir_unicode"];
exact_stdout_test(&command_args, unicode_dir());
let command_args = vec!["-c", "-B", "/tmp/test_dir_unicode"];
exact_stdout_test(command_args, unicode_dir());
}
fn unicode_dir() -> Vec<String> {
@@ -227,8 +216,8 @@ fn unicode_dir() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_apparent_size() {
let command_args = ["-c", "-s", "-b", "/tmp/test_dir"];
exact_stdout_test(&command_args, apparent_size_output());
let command_args = vec!["-c", "-s", "-b", "/tmp/test_dir"];
exact_stdout_test(command_args, apparent_size_output());
}
fn apparent_size_output() -> Vec<String> {
@@ -253,22 +242,21 @@ fn apparent_size_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_permission_normal() {
let command_args = [UNREADABLE_DIR_PATH];
let command_args = vec!["/tmp/unreadable_folder"];
let permission_msg =
r#"Did not have permissions for all directories (add --print-errors to see errors)"#
.trim()
.to_string();
exact_stderr_test(&command_args, permission_msg);
exact_stderr_test(command_args, permission_msg);
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_permission_flag() {
// add the flag to CLI
let command_args = ["--print-errors", UNREADABLE_DIR_PATH];
let permission_msg = format!(
"Did not have permissions for directories: {}",
UNREADABLE_DIR_PATH
);
exact_stderr_test(&command_args, permission_msg);
let command_args = vec!["--print-errors", "/tmp/unreadable_folder"];
let permission_msg = r#"Did not have permissions for directories: /tmp/unreadable_folder"#
.trim()
.to_string();
exact_stderr_test(command_args, permission_msg);
}

View File

@@ -254,10 +254,3 @@ pub fn test_force_color() {
assert!(output.contains("\x1B[31m"));
assert!(output.contains("\x1B[0m"));
}
#[test]
pub fn test_collapse() {
let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
assert!(output.contains("many"));
assert!(!output.contains("hello_file"));
}