mirror of
https://github.com/bootandy/dust.git
synced 2025-12-06 04:41:09 -08:00
Compare commits
444 Commits
v0.4.1.1
...
bye_bye_di
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8be6b2d9cd | ||
|
|
509d51e872 | ||
|
|
f98b841d23 | ||
|
|
67d23e80ff | ||
|
|
968377eebd | ||
|
|
96e04fe168 | ||
|
|
7974e2eaf0 | ||
|
|
14efddfd05 | ||
|
|
4e83421da6 | ||
|
|
901bc3895a | ||
|
|
3cce61f854 | ||
|
|
222cd83ff3 | ||
|
|
76b9f32859 | ||
|
|
17662e8ff1 | ||
|
|
9cc557cada | ||
|
|
81722b695d | ||
|
|
51dc167345 | ||
|
|
9b5f6d6c5a | ||
|
|
74ffd78901 | ||
|
|
9b2dc4655d | ||
|
|
29441eda19 | ||
|
|
e6f90362a7 | ||
|
|
702f0f0fe9 | ||
|
|
6a14d7e8b3 | ||
|
|
4e2d93f362 | ||
|
|
b616378ba0 | ||
|
|
646cdd976d | ||
|
|
9a49221ac1 | ||
|
|
1b4116e39d | ||
|
|
733abb2a3f | ||
|
|
dd799706fb | ||
|
|
b219981c52 | ||
|
|
c31468b199 | ||
|
|
28d409ea27 | ||
|
|
aa319e3599 | ||
|
|
c2a4c4573a | ||
|
|
d876cc28a7 | ||
|
|
137e366eca | ||
|
|
a962b80eec | ||
|
|
01c0aaeade | ||
|
|
6cbd736e11 | ||
|
|
8e087e09da | ||
|
|
9ba0b6d1d0 | ||
|
|
775d841840 | ||
|
|
609fc1e760 | ||
|
|
eeb686562d | ||
|
|
e0eaeccc0b | ||
|
|
2e56a261e0 | ||
|
|
bfe7323b20 | ||
|
|
1372815007 | ||
|
|
7c9e2f1833 | ||
|
|
1d40ca0870 | ||
|
|
86b2bd944c | ||
|
|
b63608604a | ||
|
|
b24fab720d | ||
|
|
d81b9065a1 | ||
|
|
38c4d23732 | ||
|
|
99bf0fc041 | ||
|
|
75d0566949 | ||
|
|
489d9ada44 | ||
|
|
f48fcc790a | ||
|
|
733117d0f6 | ||
|
|
dbd18f90e7 | ||
|
|
dad88ad660 | ||
|
|
00a7c410a0 | ||
|
|
1ab0b2f531 | ||
|
|
c09073151d | ||
|
|
b4a517a096 | ||
|
|
e654d30f9d | ||
|
|
4fc1897678 | ||
|
|
08b9c756ee | ||
|
|
394231683d | ||
|
|
a06a001886 | ||
|
|
fd9e97bcfa | ||
|
|
3ed95ee399 | ||
|
|
58c9f6d509 | ||
|
|
3f2f7a8bb2 | ||
|
|
b7176cf887 | ||
|
|
d65f41097e | ||
|
|
08e4240b41 | ||
|
|
028ca1fdc7 | ||
|
|
4f6255971b | ||
|
|
cab250aa0e | ||
|
|
5f76db27c9 | ||
|
|
a34e78f912 | ||
|
|
1ffda38264 | ||
|
|
e78690e4f5 | ||
|
|
5b87260467 | ||
|
|
2c34c38b29 | ||
|
|
a1574d6a06 | ||
|
|
184ea1f956 | ||
|
|
a3dcab9454 | ||
|
|
658b11d0f8 | ||
|
|
e2fe656296 | ||
|
|
87581f328e | ||
|
|
ecd6b85c17 | ||
|
|
b86e5c8c88 | ||
|
|
25c016f98a | ||
|
|
69c4c63357 | ||
|
|
fbd34ec4c2 | ||
|
|
7c75c1b0a9 | ||
|
|
b54a215805 | ||
|
|
0364cf781e | ||
|
|
a8bf76cb22 | ||
|
|
4df4eeaa38 | ||
|
|
ebb3b8cceb | ||
|
|
e9bacdf875 | ||
|
|
a4b5d8573b | ||
|
|
4a2778b6ea | ||
|
|
7ee744207b | ||
|
|
96068518f6 | ||
|
|
10168e0a47 | ||
|
|
6768df9a7b | ||
|
|
e80892a9e7 | ||
|
|
cd53fc7494 | ||
|
|
e8c7990a17 | ||
|
|
c8b61d2f46 | ||
|
|
6e0505bfd7 | ||
|
|
24bdbf036e | ||
|
|
29085686e1 | ||
|
|
8b1632dde8 | ||
|
|
f3275cd59c | ||
|
|
939ed89ebb | ||
|
|
a58e5f48f6 | ||
|
|
3f9014d8c7 | ||
|
|
7c54d41ace | ||
|
|
2fa14ca19c | ||
|
|
211d89e634 | ||
|
|
0038cb24b4 | ||
|
|
658f8d2e2b | ||
|
|
2c23336794 | ||
|
|
a4ae013459 | ||
|
|
c259d3b566 | ||
|
|
bdfd3c01a5 | ||
|
|
2fe91806c7 | ||
|
|
514bb2799c | ||
|
|
e17a1af476 | ||
|
|
2f7c197cd7 | ||
|
|
7d13fe972c | ||
|
|
5a3e15d0ce | ||
|
|
6db013a601 | ||
|
|
49a21b1121 | ||
|
|
7efdf63fbc | ||
|
|
184d1ec5e8 | ||
|
|
1e87a0661b | ||
|
|
187b8be2fa | ||
|
|
1495251ebc | ||
|
|
520c439edc | ||
|
|
712acc67fe | ||
|
|
fdbed14334 | ||
|
|
810cc8b604 | ||
|
|
83ef2525aa | ||
|
|
af9f0b5125 | ||
|
|
9ff28b3456 | ||
|
|
4242363f40 | ||
|
|
3fd78490e6 | ||
|
|
b903f58cea | ||
|
|
0f72ca328a | ||
|
|
6c130adb6c | ||
|
|
9f0f366187 | ||
|
|
81ad921e25 | ||
|
|
3708edc2d3 | ||
|
|
414bc9e5a7 | ||
|
|
66ad504848 | ||
|
|
5bfa44ec77 | ||
|
|
03a8d643c5 | ||
|
|
29957c1f2c | ||
|
|
400ff513f4 | ||
|
|
31eb650fbe | ||
|
|
f3c074759d | ||
|
|
ea3cc537ea | ||
|
|
c012567c38 | ||
|
|
26bc26277d | ||
|
|
abcc46c5ea | ||
|
|
a3ab5bfe0f | ||
|
|
04c4963a02 | ||
|
|
40a6f098ae | ||
|
|
5e607cf210 | ||
|
|
f546dbbede | ||
|
|
a91aa62060 | ||
|
|
a7b82f32d7 | ||
|
|
72b811c278 | ||
|
|
b478534b22 | ||
|
|
2ca7177446 | ||
|
|
e858f9e976 | ||
|
|
0a67191054 | ||
|
|
c363e5ff8b | ||
|
|
c148cd9044 | ||
|
|
2893f73f47 | ||
|
|
5103ebe0d8 | ||
|
|
40acc8f868 | ||
|
|
eebd9daf2a | ||
|
|
9bc1a6d625 | ||
|
|
d6f9bb3c47 | ||
|
|
f70f4b7e12 | ||
|
|
b9b2aee760 | ||
|
|
f60184ecbb | ||
|
|
81d52e6e3a | ||
|
|
5980858b39 | ||
|
|
ed6a8d0462 | ||
|
|
4cef6aaa84 | ||
|
|
d477145694 | ||
|
|
dc5b7b2c2e | ||
|
|
cf5ebd76fe | ||
|
|
fc548919c5 | ||
|
|
4b4bca52d9 | ||
|
|
2a9d545c3c | ||
|
|
20cc5cf7e0 | ||
|
|
5fcc45efbe | ||
|
|
282f6d314d | ||
|
|
c36ca33fe9 | ||
|
|
34ba99af2a | ||
|
|
2713445ad0 | ||
|
|
b62f35291d | ||
|
|
a7fbcb8156 | ||
|
|
a7120b949c | ||
|
|
812e1e3c53 | ||
|
|
4eb3f29565 | ||
|
|
d64092d8a1 | ||
|
|
77750c8149 | ||
|
|
b9386cd39e | ||
|
|
17112b09cc | ||
|
|
c5adff5348 | ||
|
|
ad2e52e211 | ||
|
|
164bec71a3 | ||
|
|
11b5c7227f | ||
|
|
fc70f9ba30 | ||
|
|
a00d1f0719 | ||
|
|
c4ea7815f8 | ||
|
|
afc36a633f | ||
|
|
7275b273d4 | ||
|
|
a3e59f9c25 | ||
|
|
48bf656123 | ||
|
|
fabb27908d | ||
|
|
52aeeebe1f | ||
|
|
1e27288ec2 | ||
|
|
9f4a5daee6 | ||
|
|
27f0a015ef | ||
|
|
20d89bef91 | ||
|
|
469e6d0a69 | ||
|
|
2d58609d54 | ||
|
|
109a0b90d4 | ||
|
|
ab67c1a50e | ||
|
|
6a34b52d15 | ||
|
|
f708305190 | ||
|
|
2749f56b7a | ||
|
|
d983175189 | ||
|
|
4b3dc3988d | ||
|
|
fa4405b58b | ||
|
|
abb08f8e1a | ||
|
|
9f91d446c1 | ||
|
|
1b07c3c4f3 | ||
|
|
e55b917c96 | ||
|
|
0301c7a058 | ||
|
|
bbed8d7478 | ||
|
|
17e7390e25 | ||
|
|
1cb731533b | ||
|
|
d6c2482150 | ||
|
|
9d2e6d2b36 | ||
|
|
124c19b5c9 | ||
|
|
d8a334df3b | ||
|
|
c485e84145 | ||
|
|
f51e9dd222 | ||
|
|
c25f7d342c | ||
|
|
ca0a93f222 | ||
|
|
9cf260e42b | ||
|
|
87b1f50b39 | ||
|
|
3458c98bd0 | ||
|
|
2ad420d370 | ||
|
|
2047f99c6d | ||
|
|
9bd2f9fc2a | ||
|
|
5116c1c8a1 | ||
|
|
07ffd04950 | ||
|
|
dfa574375b | ||
|
|
9de2e7d723 | ||
|
|
8fddb24165 | ||
|
|
ed3902f07e | ||
|
|
f219a752d6 | ||
|
|
f6e36aba52 | ||
|
|
c286b8ba97 | ||
|
|
3dad7abfb8 | ||
|
|
42163abb73 | ||
|
|
8e0188c755 | ||
|
|
555d86206d | ||
|
|
02392881c5 | ||
|
|
6f1175ef8d | ||
|
|
1d062cf683 | ||
|
|
be7a9181b2 | ||
|
|
9dcd4d0de4 | ||
|
|
00c7ce8f15 | ||
|
|
c4a73d5921 | ||
|
|
e1ffc92589 | ||
|
|
18729762ad | ||
|
|
8a499201de | ||
|
|
551c5d3bfa | ||
|
|
15a867636f | ||
|
|
1b3d0b2724 | ||
|
|
d5fa7f0861 | ||
|
|
e15cf0c42e | ||
|
|
1891de7fa3 | ||
|
|
998e7fb2f8 | ||
|
|
a48c7782ac | ||
|
|
e0347b0b43 | ||
|
|
5b6b449cbd | ||
|
|
c8568e5674 | ||
|
|
9d13994526 | ||
|
|
ca8b1efc18 | ||
|
|
c261492325 | ||
|
|
8d43185439 | ||
|
|
1d018bc80b | ||
|
|
2319c3032f | ||
|
|
53af0d486d | ||
|
|
b643abe05e | ||
|
|
695c9b6747 | ||
|
|
8e8f18b9bc | ||
|
|
e767be217a | ||
|
|
9187cb8ad2 | ||
|
|
873456eb97 | ||
|
|
4e1180e502 | ||
|
|
4ea8d9339e | ||
|
|
82237c6bde | ||
|
|
109df305a3 | ||
|
|
d03a9796f4 | ||
|
|
8e591f3dd5 | ||
|
|
086f11988c | ||
|
|
52eca1ff78 | ||
|
|
e80c60b027 | ||
|
|
e118814684 | ||
|
|
1546cf2eba | ||
|
|
4f06de8044 | ||
|
|
2ca6fcc9ce | ||
|
|
278a31971c | ||
|
|
479e5899c6 | ||
|
|
c4c173e40e | ||
|
|
b8410ee4da | ||
|
|
0cb0396a84 | ||
|
|
b1fe078e06 | ||
|
|
b3d446bfef | ||
|
|
34be81c216 | ||
|
|
342164d357 | ||
|
|
bfa3594fe8 | ||
|
|
1f120de168 | ||
|
|
c0048b2ae4 | ||
|
|
402a8f8249 | ||
|
|
7cc7047b28 | ||
|
|
1953e107c2 | ||
|
|
f096e82754 | ||
|
|
c3415df4b1 | ||
|
|
9452049aff | ||
|
|
09e3c27e70 | ||
|
|
b0bfe654c4 | ||
|
|
6bc44de495 | ||
|
|
efb455c739 | ||
|
|
2c58041885 | ||
|
|
c30f31c22c | ||
|
|
59f2cdfb84 | ||
|
|
795d91237d | ||
|
|
26ae050f16 | ||
|
|
58b395e7ee | ||
|
|
3beb2b5274 | ||
|
|
19d938b05e | ||
|
|
d4daa82297 | ||
|
|
21097578d9 | ||
|
|
069dc184a9 | ||
|
|
02fa657128 | ||
|
|
bc0e376c88 | ||
|
|
d7c602a2d2 | ||
|
|
603e6be7eb | ||
|
|
efa469e12f | ||
|
|
657858df16 | ||
|
|
a8d700d530 | ||
|
|
c408d8887d | ||
|
|
be2250d241 | ||
|
|
b6aa1378de | ||
|
|
2082141dfc | ||
|
|
8a9b5e889d | ||
|
|
871b7e90d8 | ||
|
|
edf300893c | ||
|
|
a3d8fc00e1 | ||
|
|
9d4531d48b | ||
|
|
75e419e7ed | ||
|
|
4a62fc5726 | ||
|
|
d2b959fdcf | ||
|
|
36ebb1b2b0 | ||
|
|
e126a01096 | ||
|
|
7a38a26593 | ||
|
|
1af6e1f757 | ||
|
|
affafcc5f2 | ||
|
|
26ef8c3e59 | ||
|
|
0898ee6bf0 | ||
|
|
5ac168868e | ||
|
|
684994ee11 | ||
|
|
416ad517fe | ||
|
|
7c34389aea | ||
|
|
18510130d8 | ||
|
|
0bf8c914b7 | ||
|
|
95888d5f31 | ||
|
|
5d195e27cb | ||
|
|
0c7b05fec9 | ||
|
|
bc895879e6 | ||
|
|
31f01c061a | ||
|
|
17894e723c | ||
|
|
6141ddddea | ||
|
|
352c0f7d90 | ||
|
|
842a8ec673 | ||
|
|
d97edba041 | ||
|
|
f64e0094f1 | ||
|
|
3d2477e554 | ||
|
|
350d695f7c | ||
|
|
f8b8b8a788 | ||
|
|
b9c27f9838 | ||
|
|
5541df6a73 | ||
|
|
bdc3d404ef | ||
|
|
f395a7d768 | ||
|
|
a36eec6cae | ||
|
|
38938e005e | ||
|
|
da61b15715 | ||
|
|
0b22d0a977 | ||
|
|
356d14ac0f | ||
|
|
311bc45388 | ||
|
|
ef66fb3938 | ||
|
|
b934445e04 | ||
|
|
a6839c020f | ||
|
|
7e47d5b47a | ||
|
|
6a65570f3f | ||
|
|
a4ca78dbe4 | ||
|
|
bfbe8a57ae | ||
|
|
b4b73e45f3 | ||
|
|
78119aba0f | ||
|
|
5535478fe8 | ||
|
|
7ba91a4a22 | ||
|
|
79416fd5fc | ||
|
|
b66523cff3 | ||
|
|
19a41aa382 | ||
|
|
62ac9b623a | ||
|
|
bf28d42483 | ||
|
|
f8ce6c97bf | ||
|
|
86b3cccaf6 | ||
|
|
3c920431fa | ||
|
|
a1ece05af5 | ||
|
|
cef2c588b7 | ||
|
|
7d8e498238 | ||
|
|
c83803b440 | ||
|
|
a41862d799 |
359
.github/workflows/CICD.yml
vendored
Normal file
359
.github/workflows/CICD.yml
vendored
Normal file
@@ -0,0 +1,359 @@
|
||||
name: CICD
|
||||
|
||||
# spell-checker:ignore CICD CODECOV MSVC MacOS Peltoche SHAs buildable clippy esac fakeroot gnueabihf halium libssl mkdir musl popd printf pushd rustfmt softprops toolchain
|
||||
|
||||
env:
|
||||
PROJECT_NAME: dust
|
||||
PROJECT_DESC: "du + rust = dust"
|
||||
PROJECT_AUTH: "bootandy"
|
||||
RUST_MIN_SRV: "1.31.0"
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
style:
|
||||
name: Style
|
||||
runs-on: ${{ matrix.job.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- { os: ubuntu-latest }
|
||||
- { os: macos-latest }
|
||||
- { os: windows-latest }
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Initialize workflow variables
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
# 'windows-latest' `cargo fmt` is bugged for this project (see reasons @ GH:rust-lang/rustfmt #3324, #3590, #3688 ; waiting for repair)
|
||||
JOB_DO_FORMAT_TESTING="true"
|
||||
case ${{ matrix.job.os }} in windows-latest) unset JOB_DO_FORMAT_TESTING ;; esac;
|
||||
echo set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING:-<empty>/false}
|
||||
echo ::set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING}
|
||||
# target-specific options
|
||||
# * CARGO_FEATURES_OPTION
|
||||
CARGO_FEATURES_OPTION='' ;
|
||||
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
|
||||
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||
- name: Install `rust` toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
profile: minimal # minimal component installation (ie, no documentation)
|
||||
components: rustfmt, clippy
|
||||
- name: Install wget for Windows
|
||||
if: matrix.job.os == 'windows-latest'
|
||||
run: choco install wget --no-progress
|
||||
- name: typos-action
|
||||
uses: crate-ci/typos@v1.28.4
|
||||
- name: "`fmt` testing"
|
||||
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
- name: "`clippy` testing"
|
||||
if: success() || failure() # run regardless of prior step ("`fmt` testing") success/failure
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: clippy
|
||||
args: ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings
|
||||
|
||||
min_version:
|
||||
name: MinSRV # Minimum supported rust version
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: ${{ env.RUST_MIN_SRV }}
|
||||
profile: minimal # minimal component installation (ie, no documentation)
|
||||
- name: Test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ${{ matrix.job.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
# { os, target, cargo-options, features, use-cross, toolchain }
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: aarch64-unknown-linux-gnu,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: aarch64-unknown-linux-musl,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: arm-unknown-linux-gnueabihf,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: arm-unknown-linux-musleabi,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: i686-unknown-linux-gnu,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: i686-unknown-linux-musl,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: x86_64-unknown-linux-gnu,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- {
|
||||
os: ubuntu-latest,
|
||||
target: x86_64-unknown-linux-musl,
|
||||
use-cross: use-cross,
|
||||
}
|
||||
- { os: macos-latest, target: x86_64-apple-darwin }
|
||||
- { os: windows-latest, target: i686-pc-windows-gnu }
|
||||
- { os: windows-latest, target: i686-pc-windows-msvc }
|
||||
- { os: windows-latest, target: x86_64-pc-windows-gnu } ## !maint: [rivy; 2020-01-21] may break due to rust bug; follow possible solution from GH:rust-lang/rust#47048 (refs: GH:rust-lang/rust#47048 , GH:rust-lang/rust#53454 , GH:bike-barn/hermit#172 )
|
||||
- { os: windows-latest, target: x86_64-pc-windows-msvc }
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Install any prerequisites
|
||||
shell: bash
|
||||
run: |
|
||||
case ${{ matrix.job.target }} in
|
||||
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
|
||||
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install binutils-aarch64-linux-gnu ;;
|
||||
esac
|
||||
- name: Initialize workflow variables
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
# toolchain
|
||||
TOOLCHAIN="stable" ## default to "stable" toolchain
|
||||
# * specify alternate TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: <https://github.com/rust-lang/rust/issues/47048>, <https://github.com/rust-lang/rust/issues/53454>, <https://github.com/rust-lang/cargo/issues/6754>)
|
||||
case ${{ matrix.job.target }} in *-pc-windows-gnu) TOOLCHAIN="stable-${{ matrix.job.target }}" ;; esac;
|
||||
# * use requested TOOLCHAIN if specified
|
||||
if [ -n "${{ matrix.job.toolchain }}" ]; then TOOLCHAIN="${{ matrix.job.toolchain }}" ; fi
|
||||
echo set-output name=TOOLCHAIN::${TOOLCHAIN}
|
||||
echo ::set-output name=TOOLCHAIN::${TOOLCHAIN}
|
||||
# staging directory
|
||||
STAGING='_staging'
|
||||
echo set-output name=STAGING::${STAGING}
|
||||
echo ::set-output name=STAGING::${STAGING}
|
||||
# determine EXE suffix
|
||||
EXE_suffix="" ; case ${{ matrix.job.target }} in *-pc-windows-*) EXE_suffix=".exe" ;; esac;
|
||||
echo set-output name=EXE_suffix::${EXE_suffix}
|
||||
echo ::set-output name=EXE_suffix::${EXE_suffix}
|
||||
# parse commit reference info
|
||||
REF_NAME=${GITHUB_REF#refs/*/}
|
||||
unset REF_BRANCH ; case ${GITHUB_REF} in refs/heads/*) REF_BRANCH=${GITHUB_REF#refs/heads/} ;; esac;
|
||||
unset REF_TAG ; case ${GITHUB_REF} in refs/tags/*) REF_TAG=${GITHUB_REF#refs/tags/} ;; esac;
|
||||
REF_SHAS=${GITHUB_SHA:0:8}
|
||||
echo set-output name=REF_NAME::${REF_NAME}
|
||||
echo set-output name=REF_BRANCH::${REF_BRANCH}
|
||||
echo set-output name=REF_TAG::${REF_TAG}
|
||||
echo set-output name=REF_SHAS::${REF_SHAS}
|
||||
echo ::set-output name=REF_NAME::${REF_NAME}
|
||||
echo ::set-output name=REF_BRANCH::${REF_BRANCH}
|
||||
echo ::set-output name=REF_TAG::${REF_TAG}
|
||||
echo ::set-output name=REF_SHAS::${REF_SHAS}
|
||||
# parse target
|
||||
unset TARGET_ARCH ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) TARGET_ARCH=arm ;; aarch-*) TARGET_ARCH=aarch64 ;; i686-*) TARGET_ARCH=i686 ;; x86_64-*) TARGET_ARCH=x86_64 ;; esac;
|
||||
echo set-output name=TARGET_ARCH::${TARGET_ARCH}
|
||||
echo ::set-output name=TARGET_ARCH::${TARGET_ARCH}
|
||||
unset TARGET_OS ; case ${{ matrix.job.target }} in *-linux-*) TARGET_OS=linux ;; *-apple-*) TARGET_OS=macos ;; *-windows-*) TARGET_OS=windows ;; esac;
|
||||
echo set-output name=TARGET_OS::${TARGET_OS}
|
||||
echo ::set-output name=TARGET_OS::${TARGET_OS}
|
||||
# package name
|
||||
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
|
||||
PKG_BASENAME=${PROJECT_NAME}-${REF_TAG:-$REF_SHAS}-${{ matrix.job.target }}
|
||||
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
|
||||
echo set-output name=PKG_suffix::${PKG_suffix}
|
||||
echo set-output name=PKG_BASENAME::${PKG_BASENAME}
|
||||
echo set-output name=PKG_NAME::${PKG_NAME}
|
||||
echo ::set-output name=PKG_suffix::${PKG_suffix}
|
||||
echo ::set-output name=PKG_BASENAME::${PKG_BASENAME}
|
||||
echo ::set-output name=PKG_NAME::${PKG_NAME}
|
||||
# deployable tag? (ie, leading "vM" or "M"; M == version number)
|
||||
unset DEPLOY ; if [[ $REF_TAG =~ ^[vV]?[0-9].* ]]; then DEPLOY='true' ; fi
|
||||
echo set-output name=DEPLOY::${DEPLOY:-<empty>/false}
|
||||
echo ::set-output name=DEPLOY::${DEPLOY}
|
||||
# target-specific options
|
||||
# * CARGO_FEATURES_OPTION
|
||||
CARGO_FEATURES_OPTION='' ;
|
||||
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
|
||||
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||
# * CARGO_USE_CROSS (truthy)
|
||||
CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac;
|
||||
echo set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS:-<empty>/false}
|
||||
echo ::set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS}
|
||||
# # * `arm` cannot be tested on ubuntu-* hosts (b/c testing is currently primarily done via comparison of target outputs with built-in outputs and the `arm` target is not executable on the host)
|
||||
JOB_DO_TESTING="true"
|
||||
case ${{ matrix.job.target }} in arm-*|aarch64-*) unset JOB_DO_TESTING ;; esac;
|
||||
echo set-output name=JOB_DO_TESTING::${JOB_DO_TESTING:-<empty>/false}
|
||||
echo ::set-output name=JOB_DO_TESTING::${JOB_DO_TESTING}
|
||||
# # * test only binary for arm-type targets
|
||||
unset CARGO_TEST_OPTIONS
|
||||
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-*|aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
|
||||
echo set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
|
||||
echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
|
||||
# * strip executable?
|
||||
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; aarch64-unknown-linux-musl) STRIP="" ;; armv7-unknown-linux-musleabi) STRIP="" ;; arm-unknown-linux-musleabi) STRIP="" ;; esac;
|
||||
|
||||
|
||||
echo set-output name=STRIP::${STRIP}
|
||||
echo ::set-output name=STRIP::${STRIP}
|
||||
- name: Create all needed build/work directories
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p '${{ steps.vars.outputs.STAGING }}'
|
||||
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
|
||||
- name: rust toolchain ~ install
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: ${{ steps.vars.outputs.TOOLCHAIN }}
|
||||
target: ${{ matrix.job.target }}
|
||||
override: true
|
||||
profile: minimal # minimal component installation (ie, no documentation)
|
||||
- name: Info
|
||||
shell: bash
|
||||
run: |
|
||||
gcc --version || true
|
||||
rustup -V
|
||||
rustup toolchain list
|
||||
rustup default
|
||||
cargo -V
|
||||
rustc -V
|
||||
- name: Build
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
|
||||
command: build
|
||||
args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
|
||||
- name: Install cargo-deb
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: install
|
||||
args: cargo-deb
|
||||
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
|
||||
- name: Build deb
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: deb
|
||||
args: --no-build --target=${{ matrix.job.target }}
|
||||
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
|
||||
- name: Test
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
|
||||
command: test
|
||||
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
|
||||
- name: Archive executable artifacts
|
||||
uses: actions/upload-artifact@master
|
||||
with:
|
||||
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}
|
||||
path: target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}
|
||||
- name: Archive deb artifacts
|
||||
uses: actions/upload-artifact@master
|
||||
with:
|
||||
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}.deb
|
||||
path: target/${{ matrix.job.target }}/debian
|
||||
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
|
||||
- name: Package
|
||||
shell: bash
|
||||
run: |
|
||||
# binary
|
||||
cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
||||
# `strip` binary (if needed)
|
||||
if [ -n "${{ steps.vars.outputs.STRIP }}" ]; then "${{ steps.vars.outputs.STRIP }}" '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' ; fi
|
||||
# README and LICENSE
|
||||
cp README.md '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
||||
cp LICENSE '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
||||
# base compressed package
|
||||
pushd '${{ steps.vars.outputs.STAGING }}/' >/dev/null
|
||||
case ${{ matrix.job.target }} in
|
||||
*-pc-windows-*) 7z -y a '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* | tail -2 ;;
|
||||
*) tar czf '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* ;;
|
||||
esac;
|
||||
popd >/dev/null
|
||||
- name: Publish
|
||||
uses: softprops/action-gh-release@v1
|
||||
if: steps.vars.outputs.DEPLOY
|
||||
with:
|
||||
files: |
|
||||
${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_NAME }}
|
||||
target/${{ matrix.job.target }}/debian/*.deb
|
||||
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
## fix! [rivy; 2020-22-01] `cargo tarpaulin` is unable to test this repo at the moment; alternate recipe or another testing framework?
|
||||
# coverage:
|
||||
# name: Code Coverage
|
||||
# runs-on: ${{ matrix.job.os }}
|
||||
# strategy:
|
||||
# fail-fast: true
|
||||
# matrix:
|
||||
# # job: [ { os: ubuntu-latest }, { os: macos-latest }, { os: windows-latest } ]
|
||||
# job: [ { os: ubuntu-latest } ] ## cargo-tarpaulin is currently only available on linux
|
||||
# steps:
|
||||
# - uses: actions/checkout@v1
|
||||
# # - name: Reattach HEAD ## may be needed for accurate code coverage info
|
||||
# # run: git checkout ${{ github.head_ref }}
|
||||
# - name: Initialize workflow variables
|
||||
# id: vars
|
||||
# shell: bash
|
||||
# run: |
|
||||
# # staging directory
|
||||
# STAGING='_staging'
|
||||
# echo set-output name=STAGING::${STAGING}
|
||||
# echo ::set-output name=STAGING::${STAGING}
|
||||
# # check for CODECOV_TOKEN availability (work-around for inaccessible 'secrets' object for 'if'; see <https://github.community/t5/GitHub-Actions/jobs-lt-job-id-gt-if-does-not-work-with-env-secrets/m-p/38549>)
|
||||
# unset HAS_CODECOV_TOKEN
|
||||
# if [ -n $CODECOV_TOKEN ]; then HAS_CODECOV_TOKEN='true' ; fi
|
||||
# echo set-output name=HAS_CODECOV_TOKEN::${HAS_CODECOV_TOKEN}
|
||||
# echo ::set-output name=HAS_CODECOV_TOKEN::${HAS_CODECOV_TOKEN}
|
||||
# env:
|
||||
# CODECOV_TOKEN: "${{ secrets.CODECOV_TOKEN }}"
|
||||
# - name: Create all needed build/work directories
|
||||
# shell: bash
|
||||
# run: |
|
||||
# mkdir -p '${{ steps.vars.outputs.STAGING }}/work'
|
||||
# - name: Install required packages
|
||||
# run: |
|
||||
# sudo apt-get -y install libssl-dev
|
||||
# pushd '${{ steps.vars.outputs.STAGING }}/work' >/dev/null
|
||||
# wget --no-verbose https://github.com/xd009642/tarpaulin/releases/download/0.9.3/cargo-tarpaulin-0.9.3-travis.tar.gz
|
||||
# tar xf cargo-tarpaulin-0.9.3-travis.tar.gz
|
||||
# cp cargo-tarpaulin "$(dirname -- "$(which cargo)")"/
|
||||
# popd >/dev/null
|
||||
# - name: Generate coverage
|
||||
# run: |
|
||||
# cargo tarpaulin --out Xml
|
||||
# - name: Upload coverage results (CodeCov.io)
|
||||
# # CODECOV_TOKEN (aka, "Repository Upload Token" for REPO from CodeCov.io) ## set via REPO/Settings/Secrets
|
||||
# # if: secrets.CODECOV_TOKEN (not supported {yet?}; see <https://github.community/t5/GitHub-Actions/jobs-lt-job-id-gt-if-does-not-work-with-env-secrets/m-p/38549>)
|
||||
# if: steps.vars.outputs.HAS_CODECOV_TOKEN
|
||||
# run: |
|
||||
# # CodeCov.io
|
||||
# cargo tarpaulin --out Xml
|
||||
# bash <(curl -s https://codecov.io/bash)
|
||||
# env:
|
||||
# CODECOV_TOKEN: "${{ secrets.CODECOV_TOKEN }}"
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -6,4 +6,7 @@
|
||||
**/*.rs.bk
|
||||
*.swp
|
||||
.vscode/*
|
||||
*.idea/*
|
||||
*.idea/*
|
||||
|
||||
#ignore macos files
|
||||
.DS_Store
|
||||
11
.pre-commit-config.yaml
Normal file
11
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,11 @@
|
||||
repos:
|
||||
- repo: https://github.com/doublify/pre-commit-rust
|
||||
rev: v1.0
|
||||
hooks:
|
||||
- id: cargo-check
|
||||
stages: [commit]
|
||||
- id: fmt
|
||||
stages: [commit]
|
||||
- id: clippy
|
||||
args: [--all-targets, --all-features]
|
||||
stages: [commit]
|
||||
76
.travis.yml
76
.travis.yml
@@ -1,76 +0,0 @@
|
||||
# Based on the "trust" template v0.1.2
|
||||
# https://github.com/japaric/trust/tree/v0.1.2
|
||||
|
||||
dist: trusty
|
||||
language: rust
|
||||
services: docker
|
||||
sudo: required
|
||||
|
||||
# TODO Rust builds on stable by default, this can be
|
||||
# overridden on a case by case basis down below.
|
||||
|
||||
env:
|
||||
global:
|
||||
# TODO Update this to match the name of your project.
|
||||
- CRATE_NAME=dust
|
||||
|
||||
matrix:
|
||||
# TODO These are all the build jobs. Adjust as necessary. Comment out what you
|
||||
# don't need
|
||||
include:
|
||||
# Linux
|
||||
- env: TARGET=x86_64-unknown-linux-gnu
|
||||
|
||||
# OSX
|
||||
- env: TARGET=x86_64-apple-darwin
|
||||
os: osx
|
||||
|
||||
before_install:
|
||||
- set -e
|
||||
- rustup self update
|
||||
|
||||
install:
|
||||
- sh ci/install.sh
|
||||
- source ~/.cargo/env || true
|
||||
|
||||
script:
|
||||
- bash ci/script.sh
|
||||
|
||||
after_script: set +e
|
||||
|
||||
before_deploy:
|
||||
- sh ci/before_deploy.sh
|
||||
|
||||
deploy:
|
||||
# TODO update `api_key.secure`
|
||||
# - Create a `public_repo` GitHub token. Go to: https://github.com/settings/tokens/new
|
||||
# - Encrypt it: `travis encrypt 0123456789012345678901234567890123456789
|
||||
# - Paste the output down here
|
||||
api_key:
|
||||
secure: UlU73Td7Bkb2N88ws4YGLWR+4U0IMgiou9QQtMnmpouJFjeUNxtLSPMPODVXP7zq4sKt5HR5B3fX9MW4mKm351fvnQEoihETn06pKiXGnY//SlTPTt67MX9ZOYmd9ohJReMDOZDgqhnGLxfymycGtsLAmdjDZnAl+IMqgg0FMyVFj9Cl9aKxnn12lxQyX4zabHKk8TUKD3By8ZoEUnJMHt3gEtOmbDgS4brcTPeHCzqnYFw73LEnkqvz+JP0XwauJY7Cf8lminKm/klmjCkQji8T9SHI52v1g0Fxpx0ucp2o3vulQrLHXaHvZ6Fr7J0cSXXzaFF3rrGLt4t4jU/+9TZm1+n5k5XuPW4x4NTCC9NmIj/z0/z41t82E9qZhzhtm2Jdsg6H2tNk+C774TYqcmR6GCvfRadfjRp3cA5dh0UwDVjH2MJFxlHDVkl6la0mVVRsCGF3oBKZVk0BDl1womfnmI46o/uU+gLknHN6Ed6PHHPPYDViWd3VKdmHKT7XrkMMUF6HjZUtla689DWIOWZSiV++1dVPcl/1TV+6tTmN4bBtPcLuX7SHRuLp2PI2kATvRMECsa7gZRypW4jKpVn7b2yetX9TVI3i1zR5zkQJ3dPg8sATvYPL53aKH/WsqUg4rzoAlbk9so+++R4bQY69LhV3B511B7EAynoZFdM
|
||||
file_glob: true
|
||||
file: $CRATE_NAME-$TRAVIS_TAG-$TARGET.*
|
||||
on:
|
||||
# TODO Here you can pick which targets will generate binary releases
|
||||
# In this example, there are some targets that are tested using the stable
|
||||
# and nightly channels. This condition makes sure there is only one release
|
||||
# for such targets and that's generated using the stable channel
|
||||
condition: $TRAVIS_RUST_VERSION = stable
|
||||
tags: true
|
||||
provider: releases
|
||||
skip_cleanup: true
|
||||
|
||||
cache: cargo
|
||||
before_cache:
|
||||
# Travis can't cache files that are not readable by "others"
|
||||
- chmod -R a+r $HOME/.cargo
|
||||
|
||||
branches:
|
||||
only:
|
||||
# release tags
|
||||
- /^v\d+\.\d+\.\d+.*$/
|
||||
- master
|
||||
|
||||
notifications:
|
||||
email:
|
||||
on_success: never
|
||||
1279
Cargo.lock
generated
1279
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
92
Cargo.toml
92
Cargo.toml
@@ -1,9 +1,10 @@
|
||||
[package]
|
||||
name = "du-dust"
|
||||
description = "A more intuitive version of du"
|
||||
version = "0.4.1"
|
||||
version = "1.2.3"
|
||||
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
|
||||
edition = "2018"
|
||||
edition = "2024"
|
||||
readme = "README.md"
|
||||
|
||||
documentation = "https://github.com/bootandy/dust"
|
||||
homepage = "https://github.com/bootandy/dust"
|
||||
@@ -14,16 +15,89 @@ categories = ["command-line-utilities"]
|
||||
license = "Apache-2.0"
|
||||
|
||||
[badges]
|
||||
travis-ci = {repository = "https://travis-ci.org/bootandy/dust"}
|
||||
travis-ci = { repository = "https://travis-ci.org/bootandy/dust" }
|
||||
|
||||
[[bin]]
|
||||
name = "dust"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
ansi_term = "=0.11"
|
||||
clap = "=2.33"
|
||||
assert_cli = "=0.5"
|
||||
tempfile = "=3"
|
||||
walkdir = "=2"
|
||||
[profile.release]
|
||||
codegen-units = 1
|
||||
lto = true
|
||||
strip = true
|
||||
|
||||
[dependencies]
|
||||
ansi_term = "0.12"
|
||||
clap = { version = "4.4", features = ["derive"] }
|
||||
lscolors = "0.13"
|
||||
terminal_size = "0.2"
|
||||
unicode-width = "0.1"
|
||||
rayon = "1"
|
||||
thousands = "0.2"
|
||||
stfu8 = "0.2"
|
||||
regex = "1"
|
||||
config-file = "0.2"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
sysinfo = "0.27"
|
||||
ctrlc = "3.4"
|
||||
chrono = "0.4"
|
||||
|
||||
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
|
||||
portable-atomic = "1.4"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
winapi-util = "0.1"
|
||||
filesize = "0.2.0"
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2"
|
||||
tempfile = "=3"
|
||||
|
||||
[build-dependencies]
|
||||
clap = { version = "4.4", features = ["derive"] }
|
||||
clap_complete = "4.4"
|
||||
clap_mangen = "0.2"
|
||||
|
||||
[[test]]
|
||||
name = "integration"
|
||||
path = "tests/tests.rs"
|
||||
|
||||
[package.metadata.binstall]
|
||||
pkg-url = "{ repo }/releases/download/v{ version }/dust-v{ version }-{ target }{ archive-suffix }"
|
||||
bin-dir = "dust-v{ version }-{ target }/{ bin }{ binary-ext }"
|
||||
|
||||
[package.metadata.deb]
|
||||
section = "utils"
|
||||
assets = [
|
||||
[
|
||||
"target/release/dust",
|
||||
"usr/bin/",
|
||||
"755",
|
||||
],
|
||||
[
|
||||
"LICENSE",
|
||||
"usr/share/doc/du-dust/",
|
||||
"644",
|
||||
],
|
||||
[
|
||||
"README.md",
|
||||
"usr/share/doc/du-dust/README",
|
||||
"644",
|
||||
],
|
||||
[
|
||||
"man-page/dust.1",
|
||||
"usr/share/man/man1/dust.1",
|
||||
"644",
|
||||
],
|
||||
[
|
||||
"completions/dust.bash",
|
||||
"usr/share/bash-completion/completions/dust",
|
||||
"644",
|
||||
],
|
||||
]
|
||||
extended-description = """\
|
||||
Dust is meant to give you an instant overview of which directories are using
|
||||
disk space without requiring sort or head. Dust will print a maximum of one
|
||||
'Did not have permissions message'.
|
||||
"""
|
||||
|
||||
2
LICENSE
2
LICENSE
@@ -186,7 +186,7 @@
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
Copyright [2023] [andrew boot]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
|
||||
143
README.md
143
README.md
@@ -1,73 +1,128 @@
|
||||
[](https://github.com/bootandy/dust/actions)
|
||||
|
||||
[](https://travis-ci.org/bootandy/dust)
|
||||
|
||||
# Dust
|
||||
|
||||
du + rust = dust. Like du but more intuitive
|
||||
du + rust = dust. Like du but more intuitive.
|
||||
|
||||
# Why
|
||||
|
||||
Because I want an easy way to see where my disk is being used.
|
||||
|
||||
# Demo
|
||||
|
||||

|
||||
|
||||
## Install
|
||||
|
||||
#### Cargo Install
|
||||
#### Cargo <a href="https://repology.org/project/du-dust/versions"><img src="https://repology.org/badge/vertical-allrepos/du-dust.svg" alt="Packaging status" align="right"></a>
|
||||
|
||||
* cargo install du-dust
|
||||
- `cargo install du-dust`
|
||||
|
||||
#### Download Install
|
||||
#### 🍺 Homebrew (Mac OS)
|
||||
|
||||
* Download linux / mac binary from [Releases](https://github.com/bootandy/dust/releases)
|
||||
* unzip file: tar -xvf _downloaded_file.tar.gz_
|
||||
* move file to executable path: sudo mv dust /usr/local/bin/
|
||||
- `brew install dust`
|
||||
|
||||
#### 🍺 Homebrew (Linux)
|
||||
|
||||
- `brew install dust`
|
||||
|
||||
#### [Snap](https://ubuntu.com/core/services/guide/snaps-intro) Ubuntu and [supported systems](https://snapcraft.io/docs/installing-snapd)
|
||||
|
||||
- `snap install dust`
|
||||
|
||||
Note: `dust` installed through `snap` can only access files stored in the `/home` directory. See daniejstriata/dust-snap#2 for more information.
|
||||
|
||||
#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)
|
||||
|
||||
- `pacstall -I dust-bin`
|
||||
|
||||
#### Anaconda (conda-forge)
|
||||
|
||||
- `conda install -c conda-forge dust`
|
||||
|
||||
#### [deb-get](https://github.com/wimpysworld/deb-get) (Debian/Ubuntu)
|
||||
|
||||
- `deb-get install du-dust`
|
||||
|
||||
#### [x-cmd](https://www.x-cmd.com/pkg/#VPContent)
|
||||
|
||||
- `x env use dust`
|
||||
|
||||
#### Windows:
|
||||
|
||||
- `scoop install dust`
|
||||
- Windows GNU version - works
|
||||
- Windows MSVC - requires: [VCRUNTIME140.dll](https://docs.microsoft.com/en-gb/cpp/windows/latest-supported-vc-redist?view=msvc-170)
|
||||
|
||||
#### Download
|
||||
|
||||
- Download Linux/Mac binary from [Releases](https://github.com/bootandy/dust/releases)
|
||||
- unzip file: `tar -xvf _downloaded_file.tar.gz`
|
||||
- move file to executable path: `sudo mv dust /usr/local/bin/`
|
||||
|
||||
## Overview
|
||||
|
||||
Dust is meant to give you an instant overview of which directories are using disk space without requiring sort or head. Dust will print a maximum of 1 'Did not have permissions message'.
|
||||
Dust is meant to give you an instant overview of which directories are using disk space without requiring sort or head. Dust will print a maximum of one 'Did not have permissions message'.
|
||||
|
||||
Dust will list the 20 biggest sub directories or files and will smartly recurse down the tree to find the larger ones. There is no need for a '-d' flag or a '-h' flag. The largest sub directory will have its size shown in *red*
|
||||
Dust will list a slightly-less-than-the-terminal-height number of the biggest subdirectories or files and will smartly recurse down the tree to find the larger ones. There is no need for a '-d' flag or a '-h' flag. The largest subdirectories will be colored.
|
||||
|
||||
## Why?
|
||||
|
||||
du has a number of ways of showing you what it finds, in terms of disk consumption, but really, there are only one or two ways you invoke it: with -h for “human readable” units, like 100G or 89k, or with -b for “bytes”. The former is generally used for a quick survey of a directory with a small number of things in it, and the latter for when you have a bunch and need to sort the output numerically, and you’re obligated to either further pass it into something like awk to turn bytes into the appropriate human-friendly unit like mega or gigabytes, or pipe thru sort and head while remembering the '-h' flag. Then once you have the top offenders, you recurse down into the largest one and repeat the process until you’ve found your cruft or gems and can move on.
|
||||
|
||||
Dust assumes that’s what you wanted to do in the first place, and takes care of tracking the largest offenders in terms of actual size, and showing them to you with human-friendly units and in-context within the filetree.
|
||||
The different colors on the bars: These represent the combined tree hierarchy & disk usage. The shades of grey are used to indicate which parent folder a subfolder belongs to. For instance, look at the above screenshot. `.steam` is a folder taking 44% of the space. From the `.steam` bar, a light grey line goes up; all these folders are inside `.steam`, so if you delete `.steam` all that stuff will be gone too.
|
||||
|
||||
## Usage
|
||||
|
||||
```
|
||||
Usage: dust
|
||||
Usage: dust <dir>
|
||||
Usage: dust <dir> <another_dir> <and_more>
|
||||
Usage: dust -p <dir> (full-path - does not shorten the path of the subdirectories)
|
||||
Usage: dust -s <dir> (apparent-size - shows the length of the file as opposed to the amount of disk space it uses)
|
||||
Usage: dust -n 30 <dir> (Shows 30 directories not 20)
|
||||
Usage: dust -d 3 <dir> (Shows 3 levels of subdirectories)
|
||||
Usage: dust -r <dir> (Reverse order of output, with root at the lowest)
|
||||
Usage: dust -p (full-path - Show fullpath of the subdirectories)
|
||||
Usage: dust -s (apparent-size - shows the length of the file as opposed to the amount of disk space it uses)
|
||||
Usage: dust -n 30 (Shows 30 directories instead of the default [default is terminal height])
|
||||
Usage: dust -d 3 (Shows 3 levels of subdirectories)
|
||||
Usage: dust -D (Show only directories (eg dust -D))
|
||||
Usage: dust -F (Show only files - finds your largest files)
|
||||
Usage: dust -r (reverse order of output)
|
||||
Usage: dust -o si/b/kb/kib/mb/mib/gb/gib (si - prints sizes in powers of 1000. Others print size in that format).
|
||||
Usage: dust -X ignore (ignore all files and directories with the name 'ignore')
|
||||
Usage: dust -x (Only show directories on the same filesystem)
|
||||
Usage: dust -b (Do not show percentages or draw ASCII bars)
|
||||
Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen)
|
||||
Usage: dust -i (Do not show hidden files)
|
||||
Usage: dust -c (No colors [monochrome])
|
||||
Usage: dust -C (Force colors)
|
||||
Usage: dust -f (Count files instead of diskspace [Counts by inode, to include duplicate inodes use dust -f -s])
|
||||
Usage: dust -t (Group by filetype)
|
||||
Usage: dust -z 10M (min-size, Only include files larger than 10M)
|
||||
Usage: dust -e regex (Only include files matching this regex (eg dust -e "\.png$" would match png files))
|
||||
Usage: dust -v regex (Exclude files matching this regex (eg dust -v "\.png$" would ignore png files))
|
||||
Usage: dust -L (dereference-links - Treat sym links as directories and go into them)
|
||||
Usage: dust -P (Disable the progress indicator)
|
||||
Usage: dust -R (For screen readers. Removes bars/symbols. Adds new column: depth level. (May want to use -p for full path too))
|
||||
Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack overflow' (default allocation: low memory=1048576, high memory=1073741824)"),
|
||||
Usage: dust --skip-total (No total row will be displayed)
|
||||
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
|
||||
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
|
||||
Usage: dust --files0-from=FILE (Read NUL-terminated file paths from FILE; if FILE is '-', read from stdin)
|
||||
Usage: dust --files-from=FILE (Read newline-terminated file paths from FILE; if FILE is '-', read from stdin)
|
||||
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
|
||||
```
|
||||
|
||||
```
|
||||
djin:git/dust> dust
|
||||
1.2G target
|
||||
622M ├─┬ debug
|
||||
445M │ ├── deps
|
||||
70M │ ├── incremental
|
||||
56M │ └── build
|
||||
262M ├─┬ rls
|
||||
262M │ └─┬ debug
|
||||
203M │ ├── deps
|
||||
56M │ └── build
|
||||
165M ├─┬ package
|
||||
165M │ └─┬ du-dust-0.2.4
|
||||
165M │ └─┬ target
|
||||
165M │ └─┬ debug
|
||||
131M │ └── deps
|
||||
165M └─┬ release
|
||||
124M └── deps
|
||||
```
|
||||
## Config file
|
||||
|
||||
## Performance
|
||||
|
||||
Dust is currently about 4 times slower than du.
|
||||
Dust has a config file where the above options can be set.
|
||||
Either: `~/.config/dust/config.toml` or `~/.dust.toml`
|
||||
```
|
||||
$ cat ~/.config/dust/config.toml
|
||||
reverse=true
|
||||
```
|
||||
|
||||
## Alternatives
|
||||
|
||||
* [NCDU](https://dev.yorhel.nl/ncdu)
|
||||
* du -d 1 -h | sort -h
|
||||
- [NCDU](https://dev.yorhel.nl/ncdu)
|
||||
- [dutree](https://github.com/nachoparker/dutree)
|
||||
- [dua](https://github.com/Byron/dua-cli/)
|
||||
- [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
|
||||
- [dirstat-rs](https://github.com/scullionw/dirstat-rs)
|
||||
- `du -d 1 -h | sort -h`
|
||||
|
||||
Note: Apparent-size is calculated slightly differently in dust compared to gdu. In dust each hard link is counted as using file_length space. In gdu only the first entry is counted.
|
||||
|
||||
28
build.rs
Normal file
28
build.rs
Normal file
@@ -0,0 +1,28 @@
|
||||
use clap::CommandFactory;
|
||||
use clap_complete::{generate_to, shells::*};
|
||||
use clap_mangen::Man;
|
||||
use std::fs::File;
|
||||
use std::io::Error;
|
||||
use std::path::Path;
|
||||
|
||||
include!("src/cli.rs");
|
||||
|
||||
fn main() -> Result<(), Error> {
|
||||
let outdir = "completions";
|
||||
let app_name = "dust";
|
||||
let mut cmd = Cli::command();
|
||||
|
||||
generate_to(Bash, &mut cmd, app_name, outdir)?;
|
||||
generate_to(Zsh, &mut cmd, app_name, outdir)?;
|
||||
generate_to(Fish, &mut cmd, app_name, outdir)?;
|
||||
generate_to(PowerShell, &mut cmd, app_name, outdir)?;
|
||||
generate_to(Elvish, &mut cmd, app_name, outdir)?;
|
||||
|
||||
let file = Path::new("man-page").join("dust.1");
|
||||
std::fs::create_dir_all("man-page")?;
|
||||
let mut file = File::create(file)?;
|
||||
|
||||
Man::new(cmd).render(&mut file)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
21
ci/how2publish.txt
Normal file
21
ci/how2publish.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
# ----------- To do a release ---------
|
||||
|
||||
# ----------- Pre release ---------
|
||||
# Compare times of runs to check no drastic slow down:
|
||||
# hyperfine 'target/release/dust /home/andy'
|
||||
# hyperfine 'dust /home/andy'
|
||||
|
||||
# ----------- Release ---------
|
||||
# inc version in cargo.toml
|
||||
# cargo build --release
|
||||
# commit changed files
|
||||
# merge to master in github
|
||||
|
||||
# tag a commit and push (increment version in Cargo.toml first):
|
||||
# git tag v0.4.5
|
||||
# git push origin v0.4.5
|
||||
|
||||
# cargo publish to put it in crates.io
|
||||
|
||||
# Optional: To install locally
|
||||
# cargo install --path .
|
||||
128
completions/_dust
Normal file
128
completions/_dust
Normal file
@@ -0,0 +1,128 @@
|
||||
#compdef dust
|
||||
|
||||
autoload -U is-at-least
|
||||
|
||||
_dust() {
|
||||
typeset -A opt_args
|
||||
typeset -a _arguments_options
|
||||
local ret=1
|
||||
|
||||
if is-at-least 5.2; then
|
||||
_arguments_options=(-s -S -C)
|
||||
else
|
||||
_arguments_options=(-s -C)
|
||||
fi
|
||||
|
||||
local context curcontext="$curcontext" state line
|
||||
_arguments "${_arguments_options[@]}" : \
|
||||
'-d+[Depth to show]:DEPTH:_default' \
|
||||
'--depth=[Depth to show]:DEPTH:_default' \
|
||||
'-T+[Number of threads to use]:THREADS:_default' \
|
||||
'--threads=[Number of threads to use]:THREADS:_default' \
|
||||
'--config=[Specify a config file to use]:FILE:_files' \
|
||||
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
|
||||
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
|
||||
'*-X+[Exclude any file or directory with this path]:PATH:_files' \
|
||||
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \
|
||||
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
|
||||
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
|
||||
'-z+[Minimum size file to include in output]:MIN_SIZE:_default' \
|
||||
'--min-size=[Minimum size file to include in output]:MIN_SIZE:_default' \
|
||||
'(-e --filter -t --file-types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$"]:REGEX:_default' \
|
||||
'(-e --filter -t --file-types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$"]:REGEX:_default' \
|
||||
'(-t --file-types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$"]:REGEX:_default' \
|
||||
'(-t --file-types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$"]:REGEX:_default' \
|
||||
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
|
||||
'--terminal-width=[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
|
||||
'-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size]:FORMAT:((si\:"SI prefix (powers of 1000)"
|
||||
b\:"byte (B)"
|
||||
k\:"kibibyte (KiB)"
|
||||
m\:"mebibyte (MiB)"
|
||||
g\:"gibibyte (GiB)"
|
||||
t\:"tebibyte (TiB)"
|
||||
kb\:"kilobyte (kB)"
|
||||
mb\:"megabyte (MB)"
|
||||
gb\:"gigabyte (GB)"
|
||||
tb\:"terabyte (TB)"))' \
|
||||
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size]:FORMAT:((si\:"SI prefix (powers of 1000)"
|
||||
b\:"byte (B)"
|
||||
k\:"kibibyte (KiB)"
|
||||
m\:"mebibyte (MiB)"
|
||||
g\:"gibibyte (GiB)"
|
||||
t\:"tebibyte (TiB)"
|
||||
kb\:"kilobyte (kB)"
|
||||
mb\:"megabyte (MB)"
|
||||
gb\:"gigabyte (GB)"
|
||||
tb\:"terabyte (TB)"))' \
|
||||
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
|
||||
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
|
||||
'-M+[+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is +n => (−∞, curr−(n+1)), n => \[curr−(n+1), curr−n), and -n => (curr−n, +∞)]:MTIME:_default' \
|
||||
'--mtime=[+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is +n => (−∞, curr−(n+1)), n => \[curr−(n+1), curr−n), and -n => (curr−n, +∞)]:MTIME:_default' \
|
||||
'-A+[just like -mtime, but based on file access time]:ATIME:_default' \
|
||||
'--atime=[just like -mtime, but based on file access time]:ATIME:_default' \
|
||||
'-y+[just like -mtime, but based on file change time]:CTIME:_default' \
|
||||
'--ctime=[just like -mtime, but based on file change time]:CTIME:_default' \
|
||||
'(--files-from)--files0-from=[Read NUL-terminated paths from FILE (use \`-\` for stdin)]:FILES0_FROM:_files' \
|
||||
'(--files0-from)--files-from=[Read newline-terminated paths from FILE (use \`-\` for stdin)]:FILES_FROM:_files' \
|
||||
'*--collapse=[Keep these directories collapsed]:COLLAPSE:_files' \
|
||||
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
|
||||
c\:"last changed time"
|
||||
m\:"last modified time"))' \
|
||||
'--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
|
||||
c\:"last changed time"
|
||||
m\:"last modified time"))' \
|
||||
'-p[Subdirectories will not have their path shortened]' \
|
||||
'--full-paths[Subdirectories will not have their path shortened]' \
|
||||
'-L[dereference sym links - Treat sym links as directories and go into them]' \
|
||||
'--dereference-links[dereference sym links - Treat sym links as directories and go into them]' \
|
||||
'-x[Only count the files and directories on the same filesystem as the supplied directory]' \
|
||||
'--limit-filesystem[Only count the files and directories on the same filesystem as the supplied directory]' \
|
||||
'-s[Use file length instead of blocks]' \
|
||||
'--apparent-size[Use file length instead of blocks]' \
|
||||
'-r[Print tree upside down (biggest highest)]' \
|
||||
'--reverse[Print tree upside down (biggest highest)]' \
|
||||
'-c[No colors will be printed (Useful for commands like\: watch)]' \
|
||||
'--no-colors[No colors will be printed (Useful for commands like\: watch)]' \
|
||||
'-C[Force colors print]' \
|
||||
'--force-colors[Force colors print]' \
|
||||
'-b[No percent bars or percentages will be displayed]' \
|
||||
'--no-percent-bars[No percent bars or percentages will be displayed]' \
|
||||
'-B[percent bars moved to right side of screen]' \
|
||||
'--bars-on-right[percent bars moved to right side of screen]' \
|
||||
'-R[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
|
||||
'--screen-reader[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
|
||||
'--skip-total[No total row will be displayed]' \
|
||||
'-f[Directory '\''size'\'' is number of child files instead of disk size]' \
|
||||
'--filecount[Directory '\''size'\'' is number of child files instead of disk size]' \
|
||||
'-i[Do not display hidden files]' \
|
||||
'--ignore-hidden[Do not display hidden files]' \
|
||||
'(-d --depth -D --only-dir)-t[show only these file types]' \
|
||||
'(-d --depth -D --only-dir)--file-types[show only these file types]' \
|
||||
'-P[Disable the progress indication]' \
|
||||
'--no-progress[Disable the progress indication]' \
|
||||
'--print-errors[Print path with errors]' \
|
||||
'(-F --only-file -t --file-types)-D[Only directories will be displayed]' \
|
||||
'(-F --only-file -t --file-types)--only-dir[Only directories will be displayed]' \
|
||||
'(-D --only-dir)-F[Only files will be displayed. (Finds your largest files)]' \
|
||||
'(-D --only-dir)--only-file[Only files will be displayed. (Finds your largest files)]' \
|
||||
'-j[Output the directory tree as json to the current directory]' \
|
||||
'--output-json[Output the directory tree as json to the current directory]' \
|
||||
'-h[Print help (see more with '\''--help'\'')]' \
|
||||
'--help[Print help (see more with '\''--help'\'')]' \
|
||||
'-V[Print version]' \
|
||||
'--version[Print version]' \
|
||||
'*::params -- Input files or directories:_files' \
|
||||
&& ret=0
|
||||
}
|
||||
|
||||
(( $+functions[_dust_commands] )) ||
|
||||
_dust_commands() {
|
||||
local commands; commands=()
|
||||
_describe -t commands 'dust commands' commands "$@"
|
||||
}
|
||||
|
||||
if [ "$funcstack[1]" = "_dust" ]; then
|
||||
_dust "$@"
|
||||
else
|
||||
compdef _dust dust
|
||||
fi
|
||||
104
completions/_dust.ps1
Normal file
104
completions/_dust.ps1
Normal file
@@ -0,0 +1,104 @@
|
||||
|
||||
using namespace System.Management.Automation
|
||||
using namespace System.Management.Automation.Language
|
||||
|
||||
Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
|
||||
param($wordToComplete, $commandAst, $cursorPosition)
|
||||
|
||||
$commandElements = $commandAst.CommandElements
|
||||
$command = @(
|
||||
'dust'
|
||||
for ($i = 1; $i -lt $commandElements.Count; $i++) {
|
||||
$element = $commandElements[$i]
|
||||
if ($element -isnot [StringConstantExpressionAst] -or
|
||||
$element.StringConstantType -ne [StringConstantType]::BareWord -or
|
||||
$element.Value.StartsWith('-') -or
|
||||
$element.Value -eq $wordToComplete) {
|
||||
break
|
||||
}
|
||||
$element.Value
|
||||
}) -join ';'
|
||||
|
||||
$completions = @(switch ($command) {
|
||||
'dust' {
|
||||
[CompletionResult]::new('-d', '-d', [CompletionResultType]::ParameterName, 'Depth to show')
|
||||
[CompletionResult]::new('--depth', '--depth', [CompletionResultType]::ParameterName, 'Depth to show')
|
||||
[CompletionResult]::new('-T', '-T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
|
||||
[CompletionResult]::new('--threads', '--threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
|
||||
[CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'Specify a config file to use')
|
||||
[CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
|
||||
[CompletionResult]::new('--number-of-lines', '--number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
|
||||
[CompletionResult]::new('-X', '-X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
|
||||
[CompletionResult]::new('--ignore-directory', '--ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
|
||||
[CompletionResult]::new('-I', '-I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
|
||||
[CompletionResult]::new('--ignore-all-in-file', '--ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
|
||||
[CompletionResult]::new('-z', '-z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
|
||||
[CompletionResult]::new('--min-size', '--min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
|
||||
[CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"')
|
||||
[CompletionResult]::new('--invert-filter', '--invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"')
|
||||
[CompletionResult]::new('-e', '-e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$"')
|
||||
[CompletionResult]::new('--filter', '--filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$"')
|
||||
[CompletionResult]::new('-w', '-w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
|
||||
[CompletionResult]::new('--terminal-width', '--terminal-width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
|
||||
[CompletionResult]::new('-o', '-o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size')
|
||||
[CompletionResult]::new('--output-format', '--output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size')
|
||||
[CompletionResult]::new('-S', '-S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
|
||||
[CompletionResult]::new('--stack-size', '--stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
|
||||
[CompletionResult]::new('-M', '-M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)')
|
||||
[CompletionResult]::new('--mtime', '--mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)')
|
||||
[CompletionResult]::new('-A', '-A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
|
||||
[CompletionResult]::new('--atime', '--atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
|
||||
[CompletionResult]::new('-y', '-y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
|
||||
[CompletionResult]::new('--ctime', '--ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
|
||||
[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'Read NUL-terminated paths from FILE (use `-` for stdin)')
|
||||
[CompletionResult]::new('--files-from', '--files-from', [CompletionResultType]::ParameterName, 'Read newline-terminated paths from FILE (use `-` for stdin)')
|
||||
[CompletionResult]::new('--collapse', '--collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
|
||||
[CompletionResult]::new('-m', '-m', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
|
||||
[CompletionResult]::new('--filetime', '--filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
|
||||
[CompletionResult]::new('-p', '-p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
|
||||
[CompletionResult]::new('--full-paths', '--full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
|
||||
[CompletionResult]::new('-L', '-L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
|
||||
[CompletionResult]::new('--dereference-links', '--dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
|
||||
[CompletionResult]::new('-x', '-x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
|
||||
[CompletionResult]::new('--limit-filesystem', '--limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
|
||||
[CompletionResult]::new('-s', '-s', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
|
||||
[CompletionResult]::new('--apparent-size', '--apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
|
||||
[CompletionResult]::new('-r', '-r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
|
||||
[CompletionResult]::new('--reverse', '--reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
|
||||
[CompletionResult]::new('-c', '-c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
|
||||
[CompletionResult]::new('--no-colors', '--no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
|
||||
[CompletionResult]::new('-C', '-C ', [CompletionResultType]::ParameterName, 'Force colors print')
|
||||
[CompletionResult]::new('--force-colors', '--force-colors', [CompletionResultType]::ParameterName, 'Force colors print')
|
||||
[CompletionResult]::new('-b', '-b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
|
||||
[CompletionResult]::new('--no-percent-bars', '--no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
|
||||
[CompletionResult]::new('-B', '-B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
|
||||
[CompletionResult]::new('--bars-on-right', '--bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
|
||||
[CompletionResult]::new('-R', '-R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
|
||||
[CompletionResult]::new('--screen-reader', '--screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
|
||||
[CompletionResult]::new('--skip-total', '--skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
|
||||
[CompletionResult]::new('-f', '-f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
|
||||
[CompletionResult]::new('--filecount', '--filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
|
||||
[CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
|
||||
[CompletionResult]::new('--ignore-hidden', '--ignore-hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
|
||||
[CompletionResult]::new('-t', '-t', [CompletionResultType]::ParameterName, 'show only these file types')
|
||||
[CompletionResult]::new('--file-types', '--file-types', [CompletionResultType]::ParameterName, 'show only these file types')
|
||||
[CompletionResult]::new('-P', '-P ', [CompletionResultType]::ParameterName, 'Disable the progress indication')
|
||||
[CompletionResult]::new('--no-progress', '--no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication')
|
||||
[CompletionResult]::new('--print-errors', '--print-errors', [CompletionResultType]::ParameterName, 'Print path with errors')
|
||||
[CompletionResult]::new('-D', '-D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed')
|
||||
[CompletionResult]::new('--only-dir', '--only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed')
|
||||
[CompletionResult]::new('-F', '-F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
|
||||
[CompletionResult]::new('--only-file', '--only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
|
||||
[CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
|
||||
[CompletionResult]::new('--output-json', '--output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
|
||||
[CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
|
||||
[CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
|
||||
[CompletionResult]::new('-V', '-V ', [CompletionResultType]::ParameterName, 'Print version')
|
||||
[CompletionResult]::new('--version', '--version', [CompletionResultType]::ParameterName, 'Print version')
|
||||
break
|
||||
}
|
||||
})
|
||||
|
||||
$completions.Where{ $_.CompletionText -like "$wordToComplete*" } |
|
||||
Sort-Object -Property ListItemText
|
||||
}
|
||||
215
completions/dust.bash
Normal file
215
completions/dust.bash
Normal file
@@ -0,0 +1,215 @@
|
||||
_dust() {
|
||||
local i cur prev opts cmd
|
||||
COMPREPLY=()
|
||||
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
|
||||
cur="$2"
|
||||
else
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
fi
|
||||
prev="$3"
|
||||
cmd=""
|
||||
opts=""
|
||||
|
||||
for i in "${COMP_WORDS[@]:0:COMP_CWORD}"
|
||||
do
|
||||
case "${cmd},${i}" in
|
||||
",$1")
|
||||
cmd="dust"
|
||||
;;
|
||||
*)
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
case "${cmd}" in
|
||||
dust)
|
||||
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore-hidden --invert-filter --filter --file-types --terminal-width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --files-from --collapse --filetime --help --version [PATH]..."
|
||||
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
|
||||
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
|
||||
return 0
|
||||
fi
|
||||
case "${prev}" in
|
||||
--depth)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-d)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--threads)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-T)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--config)
|
||||
local oldifs
|
||||
if [ -n "${IFS+x}" ]; then
|
||||
oldifs="$IFS"
|
||||
fi
|
||||
IFS=$'\n'
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
if [ -n "${oldifs+x}" ]; then
|
||||
IFS="$oldifs"
|
||||
fi
|
||||
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
|
||||
compopt -o filenames
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
--number-of-lines)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-n)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--ignore-directory)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-X)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--ignore-all-in-file)
|
||||
local oldifs
|
||||
if [ -n "${IFS+x}" ]; then
|
||||
oldifs="$IFS"
|
||||
fi
|
||||
IFS=$'\n'
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
if [ -n "${oldifs+x}" ]; then
|
||||
IFS="$oldifs"
|
||||
fi
|
||||
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
|
||||
compopt -o filenames
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
-I)
|
||||
local oldifs
|
||||
if [ -n "${IFS+x}" ]; then
|
||||
oldifs="$IFS"
|
||||
fi
|
||||
IFS=$'\n'
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
if [ -n "${oldifs+x}" ]; then
|
||||
IFS="$oldifs"
|
||||
fi
|
||||
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
|
||||
compopt -o filenames
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
--min-size)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-z)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--invert-filter)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-v)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--filter)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-e)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--terminal-width)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-w)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--output-format)
|
||||
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-o)
|
||||
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--stack-size)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-S)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--mtime)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-M)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--atime)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-A)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--ctime)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-y)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--files0-from)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--files-from)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--collapse)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--filetime)
|
||||
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-m)
|
||||
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=()
|
||||
;;
|
||||
esac
|
||||
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
if [[ "${BASH_VERSINFO[0]}" -eq 4 && "${BASH_VERSINFO[1]}" -ge 4 || "${BASH_VERSINFO[0]}" -gt 4 ]]; then
|
||||
complete -F _dust -o nosort -o bashdefault -o default dust
|
||||
else
|
||||
complete -F _dust -o bashdefault -o default dust
|
||||
fi
|
||||
98
completions/dust.elv
Normal file
98
completions/dust.elv
Normal file
@@ -0,0 +1,98 @@
|
||||
|
||||
use builtin;
|
||||
use str;
|
||||
|
||||
set edit:completion:arg-completer[dust] = {|@words|
|
||||
fn spaces {|n|
|
||||
builtin:repeat $n ' ' | str:join ''
|
||||
}
|
||||
fn cand {|text desc|
|
||||
edit:complex-candidate $text &display=$text' '(spaces (- 14 (wcswidth $text)))$desc
|
||||
}
|
||||
var command = 'dust'
|
||||
for word $words[1..-1] {
|
||||
if (str:has-prefix $word '-') {
|
||||
break
|
||||
}
|
||||
set command = $command';'$word
|
||||
}
|
||||
var completions = [
|
||||
&'dust'= {
|
||||
cand -d 'Depth to show'
|
||||
cand --depth 'Depth to show'
|
||||
cand -T 'Number of threads to use'
|
||||
cand --threads 'Number of threads to use'
|
||||
cand --config 'Specify a config file to use'
|
||||
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
|
||||
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
|
||||
cand -X 'Exclude any file or directory with this path'
|
||||
cand --ignore-directory 'Exclude any file or directory with this path'
|
||||
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
|
||||
cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
|
||||
cand -z 'Minimum size file to include in output'
|
||||
cand --min-size 'Minimum size file to include in output'
|
||||
cand -v 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"'
|
||||
cand --invert-filter 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"'
|
||||
cand -e 'Only include filepaths matching this regex. For png files type: -e "\.png$"'
|
||||
cand --filter 'Only include filepaths matching this regex. For png files type: -e "\.png$"'
|
||||
cand -w 'Specify width of output overriding the auto detection of terminal width'
|
||||
cand --terminal-width 'Specify width of output overriding the auto detection of terminal width'
|
||||
cand -o 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size'
|
||||
cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size'
|
||||
cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
|
||||
cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
|
||||
cand -M '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)'
|
||||
cand --mtime '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)'
|
||||
cand -A 'just like -mtime, but based on file access time'
|
||||
cand --atime 'just like -mtime, but based on file access time'
|
||||
cand -y 'just like -mtime, but based on file change time'
|
||||
cand --ctime 'just like -mtime, but based on file change time'
|
||||
cand --files0-from 'Read NUL-terminated paths from FILE (use `-` for stdin)'
|
||||
cand --files-from 'Read newline-terminated paths from FILE (use `-` for stdin)'
|
||||
cand --collapse 'Keep these directories collapsed'
|
||||
cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
|
||||
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
|
||||
cand -p 'Subdirectories will not have their path shortened'
|
||||
cand --full-paths 'Subdirectories will not have their path shortened'
|
||||
cand -L 'dereference sym links - Treat sym links as directories and go into them'
|
||||
cand --dereference-links 'dereference sym links - Treat sym links as directories and go into them'
|
||||
cand -x 'Only count the files and directories on the same filesystem as the supplied directory'
|
||||
cand --limit-filesystem 'Only count the files and directories on the same filesystem as the supplied directory'
|
||||
cand -s 'Use file length instead of blocks'
|
||||
cand --apparent-size 'Use file length instead of blocks'
|
||||
cand -r 'Print tree upside down (biggest highest)'
|
||||
cand --reverse 'Print tree upside down (biggest highest)'
|
||||
cand -c 'No colors will be printed (Useful for commands like: watch)'
|
||||
cand --no-colors 'No colors will be printed (Useful for commands like: watch)'
|
||||
cand -C 'Force colors print'
|
||||
cand --force-colors 'Force colors print'
|
||||
cand -b 'No percent bars or percentages will be displayed'
|
||||
cand --no-percent-bars 'No percent bars or percentages will be displayed'
|
||||
cand -B 'percent bars moved to right side of screen'
|
||||
cand --bars-on-right 'percent bars moved to right side of screen'
|
||||
cand -R 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
|
||||
cand --screen-reader 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
|
||||
cand --skip-total 'No total row will be displayed'
|
||||
cand -f 'Directory ''size'' is number of child files instead of disk size'
|
||||
cand --filecount 'Directory ''size'' is number of child files instead of disk size'
|
||||
cand -i 'Do not display hidden files'
|
||||
cand --ignore-hidden 'Do not display hidden files'
|
||||
cand -t 'show only these file types'
|
||||
cand --file-types 'show only these file types'
|
||||
cand -P 'Disable the progress indication'
|
||||
cand --no-progress 'Disable the progress indication'
|
||||
cand --print-errors 'Print path with errors'
|
||||
cand -D 'Only directories will be displayed'
|
||||
cand --only-dir 'Only directories will be displayed'
|
||||
cand -F 'Only files will be displayed. (Finds your largest files)'
|
||||
cand --only-file 'Only files will be displayed. (Finds your largest files)'
|
||||
cand -j 'Output the directory tree as json to the current directory'
|
||||
cand --output-json 'Output the directory tree as json to the current directory'
|
||||
cand -h 'Print help (see more with ''--help'')'
|
||||
cand --help 'Print help (see more with ''--help'')'
|
||||
cand -V 'Print version'
|
||||
cand --version 'Print version'
|
||||
}
|
||||
]
|
||||
$completions[$command]
|
||||
}
|
||||
51
completions/dust.fish
Normal file
51
completions/dust.fish
Normal file
@@ -0,0 +1,51 @@
|
||||
complete -c dust -s d -l depth -d 'Depth to show' -r
|
||||
complete -c dust -s T -l threads -d 'Number of threads to use' -r
|
||||
complete -c dust -l config -d 'Specify a config file to use' -r -F
|
||||
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
|
||||
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
|
||||
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
|
||||
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
|
||||
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$"' -r
|
||||
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$"' -r
|
||||
complete -c dust -s w -l terminal-width -d 'Specify width of output overriding the auto detection of terminal width' -r
|
||||
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size' -r -f -a "si\t'SI prefix (powers of 1000)'
|
||||
b\t'byte (B)'
|
||||
k\t'kibibyte (KiB)'
|
||||
m\t'mebibyte (MiB)'
|
||||
g\t'gibibyte (GiB)'
|
||||
t\t'tebibyte (TiB)'
|
||||
kb\t'kilobyte (kB)'
|
||||
mb\t'megabyte (MB)'
|
||||
gb\t'gigabyte (GB)'
|
||||
tb\t'terabyte (TB)'"
|
||||
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
|
||||
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)' -r
|
||||
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
|
||||
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
|
||||
complete -c dust -l files0-from -d 'Read NUL-terminated paths from FILE (use `-` for stdin)' -r -F
|
||||
complete -c dust -l files-from -d 'Read newline-terminated paths from FILE (use `-` for stdin)' -r -F
|
||||
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
|
||||
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "a\t'last accessed time'
|
||||
c\t'last changed time'
|
||||
m\t'last modified time'"
|
||||
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
|
||||
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
|
||||
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'
|
||||
complete -c dust -s s -l apparent-size -d 'Use file length instead of blocks'
|
||||
complete -c dust -s r -l reverse -d 'Print tree upside down (biggest highest)'
|
||||
complete -c dust -s c -l no-colors -d 'No colors will be printed (Useful for commands like: watch)'
|
||||
complete -c dust -s C -l force-colors -d 'Force colors print'
|
||||
complete -c dust -s b -l no-percent-bars -d 'No percent bars or percentages will be displayed'
|
||||
complete -c dust -s B -l bars-on-right -d 'percent bars moved to right side of screen'
|
||||
complete -c dust -s R -l screen-reader -d 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
|
||||
complete -c dust -l skip-total -d 'No total row will be displayed'
|
||||
complete -c dust -s f -l filecount -d 'Directory \'size\' is number of child files instead of disk size'
|
||||
complete -c dust -s i -l ignore-hidden -d 'Do not display hidden files'
|
||||
complete -c dust -s t -l file-types -d 'show only these file types'
|
||||
complete -c dust -s P -l no-progress -d 'Disable the progress indication'
|
||||
complete -c dust -l print-errors -d 'Print path with errors'
|
||||
complete -c dust -s D -l only-dir -d 'Only directories will be displayed'
|
||||
complete -c dust -s F -l only-file -d 'Only files will be displayed. (Finds your largest files)'
|
||||
complete -c dust -s j -l output-json -d 'Output the directory tree as json to the current directory'
|
||||
complete -c dust -s h -l help -d 'Print help (see more with \'--help\')'
|
||||
complete -c dust -s V -l version -d 'Print version'
|
||||
30
config/config.toml
Normal file
30
config/config.toml
Normal file
@@ -0,0 +1,30 @@
|
||||
# Sample Config file, works with toml and yaml
|
||||
# Place in either:
|
||||
# ~/.config/dust/config.toml
|
||||
# ~/.dust.toml
|
||||
|
||||
# Print tree upside down (biggest highest)
|
||||
reverse=true
|
||||
|
||||
# Subdirectories will not have their path shortened
|
||||
display-full-paths=true
|
||||
|
||||
# Use file length instead of blocks
|
||||
display-apparent-size=true
|
||||
|
||||
# No colors will be printed
|
||||
no-colors=true
|
||||
|
||||
# No percent bars or percentages will be displayed
|
||||
no-bars=true
|
||||
|
||||
# No total row will be displayed
|
||||
skip-total=true
|
||||
|
||||
# Do not display hidden files
|
||||
ignore-hidden=true
|
||||
|
||||
# print sizes in powers of 1000 (e.g., 1.1G)
|
||||
output-format="si"
|
||||
|
||||
number-of-lines=5
|
||||
173
man-page/dust.1
Normal file
173
man-page/dust.1
Normal file
@@ -0,0 +1,173 @@
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.TH Dust 1 "Dust 1.2.3"
|
||||
.SH NAME
|
||||
Dust \- Like du but more intuitive
|
||||
.SH SYNOPSIS
|
||||
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore\-hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file\-types\fR] [\fB\-w\fR|\fB\-\-terminal\-width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-files\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
|
||||
.SH DESCRIPTION
|
||||
Like du but more intuitive
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-depth\fR \fI<DEPTH>\fR
|
||||
Depth to show
|
||||
.TP
|
||||
\fB\-T\fR, \fB\-\-threads\fR \fI<THREADS>\fR
|
||||
Number of threads to use
|
||||
.TP
|
||||
\fB\-\-config\fR \fI<FILE>\fR
|
||||
Specify a config file to use
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-number\-of\-lines\fR \fI<NUMBER>\fR
|
||||
Number of lines of output to show. (Default is terminal_height \- 10)
|
||||
.TP
|
||||
\fB\-p\fR, \fB\-\-full\-paths\fR
|
||||
Subdirectories will not have their path shortened
|
||||
.TP
|
||||
\fB\-X\fR, \fB\-\-ignore\-directory\fR \fI<PATH>\fR
|
||||
Exclude any file or directory with this path
|
||||
.TP
|
||||
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR \fI<FILE>\fR
|
||||
Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
|
||||
.TP
|
||||
\fB\-L\fR, \fB\-\-dereference\-links\fR
|
||||
dereference sym links \- Treat sym links as directories and go into them
|
||||
.TP
|
||||
\fB\-x\fR, \fB\-\-limit\-filesystem\fR
|
||||
Only count the files and directories on the same filesystem as the supplied directory
|
||||
.TP
|
||||
\fB\-s\fR, \fB\-\-apparent\-size\fR
|
||||
Use file length instead of blocks
|
||||
.TP
|
||||
\fB\-r\fR, \fB\-\-reverse\fR
|
||||
Print tree upside down (biggest highest)
|
||||
.TP
|
||||
\fB\-c\fR, \fB\-\-no\-colors\fR
|
||||
No colors will be printed (Useful for commands like: watch)
|
||||
.TP
|
||||
\fB\-C\fR, \fB\-\-force\-colors\fR
|
||||
Force colors print
|
||||
.TP
|
||||
\fB\-b\fR, \fB\-\-no\-percent\-bars\fR
|
||||
No percent bars or percentages will be displayed
|
||||
.TP
|
||||
\fB\-B\fR, \fB\-\-bars\-on\-right\fR
|
||||
percent bars moved to right side of screen
|
||||
.TP
|
||||
\fB\-z\fR, \fB\-\-min\-size\fR \fI<MIN_SIZE>\fR
|
||||
Minimum size file to include in output
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-screen\-reader\fR
|
||||
For screen readers. Removes bars. Adds new column: depth level (May want to use \-p too for full path)
|
||||
.TP
|
||||
\fB\-\-skip\-total\fR
|
||||
No total row will be displayed
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-filecount\fR
|
||||
Directory \*(Aqsize\*(Aq is number of child files instead of disk size
|
||||
.TP
|
||||
\fB\-i\fR, \fB\-\-ignore\-hidden\fR
|
||||
Do not display hidden files
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-invert\-filter\fR \fI<REGEX>\fR
|
||||
Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$"
|
||||
.TP
|
||||
\fB\-e\fR, \fB\-\-filter\fR \fI<REGEX>\fR
|
||||
Only include filepaths matching this regex. For png files type: \-e "\\.png$"
|
||||
.TP
|
||||
\fB\-t\fR, \fB\-\-file\-types\fR
|
||||
show only these file types
|
||||
.TP
|
||||
\fB\-w\fR, \fB\-\-terminal\-width\fR \fI<WIDTH>\fR
|
||||
Specify width of output overriding the auto detection of terminal width
|
||||
.TP
|
||||
\fB\-P\fR, \fB\-\-no\-progress\fR
|
||||
Disable the progress indication
|
||||
.TP
|
||||
\fB\-\-print\-errors\fR
|
||||
Print path with errors
|
||||
.TP
|
||||
\fB\-D\fR, \fB\-\-only\-dir\fR
|
||||
Only directories will be displayed
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-only\-file\fR
|
||||
Only files will be displayed. (Finds your largest files)
|
||||
.TP
|
||||
\fB\-o\fR, \fB\-\-output\-format\fR \fI<FORMAT>\fR
|
||||
Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size
|
||||
.br
|
||||
|
||||
.br
|
||||
\fIPossible values:\fR
|
||||
.RS 14
|
||||
.IP \(bu 2
|
||||
si: SI prefix (powers of 1000)
|
||||
.IP \(bu 2
|
||||
b: byte (B)
|
||||
.IP \(bu 2
|
||||
k: kibibyte (KiB)
|
||||
.IP \(bu 2
|
||||
m: mebibyte (MiB)
|
||||
.IP \(bu 2
|
||||
g: gibibyte (GiB)
|
||||
.IP \(bu 2
|
||||
t: tebibyte (TiB)
|
||||
.IP \(bu 2
|
||||
kb: kilobyte (kB)
|
||||
.IP \(bu 2
|
||||
mb: megabyte (MB)
|
||||
.IP \(bu 2
|
||||
gb: gigabyte (GB)
|
||||
.IP \(bu 2
|
||||
tb: terabyte (TB)
|
||||
.RE
|
||||
.TP
|
||||
\fB\-S\fR, \fB\-\-stack\-size\fR \fI<STACK_SIZE>\fR
|
||||
Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824)
|
||||
.TP
|
||||
\fB\-j\fR, \fB\-\-output\-json\fR
|
||||
Output the directory tree as json to the current directory
|
||||
.TP
|
||||
\fB\-M\fR, \fB\-\-mtime\fR \fI<MTIME>\fR
|
||||
+/\-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and \-n => (𝑐𝑢𝑟𝑟−𝑛, +∞)
|
||||
.TP
|
||||
\fB\-A\fR, \fB\-\-atime\fR \fI<ATIME>\fR
|
||||
just like \-mtime, but based on file access time
|
||||
.TP
|
||||
\fB\-y\fR, \fB\-\-ctime\fR \fI<CTIME>\fR
|
||||
just like \-mtime, but based on file change time
|
||||
.TP
|
||||
\fB\-\-files0\-from\fR \fI<FILES0_FROM>\fR
|
||||
Read NUL\-terminated paths from FILE (use `\-` for stdin)
|
||||
.TP
|
||||
\fB\-\-files\-from\fR \fI<FILES_FROM>\fR
|
||||
Read newline\-terminated paths from FILE (use `\-` for stdin)
|
||||
.TP
|
||||
\fB\-\-collapse\fR \fI<COLLAPSE>\fR
|
||||
Keep these directories collapsed
|
||||
.TP
|
||||
\fB\-m\fR, \fB\-\-filetime\fR \fI<FILETIME>\fR
|
||||
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
|
||||
.br
|
||||
|
||||
.br
|
||||
\fIPossible values:\fR
|
||||
.RS 14
|
||||
.IP \(bu 2
|
||||
a: last accessed time
|
||||
.IP \(bu 2
|
||||
c: last changed time
|
||||
.IP \(bu 2
|
||||
m: last modified time
|
||||
.RE
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
Print help (see a summary with \*(Aq\-h\*(Aq)
|
||||
.TP
|
||||
\fB\-V\fR, \fB\-\-version\fR
|
||||
Print version
|
||||
.TP
|
||||
[\fIPATH\fR]
|
||||
Input files or directories
|
||||
.SH VERSION
|
||||
v1.2.3
|
||||
BIN
media/snap.png
Normal file
BIN
media/snap.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
261
src/cli.rs
Normal file
261
src/cli.rs
Normal file
@@ -0,0 +1,261 @@
|
||||
use std::fmt;
|
||||
|
||||
use clap::{Parser, ValueEnum, ValueHint};
|
||||
|
||||
// For single thread mode set this variable on your command line:
|
||||
// export RAYON_NUM_THREADS=1
|
||||
|
||||
/// Like du but more intuitive
|
||||
#[derive(Debug, Parser)]
|
||||
#[command(name("Dust"), version)]
|
||||
pub struct Cli {
|
||||
/// Depth to show
|
||||
#[arg(short, long)]
|
||||
pub depth: Option<usize>,
|
||||
|
||||
/// Number of threads to use
|
||||
#[arg(short('T'), long)]
|
||||
pub threads: Option<usize>,
|
||||
|
||||
/// Specify a config file to use
|
||||
#[arg(long, value_name("FILE"), value_hint(ValueHint::FilePath))]
|
||||
pub config: Option<String>,
|
||||
|
||||
/// Number of lines of output to show. (Default is terminal_height - 10)
|
||||
#[arg(short, long, value_name("NUMBER"))]
|
||||
pub number_of_lines: Option<usize>,
|
||||
|
||||
/// Subdirectories will not have their path shortened
|
||||
#[arg(short('p'), long)]
|
||||
pub full_paths: bool,
|
||||
|
||||
/// Exclude any file or directory with this path
|
||||
#[arg(short('X'), long, value_name("PATH"), value_hint(ValueHint::AnyPath))]
|
||||
pub ignore_directory: Option<Vec<String>>,
|
||||
|
||||
/// Exclude any file or directory with a regex matching that listed in this
|
||||
/// file, the file entries will be added to the ignore regexs provided by
|
||||
/// --invert_filter
|
||||
#[arg(short('I'), long, value_name("FILE"), value_hint(ValueHint::FilePath))]
|
||||
pub ignore_all_in_file: Option<String>,
|
||||
|
||||
/// dereference sym links - Treat sym links as directories and go into them
|
||||
#[arg(short('L'), long)]
|
||||
pub dereference_links: bool,
|
||||
|
||||
/// Only count the files and directories on the same filesystem as the
|
||||
/// supplied directory
|
||||
#[arg(short('x'), long)]
|
||||
pub limit_filesystem: bool,
|
||||
|
||||
/// Use file length instead of blocks
|
||||
#[arg(short('s'), long)]
|
||||
pub apparent_size: bool,
|
||||
|
||||
/// Print tree upside down (biggest highest)
|
||||
#[arg(short, long)]
|
||||
pub reverse: bool,
|
||||
|
||||
/// No colors will be printed (Useful for commands like: watch)
|
||||
#[arg(short('c'), long)]
|
||||
pub no_colors: bool,
|
||||
|
||||
/// Force colors print
|
||||
#[arg(short('C'), long)]
|
||||
pub force_colors: bool,
|
||||
|
||||
/// No percent bars or percentages will be displayed
|
||||
#[arg(short('b'), long)]
|
||||
pub no_percent_bars: bool,
|
||||
|
||||
/// percent bars moved to right side of screen
|
||||
#[arg(short('B'), long)]
|
||||
pub bars_on_right: bool,
|
||||
|
||||
/// Minimum size file to include in output
|
||||
#[arg(short('z'), long)]
|
||||
pub min_size: Option<String>,
|
||||
|
||||
/// For screen readers. Removes bars. Adds new column: depth level (May want
|
||||
/// to use -p too for full path)
|
||||
#[arg(short('R'), long)]
|
||||
pub screen_reader: bool,
|
||||
|
||||
/// No total row will be displayed
|
||||
#[arg(long)]
|
||||
pub skip_total: bool,
|
||||
|
||||
/// Directory 'size' is number of child files instead of disk size
|
||||
#[arg(short, long)]
|
||||
pub filecount: bool,
|
||||
|
||||
/// Do not display hidden files
|
||||
// Do not use 'h' this is used by 'help'
|
||||
#[arg(short, long)]
|
||||
pub ignore_hidden: bool,
|
||||
|
||||
/// Exclude filepaths matching this regex. To ignore png files type: -v
|
||||
/// "\.png$"
|
||||
#[arg(
|
||||
short('v'),
|
||||
long,
|
||||
value_name("REGEX"),
|
||||
conflicts_with("filter"),
|
||||
conflicts_with("file_types")
|
||||
)]
|
||||
pub invert_filter: Option<Vec<String>>,
|
||||
|
||||
/// Only include filepaths matching this regex. For png files type: -e
|
||||
/// "\.png$"
|
||||
#[arg(short('e'), long, value_name("REGEX"), conflicts_with("file_types"))]
|
||||
pub filter: Option<Vec<String>>,
|
||||
|
||||
/// show only these file types
|
||||
#[arg(short('t'), long, conflicts_with("depth"), conflicts_with("only_dir"))]
|
||||
pub file_types: bool,
|
||||
|
||||
/// Specify width of output overriding the auto detection of terminal width
|
||||
#[arg(short('w'), long, value_name("WIDTH"))]
|
||||
pub terminal_width: Option<usize>,
|
||||
|
||||
/// Disable the progress indication.
|
||||
#[arg(short('P'), long)]
|
||||
pub no_progress: bool,
|
||||
|
||||
/// Print path with errors.
|
||||
#[arg(long)]
|
||||
pub print_errors: bool,
|
||||
|
||||
/// Only directories will be displayed.
|
||||
#[arg(
|
||||
short('D'),
|
||||
long,
|
||||
conflicts_with("only_file"),
|
||||
conflicts_with("file_types")
|
||||
)]
|
||||
pub only_dir: bool,
|
||||
|
||||
/// Only files will be displayed. (Finds your largest files)
|
||||
#[arg(short('F'), long, conflicts_with("only_dir"))]
|
||||
pub only_file: bool,
|
||||
|
||||
/// Changes output display size. si will print sizes in powers of 1000. b k
|
||||
/// m g t kb mb gb tb will print the whole tree in that size.
|
||||
#[arg(short, long, value_enum, value_name("FORMAT"), ignore_case(true))]
|
||||
pub output_format: Option<OutputFormat>,
|
||||
|
||||
/// Specify memory to use as stack size - use if you see: 'fatal runtime
|
||||
/// error: stack overflow' (default low memory=1048576, high
|
||||
/// memory=1073741824)
|
||||
#[arg(short('S'), long)]
|
||||
pub stack_size: Option<usize>,
|
||||
|
||||
/// Input files or directories.
|
||||
#[arg(value_name("PATH"), value_hint(ValueHint::AnyPath))]
|
||||
pub params: Option<Vec<String>>,
|
||||
|
||||
/// Output the directory tree as json to the current directory
|
||||
#[arg(short('j'), long)]
|
||||
pub output_json: bool,
|
||||
|
||||
/// +/-n matches files modified more/less than n days ago , and n matches
|
||||
/// files modified exactly n days ago, days are rounded down.That is +n =>
|
||||
/// (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)
|
||||
#[arg(short('M'), long, allow_hyphen_values(true))]
|
||||
pub mtime: Option<String>,
|
||||
|
||||
/// just like -mtime, but based on file access time
|
||||
#[arg(short('A'), long, allow_hyphen_values(true))]
|
||||
pub atime: Option<String>,
|
||||
|
||||
/// just like -mtime, but based on file change time
|
||||
#[arg(short('y'), long, allow_hyphen_values(true))]
|
||||
pub ctime: Option<String>,
|
||||
|
||||
/// Read NUL-terminated paths from FILE (use `-` for stdin).
|
||||
#[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files_from"))]
|
||||
pub files0_from: Option<String>,
|
||||
|
||||
/// Read newline-terminated paths from FILE (use `-` for stdin).
|
||||
#[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files0_from"))]
|
||||
pub files_from: Option<String>,
|
||||
|
||||
/// Keep these directories collapsed
|
||||
#[arg(long, value_hint(ValueHint::AnyPath))]
|
||||
pub collapse: Option<Vec<String>>,
|
||||
|
||||
/// Directory 'size' is max filetime of child files instead of disk size.
|
||||
/// while a/c/m for last accessed/changed/modified time
|
||||
#[arg(short('m'), long, value_enum)]
|
||||
pub filetime: Option<FileTime>,
|
||||
}
|
||||
|
||||
/// Size units accepted by `--output-format`.
/// Variant names/aliases below are the exact strings clap accepts
/// (matched case-insensitively because the arg sets `ignore_case(true)`).
#[derive(Clone, Copy, Debug, ValueEnum)]
#[value(rename_all = "lower")]
pub enum OutputFormat {
    /// SI prefix (powers of 1000)
    SI,

    /// byte (B)
    B,

    /// kibibyte (KiB)
    #[value(name = "k", alias("kib"))]
    KiB,

    /// mebibyte (MiB)
    #[value(name = "m", alias("mib"))]
    MiB,

    /// gibibyte (GiB)
    #[value(name = "g", alias("gib"))]
    GiB,

    /// tebibyte (TiB)
    #[value(name = "t", alias("tib"))]
    TiB,

    /// kilobyte (kB)
    KB,

    /// megabyte (MB)
    MB,

    /// gigabyte (GB)
    GB,

    /// terabyte (TB)
    TB,
}
|
||||
|
||||
impl fmt::Display for OutputFormat {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::SI => write!(f, "si"),
|
||||
Self::B => write!(f, "b"),
|
||||
Self::KiB => write!(f, "k"),
|
||||
Self::MiB => write!(f, "m"),
|
||||
Self::GiB => write!(f, "g"),
|
||||
Self::TiB => write!(f, "t"),
|
||||
Self::KB => write!(f, "kb"),
|
||||
Self::MB => write!(f, "mb"),
|
||||
Self::GB => write!(f, "gb"),
|
||||
Self::TB => write!(f, "tb"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Which filesystem timestamp `--filetime` selects.
/// Short names ("a"/"c"/"m") and long aliases are both accepted by clap.
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum FileTime {
    /// last accessed time
    #[value(name = "a", alias("accessed"))]
    Accessed,

    /// last changed time
    #[value(name = "c", alias("changed"))]
    Changed,

    /// last modified time
    #[value(name = "m", alias("modified"))]
    Modified,
}
|
||||
431
src/config.rs
Normal file
431
src/config.rs
Normal file
@@ -0,0 +1,431 @@
|
||||
use crate::node::FileTime;
|
||||
use chrono::{Local, TimeZone};
|
||||
use config_file::FromConfigFile;
|
||||
use regex::Regex;
|
||||
use serde::Deserialize;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::cli::Cli;
|
||||
use crate::dir_walker::Operator;
|
||||
use crate::display::get_number_format;
|
||||
|
||||
/// Seconds in one day; unit used by the mtime/atime/ctime day filters.
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
|
||||
|
||||
/// Values deserialized from the user's config file (see `get_config` for
/// the probed locations). Every field is optional: `None` means "not set
/// in the file", in which case the matching CLI flag (if any) decides.
/// Field names are written kebab-case in the TOML file.
#[derive(Deserialize, Default)]
#[serde(rename_all = "kebab-case")]
pub struct Config {
    pub display_full_paths: Option<bool>,
    pub display_apparent_size: Option<bool>,
    pub reverse: Option<bool>,
    pub no_colors: Option<bool>,
    pub force_colors: Option<bool>,
    pub no_bars: Option<bool>,
    pub skip_total: Option<bool>,
    pub screen_reader: Option<bool>,
    pub ignore_hidden: Option<bool>,
    // Stored as the raw string (e.g. "si", "kib"); parsed on use.
    pub output_format: Option<String>,
    // Human-readable size such as "1KiB"; parsed by `convert_min_size`.
    pub min_size: Option<String>,
    pub only_dir: Option<bool>,
    pub only_file: Option<bool>,
    pub disable_progress: Option<bool>,
    pub depth: Option<usize>,
    pub bars_on_right: Option<bool>,
    pub stack_size: Option<usize>,
    pub threads: Option<usize>,
    pub output_json: Option<bool>,
    pub print_errors: Option<bool>,
    pub files0_from: Option<String>,
    pub number_of_lines: Option<usize>,
    pub files_from: Option<String>,
}
|
||||
|
||||
impl Config {
|
||||
pub fn get_files0_from(&self, options: &Cli) -> Option<String> {
|
||||
let from_file = &options.files0_from;
|
||||
match from_file {
|
||||
None => self.files0_from.as_ref().map(|x| x.to_string()),
|
||||
Some(x) => Some(x.to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_files_from(&self, options: &Cli) -> Option<String> {
|
||||
let from_file = &options.files_from;
|
||||
match from_file {
|
||||
None => self.files_from.as_ref().map(|x| x.to_string()),
|
||||
Some(x) => Some(x.to_string()),
|
||||
}
|
||||
}
|
||||
pub fn get_no_colors(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.no_colors || options.no_colors
|
||||
}
|
||||
pub fn get_force_colors(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.force_colors || options.force_colors
|
||||
}
|
||||
pub fn get_disable_progress(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.disable_progress || options.no_progress
|
||||
}
|
||||
pub fn get_apparent_size(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.display_apparent_size || options.apparent_size
|
||||
}
|
||||
pub fn get_ignore_hidden(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.ignore_hidden || options.ignore_hidden
|
||||
}
|
||||
pub fn get_full_paths(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.display_full_paths || options.full_paths
|
||||
}
|
||||
pub fn get_reverse(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.reverse || options.reverse
|
||||
}
|
||||
pub fn get_no_bars(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.no_bars || options.no_percent_bars
|
||||
}
|
||||
pub fn get_output_format(&self, options: &Cli) -> String {
|
||||
let out_fmt = options.output_format;
|
||||
(match out_fmt {
|
||||
None => match &self.output_format {
|
||||
None => "".to_string(),
|
||||
Some(x) => x.to_string(),
|
||||
},
|
||||
Some(x) => x.to_string(),
|
||||
})
|
||||
.to_lowercase()
|
||||
}
|
||||
|
||||
pub fn get_filetime(&self, options: &Cli) -> Option<FileTime> {
|
||||
options.filetime.map(FileTime::from)
|
||||
}
|
||||
|
||||
pub fn get_skip_total(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.skip_total || options.skip_total
|
||||
}
|
||||
pub fn get_screen_reader(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.screen_reader || options.screen_reader
|
||||
}
|
||||
pub fn get_depth(&self, options: &Cli) -> usize {
|
||||
if let Some(v) = options.depth {
|
||||
return v;
|
||||
}
|
||||
|
||||
self.depth.unwrap_or(usize::MAX)
|
||||
}
|
||||
pub fn get_min_size(&self, options: &Cli) -> Option<usize> {
|
||||
let size_from_param = options.min_size.as_ref();
|
||||
self._get_min_size(size_from_param)
|
||||
}
|
||||
fn _get_min_size(&self, min_size: Option<&String>) -> Option<usize> {
|
||||
let size_from_param = min_size.and_then(|a| convert_min_size(a));
|
||||
|
||||
if size_from_param.is_none() {
|
||||
self.min_size
|
||||
.as_ref()
|
||||
.and_then(|a| convert_min_size(a.as_ref()))
|
||||
} else {
|
||||
size_from_param
|
||||
}
|
||||
}
|
||||
pub fn get_only_dir(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.only_dir || options.only_dir
|
||||
}
|
||||
|
||||
pub fn get_print_errors(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.print_errors || options.print_errors
|
||||
}
|
||||
pub fn get_only_file(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.only_file || options.only_file
|
||||
}
|
||||
pub fn get_bars_on_right(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.bars_on_right || options.bars_on_right
|
||||
}
|
||||
pub fn get_custom_stack_size(&self, options: &Cli) -> Option<usize> {
|
||||
let from_cmd_line = options.stack_size;
|
||||
if from_cmd_line.is_none() {
|
||||
self.stack_size
|
||||
} else {
|
||||
from_cmd_line
|
||||
}
|
||||
}
|
||||
pub fn get_threads(&self, options: &Cli) -> Option<usize> {
|
||||
let from_cmd_line = options.threads;
|
||||
if from_cmd_line.is_none() {
|
||||
self.threads
|
||||
} else {
|
||||
from_cmd_line
|
||||
}
|
||||
}
|
||||
pub fn get_output_json(&self, options: &Cli) -> bool {
|
||||
Some(true) == self.output_json || options.output_json
|
||||
}
|
||||
|
||||
pub fn get_number_of_lines(&self, options: &Cli) -> Option<usize> {
|
||||
let from_cmd_line = options.number_of_lines;
|
||||
if from_cmd_line.is_none() {
|
||||
self.number_of_lines
|
||||
} else {
|
||||
from_cmd_line
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_modified_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
|
||||
get_filter_time_operator(options.mtime.as_ref(), get_current_date_epoch_seconds())
|
||||
}
|
||||
|
||||
pub fn get_accessed_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
|
||||
get_filter_time_operator(options.atime.as_ref(), get_current_date_epoch_seconds())
|
||||
}
|
||||
|
||||
pub fn get_changed_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
|
||||
get_filter_time_operator(options.ctime.as_ref(), get_current_date_epoch_seconds())
|
||||
}
|
||||
}
|
||||
|
||||
fn get_current_date_epoch_seconds() -> i64 {
|
||||
// calculate current date epoch seconds
|
||||
let now = Local::now();
|
||||
let current_date = now.date_naive();
|
||||
|
||||
let current_date_time = current_date.and_hms_opt(0, 0, 0).unwrap();
|
||||
Local
|
||||
.from_local_datetime(¤t_date_time)
|
||||
.unwrap()
|
||||
.timestamp()
|
||||
}
|
||||
|
||||
fn get_filter_time_operator(
|
||||
option_value: Option<&String>,
|
||||
current_date_epoch_seconds: i64,
|
||||
) -> Option<(Operator, i64)> {
|
||||
match option_value {
|
||||
Some(val) => {
|
||||
let time = current_date_epoch_seconds
|
||||
- val
|
||||
.parse::<i64>()
|
||||
.unwrap_or_else(|_| panic!("invalid data format"))
|
||||
.abs()
|
||||
* DAY_SECONDS;
|
||||
match val.chars().next().expect("Value should not be empty") {
|
||||
'+' => Some((Operator::LessThan, time - DAY_SECONDS)),
|
||||
'-' => Some((Operator::GreaterThan, time)),
|
||||
_ => Some((Operator::Equal, time - DAY_SECONDS)),
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_min_size(input: &str) -> Option<usize> {
|
||||
let re = Regex::new(r"([0-9]+)(\w*)").unwrap();
|
||||
|
||||
if let Some(cap) = re.captures(input) {
|
||||
let (_, [digits, letters]) = cap.extract();
|
||||
|
||||
// Failure to parse should be impossible due to regex match
|
||||
let digits_as_usize: Option<usize> = digits.parse().ok();
|
||||
|
||||
match digits_as_usize {
|
||||
Some(parsed_digits) => {
|
||||
let number_format = get_number_format(&letters.to_lowercase());
|
||||
match number_format {
|
||||
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
|
||||
None => {
|
||||
if letters.is_empty() {
|
||||
Some(parsed_digits)
|
||||
} else {
|
||||
eprintln!("Ignoring invalid min-size: {input}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Candidate config-file paths under `base`, in the order they are probed.
fn get_config_locations(base: PathBuf) -> Vec<PathBuf> {
    let dotfile = base.join(".dust.toml");
    let xdg_style = base.join(".config").join("dust").join("config.toml");
    vec![dotfile, xdg_style]
}
|
||||
|
||||
pub fn get_config(conf_path: Option<&String>) -> Config {
|
||||
match conf_path {
|
||||
Some(path_str) => {
|
||||
let path = Path::new(path_str);
|
||||
if path.exists() {
|
||||
match Config::from_config_file(path) {
|
||||
Ok(config) => return config,
|
||||
Err(e) => {
|
||||
eprintln!("Ignoring invalid config file '{}': {}", &path.display(), e)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
eprintln!("Config file {:?} doesn't exists", &path.display());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
if let Some(home) = std::env::home_dir() {
|
||||
for path in get_config_locations(home) {
|
||||
if path.exists()
|
||||
&& let Ok(config) = Config::from_config_file(&path)
|
||||
{
|
||||
return config;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Config {
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
// Unit tests for config parsing and config/CLI precedence.
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    use chrono::{Datelike, Timelike};
    use clap::Parser;

    #[test]
    fn test_get_current_date_epoch_seconds() {
        // The reference point must be today's local midnight exactly.
        let epoch_seconds = get_current_date_epoch_seconds();
        let dt = Local.timestamp_opt(epoch_seconds, 0).unwrap();

        assert_eq!(dt.hour(), 0);
        assert_eq!(dt.minute(), 0);
        assert_eq!(dt.second(), 0);
        assert_eq!(dt.date_naive().day(), Local::now().date_naive().day());
        assert_eq!(dt.date_naive().month(), Local::now().date_naive().month());
        assert_eq!(dt.date_naive().year(), Local::now().date_naive().year());
    }

    #[test]
    fn test_conversion() {
        // Binary suffixes are powers of 1024, decimal ones powers of 1000.
        assert_eq!(convert_min_size("55"), Some(55));
        assert_eq!(convert_min_size("12344321"), Some(12344321));
        assert_eq!(convert_min_size("95RUBBISH"), None);
        assert_eq!(convert_min_size("10Ki"), Some(10 * 1024));
        assert_eq!(convert_min_size("10MiB"), Some(10 * 1024usize.pow(2)));
        assert_eq!(convert_min_size("10M"), Some(10 * 1024usize.pow(2)));
        assert_eq!(convert_min_size("10Mb"), Some(10 * 1000usize.pow(2)));
        assert_eq!(convert_min_size("2Gi"), Some(2 * 1024usize.pow(3)));
    }

    #[test]
    fn test_min_size_from_config_applied_or_overridden() {
        // A CLI-provided min-size overrides the config-file value.
        let c = Config {
            min_size: Some("1KiB".to_owned()),
            ..Default::default()
        };
        assert_eq!(c._get_min_size(None), Some(1024));
        assert_eq!(c._get_min_size(Some(&"2KiB".into())), Some(2048));

        assert_eq!(c._get_min_size(Some(&"1kb".into())), Some(1000));
        assert_eq!(c._get_min_size(Some(&"2KB".into())), Some(2000));
    }

    #[test]
    fn test_get_depth() {
        // No config and no flag.
        let c = Config::default();
        let args = get_args(vec![]);
        assert_eq!(c.get_depth(&args), usize::MAX);

        // Config is not defined and flag is defined.
        let c = Config::default();
        let args = get_args(vec!["dust", "--depth", "5"]);
        assert_eq!(c.get_depth(&args), 5);

        // Config is defined and flag is not defined.
        let c = Config {
            depth: Some(3),
            ..Default::default()
        };
        let args = get_args(vec![]);
        assert_eq!(c.get_depth(&args), 3);

        // Both config and flag are defined.
        let c = Config {
            depth: Some(3),
            ..Default::default()
        };
        let args = get_args(vec!["dust", "--depth", "5"]);
        assert_eq!(c.get_depth(&args), 5);
    }

    // Builds a `Cli` from raw argv-style strings.
    fn get_args(args: Vec<&str>) -> Cli {
        Cli::parse_from(args)
    }

    #[test]
    fn test_get_filetime() {
        // No config and no flag.
        let c = Config::default();
        let args = get_filetime_args(vec!["dust"]);
        assert_eq!(c.get_filetime(&args), None);

        // Config is not defined and flag is defined as access time
        let c = Config::default();
        let args = get_filetime_args(vec!["dust", "--filetime", "a"]);
        assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));

        let c = Config::default();
        let args = get_filetime_args(vec!["dust", "--filetime", "accessed"]);
        assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));

        // Config is not defined and flag is defined as modified time
        let c = Config::default();
        let args = get_filetime_args(vec!["dust", "--filetime", "m"]);
        assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));

        let c = Config::default();
        let args = get_filetime_args(vec!["dust", "--filetime", "modified"]);
        assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));

        // Config is not defined and flag is defined as changed time
        let c = Config::default();
        let args = get_filetime_args(vec!["dust", "--filetime", "c"]);
        assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));

        let c = Config::default();
        let args = get_filetime_args(vec!["dust", "--filetime", "changed"]);
        assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
    }

    // Same as `get_args`; kept separate by the original author.
    fn get_filetime_args(args: Vec<&str>) -> Cli {
        Cli::parse_from(args)
    }

    #[test]
    fn test_get_number_of_lines() {
        // No config and no flag.
        let c = Config::default();
        let args = get_args(vec![]);
        assert_eq!(c.get_number_of_lines(&args), None);

        // Config is not defined and flag is defined.
        let c = Config::default();
        let args = get_args(vec!["dust", "--number-of-lines", "5"]);
        assert_eq!(c.get_number_of_lines(&args), Some(5));

        // Config is defined and flag is not defined.
        let c = Config {
            number_of_lines: Some(3),
            ..Default::default()
        };
        let args = get_args(vec![]);
        assert_eq!(c.get_number_of_lines(&args), Some(3));

        // Both config and flag are defined.
        let c = Config {
            number_of_lines: Some(3),
            ..Default::default()
        };
        let args = get_args(vec!["dust", "--number-of-lines", "5"]);
        assert_eq!(c.get_number_of_lines(&args), Some(5));
    }
}
|
||||
428
src/dir_walker.rs
Normal file
428
src/dir_walker.rs
Normal file
@@ -0,0 +1,428 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::fs;
|
||||
use std::io::Error;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crate::node::Node;
|
||||
use crate::progress::ORDERING;
|
||||
use crate::progress::Operation;
|
||||
use crate::progress::PAtomicInfo;
|
||||
use crate::progress::RuntimeErrors;
|
||||
use crate::utils::is_filtered_out_due_to_file_time;
|
||||
use crate::utils::is_filtered_out_due_to_invert_regex;
|
||||
use crate::utils::is_filtered_out_due_to_regex;
|
||||
use rayon::iter::ParallelBridge;
|
||||
use rayon::prelude::ParallelIterator;
|
||||
use regex::Regex;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use std::collections::HashSet;
|
||||
|
||||
use crate::node::build_node;
|
||||
use std::fs::DirEntry;
|
||||
|
||||
use crate::node::FileTime;
|
||||
use crate::platform::get_metadata;
|
||||
|
||||
/// Comparison direction used by the mtime/atime/ctime filters:
/// a file's timestamp is compared against a threshold with one of these.
#[derive(Debug)]
pub enum Operator {
    Equal = 0,
    LessThan = 1,
    GreaterThan = 2,
}
|
||||
|
||||
/// Read-only parameters shared by every (parallel) walk of the tree,
/// plus shared progress/error state behind atomics and a mutex.
pub struct WalkData<'a> {
    // Paths to skip; absolute entries are expected to be canonicalized
    // before insertion (see `is_ignored_path`).
    pub ignore_directories: HashSet<PathBuf>,
    // Only files matching one of these are kept (empty = keep all).
    pub filter_regex: &'a [Regex],
    // Files matching one of these are dropped.
    pub invert_filter_regex: &'a [Regex],
    // When non-empty, only entries on these device ids are walked.
    pub allowed_filesystems: HashSet<u64>,
    // (comparison, epoch-seconds threshold) filters; None = no filter.
    pub filter_modified_time: Option<(Operator, i64)>,
    pub filter_accessed_time: Option<(Operator, i64)>,
    pub filter_changed_time: Option<(Operator, i64)>,
    pub use_apparent_size: bool,
    pub by_filecount: bool,
    // When Some, a directory's 'size' becomes the max child filetime.
    pub by_filetime: &'a Option<FileTime>,
    pub ignore_hidden: bool,
    pub follow_links: bool,
    // Shared progress counters updated from worker threads.
    pub progress_data: Arc<PAtomicInfo>,
    // Errors collected during the walk for reporting afterwards.
    pub errors: Arc<Mutex<RuntimeErrors>>,
}
|
||||
|
||||
pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
|
||||
let mut inodes = HashSet::new();
|
||||
let top_level_nodes: Vec<_> = dirs
|
||||
.into_iter()
|
||||
.filter_map(|d| {
|
||||
let prog_data = &walk_data.progress_data;
|
||||
prog_data.clear_state(&d);
|
||||
let node = walk(d, walk_data, 0)?;
|
||||
|
||||
prog_data.state.store(Operation::PREPARING, ORDERING);
|
||||
|
||||
clean_inodes(node, &mut inodes, walk_data)
|
||||
})
|
||||
.collect();
|
||||
top_level_nodes
|
||||
}
|
||||
|
||||
// Remove files which have the same inode, we don't want to double count them.
|
||||
fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
|
||||
if !walk_data.use_apparent_size
|
||||
&& let Some(id) = x.inode_device
|
||||
&& !inodes.insert(id)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
// Sort Nodes so iteration order is predictable
|
||||
let mut tmp: Vec<_> = x.children;
|
||||
tmp.sort_by(sort_by_inode);
|
||||
let new_children: Vec<_> = tmp
|
||||
.into_iter()
|
||||
.filter_map(|c| clean_inodes(c, inodes, walk_data))
|
||||
.collect();
|
||||
|
||||
let actual_size = if walk_data.by_filetime.is_some() {
|
||||
// If by_filetime is Some, directory 'size' is the maximum filetime among child files instead of disk size
|
||||
new_children
|
||||
.iter()
|
||||
.map(|c| c.size)
|
||||
.chain(std::iter::once(x.size))
|
||||
.max()
|
||||
.unwrap_or(0)
|
||||
} else {
|
||||
// If by_filetime is None, directory 'size' is the sum of disk sizes or file counts of child files
|
||||
x.size + new_children.iter().map(|c| c.size).sum::<u64>()
|
||||
};
|
||||
|
||||
Some(Node {
|
||||
name: x.name,
|
||||
size: actual_size,
|
||||
children: new_children,
|
||||
inode_device: x.inode_device,
|
||||
depth: x.depth,
|
||||
})
|
||||
}
|
||||
|
||||
fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
|
||||
// Sorting by inode is quicker than by sorting by name/size
|
||||
match (a.inode_device, b.inode_device) {
|
||||
(Some(x), Some(y)) => {
|
||||
if x.0 != y.0 {
|
||||
x.0.cmp(&y.0)
|
||||
} else if x.1 != y.1 {
|
||||
x.1.cmp(&y.1)
|
||||
} else {
|
||||
a.name.cmp(&b.name)
|
||||
}
|
||||
}
|
||||
(Some(_), None) => Ordering::Greater,
|
||||
(None, Some(_)) => Ordering::Less,
|
||||
(None, None) => a.name.cmp(&b.name),
|
||||
}
|
||||
}
|
||||
|
||||
// Check if `path` is inside ignored directory
|
||||
fn is_ignored_path(path: &Path, walk_data: &WalkData) -> bool {
|
||||
if walk_data.ignore_directories.contains(path) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Entry is inside an ignored absolute path
|
||||
// Absolute paths should be canonicalized before being added to `WalkData.ignore_directories`
|
||||
for ignored_path in walk_data.ignore_directories.iter() {
|
||||
if !ignored_path.is_absolute() {
|
||||
continue;
|
||||
}
|
||||
let absolute_entry_path = std::fs::canonicalize(path).unwrap_or_default();
|
||||
if absolute_entry_path.starts_with(ignored_path) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// Decides whether a directory entry should be skipped. Checks, in
/// order: ignored paths, the filesystem whitelist, the file-time
/// filters, the include/exclude regexes, and finally dot-file handling.
fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
    if is_ignored_path(&entry.path(), walk_data) {
        return true;
    }

    let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.');
    // Only dereference symlinks for metadata when --follow-links is on
    // AND this entry actually is a symlink.
    let follow_links = walk_data.follow_links && entry.file_type().is_ok_and(|ft| ft.is_symlink());

    if !walk_data.allowed_filesystems.is_empty() {
        let size_inode_device = get_metadata(entry.path(), false, follow_links);
        if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device
            && !walk_data.allowed_filesystems.contains(&dev)
        {
            return true;
        }
    }
    if walk_data.filter_accessed_time.is_some()
        || walk_data.filter_modified_time.is_some()
        || walk_data.filter_changed_time.is_some()
    {
        // NOTE(review): when a filesystem whitelist is also active,
        // get_metadata is fetched a second time here — consider sharing
        // one lookup between the two checks.
        let size_inode_device = get_metadata(entry.path(), false, follow_links);
        // Time filters only apply to files; directories always pass.
        if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device
            && entry.path().is_file()
            && [
                (&walk_data.filter_modified_time, modified_time),
                (&walk_data.filter_accessed_time, accessed_time),
                (&walk_data.filter_changed_time, changed_time),
            ]
            .iter()
            .any(|(filter_time, actual_time)| {
                is_filtered_out_due_to_file_time(filter_time, *actual_time)
            })
        {
            return true;
        }
    }

    // Keeping `walk_data.filter_regex.is_empty()` is important for performance reasons, it stops unnecessary work
    if !walk_data.filter_regex.is_empty()
        && entry.path().is_file()
        && is_filtered_out_due_to_regex(walk_data.filter_regex, &entry.path())
    {
        return true;
    }

    if !walk_data.invert_filter_regex.is_empty()
        && entry.path().is_file()
        && is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &entry.path())
    {
        return true;
    }

    // Dot files are only skipped when hidden files are being ignored.
    is_dot_file && walk_data.ignore_hidden
}
|
||||
|
||||
/// Recursively walks `dir` and builds its `Node`. Directory entries are
/// processed in parallel via rayon's `par_bridge`; progress counters are
/// updated per file and errors are recorded in the shared error state.
fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
    let prog_data = &walk_data.progress_data;
    let errors = &walk_data.errors;

    let children = if dir.is_dir() {
        let read_dir = fs::read_dir(&dir);
        match read_dir {
            Ok(entries) => {
                entries
                    .into_iter()
                    .par_bridge()
                    .filter_map(|entry| {
                        match entry {
                            Ok(ref entry) => {
                                // uncommenting the below line gives simpler code but
                                // rayon doesn't parallelize as well giving a 3X performance drop
                                // hence we unravel the recursion a bit

                                // return walk(entry.path(), walk_data, depth)

                                if !ignore_file(entry, walk_data)
                                    && let Ok(data) = entry.file_type()
                                {
                                    // Directories (and followed symlinks)
                                    // recurse; plain files become leaves.
                                    if data.is_dir()
                                        || (walk_data.follow_links && data.is_symlink())
                                    {
                                        return walk(entry.path(), walk_data, depth + 1);
                                    }

                                    let node = build_node(
                                        entry.path(),
                                        vec![],
                                        data.is_symlink(),
                                        data.is_file(),
                                        depth,
                                        walk_data,
                                    );

                                    prog_data.num_files.fetch_add(1, ORDERING);
                                    if let Some(ref file) = node {
                                        prog_data.total_file_size.fetch_add(file.size, ORDERING);
                                    }

                                    return node;
                                }
                            }
                            Err(ref failed) => {
                                // Retry the whole directory on Interrupted.
                                if handle_error_and_retry(failed, &dir, walk_data) {
                                    return walk(dir.clone(), walk_data, depth);
                                }
                            }
                        }
                        None
                    })
                    .collect()
            }
            Err(failed) => {
                if handle_error_and_retry(&failed, &dir, walk_data) {
                    return walk(dir, walk_data, depth);
                } else {
                    vec![]
                }
            }
        }
    } else {
        // Not a directory and not a file: record it as missing.
        if !dir.is_file() {
            let mut editable_error = errors.lock().unwrap();
            let bad_file = dir.as_os_str().to_string_lossy().into();
            editable_error.file_not_found.insert(bad_file);
        }
        vec![]
    };
    // Whether `dir` itself is a symlink (only relevant with --follow-links).
    let is_symlink = if walk_data.follow_links {
        match fs::symlink_metadata(&dir) {
            Ok(metadata) => metadata.file_type().is_symlink(),
            Err(_) => false,
        }
    } else {
        false
    };
    build_node(dir, children, is_symlink, false, depth, walk_data)
}
|
||||
|
||||
fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
|
||||
let mut editable_error = walk_data.errors.lock().unwrap();
|
||||
match failed.kind() {
|
||||
std::io::ErrorKind::PermissionDenied => {
|
||||
editable_error
|
||||
.no_permissions
|
||||
.insert(dir.to_string_lossy().into());
|
||||
}
|
||||
std::io::ErrorKind::InvalidInput => {
|
||||
editable_error
|
||||
.no_permissions
|
||||
.insert(dir.to_string_lossy().into());
|
||||
}
|
||||
std::io::ErrorKind::NotFound => {
|
||||
editable_error.file_not_found.insert(failed.to_string());
|
||||
}
|
||||
std::io::ErrorKind::Interrupted => {
|
||||
editable_error.interrupted_error += 1;
|
||||
// This does happen on some systems. It was set to 3 but sometimes dust runs would exceed this
|
||||
// However, if there is no limit this results in infinite retrys and dust never finishes
|
||||
if editable_error.interrupted_error > 999 {
|
||||
panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
editable_error.unknown_error.insert(failed.to_string());
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
/// Unit tests for inode deduplication and node ordering.
// Fix: the module itself was not gated, so it (and its `use super::*`)
// was compiled into non-test builds; the scattered inner `#[cfg(test)]`
// attributes were a workaround. Gating the module makes them redundant.
#[cfg(test)]
mod tests {

    #[allow(unused_imports)]
    use super::*;

    // A minimal node with a fixed inode/device pair.
    fn create_node() -> Node {
        Node {
            name: PathBuf::new(),
            size: 10,
            children: vec![],
            inode_device: Some((5, 6)),
            depth: 0,
        }
    }

    // WalkData with no filters, suitable for exercising clean_inodes.
    fn create_walker<'a>(use_apparent_size: bool) -> WalkData<'a> {
        use crate::PIndicator;
        let indicator = PIndicator::build_me();
        WalkData {
            ignore_directories: HashSet::new(),
            filter_regex: &[],
            invert_filter_regex: &[],
            allowed_filesystems: HashSet::new(),
            filter_modified_time: Some((Operator::GreaterThan, 0)),
            filter_accessed_time: Some((Operator::GreaterThan, 0)),
            filter_changed_time: Some((Operator::GreaterThan, 0)),
            use_apparent_size,
            by_filecount: false,
            by_filetime: &None,
            ignore_hidden: false,
            follow_links: false,
            progress_data: indicator.data.clone(),
            errors: Arc::new(Mutex::new(RuntimeErrors::default())),
        }
    }

    #[test]
    #[allow(clippy::redundant_clone)]
    fn test_should_ignore_file() {
        let mut inodes = HashSet::new();
        let n = create_node();
        let walkdata = create_walker(false);

        // First time we insert the node
        assert_eq!(
            clean_inodes(n.clone(), &mut inodes, &walkdata),
            Some(n.clone())
        );

        // Second time is a duplicate - we ignore it
        assert_eq!(clean_inodes(n.clone(), &mut inodes, &walkdata), None);
    }

    #[test]
    #[allow(clippy::redundant_clone)]
    fn test_should_not_ignore_files_if_using_apparent_size() {
        let mut inodes = HashSet::new();
        let n = create_node();
        let walkdata = create_walker(true);

        // If using apparent size we include Nodes, even if duplicate inodes
        assert_eq!(
            clean_inodes(n.clone(), &mut inodes, &walkdata),
            Some(n.clone())
        );
        assert_eq!(
            clean_inodes(n.clone(), &mut inodes, &walkdata),
            Some(n.clone())
        );
    }

    #[test]
    fn test_total_ordering_of_sort_by_inode() {
        use std::str::FromStr;

        let a = Node {
            name: PathBuf::from_str("a").unwrap(),
            size: 0,
            children: vec![],
            inode_device: Some((3, 66310)),
            depth: 0,
        };

        let b = Node {
            name: PathBuf::from_str("b").unwrap(),
            size: 0,
            children: vec![],
            inode_device: None,
            depth: 0,
        };

        let c = Node {
            name: PathBuf::from_str("c").unwrap(),
            size: 0,
            children: vec![],
            inode_device: Some((1, 66310)),
            depth: 0,
        };

        // Inode-bearing nodes sort after inode-less ones; the relation
        // must be a total order (antisymmetric across all pairs).
        assert_eq!(sort_by_inode(&a, &b), Ordering::Greater);
        assert_eq!(sort_by_inode(&a, &c), Ordering::Greater);
        assert_eq!(sort_by_inode(&c, &b), Ordering::Greater);

        assert_eq!(sort_by_inode(&b, &a), Ordering::Less);
        assert_eq!(sort_by_inode(&c, &a), Ordering::Less);
        assert_eq!(sort_by_inode(&b, &c), Ordering::Less);
    }
}
|
||||
726
src/display.rs
726
src/display.rs
@@ -1,114 +1,254 @@
|
||||
extern crate ansi_term;
|
||||
use crate::display_node::DisplayNode;
|
||||
use crate::node::FileTime;
|
||||
|
||||
use self::ansi_term::Colour::Fixed;
|
||||
use self::ansi_term::Style;
|
||||
use crate::utils::Node;
|
||||
use ansi_term::Colour::Red;
|
||||
use lscolors::{LsColors, Style};
|
||||
|
||||
static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
|
||||
use unicode_width::UnicodeWidthStr;
|
||||
|
||||
pub struct DisplayData {
|
||||
use stfu8::encode_u8;
|
||||
|
||||
use chrono::{DateTime, Local, TimeZone, Utc};
|
||||
use std::cmp::max;
|
||||
use std::cmp::min;
|
||||
use std::fs;
|
||||
use std::iter::repeat_n;
|
||||
use std::path::Path;
|
||||
use thousands::Separable;
|
||||
|
||||
pub static UNITS: [char; 5] = ['P', 'T', 'G', 'M', 'K'];
|
||||
static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
|
||||
const FILETIME_SHOW_LENGTH: usize = 19;
|
||||
|
||||
pub struct InitialDisplayData {
|
||||
pub short_paths: bool,
|
||||
pub is_reversed: bool,
|
||||
pub colors_on: bool,
|
||||
pub by_filecount: bool,
|
||||
pub by_filetime: Option<FileTime>,
|
||||
pub is_screen_reader: bool,
|
||||
pub output_format: String,
|
||||
pub bars_on_right: bool,
|
||||
}
|
||||
|
||||
pub struct DisplayData {
|
||||
pub initial: InitialDisplayData,
|
||||
pub num_chars_needed_on_left_most: usize,
|
||||
pub base_size: u64,
|
||||
pub longest_string_length: usize,
|
||||
pub ls_colors: LsColors,
|
||||
}
|
||||
|
||||
impl DisplayData {
|
||||
fn get_first_chars(&self) -> &str {
|
||||
if self.is_reversed {
|
||||
"─┴"
|
||||
} else {
|
||||
"─┬"
|
||||
fn get_tree_chars(&self, was_i_last: bool, has_children: bool) -> &'static str {
|
||||
match (self.initial.is_reversed, was_i_last, has_children) {
|
||||
(true, true, true) => "┌─┴",
|
||||
(true, true, false) => "┌──",
|
||||
(true, false, true) => "├─┴",
|
||||
(true, false, false) => "├──",
|
||||
(false, true, true) => "└─┬",
|
||||
(false, true, false) => "└──",
|
||||
(false, false, true) => "├─┬",
|
||||
(false, false, false) => "├──",
|
||||
}
|
||||
}
|
||||
|
||||
fn get_tree_chars(
|
||||
&self,
|
||||
num_siblings: u64,
|
||||
max_siblings: u64,
|
||||
has_children: bool,
|
||||
) -> &'static str {
|
||||
if self.is_reversed {
|
||||
if num_siblings == max_siblings - 1 {
|
||||
if has_children {
|
||||
"┌─┴"
|
||||
} else {
|
||||
"┌──"
|
||||
}
|
||||
} else if has_children {
|
||||
"├─┴"
|
||||
} else {
|
||||
"├──"
|
||||
}
|
||||
fn is_biggest(&self, num_siblings: usize, max_siblings: u64) -> bool {
|
||||
if self.initial.is_reversed {
|
||||
num_siblings == (max_siblings - 1) as usize
|
||||
} else {
|
||||
if num_siblings == 0 {
|
||||
if has_children {
|
||||
"└─┬"
|
||||
} else {
|
||||
"└──"
|
||||
}
|
||||
} else if has_children {
|
||||
"├─┬"
|
||||
} else {
|
||||
"├──"
|
||||
}
|
||||
num_siblings == 0
|
||||
}
|
||||
}
|
||||
|
||||
fn is_biggest(&self, num_siblings: u64, max_siblings: u64) -> bool {
|
||||
if self.is_reversed {
|
||||
fn is_last(&self, num_siblings: usize, max_siblings: u64) -> bool {
|
||||
if self.initial.is_reversed {
|
||||
num_siblings == 0
|
||||
} else {
|
||||
num_siblings == max_siblings - 1
|
||||
num_siblings == (max_siblings - 1) as usize
|
||||
}
|
||||
}
|
||||
|
||||
fn get_children_from_node(&self, node: Node) -> impl Iterator<Item = Box<Node>> {
|
||||
if self.is_reversed {
|
||||
let n: Vec<Box<Node>> = node.children.into_iter().rev().map(|a| a).collect();
|
||||
return n.into_iter();
|
||||
fn percent_size(&self, node: &DisplayNode) -> f32 {
|
||||
let result = node.size as f32 / self.base_size as f32;
|
||||
if result.is_normal() { result } else { 0.0 }
|
||||
}
|
||||
}
|
||||
|
||||
struct DrawData<'a> {
|
||||
indent: String,
|
||||
percent_bar: String,
|
||||
display_data: &'a DisplayData,
|
||||
}
|
||||
|
||||
impl DrawData<'_> {
|
||||
fn get_new_indent(&self, has_children: bool, was_i_last: bool) -> String {
|
||||
let chars = self.display_data.get_tree_chars(was_i_last, has_children);
|
||||
self.indent.to_string() + chars
|
||||
}
|
||||
|
||||
// TODO: can we test this?
|
||||
fn generate_bar(&self, node: &DisplayNode, level: usize) -> String {
|
||||
if self.display_data.initial.is_screen_reader {
|
||||
return level.to_string();
|
||||
}
|
||||
let chars_in_bar = self.percent_bar.chars().count();
|
||||
let num_bars = chars_in_bar as f32 * self.display_data.percent_size(node);
|
||||
let mut num_not_my_bar = (chars_in_bar as i32) - num_bars as i32;
|
||||
|
||||
let mut new_bar = "".to_string();
|
||||
let idx = 5 - level.clamp(1, 4);
|
||||
|
||||
let itr: Box<dyn Iterator<Item = char>> = if self.display_data.initial.bars_on_right {
|
||||
Box::new(self.percent_bar.chars())
|
||||
} else {
|
||||
return node.children.into_iter();
|
||||
Box::new(self.percent_bar.chars().rev())
|
||||
};
|
||||
|
||||
for c in itr {
|
||||
num_not_my_bar -= 1;
|
||||
if num_not_my_bar <= 0 {
|
||||
new_bar.push(BLOCKS[0]);
|
||||
} else if c == BLOCKS[0] {
|
||||
new_bar.push(BLOCKS[idx]);
|
||||
} else {
|
||||
new_bar.push(c);
|
||||
}
|
||||
}
|
||||
if self.display_data.initial.bars_on_right {
|
||||
new_bar
|
||||
} else {
|
||||
new_bar.chars().rev().collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn draw_it(permissions: bool, use_full_path: bool, is_reversed: bool, root_node: Node) {
|
||||
if !permissions {
|
||||
eprintln!("Did not have permissions for all directories");
|
||||
}
|
||||
let display_data = DisplayData {
|
||||
short_paths: !use_full_path,
|
||||
is_reversed,
|
||||
pub fn draw_it(
|
||||
idd: InitialDisplayData,
|
||||
root_node: &DisplayNode,
|
||||
no_percent_bars: bool,
|
||||
terminal_width: usize,
|
||||
skip_total: bool,
|
||||
) {
|
||||
let num_chars_needed_on_left_most = if idd.by_filecount {
|
||||
let max_size = root_node.size;
|
||||
max_size.separate_with_commas().chars().count()
|
||||
} else if idd.by_filetime.is_some() {
|
||||
FILETIME_SHOW_LENGTH
|
||||
} else {
|
||||
find_biggest_size_str(root_node, &idd.output_format)
|
||||
};
|
||||
|
||||
for c in display_data.get_children_from_node(root_node) {
|
||||
let first_tree_chars = display_data.get_first_chars();
|
||||
display_node(*c, true, first_tree_chars, &display_data)
|
||||
assert!(
|
||||
terminal_width > num_chars_needed_on_left_most + 2,
|
||||
"Not enough terminal width"
|
||||
);
|
||||
|
||||
let allowed_width = terminal_width - num_chars_needed_on_left_most - 2;
|
||||
let num_indent_chars = 3;
|
||||
let longest_string_length =
|
||||
find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd);
|
||||
|
||||
let max_bar_length = if no_percent_bars || longest_string_length + 7 >= allowed_width {
|
||||
0
|
||||
} else {
|
||||
allowed_width - longest_string_length - 7
|
||||
};
|
||||
|
||||
let first_size_bar = repeat_n(BLOCKS[0], max_bar_length).collect();
|
||||
|
||||
let display_data = DisplayData {
|
||||
initial: idd,
|
||||
num_chars_needed_on_left_most,
|
||||
base_size: root_node.size,
|
||||
longest_string_length,
|
||||
ls_colors: LsColors::from_env().unwrap_or_default(),
|
||||
};
|
||||
let draw_data = DrawData {
|
||||
indent: "".to_string(),
|
||||
percent_bar: first_size_bar,
|
||||
display_data: &display_data,
|
||||
};
|
||||
|
||||
if !skip_total {
|
||||
display_node(root_node, &draw_data, true, true);
|
||||
} else {
|
||||
for (count, c) in root_node
|
||||
.get_children_from_node(draw_data.display_data.initial.is_reversed)
|
||||
.enumerate()
|
||||
{
|
||||
let is_biggest = display_data.is_biggest(count, root_node.num_siblings());
|
||||
let was_i_last = display_data.is_last(count, root_node.num_siblings());
|
||||
display_node(c, &draw_data, is_biggest, was_i_last);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn display_node(node: Node, is_biggest: bool, indent: &str, display_data: &DisplayData) {
|
||||
let short = display_data.short_paths;
|
||||
fn find_biggest_size_str(node: &DisplayNode, output_format: &str) -> usize {
|
||||
let mut mx = human_readable_number(node.size, output_format)
|
||||
.chars()
|
||||
.count();
|
||||
for n in node.children.iter() {
|
||||
mx = max(mx, find_biggest_size_str(n, output_format));
|
||||
}
|
||||
mx
|
||||
}
|
||||
|
||||
let mut num_siblings = node.children.len() as u64;
|
||||
let max_sibling = num_siblings;
|
||||
let new_indent = clean_indentation_string(indent);
|
||||
let name = node.name.clone();
|
||||
let size = node.size;
|
||||
fn find_longest_dir_name(
|
||||
node: &DisplayNode,
|
||||
indent: usize,
|
||||
terminal: usize,
|
||||
idd: &InitialDisplayData,
|
||||
) -> usize {
|
||||
let printable_name = get_printable_name(&node.name, idd.short_paths);
|
||||
|
||||
if !display_data.is_reversed {
|
||||
print_this_node(&*name, size, is_biggest, short, indent);
|
||||
let longest = if idd.is_screen_reader {
|
||||
UnicodeWidthStr::width(&*printable_name) + 1
|
||||
} else {
|
||||
min(
|
||||
UnicodeWidthStr::width(&*printable_name) + 1 + indent,
|
||||
terminal,
|
||||
)
|
||||
};
|
||||
|
||||
// each none root tree drawing is 2 more chars, hence we increment indent by 2
|
||||
node.children
|
||||
.iter()
|
||||
.map(|c| find_longest_dir_name(c, indent + 2, terminal, idd))
|
||||
.fold(longest, max)
|
||||
}
|
||||
|
||||
fn display_node(node: &DisplayNode, draw_data: &DrawData, is_biggest: bool, is_last: bool) {
|
||||
// hacky way of working out how deep we are in the tree
|
||||
let indent = draw_data.get_new_indent(!node.children.is_empty(), is_last);
|
||||
let level = ((indent.chars().count() - 1) / 2) - 1;
|
||||
let bar_text = draw_data.generate_bar(node, level);
|
||||
|
||||
let to_print = format_string(node, &indent, &bar_text, is_biggest, draw_data.display_data);
|
||||
|
||||
if !draw_data.display_data.initial.is_reversed {
|
||||
println!("{to_print}")
|
||||
}
|
||||
|
||||
for c in display_data.get_children_from_node(node) {
|
||||
num_siblings -= 1;
|
||||
let chars = display_data.get_tree_chars(num_siblings, max_sibling, c.children.len() > 0);
|
||||
let is_biggest = display_data.is_biggest(num_siblings, max_sibling);
|
||||
let full_indent = new_indent.clone() + chars;
|
||||
display_node(*c, is_biggest, &*full_indent, display_data)
|
||||
let dd = DrawData {
|
||||
indent: clean_indentation_string(&indent),
|
||||
percent_bar: bar_text,
|
||||
display_data: draw_data.display_data,
|
||||
};
|
||||
|
||||
let num_siblings = node.num_siblings();
|
||||
|
||||
for (count, c) in node
|
||||
.get_children_from_node(draw_data.display_data.initial.is_reversed)
|
||||
.enumerate()
|
||||
{
|
||||
let is_biggest = dd.display_data.is_biggest(count, num_siblings);
|
||||
let was_i_last = dd.display_data.is_last(count, num_siblings);
|
||||
display_node(c, &dd, is_biggest, was_i_last);
|
||||
}
|
||||
|
||||
if display_data.is_reversed {
|
||||
print_this_node(&*name, size, is_biggest, short, indent);
|
||||
if draw_data.display_data.initial.is_reversed {
|
||||
println!("{to_print}")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -129,69 +269,415 @@ fn clean_indentation_string(s: &str) -> String {
|
||||
is
|
||||
}
|
||||
|
||||
fn print_this_node(name: &str, size: u64, is_biggest: bool, short_paths: bool, indentation: &str) {
|
||||
let pretty_size = format!("{:>5}", human_readable_number(size),);
|
||||
println!(
|
||||
"{}",
|
||||
format_string(name, is_biggest, short_paths, &*pretty_size, indentation)
|
||||
)
|
||||
}
|
||||
|
||||
pub fn format_string(
|
||||
dir_name: &str,
|
||||
is_biggest: bool,
|
||||
short_paths: bool,
|
||||
size: &str,
|
||||
indentation: &str,
|
||||
) -> String {
|
||||
pub fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
|
||||
let dir_name = dir_name.as_ref();
|
||||
let printable_name = {
|
||||
if short_paths {
|
||||
dir_name.split('/').last().unwrap_or(dir_name)
|
||||
match dir_name.parent() {
|
||||
Some(prefix) => match dir_name.strip_prefix(prefix) {
|
||||
Ok(base) => base,
|
||||
Err(_) => dir_name,
|
||||
},
|
||||
None => dir_name,
|
||||
}
|
||||
} else {
|
||||
dir_name
|
||||
}
|
||||
};
|
||||
format!(
|
||||
"{} {} {}",
|
||||
if is_biggest {
|
||||
Fixed(196).paint(size)
|
||||
} else {
|
||||
Style::new().paint(size)
|
||||
},
|
||||
indentation,
|
||||
printable_name,
|
||||
)
|
||||
encode_u8(printable_name.display().to_string().as_bytes())
|
||||
}
|
||||
|
||||
fn human_readable_number(size: u64) -> String {
|
||||
fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &DisplayData) -> String {
|
||||
let name = get_printable_name(&node.name, display_data.initial.short_paths);
|
||||
let indent_and_name = format!("{indent} {name}");
|
||||
let width = UnicodeWidthStr::width(&*indent_and_name);
|
||||
|
||||
assert!(
|
||||
display_data.longest_string_length >= width,
|
||||
"Terminal width not wide enough to draw directory tree"
|
||||
);
|
||||
|
||||
// Add spaces after the filename so we can draw the % used bar chart.
|
||||
name + " "
|
||||
.repeat(display_data.longest_string_length - width)
|
||||
.as_str()
|
||||
}
|
||||
|
||||
fn maybe_trim_filename(name_in: String, indent: &str, display_data: &DisplayData) -> String {
|
||||
let indent_length = UnicodeWidthStr::width(indent);
|
||||
assert!(
|
||||
display_data.longest_string_length >= indent_length + 2,
|
||||
"Terminal width not wide enough to draw directory tree"
|
||||
);
|
||||
|
||||
let max_size = display_data.longest_string_length - indent_length;
|
||||
if UnicodeWidthStr::width(&*name_in) > max_size {
|
||||
let name = name_in.chars().take(max_size - 2).collect::<String>();
|
||||
name + ".."
|
||||
} else {
|
||||
name_in
|
||||
}
|
||||
}
|
||||
|
||||
pub fn format_string(
|
||||
node: &DisplayNode,
|
||||
indent: &str,
|
||||
bars: &str,
|
||||
is_biggest: bool,
|
||||
display_data: &DisplayData,
|
||||
) -> String {
|
||||
let (percent, name_and_padding) = get_name_percent(node, indent, bars, display_data);
|
||||
let pretty_size = get_pretty_size(node, is_biggest, display_data);
|
||||
let pretty_name = get_pretty_name(node, name_and_padding, display_data);
|
||||
// we can clean this and the method below somehow, not sure yet
|
||||
if display_data.initial.is_screen_reader {
|
||||
// if screen_reader then bars is 'depth'
|
||||
format!("{pretty_name} {bars} {pretty_size}{percent}")
|
||||
} else if display_data.initial.by_filetime.is_some() {
|
||||
format!("{pretty_size} {indent}{pretty_name}")
|
||||
} else {
|
||||
format!("{pretty_size} {indent} {pretty_name}{percent}")
|
||||
}
|
||||
}
|
||||
|
||||
fn get_name_percent(
|
||||
node: &DisplayNode,
|
||||
indent: &str,
|
||||
bar_chart: &str,
|
||||
display_data: &DisplayData,
|
||||
) -> (String, String) {
|
||||
if display_data.initial.is_screen_reader {
|
||||
let percent = display_data.percent_size(node) * 100.0;
|
||||
let percent_size_str = format!("{percent:.0}%");
|
||||
let percents = format!(" {percent_size_str:>4}",);
|
||||
let name = pad_or_trim_filename(node, "", display_data);
|
||||
(percents, name)
|
||||
// Bar chart being empty may come from either config or the screen not being wide enough
|
||||
} else if !bar_chart.is_empty() {
|
||||
let percent = display_data.percent_size(node) * 100.0;
|
||||
let percent_size_str = format!("{percent:.0}%");
|
||||
let percents = format!("│{bar_chart} │ {percent_size_str:>4}");
|
||||
let name_and_padding = pad_or_trim_filename(node, indent, display_data);
|
||||
(percents, name_and_padding)
|
||||
} else {
|
||||
let n = get_printable_name(&node.name, display_data.initial.short_paths);
|
||||
let name = maybe_trim_filename(n, indent, display_data);
|
||||
("".into(), name)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
|
||||
let output = if display_data.initial.by_filecount {
|
||||
node.size.separate_with_commas()
|
||||
} else if display_data.initial.by_filetime.is_some() {
|
||||
get_pretty_file_modified_time(node.size as i64)
|
||||
} else {
|
||||
human_readable_number(node.size, &display_data.initial.output_format)
|
||||
};
|
||||
let spaces_to_add = display_data.num_chars_needed_on_left_most - output.chars().count();
|
||||
let output = " ".repeat(spaces_to_add) + output.as_str();
|
||||
|
||||
if is_biggest && display_data.initial.colors_on {
|
||||
format!("{}", Red.paint(output))
|
||||
} else {
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
fn get_pretty_file_modified_time(timestamp: i64) -> String {
|
||||
let datetime: DateTime<Utc> = Utc.timestamp_opt(timestamp, 0).unwrap();
|
||||
|
||||
let local_datetime = datetime.with_timezone(&Local);
|
||||
|
||||
local_datetime.format("%Y-%m-%dT%H:%M:%S").to_string()
|
||||
}
|
||||
|
||||
fn get_pretty_name(
|
||||
node: &DisplayNode,
|
||||
name_and_padding: String,
|
||||
display_data: &DisplayData,
|
||||
) -> String {
|
||||
if display_data.initial.colors_on {
|
||||
let meta_result = fs::metadata(&node.name);
|
||||
let directory_color = display_data
|
||||
.ls_colors
|
||||
.style_for_path_with_metadata(&node.name, meta_result.as_ref().ok());
|
||||
let ansi_style = directory_color
|
||||
.map(Style::to_ansi_term_style)
|
||||
.unwrap_or_default();
|
||||
let out = ansi_style.paint(name_and_padding);
|
||||
format!("{out}")
|
||||
} else {
|
||||
name_and_padding
|
||||
}
|
||||
}
|
||||
|
||||
// If we are working with SI units or not
|
||||
pub fn get_type_of_thousand(output_str: &str) -> u64 {
|
||||
if output_str.is_empty() {
|
||||
1024
|
||||
} else if output_str == "si" {
|
||||
1000
|
||||
} else if output_str.contains('i') || output_str.len() == 1 {
|
||||
1024
|
||||
} else {
|
||||
1000
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_number_format(output_str: &str) -> Option<(u64, char)> {
|
||||
if output_str.starts_with('b') {
|
||||
return Some((1, 'B'));
|
||||
}
|
||||
for (i, u) in UNITS.iter().enumerate() {
|
||||
let marker = 1024u64.pow((UNITS.len() - i) as u32);
|
||||
if size >= marker {
|
||||
if size / marker < 10 {
|
||||
return format!("{:.1}{}", (size as f32 / marker as f32), u);
|
||||
} else {
|
||||
return format!("{}{}", (size / marker), u);
|
||||
}
|
||||
if output_str.starts_with((*u).to_ascii_lowercase()) {
|
||||
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
|
||||
return Some((marker, *u));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn human_readable_number(size: u64, output_str: &str) -> String {
|
||||
if output_str == "count" {
|
||||
return size.to_string();
|
||||
};
|
||||
match get_number_format(output_str) {
|
||||
Some((x, u)) => {
|
||||
format!("{}{}", (size / x), u)
|
||||
}
|
||||
None => {
|
||||
for (i, u) in UNITS.iter().enumerate() {
|
||||
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
|
||||
if size >= marker {
|
||||
if size / marker < 10 {
|
||||
return format!("{:.1}{}", (size as f32 / marker as f32), u);
|
||||
} else {
|
||||
return format!("{}{}", (size / marker), u);
|
||||
}
|
||||
}
|
||||
}
|
||||
format!("{size}B")
|
||||
}
|
||||
}
|
||||
return format!("{}B", size);
|
||||
}
|
||||
|
||||
mod tests {
|
||||
#[allow(unused_imports)]
|
||||
use super::*;
|
||||
#[allow(unused_imports)]
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[cfg(test)]
|
||||
fn get_fake_display_data(longest_string_length: usize) -> DisplayData {
|
||||
let initial = InitialDisplayData {
|
||||
short_paths: true,
|
||||
is_reversed: false,
|
||||
colors_on: false,
|
||||
by_filecount: false,
|
||||
by_filetime: None,
|
||||
is_screen_reader: false,
|
||||
output_format: "".into(),
|
||||
bars_on_right: false,
|
||||
};
|
||||
DisplayData {
|
||||
initial,
|
||||
num_chars_needed_on_left_most: 5,
|
||||
base_size: 2_u64.pow(12), // 4.0K
|
||||
longest_string_length,
|
||||
ls_colors: LsColors::from_env().unwrap_or_default(),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_str() {
|
||||
let n = DisplayNode {
|
||||
name: PathBuf::from("/short"),
|
||||
size: 2_u64.pow(12), // This is 4.0K
|
||||
children: vec![],
|
||||
};
|
||||
let indent = "┌─┴";
|
||||
let percent_bar = "";
|
||||
let is_biggest = false;
|
||||
let data = get_fake_display_data(20);
|
||||
|
||||
let s = format_string(&n, indent, percent_bar, is_biggest, &data);
|
||||
assert_eq!(s, " 4.0K ┌─┴ short");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_str_long_name() {
|
||||
let name = "very_long_name_longer_than_the_eighty_character_limit_very_long_name_this_bit_will_truncate";
|
||||
let n = DisplayNode {
|
||||
name: PathBuf::from(name),
|
||||
size: 2_u64.pow(12), // This is 4.0K
|
||||
children: vec![],
|
||||
};
|
||||
let indent = "┌─┴";
|
||||
let percent_bar = "";
|
||||
let is_biggest = false;
|
||||
|
||||
let data = get_fake_display_data(64);
|
||||
let s = format_string(&n, indent, percent_bar, is_biggest, &data);
|
||||
assert_eq!(
|
||||
s,
|
||||
" 4.0K ┌─┴ very_long_name_longer_than_the_eighty_character_limit_very_.."
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_format_str_screen_reader() {
|
||||
let n = DisplayNode {
|
||||
name: PathBuf::from("/short"),
|
||||
size: 2_u64.pow(12), // This is 4.0K
|
||||
children: vec![],
|
||||
};
|
||||
let indent = "";
|
||||
let percent_bar = "3";
|
||||
let is_biggest = false;
|
||||
let mut data = get_fake_display_data(20);
|
||||
data.initial.is_screen_reader = true;
|
||||
|
||||
let s = format_string(&n, indent, percent_bar, is_biggest, &data);
|
||||
assert_eq!(s, "short 3 4.0K 100%");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_machine_readable_filecount() {
|
||||
assert_eq!(human_readable_number(1, "count"), "1");
|
||||
assert_eq!(human_readable_number(1000, "count"), "1000");
|
||||
assert_eq!(human_readable_number(1024, "count"), "1024");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_human_readable_number() {
|
||||
assert_eq!(human_readable_number(1), "1B");
|
||||
assert_eq!(human_readable_number(956), "956B");
|
||||
assert_eq!(human_readable_number(1004), "1004B");
|
||||
assert_eq!(human_readable_number(1024), "1.0K");
|
||||
assert_eq!(human_readable_number(1536), "1.5K");
|
||||
assert_eq!(human_readable_number(1024 * 512), "512K");
|
||||
assert_eq!(human_readable_number(1024 * 1024), "1.0M");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1), "1023M");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20), "20G");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024), "1.0T");
|
||||
assert_eq!(human_readable_number(1, ""), "1B");
|
||||
assert_eq!(human_readable_number(956, ""), "956B");
|
||||
assert_eq!(human_readable_number(1004, ""), "1004B");
|
||||
assert_eq!(human_readable_number(1024, ""), "1.0K");
|
||||
assert_eq!(human_readable_number(1536, ""), "1.5K");
|
||||
assert_eq!(human_readable_number(1024 * 512, ""), "512K");
|
||||
assert_eq!(human_readable_number(1024 * 1024, ""), "1.0M");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
|
||||
assert_eq!(
|
||||
human_readable_number(1024 * 1024 * 1024 * 1024 * 234, ""),
|
||||
"234T"
|
||||
);
|
||||
assert_eq!(
|
||||
human_readable_number(1024 * 1024 * 1024 * 1024 * 1024, ""),
|
||||
"1.0P"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_human_readable_number_si() {
|
||||
assert_eq!(human_readable_number(1024 * 100, ""), "100K");
|
||||
assert_eq!(human_readable_number(1024 * 100, "si"), "102K");
|
||||
}
|
||||
|
||||
// Refer to https://en.wikipedia.org/wiki/Byte#Multiple-byte_units
|
||||
#[test]
|
||||
fn test_human_readable_number_kb() {
|
||||
let hrn = human_readable_number;
|
||||
assert_eq!(hrn(1023, "b"), "1023B");
|
||||
assert_eq!(hrn(1000 * 1000, "bytes"), "1000000B");
|
||||
assert_eq!(hrn(1023, "kb"), "1K");
|
||||
assert_eq!(hrn(1023, "k"), "0K");
|
||||
assert_eq!(hrn(1023, "kib"), "0K");
|
||||
assert_eq!(hrn(1024, "kib"), "1K");
|
||||
assert_eq!(hrn(1024 * 512, "kib"), "512K");
|
||||
assert_eq!(hrn(1024 * 1024, "kib"), "1024K");
|
||||
assert_eq!(hrn(1024 * 1000 * 1000 * 20, "kib"), "20000000K");
|
||||
assert_eq!(hrn(1024 * 1024 * 1000 * 20, "mib"), "20000M");
|
||||
assert_eq!(hrn(1024 * 1024 * 1024 * 20, "gib"), "20G");
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn build_draw_data(disp: &DisplayData, size: u32) -> (DrawData<'_>, DisplayNode) {
|
||||
let n = DisplayNode {
|
||||
name: PathBuf::from("/short"),
|
||||
size: 2_u64.pow(size),
|
||||
children: vec![],
|
||||
};
|
||||
let first_size_bar = repeat_n(BLOCKS[0], 13).collect();
|
||||
let dd = DrawData {
|
||||
indent: "".into(),
|
||||
percent_bar: first_size_bar,
|
||||
display_data: disp,
|
||||
};
|
||||
(dd, n)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_draw_data() {
|
||||
let disp = &get_fake_display_data(20);
|
||||
let (dd, n) = build_draw_data(disp, 12);
|
||||
let bar = dd.generate_bar(&n, 1);
|
||||
assert_eq!(bar, "█████████████");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_draw_data2() {
|
||||
let disp = &get_fake_display_data(20);
|
||||
let (dd, n) = build_draw_data(disp, 11);
|
||||
let bar = dd.generate_bar(&n, 2);
|
||||
assert_eq!(bar, "███████░░░░░░");
|
||||
}
|
||||
#[test]
|
||||
fn test_draw_data3() {
|
||||
let mut disp = get_fake_display_data(20);
|
||||
let (dd, n) = build_draw_data(&disp, 11);
|
||||
let bar = dd.generate_bar(&n, 3);
|
||||
assert_eq!(bar, "███████▒▒▒▒▒▒");
|
||||
|
||||
disp.initial.bars_on_right = true;
|
||||
let (dd, n) = build_draw_data(&disp, 11);
|
||||
let bar = dd.generate_bar(&n, 3);
|
||||
assert_eq!(bar, "▒▒▒▒▒▒███████")
|
||||
}
|
||||
#[test]
|
||||
fn test_draw_data4() {
|
||||
let disp = &get_fake_display_data(20);
|
||||
let (dd, n) = build_draw_data(disp, 10);
|
||||
// After 4 we have no more levels of shading so 4+ is the same
|
||||
let bar = dd.generate_bar(&n, 4);
|
||||
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
|
||||
let bar = dd.generate_bar(&n, 5);
|
||||
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_pretty_file_modified_time() {
|
||||
// Create a timestamp for 2023-07-12 00:00:00 in local time
|
||||
let local_dt = Local.with_ymd_and_hms(2023, 7, 12, 0, 0, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
|
||||
// Format expected output
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
|
||||
// Test another timestamp
|
||||
let local_dt = Local.with_ymd_and_hms(2020, 1, 1, 12, 0, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
|
||||
// Test timestamp for epoch start (1970-01-01T00:00:00)
|
||||
let local_dt = Local.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
|
||||
// Test a future timestamp
|
||||
let local_dt = Local.with_ymd_and_hms(2030, 12, 25, 6, 30, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
}
|
||||
}
|
||||
|
||||
58
src/display_node.rs
Normal file
58
src/display_node.rs
Normal file
@@ -0,0 +1,58 @@
|
||||
use std::cell::RefCell;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::ser::SerializeStruct;
|
||||
use serde::{Serialize, Serializer};
|
||||
|
||||
use crate::display::human_readable_number;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
|
||||
pub struct DisplayNode {
|
||||
// Note: the order of fields in important here, for PartialEq and PartialOrd
|
||||
pub size: u64,
|
||||
pub name: PathBuf,
|
||||
pub children: Vec<DisplayNode>,
|
||||
}
|
||||
|
||||
impl DisplayNode {
|
||||
pub fn num_siblings(&self) -> u64 {
|
||||
self.children.len() as u64
|
||||
}
|
||||
|
||||
pub fn get_children_from_node(&self, is_reversed: bool) -> impl Iterator<Item = &DisplayNode> {
|
||||
// we box to avoid the clippy lint warning
|
||||
let out: Box<dyn Iterator<Item = &DisplayNode>> = if is_reversed {
|
||||
Box::new(self.children.iter().rev())
|
||||
} else {
|
||||
Box::new(self.children.iter())
|
||||
};
|
||||
out
|
||||
}
|
||||
}
|
||||
|
||||
// Only used for -j 'json' flag combined with -o 'output_type' flag
|
||||
// Used to pass the output_type into the custom Serde serializer
|
||||
thread_local! {
|
||||
pub static OUTPUT_TYPE: RefCell<String> = const { RefCell::new(String::new()) };
|
||||
}
|
||||
|
||||
/*
|
||||
We need the custom Serialize incase someone uses the -o flag to pass a custom output type in
|
||||
(show size in Mb / Gb etc).
|
||||
Sadly this also necessitates a global variable OUTPUT_TYPE as we can not pass the output_type flag
|
||||
into the serialize method
|
||||
*/
|
||||
impl Serialize for DisplayNode {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let readable_size = OUTPUT_TYPE
|
||||
.with(|output_type| human_readable_number(self.size, output_type.borrow().as_str()));
|
||||
let mut state = serializer.serialize_struct("DisplayNode", 2)?;
|
||||
state.serialize_field("size", &(readable_size))?;
|
||||
state.serialize_field("name", &self.name)?;
|
||||
state.serialize_field("children", &self.children)?;
|
||||
state.end()
|
||||
}
|
||||
}
|
||||
221
src/filter.rs
Normal file
221
src/filter.rs
Normal file
@@ -0,0 +1,221 @@
|
||||
use stfu8::encode_u8;
|
||||
|
||||
use crate::display::get_printable_name;
|
||||
use crate::display_node::DisplayNode;
|
||||
use crate::node::FileTime;
|
||||
use crate::node::Node;
|
||||
use std::collections::BinaryHeap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct AggregateData {
|
||||
pub min_size: Option<usize>,
|
||||
pub only_dir: bool,
|
||||
pub only_file: bool,
|
||||
pub number_of_lines: usize,
|
||||
pub depth: usize,
|
||||
pub using_a_filter: bool,
|
||||
pub short_paths: bool,
|
||||
}
|
||||
|
||||
pub fn get_biggest(
|
||||
top_level_nodes: Vec<Node>,
|
||||
display_data: AggregateData,
|
||||
by_filetime: &Option<FileTime>,
|
||||
keep_collapsed: HashSet<PathBuf>,
|
||||
) -> DisplayNode {
|
||||
let mut heap = BinaryHeap::new();
|
||||
let number_top_level_nodes = top_level_nodes.len();
|
||||
let root;
|
||||
|
||||
if number_top_level_nodes == 0 {
|
||||
root = total_node_builder(0, vec![])
|
||||
} else if number_top_level_nodes > 1 {
|
||||
let size = if by_filetime.is_some() {
|
||||
top_level_nodes
|
||||
.iter()
|
||||
.map(|node| node.size)
|
||||
.max()
|
||||
.unwrap_or(0)
|
||||
} else {
|
||||
top_level_nodes.iter().map(|node| node.size).sum()
|
||||
};
|
||||
|
||||
let nodes = handle_duplicate_top_level_names(top_level_nodes, display_data.short_paths);
|
||||
root = total_node_builder(size, nodes);
|
||||
heap = always_add_children(&display_data, &root, heap);
|
||||
} else {
|
||||
root = top_level_nodes.into_iter().next().unwrap();
|
||||
heap = add_children(&display_data, &root, heap);
|
||||
}
|
||||
|
||||
fill_remaining_lines(heap, &root, display_data, keep_collapsed)
|
||||
}
|
||||
|
||||
fn total_node_builder(size: u64, children: Vec<Node>) -> Node {
|
||||
Node {
|
||||
name: PathBuf::from("(total)"),
|
||||
size,
|
||||
children,
|
||||
inode_device: None,
|
||||
depth: 0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fill_remaining_lines<'a>(
|
||||
mut heap: BinaryHeap<&'a Node>,
|
||||
root: &'a Node,
|
||||
display_data: AggregateData,
|
||||
keep_collapsed: HashSet<PathBuf>,
|
||||
) -> DisplayNode {
|
||||
let mut allowed_nodes = HashMap::new();
|
||||
|
||||
while allowed_nodes.len() < display_data.number_of_lines {
|
||||
let line = heap.pop();
|
||||
match line {
|
||||
Some(line) => {
|
||||
// If we are not doing only_file OR if we are doing
|
||||
// only_file and it has no children (ie is a file not a dir)
|
||||
if !display_data.only_file || line.children.is_empty() {
|
||||
allowed_nodes.insert(line.name.as_path(), line);
|
||||
}
|
||||
if !keep_collapsed.contains(&line.name) {
|
||||
heap = add_children(&display_data, line, heap);
|
||||
}
|
||||
}
|
||||
None => break,
|
||||
}
|
||||
}
|
||||
|
||||
if display_data.only_file {
|
||||
flat_rebuilder(allowed_nodes, root)
|
||||
} else {
|
||||
recursive_rebuilder(&allowed_nodes, root)
|
||||
}
|
||||
}
|
||||
|
||||
fn add_children<'a>(
|
||||
display_data: &AggregateData,
|
||||
file_or_folder: &'a Node,
|
||||
heap: BinaryHeap<&'a Node>,
|
||||
) -> BinaryHeap<&'a Node> {
|
||||
if display_data.depth > file_or_folder.depth {
|
||||
always_add_children(display_data, file_or_folder, heap)
|
||||
} else {
|
||||
heap
|
||||
}
|
||||
}
|
||||
|
||||
fn always_add_children<'a>(
|
||||
display_data: &AggregateData,
|
||||
file_or_folder: &'a Node,
|
||||
mut heap: BinaryHeap<&'a Node>,
|
||||
) -> BinaryHeap<&'a Node> {
|
||||
heap.extend(
|
||||
file_or_folder
|
||||
.children
|
||||
.iter()
|
||||
.filter(|c| match display_data.min_size {
|
||||
Some(ms) => c.size > ms as u64,
|
||||
None => !display_data.using_a_filter || c.name.is_file() || c.size > 0,
|
||||
})
|
||||
.filter(|c| {
|
||||
if display_data.only_dir {
|
||||
c.name.is_dir()
|
||||
} else {
|
||||
true
|
||||
}
|
||||
}),
|
||||
);
|
||||
heap
|
||||
}
|
||||
|
||||
// Finds children of current, if in allowed_nodes adds them as children to new DisplayNode
|
||||
fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) -> DisplayNode {
|
||||
let new_children: Vec<_> = current
|
||||
.children
|
||||
.iter()
|
||||
.filter(|c| allowed_nodes.contains_key(c.name.as_path()))
|
||||
.map(|c| recursive_rebuilder(allowed_nodes, c))
|
||||
.collect();
|
||||
|
||||
build_display_node(new_children, current)
|
||||
}
|
||||
|
||||
// Applies all allowed nodes as children to current node
|
||||
fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> DisplayNode {
|
||||
let new_children: Vec<DisplayNode> = allowed_nodes
|
||||
.into_values()
|
||||
.map(|v| DisplayNode {
|
||||
name: v.name.clone(),
|
||||
size: v.size,
|
||||
children: vec![],
|
||||
})
|
||||
.collect::<Vec<DisplayNode>>();
|
||||
build_display_node(new_children, current)
|
||||
}
|
||||
|
||||
fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
|
||||
new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
|
||||
DisplayNode {
|
||||
name: current.name.clone(),
|
||||
size: current.size,
|
||||
children: new_children,
|
||||
}
|
||||
}
|
||||
|
||||
fn names_have_dup(top_level_nodes: &Vec<Node>) -> bool {
|
||||
let mut stored = HashSet::new();
|
||||
for node in top_level_nodes {
|
||||
let name = get_printable_name(&node.name, true);
|
||||
if stored.contains(&name) {
|
||||
return true;
|
||||
}
|
||||
stored.insert(name);
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
fn handle_duplicate_top_level_names(top_level_nodes: Vec<Node>, short_paths: bool) -> Vec<Node> {
|
||||
// If we have top level names that are the same - we need to tweak them:
|
||||
if short_paths && names_have_dup(&top_level_nodes) {
|
||||
let mut new_top_nodes = top_level_nodes.clone();
|
||||
let mut dir_walk_up_count = 0;
|
||||
|
||||
while names_have_dup(&new_top_nodes) && dir_walk_up_count < 10 {
|
||||
dir_walk_up_count += 1;
|
||||
let mut newer = vec![];
|
||||
|
||||
for node in new_top_nodes.iter() {
|
||||
let mut folders = node.name.iter().rev();
|
||||
// Get parent folder (if second time round get grandparent and so on)
|
||||
for _ in 0..dir_walk_up_count {
|
||||
folders.next();
|
||||
}
|
||||
match folders.next() {
|
||||
// Add (parent_name) to path of Node
|
||||
Some(data) => {
|
||||
let parent = encode_u8(data.as_encoded_bytes());
|
||||
let current_node = node.name.display();
|
||||
let n = Node {
|
||||
name: PathBuf::from(format!("{current_node}({parent})")),
|
||||
size: node.size,
|
||||
children: node.children.clone(),
|
||||
inode_device: node.inode_device,
|
||||
depth: node.depth,
|
||||
};
|
||||
newer.push(n)
|
||||
}
|
||||
// Node does not have a parent
|
||||
None => newer.push(node.clone()),
|
||||
}
|
||||
}
|
||||
new_top_nodes = newer;
|
||||
}
|
||||
new_top_nodes
|
||||
} else {
|
||||
top_level_nodes
|
||||
}
|
||||
}
|
||||
89
src/filter_type.rs
Normal file
89
src/filter_type.rs
Normal file
@@ -0,0 +1,89 @@
|
||||
use crate::display_node::DisplayNode;
|
||||
use crate::node::FileTime;
|
||||
use crate::node::Node;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Cumulative size attributed to one file extension (`None` = no extension).
///
/// NOTE: field order matters — the derived `Ord` compares `size` first, so
/// sorting ranks extensions by how much space they use.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct ExtensionNode<'a> {
    size: u64,
    extension: Option<&'a OsStr>,
}
|
||||
|
||||
pub fn get_all_file_types(
|
||||
top_level_nodes: &[Node],
|
||||
n: usize,
|
||||
by_filetime: &Option<FileTime>,
|
||||
) -> DisplayNode {
|
||||
let ext_nodes = {
|
||||
let mut extension_cumulative_sizes = HashMap::new();
|
||||
build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
|
||||
|
||||
let mut extension_cumulative_sizes: Vec<ExtensionNode<'_>> = extension_cumulative_sizes
|
||||
.iter()
|
||||
.map(|(&extension, &size)| ExtensionNode { extension, size })
|
||||
.collect();
|
||||
|
||||
extension_cumulative_sizes.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
|
||||
|
||||
extension_cumulative_sizes
|
||||
};
|
||||
|
||||
let mut ext_nodes_iter = ext_nodes.iter();
|
||||
|
||||
// First, collect the first N - 1 nodes...
|
||||
let mut displayed: Vec<DisplayNode> = ext_nodes_iter
|
||||
.by_ref()
|
||||
.take(if n > 1 { n - 1 } else { 1 })
|
||||
.map(|node| DisplayNode {
|
||||
name: PathBuf::from(
|
||||
node.extension
|
||||
.map(|ext| format!(".{}", ext.to_string_lossy()))
|
||||
.unwrap_or_else(|| "(no extension)".to_owned()),
|
||||
),
|
||||
size: node.size,
|
||||
children: vec![],
|
||||
})
|
||||
.collect();
|
||||
|
||||
// ...then, aggregate the remaining nodes (if any) into a single "(others)" node
|
||||
if ext_nodes_iter.len() > 0 {
|
||||
let actual_size = if by_filetime.is_some() {
|
||||
ext_nodes_iter.map(|node| node.size).max().unwrap_or(0)
|
||||
} else {
|
||||
ext_nodes_iter.map(|node| node.size).sum()
|
||||
};
|
||||
displayed.push(DisplayNode {
|
||||
name: PathBuf::from("(others)"),
|
||||
size: actual_size,
|
||||
children: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let actual_size: u64 = if by_filetime.is_some() {
|
||||
displayed.iter().map(|node| node.size).max().unwrap_or(0)
|
||||
} else {
|
||||
displayed.iter().map(|node| node.size).sum()
|
||||
};
|
||||
|
||||
DisplayNode {
|
||||
name: PathBuf::from("(total)"),
|
||||
size: actual_size,
|
||||
children: displayed,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_by_all_file_types<'a>(
|
||||
top_level_nodes: &'a [Node],
|
||||
counter: &mut HashMap<Option<&'a OsStr>, u64>,
|
||||
) {
|
||||
for node in top_level_nodes {
|
||||
if node.name.is_file() {
|
||||
let ext = node.name.extension();
|
||||
let cumulative_size = counter.entry(ext).or_default();
|
||||
*cumulative_size += node.size;
|
||||
}
|
||||
build_by_all_file_types(&node.children, counter)
|
||||
}
|
||||
}
|
||||
588
src/main.rs
588
src/main.rs
@@ -1,148 +1,484 @@
|
||||
#[macro_use]
|
||||
extern crate clap;
|
||||
extern crate assert_cli;
|
||||
extern crate walkdir;
|
||||
|
||||
use self::display::draw_it;
|
||||
use clap::{App, AppSettings, Arg};
|
||||
use utils::{find_big_ones, get_dir_tree, simplify_dir_names, sort, trim_deep_ones, Node};
|
||||
|
||||
mod cli;
|
||||
mod config;
|
||||
mod dir_walker;
|
||||
mod display;
|
||||
mod display_node;
|
||||
mod filter;
|
||||
mod filter_type;
|
||||
mod node;
|
||||
mod platform;
|
||||
mod progress;
|
||||
mod utils;
|
||||
|
||||
static DEFAULT_NUMBER_OF_LINES: usize = 20;
|
||||
use crate::cli::Cli;
|
||||
use crate::config::Config;
|
||||
use crate::display_node::DisplayNode;
|
||||
use crate::progress::RuntimeErrors;
|
||||
use clap::Parser;
|
||||
use dir_walker::WalkData;
|
||||
use display::InitialDisplayData;
|
||||
use filter::AggregateData;
|
||||
use progress::PIndicator;
|
||||
use regex::Error;
|
||||
use std::collections::HashSet;
|
||||
use std::env;
|
||||
use std::fs::{read, read_to_string};
|
||||
use std::io;
|
||||
use std::io::Read;
|
||||
use std::panic;
|
||||
use std::process;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use sysinfo::{System, SystemExt};
|
||||
use utils::canonicalize_absolute_path;
|
||||
|
||||
use self::display::draw_it;
|
||||
use config::get_config;
|
||||
use dir_walker::walk_it;
|
||||
use display_node::OUTPUT_TYPE;
|
||||
use filter::get_biggest;
|
||||
use filter_type::get_all_file_types;
|
||||
use regex::Regex;
|
||||
use std::cmp::max;
|
||||
use std::path::PathBuf;
|
||||
use terminal_size::{Height, Width, terminal_size};
|
||||
use utils::get_filesystem_devices;
|
||||
use utils::simplify_dir_names;
|
||||
|
||||
static DEFAULT_NUMBER_OF_LINES: usize = 30;
|
||||
static DEFAULT_TERMINAL_WIDTH: usize = 80;
|
||||
|
||||
fn should_init_color(no_color: bool, force_color: bool) -> bool {
|
||||
if force_color {
|
||||
return true;
|
||||
}
|
||||
if no_color {
|
||||
return false;
|
||||
}
|
||||
// check if NO_COLOR is set
|
||||
// https://no-color.org/
|
||||
if env::var_os("NO_COLOR").is_some() {
|
||||
return false;
|
||||
}
|
||||
if terminal_size().is_none() {
|
||||
// we are not in a terminal, color may not be needed
|
||||
return false;
|
||||
}
|
||||
// we are in a terminal
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// Required for windows 10
|
||||
// Fails to resolve for windows 8 so disable color
|
||||
match ansi_term::enable_ansi_support() {
|
||||
Ok(_) => true,
|
||||
Err(_) => {
|
||||
eprintln!("This version of Windows does not support ANSI colors");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn get_height_of_terminal() -> usize {
|
||||
terminal_size()
|
||||
// Windows CI runners detect a terminal height of 0
|
||||
.map(|(_, Height(h))| max(h.into(), DEFAULT_NUMBER_OF_LINES))
|
||||
.unwrap_or(DEFAULT_NUMBER_OF_LINES)
|
||||
- 10
|
||||
}
|
||||
|
||||
fn get_width_of_terminal() -> usize {
|
||||
terminal_size()
|
||||
.map(|(Width(w), _)| match cfg!(windows) {
|
||||
// Windows CI runners detect a very low terminal width
|
||||
true => max(w.into(), DEFAULT_TERMINAL_WIDTH),
|
||||
false => w.into(),
|
||||
})
|
||||
.unwrap_or(DEFAULT_TERMINAL_WIDTH)
|
||||
}
|
||||
|
||||
fn get_regex_value(maybe_value: Option<&Vec<String>>) -> Vec<Regex> {
|
||||
maybe_value
|
||||
.unwrap_or(&Vec::new())
|
||||
.iter()
|
||||
.map(|reg| {
|
||||
Regex::new(reg).unwrap_or_else(|err| {
|
||||
eprintln!("Ignoring bad value for regex {err:?}");
|
||||
process::exit(1)
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let def_num_str = DEFAULT_NUMBER_OF_LINES.to_string();
|
||||
let options = App::new("Dust")
|
||||
.about("Like du but more intuitive")
|
||||
.version(crate_version!())
|
||||
.setting(AppSettings::TrailingVarArg)
|
||||
.arg(
|
||||
Arg::with_name("depth")
|
||||
.short("d")
|
||||
.long("depth")
|
||||
.help("Depth to show")
|
||||
.takes_value(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("number_of_lines")
|
||||
.short("n")
|
||||
.long("number-of-lines")
|
||||
.help("Number of lines of output to show")
|
||||
.takes_value(true)
|
||||
.default_value(def_num_str.as_ref()),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("display_full_paths")
|
||||
.short("p")
|
||||
.long("full-paths")
|
||||
.help("If set sub directories will not have their path shortened"),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("display_apparent_size")
|
||||
.short("s")
|
||||
.long("apparent-size")
|
||||
.help("If set will use file length. Otherwise we use blocks"),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("reverse")
|
||||
.short("r")
|
||||
.long("reverse")
|
||||
.help("If applied tree will be printed upside down (biggest lowest)"),
|
||||
)
|
||||
.arg(Arg::with_name("inputs").multiple(true))
|
||||
.get_matches();
|
||||
let options = Cli::parse();
|
||||
let config = get_config(options.config.as_ref());
|
||||
|
||||
let target_dirs = {
|
||||
match options.values_of("inputs") {
|
||||
None => vec!["."],
|
||||
Some(r) => r.collect(),
|
||||
let errors = RuntimeErrors::default();
|
||||
let error_listen_for_ctrlc = Arc::new(Mutex::new(errors));
|
||||
let errors_for_rayon = error_listen_for_ctrlc.clone();
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
println!("\nAborting");
|
||||
process::exit(1);
|
||||
})
|
||||
.expect("Error setting Ctrl-C handler");
|
||||
|
||||
let target_dirs = if let Some(path) = config.get_files0_from(&options) {
|
||||
read_paths_from_source(&path, true)
|
||||
} else if let Some(path) = config.get_files_from(&options) {
|
||||
read_paths_from_source(&path, false)
|
||||
} else {
|
||||
match options.params {
|
||||
Some(ref values) => values.clone(),
|
||||
None => vec![".".to_owned()],
|
||||
}
|
||||
};
|
||||
|
||||
let number_of_lines = match value_t!(options.value_of("number_of_lines"), usize) {
|
||||
let summarize_file_types = options.file_types;
|
||||
|
||||
let filter_regexs = get_regex_value(options.filter.as_ref());
|
||||
let invert_filter_regexs = get_regex_value(options.invert_filter.as_ref());
|
||||
|
||||
let terminal_width: usize = match options.terminal_width {
|
||||
Some(val) => val,
|
||||
None => get_width_of_terminal(),
|
||||
};
|
||||
|
||||
let depth = config.get_depth(&options);
|
||||
|
||||
// If depth is set, then we set the default number_of_lines to be max
|
||||
// instead of screen height
|
||||
|
||||
let number_of_lines = match config.get_number_of_lines(&options) {
|
||||
Some(val) => val,
|
||||
None => {
|
||||
if depth != usize::MAX {
|
||||
usize::MAX
|
||||
} else {
|
||||
get_height_of_terminal()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let is_colors = should_init_color(
|
||||
config.get_no_colors(&options),
|
||||
config.get_force_colors(&options),
|
||||
);
|
||||
|
||||
let ignore_directories = match options.ignore_directory {
|
||||
Some(ref values) => values
|
||||
.iter()
|
||||
.map(PathBuf::from)
|
||||
.map(canonicalize_absolute_path)
|
||||
.collect::<Vec<PathBuf>>(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
let ignore_from_file_result = match options.ignore_all_in_file {
|
||||
Some(ref val) => read_to_string(val)
|
||||
.unwrap()
|
||||
.lines()
|
||||
.map(Regex::new)
|
||||
.collect::<Vec<Result<Regex, Error>>>(),
|
||||
None => vec![],
|
||||
};
|
||||
let ignore_from_file = ignore_from_file_result
|
||||
.into_iter()
|
||||
.filter_map(|x| x.ok())
|
||||
.collect::<Vec<Regex>>();
|
||||
|
||||
let invert_filter_regexs = invert_filter_regexs
|
||||
.into_iter()
|
||||
.chain(ignore_from_file)
|
||||
.collect::<Vec<Regex>>();
|
||||
|
||||
let by_filecount = options.filecount;
|
||||
let by_filetime = config.get_filetime(&options);
|
||||
let limit_filesystem = options.limit_filesystem;
|
||||
let follow_links = options.dereference_links;
|
||||
|
||||
let allowed_filesystems = if limit_filesystem {
|
||||
get_filesystem_devices(&target_dirs, follow_links)
|
||||
} else {
|
||||
Default::default()
|
||||
};
|
||||
|
||||
let simplified_dirs = simplify_dir_names(&target_dirs);
|
||||
|
||||
let ignored_full_path: HashSet<PathBuf> = ignore_directories
|
||||
.into_iter()
|
||||
.flat_map(|x| simplified_dirs.iter().map(move |d| d.join(&x)))
|
||||
.collect();
|
||||
|
||||
let output_format = config.get_output_format(&options);
|
||||
|
||||
let ignore_hidden = config.get_ignore_hidden(&options);
|
||||
|
||||
let mut indicator = PIndicator::build_me();
|
||||
if !config.get_disable_progress(&options) {
|
||||
indicator.spawn(output_format.clone())
|
||||
}
|
||||
|
||||
let keep_collapsed: HashSet<PathBuf> = match options.collapse {
|
||||
Some(ref collapse) => {
|
||||
let mut combined_dirs = HashSet::new();
|
||||
for collapse_dir in collapse {
|
||||
for target_dir in target_dirs.iter() {
|
||||
combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
|
||||
}
|
||||
}
|
||||
combined_dirs
|
||||
}
|
||||
None => HashSet::new(),
|
||||
};
|
||||
|
||||
let filter_modified_time = config.get_modified_time_operator(&options);
|
||||
let filter_accessed_time = config.get_accessed_time_operator(&options);
|
||||
let filter_changed_time = config.get_changed_time_operator(&options);
|
||||
|
||||
let walk_data = WalkData {
|
||||
ignore_directories: ignored_full_path,
|
||||
filter_regex: &filter_regexs,
|
||||
invert_filter_regex: &invert_filter_regexs,
|
||||
allowed_filesystems,
|
||||
filter_modified_time,
|
||||
filter_accessed_time,
|
||||
filter_changed_time,
|
||||
use_apparent_size: config.get_apparent_size(&options),
|
||||
by_filecount,
|
||||
by_filetime: &by_filetime,
|
||||
ignore_hidden,
|
||||
follow_links,
|
||||
progress_data: indicator.data.clone(),
|
||||
errors: errors_for_rayon,
|
||||
};
|
||||
|
||||
let threads_to_use = config.get_threads(&options);
|
||||
let stack_size = config.get_custom_stack_size(&options);
|
||||
|
||||
init_rayon(&stack_size, &threads_to_use).install(|| {
|
||||
let top_level_nodes = walk_it(simplified_dirs, &walk_data);
|
||||
|
||||
let tree = match summarize_file_types {
|
||||
true => get_all_file_types(&top_level_nodes, number_of_lines, walk_data.by_filetime),
|
||||
false => {
|
||||
let agg_data = AggregateData {
|
||||
min_size: config.get_min_size(&options),
|
||||
only_dir: config.get_only_dir(&options),
|
||||
only_file: config.get_only_file(&options),
|
||||
number_of_lines,
|
||||
depth,
|
||||
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
|
||||
short_paths: !config.get_full_paths(&options),
|
||||
};
|
||||
get_biggest(
|
||||
top_level_nodes,
|
||||
agg_data,
|
||||
walk_data.by_filetime,
|
||||
keep_collapsed,
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
// Must have stopped indicator before we print to stderr
|
||||
indicator.stop();
|
||||
|
||||
let print_errors = config.get_print_errors(&options);
|
||||
let final_errors = walk_data.errors.lock().unwrap();
|
||||
print_any_errors(print_errors, &final_errors);
|
||||
|
||||
if tree.children.is_empty() && !final_errors.file_not_found.is_empty() {
|
||||
std::process::exit(1)
|
||||
} else {
|
||||
print_output(
|
||||
config,
|
||||
options,
|
||||
tree,
|
||||
walk_data.by_filecount,
|
||||
is_colors,
|
||||
terminal_width,
|
||||
)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn print_output(
|
||||
config: Config,
|
||||
options: Cli,
|
||||
tree: DisplayNode,
|
||||
by_filecount: bool,
|
||||
is_colors: bool,
|
||||
terminal_width: usize,
|
||||
) {
|
||||
let output_format = config.get_output_format(&options);
|
||||
|
||||
if config.get_output_json(&options) {
|
||||
OUTPUT_TYPE.with(|wrapped| {
|
||||
if by_filecount {
|
||||
wrapped.replace("count".to_string());
|
||||
} else {
|
||||
wrapped.replace(output_format);
|
||||
}
|
||||
});
|
||||
println!("{}", serde_json::to_string(&tree).unwrap());
|
||||
} else {
|
||||
let idd = InitialDisplayData {
|
||||
short_paths: !config.get_full_paths(&options),
|
||||
is_reversed: !config.get_reverse(&options),
|
||||
colors_on: is_colors,
|
||||
by_filecount,
|
||||
by_filetime: config.get_filetime(&options),
|
||||
is_screen_reader: config.get_screen_reader(&options),
|
||||
output_format,
|
||||
bars_on_right: config.get_bars_on_right(&options),
|
||||
};
|
||||
|
||||
draw_it(
|
||||
idd,
|
||||
&tree,
|
||||
config.get_no_bars(&options),
|
||||
terminal_width,
|
||||
config.get_skip_total(&options),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn print_any_errors(print_errors: bool, final_errors: &RuntimeErrors) {
|
||||
if !final_errors.file_not_found.is_empty() {
|
||||
let err = final_errors
|
||||
.file_not_found
|
||||
.iter()
|
||||
.map(|a| a.as_ref())
|
||||
.collect::<Vec<&str>>()
|
||||
.join(", ");
|
||||
eprintln!("No such file or directory: {err}");
|
||||
}
|
||||
if !final_errors.no_permissions.is_empty() {
|
||||
if print_errors {
|
||||
let err = final_errors
|
||||
.no_permissions
|
||||
.iter()
|
||||
.map(|a| a.as_ref())
|
||||
.collect::<Vec<&str>>()
|
||||
.join(", ");
|
||||
eprintln!("Did not have permissions for directories: {err}");
|
||||
} else {
|
||||
eprintln!(
|
||||
"Did not have permissions for all directories (add --print-errors to see errors)"
|
||||
);
|
||||
}
|
||||
}
|
||||
if !final_errors.unknown_error.is_empty() {
|
||||
let err = final_errors
|
||||
.unknown_error
|
||||
.iter()
|
||||
.map(|a| a.as_ref())
|
||||
.collect::<Vec<&str>>()
|
||||
.join(", ");
|
||||
eprintln!("Unknown Error: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
/// Read target paths from `path` ("-" means stdin), split on newlines or —
/// when `null_terminated` — on NUL bytes (the --files0-from convention).
///
/// Fix: this span contained stale interleaved lines from an older revision
/// ("Ignoring bad value for number_of_lines", a stray `};`) that made the
/// function unparsable; the match on `result` is reconstructed cleanly.
///
/// Error policy: an unreadable file or invalid UTF-8 prints a diagnostic and
/// falls back to the current directory; an empty stdin does the same with a
/// gentler message.
fn read_paths_from_source(path: &str, null_terminated: bool) -> Vec<String> {
    let from_stdin = path == "-";

    // Err(None) => "no input" case; Err(Some(msg)) => a real read/decode error.
    let result: Result<Vec<String>, Option<String>> = (|| {
        // 1) read bytes
        let bytes = if from_stdin {
            let mut b = Vec::new();
            io::stdin().lock().read_to_end(&mut b).map_err(|_| None)?;
            b
        } else {
            read(path).map_err(|e| Some(e.to_string()))?
        };

        // 2) decode as UTF-8
        let text = std::str::from_utf8(&bytes).map_err(|e| {
            if from_stdin {
                None
            } else {
                Some(e.to_string())
            }
        })?;

        // 3) split into individual paths
        let items: Vec<String> = if null_terminated {
            text.split('\0')
                .filter(|s| !s.is_empty())
                .map(str::to_owned)
                .collect()
        } else {
            text.lines().map(str::to_owned).collect()
        };
        if from_stdin && items.is_empty() {
            return Err(None);
        }
        Ok(items)
    })();

    match result {
        Ok(v) => v,
        Err(None) => {
            eprintln!("No files provided, defaulting to current directory");
            vec![".".to_owned()]
        }
        Err(Some(msg)) => {
            eprintln!("Failed to read file: {msg}");
            vec![".".to_owned()]
        }
    }
}
|
||||
|
||||
let depth = {
|
||||
if options.is_present("depth") {
|
||||
match value_t!(options.value_of("depth"), u64) {
|
||||
Ok(v) => Some(v + 1),
|
||||
Err(_) => {
|
||||
eprintln!("Ignoring bad value for depth");
|
||||
fn init_rayon(stack: &Option<usize>, threads: &Option<usize>) -> rayon::ThreadPool {
|
||||
let stack_size = match stack {
|
||||
Some(s) => Some(*s),
|
||||
None => {
|
||||
// Do not increase the stack size on a 32 bit system, it will fail
|
||||
if cfg!(target_pointer_width = "32") {
|
||||
None
|
||||
} else {
|
||||
let large_stack = usize::pow(1024, 3);
|
||||
let mut s = System::new();
|
||||
s.refresh_memory();
|
||||
// Larger stack size if possible to handle cases with lots of nested directories
|
||||
let available = s.available_memory();
|
||||
if available > (large_stack * threads.unwrap_or(1)).try_into().unwrap() {
|
||||
Some(large_stack)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
if options.is_present("depth") && number_of_lines != DEFAULT_NUMBER_OF_LINES {
|
||||
eprintln!("Use either -n or -d. Not both");
|
||||
return;
|
||||
}
|
||||
|
||||
let use_apparent_size = options.is_present("display_apparent_size");
|
||||
let use_full_path = options.is_present("display_full_paths");
|
||||
|
||||
let simplified_dirs = simplify_dir_names(target_dirs);
|
||||
let (permissions, nodes) = get_dir_tree(&simplified_dirs, use_apparent_size);
|
||||
let sorted_data = sort(nodes);
|
||||
let biggest_ones = {
|
||||
match depth {
|
||||
None => find_big_ones(sorted_data, number_of_lines + simplified_dirs.len()),
|
||||
Some(d) => trim_deep_ones(sorted_data, d, &simplified_dirs),
|
||||
}
|
||||
};
|
||||
let tree = build_tree(biggest_ones, depth);
|
||||
//println!("{:?}", tree);
|
||||
|
||||
draw_it(
|
||||
permissions,
|
||||
use_full_path,
|
||||
options.is_present("reverse"),
|
||||
tree,
|
||||
);
|
||||
}
|
||||
|
||||
fn build_tree(biggest_ones: Vec<(String, u64)>, depth: Option<u64>) -> Node {
|
||||
let mut top_parent = Node {
|
||||
name: "".to_string(),
|
||||
size: 0,
|
||||
children: vec![],
|
||||
};
|
||||
|
||||
// assume sorted order
|
||||
for b in biggest_ones {
|
||||
let n = Node {
|
||||
name: b.0,
|
||||
size: b.1,
|
||||
children: vec![],
|
||||
};
|
||||
recursively_build_tree(&mut top_parent, n, depth)
|
||||
}
|
||||
top_parent
|
||||
}
|
||||
|
||||
fn recursively_build_tree(parent_node: &mut Node, new_node: Node, depth: Option<u64>) {
|
||||
let new_depth = match depth {
|
||||
None => None,
|
||||
Some(0) => return,
|
||||
Some(d) => Some(d - 1),
|
||||
};
|
||||
for c in parent_node.children.iter_mut() {
|
||||
if new_node.name.starts_with(&c.name) {
|
||||
return recursively_build_tree(&mut *c, new_node, new_depth);
|
||||
match build_thread_pool(stack_size, threads) {
|
||||
Ok(pool) => pool,
|
||||
Err(err) => {
|
||||
eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1");
|
||||
if stack.is_none() && stack_size.is_some() {
|
||||
// stack parameter was none, try with default stack size
|
||||
if let Ok(pool) = build_thread_pool(None, threads) {
|
||||
eprintln!("WARNING: not using large stack size, got error: {err}");
|
||||
return pool;
|
||||
}
|
||||
}
|
||||
panic!("{err}");
|
||||
}
|
||||
}
|
||||
let temp = Box::<Node>::new(new_node);
|
||||
parent_node.children.push(temp);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
/// Construct a rayon thread pool with optional stack size and thread count;
/// any unset option falls back to rayon's defaults.
fn build_thread_pool(
    stack_size: Option<usize>,
    threads: &Option<usize>,
) -> Result<rayon::ThreadPool, rayon::ThreadPoolBuildError> {
    let mut builder = rayon::ThreadPoolBuilder::new();
    if let Some(size) = stack_size {
        builder = builder.stack_size(size);
    }
    if let Some(count) = *threads {
        builder = builder.num_threads(count);
    }
    builder.build()
}
|
||||
|
||||
112
src/node.rs
Normal file
112
src/node.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
use crate::dir_walker::WalkData;
|
||||
use crate::platform::get_metadata;
|
||||
use crate::utils::is_filtered_out_due_to_file_time;
|
||||
use crate::utils::is_filtered_out_due_to_invert_regex;
|
||||
use crate::utils::is_filtered_out_due_to_regex;
|
||||
|
||||
use std::cmp::Ordering;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Eq, Clone)]
|
||||
pub struct Node {
|
||||
pub name: PathBuf,
|
||||
pub size: u64,
|
||||
pub children: Vec<Node>,
|
||||
pub inode_device: Option<(u64, u64)>,
|
||||
pub depth: usize,
|
||||
}
|
||||
|
||||
/// Which file timestamp "filetime" mode reports.
#[derive(Debug, PartialEq)]
pub enum FileTime {
    Modified,
    Accessed,
    Changed,
}
|
||||
|
||||
impl From<crate::cli::FileTime> for FileTime {
|
||||
fn from(time: crate::cli::FileTime) -> Self {
|
||||
match time {
|
||||
crate::cli::FileTime::Modified => Self::Modified,
|
||||
crate::cli::FileTime::Accessed => Self::Accessed,
|
||||
crate::cli::FileTime::Changed => Self::Changed,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn build_node(
|
||||
dir: PathBuf,
|
||||
children: Vec<Node>,
|
||||
is_symlink: bool,
|
||||
is_file: bool,
|
||||
depth: usize,
|
||||
walk_data: &WalkData,
|
||||
) -> Option<Node> {
|
||||
let use_apparent_size = walk_data.use_apparent_size;
|
||||
let by_filecount = walk_data.by_filecount;
|
||||
let by_filetime = &walk_data.by_filetime;
|
||||
|
||||
get_metadata(
|
||||
&dir,
|
||||
use_apparent_size,
|
||||
walk_data.follow_links && is_symlink,
|
||||
)
|
||||
.map(|data| {
|
||||
let inode_device = data.1;
|
||||
|
||||
let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
|
||||
|| is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
|
||||
|| by_filecount && !is_file
|
||||
|| [
|
||||
(&walk_data.filter_modified_time, data.2.0),
|
||||
(&walk_data.filter_accessed_time, data.2.1),
|
||||
(&walk_data.filter_changed_time, data.2.2),
|
||||
]
|
||||
.iter()
|
||||
.any(|(filter_time, actual_time)| {
|
||||
is_filtered_out_due_to_file_time(filter_time, *actual_time)
|
||||
}) {
|
||||
0
|
||||
} else if by_filecount {
|
||||
1
|
||||
} else if by_filetime.is_some() {
|
||||
match by_filetime {
|
||||
Some(FileTime::Modified) => data.2.0.unsigned_abs(),
|
||||
Some(FileTime::Accessed) => data.2.1.unsigned_abs(),
|
||||
Some(FileTime::Changed) => data.2.2.unsigned_abs(),
|
||||
None => unreachable!(),
|
||||
}
|
||||
} else {
|
||||
data.0
|
||||
};
|
||||
|
||||
Node {
|
||||
name: dir,
|
||||
size,
|
||||
children,
|
||||
inode_device,
|
||||
depth,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
impl PartialEq for Node {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.name == other.name && self.size == other.size && self.children == other.children
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Node {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.size
|
||||
.cmp(&other.size)
|
||||
.then_with(|| self.name.cmp(&other.name))
|
||||
.then_with(|| self.children.cmp(&other.children))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Node {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
217
src/platform.rs
Normal file
217
src/platform.rs
Normal file
@@ -0,0 +1,217 @@
|
||||
#[allow(unused_imports)]
|
||||
use std::fs;
|
||||
|
||||
use std::path::Path;
|
||||
|
||||
#[cfg(target_family = "unix")]
fn get_block_size() -> u64 {
    // All os specific implementations of MetadataExt seem to define a block as 512 bytes
    // https://doc.rust-lang.org/std/os/linux/fs/trait.MetadataExt.html#tymethod.st_blocks
    512
}

// (inode, device) pair identifying a file on a filesystem.
type InodeAndDevice = (u64, u64);
// (modified, accessed, changed) timestamps in unix seconds.
type FileTime = (i64, i64, i64);

/// Stat `path` and return (size, inode/device, timestamps), or `None` on error.
/// `use_apparent_size` reports the file length; otherwise the allocated size
/// (block count × 512), capped to guard against inflated NTFS block counts.
#[cfg(target_family = "unix")]
pub fn get_metadata<P: AsRef<Path>>(
    path: P,
    use_apparent_size: bool,
    follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
    use std::os::unix::fs::MetadataExt;

    let metadata = if follow_links {
        path.as_ref().metadata()
    } else {
        path.as_ref().symlink_metadata()
    };
    let md = metadata.ok()?;

    let file_size = md.len();
    let ids = Some((md.ino(), md.dev()));
    let times = (md.mtime(), md.atime(), md.ctime());

    if use_apparent_size {
        return Some((file_size, ids, times));
    }

    // On NTFS mounts, the reported block count can be unexpectedly large.
    // To avoid overestimating disk usage, cap the allocated size to what the
    // file should occupy based on the file system I/O block size (blksize).
    // Related: https://github.com/bootandy/dust/issues/295
    let blksize = md.blksize();
    let target_size = file_size.div_ceil(blksize) * blksize;
    let reported_size = md.blocks() * get_block_size();

    // File systems can pre-allocate more space for a file than what would be necessary
    let pre_allocation_buffer = blksize * 65536;
    let max_size = target_size + pre_allocation_buffer;
    let allocated_size = if reported_size > max_size {
        target_size
    } else {
        reported_size
    };
    Some((allocated_size, ids, times))
}
|
||||
|
||||
/// Windows implementation of `get_metadata`.
///
/// Returns `(size, Option<(file_index, volume_serial)>, (mtime, atime, ctime))`
/// for `path`, or `None` when the metadata cannot be read.
#[cfg(target_family = "windows")]
pub fn get_metadata<P: AsRef<Path>>(
    path: P,
    use_apparent_size: bool,
    follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
    // On windows opening the file to get size, file ID and volume can be very
    // expensive because 1) it causes a few system calls, and more importantly 2) it can cause
    // windows defender to scan the file.
    // Therefore we try to avoid doing that for common cases, mainly those of
    // plain files:

    // The idea is to make do with the file size that we get from the OS for
    // free as part of iterating a folder. Therefore we want to make sure that
    // it makes sense to use that free size information:

    // Volume boundaries:
    // The user can ask us not to cross volume boundaries. If the DirEntry is a
    // plain file and not a reparse point or other non-trivial stuff, we assume
    // that the file is located on the same volume as the directory that
    // contains it.

    // File ID:
    // This optimization does deprive us of access to a file ID. As a
    // workaround the fast path below simply reports `None` for the
    // inode/device pair.
    // Hard links: Unresolved. We don't get inode/file index, so hard links
    // count once for each link. Hopefully they are not too commonly in use on
    // windows.

    // Size:
    // We assume (naively?) that for the common cases the free size info is the
    // same as one would get by doing the expensive thing. Sparse, encrypted and
    // compressed files are not included in the common cases, as one can imagine
    // there being more than one view on their size.

    // Savings in orders of magnitude in terms of time, io and cpu have been
    // observed on hdd, windows 10, some 100Ks files taking up some hundreds of
    // GBs:
    // Consistently opening the file: 30 minutes.
    // With this optimization: 8 sec.

    use std::io;
    use winapi_util::Handle;

    // Open `path` requesting only FILE_READ_ATTRIBUTES — enough for the
    // attribute queries below, and cheap (see comment inside).
    fn handle_from_path_limited(path: &Path) -> io::Result<Handle> {
        use std::fs::OpenOptions;
        use std::os::windows::fs::OpenOptionsExt;
        const FILE_READ_ATTRIBUTES: u32 = 0x0080;

        // So, it seems that it does not have to be that expensive to open
        // files to get their info: Avoiding opening the file with the full
        // GENERIC_READ is key:

        // https://docs.microsoft.com/en-us/windows/win32/secauthz/generic-access-rights:
        // "For example, a Windows file object maps the GENERIC_READ bit to the
        // READ_CONTROL and SYNCHRONIZE standard access rights and to the
        // FILE_READ_DATA, FILE_READ_EA, and FILE_READ_ATTRIBUTES
        // object-specific access rights"

        // The flag FILE_READ_DATA seems to be the expensive one, so we'll avoid
        // that, and most of the other ones. Simply because it seems that we
        // don't need them.

        let file = OpenOptions::new()
            .access_mode(FILE_READ_ATTRIBUTES)
            .open(path)?;
        Ok(Handle::from_file(file))
    }

    // Slow path: open the file to get file size, file index, volume serial
    // and the three timestamps.
    fn get_metadata_expensive(
        path: &Path,
        use_apparent_size: bool,
    ) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
        use winapi_util::file::information;

        let h = handle_from_path_limited(path).ok()?;
        let info = information(&h).ok()?;

        if use_apparent_size {
            use filesize::PathExt;
            // NOTE(review): this branch reports `size_on_disk()` for the
            // *apparent*-size mode, while the other branch reports the logical
            // `file_size()`. That is the opposite pairing of the unix
            // implementation (apparent => `md.len()`); confirm this is
            // intentional for Windows.
            Some((
                path.size_on_disk().ok()?,
                Some((info.file_index(), info.volume_serial_number())),
                (
                    info.last_write_time().unwrap() as i64,
                    info.last_access_time().unwrap() as i64,
                    info.creation_time().unwrap() as i64,
                ),
            ))
        } else {
            Some((
                info.file_size(),
                Some((info.file_index(), info.volume_serial_number())),
                (
                    info.last_write_time().unwrap() as i64,
                    info.last_access_time().unwrap() as i64,
                    info.creation_time().unwrap() as i64,
                ),
            ))
        }
    }

    use std::os::windows::fs::MetadataExt;
    let path = path.as_ref();
    // `metadata()` follows symlinks; `symlink_metadata()` reports the link itself.
    let metadata = if follow_links {
        path.metadata()
    } else {
        path.symlink_metadata()
    };
    match metadata {
        Ok(ref md) => {
            const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
            const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
            const FILE_ATTRIBUTE_HIDDEN: u32 = 0x02;
            const FILE_ATTRIBUTE_SYSTEM: u32 = 0x04;
            const FILE_ATTRIBUTE_NORMAL: u32 = 0x80;
            const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10;
            const FILE_ATTRIBUTE_SPARSE_FILE: u32 = 0x00000200;
            const FILE_ATTRIBUTE_PINNED: u32 = 0x00080000;
            const FILE_ATTRIBUTE_UNPINNED: u32 = 0x00100000;
            const FILE_ATTRIBUTE_RECALL_ON_OPEN: u32 = 0x00040000;
            const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: u32 = 0x00400000;
            const FILE_ATTRIBUTE_OFFLINE: u32 = 0x00001000;
            // normally FILE_ATTRIBUTE_SPARSE_FILE would be enough, however Windows sometimes likes to mask it out. see: https://stackoverflow.com/q/54560454
            const IS_PROBABLY_ONEDRIVE: u32 = FILE_ATTRIBUTE_SPARSE_FILE
                | FILE_ATTRIBUTE_PINNED
                | FILE_ATTRIBUTE_UNPINNED
                | FILE_ATTRIBUTE_RECALL_ON_OPEN
                | FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS
                | FILE_ATTRIBUTE_OFFLINE;
            // Ignore attributes that do not affect whether the free size info is trustworthy.
            let attr_filtered = md.file_attributes()
                & !(FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_READONLY | FILE_ATTRIBUTE_SYSTEM);
            // Fast path: plain files/directories whose directory-iteration size
            // is trustworthy — unless the entry looks like a OneDrive
            // placeholder and the caller wants apparent sizes.
            if ((attr_filtered & FILE_ATTRIBUTE_ARCHIVE) != 0
                || (attr_filtered & FILE_ATTRIBUTE_DIRECTORY) != 0
                || md.file_attributes() == FILE_ATTRIBUTE_NORMAL)
                && !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size)
            {
                Some((
                    md.len(),
                    // No file ID available on the fast path (see comment above).
                    None,
                    (
                        md.last_write_time() as i64,
                        md.last_access_time() as i64,
                        md.creation_time() as i64,
                    ),
                ))
            } else {
                get_metadata_expensive(path, use_apparent_size)
            }
        }
        _ => get_metadata_expensive(path, use_apparent_size),
    }
}
|
||||
161
src/progress.rs
Normal file
161
src/progress.rs
Normal file
@@ -0,0 +1,161 @@
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
io::Write,
|
||||
path::Path,
|
||||
sync::{
|
||||
Arc, RwLock,
|
||||
atomic::{AtomicU8, AtomicUsize, Ordering},
|
||||
mpsc::{self, RecvTimeoutError, Sender},
|
||||
},
|
||||
thread::JoinHandle,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
#[cfg(not(target_has_atomic = "64"))]
|
||||
use portable_atomic::AtomicU64;
|
||||
#[cfg(target_has_atomic = "64")]
|
||||
use std::sync::atomic::AtomicU64;
|
||||
|
||||
use crate::display::human_readable_number;
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
||||
pub const ORDERING: Ordering = Ordering::Relaxed;
|
||||
|
||||
const SPINNER_SLEEP_TIME: u64 = 100;
|
||||
const PROGRESS_CHARS: [char; 4] = ['-', '\\', '|', '/'];
|
||||
const PROGRESS_CHARS_LEN: usize = PROGRESS_CHARS.len();
|
||||
|
||||
/// Minimal get/set abstraction over a value shared between threads.
pub trait ThreadSyncTrait<T> {
    fn set(&self, val: T);
    fn get(&self) -> T;
}

/// A `String` guarded by an `RwLock`, exposed through [`ThreadSyncTrait`].
#[derive(Default)]
pub struct ThreadStringWrapper {
    inner: RwLock<String>,
}

impl ThreadSyncTrait<String> for ThreadStringWrapper {
    fn set(&self, val: String) {
        // Take the write lock for the duration of the assignment only.
        let mut guard = self.inner.write().unwrap();
        *guard = val;
    }

    fn get(&self) -> String {
        // Readers receive an independent copy so the lock is released immediately.
        self.inner.read().unwrap().clone()
    }
}
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
||||
// creating an enum this way allows to have simpler syntax compared to a Mutex or a RwLock
// (the u8 values are stored in `PAtomicInfo::state`, an `AtomicU8`)
#[allow(non_snake_case)]
pub mod Operation {
    /// Walking the directory tree and accumulating sizes.
    pub const INDEXING: u8 = 0;
    /// Post-processing the collected data before display.
    pub const PREPARING: u8 = 1;
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct PAtomicInfo {
|
||||
pub num_files: AtomicUsize,
|
||||
pub total_file_size: AtomicU64,
|
||||
pub state: AtomicU8,
|
||||
pub current_path: ThreadStringWrapper,
|
||||
}
|
||||
|
||||
impl PAtomicInfo {
|
||||
pub fn clear_state(&self, dir: &Path) {
|
||||
self.state.store(Operation::INDEXING, ORDERING);
|
||||
let dir_name = dir.to_string_lossy().to_string();
|
||||
self.current_path.set(dir_name);
|
||||
self.total_file_size.store(0, ORDERING);
|
||||
self.num_files.store(0, ORDERING);
|
||||
}
|
||||
}
|
||||
|
||||
/// Deduplicated runtime errors gathered while scanning.
#[derive(Default)]
pub struct RuntimeErrors {
    /// Paths that could not be read because of missing permissions.
    pub no_permissions: HashSet<String>,
    /// Paths that disappeared or never existed.
    pub file_not_found: HashSet<String>,
    /// Any other error message encountered.
    pub unknown_error: HashSet<String>,
    /// Non-zero when the scan was interrupted (exact semantics set by callers).
    pub interrupted_error: i32,
}
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
||||
fn format_preparing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
|
||||
let path_in = data.current_path.get();
|
||||
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
|
||||
format!("Preparing: {path_in} {size} ... {prog_char}")
|
||||
}
|
||||
|
||||
fn format_indexing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
|
||||
let path_in = data.current_path.get();
|
||||
let file_count = data.num_files.load(ORDERING);
|
||||
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
|
||||
let file_str = format!("{file_count} files, {size}");
|
||||
format!("Indexing: {path_in} {file_str} ... {prog_char}")
|
||||
}
|
||||
|
||||
pub struct PIndicator {
|
||||
pub thread: Option<(Sender<()>, JoinHandle<()>)>,
|
||||
pub data: Arc<PAtomicInfo>,
|
||||
}
|
||||
|
||||
impl PIndicator {
|
||||
pub fn build_me() -> Self {
|
||||
Self {
|
||||
thread: None,
|
||||
data: Arc::new(PAtomicInfo {
|
||||
..Default::default()
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn spawn(&mut self, output_display: String) {
|
||||
let data = self.data.clone();
|
||||
let (stop_handler, receiver) = mpsc::channel::<()>();
|
||||
|
||||
let time_info_thread = std::thread::spawn(move || {
|
||||
let mut progress_char_i: usize = 0;
|
||||
let mut stderr = std::io::stderr();
|
||||
let mut msg = "".to_string();
|
||||
|
||||
// While the timeout triggers we go round the loop
|
||||
// If we disconnect or the sender sends its message we exit the while loop
|
||||
while let Err(RecvTimeoutError::Timeout) =
|
||||
receiver.recv_timeout(Duration::from_millis(SPINNER_SLEEP_TIME))
|
||||
{
|
||||
// Clear the text written by 'write!'& Return at the start of line
|
||||
let clear = format!("\r{:width$}", " ", width = msg.len());
|
||||
write!(stderr, "{clear}").unwrap();
|
||||
let prog_char = PROGRESS_CHARS[progress_char_i];
|
||||
|
||||
msg = match data.state.load(ORDERING) {
|
||||
Operation::INDEXING => format_indexing_str(prog_char, &data, &output_display),
|
||||
Operation::PREPARING => format_preparing_str(prog_char, &data, &output_display),
|
||||
_ => panic!("Unknown State"),
|
||||
};
|
||||
|
||||
write!(stderr, "\r{msg}").unwrap();
|
||||
stderr.flush().unwrap();
|
||||
|
||||
progress_char_i += 1;
|
||||
progress_char_i %= PROGRESS_CHARS_LEN;
|
||||
}
|
||||
|
||||
let clear = format!("\r{:width$}", " ", width = msg.len());
|
||||
write!(stderr, "{clear}").unwrap();
|
||||
write!(stderr, "\r").unwrap();
|
||||
stderr.flush().unwrap();
|
||||
});
|
||||
self.thread = Some((stop_handler, time_info_thread))
|
||||
}
|
||||
|
||||
pub fn stop(self) {
|
||||
if let Some((stop_handler, thread)) = self.thread {
|
||||
stop_handler.send(()).unwrap();
|
||||
thread.join().unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
312
src/tests.rs
312
src/tests.rs
@@ -1,312 +0,0 @@
|
||||
extern crate ansi_term;
|
||||
extern crate tempfile;
|
||||
use self::tempfile::Builder;
|
||||
use self::tempfile::TempDir;
|
||||
use super::*;
|
||||
use display::format_string;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::panic;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
|
||||
#[test]
pub fn test_main() {
    // Full-output comparison of the binary run on the checked-in test directory.
    assert_cli::Assert::main_binary()
        .with_args(&["src/test_dir"])
        .stdout()
        .is(main_output(true))
        .unwrap();
}
|
||||
|
||||
#[test]
pub fn test_main_long_paths() {
    // `-p` prints full paths, so expect the non-shortened variant of the output.
    assert_cli::Assert::main_binary()
        .with_args(&["-p", "src/test_dir"])
        .stdout()
        .is(main_output(false))
        .unwrap();
}
|
||||
|
||||
#[test]
pub fn test_main_multi_arg() {
    // Overlapping arguments (parent + children) must collapse to the same
    // output as passing the parent alone.
    assert_cli::Assert::main_binary()
        .with_args(&["src/test_dir/many/", "src/test_dir/", "src/test_dir"])
        .stdout()
        .is(main_output(true))
        .unwrap();
}
|
||||
|
||||
// Expected tree for src/test_dir on macOS (sizes differ from linux because of
// filesystem block accounting).
#[cfg(target_os = "macos")]
fn main_output(short_paths: bool) -> String {
    format!(
        "{}
{}
{}
{}",
        format_string("src/test_dir", true, short_paths, " 4.0K", "─┬"),
        format_string("src/test_dir/many", true, short_paths, " 4.0K", " └─┬",),
        format_string(
            "src/test_dir/many/hello_file",
            true,
            short_paths,
            " 4.0K",
            " ├──",
        ),
        format_string(
            "src/test_dir/many/a_file",
            false,
            short_paths,
            " 0B",
            " └──",
        ),
    )
}
|
||||
|
||||
// Expected tree for src/test_dir on linux (ext-style 4K blocks: the directory
// totals are larger than on macOS).
#[cfg(target_os = "linux")]
fn main_output(short_paths: bool) -> String {
    format!(
        "{}
{}
{}
{}",
        format_string("src/test_dir", true, short_paths, " 12K", "─┬"),
        format_string("src/test_dir/many", true, short_paths, " 8.0K", " └─┬",),
        format_string(
            "src/test_dir/many/hello_file",
            true,
            short_paths,
            " 4.0K",
            " ├──",
        ),
        format_string(
            "src/test_dir/many/a_file",
            false,
            short_paths,
            " 0B",
            " └──",
        ),
    )
}
|
||||
|
||||
#[test]
pub fn test_apparent_size() {
    // With `-s` the apparent (byte) size is shown, so hello_file reports its
    // literal content length rather than a filesystem block.
    // `format_string` already yields a String — the previous
    // `format!("{}", …)` wrapper was redundant (clippy: useless_format).
    let r = format_string(
        "src/test_dir/many/hello_file",
        true,
        true,
        " 6B",
        " ├──",
    );

    assert_cli::Assert::main_binary()
        .with_args(&["-s", "src/test_dir"])
        .stdout()
        .contains(r)
        .unwrap();
}
|
||||
|
||||
#[test]
pub fn test_reverse_flag() {
    // `-r` inverts the tree: leaves come first, the root is printed last.
    // variable names the same length make the output easier to read
    let a = " ┌── a_file";
    let b = " ├── hello_file";
    let c = " ┌─┴ many";
    let d = " ─┴ test_dir";

    assert_cli::Assert::main_binary()
        .with_args(&["-r", "src/test_dir"])
        .stdout()
        .contains(a)
        .stdout()
        .contains(b)
        .stdout()
        .contains(c)
        .stdout()
        .contains(d)
        .unwrap();
}
|
||||
|
||||
#[test]
pub fn test_d_flag_works() {
    // We should see the top level directory but not the sub dirs / files:
    assert_cli::Assert::main_binary()
        .with_args(&["-d", "1", "-s", "src/test_dir"])
        .stdout()
        .doesnt_contain("hello_file")
        .unwrap();
}
|
||||
|
||||
fn build_temp_file(dir: &TempDir) -> (PathBuf) {
|
||||
let file_path = dir.path().join("notes.txt");
|
||||
let mut file = File::create(&file_path).unwrap();
|
||||
writeln!(file, "I am a temp file").unwrap();
|
||||
file_path
|
||||
}
|
||||
|
||||
#[test]
pub fn test_soft_sym_link() {
    // A soft link to a file inside the same directory should show up as its
    // own entry in the tree.
    let dir = Builder::new().tempdir().unwrap();
    let file = build_temp_file(&dir);
    let dir_s = dir.path().to_str().unwrap();
    let file_path_s = file.to_str().unwrap();

    let link_name = dir.path().join("the_link");
    let link_name_s = link_name.to_str().unwrap();
    let c = Command::new("ln")
        .arg("-s")
        .arg(file_path_s)
        .arg(link_name_s)
        .output();
    assert!(c.is_ok());

    let r = soft_sym_link_output(dir_s, file_path_s, link_name_s);

    // We cannot guarantee which version will appear first.
    // TODO: Consider adding predictable iteration order (sort file entries by name?)
    assert_cli::Assert::main_binary()
        .with_args(&[dir_s])
        .stdout()
        .contains(r)
        .unwrap();
}
|
||||
|
||||
// Expected soft-link output on macOS: the link itself occupies a 4.0K block.
#[cfg(target_os = "macos")]
fn soft_sym_link_output(dir: &str, file_path: &str, link_name: &str) -> String {
    format!(
        "{}
{}
{}",
        format_string(dir, true, true, " 8.0K", "─┬"),
        format_string(file_path, true, true, " 4.0K", " ├──",),
        format_string(link_name, false, true, " 4.0K", " └──",),
    )
}
|
||||
|
||||
// Expected soft-link output on linux: the link itself reports 0B.
#[cfg(target_os = "linux")]
fn soft_sym_link_output(dir: &str, file_path: &str, link_name: &str) -> String {
    format!(
        "{}
{}
{}",
        format_string(dir, true, true, " 8.0K", "─┬"),
        format_string(file_path, true, true, " 4.0K", " ├──",),
        format_string(link_name, false, true, " 0B", " └──",),
    )
}
|
||||
|
||||
// Hard links are ignored as the inode is the same as the file
#[test]
pub fn test_hard_sym_link() {
    let dir = Builder::new().tempdir().unwrap();
    let file = build_temp_file(&dir);
    let dir_s = dir.path().to_str().unwrap();
    let file_path_s = file.to_str().unwrap();

    let link_name = dir.path().join("the_link");
    let link_name_s = link_name.to_str().unwrap();
    // `ln` without -s makes a hard link.
    let c = Command::new("ln")
        .arg(file_path_s)
        .arg(link_name_s)
        .output();
    assert!(c.is_ok());

    let (r, r2) = hard_link_output(dir_s, file_path_s, link_name_s);

    // Because this is a hard link the file and hard link look identical. Therefore
    // we cannot guarantee which version will appear first.
    // TODO: Consider adding predictable iteration order (sort file entries by name?)
    let result = panic::catch_unwind(|| {
        assert_cli::Assert::main_binary()
            .with_args(&[dir_s])
            .stdout()
            .contains(r)
            .unwrap();
    });
    // Fall back to the alternative rendering if the first one did not match.
    if result.is_err() {
        assert_cli::Assert::main_binary()
            .with_args(&[dir_s])
            .stdout()
            .contains(r2)
            .unwrap();
    }
}
|
||||
|
||||
// Two acceptable renderings on macOS: the single entry may be the file or the link.
#[cfg(target_os = "macos")]
fn hard_link_output(dir_s: &str, file_path_s: &str, link_name_s: &str) -> (String, String) {
    let r = format!(
        "{}
{}",
        format_string(dir_s, true, true, " 4.0K", "─┬"),
        format_string(file_path_s, true, true, " 4.0K", " └──")
    );
    let r2 = format!(
        "{}
{}",
        format_string(dir_s, true, true, " 4.0K", "─┬"),
        format_string(link_name_s, true, true, " 4.0K", " └──")
    );
    (r, r2)
}
|
||||
|
||||
// Two acceptable renderings on linux: the single entry may be the file or the link.
#[cfg(target_os = "linux")]
fn hard_link_output(dir_s: &str, file_path_s: &str, link_name_s: &str) -> (String, String) {
    let r = format!(
        "{}
{}",
        format_string(dir_s, true, true, " 8.0K", "─┬"),
        format_string(file_path_s, true, true, " 4.0K", " └──")
    );
    let r2 = format!(
        "{}
{}",
        format_string(dir_s, true, true, " 8.0K", "─┬"),
        format_string(link_name_s, true, true, " 4.0K", " └──")
    );
    (r, r2)
}
|
||||
|
||||
//Check we don't recurse down an infinite symlink tree
#[test]
pub fn test_recursive_sym_link() {
    let dir = Builder::new().tempdir().unwrap();
    let dir_s = dir.path().to_str().unwrap();

    // Create a symlink inside the directory that points back at the directory itself.
    let link_name = dir.path().join("the_link");
    let link_name_s = link_name.to_str().unwrap();

    let c = Command::new("ln")
        .arg("-s")
        .arg(dir_s)
        .arg(link_name_s)
        .output();
    assert!(c.is_ok());

    assert_cli::Assert::main_binary()
        .with_args(&[dir_s])
        .stdout()
        .contains(recursive_sym_link_output(dir_s, link_name_s))
        .unwrap();
}
|
||||
|
||||
// Expected recursive-link output on macOS: the link appears once, never expanded.
#[cfg(target_os = "macos")]
fn recursive_sym_link_output(dir: &str, link_name: &str) -> String {
    format!(
        "{}
{}",
        format_string(dir, true, true, " 4.0K", "─┬"),
        format_string(link_name, true, true, " 4.0K", " └──",),
    )
}
|
||||
// Expected recursive-link output on linux: the link reports 0B, never expanded.
#[cfg(target_os = "linux")]
fn recursive_sym_link_output(dir: &str, link_name: &str) -> String {
    format!(
        "{}
{}",
        format_string(dir, true, true, " 4.0K", "─┬"),
        format_string(link_name, true, true, " 0B", " └──",),
    )
}
|
||||
198
src/utils.rs
Normal file
198
src/utils.rs
Normal file
@@ -0,0 +1,198 @@
|
||||
use platform::get_metadata;
|
||||
use std::collections::HashSet;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::config::DAY_SECONDS;
|
||||
|
||||
use crate::dir_walker::Operator;
|
||||
use crate::platform;
|
||||
use regex::Regex;
|
||||
|
||||
pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
|
||||
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(dirs.len());
|
||||
|
||||
for t in dirs {
|
||||
let top_level_name = normalize_path(t);
|
||||
let mut can_add = true;
|
||||
let mut to_remove: Vec<PathBuf> = Vec::new();
|
||||
|
||||
for tt in top_level_names.iter() {
|
||||
if is_a_parent_of(&top_level_name, tt) {
|
||||
to_remove.push(tt.to_path_buf());
|
||||
} else if is_a_parent_of(tt, &top_level_name) {
|
||||
can_add = false;
|
||||
}
|
||||
}
|
||||
for r in to_remove {
|
||||
top_level_names.remove(&r);
|
||||
}
|
||||
if can_add {
|
||||
top_level_names.insert(top_level_name);
|
||||
}
|
||||
}
|
||||
|
||||
top_level_names
|
||||
}
|
||||
|
||||
pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P], follow_links: bool) -> HashSet<u64> {
|
||||
use std::fs;
|
||||
// Gets the device ids for the filesystems which are used by the argument paths
|
||||
paths
|
||||
.iter()
|
||||
.filter_map(|p| {
|
||||
let follow_links = if follow_links {
|
||||
// slow path: If dereference-links is set, then we check if the file is a symbolic link
|
||||
match fs::symlink_metadata(p) {
|
||||
Ok(metadata) => metadata.file_type().is_symlink(),
|
||||
Err(_) => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
match get_metadata(p, false, follow_links) {
|
||||
Some((_size, Some((_id, dev)), _time)) => Some(dev),
|
||||
_ => None,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Normalize a path without touching the filesystem:
/// `Path::components()` already drops repeated separators, interior `.`
/// segments, and trailing `/` or `/.`; re-collecting the components yields
/// the cleaned path with the OS-preferred separator.
/// ref: <https://doc.rust-lang.org/std/path/struct.Path.html#method.components>
pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
    let mut cleaned = PathBuf::new();
    for part in path.as_ref().components() {
        cleaned.push(part);
    }
    cleaned
}
|
||||
|
||||
// Canonicalize the path only if it is an absolute path; relative paths and
// paths that fail to canonicalize are returned unchanged.
pub fn canonicalize_absolute_path(path: PathBuf) -> PathBuf {
    if path.is_absolute() {
        std::fs::canonicalize(&path).unwrap_or(path)
    } else {
        path
    }
}
|
||||
|
||||
pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool {
|
||||
if filter_regex.is_empty() {
|
||||
false
|
||||
} else {
|
||||
filter_regex
|
||||
.iter()
|
||||
.all(|f| !f.is_match(&dir.as_os_str().to_string_lossy()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Decide whether an entry should be excluded based on its timestamp and the
/// configured `(operator, bound)` filter. Returns `true` to filter the entry OUT.
pub fn is_filtered_out_due_to_file_time(
    filter_time: &Option<(Operator, i64)>,
    actual_time: i64,
) -> bool {
    match filter_time {
        // No time filter configured: keep everything.
        None => false,
        // Equal means "same day": keep only timestamps inside
        // [bound_time, bound_time + DAY_SECONDS).
        Some((Operator::Equal, bound_time)) => {
            !(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
        }
        // GreaterThan keeps entries at or after the bound; filter out older ones.
        Some((Operator::GreaterThan, bound_time)) => actual_time < *bound_time,
        // LessThan keeps entries at or before the bound; filter out newer ones.
        Some((Operator::LessThan, bound_time)) => actual_time > *bound_time,
    }
}
|
||||
|
||||
pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool {
|
||||
filter_regex
|
||||
.iter()
|
||||
.any(|f| f.is_match(&dir.as_os_str().to_string_lossy()))
|
||||
}
|
||||
|
||||
/// Strict ancestry test: `child` lies under `parent`, and the two paths are
/// not the same (a path is never its own parent).
fn is_a_parent_of<P: AsRef<Path>>(parent: P, child: P) -> bool {
    let (parent, child) = (parent.as_ref(), child.as_ref());
    child.starts_with(parent) && !parent.starts_with(child)
}
|
||||
|
||||
/// Unit tests for the path utilities. The module was previously compiled into
/// every build; `#[cfg(test)]` restricts it to `cargo test` as is conventional.
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;

    #[test]
    fn test_simplify_dir() {
        let mut correct = HashSet::new();
        correct.insert(PathBuf::from("a"));
        assert_eq!(simplify_dir_names(&["a"]), correct);
    }

    #[test]
    fn test_simplify_dir_rm_subdir() {
        let mut correct = HashSet::new();
        correct.insert(["a", "b"].iter().collect::<PathBuf>());
        assert_eq!(simplify_dir_names(&["a/b/c", "a/b", "a/b/d/f"]), correct);
        assert_eq!(simplify_dir_names(&["a/b", "a/b/c", "a/b/d/f"]), correct);
    }

    #[test]
    fn test_simplify_dir_duplicates() {
        let mut correct = HashSet::new();
        correct.insert(["a", "b"].iter().collect::<PathBuf>());
        correct.insert(PathBuf::from("c"));
        assert_eq!(
            simplify_dir_names(&[
                "a/b",
                "a/b//",
                "a/././b///",
                "c",
                "c/",
                "c/.",
                "c/././",
                "c/././."
            ]),
            correct
        );
    }

    #[test]
    fn test_simplify_dir_rm_subdir_and_not_substrings() {
        let mut correct = HashSet::new();
        correct.insert(PathBuf::from("b"));
        correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
        correct.insert(["a", "b"].iter().collect::<PathBuf>());
        assert_eq!(simplify_dir_names(&["a/b", "c/a/b/", "b"]), correct);
    }

    #[test]
    fn test_simplify_dir_dots() {
        let mut correct = HashSet::new();
        correct.insert(PathBuf::from("src"));
        assert_eq!(simplify_dir_names(&["src/."]), correct);
    }

    #[test]
    fn test_simplify_dir_substring_names() {
        let mut correct = HashSet::new();
        correct.insert(PathBuf::from("src"));
        correct.insert(PathBuf::from("src_v2"));
        assert_eq!(simplify_dir_names(&["src/", "src_v2"]), correct);
    }

    #[test]
    fn test_is_a_parent_of() {
        assert!(is_a_parent_of("/usr", "/usr/andy"));
        assert!(is_a_parent_of("/usr", "/usr/andy/i/am/descendant"));
        assert!(!is_a_parent_of("/usr", "/usr/."));
        assert!(!is_a_parent_of("/usr", "/usr/"));
        assert!(!is_a_parent_of("/usr", "/usr"));
        assert!(!is_a_parent_of("/usr/", "/usr"));
        assert!(!is_a_parent_of("/usr/andy", "/usr"));
        assert!(!is_a_parent_of("/usr/andy", "/usr/sibling"));
        assert!(!is_a_parent_of("/usr/folder", "/usr/folder_not_a_child"));
    }

    #[test]
    fn test_is_a_parent_of_root() {
        assert!(is_a_parent_of("/", "/usr/andy"));
        assert!(is_a_parent_of("/", "/usr"));
        assert!(!is_a_parent_of("/", "/"));
    }
}
|
||||
199
src/utils/mod.rs
199
src/utils/mod.rs
@@ -1,199 +0,0 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use walkdir::WalkDir;
|
||||
|
||||
mod platform;
|
||||
use self::platform::*;
|
||||
|
||||
/// A node in the directory tree: a named entry with its accumulated size
/// and its child entries.
#[derive(Debug)]
pub struct Node {
    /// Path/name of this entry.
    pub name: String,
    /// Accumulated size in bytes.
    pub size: u64,
    /// Child entries (boxed to keep the recursive type sized).
    pub children: Vec<Box<Node>>,
}
||||
|
||||
/// Reduce `filenames` to the set of top-level directories, dropping any
/// entry contained in another entry.
///
/// NOTE(review): containment is tested with `starts_with` against the
/// slash-stripped stored name, so sibling directories sharing a name prefix
/// collapse incorrectly — e.g. `"src_v2/".starts_with("src")` is true, so
/// "src_v2" would be swallowed by "src".
pub fn simplify_dir_names(filenames: Vec<&str>) -> HashSet<String> {
    let mut top_level_names: HashSet<String> = HashSet::new();

    for t in filenames {
        // Compare with a trailing slash so "a/b" matches "a/b/…".
        let top_level_name = ensure_end_slash(t);
        let mut can_add = true;
        let mut to_remove: Vec<String> = Vec::new();

        for tt in top_level_names.iter() {
            let temp = tt.to_string();
            if top_level_name.starts_with(&temp) {
                // An existing entry already covers this path.
                can_add = false;
            } else if tt.starts_with(&top_level_name) {
                // This path covers an existing entry; drop the existing one.
                to_remove.push(temp);
            }
        }
        for tr in to_remove {
            top_level_names.remove(&tr);
        }
        if can_add {
            // Stored without the trailing slash.
            top_level_names.insert(strip_end_slash(t));
        }
    }

    top_level_names
}
|
||||
|
||||
/// Walk every top-level directory and accumulate per-path sizes.
///
/// Returns `(no_permission_errors, path -> size)`: the bool is `true` only
/// when every entry was readable.
pub fn get_dir_tree(
    top_level_names: &HashSet<String>,
    apparent_size: bool,
) -> (bool, HashMap<String, u64>) {
    // Count of entries that could not be read.
    let mut permissions = 0;
    // (inode, device) pairs already counted, to avoid double-counting hard links.
    let mut inodes: HashSet<(u64, u64)> = HashSet::new();
    let mut data: HashMap<String, u64> = HashMap::new();

    for b in top_level_names.iter() {
        examine_dir(&b, apparent_size, &mut inodes, &mut data, &mut permissions);
    }
    (permissions == 0, data)
}
|
||||
|
||||
/// Return `s` with exactly one trailing `/`, after stripping any run of
/// trailing `/` and `/.` segments.
pub fn ensure_end_slash(s: &str) -> String {
    let mut out = s.to_string();
    loop {
        if out.ends_with('/') || out.ends_with("/.") {
            out.pop();
        } else {
            break;
        }
    }
    out.push('/');
    out
}
|
||||
|
||||
/// Return `s` without trailing `/` or `/.` segments, but never shrink it
/// below a single character (so "/" stays "/").
pub fn strip_end_slash(s: &str) -> String {
    let mut out = String::from(s);
    while out.len() > 1 && (out.ends_with('/') || out.ends_with("/.")) {
        out.pop();
    }
    out
}
|
||||
|
||||
/// Walk `top_dir`, adding each readable entry's size to its own path and to
/// every ancestor path up to `top_dir` in `data`. Unreadable entries bump
/// `file_count_no_permission` instead.
fn examine_dir(
    top_dir: &str,
    apparent_size: bool,
    inodes: &mut HashSet<(u64, u64)>,
    data: &mut HashMap<String, u64>,
    file_count_no_permission: &mut u64,
) {
    for entry in WalkDir::new(top_dir) {
        if let Ok(e) = entry {
            let maybe_size_and_inode = get_metadata(&e, apparent_size);

            match maybe_size_and_inode {
                Some((size, maybe_inode)) => {
                    // In disk-usage mode, skip entries whose (inode, device)
                    // pair was already counted (hard links).
                    if !apparent_size {
                        if let Some(inode_dev_pair) = maybe_inode {
                            if inodes.contains(&inode_dev_pair) {
                                continue;
                            }
                            inodes.insert(inode_dev_pair);
                        }
                    }
                    // This path and all its parent paths have their counter incremented
                    let mut e_path = e.path().to_path_buf();
                    loop {
                        let path_name = e_path.to_string_lossy().to_string();
                        let s = data.entry(path_name.clone()).or_insert(0);
                        *s += size;
                        if path_name == *top_dir {
                            break;
                        }
                        // Every walked entry lies under `top_dir`, so popping
                        // must reach it before the path empties out.
                        assert!(path_name != "");
                        e_path.pop();
                    }
                }
                None => *file_count_no_permission += 1,
            }
        } else {
            // The walker itself failed to read the entry.
            *file_count_no_permission += 1
        }
    }
}
|
||||
|
||||
/// Order entries largest-size first; ties are broken alphabetically by name.
pub fn sort_by_size_first_name_second(a: &(String, u64), b: &(String, u64)) -> Ordering {
    b.1.cmp(&a.1).then_with(|| a.0.cmp(&b.0))
}
|
||||
|
||||
pub fn sort(data: HashMap<String, u64>) -> Vec<(String, u64)> {
|
||||
let mut new_l: Vec<(String, u64)> = data.iter().map(|(a, b)| (a.clone(), *b)).collect();
|
||||
new_l.sort_by(|a, b| sort_by_size_first_name_second(&a, &b));
|
||||
new_l
|
||||
}
|
||||
|
||||
/// Keep at most `max_to_show` entries (0 means "show everything").
///
/// Truncates in place instead of the previous `new_l[0..max_to_show].to_vec()`,
/// which cloned every retained entry and then dropped the original vector.
pub fn find_big_ones(mut new_l: Vec<(String, u64)>, max_to_show: usize) -> Vec<(String, u64)> {
    if max_to_show > 0 && new_l.len() > max_to_show {
        new_l.truncate(max_to_show);
    }
    new_l
}
|
||||
|
||||
/// Keep only entries lying under some top-level name and at most `max_depth`
/// levels (slash count) below it; results are grouped per top-level name in
/// the input's order.
pub fn trim_deep_ones(
    input: Vec<(String, u64)>,
    max_depth: u64,
    top_level_names: &HashSet<String>,
) -> Vec<(String, u64)> {
    let mut kept: Vec<(String, u64)> = Vec::new();

    for name in top_level_names {
        // Depth is measured in '/' occurrences relative to the root name.
        let depth_limit = name.matches('/').count() + max_depth as usize;
        kept.extend(
            input
                .iter()
                .filter(|(k, _)| {
                    k.starts_with(name.as_str()) && k.matches('/').count() <= depth_limit
                })
                .cloned(),
        );
    }
    kept
}
|
||||
|
||||
/// Unit tests for the legacy string-based path helpers. Adding `#[cfg(test)]`
/// keeps the module out of non-test builds, as is conventional.
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;

    #[test]
    fn test_simplify_dir() {
        let mut correct = HashSet::new();
        correct.insert("a".to_string());
        assert_eq!(simplify_dir_names(vec!["a"]), correct);
    }

    #[test]
    fn test_simplify_dir_rm_subdir() {
        let mut correct = HashSet::new();
        correct.insert("a/b".to_string());
        assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct);
    }

    #[test]
    fn test_simplify_dir_duplicates() {
        let mut correct = HashSet::new();
        correct.insert("a/b".to_string());
        correct.insert("c".to_string());
        assert_eq!(simplify_dir_names(vec!["a/b", "a/b//", "c", "c/"]), correct);
    }

    #[test]
    fn test_simplify_dir_rm_subdir_and_not_substrings() {
        let mut correct = HashSet::new();
        correct.insert("b".to_string());
        correct.insert("c/a/b".to_string());
        correct.insert("a/b".to_string());
        assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct);
    }

    #[test]
    fn test_simplify_dir_dots() {
        let mut correct = HashSet::new();
        correct.insert("src".to_string());
        assert_eq!(simplify_dir_names(vec!["src/."]), correct);
    }
}
|
||||
@@ -1,25 +0,0 @@
|
||||
use walkdir::DirEntry;
|
||||
|
||||
/// Size in bytes of one filesystem block as reported by `st_blocks`.
///
/// All OS-specific implementations of MetadataExt define a block as
/// 512 bytes, e.g.
/// https://doc.rust-lang.org/std/os/linux/fs/trait.MetadataExt.html#tymethod.st_blocks
fn get_block_size() -> u64 {
    512
}
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn get_metadata(d: &DirEntry, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
d.metadata().ok().and_then(|md| {
|
||||
let inode = Some((md.ino(), md.dev()));
|
||||
if use_apparent_size {
|
||||
Some((md.len(), inode))
|
||||
} else {
|
||||
Some((md.blocks() * get_block_size(), inode))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// Fallback for non-unix targets: reports the apparent length only. Inode
/// and device identifiers are unavailable, so hard-link detection is off.
#[cfg(not(target_family = "unix"))]
pub fn get_metadata(d: &DirEntry, _apparent: bool) -> Option<(u64, Option<(u64, u64)>)> {
    // `.map()` replaces the `map_or(None, |md| Some(..))` anti-pattern
    // (clippy: option_map_or_none) with identical behavior.
    d.metadata().ok().map(|md| (md.len(), None))
}
|
||||
1
tests/test_dir2/dir/hello
Normal file
1
tests/test_dir2/dir/hello
Normal file
@@ -0,0 +1 @@
|
||||
hello
|
||||
1
tests/test_dir2/dir_name_clash
Normal file
1
tests/test_dir2/dir_name_clash
Normal file
@@ -0,0 +1 @@
|
||||
hello
|
||||
1
tests/test_dir2/dir_substring/hello
Normal file
1
tests/test_dir2/dir_substring/hello
Normal file
@@ -0,0 +1 @@
|
||||
hello
|
||||
0
tests/test_dir_files_from/a_file
Normal file
0
tests/test_dir_files_from/a_file
Normal file
BIN
tests/test_dir_files_from/files0_from.txt
Normal file
BIN
tests/test_dir_files_from/files0_from.txt
Normal file
Binary file not shown.
2
tests/test_dir_files_from/files_from.txt
Normal file
2
tests/test_dir_files_from/files_from.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
tests/test_dir_files_from/a_file
|
||||
tests/test_dir_files_from/hello_file
|
||||
1
tests/test_dir_files_from/hello_file
Normal file
1
tests/test_dir_files_from/hello_file
Normal file
@@ -0,0 +1 @@
|
||||
hello
|
||||
2
tests/test_dir_hidden_entries/.hidden_file
Normal file
2
tests/test_dir_hidden_entries/.hidden_file
Normal file
@@ -0,0 +1,2 @@
|
||||
something
|
||||
.secret
|
||||
0
tests/test_dir_hidden_entries/.secret
Normal file
0
tests/test_dir_hidden_entries/.secret
Normal file
0
tests/test_dir_matching/andy/dup_name/hello
Normal file
0
tests/test_dir_matching/andy/dup_name/hello
Normal file
0
tests/test_dir_matching/dave/dup_name/hello
Normal file
0
tests/test_dir_matching/dave/dup_name/hello
Normal file
0
tests/test_dir_unicode/ラウトは難しいです!.japan
Normal file
0
tests/test_dir_unicode/ラウトは難しいです!.japan
Normal file
0
tests/test_dir_unicode/👩.unicode
Normal file
0
tests/test_dir_unicode/👩.unicode
Normal file
274
tests/test_exact_output.rs
Normal file
274
tests/test_exact_output.rs
Normal file
@@ -0,0 +1,274 @@
|
||||
use assert_cmd::Command;
|
||||
use std::ffi::OsStr;
|
||||
use std::process::Output;
|
||||
use std::sync::Once;
|
||||
use std::{io, str};
|
||||
|
||||
static INIT: Once = Once::new();
|
||||
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
|
||||
|
||||
/**
|
||||
* This file contains tests that verify the exact output of the command.
|
||||
* This output differs on Linux / Mac so the tests are harder to write and debug
|
||||
* Windows is ignored here because the results vary by host making exact testing impractical
|
||||
*
|
||||
* Despite the above problems, these tests are good as they are the closest to 'the real thing'.
|
||||
*/
|
||||
|
||||
// Warning: File sizes differ on both platform and on the format of the disk.
|
||||
/// Copy to /tmp dir - we assume that the formatting of the /tmp partition
|
||||
/// is consistent. If the tests fail your /tmp filesystem probably differs
|
||||
fn copy_test_data(dir: &str) {
|
||||
// First remove the existing directory - just in case it is there and has incorrect data
|
||||
let last_slash = dir.rfind('/').unwrap();
|
||||
let last_part_of_dir = dir.chars().skip(last_slash).collect::<String>();
|
||||
let _ = Command::new("rm")
|
||||
.arg("-rf")
|
||||
.arg("/tmp/".to_owned() + &*last_part_of_dir)
|
||||
.ok();
|
||||
|
||||
let _ = Command::new("cp")
|
||||
.arg("-r")
|
||||
.arg(dir)
|
||||
.arg("/tmp/")
|
||||
.ok()
|
||||
.map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err));
|
||||
}
|
||||
|
||||
fn create_unreadable_directory() -> io::Result<()> {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::fs;
|
||||
use std::fs::Permissions;
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
fs::create_dir_all(UNREADABLE_DIR_PATH)?;
|
||||
fs::set_permissions(UNREADABLE_DIR_PATH, Permissions::from_mode(0))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn initialize() {
|
||||
INIT.call_once(|| {
|
||||
copy_test_data("tests/test_dir");
|
||||
copy_test_data("tests/test_dir2");
|
||||
copy_test_data("tests/test_dir_unicode");
|
||||
|
||||
if let Err(e) = create_unreadable_directory() {
|
||||
panic!("Failed to create unreadable directory: {}", e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
|
||||
initialize();
|
||||
let mut to_run = &mut Command::cargo_bin("dust").unwrap();
|
||||
for p in command_args {
|
||||
to_run = to_run.arg(p);
|
||||
}
|
||||
to_run.unwrap()
|
||||
}
|
||||
|
||||
fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<String>) {
|
||||
let to_run = run_cmd(command_args);
|
||||
|
||||
let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned();
|
||||
let will_fail = valid_stdout.iter().any(|i| stdout_output.contains(i));
|
||||
if !will_fail {
|
||||
eprintln!(
|
||||
"output(stdout):\n{}\ndoes not contain any of:\n{}",
|
||||
stdout_output,
|
||||
valid_stdout.join("\n\n")
|
||||
);
|
||||
}
|
||||
assert!(will_fail);
|
||||
}
|
||||
|
||||
fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String) {
|
||||
let to_run = run_cmd(command_args);
|
||||
|
||||
let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim();
|
||||
assert_eq!(stderr_output, valid_stderr);
|
||||
}
|
||||
|
||||
// "windows" result data can vary by host (size seems to be variable by one byte); fix code vs test and re-enable
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_main_basic() {
|
||||
// -c is no color mode - This makes testing much simpler
|
||||
exact_stdout_test(&["-c", "-B", "/tmp/test_dir/"], main_output());
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_main_multi_arg() {
|
||||
let command_args = [
|
||||
"-c",
|
||||
"-B",
|
||||
"/tmp/test_dir/many/",
|
||||
"/tmp/test_dir",
|
||||
"/tmp/test_dir",
|
||||
];
|
||||
exact_stdout_test(&command_args, main_output());
|
||||
}
|
||||
|
||||
fn main_output() -> Vec<String> {
|
||||
// Some linux currently thought to be Manjaro, Arch
|
||||
// Although probably depends on how drive is formatted
|
||||
let mac_and_some_linux = r#"
|
||||
0B ┌── a_file │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
|
||||
4.0K ├── hello_file│█████████████████████████████████████████████████ │ 100%
|
||||
4.0K ┌─┴ many │█████████████████████████████████████████████████ │ 100%
|
||||
4.0K ┌─┴ test_dir │█████████████████████████████████████████████████ │ 100%
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
let ubuntu = r#"
|
||||
0B ┌── a_file │ ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
|
||||
4.0K ├── hello_file│ ░░░░░░░░░░░░░░░░█████████████████ │ 33%
|
||||
8.0K ┌─┴ many │ █████████████████████████████████ │ 67%
|
||||
12K ┌─┴ test_dir │█████████████████████████████████████████████████ │ 100%
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
vec![mac_and_some_linux, ubuntu]
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_main_long_paths() {
|
||||
let command_args = ["-c", "-p", "-B", "/tmp/test_dir/"];
|
||||
exact_stdout_test(&command_args, main_output_long_paths());
|
||||
}
|
||||
|
||||
fn main_output_long_paths() -> Vec<String> {
|
||||
let mac_and_some_linux = r#"
|
||||
0B ┌── /tmp/test_dir/many/a_file │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
|
||||
4.0K ├── /tmp/test_dir/many/hello_file│██████████████████████████████ │ 100%
|
||||
4.0K ┌─┴ /tmp/test_dir/many │██████████████████████████████ │ 100%
|
||||
4.0K ┌─┴ /tmp/test_dir │██████████████████████████████ │ 100%
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
let ubuntu = r#"
|
||||
0B ┌── /tmp/test_dir/many/a_file │ ░░░░░░░░░░░░░░░░░░░░█ │ 0%
|
||||
4.0K ├── /tmp/test_dir/many/hello_file│ ░░░░░░░░░░███████████ │ 33%
|
||||
8.0K ┌─┴ /tmp/test_dir/many │ █████████████████████ │ 67%
|
||||
12K ┌─┴ /tmp/test_dir │██████████████████████████████ │ 100%
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
vec![mac_and_some_linux, ubuntu]
|
||||
}
|
||||
|
||||
// Check against directories and files whose names are substrings of each other
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_substring_of_names_and_long_names() {
|
||||
let command_args = ["-c", "-B", "/tmp/test_dir2"];
|
||||
exact_stdout_test(&command_args, no_substring_of_names_output());
|
||||
}
|
||||
|
||||
fn no_substring_of_names_output() -> Vec<String> {
|
||||
let ubuntu = "
|
||||
0B ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_goes..
|
||||
4.0K ├── dir_name_clash
|
||||
4.0K │ ┌── hello
|
||||
8.0K ├─┴ dir
|
||||
4.0K │ ┌── hello
|
||||
8.0K ├─┴ dir_substring
|
||||
24K ┌─┴ test_dir2
|
||||
"
|
||||
.trim()
|
||||
.into();
|
||||
|
||||
let mac_and_some_linux = "
|
||||
0B ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_goes..
|
||||
4.0K │ ┌── hello
|
||||
4.0K ├─┴ dir
|
||||
4.0K ├── dir_name_clash
|
||||
4.0K │ ┌── hello
|
||||
4.0K ├─┴ dir_substring
|
||||
12K ┌─┴ test_dir2
|
||||
"
|
||||
.trim()
|
||||
.into();
|
||||
vec![mac_and_some_linux, ubuntu]
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_unicode_directories() {
|
||||
let command_args = ["-c", "-B", "/tmp/test_dir_unicode"];
|
||||
exact_stdout_test(&command_args, unicode_dir());
|
||||
}
|
||||
|
||||
fn unicode_dir() -> Vec<String> {
|
||||
// The way unicode & asian characters are rendered on the terminal should make this line up
|
||||
let ubuntu = "
|
||||
0B ┌── ラウトは難しいです!.japan│ █ │ 0%
|
||||
0B ├── 👩.unicode │ █ │ 0%
|
||||
4.0K ┌─┴ test_dir_unicode │███████████████████████████████████ │ 100%
|
||||
"
|
||||
.trim()
|
||||
.into();
|
||||
|
||||
let mac_and_some_linux = "
|
||||
0B ┌── ラウトは難しいです!.japan│ █ │ 0%
|
||||
0B ├── 👩.unicode │ █ │ 0%
|
||||
0B ┌─┴ test_dir_unicode │ █ │ 0%
|
||||
"
|
||||
.trim()
|
||||
.into();
|
||||
vec![mac_and_some_linux, ubuntu]
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_apparent_size() {
|
||||
let command_args = ["-c", "-s", "-b", "/tmp/test_dir"];
|
||||
exact_stdout_test(&command_args, apparent_size_output());
|
||||
}
|
||||
|
||||
fn apparent_size_output() -> Vec<String> {
|
||||
// The apparent directory sizes are too unpredictable and system dependent to try and match
|
||||
let one_space_before = r#"
|
||||
0B ┌── a_file
|
||||
6B ├── hello_file
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
let two_space_before = r#"
|
||||
0B ┌── a_file
|
||||
6B ├── hello_file
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
vec![one_space_before, two_space_before]
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_permission_normal() {
|
||||
let command_args = [UNREADABLE_DIR_PATH];
|
||||
let permission_msg =
|
||||
r#"Did not have permissions for all directories (add --print-errors to see errors)"#
|
||||
.trim()
|
||||
.to_string();
|
||||
exact_stderr_test(&command_args, permission_msg);
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_permission_flag() {
|
||||
// add the flag to CLI
|
||||
let command_args = ["--print-errors", UNREADABLE_DIR_PATH];
|
||||
let permission_msg = format!(
|
||||
"Did not have permissions for directories: {}",
|
||||
UNREADABLE_DIR_PATH
|
||||
);
|
||||
exact_stderr_test(&command_args, permission_msg);
|
||||
}
|
||||
340
tests/test_flags.rs
Normal file
340
tests/test_flags.rs
Normal file
@@ -0,0 +1,340 @@
|
||||
use assert_cmd::Command;
|
||||
use std::ffi::OsStr;
|
||||
use std::str;
|
||||
|
||||
/**
|
||||
* This file contains tests that test a substring of the output using '.contains'
|
||||
*
|
||||
* These tests should be the same cross platform
|
||||
*/
|
||||
|
||||
fn build_command<T: AsRef<OsStr>>(command_args: Vec<T>) -> String {
|
||||
let mut cmd = &mut Command::cargo_bin("dust").unwrap();
|
||||
// Hide progress bar
|
||||
cmd = cmd.arg("-P");
|
||||
|
||||
for p in command_args {
|
||||
cmd = cmd.arg(p);
|
||||
}
|
||||
let finished = &cmd.unwrap();
|
||||
let stderr = str::from_utf8(&finished.stderr).unwrap();
|
||||
assert_eq!(stderr, "");
|
||||
|
||||
str::from_utf8(&finished.stdout).unwrap().into()
|
||||
}
|
||||
|
||||
// We can at least test the file names are there
|
||||
#[test]
|
||||
pub fn test_basic_output() {
|
||||
let output = build_command(vec!["tests/test_dir/"]);
|
||||
|
||||
assert!(output.contains(" ┌─┴ "));
|
||||
assert!(output.contains("test_dir "));
|
||||
assert!(output.contains(" ┌─┴ "));
|
||||
assert!(output.contains("many "));
|
||||
assert!(output.contains(" ├── "));
|
||||
assert!(output.contains("hello_file"));
|
||||
assert!(output.contains(" ┌── "));
|
||||
assert!(output.contains("a_file "));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_output_no_bars_means_no_excess_spaces() {
|
||||
let output = build_command(vec!["-b", "tests/test_dir/"]);
|
||||
// If bars are not being shown we don't need to pad the output with spaces
|
||||
assert!(output.contains("many"));
|
||||
assert!(!output.contains("many "));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_reverse_flag() {
|
||||
let output = build_command(vec!["-r", "-c", "tests/test_dir/"]);
|
||||
assert!(output.contains(" └─┬ test_dir "));
|
||||
assert!(output.contains(" └─┬ many "));
|
||||
assert!(output.contains(" ├── hello_file"));
|
||||
assert!(output.contains(" └── a_file "));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_d_flag_works() {
|
||||
// We should see the top level directory but not the sub dirs / files:
|
||||
let output = build_command(vec!["-d", "1", "tests/test_dir/"]);
|
||||
assert!(!output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_d0_works_on_multiple() {
|
||||
// We should see the top level directory but not the sub dirs / files:
|
||||
let output = build_command(vec!["-d", "0", "tests/test_dir/", "tests/test_dir2"]);
|
||||
assert!(output.contains("test_dir "));
|
||||
assert!(output.contains("test_dir2"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_threads_flag_works() {
|
||||
let output = build_command(vec!["-T", "1", "tests/test_dir/"]);
|
||||
assert!(output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_d_flag_works_and_still_recurses_down() {
|
||||
// We had a bug where running with '-d 1' would stop at the first directory and the code
|
||||
// would fail to recurse down
|
||||
let output = build_command(vec!["-d", "1", "-f", "-c", "tests/test_dir2/"]);
|
||||
assert!(output.contains("1 ┌── dir"));
|
||||
assert!(output.contains("4 ┌─┴ test_dir2"));
|
||||
}
|
||||
|
||||
// Check against directories and files whose names are substrings of each other
|
||||
#[test]
|
||||
pub fn test_ignore_dir() {
|
||||
let output = build_command(vec!["-c", "-X", "dir_substring", "tests/test_dir2/"]);
|
||||
assert!(!output.contains("dir_substring"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_ignore_all_in_file() {
|
||||
let output = build_command(vec![
|
||||
"-c",
|
||||
"-I",
|
||||
"tests/test_dir_hidden_entries/.hidden_file",
|
||||
"tests/test_dir_hidden_entries/",
|
||||
]);
|
||||
assert!(output.contains(" test_dir_hidden_entries"));
|
||||
assert!(!output.contains(".secret"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_files_from_flag_file() {
|
||||
let output = build_command(vec![
|
||||
"--files-from",
|
||||
"tests/test_dir_files_from/files_from.txt",
|
||||
]);
|
||||
assert!(output.contains("a_file"));
|
||||
assert!(output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_files0_from_flag_file() {
|
||||
let output = build_command(vec![
|
||||
"--files0-from",
|
||||
"tests/test_dir_files_from/files0_from.txt",
|
||||
]);
|
||||
assert!(output.contains("a_file"));
|
||||
assert!(output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_files_from_flag_stdin() {
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
cmd.arg("-P").arg("--files-from").arg("-");
|
||||
let input = b"tests/test_dir_files_from/a_file\ntests/test_dir_files_from/hello_file\n";
|
||||
cmd.write_stdin(input.as_ref());
|
||||
let finished = &cmd.unwrap();
|
||||
let stderr = std::str::from_utf8(&finished.stderr).unwrap();
|
||||
assert_eq!(stderr, "");
|
||||
let output = std::str::from_utf8(&finished.stdout).unwrap();
|
||||
assert!(output.contains("a_file"));
|
||||
assert!(output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_files0_from_flag_stdin() {
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
cmd.arg("-P").arg("--files0-from").arg("-");
|
||||
let input = b"tests/test_dir_files_from/a_file\0tests/test_dir_files_from/hello_file\0";
|
||||
cmd.write_stdin(input.as_ref());
|
||||
let finished = &cmd.unwrap();
|
||||
let stderr = std::str::from_utf8(&finished.stderr).unwrap();
|
||||
assert_eq!(stderr, "");
|
||||
let output = std::str::from_utf8(&finished.stdout).unwrap();
|
||||
assert!(output.contains("a_file"));
|
||||
assert!(output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_with_bad_param() {
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
cmd.arg("-P").arg("bad_place");
|
||||
let output_error = cmd.unwrap_err();
|
||||
let result = output_error.as_output().unwrap();
|
||||
let stderr = str::from_utf8(&result.stderr).unwrap();
|
||||
assert!(stderr.contains("No such file or directory"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_hidden_flag() {
|
||||
// Check we can see the hidden file normally
|
||||
let output = build_command(vec!["-c", "tests/test_dir_hidden_entries/"]);
|
||||
assert!(output.contains(".hidden_file"));
|
||||
assert!(output.contains("┌─┴ test_dir_hidden_entries"));
|
||||
|
||||
// Check that adding the '-h' flag causes us to not see hidden files
|
||||
let output = build_command(vec!["-c", "-i", "tests/test_dir_hidden_entries/"]);
|
||||
assert!(!output.contains(".hidden_file"));
|
||||
assert!(output.contains("┌── test_dir_hidden_entries"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_number_of_files() {
|
||||
// Check we can see the hidden file normally
|
||||
let output = build_command(vec!["-c", "-f", "tests/test_dir"]);
|
||||
assert!(output.contains("1 ┌── a_file "));
|
||||
assert!(output.contains("1 ├── hello_file"));
|
||||
assert!(output.contains("2 ┌─┴ many"));
|
||||
assert!(output.contains("2 ┌─┴ test_dir"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_show_files_by_type() {
|
||||
// Check we can list files by type
|
||||
let output = build_command(vec!["-c", "-t", "tests"]);
|
||||
assert!(output.contains(" .unicode"));
|
||||
assert!(output.contains(" .japan"));
|
||||
assert!(output.contains(" .rs"));
|
||||
assert!(output.contains(" (no extension)"));
|
||||
assert!(output.contains("┌─┴ (total)"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn test_show_files_only() {
|
||||
let output = build_command(vec!["-c", "-F", "tests/test_dir"]);
|
||||
assert!(output.contains("a_file"));
|
||||
assert!(output.contains("hello_file"));
|
||||
assert!(!output.contains("many"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_output_skip_total() {
|
||||
let output = build_command(vec![
|
||||
"--skip-total",
|
||||
"tests/test_dir/many/hello_file",
|
||||
"tests/test_dir/many/a_file",
|
||||
]);
|
||||
assert!(output.contains("hello_file"));
|
||||
assert!(!output.contains("(total)"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_output_screen_reader() {
|
||||
let output = build_command(vec!["--screen-reader", "-c", "tests/test_dir/"]);
|
||||
println!("{}", output);
|
||||
assert!(output.contains("test_dir 0"));
|
||||
assert!(output.contains("many 1"));
|
||||
assert!(output.contains("hello_file 2"));
|
||||
assert!(output.contains("a_file 2"));
|
||||
|
||||
// Verify no 'symbols' reported by screen reader
|
||||
assert!(!output.contains('│'));
|
||||
|
||||
for block in ['█', '▓', '▒', '░'] {
|
||||
assert!(!output.contains(block));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_show_files_by_regex_match_lots() {
|
||||
// Check we can see '.rs' files in the tests directory
|
||||
let output = build_command(vec!["-c", "-e", "\\.rs$", "tests"]);
|
||||
assert!(output.contains(" ┌─┴ tests"));
|
||||
assert!(!output.contains("0B ┌── tests"));
|
||||
assert!(!output.contains("0B ┌─┴ tests"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_show_files_by_regex_match_nothing() {
|
||||
// Check there are no files named: '.match_nothing' in the tests directory
|
||||
let output = build_command(vec!["-c", "-e", "match_nothing$", "tests"]);
|
||||
assert!(output.contains("0B ┌── tests"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_show_files_by_regex_match_multiple() {
|
||||
let output = build_command(vec![
|
||||
"-c",
|
||||
"-e",
|
||||
"test_dir_hidden",
|
||||
"-e",
|
||||
"test_dir2",
|
||||
"-n",
|
||||
"100",
|
||||
"tests",
|
||||
]);
|
||||
assert!(output.contains("test_dir2"));
|
||||
assert!(output.contains("test_dir_hidden"));
|
||||
assert!(!output.contains("many")); // We do not find the 'many' folder in the 'test_dir' folder
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_show_files_by_invert_regex() {
|
||||
let output = build_command(vec!["-c", "-f", "-v", "e", "tests/test_dir2"]);
|
||||
// There are 0 files without 'e' in the name
|
||||
assert!(output.contains("0 ┌── test_dir2"));
|
||||
|
||||
let output = build_command(vec!["-c", "-f", "-v", "a", "tests/test_dir2"]);
|
||||
// There are 2 files without 'a' in the name
|
||||
assert!(output.contains("2 ┌─┴ test_dir2"));
|
||||
|
||||
// There are 4 files in the test_dir2 hierarchy
|
||||
let output = build_command(vec!["-c", "-f", "-v", "match_nothing$", "tests/test_dir2"]);
|
||||
assert!(output.contains("4 ┌─┴ test_dir2"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_show_files_by_invert_regex_match_multiple() {
|
||||
// We ignore test_dir2 & test_dir_unicode, leaving the test_dir folder
|
||||
// which has the 'many' folder inside
|
||||
let output = build_command(vec![
|
||||
"-c",
|
||||
"-v",
|
||||
"test_dir2",
|
||||
"-v",
|
||||
"test_dir_unicode",
|
||||
"-n",
|
||||
"100",
|
||||
"tests",
|
||||
]);
|
||||
assert!(!output.contains("test_dir2"));
|
||||
assert!(!output.contains("test_dir_unicode"));
|
||||
assert!(output.contains("many"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_no_color() {
|
||||
let output = build_command(vec!["-c"]);
|
||||
// Red is 31
|
||||
assert!(!output.contains("\x1B[31m"));
|
||||
assert!(!output.contains("\x1B[0m"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_force_color() {
|
||||
let output = build_command(vec!["-C"]);
|
||||
// Red is 31
|
||||
assert!(output.contains("\x1B[31m"));
|
||||
assert!(output.contains("\x1B[0m"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_collapse() {
|
||||
let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
|
||||
assert!(output.contains("many"));
|
||||
assert!(!output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_handle_duplicate_names() {
|
||||
// Check that even if we run on a multiple directories with the same name
|
||||
// we still show the distinct parent dir in the output
|
||||
let output = build_command(vec![
|
||||
"tests/test_dir_matching/dave/dup_name",
|
||||
"tests/test_dir_matching/andy/dup_name",
|
||||
"ci",
|
||||
]);
|
||||
assert!(output.contains("andy"));
|
||||
assert!(output.contains("dave"));
|
||||
assert!(output.contains("ci"));
|
||||
assert!(output.contains("dup_name"));
|
||||
assert!(!output.contains("test_dir_matching"));
|
||||
}
|
||||
1
tests/tests.rs
Normal file
1
tests/tests.rs
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
141
tests/tests_symlinks.rs
Normal file
141
tests/tests_symlinks.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
use assert_cmd::Command;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::str;
|
||||
|
||||
use tempfile::Builder;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// File sizes differ on both platform and on the format of the disk.
|
||||
// Windows: `ln` is not usually an available command; creation of symbolic links requires special enhanced permissions
|
||||
|
||||
fn build_temp_file(dir: &TempDir) -> PathBuf {
|
||||
let file_path = dir.path().join("notes.txt");
|
||||
let mut file = File::create(&file_path).unwrap();
|
||||
writeln!(file, "I am a temp file").unwrap();
|
||||
file_path
|
||||
}
|
||||
|
||||
fn link_it(link_path: PathBuf, file_path_s: &str, is_soft: bool) -> String {
|
||||
let link_name_s = link_path.to_str().unwrap();
|
||||
let mut c = Command::new("ln");
|
||||
if is_soft {
|
||||
c.arg("-s");
|
||||
}
|
||||
c.arg(file_path_s);
|
||||
c.arg(link_name_s);
|
||||
assert!(c.output().is_ok());
|
||||
link_name_s.into()
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_soft_sym_link() {
|
||||
let dir = Builder::new().tempdir().unwrap();
|
||||
let file = build_temp_file(&dir);
|
||||
let dir_s = dir.path().to_str().unwrap();
|
||||
let file_path_s = file.to_str().unwrap();
|
||||
|
||||
let link_name = dir.path().join("the_link");
|
||||
let link_name_s = link_it(link_name, file_path_s, true);
|
||||
|
||||
let c = format!(" ├── {}", link_name_s);
|
||||
let b = format!(" ┌── {}", file_path_s);
|
||||
let a = format!("─┴ {}", dir_s);
|
||||
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
// Mac test runners create long filenames in tmp directories
|
||||
let output = cmd
|
||||
.args(["-p", "-c", "-s", "-w", "999", dir_s])
|
||||
.unwrap()
|
||||
.stdout;
|
||||
|
||||
let output = str::from_utf8(&output).unwrap();
|
||||
|
||||
assert!(output.contains(a.as_str()));
|
||||
assert!(output.contains(b.as_str()));
|
||||
assert!(output.contains(c.as_str()));
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_hard_sym_link() {
|
||||
let dir = Builder::new().tempdir().unwrap();
|
||||
let file = build_temp_file(&dir);
|
||||
let dir_s = dir.path().to_str().unwrap();
|
||||
let file_path_s = file.to_str().unwrap();
|
||||
|
||||
let link_name = dir.path().join("the_link");
|
||||
link_it(link_name, file_path_s, false);
|
||||
|
||||
let file_output = format!(" ┌── {}", file_path_s);
|
||||
let dirs_output = format!("─┴ {}", dir_s);
|
||||
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
// Mac test runners create long filenames in tmp directories
|
||||
let output = cmd.args(["-p", "-c", "-w", "999", dir_s]).unwrap().stdout;
|
||||
|
||||
// The link should not appear in the output because multiple inodes are now ordered
|
||||
// then filtered.
|
||||
let output = str::from_utf8(&output).unwrap();
|
||||
assert!(output.contains(dirs_output.as_str()));
|
||||
assert!(output.contains(file_output.as_str()));
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_hard_sym_link_no_dup_multi_arg() {
|
||||
let dir = Builder::new().tempdir().unwrap();
|
||||
let dir_link = Builder::new().tempdir().unwrap();
|
||||
let file = build_temp_file(&dir);
|
||||
let dir_s = dir.path().to_str().unwrap();
|
||||
let dir_link_s = dir_link.path().to_str().unwrap();
|
||||
let file_path_s = file.to_str().unwrap();
|
||||
|
||||
let link_name = dir_link.path().join("the_link");
|
||||
let link_name_s = link_it(link_name, file_path_s, false);
|
||||
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
|
||||
// Mac test runners create long filenames in tmp directories
|
||||
let output = cmd
|
||||
.args(["-p", "-c", "-w", "999", "-b", dir_link_s, dir_s])
|
||||
.unwrap()
|
||||
.stdout;
|
||||
|
||||
// The link or the file should appear but not both
|
||||
let output = str::from_utf8(&output).unwrap();
|
||||
let has_file_only = output.contains(file_path_s) && !output.contains(&link_name_s);
|
||||
let has_link_only = !output.contains(file_path_s) && output.contains(&link_name_s);
|
||||
assert!(has_file_only || has_link_only);
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_recursive_sym_link() {
|
||||
let dir = Builder::new().tempdir().unwrap();
|
||||
let dir_s = dir.path().to_str().unwrap();
|
||||
|
||||
let link_name = dir.path().join("the_link");
|
||||
let link_name_s = link_it(link_name, dir_s, true);
|
||||
|
||||
let a = format!("─┬ {}", dir_s);
|
||||
let b = format!(" └── {}", link_name_s);
|
||||
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
let output = cmd
|
||||
.arg("-p")
|
||||
.arg("-c")
|
||||
.arg("-r")
|
||||
.arg("-s")
|
||||
.arg("-w")
|
||||
.arg("999")
|
||||
.arg(dir_s)
|
||||
.unwrap()
|
||||
.stdout;
|
||||
let output = str::from_utf8(&output).unwrap();
|
||||
|
||||
assert!(output.contains(a.as_str()));
|
||||
assert!(output.contains(b.as_str()));
|
||||
}
|
||||
Reference in New Issue
Block a user