mirror of
https://github.com/bootandy/dust.git
synced 2025-12-09 22:30:39 -08:00
Compare commits
191 Commits
v0.7.1.alp
...
v1.1.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dbd18f90e7 | ||
|
|
dad88ad660 | ||
|
|
00a7c410a0 | ||
|
|
1ab0b2f531 | ||
|
|
c09073151d | ||
|
|
b4a517a096 | ||
|
|
e654d30f9d | ||
|
|
4fc1897678 | ||
|
|
08b9c756ee | ||
|
|
394231683d | ||
|
|
a06a001886 | ||
|
|
fd9e97bcfa | ||
|
|
3ed95ee399 | ||
|
|
58c9f6d509 | ||
|
|
3f2f7a8bb2 | ||
|
|
b7176cf887 | ||
|
|
d65f41097e | ||
|
|
08e4240b41 | ||
|
|
028ca1fdc7 | ||
|
|
4f6255971b | ||
|
|
cab250aa0e | ||
|
|
5f76db27c9 | ||
|
|
a34e78f912 | ||
|
|
1ffda38264 | ||
|
|
e78690e4f5 | ||
|
|
5b87260467 | ||
|
|
2c34c38b29 | ||
|
|
a1574d6a06 | ||
|
|
184ea1f956 | ||
|
|
a3dcab9454 | ||
|
|
658b11d0f8 | ||
|
|
e2fe656296 | ||
|
|
87581f328e | ||
|
|
ecd6b85c17 | ||
|
|
b86e5c8c88 | ||
|
|
25c016f98a | ||
|
|
69c4c63357 | ||
|
|
fbd34ec4c2 | ||
|
|
7c75c1b0a9 | ||
|
|
b54a215805 | ||
|
|
0364cf781e | ||
|
|
a8bf76cb22 | ||
|
|
4df4eeaa38 | ||
|
|
ebb3b8cceb | ||
|
|
e9bacdf875 | ||
|
|
a4b5d8573b | ||
|
|
4a2778b6ea | ||
|
|
7ee744207b | ||
|
|
96068518f6 | ||
|
|
10168e0a47 | ||
|
|
6768df9a7b | ||
|
|
e80892a9e7 | ||
|
|
cd53fc7494 | ||
|
|
e8c7990a17 | ||
|
|
c8b61d2f46 | ||
|
|
6e0505bfd7 | ||
|
|
24bdbf036e | ||
|
|
29085686e1 | ||
|
|
8b1632dde8 | ||
|
|
f3275cd59c | ||
|
|
939ed89ebb | ||
|
|
a58e5f48f6 | ||
|
|
3f9014d8c7 | ||
|
|
7c54d41ace | ||
|
|
2fa14ca19c | ||
|
|
211d89e634 | ||
|
|
0038cb24b4 | ||
|
|
658f8d2e2b | ||
|
|
2c23336794 | ||
|
|
a4ae013459 | ||
|
|
c259d3b566 | ||
|
|
bdfd3c01a5 | ||
|
|
2fe91806c7 | ||
|
|
514bb2799c | ||
|
|
e17a1af476 | ||
|
|
2f7c197cd7 | ||
|
|
7d13fe972c | ||
|
|
5a3e15d0ce | ||
|
|
6db013a601 | ||
|
|
49a21b1121 | ||
|
|
7efdf63fbc | ||
|
|
184d1ec5e8 | ||
|
|
1e87a0661b | ||
|
|
187b8be2fa | ||
|
|
1495251ebc | ||
|
|
520c439edc | ||
|
|
712acc67fe | ||
|
|
fdbed14334 | ||
|
|
810cc8b604 | ||
|
|
83ef2525aa | ||
|
|
af9f0b5125 | ||
|
|
9ff28b3456 | ||
|
|
4242363f40 | ||
|
|
3fd78490e6 | ||
|
|
b903f58cea | ||
|
|
0f72ca328a | ||
|
|
6c130adb6c | ||
|
|
9f0f366187 | ||
|
|
81ad921e25 | ||
|
|
3708edc2d3 | ||
|
|
414bc9e5a7 | ||
|
|
66ad504848 | ||
|
|
5bfa44ec77 | ||
|
|
03a8d643c5 | ||
|
|
29957c1f2c | ||
|
|
400ff513f4 | ||
|
|
31eb650fbe | ||
|
|
f3c074759d | ||
|
|
ea3cc537ea | ||
|
|
c012567c38 | ||
|
|
26bc26277d | ||
|
|
abcc46c5ea | ||
|
|
a3ab5bfe0f | ||
|
|
04c4963a02 | ||
|
|
40a6f098ae | ||
|
|
5e607cf210 | ||
|
|
f546dbbede | ||
|
|
a91aa62060 | ||
|
|
a7b82f32d7 | ||
|
|
72b811c278 | ||
|
|
b478534b22 | ||
|
|
2ca7177446 | ||
|
|
e858f9e976 | ||
|
|
0a67191054 | ||
|
|
c363e5ff8b | ||
|
|
c148cd9044 | ||
|
|
2893f73f47 | ||
|
|
5103ebe0d8 | ||
|
|
40acc8f868 | ||
|
|
eebd9daf2a | ||
|
|
9bc1a6d625 | ||
|
|
d6f9bb3c47 | ||
|
|
f70f4b7e12 | ||
|
|
b9b2aee760 | ||
|
|
f60184ecbb | ||
|
|
81d52e6e3a | ||
|
|
5980858b39 | ||
|
|
ed6a8d0462 | ||
|
|
4cef6aaa84 | ||
|
|
d477145694 | ||
|
|
dc5b7b2c2e | ||
|
|
cf5ebd76fe | ||
|
|
fc548919c5 | ||
|
|
4b4bca52d9 | ||
|
|
2a9d545c3c | ||
|
|
20cc5cf7e0 | ||
|
|
5fcc45efbe | ||
|
|
282f6d314d | ||
|
|
c36ca33fe9 | ||
|
|
34ba99af2a | ||
|
|
2713445ad0 | ||
|
|
b62f35291d | ||
|
|
a7fbcb8156 | ||
|
|
a7120b949c | ||
|
|
812e1e3c53 | ||
|
|
4eb3f29565 | ||
|
|
d64092d8a1 | ||
|
|
77750c8149 | ||
|
|
b9386cd39e | ||
|
|
17112b09cc | ||
|
|
c5adff5348 | ||
|
|
ad2e52e211 | ||
|
|
164bec71a3 | ||
|
|
11b5c7227f | ||
|
|
fc70f9ba30 | ||
|
|
a00d1f0719 | ||
|
|
c4ea7815f8 | ||
|
|
afc36a633f | ||
|
|
7275b273d4 | ||
|
|
a3e59f9c25 | ||
|
|
48bf656123 | ||
|
|
fabb27908d | ||
|
|
52aeeebe1f | ||
|
|
1e27288ec2 | ||
|
|
9f4a5daee6 | ||
|
|
27f0a015ef | ||
|
|
20d89bef91 | ||
|
|
469e6d0a69 | ||
|
|
2d58609d54 | ||
|
|
109a0b90d4 | ||
|
|
ab67c1a50e | ||
|
|
6a34b52d15 | ||
|
|
f708305190 | ||
|
|
2749f56b7a | ||
|
|
d983175189 | ||
|
|
4b3dc3988d | ||
|
|
fa4405b58b | ||
|
|
abb08f8e1a | ||
|
|
9f91d446c1 | ||
|
|
1b07c3c4f3 | ||
|
|
e55b917c96 |
475
.github/workflows/CICD.yml
vendored
475
.github/workflows/CICD.yml
vendored
@@ -22,56 +22,56 @@ jobs:
|
|||||||
- { os: macos-latest }
|
- { os: macos-latest }
|
||||||
- { os: windows-latest }
|
- { os: windows-latest }
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v1
|
- uses: actions/checkout@v1
|
||||||
- name: Initialize workflow variables
|
- name: Initialize workflow variables
|
||||||
id: vars
|
id: vars
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
# 'windows-latest' `cargo fmt` is bugged for this project (see reasons @ GH:rust-lang/rustfmt #3324, #3590, #3688 ; waiting for repair)
|
# 'windows-latest' `cargo fmt` is bugged for this project (see reasons @ GH:rust-lang/rustfmt #3324, #3590, #3688 ; waiting for repair)
|
||||||
JOB_DO_FORMAT_TESTING="true"
|
JOB_DO_FORMAT_TESTING="true"
|
||||||
case ${{ matrix.job.os }} in windows-latest) unset JOB_DO_FORMAT_TESTING ;; esac;
|
case ${{ matrix.job.os }} in windows-latest) unset JOB_DO_FORMAT_TESTING ;; esac;
|
||||||
echo set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING:-<empty>/false}
|
echo set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING:-<empty>/false}
|
||||||
echo ::set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING}
|
echo ::set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING}
|
||||||
# target-specific options
|
# target-specific options
|
||||||
# * CARGO_FEATURES_OPTION
|
# * CARGO_FEATURES_OPTION
|
||||||
CARGO_FEATURES_OPTION='' ;
|
CARGO_FEATURES_OPTION='' ;
|
||||||
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
|
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
|
||||||
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||||
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||||
- name: Install `rust` toolchain
|
- name: Install `rust` toolchain
|
||||||
uses: actions-rs/toolchain@v1
|
uses: actions-rs/toolchain@v1
|
||||||
with:
|
with:
|
||||||
toolchain: stable
|
toolchain: stable
|
||||||
override: true
|
override: true
|
||||||
profile: minimal # minimal component installation (ie, no documentation)
|
profile: minimal # minimal component installation (ie, no documentation)
|
||||||
components: rustfmt, clippy
|
components: rustfmt, clippy
|
||||||
- name: "`fmt` testing"
|
- name: "`fmt` testing"
|
||||||
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
|
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: fmt
|
command: fmt
|
||||||
args: --all -- --check
|
args: --all -- --check
|
||||||
- name: "`clippy` testing"
|
- name: "`clippy` testing"
|
||||||
if: success() || failure() # run regardless of prior step ("`fmt` testing") success/failure
|
if: success() || failure() # run regardless of prior step ("`fmt` testing") success/failure
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: clippy
|
command: clippy
|
||||||
args: ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings
|
args: ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings
|
||||||
|
|
||||||
min_version:
|
min_version:
|
||||||
name: MinSRV # Minimum supported rust version
|
name: MinSRV # Minimum supported rust version
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v1
|
- uses: actions/checkout@v1
|
||||||
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
|
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
|
||||||
uses: actions-rs/toolchain@v1
|
uses: actions-rs/toolchain@v1
|
||||||
with:
|
with:
|
||||||
toolchain: ${{ env.RUST_MIN_SRV }}
|
toolchain: ${{ env.RUST_MIN_SRV }}
|
||||||
profile: minimal # minimal component installation (ie, no documentation)
|
profile: minimal # minimal component installation (ie, no documentation)
|
||||||
- name: Test
|
- name: Test
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: test
|
command: test
|
||||||
|
|
||||||
build:
|
build:
|
||||||
name: Build
|
name: Build
|
||||||
@@ -81,184 +81,223 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
job:
|
job:
|
||||||
# { os, target, cargo-options, features, use-cross, toolchain }
|
# { os, target, cargo-options, features, use-cross, toolchain }
|
||||||
- { os: ubuntu-latest , target: arm-unknown-linux-gnueabihf , use-cross: use-cross }
|
- {
|
||||||
- { os: ubuntu-20.04 , target: i686-unknown-linux-gnu , use-cross: use-cross }
|
os: ubuntu-latest,
|
||||||
- { os: ubuntu-20.04 , target: i686-unknown-linux-musl , use-cross: use-cross }
|
target: aarch64-unknown-linux-gnu,
|
||||||
- { os: ubuntu-20.04 , target: x86_64-unknown-linux-gnu , use-cross: use-cross }
|
use-cross: use-cross,
|
||||||
- { os: ubuntu-20.04 , target: x86_64-unknown-linux-musl , use-cross: use-cross }
|
}
|
||||||
- { os: ubuntu-18.04 , target: x86_64-unknown-linux-gnu , use-cross: use-cross }
|
- {
|
||||||
- { os: macos-latest , target: x86_64-apple-darwin }
|
os: ubuntu-latest,
|
||||||
- { os: windows-latest , target: i686-pc-windows-gnu }
|
target: aarch64-unknown-linux-musl,
|
||||||
- { os: windows-latest , target: i686-pc-windows-msvc }
|
use-cross: use-cross,
|
||||||
- { os: windows-latest , target: x86_64-pc-windows-gnu } ## !maint: [rivy; 2020-01-21] may break due to rust bug; follow possible solution from GH:rust-lang/rust#47048 (refs: GH:rust-lang/rust#47048 , GH:rust-lang/rust#53454 , GH:bike-barn/hermit#172 )
|
}
|
||||||
- { os: windows-latest , target: x86_64-pc-windows-msvc }
|
- {
|
||||||
|
os: ubuntu-latest,
|
||||||
|
target: arm-unknown-linux-gnueabihf,
|
||||||
|
use-cross: use-cross,
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
os: ubuntu-latest,
|
||||||
|
target: arm-unknown-linux-musleabi,
|
||||||
|
use-cross: use-cross,
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
os: ubuntu-latest,
|
||||||
|
target: i686-unknown-linux-gnu,
|
||||||
|
use-cross: use-cross,
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
os: ubuntu-latest,
|
||||||
|
target: i686-unknown-linux-musl,
|
||||||
|
use-cross: use-cross,
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
os: ubuntu-latest,
|
||||||
|
target: x86_64-unknown-linux-gnu,
|
||||||
|
use-cross: use-cross,
|
||||||
|
}
|
||||||
|
- {
|
||||||
|
os: ubuntu-latest,
|
||||||
|
target: x86_64-unknown-linux-musl,
|
||||||
|
use-cross: use-cross,
|
||||||
|
}
|
||||||
|
- { os: macos-latest, target: x86_64-apple-darwin }
|
||||||
|
- { os: windows-latest, target: i686-pc-windows-gnu }
|
||||||
|
- { os: windows-latest, target: i686-pc-windows-msvc }
|
||||||
|
- { os: windows-latest, target: x86_64-pc-windows-gnu } ## !maint: [rivy; 2020-01-21] may break due to rust bug; follow possible solution from GH:rust-lang/rust#47048 (refs: GH:rust-lang/rust#47048 , GH:rust-lang/rust#53454 , GH:bike-barn/hermit#172 )
|
||||||
|
- { os: windows-latest, target: x86_64-pc-windows-msvc }
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v1
|
- uses: actions/checkout@v1
|
||||||
- name: Install any prerequisites
|
- name: Install any prerequisites
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
case ${{ matrix.job.target }} in
|
case ${{ matrix.job.target }} in
|
||||||
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
|
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
|
||||||
esac
|
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install binutils-aarch64-linux-gnu ;;
|
||||||
- name: Initialize workflow variables
|
esac
|
||||||
id: vars
|
- name: Initialize workflow variables
|
||||||
shell: bash
|
id: vars
|
||||||
run: |
|
shell: bash
|
||||||
# toolchain
|
run: |
|
||||||
TOOLCHAIN="stable" ## default to "stable" toolchain
|
# toolchain
|
||||||
# * specify alternate TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: <https://github.com/rust-lang/rust/issues/47048>, <https://github.com/rust-lang/rust/issues/53454>, <https://github.com/rust-lang/cargo/issues/6754>)
|
TOOLCHAIN="stable" ## default to "stable" toolchain
|
||||||
case ${{ matrix.job.target }} in *-pc-windows-gnu) TOOLCHAIN="stable-${{ matrix.job.target }}" ;; esac;
|
# * specify alternate TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: <https://github.com/rust-lang/rust/issues/47048>, <https://github.com/rust-lang/rust/issues/53454>, <https://github.com/rust-lang/cargo/issues/6754>)
|
||||||
# * use requested TOOLCHAIN if specified
|
case ${{ matrix.job.target }} in *-pc-windows-gnu) TOOLCHAIN="stable-${{ matrix.job.target }}" ;; esac;
|
||||||
if [ -n "${{ matrix.job.toolchain }}" ]; then TOOLCHAIN="${{ matrix.job.toolchain }}" ; fi
|
# * use requested TOOLCHAIN if specified
|
||||||
echo set-output name=TOOLCHAIN::${TOOLCHAIN}
|
if [ -n "${{ matrix.job.toolchain }}" ]; then TOOLCHAIN="${{ matrix.job.toolchain }}" ; fi
|
||||||
echo ::set-output name=TOOLCHAIN::${TOOLCHAIN}
|
echo set-output name=TOOLCHAIN::${TOOLCHAIN}
|
||||||
# staging directory
|
echo ::set-output name=TOOLCHAIN::${TOOLCHAIN}
|
||||||
STAGING='_staging'
|
# staging directory
|
||||||
echo set-output name=STAGING::${STAGING}
|
STAGING='_staging'
|
||||||
echo ::set-output name=STAGING::${STAGING}
|
echo set-output name=STAGING::${STAGING}
|
||||||
# determine EXE suffix
|
echo ::set-output name=STAGING::${STAGING}
|
||||||
EXE_suffix="" ; case ${{ matrix.job.target }} in *-pc-windows-*) EXE_suffix=".exe" ;; esac;
|
# determine EXE suffix
|
||||||
echo set-output name=EXE_suffix::${EXE_suffix}
|
EXE_suffix="" ; case ${{ matrix.job.target }} in *-pc-windows-*) EXE_suffix=".exe" ;; esac;
|
||||||
echo ::set-output name=EXE_suffix::${EXE_suffix}
|
echo set-output name=EXE_suffix::${EXE_suffix}
|
||||||
# parse commit reference info
|
echo ::set-output name=EXE_suffix::${EXE_suffix}
|
||||||
REF_NAME=${GITHUB_REF#refs/*/}
|
# parse commit reference info
|
||||||
unset REF_BRANCH ; case ${GITHUB_REF} in refs/heads/*) REF_BRANCH=${GITHUB_REF#refs/heads/} ;; esac;
|
REF_NAME=${GITHUB_REF#refs/*/}
|
||||||
unset REF_TAG ; case ${GITHUB_REF} in refs/tags/*) REF_TAG=${GITHUB_REF#refs/tags/} ;; esac;
|
unset REF_BRANCH ; case ${GITHUB_REF} in refs/heads/*) REF_BRANCH=${GITHUB_REF#refs/heads/} ;; esac;
|
||||||
REF_SHAS=${GITHUB_SHA:0:8}
|
unset REF_TAG ; case ${GITHUB_REF} in refs/tags/*) REF_TAG=${GITHUB_REF#refs/tags/} ;; esac;
|
||||||
echo set-output name=REF_NAME::${REF_NAME}
|
REF_SHAS=${GITHUB_SHA:0:8}
|
||||||
echo set-output name=REF_BRANCH::${REF_BRANCH}
|
echo set-output name=REF_NAME::${REF_NAME}
|
||||||
echo set-output name=REF_TAG::${REF_TAG}
|
echo set-output name=REF_BRANCH::${REF_BRANCH}
|
||||||
echo set-output name=REF_SHAS::${REF_SHAS}
|
echo set-output name=REF_TAG::${REF_TAG}
|
||||||
echo ::set-output name=REF_NAME::${REF_NAME}
|
echo set-output name=REF_SHAS::${REF_SHAS}
|
||||||
echo ::set-output name=REF_BRANCH::${REF_BRANCH}
|
echo ::set-output name=REF_NAME::${REF_NAME}
|
||||||
echo ::set-output name=REF_TAG::${REF_TAG}
|
echo ::set-output name=REF_BRANCH::${REF_BRANCH}
|
||||||
echo ::set-output name=REF_SHAS::${REF_SHAS}
|
echo ::set-output name=REF_TAG::${REF_TAG}
|
||||||
# parse target
|
echo ::set-output name=REF_SHAS::${REF_SHAS}
|
||||||
unset TARGET_ARCH ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) TARGET_ARCH=arm ;; i686-*) TARGET_ARCH=i686 ;; x86_64-*) TARGET_ARCH=x86_64 ;; esac;
|
# parse target
|
||||||
echo set-output name=TARGET_ARCH::${TARGET_ARCH}
|
unset TARGET_ARCH ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) TARGET_ARCH=arm ;; aarch-*) TARGET_ARCH=aarch64 ;; i686-*) TARGET_ARCH=i686 ;; x86_64-*) TARGET_ARCH=x86_64 ;; esac;
|
||||||
echo ::set-output name=TARGET_ARCH::${TARGET_ARCH}
|
echo set-output name=TARGET_ARCH::${TARGET_ARCH}
|
||||||
unset TARGET_OS ; case ${{ matrix.job.target }} in *-linux-*) TARGET_OS=linux ;; *-apple-*) TARGET_OS=macos ;; *-windows-*) TARGET_OS=windows ;; esac;
|
echo ::set-output name=TARGET_ARCH::${TARGET_ARCH}
|
||||||
echo set-output name=TARGET_OS::${TARGET_OS}
|
unset TARGET_OS ; case ${{ matrix.job.target }} in *-linux-*) TARGET_OS=linux ;; *-apple-*) TARGET_OS=macos ;; *-windows-*) TARGET_OS=windows ;; esac;
|
||||||
echo ::set-output name=TARGET_OS::${TARGET_OS}
|
echo set-output name=TARGET_OS::${TARGET_OS}
|
||||||
# package name
|
echo ::set-output name=TARGET_OS::${TARGET_OS}
|
||||||
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
|
# package name
|
||||||
PKG_BASENAME=${PROJECT_NAME}-${REF_TAG:-$REF_SHAS}-${{ matrix.job.target }}
|
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
|
||||||
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
|
PKG_BASENAME=${PROJECT_NAME}-${REF_TAG:-$REF_SHAS}-${{ matrix.job.target }}
|
||||||
echo set-output name=PKG_suffix::${PKG_suffix}
|
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
|
||||||
echo set-output name=PKG_BASENAME::${PKG_BASENAME}
|
echo set-output name=PKG_suffix::${PKG_suffix}
|
||||||
echo set-output name=PKG_NAME::${PKG_NAME}
|
echo set-output name=PKG_BASENAME::${PKG_BASENAME}
|
||||||
echo ::set-output name=PKG_suffix::${PKG_suffix}
|
echo set-output name=PKG_NAME::${PKG_NAME}
|
||||||
echo ::set-output name=PKG_BASENAME::${PKG_BASENAME}
|
echo ::set-output name=PKG_suffix::${PKG_suffix}
|
||||||
echo ::set-output name=PKG_NAME::${PKG_NAME}
|
echo ::set-output name=PKG_BASENAME::${PKG_BASENAME}
|
||||||
# deployable tag? (ie, leading "vM" or "M"; M == version number)
|
echo ::set-output name=PKG_NAME::${PKG_NAME}
|
||||||
unset DEPLOY ; if [[ $REF_TAG =~ ^[vV]?[0-9].* ]]; then DEPLOY='true' ; fi
|
# deployable tag? (ie, leading "vM" or "M"; M == version number)
|
||||||
echo set-output name=DEPLOY::${DEPLOY:-<empty>/false}
|
unset DEPLOY ; if [[ $REF_TAG =~ ^[vV]?[0-9].* ]]; then DEPLOY='true' ; fi
|
||||||
echo ::set-output name=DEPLOY::${DEPLOY}
|
echo set-output name=DEPLOY::${DEPLOY:-<empty>/false}
|
||||||
# target-specific options
|
echo ::set-output name=DEPLOY::${DEPLOY}
|
||||||
# * CARGO_FEATURES_OPTION
|
# target-specific options
|
||||||
CARGO_FEATURES_OPTION='' ;
|
# * CARGO_FEATURES_OPTION
|
||||||
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
|
CARGO_FEATURES_OPTION='' ;
|
||||||
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
|
||||||
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||||
# * CARGO_USE_CROSS (truthy)
|
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
|
||||||
CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac;
|
# * CARGO_USE_CROSS (truthy)
|
||||||
echo set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS:-<empty>/false}
|
CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac;
|
||||||
echo ::set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS}
|
echo set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS:-<empty>/false}
|
||||||
# # * `arm` cannot be tested on ubuntu-* hosts (b/c testing is currently primarily done via comparison of target outputs with built-in outputs and the `arm` target is not executable on the host)
|
echo ::set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS}
|
||||||
JOB_DO_TESTING="true"
|
# # * `arm` cannot be tested on ubuntu-* hosts (b/c testing is currently primarily done via comparison of target outputs with built-in outputs and the `arm` target is not executable on the host)
|
||||||
case ${{ matrix.job.target }} in arm-*) unset JOB_DO_TESTING ;; esac;
|
JOB_DO_TESTING="true"
|
||||||
echo set-output name=JOB_DO_TESTING::${JOB_DO_TESTING:-<empty>/false}
|
case ${{ matrix.job.target }} in arm-*|aarch64-*) unset JOB_DO_TESTING ;; esac;
|
||||||
echo ::set-output name=JOB_DO_TESTING::${JOB_DO_TESTING}
|
echo set-output name=JOB_DO_TESTING::${JOB_DO_TESTING:-<empty>/false}
|
||||||
# # * test only binary for arm-type targets
|
echo ::set-output name=JOB_DO_TESTING::${JOB_DO_TESTING}
|
||||||
unset CARGO_TEST_OPTIONS
|
# # * test only binary for arm-type targets
|
||||||
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
|
unset CARGO_TEST_OPTIONS
|
||||||
echo set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
|
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-*|aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
|
||||||
echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
|
echo set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
|
||||||
# * strip executable?
|
echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
|
||||||
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; esac;
|
# * strip executable?
|
||||||
echo set-output name=STRIP::${STRIP}
|
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; aarch64-unknown-linux-musl) STRIP="" ;; armv7-unknown-linux-musleabi) STRIP="" ;; arm-unknown-linux-musleabi) STRIP="" ;; esac;
|
||||||
echo ::set-output name=STRIP::${STRIP}
|
|
||||||
- name: Create all needed build/work directories
|
|
||||||
shell: bash
|
echo set-output name=STRIP::${STRIP}
|
||||||
run: |
|
echo ::set-output name=STRIP::${STRIP}
|
||||||
mkdir -p '${{ steps.vars.outputs.STAGING }}'
|
- name: Create all needed build/work directories
|
||||||
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
|
shell: bash
|
||||||
- name: rust toolchain ~ install
|
run: |
|
||||||
uses: actions-rs/toolchain@v1
|
mkdir -p '${{ steps.vars.outputs.STAGING }}'
|
||||||
with:
|
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
|
||||||
toolchain: ${{ steps.vars.outputs.TOOLCHAIN }}
|
- name: rust toolchain ~ install
|
||||||
target: ${{ matrix.job.target }}
|
uses: actions-rs/toolchain@v1
|
||||||
override: true
|
with:
|
||||||
profile: minimal # minimal component installation (ie, no documentation)
|
toolchain: ${{ steps.vars.outputs.TOOLCHAIN }}
|
||||||
- name: Info
|
target: ${{ matrix.job.target }}
|
||||||
shell: bash
|
override: true
|
||||||
run: |
|
profile: minimal # minimal component installation (ie, no documentation)
|
||||||
gcc --version || true
|
- name: Info
|
||||||
rustup -V
|
shell: bash
|
||||||
rustup toolchain list
|
run: |
|
||||||
rustup default
|
gcc --version || true
|
||||||
cargo -V
|
rustup -V
|
||||||
rustc -V
|
rustup toolchain list
|
||||||
- name: Build
|
rustup default
|
||||||
uses: actions-rs/cargo@v1
|
cargo -V
|
||||||
with:
|
rustc -V
|
||||||
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
|
- name: Build
|
||||||
command: build
|
uses: actions-rs/cargo@v1
|
||||||
args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
|
with:
|
||||||
- name: Install cargo-deb
|
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
|
||||||
uses: actions-rs/cargo@v1
|
command: build
|
||||||
with:
|
args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
|
||||||
|
- name: Install cargo-deb
|
||||||
|
uses: actions-rs/cargo@v1
|
||||||
|
with:
|
||||||
command: install
|
command: install
|
||||||
args: cargo-deb
|
args: cargo-deb
|
||||||
if: ${{ contains(matrix.job.target, 'musl') }}
|
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
|
||||||
- name: Build deb
|
- name: Build deb
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
command: deb
|
command: deb
|
||||||
args: --no-build --target=${{ matrix.job.target }}
|
args: --no-build --target=${{ matrix.job.target }}
|
||||||
if: ${{ contains(matrix.job.target, 'musl') }}
|
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
|
||||||
- name: Test
|
- name: Test
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
|
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
|
||||||
command: test
|
command: test
|
||||||
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
|
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
|
||||||
- name: Archive executable artifacts
|
- name: Archive executable artifacts
|
||||||
uses: actions/upload-artifact@master
|
uses: actions/upload-artifact@master
|
||||||
with:
|
with:
|
||||||
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}
|
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}
|
||||||
path: target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}
|
path: target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}
|
||||||
- name: Archive deb artifacts
|
- name: Archive deb artifacts
|
||||||
uses: actions/upload-artifact@master
|
uses: actions/upload-artifact@master
|
||||||
with:
|
with:
|
||||||
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}.deb
|
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}.deb
|
||||||
path: target/${{ matrix.job.target }}/debian
|
path: target/${{ matrix.job.target }}/debian
|
||||||
if: ${{ contains(matrix.job.target, 'musl') }}
|
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
|
||||||
- name: Package
|
- name: Package
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
# binary
|
# binary
|
||||||
cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
||||||
# `strip` binary (if needed)
|
# `strip` binary (if needed)
|
||||||
if [ -n "${{ steps.vars.outputs.STRIP }}" ]; then "${{ steps.vars.outputs.STRIP }}" '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' ; fi
|
if [ -n "${{ steps.vars.outputs.STRIP }}" ]; then "${{ steps.vars.outputs.STRIP }}" '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' ; fi
|
||||||
# README and LICENSE
|
# README and LICENSE
|
||||||
cp README.md '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
cp README.md '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
||||||
cp LICENSE '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
cp LICENSE '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
|
||||||
# base compressed package
|
# base compressed package
|
||||||
pushd '${{ steps.vars.outputs.STAGING }}/' >/dev/null
|
pushd '${{ steps.vars.outputs.STAGING }}/' >/dev/null
|
||||||
case ${{ matrix.job.target }} in
|
case ${{ matrix.job.target }} in
|
||||||
*-pc-windows-*) 7z -y a '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* | tail -2 ;;
|
*-pc-windows-*) 7z -y a '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* | tail -2 ;;
|
||||||
*) tar czf '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* ;;
|
*) tar czf '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* ;;
|
||||||
esac;
|
esac;
|
||||||
popd >/dev/null
|
popd >/dev/null
|
||||||
- name: Publish
|
- name: Publish
|
||||||
uses: softprops/action-gh-release@v1
|
uses: softprops/action-gh-release@v1
|
||||||
if: steps.vars.outputs.DEPLOY
|
if: steps.vars.outputs.DEPLOY
|
||||||
with:
|
with:
|
||||||
files: |
|
files: |
|
||||||
${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_NAME }}
|
${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_NAME }}
|
||||||
env:
|
target/${{ matrix.job.target }}/debian/*.deb
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
## fix! [rivy; 2020-22-01] `cargo tarpaulin` is unable to test this repo at the moment; alternate recipe or another testing framework?
|
## fix! [rivy; 2020-22-01] `cargo tarpaulin` is unable to test this repo at the moment; alternate recipe or another testing framework?
|
||||||
# coverage:
|
# coverage:
|
||||||
|
|||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -6,4 +6,7 @@
|
|||||||
**/*.rs.bk
|
**/*.rs.bk
|
||||||
*.swp
|
*.swp
|
||||||
.vscode/*
|
.vscode/*
|
||||||
*.idea/*
|
*.idea/*
|
||||||
|
|
||||||
|
#ignore macos files
|
||||||
|
.DS_Store
|
||||||
946
Cargo.lock
generated
946
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
54
Cargo.toml
54
Cargo.toml
@@ -1,9 +1,9 @@
|
|||||||
[package]
|
[package]
|
||||||
name = "du-dust"
|
name = "du-dust"
|
||||||
description = "A more intuitive version of du"
|
description = "A more intuitive version of du"
|
||||||
version = "0.7.5"
|
version = "1.1.1"
|
||||||
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
|
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
|
||||||
edition = "2018"
|
edition = "2021"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
|
|
||||||
documentation = "https://github.com/bootandy/dust"
|
documentation = "https://github.com/bootandy/dust"
|
||||||
@@ -21,34 +21,68 @@ travis-ci = { repository = "https://travis-ci.org/bootandy/dust" }
|
|||||||
name = "dust"
|
name = "dust"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
|
||||||
|
[profile.release]
|
||||||
|
codegen-units = 1
|
||||||
|
lto = true
|
||||||
|
strip = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ansi_term = "0.12"
|
ansi_term = "0.12"
|
||||||
clap = { version = "=2.33", features = ["wrap_help"] }
|
clap = "4.4"
|
||||||
lscolors = "0.7"
|
lscolors = "0.13"
|
||||||
terminal_size = "0.1"
|
terminal_size = "0.2"
|
||||||
unicode-width = "0.1"
|
unicode-width = "0.1"
|
||||||
rayon="1"
|
rayon = "1"
|
||||||
thousands = "0.2"
|
thousands = "0.2"
|
||||||
stfu8 = "0.2"
|
stfu8 = "0.2"
|
||||||
regex = "1"
|
regex = "1"
|
||||||
|
config-file = "0.2"
|
||||||
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
|
serde_json = "1.0"
|
||||||
|
directories = "4"
|
||||||
|
sysinfo = "0.27"
|
||||||
|
ctrlc = "3.4"
|
||||||
|
chrono = "0.4"
|
||||||
|
|
||||||
[target.'cfg(windows)'.dependencies]
|
[target.'cfg(windows)'.dependencies]
|
||||||
winapi-util = "0.1"
|
winapi-util = "0.1"
|
||||||
|
filesize = "0.2.0"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
assert_cmd = "1"
|
assert_cmd = "2"
|
||||||
tempfile = "=3"
|
tempfile = "=3"
|
||||||
|
|
||||||
|
[build-dependencies]
|
||||||
|
clap = "4.4"
|
||||||
|
clap_complete = "4.4"
|
||||||
|
clap_mangen = "0.2"
|
||||||
|
|
||||||
[[test]]
|
[[test]]
|
||||||
name = "integration"
|
name = "integration"
|
||||||
path = "tests/tests.rs"
|
path = "tests/tests.rs"
|
||||||
|
|
||||||
|
[package.metadata.binstall]
|
||||||
|
pkg-url = "{ repo }/releases/download/v{ version }/dust-v{ version }-{ target }{ archive-suffix }"
|
||||||
|
bin-dir = "dust-v{ version }-{ target }/{ bin }{ binary-ext }"
|
||||||
|
|
||||||
[package.metadata.deb]
|
[package.metadata.deb]
|
||||||
section = "utils"
|
section = "utils"
|
||||||
assets = [
|
assets = [
|
||||||
["target/release/dust", "usr/bin/", "755"],
|
[
|
||||||
["LICENSE", "usr/share/doc/du-dust/", "644"],
|
"target/release/dust",
|
||||||
["README.md", "usr/share/doc/du-dust/README", "644"],
|
"usr/bin/",
|
||||||
|
"755",
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"LICENSE",
|
||||||
|
"usr/share/doc/du-dust/",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"README.md",
|
||||||
|
"usr/share/doc/du-dust/README",
|
||||||
|
"644",
|
||||||
|
],
|
||||||
]
|
]
|
||||||
extended-description = """\
|
extended-description = """\
|
||||||
Dust is meant to give you an instant overview of which directories are using
|
Dust is meant to give you an instant overview of which directories are using
|
||||||
|
|||||||
2
LICENSE
2
LICENSE
@@ -186,7 +186,7 @@
|
|||||||
same "printed page" as the copyright notice for easier
|
same "printed page" as the copyright notice for easier
|
||||||
identification within third-party archives.
|
identification within third-party archives.
|
||||||
|
|
||||||
Copyright [yyyy] [name of copyright owner]
|
Copyright [2023] [andrew boot]
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
you may not use this file except in compliance with the License.
|
you may not use this file except in compliance with the License.
|
||||||
|
|||||||
84
README.md
84
README.md
@@ -1,5 +1,5 @@
|
|||||||
|
[](https://github.com/bootandy/dust/actions)
|
||||||
|
|
||||||
[](https://travis-ci.org/bootandy/dust)
|
|
||||||
|
|
||||||
# Dust
|
# Dust
|
||||||
|
|
||||||
@@ -10,27 +10,46 @@ du + rust = dust. Like du but more intuitive.
|
|||||||
Because I want an easy way to see where my disk is being used.
|
Because I want an easy way to see where my disk is being used.
|
||||||
|
|
||||||
# Demo
|
# Demo
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
|
|
||||||
#### Cargo <a href="https://repology.org/project/du-dust/versions"><img src="https://repology.org/badge/vertical-allrepos/du-dust.svg" alt="Packaging status" align="right"></a>
|
#### Cargo <a href="https://repology.org/project/du-dust/versions"><img src="https://repology.org/badge/vertical-allrepos/du-dust.svg" alt="Packaging status" align="right"></a>
|
||||||
|
|
||||||
* `cargo install du-dust`
|
- `cargo install du-dust`
|
||||||
|
|
||||||
#### 🍺 Homebrew (Mac OS)
|
#### 🍺 Homebrew (Mac OS)
|
||||||
|
|
||||||
* `brew install dust`
|
- `brew install dust`
|
||||||
|
|
||||||
#### 🍺 Homebrew (Linux)
|
#### 🍺 Homebrew (Linux)
|
||||||
|
|
||||||
* `brew tap tgotwig/linux-dust && brew install dust`
|
- `brew install dust`
|
||||||
|
|
||||||
|
#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)
|
||||||
|
|
||||||
|
- `pacstall -I dust-bin`
|
||||||
|
|
||||||
|
### Anaconda (conda-forge)
|
||||||
|
|
||||||
|
- `conda install -c conda-forge dust`
|
||||||
|
|
||||||
|
#### [deb-get](https://github.com/wimpysworld/deb-get) (Debian/Ubuntu)
|
||||||
|
|
||||||
|
- `deb-get install du-dust`
|
||||||
|
|
||||||
|
#### Windows:
|
||||||
|
|
||||||
|
- `scoop install dust`
|
||||||
|
- Windows GNU version - works
|
||||||
|
- Windows MSVC - requires: [VCRUNTIME140.dll](https://docs.microsoft.com/en-gb/cpp/windows/latest-supported-vc-redist?view=msvc-170)
|
||||||
|
|
||||||
#### Download
|
#### Download
|
||||||
|
|
||||||
* Download Linux/Mac binary from [Releases](https://github.com/bootandy/dust/releases)
|
- Download Linux/Mac binary from [Releases](https://github.com/bootandy/dust/releases)
|
||||||
* unzip file: `tar -xvf _downloaded_file.tar.gz`
|
- unzip file: `tar -xvf _downloaded_file.tar.gz`
|
||||||
* move file to executable path: `sudo mv dust /usr/local/bin/`
|
- move file to executable path: `sudo mv dust /usr/local/bin/`
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
@@ -48,27 +67,50 @@ Usage: dust <dir>
|
|||||||
Usage: dust <dir> <another_dir> <and_more>
|
Usage: dust <dir> <another_dir> <and_more>
|
||||||
Usage: dust -p (full-path - Show fullpath of the subdirectories)
|
Usage: dust -p (full-path - Show fullpath of the subdirectories)
|
||||||
Usage: dust -s (apparent-size - shows the length of the file as opposed to the amount of disk space it uses)
|
Usage: dust -s (apparent-size - shows the length of the file as opposed to the amount of disk space it uses)
|
||||||
Usage: dust -n 30 (shows 30 directories instead of the default [default is terminal height])
|
Usage: dust -n 30 (Shows 30 directories instead of the default [default is terminal height])
|
||||||
Usage: dust -d 3 (shows 3 levels of subdirectories)
|
Usage: dust -d 3 (Shows 3 levels of subdirectories)
|
||||||
Usage: dust -r (reverse order of output)
|
Usage: dust -D (Show only directories (eg dust -D))
|
||||||
|
Usage: dust -F (Show only files - finds your largest files)
|
||||||
|
Usage: dust -r (reverse order of output)
|
||||||
|
Usage: dust -o si/b/kb/kib/mb/mib/gb/gib (si - prints sizes in powers of 1000. Others print size in that format).
|
||||||
Usage: dust -X ignore (ignore all files and directories with the name 'ignore')
|
Usage: dust -X ignore (ignore all files and directories with the name 'ignore')
|
||||||
Usage: dust -x (only show directories on the same filesystem)
|
Usage: dust -x (Only show directories on the same filesystem)
|
||||||
Usage: dust -b (do not show percentages or draw ASCII bars)
|
Usage: dust -b (Do not show percentages or draw ASCII bars)
|
||||||
Usage: dust -i (do not show hidden files)
|
Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen)
|
||||||
|
Usage: dust -i (Do not show hidden files)
|
||||||
Usage: dust -c (No colors [monochrome])
|
Usage: dust -c (No colors [monochrome])
|
||||||
|
Usage: dust -C (Force colors)
|
||||||
Usage: dust -f (Count files instead of diskspace)
|
Usage: dust -f (Count files instead of diskspace)
|
||||||
Usage: dust -t Group by filetype
|
Usage: dust -t (Group by filetype)
|
||||||
Usage: dust -e regex Only include files matching this regex (eg dust -e "\.png$" would match png files)
|
Usage: dust -z 10M (min-size, Only include files larger than 10M)
|
||||||
|
Usage: dust -e regex (Only include files matching this regex (eg dust -e "\.png$" would match png files))
|
||||||
|
Usage: dust -v regex (Exclude files matching this regex (eg dust -v "\.png$" would ignore png files))
|
||||||
|
Usage: dust -L (dereference-links - Treat sym links as directories and go into them)
|
||||||
|
Usage: dust -P (Disable the progress indicator)
|
||||||
|
Usage: dust -R (For screen readers. Removes bars/symbols. Adds new column: depth level. (May want to use -p for full path too))
|
||||||
|
Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack overflow' (default allocation: low memory=1048576, high memory=1073741824)"),
|
||||||
|
Usage: dust --skip-total (No total row will be displayed)
|
||||||
|
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
|
||||||
|
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
|
||||||
|
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Config file
|
||||||
|
|
||||||
|
Dust has a config file where the above options can be set.
|
||||||
|
Either: `~/.config/dust/config.toml` or `~/.dust.toml`
|
||||||
|
```
|
||||||
|
$ cat ~/.config/dust/config.toml
|
||||||
|
reverse=true
|
||||||
|
```
|
||||||
|
|
||||||
## Alternatives
|
## Alternatives
|
||||||
|
|
||||||
* [NCDU](https://dev.yorhel.nl/ncdu)
|
- [NCDU](https://dev.yorhel.nl/ncdu)
|
||||||
* [dutree](https://github.com/nachoparker/dutree)
|
- [dutree](https://github.com/nachoparker/dutree)
|
||||||
* [dua](https://github.com/Byron/dua-cli/)
|
- [dua](https://github.com/Byron/dua-cli/)
|
||||||
* [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
|
- [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
|
||||||
* [dirstat-rs](https://github.com/scullionw/dirstat-rs)
|
- [dirstat-rs](https://github.com/scullionw/dirstat-rs)
|
||||||
* du -d 1 -h | sort -h
|
- du -d 1 -h | sort -h
|
||||||
|
|
||||||
Note: Apparent-size is calculated slightly differently in dust to gdu. In dust each hard link is counted as using file_length space. In gdu only the first entry is counted.
|
Note: Apparent-size is calculated slightly differently in dust to gdu. In dust each hard link is counted as using file_length space. In gdu only the first entry is counted.
|
||||||
|
|||||||
27
build.rs
Normal file
27
build.rs
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
use clap_complete::{generate_to, shells::*};
|
||||||
|
use clap_mangen::Man;
|
||||||
|
use std::fs::File;
|
||||||
|
use std::io::Error;
|
||||||
|
use std::path::Path;
|
||||||
|
|
||||||
|
include!("src/cli.rs");
|
||||||
|
|
||||||
|
fn main() -> Result<(), Error> {
|
||||||
|
let outdir = "completions";
|
||||||
|
let app_name = "dust";
|
||||||
|
let mut cmd = build_cli();
|
||||||
|
|
||||||
|
generate_to(Bash, &mut cmd, app_name, outdir)?;
|
||||||
|
generate_to(Zsh, &mut cmd, app_name, outdir)?;
|
||||||
|
generate_to(Fish, &mut cmd, app_name, outdir)?;
|
||||||
|
generate_to(PowerShell, &mut cmd, app_name, outdir)?;
|
||||||
|
generate_to(Elvish, &mut cmd, app_name, outdir)?;
|
||||||
|
|
||||||
|
let file = Path::new("man-page").join("dust.1");
|
||||||
|
std::fs::create_dir_all("man-page")?;
|
||||||
|
let mut file = File::create(file)?;
|
||||||
|
|
||||||
|
Man::new(cmd).render(&mut file)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
@@ -1,10 +1,21 @@
|
|||||||
# ----------- To do a release ---------
|
# ----------- To do a release ---------
|
||||||
# edit version in cargo.toml
|
|
||||||
|
# ----------- Pre release ---------
|
||||||
|
# Compare times of runs to check no drastic slow down:
|
||||||
|
# hyperfine 'target/release/dust /home/andy'
|
||||||
|
# hyperfine 'dust /home/andy'
|
||||||
|
|
||||||
|
# ----------- Release ---------
|
||||||
|
# inc version in cargo.toml
|
||||||
|
# cargo build --release
|
||||||
|
# commit changed files
|
||||||
|
# merge to master in github
|
||||||
|
|
||||||
# tag a commit and push (increment version in Cargo.toml first):
|
# tag a commit and push (increment version in Cargo.toml first):
|
||||||
# git tag v0.4.5
|
# git tag v0.4.5
|
||||||
# git push origin v0.4.5
|
# git push origin v0.4.5
|
||||||
|
|
||||||
# cargo publish to put it in crates.io
|
# cargo publish to put it in crates.io
|
||||||
|
|
||||||
# To install locally [Do before pushing it]
|
# Optional: To install locally
|
||||||
#cargo install --path .
|
#cargo install --path .
|
||||||
|
|||||||
101
completions/_dust
Normal file
101
completions/_dust
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
#compdef dust
|
||||||
|
|
||||||
|
autoload -U is-at-least
|
||||||
|
|
||||||
|
_dust() {
|
||||||
|
typeset -A opt_args
|
||||||
|
typeset -a _arguments_options
|
||||||
|
local ret=1
|
||||||
|
|
||||||
|
if is-at-least 5.2; then
|
||||||
|
_arguments_options=(-s -S -C)
|
||||||
|
else
|
||||||
|
_arguments_options=(-s -C)
|
||||||
|
fi
|
||||||
|
|
||||||
|
local context curcontext="$curcontext" state line
|
||||||
|
_arguments "${_arguments_options[@]}" \
|
||||||
|
'-d+[Depth to show]:DEPTH: ' \
|
||||||
|
'--depth=[Depth to show]:DEPTH: ' \
|
||||||
|
'-T+[Number of threads to use]: : ' \
|
||||||
|
'--threads=[Number of threads to use]: : ' \
|
||||||
|
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
|
||||||
|
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
|
||||||
|
'*-X+[Exclude any file or directory with this name]:PATH:_files' \
|
||||||
|
'*--ignore-directory=[Exclude any file or directory with this name]:PATH:_files' \
|
||||||
|
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
|
||||||
|
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
|
||||||
|
'-z+[Minimum size file to include in output]:MIN_SIZE: ' \
|
||||||
|
'--min-size=[Minimum size file to include in output]:MIN_SIZE: ' \
|
||||||
|
'(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \
|
||||||
|
'(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \
|
||||||
|
'(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \
|
||||||
|
'(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \
|
||||||
|
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \
|
||||||
|
'--terminal_width=[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \
|
||||||
|
'-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
|
||||||
|
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
|
||||||
|
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
|
||||||
|
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
|
||||||
|
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => \[curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)]: : ' \
|
||||||
|
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => \[curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)]: : ' \
|
||||||
|
'-A+[just like -mtime, but based on file access time]: : ' \
|
||||||
|
'--atime=[just like -mtime, but based on file access time]: : ' \
|
||||||
|
'-y+[just like -mtime, but based on file change time]: : ' \
|
||||||
|
'--ctime=[just like -mtime, but based on file change time]: : ' \
|
||||||
|
'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]: :_files' \
|
||||||
|
'-p[Subdirectories will not have their path shortened]' \
|
||||||
|
'--full-paths[Subdirectories will not have their path shortened]' \
|
||||||
|
'-L[dereference sym links - Treat sym links as directories and go into them]' \
|
||||||
|
'--dereference-links[dereference sym links - Treat sym links as directories and go into them]' \
|
||||||
|
'-x[Only count the files and directories on the same filesystem as the supplied directory]' \
|
||||||
|
'--limit-filesystem[Only count the files and directories on the same filesystem as the supplied directory]' \
|
||||||
|
'-s[Use file length instead of blocks]' \
|
||||||
|
'--apparent-size[Use file length instead of blocks]' \
|
||||||
|
'-r[Print tree upside down (biggest highest)]' \
|
||||||
|
'--reverse[Print tree upside down (biggest highest)]' \
|
||||||
|
'-c[No colors will be printed (Useful for commands like\: watch)]' \
|
||||||
|
'--no-colors[No colors will be printed (Useful for commands like\: watch)]' \
|
||||||
|
'-C[Force colors print]' \
|
||||||
|
'--force-colors[Force colors print]' \
|
||||||
|
'-b[No percent bars or percentages will be displayed]' \
|
||||||
|
'--no-percent-bars[No percent bars or percentages will be displayed]' \
|
||||||
|
'-B[percent bars moved to right side of screen]' \
|
||||||
|
'--bars-on-right[percent bars moved to right side of screen]' \
|
||||||
|
'-R[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
|
||||||
|
'--screen-reader[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
|
||||||
|
'--skip-total[No total row will be displayed]' \
|
||||||
|
'-f[Directory '\''size'\'' is number of child files instead of disk size]' \
|
||||||
|
'--filecount[Directory '\''size'\'' is number of child files instead of disk size]' \
|
||||||
|
'-i[Do not display hidden files]' \
|
||||||
|
'--ignore_hidden[Do not display hidden files]' \
|
||||||
|
'(-d --depth -D --only-dir)-t[show only these file types]' \
|
||||||
|
'(-d --depth -D --only-dir)--file_types[show only these file types]' \
|
||||||
|
'-P[Disable the progress indication.]' \
|
||||||
|
'--no-progress[Disable the progress indication.]' \
|
||||||
|
'--print-errors[Print path with errors.]' \
|
||||||
|
'(-F --only-file -t --file_types)-D[Only directories will be displayed.]' \
|
||||||
|
'(-F --only-file -t --file_types)--only-dir[Only directories will be displayed.]' \
|
||||||
|
'(-D --only-dir)-F[Only files will be displayed. (Finds your largest files)]' \
|
||||||
|
'(-D --only-dir)--only-file[Only files will be displayed. (Finds your largest files)]' \
|
||||||
|
'-j[Output the directory tree as json to the current directory]' \
|
||||||
|
'--output-json[Output the directory tree as json to the current directory]' \
|
||||||
|
'-h[Print help]' \
|
||||||
|
'--help[Print help]' \
|
||||||
|
'-V[Print version]' \
|
||||||
|
'--version[Print version]' \
|
||||||
|
'*::params:_files' \
|
||||||
|
&& ret=0
|
||||||
|
}
|
||||||
|
|
||||||
|
(( $+functions[_dust_commands] )) ||
|
||||||
|
_dust_commands() {
|
||||||
|
local commands; commands=()
|
||||||
|
_describe -t commands 'dust commands' commands "$@"
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ "$funcstack[1]" = "_dust" ]; then
|
||||||
|
_dust "$@"
|
||||||
|
else
|
||||||
|
compdef _dust dust
|
||||||
|
fi
|
||||||
99
completions/_dust.ps1
Normal file
99
completions/_dust.ps1
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
|
||||||
|
using namespace System.Management.Automation
|
||||||
|
using namespace System.Management.Automation.Language
|
||||||
|
|
||||||
|
Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
|
||||||
|
param($wordToComplete, $commandAst, $cursorPosition)
|
||||||
|
|
||||||
|
$commandElements = $commandAst.CommandElements
|
||||||
|
$command = @(
|
||||||
|
'dust'
|
||||||
|
for ($i = 1; $i -lt $commandElements.Count; $i++) {
|
||||||
|
$element = $commandElements[$i]
|
||||||
|
if ($element -isnot [StringConstantExpressionAst] -or
|
||||||
|
$element.StringConstantType -ne [StringConstantType]::BareWord -or
|
||||||
|
$element.Value.StartsWith('-') -or
|
||||||
|
$element.Value -eq $wordToComplete) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
$element.Value
|
||||||
|
}) -join ';'
|
||||||
|
|
||||||
|
$completions = @(switch ($command) {
|
||||||
|
'dust' {
|
||||||
|
[CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Depth to show')
|
||||||
|
[CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Depth to show')
|
||||||
|
[CompletionResult]::new('-T', 'T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
|
||||||
|
[CompletionResult]::new('--threads', 'threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
|
||||||
|
[CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
|
||||||
|
[CompletionResult]::new('--number-of-lines', 'number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
|
||||||
|
[CompletionResult]::new('-X', 'X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
|
||||||
|
[CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
|
||||||
|
[CompletionResult]::new('-I', 'I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('--ignore-all-in-file', 'ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('-z', 'z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('--min-size', 'min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('--invert-filter', 'invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('-e', 'e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('--filter', 'filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('--terminal_width', 'terminal_width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('--output-format', 'output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('-S', 'S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('--stack-size', 'stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('-M', 'M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)')
[CompletionResult]::new('--mtime', 'mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)')
[CompletionResult]::new('-A', 'A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('--atime', 'atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('-y', 'y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--ctime', 'ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--files0-from', 'files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
[CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('--full-paths', 'full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('-L', 'L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('--dereference-links', 'dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('-x', 'x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('--limit-filesystem', 'limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('-s', 's', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('--apparent-size', 'apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('-r', 'r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('--reverse', 'reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('--no-colors', 'no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('-C', 'C ', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('--force-colors', 'force-colors', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('-b', 'b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('--no-percent-bars', 'no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('-B', 'B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('--bars-on-right', 'bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('-R', 'R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--screen-reader', 'screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--skip-total', 'skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
[CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('--filecount', 'filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('--ignore_hidden', 'ignore_hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('-t', 't', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('--file_types', 'file_types', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('-P', 'P ', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--no-progress', 'no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--print-errors', 'print-errors', [CompletionResultType]::ParameterName, 'Print path with errors.')
[CompletionResult]::new('-D', 'D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('--only-dir', 'only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('-F', 'F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('--only-file', 'only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('-j', 'j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('--output-json', 'output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('-V', 'V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version')
break
}
})

$completions.Where{ $_.CompletionText -like "$wordToComplete*" } |
Sort-Object -Property ListItemText
}
180
completions/dust.bash
Normal file
@@ -0,0 +1,180 @@
_dust() {
local i cur prev opts cmd
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
cmd=""
opts=""

for i in ${COMP_WORDS[@]}
do
case "${cmd},${i}" in
",$1")
cmd="dust"
;;
*)
;;
esac
done

case "${cmd}" in
dust)
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -h -V --depth --threads --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --help --version [PATH]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
fi
case "${prev}" in
--depth)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-d)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--threads)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-T)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--number-of-lines)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-n)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ignore-directory)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-X)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ignore-all-in-file)
local oldifs
if [[ -v IFS ]]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [[ -v oldifs ]]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
-I)
local oldifs
if [[ -v IFS ]]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [[ -v oldifs ]]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--min-size)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-z)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--invert-filter)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-v)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--filter)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-e)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--terminal_width)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-w)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--output-format)
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
return 0
;;
-o)
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
return 0
;;
--stack-size)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-S)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--mtime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-M)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--atime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-A)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ctime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-y)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files0-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
;;
esac
}

if [[ "${BASH_VERSINFO[0]}" -eq 4 && "${BASH_VERSINFO[1]}" -ge 4 || "${BASH_VERSINFO[0]}" -gt 4 ]]; then
complete -F _dust -o nosort -o bashdefault -o default dust
else
complete -F _dust -o bashdefault -o default dust
fi
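The script above registers the _dust function with complete, using -o nosort on bash 4.4+ so candidates keep the order clap generated. A minimal sketch of how it could be loaded from a source checkout follows; the paths are illustrative assumptions, not something this diff prescribes:

    # load the completion for the current shell session
    source completions/dust.bash
    # or copy it where bash-completion usually picks up user completions
    mkdir -p ~/.local/share/bash-completion/completions
    cp completions/dust.bash ~/.local/share/bash-completion/completions/dust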
93
completions/dust.elv
Normal file
@@ -0,0 +1,93 @@
|
||||||
|
use builtin;
|
||||||
|
use str;
|
||||||
|
|
||||||
|
set edit:completion:arg-completer[dust] = {|@words|
|
||||||
|
fn spaces {|n|
|
||||||
|
builtin:repeat $n ' ' | str:join ''
|
||||||
|
}
|
||||||
|
fn cand {|text desc|
|
||||||
|
edit:complex-candidate $text &display=$text' '(spaces (- 14 (wcswidth $text)))$desc
|
||||||
|
}
|
||||||
|
var command = 'dust'
|
||||||
|
for word $words[1..-1] {
|
||||||
|
if (str:has-prefix $word '-') {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
set command = $command';'$word
|
||||||
|
}
|
||||||
|
var completions = [
|
||||||
|
&'dust'= {
|
||||||
|
cand -d 'Depth to show'
|
||||||
|
cand --depth 'Depth to show'
|
||||||
|
cand -T 'Number of threads to use'
|
||||||
|
cand --threads 'Number of threads to use'
|
||||||
|
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
|
||||||
|
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
|
||||||
|
cand -X 'Exclude any file or directory with this name'
|
||||||
|
cand --ignore-directory 'Exclude any file or directory with this name'
|
||||||
|
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
|
||||||
|
cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
|
||||||
|
cand -z 'Minimum size file to include in output'
|
||||||
|
cand --min-size 'Minimum size file to include in output'
|
||||||
|
cand -v 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" '
|
||||||
|
cand --invert-filter 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" '
|
||||||
|
cand -e 'Only include filepaths matching this regex. For png files type: -e "\.png$" '
|
||||||
|
cand --filter 'Only include filepaths matching this regex. For png files type: -e "\.png$" '
|
||||||
|
cand -w 'Specify width of output overriding the auto detection of terminal width'
|
||||||
|
cand --terminal_width 'Specify width of output overriding the auto detection of terminal width'
|
||||||
|
cand -o 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.'
|
||||||
|
cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.'
|
||||||
|
cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
|
||||||
|
cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
|
||||||
|
cand -M '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)'
|
||||||
|
cand --mtime '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)'
|
||||||
|
cand -A 'just like -mtime, but based on file access time'
|
||||||
|
cand --atime 'just like -mtime, but based on file access time'
|
||||||
|
cand -y 'just like -mtime, but based on file change time'
|
||||||
|
cand --ctime 'just like -mtime, but based on file change time'
|
||||||
|
cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
|
||||||
|
cand -p 'Subdirectories will not have their path shortened'
|
||||||
|
cand --full-paths 'Subdirectories will not have their path shortened'
|
||||||
|
cand -L 'dereference sym links - Treat sym links as directories and go into them'
|
||||||
|
cand --dereference-links 'dereference sym links - Treat sym links as directories and go into them'
|
||||||
|
cand -x 'Only count the files and directories on the same filesystem as the supplied directory'
|
||||||
|
cand --limit-filesystem 'Only count the files and directories on the same filesystem as the supplied directory'
|
||||||
|
cand -s 'Use file length instead of blocks'
|
||||||
|
cand --apparent-size 'Use file length instead of blocks'
|
||||||
|
cand -r 'Print tree upside down (biggest highest)'
|
||||||
|
cand --reverse 'Print tree upside down (biggest highest)'
|
||||||
|
cand -c 'No colors will be printed (Useful for commands like: watch)'
|
||||||
|
cand --no-colors 'No colors will be printed (Useful for commands like: watch)'
|
||||||
|
cand -C 'Force colors print'
|
||||||
|
cand --force-colors 'Force colors print'
|
||||||
|
cand -b 'No percent bars or percentages will be displayed'
|
||||||
|
cand --no-percent-bars 'No percent bars or percentages will be displayed'
|
||||||
|
cand -B 'percent bars moved to right side of screen'
|
||||||
|
cand --bars-on-right 'percent bars moved to right side of screen'
|
||||||
|
cand -R 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
|
||||||
|
cand --screen-reader 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
|
||||||
|
cand --skip-total 'No total row will be displayed'
|
||||||
|
cand -f 'Directory ''size'' is number of child files instead of disk size'
|
||||||
|
cand --filecount 'Directory ''size'' is number of child files instead of disk size'
|
||||||
|
cand -i 'Do not display hidden files'
|
||||||
|
cand --ignore_hidden 'Do not display hidden files'
|
||||||
|
cand -t 'show only these file types'
|
||||||
|
cand --file_types 'show only these file types'
|
||||||
|
cand -P 'Disable the progress indication.'
|
||||||
|
cand --no-progress 'Disable the progress indication.'
|
||||||
|
cand --print-errors 'Print path with errors.'
|
||||||
|
cand -D 'Only directories will be displayed.'
|
||||||
|
cand --only-dir 'Only directories will be displayed.'
|
||||||
|
cand -F 'Only files will be displayed. (Finds your largest files)'
|
||||||
|
cand --only-file 'Only files will be displayed. (Finds your largest files)'
|
||||||
|
cand -j 'Output the directory tree as json to the current directory'
|
||||||
|
cand --output-json 'Output the directory tree as json to the current directory'
|
||||||
|
cand -h 'Print help'
|
||||||
|
cand --help 'Print help'
|
||||||
|
cand -V 'Print version'
|
||||||
|
cand --version 'Print version'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
$completions[$command]
|
||||||
|
}
|
||||||
36
completions/dust.fish
Normal file
@@ -0,0 +1,36 @@
complete -c dust -s d -l depth -d 'Depth to show' -r
complete -c dust -s T -l threads -d 'Number of threads to use' -r
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this name' -r -F
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ' -r
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$" ' -r
complete -c dust -s w -l terminal_width -d 'Specify width of output overriding the auto detection of terminal width' -r
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "{si '',b '',k '',m '',g '',t '',kb '',mb '',gb '',tb ''}"
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)' -r
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'
complete -c dust -s s -l apparent-size -d 'Use file length instead of blocks'
complete -c dust -s r -l reverse -d 'Print tree upside down (biggest highest)'
complete -c dust -s c -l no-colors -d 'No colors will be printed (Useful for commands like: watch)'
complete -c dust -s C -l force-colors -d 'Force colors print'
complete -c dust -s b -l no-percent-bars -d 'No percent bars or percentages will be displayed'
complete -c dust -s B -l bars-on-right -d 'percent bars moved to right side of screen'
complete -c dust -s R -l screen-reader -d 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
complete -c dust -l skip-total -d 'No total row will be displayed'
complete -c dust -s f -l filecount -d 'Directory \'size\' is number of child files instead of disk size'
complete -c dust -s i -l ignore_hidden -d 'Do not display hidden files'
complete -c dust -s t -l file_types -d 'show only these file types'
complete -c dust -s P -l no-progress -d 'Disable the progress indication.'
complete -c dust -l print-errors -d 'Print path with errors.'
complete -c dust -s D -l only-dir -d 'Only directories will be displayed.'
complete -c dust -s F -l only-file -d 'Only files will be displayed. (Finds your largest files)'
complete -c dust -s j -l output-json -d 'Output the directory tree as json to the current directory'
complete -c dust -s h -l help -d 'Print help'
complete -c dust -s V -l version -d 'Print version'
28
config/config.toml
Normal file
@@ -0,0 +1,28 @@
# Sample Config file, works with toml and yaml
# Place in either:
# ~/.config/dust/config.toml
# ~/.dust.toml

# Print tree upside down (biggest highest)
reverse=true

# Subdirectories will not have their path shortened
display-full-paths=true

# Use file length instead of blocks
display-apparent-size=true

# No colors will be printed
no-colors=true

# No percent bars or percentages will be displayed
no-bars=true

# No total row will be displayed
skip-total=true

# Do not display hidden files
ignore-hidden=true

# print sizes in powers of 1000 (e.g., 1.1G)
iso=true
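dust looks for this file in the locations listed in the comments at the top (the same paths appear in get_config_locations further down in src/config.rs). A quick way to try the sample from a source checkout, kept as an illustrative sketch rather than an instruction from the diff, could be:

    # copy the sample into the XDG location dust checks
    mkdir -p ~/.config/dust
    cp config/config.toml ~/.config/dust/config.toml
    dust   # now runs with reverse, full paths, apparent size, etc.

For value options such as depth or min-size, a flag given on the command line takes precedence over the config file (see get_depth and _get_min_size later in this diff).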
127
man-page/dust.1
Normal file
@@ -0,0 +1,127 @@
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.TH Dust 1 "Dust 1.1.1"
.SH NAME
Dust \- Like du but more intuitive
.SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
.SH DESCRIPTION
Like du but more intuitive
.SH OPTIONS
.TP
\fB\-d\fR, \fB\-\-depth\fR=\fIDEPTH\fR
Depth to show
.TP
\fB\-T\fR, \fB\-\-threads\fR
Number of threads to use
.TP
\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
Number of lines of output to show. (Default is terminal_height \- 10)
.TP
\fB\-p\fR, \fB\-\-full\-paths\fR
Subdirectories will not have their path shortened
.TP
\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
Exclude any file or directory with this name
.TP
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
.TP
\fB\-L\fR, \fB\-\-dereference\-links\fR
dereference sym links \- Treat sym links as directories and go into them
.TP
\fB\-x\fR, \fB\-\-limit\-filesystem\fR
Only count the files and directories on the same filesystem as the supplied directory
.TP
\fB\-s\fR, \fB\-\-apparent\-size\fR
Use file length instead of blocks
.TP
\fB\-r\fR, \fB\-\-reverse\fR
Print tree upside down (biggest highest)
.TP
\fB\-c\fR, \fB\-\-no\-colors\fR
No colors will be printed (Useful for commands like: watch)
.TP
\fB\-C\fR, \fB\-\-force\-colors\fR
Force colors print
.TP
\fB\-b\fR, \fB\-\-no\-percent\-bars\fR
No percent bars or percentages will be displayed
.TP
\fB\-B\fR, \fB\-\-bars\-on\-right\fR
percent bars moved to right side of screen
.TP
\fB\-z\fR, \fB\-\-min\-size\fR=\fIMIN_SIZE\fR
Minimum size file to include in output
.TP
\fB\-R\fR, \fB\-\-screen\-reader\fR
For screen readers. Removes bars. Adds new column: depth level (May want to use \-p too for full path)
.TP
\fB\-\-skip\-total\fR
No total row will be displayed
.TP
\fB\-f\fR, \fB\-\-filecount\fR
Directory \*(Aqsize\*(Aq is number of child files instead of disk size
.TP
\fB\-i\fR, \fB\-\-ignore_hidden\fR
Do not display hidden files
.TP
\fB\-v\fR, \fB\-\-invert\-filter\fR=\fIREGEX\fR
Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$"
.TP
\fB\-e\fR, \fB\-\-filter\fR=\fIREGEX\fR
Only include filepaths matching this regex. For png files type: \-e "\\.png$"
.TP
\fB\-t\fR, \fB\-\-file_types\fR
show only these file types
.TP
\fB\-w\fR, \fB\-\-terminal_width\fR=\fIWIDTH\fR
Specify width of output overriding the auto detection of terminal width
.TP
\fB\-P\fR, \fB\-\-no\-progress\fR
Disable the progress indication.
.TP
\fB\-\-print\-errors\fR
Print path with errors.
.TP
\fB\-D\fR, \fB\-\-only\-dir\fR
Only directories will be displayed.
.TP
\fB\-F\fR, \fB\-\-only\-file\fR
Only files will be displayed. (Finds your largest files)
.TP
\fB\-o\fR, \fB\-\-output\-format\fR=\fIFORMAT\fR
Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.
.br

.br
[\fIpossible values: \fRsi, b, k, m, g, t, kb, mb, gb, tb]
.TP
\fB\-S\fR, \fB\-\-stack\-size\fR=\fISTACK_SIZE\fR
Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824)
.TP
\fB\-j\fR, \fB\-\-output\-json\fR
Output the directory tree as json to the current directory
.TP
\fB\-M\fR, \fB\-\-mtime\fR
+/\-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and \-n => (𝑐𝑢𝑟𝑟−𝑛, +∞)
.TP
\fB\-A\fR, \fB\-\-atime\fR
just like \-mtime, but based on file access time
.TP
\fB\-y\fR, \fB\-\-ctime\fR
just like \-mtime, but based on file change time
.TP
\fB\-\-files0\-from\fR
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
.TP
\fB\-h\fR, \fB\-\-help\fR
Print help
.TP
\fB\-V\fR, \fB\-\-version\fR
Print version
.TP
[\fIPATH\fR]

.SH VERSION
v1.1.1
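The mtime/atime/ctime filters described above measure whole days back from midnight of the current day (get_current_date_epoch_seconds in src/config.rs truncates "now" to the start of the day). As an illustrative reading of the interval notation, with n = 7, the invocations below are examples for interpreting the text, not commands taken from this diff:

    dust -M 7      # exactly 7 days old: modified within [curr-8d, curr-7d)
    dust -M +7     # strictly older: modified before curr-8d
    dust -M -7     # newer: modified after curr-7d
    dust -A -1 -F  # same idea for access time: files accessed since yesterday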
297
src/cli.rs
Normal file
@@ -0,0 +1,297 @@
use clap::{builder::PossibleValue, value_parser, Arg, Command};
|
||||||
|
|
||||||
|
// For single thread mode set this variable on your command line:
|
||||||
|
// export RAYON_NUM_THREADS=1
|
||||||
|
|
||||||
|
pub fn build_cli() -> Command {
|
||||||
|
Command::new("Dust")
|
||||||
|
.about("Like du but more intuitive")
|
||||||
|
.version(env!("CARGO_PKG_VERSION"))
|
||||||
|
.arg(
|
||||||
|
Arg::new("depth")
|
||||||
|
.short('d')
|
||||||
|
.long("depth")
|
||||||
|
.value_name("DEPTH")
|
||||||
|
.value_parser(value_parser!(usize))
|
||||||
|
.help("Depth to show")
|
||||||
|
.num_args(1)
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("threads")
|
||||||
|
.short('T')
|
||||||
|
.long("threads")
|
||||||
|
.value_parser(value_parser!(usize))
|
||||||
|
.help("Number of threads to use")
|
||||||
|
.num_args(1)
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("number_of_lines")
|
||||||
|
.short('n')
|
||||||
|
.long("number-of-lines")
|
||||||
|
.value_name("NUMBER")
|
||||||
|
.value_parser(value_parser!(usize))
|
||||||
|
.help("Number of lines of output to show. (Default is terminal_height - 10)")
|
||||||
|
.num_args(1)
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("display_full_paths")
|
||||||
|
.short('p')
|
||||||
|
.long("full-paths")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Subdirectories will not have their path shortened"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("ignore_directory")
|
||||||
|
.short('X')
|
||||||
|
.long("ignore-directory")
|
||||||
|
.value_name("PATH")
|
||||||
|
.value_hint(clap::ValueHint::AnyPath)
|
||||||
|
.action(clap::ArgAction::Append)
|
||||||
|
.help("Exclude any file or directory with this name"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("ignore_all_in_file")
|
||||||
|
.short('I')
|
||||||
|
.long("ignore-all-in-file")
|
||||||
|
.value_name("FILE")
|
||||||
|
.value_hint(clap::ValueHint::FilePath)
|
||||||
|
.value_parser(value_parser!(String))
|
||||||
|
.help("Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("dereference_links")
|
||||||
|
.short('L')
|
||||||
|
.long("dereference-links")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("dereference sym links - Treat sym links as directories and go into them"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("limit_filesystem")
|
||||||
|
.short('x')
|
||||||
|
.long("limit-filesystem")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Only count the files and directories on the same filesystem as the supplied directory"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("display_apparent_size")
|
||||||
|
.short('s')
|
||||||
|
.long("apparent-size")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Use file length instead of blocks"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("reverse")
|
||||||
|
.short('r')
|
||||||
|
.long("reverse")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Print tree upside down (biggest highest)"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("no_colors")
|
||||||
|
.short('c')
|
||||||
|
.long("no-colors")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("No colors will be printed (Useful for commands like: watch)"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("force_colors")
|
||||||
|
.short('C')
|
||||||
|
.long("force-colors")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Force colors print"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("no_bars")
|
||||||
|
.short('b')
|
||||||
|
.long("no-percent-bars")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("No percent bars or percentages will be displayed"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("bars_on_right")
|
||||||
|
.short('B')
|
||||||
|
.long("bars-on-right")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("percent bars moved to right side of screen"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("min_size")
|
||||||
|
.short('z')
|
||||||
|
.long("min-size")
|
||||||
|
.value_name("MIN_SIZE")
|
||||||
|
.num_args(1)
|
||||||
|
.help("Minimum size file to include in output"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("screen_reader")
|
||||||
|
.short('R')
|
||||||
|
.long("screen-reader")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("skip_total")
|
||||||
|
.long("skip-total")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("No total row will be displayed"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("by_filecount")
|
||||||
|
.short('f')
|
||||||
|
.long("filecount")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Directory 'size' is number of child files instead of disk size"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("ignore_hidden")
|
||||||
|
.short('i') // Do not use 'h' this is used by 'help'
|
||||||
|
.long("ignore_hidden")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Do not display hidden files"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("invert_filter")
|
||||||
|
.short('v')
|
||||||
|
.long("invert-filter")
|
||||||
|
.value_name("REGEX")
|
||||||
|
.action(clap::ArgAction::Append)
|
||||||
|
.conflicts_with("filter")
|
||||||
|
.conflicts_with("types")
|
||||||
|
.help("Exclude filepaths matching this regex. To ignore png files type: -v \"\\.png$\" "),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("filter")
|
||||||
|
.short('e')
|
||||||
|
.long("filter")
|
||||||
|
.value_name("REGEX")
|
||||||
|
.action(clap::ArgAction::Append)
|
||||||
|
.conflicts_with("types")
|
||||||
|
.help("Only include filepaths matching this regex. For png files type: -e \"\\.png$\" "),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("types")
|
||||||
|
.short('t')
|
||||||
|
.long("file_types")
|
||||||
|
.conflicts_with("depth")
|
||||||
|
.conflicts_with("only_dir")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("show only these file types"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("width")
|
||||||
|
.short('w')
|
||||||
|
.long("terminal_width")
|
||||||
|
.value_name("WIDTH")
|
||||||
|
.value_parser(value_parser!(usize))
|
||||||
|
.num_args(1)
|
||||||
|
.help("Specify width of output overriding the auto detection of terminal width"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("disable_progress")
|
||||||
|
.short('P')
|
||||||
|
.long("no-progress")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Disable the progress indication."),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("print_errors")
|
||||||
|
.long("print-errors")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Print path with errors."),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("only_dir")
|
||||||
|
.short('D')
|
||||||
|
.long("only-dir")
|
||||||
|
.conflicts_with("only_file")
|
||||||
|
.conflicts_with("types")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Only directories will be displayed."),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("only_file")
|
||||||
|
.short('F')
|
||||||
|
.long("only-file")
|
||||||
|
.conflicts_with("only_dir")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Only files will be displayed. (Finds your largest files)"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("output_format")
|
||||||
|
.short('o')
|
||||||
|
.long("output-format")
|
||||||
|
.value_name("FORMAT")
|
||||||
|
.value_parser([
|
||||||
|
PossibleValue::new("si"),
|
||||||
|
PossibleValue::new("b"),
|
||||||
|
PossibleValue::new("k").alias("kib"),
|
||||||
|
PossibleValue::new("m").alias("mib"),
|
||||||
|
PossibleValue::new("g").alias("gib"),
|
||||||
|
PossibleValue::new("t").alias("tib"),
|
||||||
|
PossibleValue::new("kb"),
|
||||||
|
PossibleValue::new("mb"),
|
||||||
|
PossibleValue::new("gb"),
|
||||||
|
PossibleValue::new("tb"),
|
||||||
|
])
|
||||||
|
.ignore_case(true)
|
||||||
|
.help("Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.")
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("stack_size")
|
||||||
|
.short('S')
|
||||||
|
.long("stack-size")
|
||||||
|
.value_name("STACK_SIZE")
|
||||||
|
.value_parser(value_parser!(usize))
|
||||||
|
.num_args(1)
|
||||||
|
.help("Specify memory to use as stack size - use if you see: 'fatal runtime error: stack overflow' (default low memory=1048576, high memory=1073741824)"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("params")
|
||||||
|
.value_name("PATH")
|
||||||
|
.value_hint(clap::ValueHint::AnyPath)
|
||||||
|
.value_parser(value_parser!(String))
|
||||||
|
.num_args(1..)
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("output_json")
|
||||||
|
.short('j')
|
||||||
|
.long("output-json")
|
||||||
|
.action(clap::ArgAction::SetTrue)
|
||||||
|
.help("Output the directory tree as json to the current directory"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("mtime")
|
||||||
|
.short('M')
|
||||||
|
.long("mtime")
|
||||||
|
.num_args(1)
|
||||||
|
.allow_hyphen_values(true)
|
||||||
|
.value_parser(value_parser!(String))
|
||||||
|
.help("+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)")
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("atime")
|
||||||
|
.short('A')
|
||||||
|
.long("atime")
|
||||||
|
.num_args(1)
|
||||||
|
.allow_hyphen_values(true)
|
||||||
|
.value_parser(value_parser!(String))
|
||||||
|
.help("just like -mtime, but based on file access time")
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("ctime")
|
||||||
|
.short('y')
|
||||||
|
.long("ctime")
|
||||||
|
.num_args(1)
|
||||||
|
.allow_hyphen_values(true)
|
||||||
|
.value_parser(value_parser!(String))
|
||||||
|
.help("just like -mtime, but based on file change time")
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("files0_from")
|
||||||
|
.long("files0-from")
|
||||||
|
.value_hint(clap::ValueHint::AnyPath)
|
||||||
|
.value_parser(value_parser!(String))
|
||||||
|
.num_args(1)
|
||||||
|
.help("run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input"),
|
||||||
|
)
|
||||||
|
}
|
||||||
341
src/config.rs
Normal file
@@ -0,0 +1,341 @@
use chrono::{Local, TimeZone};
|
||||||
|
use clap::ArgMatches;
|
||||||
|
use config_file::FromConfigFile;
|
||||||
|
use regex::Regex;
|
||||||
|
use serde::Deserialize;
|
||||||
|
use std::io::IsTerminal;
|
||||||
|
use std::path::Path;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use crate::dir_walker::Operater;
|
||||||
|
use crate::display::get_number_format;
|
||||||
|
|
||||||
|
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
|
||||||
|
|
||||||
|
#[derive(Deserialize, Default)]
|
||||||
|
#[serde(rename_all = "kebab-case")]
|
||||||
|
#[serde(deny_unknown_fields)]
|
||||||
|
pub struct Config {
|
||||||
|
pub display_full_paths: Option<bool>,
|
||||||
|
pub display_apparent_size: Option<bool>,
|
||||||
|
pub reverse: Option<bool>,
|
||||||
|
pub no_colors: Option<bool>,
|
||||||
|
pub force_colors: Option<bool>,
|
||||||
|
pub no_bars: Option<bool>,
|
||||||
|
pub skip_total: Option<bool>,
|
||||||
|
pub screen_reader: Option<bool>,
|
||||||
|
pub ignore_hidden: Option<bool>,
|
||||||
|
pub output_format: Option<String>,
|
||||||
|
pub min_size: Option<String>,
|
||||||
|
pub only_dir: Option<bool>,
|
||||||
|
pub only_file: Option<bool>,
|
||||||
|
pub disable_progress: Option<bool>,
|
||||||
|
pub depth: Option<usize>,
|
||||||
|
pub bars_on_right: Option<bool>,
|
||||||
|
pub stack_size: Option<usize>,
|
||||||
|
pub threads: Option<usize>,
|
||||||
|
pub output_json: Option<bool>,
|
||||||
|
pub print_errors: Option<bool>,
|
||||||
|
pub files0_from: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Config {
|
||||||
|
pub fn get_files_from(&self, options: &ArgMatches) -> Option<String> {
|
||||||
|
let from_file = options.get_one::<String>("files0_from");
|
||||||
|
match from_file {
|
||||||
|
None => self.files0_from.as_ref().map(|x| x.to_string()),
|
||||||
|
Some(x) => Some(x.to_string()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn get_no_colors(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.no_colors || options.get_flag("no_colors")
|
||||||
|
}
|
||||||
|
pub fn get_force_colors(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.force_colors || options.get_flag("force_colors")
|
||||||
|
}
|
||||||
|
pub fn get_disable_progress(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.disable_progress
|
||||||
|
|| options.get_flag("disable_progress")
|
||||||
|
|| !std::io::stdout().is_terminal()
|
||||||
|
}
|
||||||
|
pub fn get_apparent_size(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.display_apparent_size || options.get_flag("display_apparent_size")
|
||||||
|
}
|
||||||
|
pub fn get_ignore_hidden(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.ignore_hidden || options.get_flag("ignore_hidden")
|
||||||
|
}
|
||||||
|
pub fn get_full_paths(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.display_full_paths || options.get_flag("display_full_paths")
|
||||||
|
}
|
||||||
|
pub fn get_reverse(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.reverse || options.get_flag("reverse")
|
||||||
|
}
|
||||||
|
pub fn get_no_bars(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.no_bars || options.get_flag("no_bars")
|
||||||
|
}
|
||||||
|
pub fn get_output_format(&self, options: &ArgMatches) -> String {
|
||||||
|
let out_fmt = options.get_one::<String>("output_format");
|
||||||
|
(match out_fmt {
|
||||||
|
None => match &self.output_format {
|
||||||
|
None => "".to_string(),
|
||||||
|
Some(x) => x.to_string(),
|
||||||
|
},
|
||||||
|
Some(x) => x.into(),
|
||||||
|
})
|
||||||
|
.to_lowercase()
|
||||||
|
}
|
||||||
|
pub fn get_skip_total(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.skip_total || options.get_flag("skip_total")
|
||||||
|
}
|
||||||
|
pub fn get_screen_reader(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.screen_reader || options.get_flag("screen_reader")
|
||||||
|
}
|
||||||
|
pub fn get_depth(&self, options: &ArgMatches) -> usize {
|
||||||
|
if let Some(v) = options.get_one::<usize>("depth") {
|
||||||
|
return *v;
|
||||||
|
}
|
||||||
|
|
||||||
|
self.depth.unwrap_or(usize::MAX)
|
||||||
|
}
|
||||||
|
pub fn get_min_size(&self, options: &ArgMatches) -> Option<usize> {
|
||||||
|
let size_from_param = options.get_one::<String>("min_size");
|
||||||
|
self._get_min_size(size_from_param)
|
||||||
|
}
|
||||||
|
fn _get_min_size(&self, min_size: Option<&String>) -> Option<usize> {
|
||||||
|
let size_from_param = min_size.and_then(|a| convert_min_size(a));
|
||||||
|
|
||||||
|
if size_from_param.is_none() {
|
||||||
|
self.min_size
|
||||||
|
.as_ref()
|
||||||
|
.and_then(|a| convert_min_size(a.as_ref()))
|
||||||
|
} else {
|
||||||
|
size_from_param
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn get_only_dir(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.only_dir || options.get_flag("only_dir")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_print_errors(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.print_errors || options.get_flag("print_errors")
|
||||||
|
}
|
||||||
|
pub fn get_only_file(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.only_file || options.get_flag("only_file")
|
||||||
|
}
|
||||||
|
pub fn get_bars_on_right(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.bars_on_right || options.get_flag("bars_on_right")
|
||||||
|
}
|
||||||
|
pub fn get_custom_stack_size(&self, options: &ArgMatches) -> Option<usize> {
|
||||||
|
let from_cmd_line = options.get_one::<usize>("stack_size");
|
||||||
|
if from_cmd_line.is_none() {
|
||||||
|
self.stack_size
|
||||||
|
} else {
|
||||||
|
from_cmd_line.copied()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn get_threads(&self, options: &ArgMatches) -> Option<usize> {
|
||||||
|
let from_cmd_line = options.get_one::<usize>("threads");
|
||||||
|
if from_cmd_line.is_none() {
|
||||||
|
self.threads
|
||||||
|
} else {
|
||||||
|
from_cmd_line.copied()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn get_output_json(&self, options: &ArgMatches) -> bool {
|
||||||
|
Some(true) == self.output_json || options.get_flag("output_json")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_modified_time_operator(&self, options: &ArgMatches) -> Option<(Operater, i64)> {
|
||||||
|
get_filter_time_operator(
|
||||||
|
options.get_one::<String>("mtime"),
|
||||||
|
get_current_date_epoch_seconds(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_accessed_time_operator(&self, options: &ArgMatches) -> Option<(Operater, i64)> {
|
||||||
|
get_filter_time_operator(
|
||||||
|
options.get_one::<String>("atime"),
|
||||||
|
get_current_date_epoch_seconds(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_created_time_operator(&self, options: &ArgMatches) -> Option<(Operater, i64)> {
|
||||||
|
get_filter_time_operator(
|
||||||
|
options.get_one::<String>("ctime"),
|
||||||
|
get_current_date_epoch_seconds(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_current_date_epoch_seconds() -> i64 {
|
||||||
|
// calculate current date epoch seconds
|
||||||
|
let now = Local::now();
|
||||||
|
let current_date = now.date_naive();
|
||||||
|
|
||||||
|
let current_date_time = current_date.and_hms_opt(0, 0, 0).unwrap();
|
||||||
|
Local
|
||||||
|
.from_local_datetime(¤t_date_time)
|
||||||
|
.unwrap()
|
||||||
|
.timestamp()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_filter_time_operator(
|
||||||
|
option_value: Option<&String>,
|
||||||
|
current_date_epoch_seconds: i64,
|
||||||
|
) -> Option<(Operater, i64)> {
|
||||||
|
match option_value {
|
||||||
|
Some(val) => {
|
||||||
|
let time = current_date_epoch_seconds
|
||||||
|
- val
|
||||||
|
.parse::<i64>()
|
||||||
|
.unwrap_or_else(|_| panic!("invalid data format"))
|
||||||
|
.abs()
|
||||||
|
* DAY_SECONDS;
|
||||||
|
match val.chars().next().expect("Value should not be empty") {
|
||||||
|
'+' => Some((Operater::LessThan, time - DAY_SECONDS)),
|
||||||
|
'-' => Some((Operater::GreaterThan, time)),
|
||||||
|
_ => Some((Operater::Equal, time - DAY_SECONDS)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn convert_min_size(input: &str) -> Option<usize> {
|
||||||
|
let re = Regex::new(r"([0-9]+)(\w*)").unwrap();
|
||||||
|
|
||||||
|
if let Some(cap) = re.captures(input) {
|
||||||
|
let (_, [digits, letters]) = cap.extract();
|
||||||
|
|
||||||
|
// Failure to parse should be impossible due to regex match
|
||||||
|
let digits_as_usize: Option<usize> = digits.parse().ok();
|
||||||
|
|
||||||
|
match digits_as_usize {
|
||||||
|
Some(parsed_digits) => {
|
||||||
|
let number_format = get_number_format(&letters.to_lowercase());
|
||||||
|
match number_format {
|
||||||
|
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
|
||||||
|
None => {
|
||||||
|
if letters.eq("") {
|
||||||
|
Some(parsed_digits)
|
||||||
|
} else {
|
||||||
|
eprintln!("Ignoring invalid min-size: {input}");
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None => None,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_config_locations(base: &Path) -> Vec<PathBuf> {
|
||||||
|
vec![
|
||||||
|
base.join(".dust.toml"),
|
||||||
|
base.join(".config").join("dust").join("config.toml"),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_config() -> Config {
|
||||||
|
if let Some(home) = directories::BaseDirs::new() {
|
||||||
|
for path in get_config_locations(home.home_dir()) {
|
||||||
|
if path.exists() {
|
||||||
|
if let Ok(config) = Config::from_config_file(path) {
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Config {
|
||||||
|
..Default::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
#[allow(unused_imports)]
|
||||||
|
use super::*;
|
||||||
|
use chrono::{Datelike, Timelike};
|
||||||
|
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_get_current_date_epoch_seconds() {
|
||||||
|
let epoch_seconds = get_current_date_epoch_seconds();
|
||||||
|
let dt = Local.timestamp_opt(epoch_seconds, 0).unwrap();
|
||||||
|
|
||||||
|
assert_eq!(dt.hour(), 0);
|
||||||
|
assert_eq!(dt.minute(), 0);
|
||||||
|
assert_eq!(dt.second(), 0);
|
||||||
|
assert_eq!(dt.date_naive().day(), Local::now().date_naive().day());
|
||||||
|
assert_eq!(dt.date_naive().month(), Local::now().date_naive().month());
|
||||||
|
assert_eq!(dt.date_naive().year(), Local::now().date_naive().year());
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_conversion() {
|
||||||
|
assert_eq!(convert_min_size("55"), Some(55));
|
||||||
|
assert_eq!(convert_min_size("12344321"), Some(12344321));
|
||||||
|
assert_eq!(convert_min_size("95RUBBISH"), None);
|
||||||
|
assert_eq!(convert_min_size("10Ki"), Some(10 * 1024));
|
||||||
|
assert_eq!(convert_min_size("10MiB"), Some(10 * 1024usize.pow(2)));
|
||||||
|
assert_eq!(convert_min_size("10M"), Some(10 * 1024usize.pow(2)));
|
||||||
|
assert_eq!(convert_min_size("10Mb"), Some(10 * 1000usize.pow(2)));
|
||||||
|
assert_eq!(convert_min_size("2Gi"), Some(2 * 1024usize.pow(3)));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_min_size_from_config_applied_or_overridden() {
|
||||||
|
let c = Config {
|
||||||
|
min_size: Some("1KiB".to_owned()),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
assert_eq!(c._get_min_size(None), Some(1024));
|
||||||
|
assert_eq!(c._get_min_size(Some(&"2KiB".into())), Some(2048));
|
||||||
|
|
||||||
|
assert_eq!(c._get_min_size(Some(&"1kb".into())), Some(1000));
|
||||||
|
assert_eq!(c._get_min_size(Some(&"2KB".into())), Some(2000));
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_get_depth() {
|
||||||
|
// No config and no flag.
|
||||||
|
let c = Config::default();
|
||||||
|
let args = get_args(vec![]);
|
||||||
|
assert_eq!(c.get_depth(&args), usize::MAX);
|
||||||
|
|
||||||
|
// Config is not defined and flag is defined.
|
||||||
|
let c = Config::default();
|
||||||
|
let args = get_args(vec!["dust", "--depth", "5"]);
|
||||||
|
assert_eq!(c.get_depth(&args), 5);
|
||||||
|
|
||||||
|
// Config is defined and flag is not defined.
|
||||||
|
let c = Config {
|
||||||
|
depth: Some(3),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let args = get_args(vec![]);
|
||||||
|
assert_eq!(c.get_depth(&args), 3);
|
||||||
|
|
||||||
|
// Both config and flag are defined.
|
||||||
|
let c = Config {
|
||||||
|
depth: Some(3),
|
||||||
|
..Default::default()
|
||||||
|
};
|
||||||
|
let args = get_args(vec!["dust", "--depth", "5"]);
|
||||||
|
assert_eq!(c.get_depth(&args), 5);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_args(args: Vec<&str>) -> ArgMatches {
|
||||||
|
Command::new("Dust")
|
||||||
|
.arg(
|
||||||
|
Arg::new("depth")
|
||||||
|
.long("depth")
|
||||||
|
.num_args(1)
|
||||||
|
.value_parser(value_parser!(usize)),
|
||||||
|
)
|
||||||
|
.get_matches_from(args)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,14 @@
+use std::cmp::Ordering;
 use std::fs;
+use std::sync::Arc;
+use std::sync::Mutex;
 
 use crate::node::Node;
+use crate::progress::Operation;
+use crate::progress::PAtomicInfo;
+use crate::progress::RuntimeErrors;
+use crate::progress::ORDERING;
+use crate::utils::is_filtered_out_due_to_file_time;
 use crate::utils::is_filtered_out_due_to_invert_regex;
 use crate::utils::is_filtered_out_due_to_regex;
 use rayon::iter::ParallelBridge;
@@ -8,9 +16,6 @@ use rayon::prelude::ParallelIterator;
 use regex::Regex;
 use std::path::PathBuf;
 
-use std::sync::atomic;
-use std::sync::atomic::AtomicBool;
-
 use std::collections::HashSet;
 
 use crate::node::build_node;
@@ -18,33 +23,44 @@ use std::fs::DirEntry
 
 use crate::platform::get_metadata;
 
-pub struct WalkData {
+#[derive(Debug)]
+pub enum Operater {
+    Equal = 0,
+    LessThan = 1,
+    GreaterThan = 2,
+}
+
+pub struct WalkData<'a> {
     pub ignore_directories: HashSet<PathBuf>,
-    pub filter_regex: Option<Regex>,
-    pub invert_filter_regex: Option<Regex>,
+    pub filter_regex: &'a [Regex],
+    pub invert_filter_regex: &'a [Regex],
     pub allowed_filesystems: HashSet<u64>,
+    pub filter_modified_time: Option<(Operater, i64)>,
+    pub filter_accessed_time: Option<(Operater, i64)>,
+    pub filter_changed_time: Option<(Operater, i64)>,
     pub use_apparent_size: bool,
     pub by_filecount: bool,
     pub ignore_hidden: bool,
+    pub follow_links: bool,
+    pub progress_data: Arc<PAtomicInfo>,
+    pub errors: Arc<Mutex<RuntimeErrors>>,
 }
 
-pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: WalkData) -> (Vec<Node>, bool) {
-    let permissions_flag = AtomicBool::new(false);
+pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
+    let mut inodes = HashSet::new();
 
     let top_level_nodes: Vec<_> = dirs
         .into_iter()
         .filter_map(|d| {
-            let n = walk(d, &permissions_flag, &walk_data);
-            match n {
-                Some(n) => {
-                    let mut inodes: HashSet<(u64, u64)> = HashSet::new();
-                    clean_inodes(n, &mut inodes, walk_data.use_apparent_size)
-                }
-                None => None,
-            }
+            let prog_data = &walk_data.progress_data;
+            prog_data.clear_state(&d);
+            let node = walk(d, walk_data, 0)?;
+
+            prog_data.state.store(Operation::PREPARING, ORDERING);
+            clean_inodes(node, &mut inodes, walk_data.use_apparent_size)
         })
        .collect();
-    (top_level_nodes, permissions_flag.into_inner())
+    top_level_nodes
 }
 
 // Remove files which have the same inode, we don't want to double count them.
@@ -55,25 +71,45 @@ fn clean_inodes(
 ) -> Option<Node> {
     if !use_apparent_size {
         if let Some(id) = x.inode_device {
-            if inodes.contains(&id) {
+            if !inodes.insert(id) {
                 return None;
             }
-            inodes.insert(id);
         }
     }
 
-    let new_children: Vec<_> = x
-        .children
+    // Sort Nodes so iteration order is predictable
+    let mut tmp: Vec<_> = x.children;
+    tmp.sort_by(sort_by_inode);
+    let new_children: Vec<_> = tmp
         .into_iter()
         .filter_map(|c| clean_inodes(c, inodes, use_apparent_size))
         .collect();
 
-    return Some(Node {
+    Some(Node {
         name: x.name,
         size: x.size + new_children.iter().map(|c| c.size).sum::<u64>(),
         children: new_children,
         inode_device: x.inode_device,
-    });
+        depth: x.depth,
+    })
+}
+
+fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
+    // Sorting by inode is quicker than by sorting by name/size
+    match (a.inode_device, b.inode_device) {
+        (Some(x), Some(y)) => {
+            if x.0 != y.0 {
+                x.0.cmp(&y.0)
+            } else if x.1 != y.1 {
+                x.1.cmp(&y.1)
+            } else {
+                a.name.cmp(&b.name)
+            }
+        }
+        (Some(_), None) => Ordering::Greater,
+        (None, Some(_)) => Ordering::Less,
+        (None, None) => a.name.cmp(&b.name),
    }
 }
 
 fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
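clean_inodes keeps only the first Node seen for each (inode, device) pair, so hard-linked files are not double counted, and sort_by_inode makes the visit order deterministic before deduplication. The same deduplication idea in a self-contained form (the tuple layout here is illustrative, not dust's Node type):

use std::collections::HashSet;

// Count each (inode, device) pair once, the way clean_inodes deduplicates hard links.
// Tuples are (inode, device, size).
fn total_unique_size(files: &[(u64, u64, u64)]) -> u64 {
    let mut seen: HashSet<(u64, u64)> = HashSet::new();
    files
        .iter()
        .filter(|(inode, dev, _)| seen.insert((*inode, *dev)))
        .map(|(_, _, size)| *size)
        .sum()
}

fn main() {
    // Two directory entries that are hard links to the same underlying file.
    let files = [(42, 1, 1024), (42, 1, 1024), (7, 1, 512)];
    assert_eq!(total_unique_size(&files), 1536);
}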
@@ -81,26 +117,46 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
     let is_ignored_path = walk_data.ignore_directories.contains(&entry.path());
 
     if !walk_data.allowed_filesystems.is_empty() {
-        let size_inode_device = get_metadata(&entry.path(), false);
-        if let Some((_size, Some((_id, dev)))) = size_inode_device {
+        let size_inode_device = get_metadata(entry.path(), false);
+        if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
             if !walk_data.allowed_filesystems.contains(&dev) {
                 return true;
             }
         }
     }
+    if walk_data.filter_accessed_time.is_some()
+        || walk_data.filter_modified_time.is_some()
+        || walk_data.filter_changed_time.is_some()
+    {
+        let size_inode_device = get_metadata(entry.path(), false);
+        if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
+            if entry.path().is_file()
+                && [
+                    (&walk_data.filter_modified_time, modified_time),
+                    (&walk_data.filter_accessed_time, accessed_time),
+                    (&walk_data.filter_changed_time, changed_time),
+                ]
+                .iter()
+                .any(|(filter_time, actual_time)| {
+                    is_filtered_out_due_to_file_time(filter_time, *actual_time)
+                })
+            {
+                return true;
+            }
+        }
+    }
 
-    // Keeping `walk_data.filter_regex.is_some()` is important for performance reasons, it stops unnecessary work
-    if walk_data.filter_regex.is_some()
+    // Keeping `walk_data.filter_regex.is_empty()` is important for performance reasons, it stops unnecessary work
+    if !walk_data.filter_regex.is_empty()
         && entry.path().is_file()
-        && is_filtered_out_due_to_regex(&walk_data.filter_regex, &entry.path())
+        && is_filtered_out_due_to_regex(walk_data.filter_regex, &entry.path())
     {
         return true;
     }
 
-    if walk_data.invert_filter_regex.is_some()
+    if !walk_data.invert_filter_regex.is_empty()
         && entry.path().is_file()
-        && is_filtered_out_due_to_invert_regex(&walk_data.invert_filter_regex, &entry.path())
+        && is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &entry.path())
     {
         return true;
     }
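The new branch above fetches (modified, accessed, changed) times from get_metadata and skips a file if any configured (Operater, timestamp) filter rejects it. The actual comparison lives in utils::is_filtered_out_due_to_file_time, which this diff does not show; the sketch below is one plausible reading of such a filter, for illustration only:

// Illustrative only: one possible semantics for an (operator, cutoff) time filter.
#[allow(dead_code)]
#[derive(Debug)]
enum Op {
    Equal,
    LessThan,
    GreaterThan,
}

// Returns true when the file's timestamp fails the filter and should be skipped.
fn filtered_out(filter: &Option<(Op, i64)>, actual_time: i64) -> bool {
    match filter {
        None => false,
        Some((Op::Equal, t)) => actual_time != *t,
        Some((Op::LessThan, t)) => actual_time >= *t,
        Some((Op::GreaterThan, t)) => actual_time <= *t,
    }
}

fn main() {
    let newer_than = Some((Op::GreaterThan, 1_700_000_000));
    assert!(filtered_out(&newer_than, 1_600_000_000)); // too old, skipped
    assert!(!filtered_out(&newer_than, 1_800_000_000)); // new enough, kept
}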
@@ -108,60 +164,98 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
     (is_dot_file && walk_data.ignore_hidden) || is_ignored_path
 }
 
-fn walk(dir: PathBuf, permissions_flag: &AtomicBool, walk_data: &WalkData) -> Option<Node> {
-    let mut children = vec![];
-    if let Ok(entries) = fs::read_dir(dir.clone()) {
-        children = entries
-            .into_iter()
-            .par_bridge()
-            .filter_map(|entry| {
-                if let Ok(ref entry) = entry {
-                    // uncommenting the below line gives simpler code but
-                    // rayon doesn't parallelise as well giving a 3X performance drop
-                    // hence we unravel the recursion a bit
-
-                    // return walk(entry.path(), permissions_flag, ignore_directories, allowed_filesystems, use_apparent_size, by_filecount, ignore_hidden);
-
-                    if !ignore_file(entry, walk_data) {
-                        if let Ok(data) = entry.file_type() {
-                            if data.is_dir() && !data.is_symlink() {
-                                return walk(entry.path(), permissions_flag, walk_data);
-                            }
-                            return build_node(
-                                entry.path(),
-                                vec![],
-                                &walk_data.filter_regex,
-                                &walk_data.invert_filter_regex,
-                                walk_data.use_apparent_size,
-                                data.is_symlink(),
-                                data.is_file(),
-                                walk_data.by_filecount,
-                            );
-                        }
-                    }
-                } else {
-                    permissions_flag.store(true, atomic::Ordering::Relaxed);
-                }
-                None
-            })
-            .collect();
-    } else {
-        permissions_flag.store(true, atomic::Ordering::Relaxed);
+fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
+    let prog_data = &walk_data.progress_data;
+    let errors = &walk_data.errors;
+
+    if errors.lock().unwrap().abort {
+        return None;
     }
-    build_node(
-        dir,
-        children,
-        &walk_data.filter_regex,
-        &walk_data.invert_filter_regex,
-        walk_data.use_apparent_size,
-        false,
-        false,
-        walk_data.by_filecount,
-    )
+
+    let children = if dir.is_dir() {
+        let read_dir = fs::read_dir(&dir);
+        match read_dir {
+            Ok(entries) => {
+                entries
+                    .into_iter()
+                    .par_bridge()
+                    .filter_map(|entry| {
+                        match entry {
+                            Ok(ref entry) => {
+                                // uncommenting the below line gives simpler code but
+                                // rayon doesn't parallelize as well giving a 3X performance drop
+                                // hence we unravel the recursion a bit
+
+                                // return walk(entry.path(), walk_data, depth)
+
+                                if !ignore_file(entry, walk_data) {
+                                    if let Ok(data) = entry.file_type() {
+                                        if data.is_dir()
+                                            || (walk_data.follow_links && data.is_symlink())
+                                        {
+                                            return walk(entry.path(), walk_data, depth + 1);
+                                        }
+
+                                        let node = build_node(
+                                            entry.path(),
+                                            vec![],
+                                            data.is_symlink(),
+                                            data.is_file(),
+                                            depth,
+                                            walk_data,
+                                        );
+
+                                        prog_data.num_files.fetch_add(1, ORDERING);
+                                        if let Some(ref file) = node {
+                                            prog_data
+                                                .total_file_size
+                                                .fetch_add(file.size, ORDERING);
+                                        }
+
+                                        return node;
+                                    }
+                                }
+                            }
+                            Err(ref failed) => {
+                                let mut editable_error = errors.lock().unwrap();
+                                editable_error.no_permissions.insert(failed.to_string());
+                            }
+                        }
+                        None
+                    })
+                    .collect()
+            }
+            Err(failed) => {
+                let mut editable_error = errors.lock().unwrap();
+                match failed.kind() {
+                    std::io::ErrorKind::PermissionDenied => {
+                        editable_error
+                            .no_permissions
+                            .insert(dir.to_string_lossy().into());
+                    }
+                    std::io::ErrorKind::NotFound => {
+                        editable_error.file_not_found.insert(failed.to_string());
+                    }
+                    _ => {
+                        editable_error.unknown_error.insert(failed.to_string());
+                    }
+                }
+                vec![]
+            }
+        }
+    } else {
+        if !dir.is_file() {
+            let mut editable_error = errors.lock().unwrap();
+            let bad_file = dir.as_os_str().to_string_lossy().into();
+            editable_error.file_not_found.insert(bad_file);
+        }
+        vec![]
+    };
+    build_node(dir, children, false, false, depth, walk_data)
 }
 
 mod tests {
 
     #[allow(unused_imports)]
     use super::*;
 
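walk() reads one directory, bridges the entries onto rayon's thread pool with par_bridge, and recurses into sub-directories from worker threads, which is why the comment above notes that falling back to plain recursion costs roughly 3x. A minimal standalone version of that fan-out pattern, assuming the rayon crate is available and leaving out dust's symlink, progress and error handling:

use rayon::iter::{ParallelBridge, ParallelIterator};
use std::fs;
use std::path::Path;

// Recursively sum apparent file sizes, walking each directory's entries in parallel.
fn parallel_size(dir: &Path) -> u64 {
    let Ok(entries) = fs::read_dir(dir) else { return 0 };
    entries
        .par_bridge()
        .filter_map(|entry| entry.ok())
        .map(|entry| {
            let path = entry.path();
            if path.is_dir() {
                // Note: unlike dust, this naive sketch follows directory symlinks.
                parallel_size(&path)
            } else {
                entry.metadata().map(|m| m.len()).unwrap_or(0)
            }
        })
        .sum()
}

fn main() {
    println!("{} bytes", parallel_size(Path::new(".")));
}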
@@ -172,28 +266,68 @@ mod tests {
             size: 10,
             children: vec![],
             inode_device: Some((5, 6)),
+            depth: 0,
         }
     }
 
     #[test]
+    #[allow(clippy::redundant_clone)]
     fn test_should_ignore_file() {
         let mut inodes = HashSet::new();
         let n = create_node();
 
         // First time we insert the node
-        assert!(clean_inodes(n.clone(), &mut inodes, false) == Some(n.clone()));
+        assert_eq!(clean_inodes(n.clone(), &mut inodes, false), Some(n.clone()));
 
         // Second time is a duplicate - we ignore it
-        assert!(clean_inodes(n.clone(), &mut inodes, false) == None);
+        assert_eq!(clean_inodes(n.clone(), &mut inodes, false), None);
     }
 
     #[test]
+    #[allow(clippy::redundant_clone)]
     fn test_should_not_ignore_files_if_using_apparent_size() {
         let mut inodes = HashSet::new();
         let n = create_node();
 
         // If using apparent size we include Nodes, even if duplicate inodes
-        assert!(clean_inodes(n.clone(), &mut inodes, true) == Some(n.clone()));
-        assert!(clean_inodes(n.clone(), &mut inodes, true) == Some(n.clone()));
+        assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
+        assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
+    }
+
+    #[test]
+    fn test_total_ordering_of_sort_by_inode() {
+        use std::str::FromStr;
+
+        let a = Node {
+            name: PathBuf::from_str("a").unwrap(),
+            size: 0,
+            children: vec![],
+            inode_device: Some((3, 66310)),
+            depth: 0,
+        };
+
+        let b = Node {
+            name: PathBuf::from_str("b").unwrap(),
+            size: 0,
+            children: vec![],
+            inode_device: None,
+            depth: 0,
+        };
+
+        let c = Node {
+            name: PathBuf::from_str("c").unwrap(),
+            size: 0,
+            children: vec![],
+            inode_device: Some((1, 66310)),
+            depth: 0,
+        };
+
+        assert_eq!(sort_by_inode(&a, &b), Ordering::Greater);
+        assert_eq!(sort_by_inode(&a, &c), Ordering::Greater);
+        assert_eq!(sort_by_inode(&c, &b), Ordering::Greater);
+
+        assert_eq!(sort_by_inode(&b, &a), Ordering::Less);
+        assert_eq!(sort_by_inode(&c, &a), Ordering::Less);
+        assert_eq!(sort_by_inode(&b, &c), Ordering::Less);
     }
 }
430  src/display.rs
@@ -1,8 +1,6 @@
-extern crate ansi_term;
-
 use crate::display_node::DisplayNode;
 
-use self::ansi_term::Colour::Red;
+use ansi_term::Colour::Red;
 use lscolors::{LsColors, Style};
 
 use unicode_width::UnicodeWidthStr;
@@ -16,14 +14,21 @@ use std::iter::repeat;
 use std::path::Path;
 use thousands::Separable;
 
-static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
+pub static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
 static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
 
-pub struct DisplayData {
+pub struct InitialDisplayData {
     pub short_paths: bool,
     pub is_reversed: bool,
     pub colors_on: bool,
     pub by_filecount: bool,
+    pub is_screen_reader: bool,
+    pub output_format: String,
+    pub bars_on_right: bool,
+}
+
+pub struct DisplayData {
+    pub initial: InitialDisplayData,
     pub num_chars_needed_on_left_most: usize,
     pub base_size: u64,
     pub longest_string_length: usize,
@@ -32,7 +37,7 @@ pub struct DisplayData {
 
 impl DisplayData {
     fn get_tree_chars(&self, was_i_last: bool, has_children: bool) -> &'static str {
-        match (self.is_reversed, was_i_last, has_children) {
+        match (self.initial.is_reversed, was_i_last, has_children) {
             (true, true, true) => "┌─┴",
             (true, true, false) => "┌──",
             (true, false, true) => "├─┴",
@@ -45,7 +50,7 @@ impl DisplayData {
     }
 
     fn is_biggest(&self, num_siblings: usize, max_siblings: u64) -> bool {
-        if self.is_reversed {
+        if self.initial.is_reversed {
             num_siblings == (max_siblings - 1) as usize
         } else {
             num_siblings == 0
@@ -53,7 +58,7 @@ impl DisplayData {
     }
 
     fn is_last(&self, num_siblings: usize, max_siblings: u64) -> bool {
-        if self.is_reversed {
+        if self.initial.is_reversed {
             num_siblings == 0
         } else {
             num_siblings == (max_siblings - 1) as usize
@@ -84,14 +89,23 @@ impl DrawData<'_> {
 
     // TODO: can we test this?
     fn generate_bar(&self, node: &DisplayNode, level: usize) -> String {
+        if self.display_data.initial.is_screen_reader {
+            return level.to_string();
+        }
         let chars_in_bar = self.percent_bar.chars().count();
         let num_bars = chars_in_bar as f32 * self.display_data.percent_size(node);
         let mut num_not_my_bar = (chars_in_bar as i32) - num_bars as i32;
 
         let mut new_bar = "".to_string();
-        let idx = 5 - min(4, max(1, level));
+        let idx = 5 - level.clamp(1, 4);
 
-        for c in self.percent_bar.chars() {
+        let itr: Box<dyn Iterator<Item = char>> = if self.display_data.initial.bars_on_right {
+            Box::new(self.percent_bar.chars())
+        } else {
+            Box::new(self.percent_bar.chars().rev())
+        };
+
+        for c in itr {
             num_not_my_bar -= 1;
             if num_not_my_bar <= 0 {
                 new_bar.push(BLOCKS[0]);
@@ -101,52 +115,58 @@ impl DrawData<'_> {
                 new_bar.push(c);
             }
         }
-        new_bar
+        if self.display_data.initial.bars_on_right {
+            new_bar
+        } else {
+            new_bar.chars().rev().collect()
+        }
     }
 }
 
-#[allow(clippy::too_many_arguments)]
 pub fn draw_it(
-    use_full_path: bool,
-    is_reversed: bool,
-    no_colors: bool,
-    no_percents: bool,
+    idd: InitialDisplayData,
+    no_percent_bars: bool,
     terminal_width: usize,
-    by_filecount: bool,
-    option_root_node: Option<DisplayNode>,
+    root_node: &DisplayNode,
+    skip_total: bool,
 ) {
-    if option_root_node.is_none() {
-        return;
-    }
-    let root_node = option_root_node.unwrap();
+    let biggest = match skip_total {
+        false => root_node,
+        true => root_node
+            .get_children_from_node(false)
+            .next()
+            .unwrap_or(root_node),
+    };
 
-    let num_chars_needed_on_left_most = if by_filecount {
-        let max_size = root_node.size;
+    let num_chars_needed_on_left_most = if idd.by_filecount {
+        let max_size = biggest.size;
         max_size.separate_with_commas().chars().count()
     } else {
-        5 // Under normal usage we need 5 chars to display the size of a directory
+        find_biggest_size_str(root_node, &idd.output_format)
     };
 
-    let terminal_width = terminal_width - 9 - num_chars_needed_on_left_most;
+    assert!(
+        terminal_width > num_chars_needed_on_left_most + 2,
+        "Not enough terminal width"
+    );
+
+    let allowed_width = terminal_width - num_chars_needed_on_left_most - 2;
     let num_indent_chars = 3;
     let longest_string_length =
-        find_longest_dir_name(&root_node, num_indent_chars, terminal_width, !use_full_path);
+        find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd);
 
-    let max_bar_length = if no_percents || longest_string_length >= terminal_width as usize {
+    let max_bar_length = if no_percent_bars || longest_string_length + 7 >= allowed_width {
         0
     } else {
-        terminal_width as usize - longest_string_length
+        allowed_width - longest_string_length - 7
     };
 
-    let first_size_bar = repeat(BLOCKS[0]).take(max_bar_length).collect::<String>();
+    let first_size_bar = repeat(BLOCKS[0]).take(max_bar_length).collect();
 
     let display_data = DisplayData {
-        short_paths: !use_full_path,
-        is_reversed,
-        colors_on: !no_colors,
-        by_filecount,
+        initial: idd,
         num_chars_needed_on_left_most,
-        base_size: root_node.size,
+        base_size: biggest.size,
         longest_string_length,
         ls_colors: LsColors::from_env().unwrap_or_default(),
     };
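generate_bar paints the parent's bar and overwrites the leading share that belongs to the current node with the solid block, picks a lighter shade per tree level, and reverses the string unless bars_on_right is set. The sketch below shows only the fill-and-pad idea with a simplified level-to-shade mapping; it is an illustration, not the DrawData implementation:

// Illustrative: fill `width` cells with a solid block for `fraction` of the bar,
// padding the rest with a per-level shade, in the spirit of the BLOCKS table above.
const BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];

fn bar(fraction: f32, width: usize, level: usize) -> String {
    let filled = ((fraction * width as f32).round() as usize).min(width);
    let shade = BLOCKS[level.clamp(1, 4)];
    let mut out: String = std::iter::repeat(BLOCKS[0]).take(filled).collect();
    out.extend(std::iter::repeat(shade).take(width - filled));
    out
}

fn main() {
    assert_eq!(bar(0.5, 8, 1).chars().filter(|c| *c == '█').count(), 4);
    println!("{}", bar(0.25, 16, 2));
}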
@@ -155,48 +175,69 @@ pub fn draw_it(
         percent_bar: first_size_bar,
         display_data: &display_data,
     };
-    display_node(root_node, &draw_data, true, true);
+
+    if !skip_total {
+        display_node(root_node, &draw_data, true, true);
+    } else {
+        for (count, c) in root_node
+            .get_children_from_node(draw_data.display_data.initial.is_reversed)
+            .enumerate()
+        {
+            let is_biggest = display_data.is_biggest(count, root_node.num_siblings());
+            let was_i_last = display_data.is_last(count, root_node.num_siblings());
+            display_node(c, &draw_data, is_biggest, was_i_last);
+        }
+    }
+}
+
+fn find_biggest_size_str(node: &DisplayNode, output_format: &str) -> usize {
+    let mut mx = human_readable_number(node.size, output_format)
+        .chars()
+        .count();
+    for n in node.children.iter() {
+        mx = max(mx, find_biggest_size_str(n, output_format));
+    }
+    mx
 }
 
 fn find_longest_dir_name(
     node: &DisplayNode,
     indent: usize,
     terminal: usize,
-    long_paths: bool,
+    idd: &InitialDisplayData,
 ) -> usize {
-    let printable_name = get_printable_name(&node.name, long_paths);
-    let longest = min(
-        UnicodeWidthStr::width(&*printable_name) + 1 + indent,
-        terminal,
-    );
+    let printable_name = get_printable_name(&node.name, idd.short_paths);
+    let longest = if idd.is_screen_reader {
+        UnicodeWidthStr::width(&*printable_name) + 1
+    } else {
+        min(
+            UnicodeWidthStr::width(&*printable_name) + 1 + indent,
+            terminal,
+        )
+    };
 
     // each none root tree drawing is 2 more chars, hence we increment indent by 2
     node.children
         .iter()
-        .map(|c| find_longest_dir_name(c, indent + 2, terminal, long_paths))
+        .map(|c| find_longest_dir_name(c, indent + 2, terminal, idd))
         .fold(longest, max)
 }
 
-fn display_node(node: DisplayNode, draw_data: &DrawData, is_biggest: bool, is_last: bool) {
+fn display_node(node: &DisplayNode, draw_data: &DrawData, is_biggest: bool, is_last: bool) {
     // hacky way of working out how deep we are in the tree
     let indent = draw_data.get_new_indent(!node.children.is_empty(), is_last);
     let level = ((indent.chars().count() - 1) / 2) - 1;
-    let bar_text = draw_data.generate_bar(&node, level);
+    let bar_text = draw_data.generate_bar(node, level);
 
-    let to_print = format_string(
-        &node,
-        &*indent,
-        &*bar_text,
-        is_biggest,
-        draw_data.display_data,
-    );
+    let to_print = format_string(node, &indent, &bar_text, is_biggest, draw_data.display_data);
 
-    if !draw_data.display_data.is_reversed {
-        println!("{}", to_print)
+    if !draw_data.display_data.initial.is_reversed {
+        println!("{to_print}")
     }
 
     let dd = DrawData {
-        indent: clean_indentation_string(&*indent),
+        indent: clean_indentation_string(&indent),
         percent_bar: bar_text,
         display_data: draw_data.display_data,
     };
@@ -204,7 +245,7 @@ fn display_node(node: DisplayNode, draw_data: &DrawData, is_biggest: bool, is_la
     let num_siblings = node.num_siblings();
 
     for (count, c) in node
-        .get_children_from_node(draw_data.display_data.is_reversed)
+        .get_children_from_node(draw_data.display_data.initial.is_reversed)
         .enumerate()
     {
         let is_biggest = dd.display_data.is_biggest(count, num_siblings);
@@ -212,8 +253,8 @@ fn display_node(node: DisplayNode, draw_data: &DrawData, is_biggest: bool, is_la
         display_node(c, &dd, is_biggest, was_i_last);
     }
 
-    if draw_data.display_data.is_reversed {
-        println!("{}", to_print)
+    if draw_data.display_data.initial.is_reversed {
+        println!("{to_print}")
     }
 }
 
@@ -234,10 +275,10 @@ fn clean_indentation_string(s: &str) -> String {
     is
 }
 
-fn get_printable_name<P: AsRef<Path>>(dir_name: &P, long_paths: bool) -> String {
+fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
     let dir_name = dir_name.as_ref();
     let printable_name = {
-        if long_paths {
+        if short_paths {
             match dir_name.parent() {
                 Some(prefix) => match dir_name.strip_prefix(prefix) {
                     Ok(base) => base,
@@ -253,11 +294,14 @@ fn get_printable_name<P: AsRef<Path>>(dir_name: &P, long_paths: bool) -> String
 }
 
 fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &DisplayData) -> String {
-    let name = get_printable_name(&node.name, display_data.short_paths);
-    let indent_and_name = format!("{} {}", indent, name);
+    let name = get_printable_name(&node.name, display_data.initial.short_paths);
+    let indent_and_name = format!("{indent} {name}");
     let width = UnicodeWidthStr::width(&*indent_and_name);
 
-    assert!(display_data.longest_string_length >= width);
+    assert!(
+        display_data.longest_string_length >= width,
+        "Terminal width not wide enough to draw directory tree"
+    );
+
     // Add spaces after the filename so we can draw the % used bar chart.
     let name_and_padding = name
@@ -265,15 +309,19 @@ fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &Display
         .repeat(display_data.longest_string_length - width)
         .as_str();
 
-    maybe_trim_filename(name_and_padding, display_data)
+    name_and_padding
 }
 
-fn maybe_trim_filename(name_in: String, display_data: &DisplayData) -> String {
-    if UnicodeWidthStr::width(&*name_in) > display_data.longest_string_length {
-        let name = name_in
-            .chars()
-            .take(display_data.longest_string_length - 2)
-            .collect::<String>();
+fn maybe_trim_filename(name_in: String, indent: &str, display_data: &DisplayData) -> String {
+    let indent_length = UnicodeWidthStr::width(indent);
+    assert!(
+        display_data.longest_string_length >= indent_length + 2,
+        "Terminal width not wide enough to draw directory tree"
+    );
+
+    let max_size = display_data.longest_string_length - indent_length;
+    if UnicodeWidthStr::width(&*name_in) > max_size {
+        let name = name_in.chars().take(max_size - 2).collect::<String>();
         name + ".."
     } else {
         name_in
@@ -283,14 +331,20 @@ fn maybe_trim_filename(name_in: String, display_data: &DisplayData) -> String {
 pub fn format_string(
     node: &DisplayNode,
     indent: &str,
-    percent_bar: &str,
+    bars: &str,
     is_biggest: bool,
     display_data: &DisplayData,
 ) -> String {
-    let (percents, name_and_padding) = get_name_percent(node, indent, percent_bar, display_data);
+    let (percent, name_and_padding) = get_name_percent(node, indent, bars, display_data);
     let pretty_size = get_pretty_size(node, is_biggest, display_data);
     let pretty_name = get_pretty_name(node, name_and_padding, display_data);
-    format!("{} {} {}{}", pretty_size, indent, pretty_name, percents)
+    // we can clean this and the method below somehow, not sure yet
+    if display_data.initial.is_screen_reader {
+        // if screen_reader then bars is 'depth'
+        format!("{pretty_name} {bars} {pretty_size}{percent}")
+    } else {
+        format!("{pretty_size} {indent} {pretty_name}{percent}")
+    }
 }
 
 fn get_name_percent(
@@ -299,29 +353,36 @@ fn get_name_percent(
     bar_chart: &str,
     display_data: &DisplayData,
 ) -> (String, String) {
-    if !bar_chart.is_empty() {
-        let percent_size_str = format!("{:.0}%", display_data.percent_size(node) * 100.0);
-        let percents = format!("│{} │ {:>4}", bar_chart, percent_size_str);
+    if display_data.initial.is_screen_reader {
+        let percent = display_data.percent_size(node) * 100.0;
+        let percent_size_str = format!("{percent:.0}%");
+        let percents = format!(" {percent_size_str:>4}",);
+        let name = pad_or_trim_filename(node, "", display_data);
+        (percents, name)
+    // Bar chart being empty may come from either config or the screen not being wide enough
+    } else if !bar_chart.is_empty() {
+        let percent = display_data.percent_size(node) * 100.0;
+        let percent_size_str = format!("{percent:.0}%");
+        let percents = format!("│{bar_chart} │ {percent_size_str:>4}");
         let name_and_padding = pad_or_trim_filename(node, indent, display_data);
         (percents, name_and_padding)
     } else {
-        let n = get_printable_name(&node.name, display_data.short_paths);
-        let name = maybe_trim_filename(n, display_data);
+        let n = get_printable_name(&node.name, display_data.initial.short_paths);
+        let name = maybe_trim_filename(n, indent, display_data);
         ("".into(), name)
     }
 }
 
 fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
-    let output = if display_data.by_filecount {
-        let size_as_str = node.size.separate_with_commas();
-        let spaces_to_add =
-            display_data.num_chars_needed_on_left_most - size_as_str.chars().count();
-        size_as_str + " ".repeat(spaces_to_add).as_str()
+    let output = if display_data.initial.by_filecount {
+        node.size.separate_with_commas()
     } else {
-        format!("{:>5}", human_readable_number(node.size))
+        human_readable_number(node.size, &display_data.initial.output_format)
     };
+    let spaces_to_add = display_data.num_chars_needed_on_left_most - output.chars().count();
+    let output = " ".repeat(spaces_to_add) + output.as_str();
 
-    if is_biggest && display_data.colors_on {
+    if is_biggest && display_data.initial.colors_on {
         format!("{}", Red.paint(output))
     } else {
         output
@@ -333,32 +394,66 @@ fn get_pretty_name(
     name_and_padding: String,
     display_data: &DisplayData,
 ) -> String {
-    if display_data.colors_on {
-        let meta_result = fs::metadata(node.name.clone());
+    if display_data.initial.colors_on {
+        let meta_result = fs::metadata(&node.name);
         let directory_color = display_data
             .ls_colors
-            .style_for_path_with_metadata(node.name.clone(), meta_result.as_ref().ok());
+            .style_for_path_with_metadata(&node.name, meta_result.as_ref().ok());
         let ansi_style = directory_color
             .map(Style::to_ansi_term_style)
             .unwrap_or_default();
-        format!("{}", ansi_style.paint(name_and_padding))
+        let out = ansi_style.paint(name_and_padding);
+        format!("{out}")
     } else {
         name_and_padding
     }
 }
 
-fn human_readable_number(size: u64) -> String {
-    for (i, u) in UNITS.iter().enumerate() {
-        let marker = 1024u64.pow((UNITS.len() - i) as u32);
-        if size >= marker {
-            if size / marker < 10 {
-                return format!("{:.1}{}", (size as f32 / marker as f32), u);
-            } else {
-                return format!("{}{}", (size / marker), u);
-            }
-        }
-    }
-    return format!("{}B", size);
+// If we are working with SI units or not
+pub fn get_type_of_thousand(output_str: &str) -> u64 {
+    if output_str.is_empty() {
+        1024
+    } else if output_str == "si" {
+        1000
+    } else if output_str.contains('i') || output_str.len() == 1 {
+        1024
+    } else {
+        1000
+    }
+}
+
+pub fn get_number_format(output_str: &str) -> Option<(u64, char)> {
+    if output_str.starts_with('b') {
+        return Some((1, 'B'));
+    }
+    for (i, u) in UNITS.iter().enumerate() {
+        if output_str.starts_with((*u).to_ascii_lowercase()) {
+            let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
+            return Some((marker, *u));
+        }
+    }
+    None
+}
+
+pub fn human_readable_number(size: u64, output_str: &str) -> String {
+    match get_number_format(output_str) {
+        Some((x, u)) => {
+            format!("{}{}", (size / x), u)
+        }
+        None => {
+            for (i, u) in UNITS.iter().enumerate() {
+                let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
+                if size >= marker {
+                    if size / marker < 10 {
+                        return format!("{:.1}{}", (size as f32 / marker as f32), u);
+                    } else {
+                        return format!("{}{}", (size / marker), u);
+                    }
+                }
+            }
+            format!("{size}B")
+        }
+    }
 }
 
 mod tests {
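human_readable_number now threads an output_format string through: a leading unit such as "kb" or "kib" pins the output to that unit via get_number_format, "si" switches the divisor to 1000, and an empty format keeps the old auto-scaling with 1024. A small restatement of just the divisor rule from get_type_of_thousand, for reference:

// Mirrors the divisor choice made by get_type_of_thousand above.
fn thousand(output_format: &str) -> u64 {
    match output_format {
        "" => 1024,                                   // default: binary units
        "si" => 1000,                                 // explicit SI request
        f if f.contains('i') || f.len() == 1 => 1024, // "kib", "mib", ... or bare "k"
        _ => 1000,                                    // "kb", "mb", ... are decimal
    }
}

fn main() {
    assert_eq!(thousand(""), 1024);
    assert_eq!(thousand("si"), 1000);
    assert_eq!(thousand("kib"), 1024);
    assert_eq!(thousand("kb"), 1000);
}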
@@ -369,13 +464,19 @@ mod tests {
 
     #[cfg(test)]
     fn get_fake_display_data(longest_string_length: usize) -> DisplayData {
-        DisplayData {
+        let initial = InitialDisplayData {
             short_paths: true,
             is_reversed: false,
             colors_on: false,
             by_filecount: false,
+            is_screen_reader: false,
+            output_format: "".into(),
+            bars_on_right: false,
+        };
+        DisplayData {
+            initial,
             num_chars_needed_on_left_most: 5,
-            base_size: 1,
+            base_size: 2_u64.pow(12), // 4.0K
             longest_string_length,
             ls_colors: LsColors::from_env().unwrap_or_default(),
         }
@@ -391,14 +492,9 @@ mod tests {
         let indent = "┌─┴";
         let percent_bar = "";
         let is_biggest = false;
+        let data = get_fake_display_data(20);
 
-        let s = format_string(
-            &n,
-            indent,
-            percent_bar,
-            is_biggest,
-            &get_fake_display_data(6),
-        );
+        let s = format_string(&n, indent, percent_bar, is_biggest, &data);
         assert_eq!(s, " 4.0K ┌─┴ short");
     }
 
@@ -414,25 +510,119 @@ mod tests {
         let percent_bar = "";
         let is_biggest = false;
+
-        let dd = get_fake_display_data(64);
-        let s = format_string(&n, indent, percent_bar, is_biggest, &dd);
+        let data = get_fake_display_data(64);
+        let s = format_string(&n, indent, percent_bar, is_biggest, &data);
         assert_eq!(
             s,
-            " 4.0K ┌─┴ very_long_name_longer_than_the_eighty_character_limit_very_lon.."
+            " 4.0K ┌─┴ very_long_name_longer_than_the_eighty_character_limit_very_.."
         );
     }
 
+    #[test]
+    fn test_format_str_screen_reader() {
+        let n = DisplayNode {
+            name: PathBuf::from("/short"),
+            size: 2_u64.pow(12), // This is 4.0K
+            children: vec![],
+        };
+        let indent = "";
+        let percent_bar = "3";
+        let is_biggest = false;
+        let mut data = get_fake_display_data(20);
+        data.initial.is_screen_reader = true;
+
+        let s = format_string(&n, indent, percent_bar, is_biggest, &data);
+        assert_eq!(s, "short 3 4.0K 100%");
+    }
+
     #[test]
     fn test_human_readable_number() {
-        assert_eq!(human_readable_number(1), "1B");
-        assert_eq!(human_readable_number(956), "956B");
-        assert_eq!(human_readable_number(1004), "1004B");
-        assert_eq!(human_readable_number(1024), "1.0K");
-        assert_eq!(human_readable_number(1536), "1.5K");
-        assert_eq!(human_readable_number(1024 * 512), "512K");
-        assert_eq!(human_readable_number(1024 * 1024), "1.0M");
-        assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1), "1023M");
-        assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20), "20G");
-        assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024), "1.0T");
+        assert_eq!(human_readable_number(1, ""), "1B");
+        assert_eq!(human_readable_number(956, ""), "956B");
+        assert_eq!(human_readable_number(1004, ""), "1004B");
+        assert_eq!(human_readable_number(1024, ""), "1.0K");
+        assert_eq!(human_readable_number(1536, ""), "1.5K");
+        assert_eq!(human_readable_number(1024 * 512, ""), "512K");
+        assert_eq!(human_readable_number(1024 * 1024, ""), "1.0M");
+        assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
+        assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
+        assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
+    }
+
+    #[test]
+    fn test_human_readable_number_si() {
+        assert_eq!(human_readable_number(1024 * 100, ""), "100K");
+        assert_eq!(human_readable_number(1024 * 100, "si"), "102K");
+    }
+
+    // Refer to https://en.wikipedia.org/wiki/Byte#Multiple-byte_units
+    #[test]
+    fn test_human_readable_number_kb() {
+        let hrn = human_readable_number;
+        assert_eq!(hrn(1023, "b"), "1023B");
+        assert_eq!(hrn(1000 * 1000, "bytes"), "1000000B");
+        assert_eq!(hrn(1023, "kb"), "1K");
+        assert_eq!(hrn(1023, "k"), "0K");
+        assert_eq!(hrn(1023, "kib"), "0K");
+        assert_eq!(hrn(1024, "kib"), "1K");
+        assert_eq!(hrn(1024 * 512, "kib"), "512K");
+        assert_eq!(hrn(1024 * 1024, "kib"), "1024K");
+        assert_eq!(hrn(1024 * 1000 * 1000 * 20, "kib"), "20000000K");
+        assert_eq!(hrn(1024 * 1024 * 1000 * 20, "mib"), "20000M");
+        assert_eq!(hrn(1024 * 1024 * 1024 * 20, "gib"), "20G");
+    }
+
+    #[cfg(test)]
+    fn build_draw_data(disp: &DisplayData, size: u32) -> (DrawData<'_>, DisplayNode) {
+        let n = DisplayNode {
+            name: PathBuf::from("/short"),
+            size: 2_u64.pow(size),
+            children: vec![],
+        };
+        let first_size_bar = repeat(BLOCKS[0]).take(13).collect();
+        let dd = DrawData {
+            indent: "".into(),
+            percent_bar: first_size_bar,
+            display_data: disp,
+        };
+        (dd, n)
+    }
+
+    #[test]
+    fn test_draw_data() {
+        let disp = &get_fake_display_data(20);
+        let (dd, n) = build_draw_data(disp, 12);
+        let bar = dd.generate_bar(&n, 1);
+        assert_eq!(bar, "█████████████");
+    }
+
+    #[test]
+    fn test_draw_data2() {
+        let disp = &get_fake_display_data(20);
+        let (dd, n) = build_draw_data(disp, 11);
+        let bar = dd.generate_bar(&n, 2);
+        assert_eq!(bar, "███████░░░░░░");
+    }
+    #[test]
+    fn test_draw_data3() {
+        let mut disp = get_fake_display_data(20);
+        let (dd, n) = build_draw_data(&disp, 11);
+        let bar = dd.generate_bar(&n, 3);
+        assert_eq!(bar, "███████▒▒▒▒▒▒");
+
+        disp.initial.bars_on_right = true;
+        let (dd, n) = build_draw_data(&disp, 11);
+        let bar = dd.generate_bar(&n, 3);
+        assert_eq!(bar, "▒▒▒▒▒▒███████")
+    }
+    #[test]
+    fn test_draw_data4() {
+        let disp = &get_fake_display_data(20);
+        let (dd, n) = build_draw_data(disp, 10);
+        // After 4 we have no more levels of shading so 4+ is the same
+        let bar = dd.generate_bar(&n, 4);
+        assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
+        let bar = dd.generate_bar(&n, 5);
+        assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
     }
 }
@@ -1,46 +1,26 @@
-use std::cmp::Ordering;
 use std::path::PathBuf;
 
-#[derive(Debug, Eq, Clone)]
+use serde::Serialize;
+
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Serialize)]
 pub struct DisplayNode {
-    pub name: PathBuf, //todo: consider moving to a string?
+    // Note: the order of fields in important here, for PartialEq and PartialOrd
     pub size: u64,
+    pub name: PathBuf,
     pub children: Vec<DisplayNode>,
 }
 
-impl Ord for DisplayNode {
-    fn cmp(&self, other: &Self) -> Ordering {
-        if self.size == other.size {
-            self.name.cmp(&other.name)
-        } else {
-            self.size.cmp(&other.size)
-        }
-    }
-}
-
-impl PartialOrd for DisplayNode {
-    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-impl PartialEq for DisplayNode {
-    fn eq(&self, other: &Self) -> bool {
-        self.name == other.name && self.size == other.size && self.children == other.children
-    }
-}
-
 impl DisplayNode {
     pub fn num_siblings(&self) -> u64 {
         self.children.len() as u64
     }
 
-    pub fn get_children_from_node(&self, is_reversed: bool) -> impl Iterator<Item = DisplayNode> {
+    pub fn get_children_from_node(&self, is_reversed: bool) -> impl Iterator<Item = &DisplayNode> {
         // we box to avoid the clippy lint warning
-        let out: Box<dyn Iterator<Item = DisplayNode>> = if is_reversed {
-            Box::new(self.children.clone().into_iter().rev())
+        let out: Box<dyn Iterator<Item = &DisplayNode>> = if is_reversed {
+            Box::new(self.children.iter().rev())
         } else {
-            Box::new(self.children.clone().into_iter())
+            Box::new(self.children.iter())
         };
         out
     }
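DisplayNode now relies on the derived PartialOrd/Ord, and the new field comment is the reason: derived ordering compares fields in declaration order, so putting size before name sorts by size first and only falls back to the name on ties. A standalone illustration of that property on a throwaway type:

// Derived Ord compares fields top to bottom, so `size` before `name` sorts by size first,
// which is the property the display code relies on when it reverse-sorts children.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Item {
    size: u64,
    name: String,
}

fn main() {
    let mut items = vec![
        Item { size: 10, name: "b".into() },
        Item { size: 10, name: "a".into() },
        Item { size: 99, name: "z".into() },
    ];
    items.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse()); // biggest first
    assert_eq!(items[0].size, 99);
    assert_eq!(items[1].name, "b"); // equal sizes fall back to name, still descending
}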
234
src/filter.rs
234
src/filter.rs
@@ -2,173 +2,139 @@ use crate::display_node::DisplayNode;
|
|||||||
use crate::node::Node;
|
use crate::node::Node;
|
||||||
use std::collections::BinaryHeap;
|
use std::collections::BinaryHeap;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::collections::HashSet;
|
use std::path::Path;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
pub fn get_by_depth(top_level_nodes: Vec<Node>, n: usize) -> Option<DisplayNode> {
|
pub struct AggregateData {
|
||||||
if top_level_nodes.is_empty() {
|
pub min_size: Option<usize>,
|
||||||
// perhaps change this, bring back Error object?
|
pub only_dir: bool,
|
||||||
return None;
|
pub only_file: bool,
|
||||||
}
|
pub number_of_lines: usize,
|
||||||
let root = get_new_root(top_level_nodes);
|
pub depth: usize,
|
||||||
Some(build_by_depth(&root, n - 1))
|
pub using_a_filter: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_biggest(
|
pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> Option<DisplayNode> {
|
||||||
top_level_nodes: Vec<Node>,
|
|
||||||
n: usize,
|
|
||||||
using_a_filter: bool,
|
|
||||||
) -> Option<DisplayNode> {
|
|
||||||
if top_level_nodes.is_empty() {
|
if top_level_nodes.is_empty() {
|
||||||
// perhaps change this, bring back Error object?
|
// perhaps change this, bring back Error object?
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut heap = BinaryHeap::new();
|
let mut heap = BinaryHeap::new();
|
||||||
let number_top_level_nodes = top_level_nodes.len();
|
let number_top_level_nodes = top_level_nodes.len();
|
||||||
let root = get_new_root(top_level_nodes);
|
let root;
|
||||||
let mut allowed_nodes = HashSet::new();
|
|
||||||
|
|
||||||
allowed_nodes.insert(&root.name);
|
if number_top_level_nodes > 1 {
|
||||||
heap = add_children(using_a_filter, &root, heap);
|
let size = top_level_nodes.iter().map(|node| node.size).sum();
|
||||||
|
root = Node {
|
||||||
|
name: PathBuf::from("(total)"),
|
||||||
|
size,
|
||||||
|
children: top_level_nodes,
|
||||||
|
inode_device: None,
|
||||||
|
depth: 0,
|
||||||
|
};
|
||||||
|
// Always include the base nodes if we add a 'parent' (total) node
|
||||||
|
heap = always_add_children(&display_data, &root, heap);
|
||||||
|
} else {
|
||||||
|
root = top_level_nodes.into_iter().next().unwrap();
|
||||||
|
heap = add_children(&display_data, &root, heap);
|
||||||
|
}
|
||||||
|
|
||||||
for _ in number_top_level_nodes..n {
|
Some(fill_remaining_lines(heap, &root, display_data))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn fill_remaining_lines<'a>(
|
||||||
|
mut heap: BinaryHeap<&'a Node>,
|
||||||
|
root: &'a Node,
|
||||||
|
display_data: AggregateData,
|
||||||
|
) -> DisplayNode {
|
||||||
|
let mut allowed_nodes = HashMap::new();
|
||||||
|
|
||||||
|
while allowed_nodes.len() < display_data.number_of_lines {
|
||||||
let line = heap.pop();
|
let line = heap.pop();
|
||||||
match line {
|
match line {
|
||||||
Some(line) => {
|
Some(line) => {
|
||||||
allowed_nodes.insert(&line.name);
|
if !display_data.only_file || line.children.is_empty() {
|
||||||
heap = add_children(using_a_filter, line, heap);
|
allowed_nodes.insert(line.name.as_path(), line);
|
||||||
|
}
|
||||||
|
heap = add_children(&display_data, line, heap);
|
||||||
}
|
}
|
||||||
None => break,
|
None => break,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
recursive_rebuilder(&allowed_nodes, &root)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_all_file_types(top_level_nodes: Vec<Node>, n: usize) -> Option<DisplayNode> {
|
if display_data.only_file {
|
||||||
let mut map: HashMap<String, DisplayNode> = HashMap::new();
|
flat_rebuilder(allowed_nodes, root)
|
||||||
build_by_all_file_types(top_level_nodes, &mut map);
|
|
||||||
let mut by_types: Vec<DisplayNode> = map.into_iter().map(|(_k, v)| v).collect();
|
|
||||||
by_types.sort();
|
|
||||||
by_types.reverse();
|
|
||||||
|
|
||||||
let displayed = if by_types.len() <= n {
|
|
||||||
by_types
|
|
||||||
} else {
|
} else {
|
||||||
let (displayed, rest) = by_types.split_at(if n > 1 { n - 1 } else { 1 });
|
recursive_rebuilder(&allowed_nodes, root)
|
||||||
let remaining = DisplayNode {
|
}
|
||||||
name: PathBuf::from("(others)"),
|
|
||||||
size: rest.iter().map(|a| a.size).sum(),
|
|
||||||
children: vec![],
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut displayed = displayed.to_vec();
|
|
||||||
displayed.push(remaining);
|
|
||||||
displayed
|
|
||||||
};
|
|
||||||
|
|
||||||
let result = DisplayNode {
|
|
||||||
name: PathBuf::from("(total)"),
|
|
||||||
size: displayed.iter().map(|a| a.size).sum(),
|
|
||||||
children: displayed,
|
|
||||||
};
|
|
||||||
Some(result)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_children<'a>(
|
fn add_children<'a>(
|
||||||
using_a_filter: bool,
|
display_data: &AggregateData,
|
||||||
line: &'a Node,
|
file_or_folder: &'a Node,
|
||||||
|
heap: BinaryHeap<&'a Node>,
|
||||||
|
) -> BinaryHeap<&'a Node> {
|
||||||
|
if display_data.depth > file_or_folder.depth {
|
||||||
|
always_add_children(display_data, file_or_folder, heap)
|
||||||
|
} else {
|
||||||
|
heap
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn always_add_children<'a>(
|
||||||
|
display_data: &AggregateData,
|
||||||
|
file_or_folder: &'a Node,
|
||||||
mut heap: BinaryHeap<&'a Node>,
|
mut heap: BinaryHeap<&'a Node>,
|
||||||
) -> BinaryHeap<&'a Node> {
|
) -> BinaryHeap<&'a Node> {
|
||||||
if using_a_filter {
|
heap.extend(
|
||||||
line.children.iter().for_each(|c| {
|
file_or_folder
|
||||||
if c.name.is_file() || c.size > 0 {
|
.children
|
||||||
heap.push(c)
|
.iter()
|
||||||
}
|
.filter(|c| match display_data.min_size {
|
||||||
});
|
Some(ms) => c.size > ms as u64,
|
||||||
} else {
|
None => !display_data.using_a_filter || c.name.is_file() || c.size > 0,
|
||||||
line.children.iter().for_each(|c| heap.push(c));
|
})
|
||||||
}
|
.filter(|c| {
|
||||||
|
if display_data.only_dir {
|
||||||
|
c.name.is_dir()
|
||||||
|
} else {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
);
|
||||||
heap
|
heap
|
||||||
}
|
}
|
||||||
|
|
||||||
-fn build_by_all_file_types(top_level_nodes: Vec<Node>, counter: &mut HashMap<String, DisplayNode>) {
-    for node in top_level_nodes {
-        if node.name.is_file() {
-            let ext = node.name.extension();
-            let key: String = match ext {
-                Some(e) => ".".to_string() + &e.to_string_lossy(),
-                None => "(no extension)".into(),
-            };
-            let mut display_node = counter.entry(key.clone()).or_insert(DisplayNode {
-                name: PathBuf::from(key),
-                size: 0,
-                children: vec![],
-            });
-            display_node.size += node.size;
-        }
-        build_by_all_file_types(node.children, counter)
-    }
-}
-
-fn build_by_depth(node: &Node, depth: usize) -> DisplayNode {
-    let new_children = {
-        if depth == 0 {
-            vec![]
-        } else {
-            let mut new_children: Vec<_> = node
-                .children
-                .iter()
-                .map(|c| build_by_depth(c, depth - 1))
-                .collect();
-            new_children.sort();
-            new_children.reverse();
-            new_children
-        }
-    };
-
-    DisplayNode {
-        name: node.name.clone(),
-        size: node.size,
-        children: new_children,
-    }
-}
-
-fn get_new_root(top_level_nodes: Vec<Node>) -> Node {
-    if top_level_nodes.len() > 1 {
-        let total_size = top_level_nodes.iter().map(|node| node.size).sum();
-        Node {
-            name: PathBuf::from("(total)"),
-            size: total_size,
-            children: top_level_nodes,
-            inode_device: None,
-        }
-    } else {
-        top_level_nodes.into_iter().next().unwrap()
-    }
-}
-
-fn recursive_rebuilder<'a>(
-    allowed_nodes: &'a HashSet<&PathBuf>,
-    current: &Node,
-) -> Option<DisplayNode> {
-    let mut new_children: Vec<_> = current
+// Finds children of current, if in allowed_nodes adds them as children to new DisplayNode
+fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) -> DisplayNode {
+    let new_children: Vec<_> = current
         .children
         .iter()
-        .filter_map(|c| {
-            if allowed_nodes.contains(&c.name) {
-                recursive_rebuilder(allowed_nodes, c)
-            } else {
-                None
-            }
-        })
+        .filter(|c| allowed_nodes.contains_key(c.name.as_path()))
+        .map(|c| recursive_rebuilder(allowed_nodes, c))
         .collect();
-    new_children.sort();
-    new_children.reverse();
-    let newnode = DisplayNode {
+    build_display_node(new_children, current)
+}
+
+// Applies all allowed nodes as children to current node
+fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> DisplayNode {
+    let new_children: Vec<DisplayNode> = allowed_nodes
+        .into_values()
+        .map(|v| DisplayNode {
+            name: v.name.clone(),
+            size: v.size,
+            children: vec![],
+        })
+        .collect::<Vec<DisplayNode>>();
+    build_display_node(new_children, current)
+}
+
+fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
+    new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
+    DisplayNode {
         name: current.name.clone(),
         size: current.size,
         children: new_children,
-    };
-    Some(newnode)
+    }
 }
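
As an aside, a tiny standalone sketch (not taken from the diff) of the descending sort that build_display_node applies to its children; the size/name pairs below are invented for illustration:

fn main() {
    // (size, name) pairs standing in for DisplayNode children; values are made up.
    let mut children = vec![(4096u64, "many"), (0, "a_file"), (1024, "docs")];
    // Same idea as `new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse())`:
    // the largest entries end up first.
    children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
    assert_eq!(children[0], (4096, "many"));
    assert_eq!(children[2], (0, "a_file"));
}
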
75  src/filter_type.rs  Normal file
@@ -0,0 +1,75 @@
use crate::display_node::DisplayNode;
use crate::node::Node;
use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::PathBuf;

#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct ExtensionNode<'a> {
    size: u64,
    extension: Option<&'a OsStr>,
}

pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayNode> {
    let ext_nodes = {
        let mut extension_cumulative_sizes = HashMap::new();
        build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);

        let mut extension_cumulative_sizes: Vec<ExtensionNode<'_>> = extension_cumulative_sizes
            .iter()
            .map(|(&extension, &size)| ExtensionNode { extension, size })
            .collect();

        extension_cumulative_sizes.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());

        extension_cumulative_sizes
    };

    let mut ext_nodes_iter = ext_nodes.iter();

    // First, collect the first N - 1 nodes...
    let mut displayed: Vec<DisplayNode> = ext_nodes_iter
        .by_ref()
        .take(if n > 1 { n - 1 } else { 1 })
        .map(|node| DisplayNode {
            name: PathBuf::from(
                node.extension
                    .map(|ext| format!(".{}", ext.to_string_lossy()))
                    .unwrap_or_else(|| "(no extension)".to_owned()),
            ),
            size: node.size,
            children: vec![],
        })
        .collect();

    // ...then, aggregate the remaining nodes (if any) into a single "(others)" node
    if ext_nodes_iter.len() > 0 {
        displayed.push(DisplayNode {
            name: PathBuf::from("(others)"),
            size: ext_nodes_iter.map(|node| node.size).sum(),
            children: vec![],
        });
    }

    let result = DisplayNode {
        name: PathBuf::from("(total)"),
        size: displayed.iter().map(|node| node.size).sum(),
        children: displayed,
    };

    Some(result)
}

fn build_by_all_file_types<'a>(
    top_level_nodes: &'a [Node],
    counter: &mut HashMap<Option<&'a OsStr>, u64>,
) {
    for node in top_level_nodes {
        if node.name.is_file() {
            let ext = node.name.extension();
            let cumulative_size = counter.entry(ext).or_default();
            *cumulative_size += node.size;
        }
        build_by_all_file_types(&node.children, counter)
    }
}
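
For orientation, a self-contained sketch of the same grouping idea build_by_all_file_types uses above; the file names and sizes are hypothetical and this is not code from the repository:

use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::Path;

fn main() {
    // Hypothetical (name, size) list; the grouping mirrors build_by_all_file_types.
    let files = [("a.png", 100u64), ("b.png", 50), ("notes", 10)];
    let mut counter: HashMap<Option<&OsStr>, u64> = HashMap::new();
    for (name, size) in files {
        // Files with the same extension accumulate into one bucket;
        // files without an extension share the `None` bucket.
        *counter.entry(Path::new(name).extension()).or_default() += size;
    }
    // Expected: ".png" => 150, no extension => 10
    for (ext, size) in &counter {
        println!("{:?} => {}", ext, size);
    }
}
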
579  src/main.rs
@@ -1,17 +1,40 @@
-#[macro_use]
-extern crate clap;
-extern crate rayon;
-extern crate regex;
-extern crate unicode_width;
+mod cli;
+mod config;
+mod dir_walker;
+mod display;
+mod display_node;
+mod filter;
+mod filter_type;
+mod node;
+mod platform;
+mod progress;
+mod utils;
+
+use crate::cli::build_cli;
+use crate::progress::RuntimeErrors;
+use clap::parser::ValuesRef;
+use dir_walker::WalkData;
+use display::InitialDisplayData;
+use filter::AggregateData;
+use progress::PIndicator;
+use regex::Error;
 use std::collections::HashSet;
+use std::env;
+use std::fs::read_to_string;
+use std::io;
+use std::panic;
 use std::process;
+use std::sync::atomic::AtomicBool;
+use std::sync::atomic::Ordering;
+use std::sync::Arc;
+use std::sync::Mutex;
+use sysinfo::{System, SystemExt};
 
 use self::display::draw_it;
-use clap::{App, AppSettings, Arg};
+use config::get_config;
 use dir_walker::walk_it;
-use dir_walker::WalkData;
-use filter::{get_all_file_types, get_biggest, get_by_depth};
+use filter::get_biggest;
+use filter_type::get_all_file_types;
 use regex::Regex;
 use std::cmp::max;
 use std::path::PathBuf;
@@ -19,299 +42,355 @@ use terminal_size::{terminal_size, Height, Width};
 use utils::get_filesystem_devices;
 use utils::simplify_dir_names;
 
-mod dir_walker;
-mod display;
-mod display_node;
-mod filter;
-mod node;
-mod platform;
-mod utils;
-
 static DEFAULT_NUMBER_OF_LINES: usize = 30;
 static DEFAULT_TERMINAL_WIDTH: usize = 80;
 
-#[cfg(windows)]
-fn init_color(no_color: bool) -> bool {
-    // If no color is already set do not print a warning message
-    if no_color {
-        true
-    } else {
+fn should_init_color(no_color: bool, force_color: bool) -> bool {
+    if force_color {
+        return true;
+    }
+    if no_color {
+        return false;
+    }
+    // check if NO_COLOR is set
+    // https://no-color.org/
+    if env::var_os("NO_COLOR").is_some() {
+        return false;
+    }
+    if terminal_size().is_none() {
+        // we are not in a terminal, color may not be needed
+        return false;
+    }
+    // we are in a terminal
+    #[cfg(windows)]
+    {
         // Required for windows 10
         // Fails to resolve for windows 8 so disable color
         match ansi_term::enable_ansi_support() {
-            Ok(_) => no_color,
+            Ok(_) => true,
             Err(_) => {
-                eprintln!(
-                    "This version of Windows does not support ANSI colors, setting no_color flag"
-                );
-                true
+                eprintln!("This version of Windows does not support ANSI colors");
+                false
             }
         }
     }
-}
-
-#[cfg(not(windows))]
-fn init_color(no_color: bool) -> bool {
-    no_color
+    #[cfg(not(windows))]
+    {
+        true
+    }
 }
 
 fn get_height_of_terminal() -> usize {
-    // Windows CI runners detect a terminal height of 0
-    if let Some((Width(_w), Height(h))) = terminal_size() {
-        max(h as usize, DEFAULT_NUMBER_OF_LINES) - 10
-    } else {
-        DEFAULT_NUMBER_OF_LINES - 10
-    }
+    // Simplify once https://github.com/eminence/terminal-size/pull/41 is
+    // merged
+    terminal_size()
+        // Windows CI runners detect a terminal height of 0
+        .map(|(_, Height(h))| max(h as usize, DEFAULT_NUMBER_OF_LINES))
+        .unwrap_or(DEFAULT_NUMBER_OF_LINES)
+        - 10
 }
 
-#[cfg(windows)]
 fn get_width_of_terminal() -> usize {
-    // Windows CI runners detect a very low terminal width
-    if let Some((Width(w), Height(_h))) = terminal_size() {
-        max(w as usize, DEFAULT_TERMINAL_WIDTH)
-    } else {
-        DEFAULT_TERMINAL_WIDTH
-    }
-}
-
-#[cfg(not(windows))]
-fn get_width_of_terminal() -> usize {
-    if let Some((Width(w), Height(_h))) = terminal_size() {
-        w as usize
-    } else {
-        DEFAULT_TERMINAL_WIDTH
-    }
-}
-
-fn get_regex_value(maybe_value: Option<&str>) -> Option<Regex> {
-    match maybe_value {
-        Some(v) => match Regex::new(v) {
-            Ok(r) => Some(r),
-            Err(e) => {
-                eprintln!("Ignoring bad value for regex {:?}", e);
-                process::exit(1);
-            }
-        },
-        None => None,
-    }
+    // Simplify once https://github.com/eminence/terminal-size/pull/41 is
+    // merged
+    terminal_size()
+        .map(|(Width(w), _)| match cfg!(windows) {
+            // Windows CI runners detect a very low terminal width
+            true => max(w as usize, DEFAULT_TERMINAL_WIDTH),
+            false => w as usize,
+        })
+        .unwrap_or(DEFAULT_TERMINAL_WIDTH)
+}
+
+fn get_regex_value(maybe_value: Option<ValuesRef<String>>) -> Vec<Regex> {
+    maybe_value
+        .unwrap_or_default()
+        .map(|reg| {
+            Regex::new(reg).unwrap_or_else(|err| {
+                eprintln!("Ignoring bad value for regex {err:?}");
+                process::exit(1)
+            })
+        })
+        .collect()
 }
 
-fn main() {
-    let default_height = get_height_of_terminal();
-    let def_num_str = default_height.to_string();
-
-    let options = App::new("Dust")
-        .about("Like du but more intuitive")
-        .version(crate_version!())
-        .setting(AppSettings::TrailingVarArg)
-        .arg(
-            Arg::with_name("depth")
-                .short("d")
-                .long("depth")
-                .help("Depth to show")
-                .takes_value(true)
-                .conflicts_with("number_of_lines"),
-        )
-        .arg(
-            Arg::with_name("number_of_lines")
-                .short("n")
-                .long("number-of-lines")
-                .help("Number of lines of output to show. This is Height, (but h is help)")
-                .takes_value(true)
-                .default_value(def_num_str.as_ref()),
-        )
-        .arg(
-            Arg::with_name("display_full_paths")
-                .short("p")
-                .long("full-paths")
-                .help("Subdirectories will not have their path shortened"),
-        )
-        .arg(
-            Arg::with_name("ignore_directory")
-                .short("X")
-                .long("ignore-directory")
-                .takes_value(true)
-                .number_of_values(1)
-                .multiple(true)
-                .help("Exclude any file or directory with this name"),
-        )
-        .arg(
-            Arg::with_name("limit_filesystem")
-                .short("x")
-                .long("limit-filesystem")
-                .help("Only count the files and directories on the same filesystem as the supplied directory"),
-        )
-        .arg(
-            Arg::with_name("display_apparent_size")
-                .short("s")
-                .long("apparent-size")
-                .help("Use file length instead of blocks"),
-        )
-        .arg(
-            Arg::with_name("reverse")
-                .short("r")
-                .long("reverse")
-                .help("Print tree upside down (biggest highest)"),
-        )
-        .arg(
-            Arg::with_name("no_colors")
-                .short("c")
-                .long("no-colors")
-                .help("No colors will be printed (normally largest directories are colored)"),
-        )
-        .arg(
-            Arg::with_name("no_bars")
-                .short("b")
-                .long("no-percent-bars")
-                .help("No percent bars or percentages will be displayed"),
-        )
-        .arg(
-            Arg::with_name("by_filecount")
-                .short("f")
-                .long("filecount")
-                .help("Directory 'size' is number of child files/dirs not disk size"),
-        )
-        .arg(
-            Arg::with_name("ignore_hidden")
-                .short("i") // Do not use 'h' this is used by 'help'
-                .long("ignore_hidden") //TODO: fix change - -> _
-                .help("Do not display hidden files"),
-        )
-        .arg(
-            Arg::with_name("invert_filter")
-                .short("v")
-                .long("invert-filter")
-                .takes_value(true)
-                .number_of_values(1)
-                .multiple(true)
-                .conflicts_with("filter")
-                .conflicts_with("types")
-                .conflicts_with("depth")
-                .help("Exclude files matching this regex. To ignore png files type: -v \"\\.png$\" "),
-        )
-        .arg(
-            Arg::with_name("filter")
-                .short("e")
-                .long("filter")
-                .takes_value(true)
-                .number_of_values(1)
-                .multiple(true)
-                .conflicts_with("types")
-                .conflicts_with("depth")
-                .help("Only include files matching this regex. For png files type: -e \"\\.png$\" "),
-        )
-        .arg(
-            Arg::with_name("types")
-                .short("t")
-                .long("file_types")
-                .conflicts_with("depth")
-                .help("show only these file types"),
-        )
-        .arg(
-            Arg::with_name("width")
-                .short("w")
-                .long("terminal_width")
-                .takes_value(true)
-                .number_of_values(1)
-                .help("Specify width of output overriding the auto detection of terminal width"),
-        )
-        .arg(Arg::with_name("inputs").multiple(true).default_value("."))
-        .get_matches();
-
-    let target_dirs = options
-        .values_of("inputs")
-        .expect("Should be a default value here")
-        .collect();
-
-    let summarize_file_types = options.is_present("types");
-
-    let maybe_filter = get_regex_value(options.value_of("filter"));
-    let maybe_invert_filter = get_regex_value(options.value_of("invert_filter"));
-
-    let number_of_lines = match value_t!(options.value_of("number_of_lines"), usize) {
-        Ok(v) => v,
-        Err(_) => {
-            eprintln!("Ignoring bad value for number_of_lines");
-            default_height
-        }
-    };
-
-    let terminal_width = match value_t!(options.value_of("width"), usize) {
-        Ok(v) => v,
-        Err(_) => get_width_of_terminal(),
-    };
-
-    let depth = options.value_of("depth").and_then(|depth| {
-        depth
-            .parse::<usize>()
-            .map(|v| v + 1)
-            .map_err(|_| eprintln!("Ignoring bad value for depth"))
-            .ok()
-    });
-
-    let no_colors = init_color(options.is_present("no_colors"));
-    let use_apparent_size = options.is_present("display_apparent_size");
-    let ignore_directories: Vec<PathBuf> = options
-        .values_of("ignore_directory")
-        .map(|i| i.map(PathBuf::from).collect())
-        .unwrap_or_default();
-
-    let by_filecount = options.is_present("by_filecount");
-    let ignore_hidden = options.is_present("ignore_hidden");
-    let limit_filesystem = options.is_present("limit_filesystem");
-
-    let simplified_dirs = simplify_dir_names(target_dirs);
-    let allowed_filesystems = {
-        if limit_filesystem {
-            get_filesystem_devices(simplified_dirs.iter())
-        } else {
-            HashSet::new()
-        }
-    };
-
-    let ignored_full_path: HashSet<PathBuf> = ignore_directories
-        .into_iter()
-        .flat_map(|x| simplified_dirs.iter().map(move |d| d.join(x.clone())))
-        .collect();
-
-    let walk_data = WalkData {
-        ignore_directories: ignored_full_path,
-        filter_regex: maybe_filter,
-        invert_filter_regex: maybe_invert_filter,
-        allowed_filesystems,
-        use_apparent_size,
-        by_filecount,
-        ignore_hidden,
-    };
-
-    let (top_level_nodes, has_errors) = walk_it(simplified_dirs, walk_data);
-
-    let tree = {
-        match (depth, summarize_file_types) {
-            (_, true) => get_all_file_types(top_level_nodes, number_of_lines),
-            (Some(depth), _) => get_by_depth(top_level_nodes, depth),
-            (_, _) => get_biggest(
-                top_level_nodes,
-                number_of_lines,
-                options.values_of("filter").is_some()
-                    || options.value_of("invert_filter").is_some(),
-            ),
-        }
-    };
-
-    if options.is_present("filter") {
-        println!("Filtering by: {}", options.value_of("filter").unwrap());
-    }
-    if has_errors {
-        eprintln!("Did not have permissions for all directories");
-    }
-
-    draw_it(
-        options.is_present("display_full_paths"),
-        !options.is_present("reverse"),
-        no_colors,
-        options.is_present("no_bars"),
-        terminal_width,
-        by_filecount,
-        tree,
-    );
-}
+fn main() {
+    let options = build_cli().get_matches();
+    let config = get_config();
+
+    let errors = RuntimeErrors::default();
+    let error_listen_for_ctrlc = Arc::new(Mutex::new(errors));
+    let errors_for_rayon = error_listen_for_ctrlc.clone();
+    let errors_final = error_listen_for_ctrlc.clone();
+    let is_in_listing = Arc::new(AtomicBool::new(false));
+    let cloned_is_in_listing = Arc::clone(&is_in_listing);
+
+    ctrlc::set_handler(move || {
+        error_listen_for_ctrlc.lock().unwrap().abort = true;
+        println!("\nAborting");
+        if cloned_is_in_listing.load(Ordering::Relaxed) {
+            process::exit(1);
+        }
+    })
+    .expect("Error setting Ctrl-C handler");
+
+    is_in_listing.store(true, Ordering::Relaxed);
+    let target_dirs = match config.get_files_from(&options) {
+        Some(path) => {
+            if path == "-" {
+                let mut targets_to_add = io::stdin()
+                    .lines()
+                    .map_while(Result::ok)
+                    .collect::<Vec<String>>();
+
+                if targets_to_add.is_empty() {
+                    eprintln!("No input provided, defaulting to current directory");
+                    targets_to_add.push(".".to_owned());
+                }
+                targets_to_add
+            } else {
+                // read file
+                match read_to_string(path) {
+                    Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
+                    Err(e) => {
+                        eprintln!("Error reading file: {e}");
+                        vec![".".to_owned()]
+                    }
+                }
+            }
+        }
+        None => match options.get_many::<String>("params") {
+            Some(values) => values.cloned().collect(),
+            None => vec![".".to_owned()],
+        },
+    };
+    is_in_listing.store(false, Ordering::Relaxed);
+
+    let summarize_file_types = options.get_flag("types");
+
+    let filter_regexs = get_regex_value(options.get_many("filter"));
+    let invert_filter_regexs = get_regex_value(options.get_many("invert_filter"));
+
+    let terminal_width: usize = match options.get_one::<usize>("width") {
+        Some(&val) => val,
+        None => get_width_of_terminal(),
+    };
+
+    let depth = config.get_depth(&options);
+
+    // If depth is set, then we set the default number_of_lines to be max
+    // instead of screen height
+
+    let number_of_lines = match options.get_one::<usize>("number_of_lines") {
+        Some(&val) => val,
+        None => {
+            if depth != usize::MAX {
+                usize::MAX
+            } else {
+                get_height_of_terminal()
+            }
+        }
+    };
+
+    let is_colors = should_init_color(
+        config.get_no_colors(&options),
+        config.get_force_colors(&options),
+    );
+
+    let ignore_directories = match options.get_many::<String>("ignore_directory") {
+        Some(values) => values
+            .map(|v| v.as_str())
+            .map(PathBuf::from)
+            .collect::<Vec<PathBuf>>(),
+        None => vec![],
+    };
+
+    let ignore_from_file_result = match options.get_one::<String>("ignore_all_in_file") {
+        Some(val) => read_to_string(val)
+            .unwrap()
+            .lines()
+            .map(Regex::new)
+            .collect::<Vec<Result<Regex, Error>>>(),
+        None => vec![],
+    };
+    let ignore_from_file = ignore_from_file_result
+        .into_iter()
+        .filter_map(|x| x.ok())
+        .collect::<Vec<Regex>>();
+
+    let invert_filter_regexs = invert_filter_regexs
+        .into_iter()
+        .chain(ignore_from_file)
+        .collect::<Vec<Regex>>();
+
+    let by_filecount = options.get_flag("by_filecount");
+    let limit_filesystem = options.get_flag("limit_filesystem");
+    let follow_links = options.get_flag("dereference_links");
+
+    let allowed_filesystems = limit_filesystem
+        .then(|| get_filesystem_devices(&target_dirs))
+        .unwrap_or_default();
+    let simplified_dirs = simplify_dir_names(&target_dirs);
+
+    let ignored_full_path: HashSet<PathBuf> = ignore_directories
+        .into_iter()
+        .flat_map(|x| simplified_dirs.iter().map(move |d| d.join(&x)))
+        .collect();
+
+    let output_format = config.get_output_format(&options);
+
+    let ignore_hidden = config.get_ignore_hidden(&options);
+
+    let mut indicator = PIndicator::build_me();
+    if !config.get_disable_progress(&options) {
+        indicator.spawn(output_format.clone())
+    }
+
+    let filter_modified_time = config.get_modified_time_operator(&options);
+    let filter_accessed_time = config.get_accessed_time_operator(&options);
+    let filter_changed_time = config.get_created_time_operator(&options);
+
+    let walk_data = WalkData {
+        ignore_directories: ignored_full_path,
+        filter_regex: &filter_regexs,
+        invert_filter_regex: &invert_filter_regexs,
+        allowed_filesystems,
+        filter_modified_time,
+        filter_accessed_time,
+        filter_changed_time,
+        use_apparent_size: config.get_apparent_size(&options),
+        by_filecount,
+        ignore_hidden,
+        follow_links,
+        progress_data: indicator.data.clone(),
+        errors: errors_for_rayon,
+    };
+
+    let threads_to_use = config.get_threads(&options);
+    let stack_size = config.get_custom_stack_size(&options);
+    init_rayon(&stack_size, &threads_to_use);
+
+    let top_level_nodes = walk_it(simplified_dirs, &walk_data);
+
+    let tree = match summarize_file_types {
+        true => get_all_file_types(&top_level_nodes, number_of_lines),
+        false => {
+            let agg_data = AggregateData {
+                min_size: config.get_min_size(&options),
+                only_dir: config.get_only_dir(&options),
+                only_file: config.get_only_file(&options),
+                number_of_lines,
+                depth,
+                using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
+            };
+            get_biggest(top_level_nodes, agg_data)
+        }
+    };
+
+    // Must have stopped indicator before we print to stderr
+    indicator.stop();
+
+    if errors_final.lock().unwrap().abort {
+        return;
+    }
+
+    let final_errors = walk_data.errors.lock().unwrap();
+    if !final_errors.file_not_found.is_empty() {
+        let err = final_errors
+            .file_not_found
+            .iter()
+            .map(|a| a.as_ref())
+            .collect::<Vec<&str>>()
+            .join(", ");
+        eprintln!("No such file or directory: {}", err);
+    }
+    if !final_errors.no_permissions.is_empty() {
+        if config.get_print_errors(&options) {
+            let err = final_errors
+                .no_permissions
+                .iter()
+                .map(|a| a.as_ref())
+                .collect::<Vec<&str>>()
+                .join(", ");
+            eprintln!("Did not have permissions for directories: {}", err);
+        } else {
+            eprintln!(
+                "Did not have permissions for all directories (add --print-errors to see errors)"
+            );
+        }
+    }
+    if !final_errors.unknown_error.is_empty() {
+        let err = final_errors
+            .unknown_error
+            .iter()
+            .map(|a| a.as_ref())
+            .collect::<Vec<&str>>()
+            .join(", ");
+        eprintln!("Unknown Error: {}", err);
+    }
+
+    if let Some(root_node) = tree {
+        let idd = InitialDisplayData {
+            short_paths: !config.get_full_paths(&options),
+            is_reversed: !config.get_reverse(&options),
+            colors_on: is_colors,
+            by_filecount,
+            is_screen_reader: config.get_screen_reader(&options),
+            output_format,
+            bars_on_right: config.get_bars_on_right(&options),
+        };
+
+        if config.get_output_json(&options) {
+            println!("{}", serde_json::to_string(&root_node).unwrap());
+        } else {
+            draw_it(
+                idd,
+                config.get_no_bars(&options),
+                terminal_width,
+                &root_node,
+                config.get_skip_total(&options),
+            )
+        }
+    }
+}
+
+fn init_rayon(stack_size: &Option<usize>, threads: &Option<usize>) {
+    // Rayon seems to raise this error on 32-bit builds
+    // The global thread pool has not been initialized.: ThreadPoolBuildError { kind: GlobalPoolAlreadyInitialized }
+    if cfg!(target_pointer_width = "64") {
+        let result = panic::catch_unwind(|| build_thread_pool(*stack_size, *threads));
+        if result.is_err() {
+            eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1")
+        }
+    }
+}
+
+fn build_thread_pool(
+    stack: Option<usize>,
+    threads: Option<usize>,
+) -> Result<(), rayon::ThreadPoolBuildError> {
+    let mut pool = rayon::ThreadPoolBuilder::new();
+
+    if let Some(thread_count) = threads {
+        pool = pool.num_threads(thread_count);
+    }
+
+    let stack_size = match stack {
+        Some(s) => Some(s),
+        None => {
+            let large_stack = usize::pow(1024, 3);
+            let mut s = System::new();
+            s.refresh_memory();
+            // Larger stack size if possible to handle cases with lots of nested directories
+            let available = s.available_memory();
+            if available > large_stack.try_into().unwrap() {
+                Some(large_stack)
+            } else {
+                None
+            }
+        }
+    };
+    if let Some(stack_size_param) = stack_size {
+        pool = pool.stack_size(stack_size_param);
+    }
+    pool.build_global()
+}
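
A minimal, standalone sketch of the Ctrl-C/abort-flag pattern main() wires up above (it assumes the ctrlc crate; the work loop below is invented purely for illustration):

use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;

fn main() {
    let abort = Arc::new(Mutex::new(false));
    let abort_for_handler = abort.clone();
    ctrlc::set_handler(move || {
        // Record the abort rather than exiting, so the main loop can stop cleanly.
        *abort_for_handler.lock().unwrap() = true;
        println!("\nAborting");
    })
    .expect("Error setting Ctrl-C handler");

    // Stand-in for the directory walk: check the flag periodically and stop early.
    for _ in 0..50 {
        if *abort.lock().unwrap() {
            return;
        }
        thread::sleep(Duration::from_millis(100));
    }
}
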
80  src/node.rs
@@ -1,8 +1,9 @@
+use crate::dir_walker::WalkData;
 use crate::platform::get_metadata;
+use crate::utils::is_filtered_out_due_to_file_time;
 use crate::utils::is_filtered_out_due_to_invert_regex;
 use crate::utils::is_filtered_out_due_to_regex;
 
-use regex::Regex;
 use std::cmp::Ordering;
 use std::path::PathBuf;
 
@@ -12,48 +13,56 @@ pub struct Node {
     pub size: u64,
     pub children: Vec<Node>,
     pub inode_device: Option<(u64, u64)>,
+    pub depth: usize,
 }
 
 #[allow(clippy::too_many_arguments)]
 pub fn build_node(
     dir: PathBuf,
     children: Vec<Node>,
-    filter_regex: &Option<Regex>,
-    invert_filter_regex: &Option<Regex>,
-    use_apparent_size: bool,
     is_symlink: bool,
     is_file: bool,
-    by_filecount: bool,
+    depth: usize,
+    walk_data: &WalkData,
 ) -> Option<Node> {
-    match get_metadata(&dir, use_apparent_size) {
-        Some(data) => {
-            let inode_device = if is_symlink && !use_apparent_size {
-                None
-            } else {
-                data.1
-            };
-
-            let size = if is_filtered_out_due_to_regex(filter_regex, &dir)
-                || is_filtered_out_due_to_invert_regex(invert_filter_regex, &dir)
-                || (is_symlink && !use_apparent_size)
-                || by_filecount && !is_file
-            {
-                0
-            } else if by_filecount {
-                1
-            } else {
-                data.0
-            };
-
-            Some(Node {
-                name: dir,
-                size,
-                children,
-                inode_device,
-            })
-        }
-        None => None,
-    }
+    let use_apparent_size = walk_data.use_apparent_size;
+    let by_filecount = walk_data.by_filecount;
+
+    get_metadata(&dir, use_apparent_size).map(|data| {
+        let inode_device = if is_symlink && !use_apparent_size {
+            None
+        } else {
+            data.1
+        };
+
+        let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
+            || is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
+            || (is_symlink && !use_apparent_size)
+            || by_filecount && !is_file
+            || [
+                (&walk_data.filter_modified_time, data.2 .0),
+                (&walk_data.filter_accessed_time, data.2 .1),
+                (&walk_data.filter_changed_time, data.2 .2),
+            ]
+            .iter()
+            .any(|(filter_time, actual_time)| {
+                is_filtered_out_due_to_file_time(filter_time, *actual_time)
+            }) {
+            0
+        } else if by_filecount {
+            1
+        } else {
+            data.0
+        };
+
+        Node {
+            name: dir,
+            size,
+            children,
+            inode_device,
+            depth,
+        }
+    })
 }
 
 impl PartialEq for Node {
@@ -64,11 +73,10 @@ impl PartialEq for Node {
 
 impl Ord for Node {
     fn cmp(&self, other: &Self) -> Ordering {
-        if self.size == other.size {
-            self.name.cmp(&other.name)
-        } else {
-            self.size.cmp(&other.size)
-        }
+        self.size
+            .cmp(&other.size)
+            .then_with(|| self.name.cmp(&other.name))
+            .then_with(|| self.children.cmp(&other.children))
     }
 }
 
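
A standalone illustration of the chained ordering Node::cmp now uses (size first, then name as a tie-breaker); the Item struct and its values are invented for the example:

use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct Item {
    size: u64,
    name: String,
}

// Mirrors the shape of Node::cmp above: compare by size, then break ties by name.
impl Ord for Item {
    fn cmp(&self, other: &Self) -> Ordering {
        self.size
            .cmp(&other.size)
            .then_with(|| self.name.cmp(&other.name))
    }
}

impl PartialOrd for Item {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let mut v = vec![
        Item { size: 4096, name: "b".into() },
        Item { size: 4096, name: "a".into() },
        Item { size: 0, name: "c".into() },
    ];
    v.sort(); // ascending: smallest size first, ties resolved by name
    assert_eq!(v[0].name, "c");
    assert_eq!(v[1].name, "a");
}
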
114  src/platform.rs
@@ -5,20 +5,34 @@ use std::path::Path;
 
 #[cfg(target_family = "unix")]
 fn get_block_size() -> u64 {
-    // All os specific implementations of MetatdataExt seem to define a block as 512 bytes
+    // All os specific implementations of MetadataExt seem to define a block as 512 bytes
     // https://doc.rust-lang.org/std/os/linux/fs/trait.MetadataExt.html#tymethod.st_blocks
     512
 }
 
+type InodeAndDevice = (u64, u64);
+type FileTime = (i64, i64, i64);
+
 #[cfg(target_family = "unix")]
-pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
+pub fn get_metadata<P: AsRef<Path>>(
+    path: P,
+    use_apparent_size: bool,
+) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
     use std::os::unix::fs::MetadataExt;
-    match d.metadata() {
+    match path.as_ref().metadata() {
         Ok(md) => {
             if use_apparent_size {
-                Some((md.len(), Some((md.ino(), md.dev()))))
+                Some((
+                    md.len(),
+                    Some((md.ino(), md.dev())),
+                    (md.mtime(), md.atime(), md.ctime()),
+                ))
             } else {
-                Some((md.blocks() * get_block_size(), Some((md.ino(), md.dev()))))
+                Some((
+                    md.blocks() * get_block_size(),
+                    Some((md.ino(), md.dev())),
+                    (md.mtime(), md.atime(), md.ctime()),
+                ))
             }
         }
         Err(_e) => None,
@@ -26,7 +40,10 @@ pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u
 }
 
 #[cfg(target_family = "windows")]
-pub fn get_metadata(d: &Path, _use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
+pub fn get_metadata<P: AsRef<Path>>(
+    path: P,
+    use_apparent_size: bool,
+) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
     // On windows opening the file to get size, file ID and volume can be very
     // expensive because 1) it causes a few system calls, and more importantly 2) it can cause
     // windows defender to scan the file.
@@ -65,7 +82,7 @@ pub fn get_metadata(d: &Path, _use_apparent_size: bool) -> Option<(u64, Option<(
 
     use std::io;
     use winapi_util::Handle;
-    fn handle_from_path_limited<P: AsRef<Path>>(path: P) -> io::Result<Handle> {
+    fn handle_from_path_limited(path: &Path) -> io::Result<Handle> {
         use std::fs::OpenOptions;
         use std::os::windows::fs::OpenOptionsExt;
         const FILE_READ_ATTRIBUTES: u32 = 0x0080;
@@ -90,39 +107,82 @@ pub fn get_metadata(d: &Path, _use_apparent_size: bool) -> Option<(u64, Option<(
         Ok(Handle::from_file(file))
     }
 
-    fn get_metadata_expensive(d: &Path) -> Option<(u64, Option<(u64, u64)>)> {
+    fn get_metadata_expensive(
+        path: &Path,
+        use_apparent_size: bool,
+    ) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
         use winapi_util::file::information;
 
-        let h = handle_from_path_limited(d).ok()?;
+        let h = handle_from_path_limited(path).ok()?;
         let info = information(&h).ok()?;
 
-        Some((
-            info.file_size(),
-            Some((info.file_index(), info.volume_serial_number())),
-        ))
+        if use_apparent_size {
+            use filesize::PathExt;
+            Some((
+                path.size_on_disk().ok()?,
+                Some((info.file_index(), info.volume_serial_number())),
+                (
+                    info.last_write_time().unwrap() as i64,
+                    info.last_access_time().unwrap() as i64,
+                    info.creation_time().unwrap() as i64,
+                ),
+            ))
+        } else {
+            Some((
+                info.file_size(),
+                Some((info.file_index(), info.volume_serial_number())),
+                (
+                    info.last_write_time().unwrap() as i64,
+                    info.last_access_time().unwrap() as i64,
+                    info.creation_time().unwrap() as i64,
+                ),
+            ))
+        }
     }
 
     use std::os::windows::fs::MetadataExt;
-    match d.metadata() {
+    let path = path.as_ref();
+    match path.metadata() {
         Ok(ref md) => {
-            const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20u32;
-            const FILE_ATTRIBUTE_READONLY: u32 = 0x1u32;
-            const FILE_ATTRIBUTE_HIDDEN: u32 = 0x2u32;
-            const FILE_ATTRIBUTE_SYSTEM: u32 = 0x4u32;
-            const FILE_ATTRIBUTE_NORMAL: u32 = 0x80u32;
-            const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10u32;
+            const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
+            const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
+            const FILE_ATTRIBUTE_HIDDEN: u32 = 0x02;
+            const FILE_ATTRIBUTE_SYSTEM: u32 = 0x04;
+            const FILE_ATTRIBUTE_NORMAL: u32 = 0x80;
+            const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10;
+            const FILE_ATTRIBUTE_SPARSE_FILE: u32 = 0x00000200;
+            const FILE_ATTRIBUTE_PINNED: u32 = 0x00080000;
+            const FILE_ATTRIBUTE_UNPINNED: u32 = 0x00100000;
+            const FILE_ATTRIBUTE_RECALL_ON_OPEN: u32 = 0x00040000;
+            const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: u32 = 0x00400000;
+            const FILE_ATTRIBUTE_OFFLINE: u32 = 0x00001000;
+            // normally FILE_ATTRIBUTE_SPARSE_FILE would be enough, however Windows sometimes likes to mask it out. see: https://stackoverflow.com/q/54560454
+            const IS_PROBABLY_ONEDRIVE: u32 = FILE_ATTRIBUTE_SPARSE_FILE
+                | FILE_ATTRIBUTE_PINNED
+                | FILE_ATTRIBUTE_UNPINNED
+                | FILE_ATTRIBUTE_RECALL_ON_OPEN
+                | FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS
+                | FILE_ATTRIBUTE_OFFLINE;
             let attr_filtered = md.file_attributes()
                 & !(FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_READONLY | FILE_ATTRIBUTE_SYSTEM);
-            if attr_filtered == FILE_ATTRIBUTE_ARCHIVE
-                || attr_filtered == FILE_ATTRIBUTE_DIRECTORY
-                || md.file_attributes() == FILE_ATTRIBUTE_NORMAL
+            if ((attr_filtered & FILE_ATTRIBUTE_ARCHIVE) != 0
+                || (attr_filtered & FILE_ATTRIBUTE_DIRECTORY) != 0
+                || md.file_attributes() == FILE_ATTRIBUTE_NORMAL)
+                && !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size)
             {
-                Some((md.len(), None))
+                Some((
+                    md.len(),
+                    None,
+                    (
+                        md.last_write_time() as i64,
+                        md.last_access_time() as i64,
+                        md.creation_time() as i64,
+                    ),
+                ))
             } else {
-                get_metadata_expensive(d)
+                get_metadata_expensive(path, use_apparent_size)
            }
        }
-        _ => get_metadata_expensive(d),
+        _ => get_metadata_expensive(path, use_apparent_size),
    }
 }
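
To show the shape of the new return value, here is a standalone unix-only sketch that builds the same (size, inode/device, (mtime, atime, ctime)) triple from std metadata; this is illustrative only, dust's own get_metadata is the code above:

#[cfg(target_family = "unix")]
fn main() -> std::io::Result<()> {
    use std::os::unix::fs::MetadataExt;
    // Query the current directory; any path would do for the illustration.
    let md = std::fs::metadata(".")?;
    let triple = (
        md.blocks() * 512,                      // block-based size, as get_block_size() assumes
        Some((md.ino(), md.dev())),             // inode and device
        (md.mtime(), md.atime(), md.ctime()),   // modified, accessed, changed times
    );
    println!("{:?}", triple);
    Ok(())
}

#[cfg(not(target_family = "unix"))]
fn main() {}
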
153  src/progress.rs  Normal file
@@ -0,0 +1,153 @@
use std::{
    collections::HashSet,
    io::Write,
    path::Path,
    sync::{
        atomic::{AtomicU64, AtomicU8, AtomicUsize, Ordering},
        mpsc::{self, RecvTimeoutError, Sender},
        Arc, RwLock,
    },
    thread::JoinHandle,
    time::Duration,
};

use crate::display::human_readable_number;

/* -------------------------------------------------------------------------- */

pub const ORDERING: Ordering = Ordering::Relaxed;

const SPINNER_SLEEP_TIME: u64 = 100;
const PROGRESS_CHARS: [char; 4] = ['-', '\\', '|', '/'];
const PROGRESS_CHARS_LEN: usize = PROGRESS_CHARS.len();

pub trait ThreadSyncTrait<T> {
    fn set(&self, val: T);
    fn get(&self) -> T;
}

#[derive(Default)]
pub struct ThreadStringWrapper {
    inner: RwLock<String>,
}

impl ThreadSyncTrait<String> for ThreadStringWrapper {
    fn set(&self, val: String) {
        *self.inner.write().unwrap() = val;
    }

    fn get(&self) -> String {
        (*self.inner.read().unwrap()).clone()
    }
}

/* -------------------------------------------------------------------------- */

// creating an enum this way allows to have simpler syntax compared to a Mutex or a RwLock
#[allow(non_snake_case)]
pub mod Operation {
    pub const INDEXING: u8 = 0;
    pub const PREPARING: u8 = 1;
}

#[derive(Default)]
pub struct PAtomicInfo {
    pub num_files: AtomicUsize,
    pub total_file_size: AtomicU64,
    pub state: AtomicU8,
    pub current_path: ThreadStringWrapper,
}

impl PAtomicInfo {
    pub fn clear_state(&self, dir: &Path) {
        self.state.store(Operation::INDEXING, ORDERING);
        let dir_name = dir.to_string_lossy().to_string();
        self.current_path.set(dir_name);
        self.total_file_size.store(0, ORDERING);
        self.num_files.store(0, ORDERING);
    }
}

#[derive(Default)]
pub struct RuntimeErrors {
    pub no_permissions: HashSet<String>,
    pub file_not_found: HashSet<String>,
    pub unknown_error: HashSet<String>,
    pub abort: bool,
}

/* -------------------------------------------------------------------------- */

fn format_preparing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
    let path_in = data.current_path.get();
    let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
    format!("Preparing: {path_in} {size} ... {prog_char}")
}

fn format_indexing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
    let path_in = data.current_path.get();
    let file_count = data.num_files.load(ORDERING);
    let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
    let file_str = format!("{file_count} files, {size}");
    format!("Indexing: {path_in} {file_str} ... {prog_char}")
}

pub struct PIndicator {
    pub thread: Option<(Sender<()>, JoinHandle<()>)>,
    pub data: Arc<PAtomicInfo>,
}

impl PIndicator {
    pub fn build_me() -> Self {
        Self {
            thread: None,
            data: Arc::new(PAtomicInfo {
                ..Default::default()
            }),
        }
    }

    pub fn spawn(&mut self, output_display: String) {
        let data = self.data.clone();
        let (stop_handler, receiver) = mpsc::channel::<()>();

        let time_info_thread = std::thread::spawn(move || {
            let mut progress_char_i: usize = 0;
            let mut stdout = std::io::stdout();
            let mut msg = "".to_string();

            // While the timeout triggers we go round the loop
            // If we disconnect or the sender sends its message we exit the while loop
            while let Err(RecvTimeoutError::Timeout) =
                receiver.recv_timeout(Duration::from_millis(SPINNER_SLEEP_TIME))
            {
                // Clear the text written by 'write!'& Return at the start of line
                print!("\r{:width$}", " ", width = msg.len());
                let prog_char = PROGRESS_CHARS[progress_char_i];

                msg = match data.state.load(ORDERING) {
                    Operation::INDEXING => format_indexing_str(prog_char, &data, &output_display),
                    Operation::PREPARING => format_preparing_str(prog_char, &data, &output_display),
                    _ => panic!("Unknown State"),
                };

                write!(stdout, "\r{msg}").unwrap();
                stdout.flush().unwrap();

                progress_char_i += 1;
                progress_char_i %= PROGRESS_CHARS_LEN;
            }
            print!("\r{:width$}", " ", width = msg.len());
            print!("\r");
            stdout.flush().unwrap();
        });
        self.thread = Some((stop_handler, time_info_thread))
    }

    pub fn stop(self) {
        if let Some((stop_handler, thread)) = self.thread {
            stop_handler.send(()).unwrap();
            thread.join().unwrap();
        }
    }
}
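
The core of PIndicator is the stop-channel loop inside spawn(); the following is a stripped-down, standalone sketch of that pattern (the timing and the pretend work are invented for illustration, not the crate's API):

use std::io::Write;
use std::sync::mpsc::{self, RecvTimeoutError};
use std::thread;
use std::time::Duration;

fn main() {
    let (stop_tx, stop_rx) = mpsc::channel::<()>();
    let spinner = thread::spawn(move || {
        let frames = ['-', '\\', '|', '/'];
        let mut i = 0;
        // Keep spinning while the receive times out; exit when () arrives or the sender is dropped.
        while let Err(RecvTimeoutError::Timeout) =
            stop_rx.recv_timeout(Duration::from_millis(100))
        {
            print!("\r{}", frames[i]);
            std::io::stdout().flush().unwrap();
            i = (i + 1) % frames.len();
        }
        print!("\r \r");
    });

    thread::sleep(Duration::from_millis(350)); // stand-in for the real directory walk
    stop_tx.send(()).unwrap();
    spinner.join().unwrap();
}
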
77  src/utils.rs
@@ -2,16 +2,19 @@ use platform::get_metadata;
 use std::collections::HashSet;
 use std::path::{Path, PathBuf};
 
+use crate::config::DAY_SECONDS;
+use crate::dir_walker::Operater;
 use crate::platform;
 use regex::Regex;
 
-pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf> {
-    let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(filenames.len());
-    let mut to_remove: Vec<PathBuf> = Vec::with_capacity(filenames.len());
-
-    for t in filenames {
+pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
+    let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(dirs.len());
+
+    for t in dirs {
         let top_level_name = normalize_path(t);
         let mut can_add = true;
+        let mut to_remove: Vec<PathBuf> = Vec::new();
 
         for tt in top_level_names.iter() {
             if is_a_parent_of(&top_level_name, tt) {
@@ -20,9 +23,9 @@ pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf>
                 can_add = false;
             }
         }
-        to_remove.sort_unstable();
-        top_level_names.retain(|tr| to_remove.binary_search(tr).is_err());
-        to_remove.clear();
+        for r in to_remove {
+            top_level_names.remove(&r);
+        }
         if can_add {
             top_level_names.insert(top_level_name);
         }
@@ -31,18 +34,13 @@ pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf>
     top_level_names
 }
 
-pub fn get_filesystem_devices<'a, P: IntoIterator<Item = &'a PathBuf>>(paths: P) -> HashSet<u64> {
+pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P]) -> HashSet<u64> {
     // Gets the device ids for the filesystems which are used by the argument paths
     paths
-        .into_iter()
-        .filter_map(|p| {
-            let meta = get_metadata(p, false);
-            if let Some((_size, Some((_id, dev)))) = meta {
-                Some(dev)
-            } else {
-                None
-            }
+        .iter()
+        .filter_map(|p| match get_metadata(p, false) {
+            Some((_size, Some((_id, dev)), _time)) => Some(dev),
+            _ => None,
         })
        .collect()
 }
@@ -54,23 +52,39 @@ pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
     // 3. removing trailing extra separators and '.' ("current directory") path segments
     // * `Path.components()` does all the above work; ref: <https://doc.rust-lang.org/std/path/struct.Path.html#method.components>
     // 4. changing to os preferred separator (automatically done by recollecting components back into a PathBuf)
-    path.as_ref().components().collect::<PathBuf>()
+    path.as_ref().components().collect()
 }
 
-pub fn is_filtered_out_due_to_regex(filter_regex: &Option<Regex>, dir: &Path) -> bool {
-    match filter_regex {
-        Some(fr) => !fr.is_match(&dir.as_os_str().to_string_lossy()),
-        None => false,
+pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool {
+    if filter_regex.is_empty() {
+        false
+    } else {
+        filter_regex
+            .iter()
+            .all(|f| !f.is_match(&dir.as_os_str().to_string_lossy()))
    }
 }
 
-pub fn is_filtered_out_due_to_invert_regex(filter_regex: &Option<Regex>, dir: &Path) -> bool {
-    match filter_regex {
-        Some(fr) => fr.is_match(&dir.as_os_str().to_string_lossy()),
+pub fn is_filtered_out_due_to_file_time(
+    filter_time: &Option<(Operater, i64)>,
+    actual_time: i64,
+) -> bool {
+    match filter_time {
         None => false,
+        Some((Operater::Equal, bound_time)) => {
+            !(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
+        }
+        Some((Operater::GreaterThan, bound_time)) => actual_time < *bound_time,
+        Some((Operater::LessThan, bound_time)) => actual_time > *bound_time,
    }
 }
 
+pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool {
+    filter_regex
+        .iter()
+        .any(|f| f.is_match(&dir.as_os_str().to_string_lossy()))
+}
+
 fn is_a_parent_of<P: AsRef<Path>>(parent: P, child: P) -> bool {
     let parent = parent.as_ref();
     let child = child.as_ref();
@@ -85,14 +99,15 @@ mod tests {
     fn test_simplify_dir() {
         let mut correct = HashSet::new();
         correct.insert(PathBuf::from("a"));
-        assert_eq!(simplify_dir_names(vec!["a"]), correct);
+        assert_eq!(simplify_dir_names(&["a"]), correct);
     }
 
     #[test]
     fn test_simplify_dir_rm_subdir() {
         let mut correct = HashSet::new();
         correct.insert(["a", "b"].iter().collect::<PathBuf>());
-        assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct);
+        assert_eq!(simplify_dir_names(&["a/b/c", "a/b", "a/b/d/f"]), correct);
+        assert_eq!(simplify_dir_names(&["a/b", "a/b/c", "a/b/d/f"]), correct);
     }
 
     #[test]
@@ -101,7 +116,7 @@ mod tests {
         correct.insert(["a", "b"].iter().collect::<PathBuf>());
         correct.insert(PathBuf::from("c"));
         assert_eq!(
-            simplify_dir_names(vec![
+            simplify_dir_names(&[
                 "a/b",
                 "a/b//",
                 "a/././b///",
@@ -120,14 +135,14 @@ mod tests {
         correct.insert(PathBuf::from("b"));
         correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
         correct.insert(["a", "b"].iter().collect::<PathBuf>());
-        assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct);
+        assert_eq!(simplify_dir_names(&["a/b", "c/a/b/", "b"]), correct);
     }
 
     #[test]
     fn test_simplify_dir_dots() {
         let mut correct = HashSet::new();
         correct.insert(PathBuf::from("src"));
-        assert_eq!(simplify_dir_names(vec!["src/."]), correct);
+        assert_eq!(simplify_dir_names(&["src/."]), correct);
     }
 
     #[test]
@@ -135,7 +150,7 @@ mod tests {
        let mut correct = HashSet::new();
        correct.insert(PathBuf::from("src"));
        correct.insert(PathBuf::from("src_v2"));
-        assert_eq!(simplify_dir_names(vec!["src/", "src_v2"]), correct);
+        assert_eq!(simplify_dir_names(&["src/", "src_v2"]), correct);
    }
 
    #[test]
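
To make the Equal/GreaterThan/LessThan semantics above concrete, here is a standalone mirror of is_filtered_out_due_to_file_time; Operater and DAY_SECONDS are crate items, and the copies and timestamps below exist only for illustration:

// Local stand-ins for the crate's Operater enum and DAY_SECONDS constant.
enum Operater {
    Equal,
    GreaterThan,
    LessThan,
}

const DAY_SECONDS: i64 = 24 * 60 * 60;

fn is_filtered_out(filter_time: Option<(Operater, i64)>, actual_time: i64) -> bool {
    match filter_time {
        None => false,
        // Equal keeps anything inside the 24h window starting at the bound.
        Some((Operater::Equal, bound)) => {
            !(actual_time >= bound && actual_time < bound + DAY_SECONDS)
        }
        Some((Operater::GreaterThan, bound)) => actual_time < bound,
        Some((Operater::LessThan, bound)) => actual_time > bound,
    }
}

fn main() {
    let midnight: i64 = 1_700_000_000; // arbitrary illustrative timestamp
    assert!(!is_filtered_out(None, 0));
    assert!(!is_filtered_out(Some((Operater::Equal, midnight)), midnight + 10));
    assert!(is_filtered_out(Some((Operater::Equal, midnight)), midnight + DAY_SECONDS));
    assert!(is_filtered_out(Some((Operater::GreaterThan, midnight)), midnight - 1));
    assert!(!is_filtered_out(Some((Operater::LessThan, midnight)), midnight - 1));
}
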
@@ -1 +1,2 @@
-hi
+something
+.secret

0  tests/test_dir_hidden_entries/.secret  Normal file
@@ -1,11 +1,11 @@
|
|||||||
use assert_cmd::Command;
|
use assert_cmd::Command;
|
||||||
use std::ffi::OsStr;
|
use std::ffi::OsStr;
|
||||||
use std::str;
|
use std::process::Output;
|
||||||
use std::sync::Once;
|
use std::sync::Once;
|
||||||
|
use std::{io, str};
|
||||||
|
|
||||||
static INIT: Once = Once::new();
|
static INIT: Once = Once::new();
|
||||||
|
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
|
||||||
mod tests_symlinks;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This file contains tests that verify the exact output of the command.
|
* This file contains tests that verify the exact output of the command.
|
||||||
@@ -19,23 +19,32 @@ mod tests_symlinks;
|
|||||||
/// Copy to /tmp dir - we assume that the formatting of the /tmp partition
|
/// Copy to /tmp dir - we assume that the formatting of the /tmp partition
|
||||||
/// is consistent. If the tests fail your /tmp filesystem probably differs
|
/// is consistent. If the tests fail your /tmp filesystem probably differs
|
||||||
fn copy_test_data(dir: &str) {
|
fn copy_test_data(dir: &str) {
|
||||||
// First remove the existing directory - just incase it is there and has incorrect data
|
// First remove the existing directory - just in case it is there and has incorrect data
|
||||||
let last_slash = dir.rfind('/').unwrap();
|
let last_slash = dir.rfind('/').unwrap();
|
||||||
let last_part_of_dir = dir.chars().skip(last_slash).collect::<String>();
|
let last_part_of_dir = dir.chars().skip(last_slash).collect::<String>();
|
||||||
match Command::new("rm")
|
let _ = Command::new("rm")
|
||||||
.arg("-rf")
|
.arg("-rf")
|
||||||
.arg("/tmp/".to_owned() + &*last_part_of_dir)
|
.arg("/tmp/".to_owned() + &*last_part_of_dir)
|
||||||
|
.ok();
|
||||||
|
|
||||||
|
let _ = Command::new("cp")
|
||||||
|
.arg("-r")
|
||||||
|
.arg(dir)
|
||||||
|
.arg("/tmp/")
|
||||||
.ok()
|
.ok()
|
||||||
|
.map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_unreadable_directory() -> io::Result<()> {
|
||||||
|
#[cfg(unix)]
|
||||||
{
|
{
|
||||||
Ok(_) => {}
|
use std::fs;
|
||||||
Err(_) => {}
|
use std::fs::Permissions;
|
||||||
};
|
use std::os::unix::fs::PermissionsExt;
|
||||||
match Command::new("cp").arg("-r").arg(dir).arg("/tmp/").ok() {
|
fs::create_dir_all(UNREADABLE_DIR_PATH)?;
|
||||||
Ok(_) => {}
|
fs::set_permissions(UNREADABLE_DIR_PATH, Permissions::from_mode(0))?;
|
||||||
Err(err) => {
|
}
|
||||||
eprintln!("Error copying directory {:?}", err);
|
Ok(())
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||

 fn initialize() {
@@ -43,21 +52,42 @@ fn initialize() {
         copy_test_data("tests/test_dir");
         copy_test_data("tests/test_dir2");
         copy_test_data("tests/test_dir_unicode");
+
+        if let Err(e) = create_unreadable_directory() {
+            panic!("Failed to create unreadable directory: {}", e);
+        }
     });
 }

-fn exact_output_test<T: AsRef<OsStr>>(valid_outputs: Vec<String>, command_args: Vec<T>) {
+fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
     initialize();
-    let mut a = &mut Command::cargo_bin("dust").unwrap();
+    let mut to_run = &mut Command::cargo_bin("dust").unwrap();
     for p in command_args {
-        a = a.arg(p);
+        to_run = to_run.arg(p);
     }
-    let output: String = str::from_utf8(&a.unwrap().stdout).unwrap().into();
-    assert!(valid_outputs
-        .iter()
-        .fold(false, |sum, i| sum || output.contains(i)));
+    to_run.unwrap()
+}
+
+fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<String>) {
+    let to_run = run_cmd(command_args);
+
+    let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned();
+    let will_fail = valid_stdout.iter().any(|i| stdout_output.contains(i));
+    if !will_fail {
+        eprintln!(
+            "output(stdout):\n{}\ndoes not contain any of:\n{}",
+            stdout_output,
+            valid_stdout.join("\n\n")
+        );
+    }
+    assert!(will_fail);
+}
+
+fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String) {
+    let to_run = run_cmd(command_args);
+
+    let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim();
+    assert_eq!(stderr_output, valid_stderr);
 }

 // "windows" result data can vary by host (size seems to be variable by one byte); fix code vs test and re-enable
@@ -65,38 +95,39 @@ fn exact_output_test<T: AsRef<OsStr>>(valid_outputs: Vec<String>, command_args:
 #[test]
 pub fn test_main_basic() {
     // -c is no color mode - This makes testing much simpler
-    exact_output_test(main_output(), vec!["-c", "/tmp/test_dir/"])
+    exact_stdout_test(&["-c", "-B", "/tmp/test_dir/"], main_output());
 }

 #[cfg_attr(target_os = "windows", ignore)]
 #[test]
 pub fn test_main_multi_arg() {
-    let command_args = vec![
+    let command_args = [
         "-c",
+        "-B",
         "/tmp/test_dir/many/",
         "/tmp/test_dir",
         "/tmp/test_dir",
     ];
-    exact_output_test(main_output(), command_args);
+    exact_stdout_test(&command_args, main_output());
 }

 fn main_output() -> Vec<String> {
     // Some linux currently thought to be Manjaro, Arch
     // Although probably depends on how drive is formatted
     let mac_and_some_linux = r#"
-   0B     ┌── a_file      │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │   0%
- 4.0K   ├── hello_file│████████████████████████████████████████████████ │ 100%
- 4.0K ┌─┴ many          │████████████████████████████████████████████████ │ 100%
- 4.0K ┌─┴ test_dir          │████████████████████████████████████████████████ │ 100%
+   0B     ┌── a_file      │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │   0%
+ 4.0K   ├── hello_file│█████████████████████████████████████████████████ │ 100%
+ 4.0K ┌─┴ many          │█████████████████████████████████████████████████ │ 100%
+ 4.0K ┌─┴ test_dir          │█████████████████████████████████████████████████ │ 100%
 "#
     .trim()
     .to_string();

     let ubuntu = r#"
    0B   ┌── a_file      │                                 ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │   0%
  4.0K   ├── hello_file│                                 ░░░░░░░░░░░░░░░░█████████████████ │  33%
  8.0K ┌─┴ many          │                                 █████████████████████████████████ │  67%
-  12K ┌─┴ test_dir          │████████████████████████████████████████████████ │ 100%
+  12K ┌─┴ test_dir          │█████████████████████████████████████████████████ │ 100%
 "#
     .trim()
     .to_string();
@@ -107,59 +138,59 @@ fn main_output() -> Vec<String> {
 #[cfg_attr(target_os = "windows", ignore)]
 #[test]
 pub fn test_main_long_paths() {
-    let command_args = vec!["-c", "-p", "/tmp/test_dir/"];
-    exact_output_test(main_output_long_paths(), command_args);
+    let command_args = ["-c", "-p", "-B", "/tmp/test_dir/"];
+    exact_stdout_test(&command_args, main_output_long_paths());
 }

 fn main_output_long_paths() -> Vec<String> {
     let mac_and_some_linux = r#"
-   0B     ┌── /tmp/test_dir/many/a_file   │░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │   0%
- 4.0K   ├── /tmp/test_dir/many/hello_file│█████████████████████████████ │ 100%
- 4.0K ┌─┴ /tmp/test_dir/many             │█████████████████████████████ │ 100%
- 4.0K ┌─┴ /tmp/test_dir                  │█████████████████████████████ │ 100%
+   0B     ┌── /tmp/test_dir/many/a_file   │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │   0%
+ 4.0K   ├── /tmp/test_dir/many/hello_file│██████████████████████████████ │ 100%
+ 4.0K ┌─┴ /tmp/test_dir/many             │██████████████████████████████ │ 100%
+ 4.0K ┌─┴ /tmp/test_dir                  │██████████████████████████████ │ 100%
 "#
     .trim()
     .to_string();
     let ubuntu = r#"
-   0B   ┌── /tmp/test_dir/many/a_file   │         ░░░░░░░░░░░░░░░░░░░█ │   0%
- 4.0K   ├── /tmp/test_dir/many/hello_file│         ░░░░░░░░░░██████████ │  33%
- 8.0K ┌─┴ /tmp/test_dir/many             │         ████████████████████ │  67%
-  12K ┌─┴ /tmp/test_dir                  │█████████████████████████████ │ 100%
+   0B   ┌── /tmp/test_dir/many/a_file   │         ░░░░░░░░░░░░░░░░░░░░█ │   0%
+ 4.0K   ├── /tmp/test_dir/many/hello_file│         ░░░░░░░░░░███████████ │  33%
+ 8.0K ┌─┴ /tmp/test_dir/many             │         █████████████████████ │  67%
+  12K ┌─┴ /tmp/test_dir                  │██████████████████████████████ │ 100%
 "#
     .trim()
     .to_string();
     vec![mac_and_some_linux, ubuntu]
 }

-// Check against directories and files whos names are substrings of each other
+// Check against directories and files whose names are substrings of each other
 #[cfg_attr(target_os = "windows", ignore)]
 #[test]
 pub fn test_substring_of_names_and_long_names() {
-    let command_args = vec!["-c", "/tmp/test_dir2"];
-    exact_output_test(no_substring_of_names_output(), command_args);
+    let command_args = ["-c", "-B", "/tmp/test_dir2"];
+    exact_stdout_test(&command_args, no_substring_of_names_output());
 }

 fn no_substring_of_names_output() -> Vec<String> {
     let ubuntu = "
-   0B   ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_g..
+   0B   ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_goes..
  4.0K   ├── dir_name_clash
  4.0K   │ ┌── hello
  8.0K   ├─┴ dir
  4.0K   │ ┌── hello
  8.0K   ├─┴ dir_substring
   24K ┌─┴ test_dir2
 "
     .trim()
     .into();

     let mac_and_some_linux = "
-   0B   ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_g..
+   0B   ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_goes..
  4.0K   │ ┌── hello
  4.0K   ├─┴ dir
  4.0K   ├── dir_name_clash
  4.0K   │ ┌── hello
  4.0K   ├─┴ dir_substring
   12K ┌─┴ test_dir2
 "
     .trim()
     .into();
@@ -169,26 +200,75 @@ fn no_substring_of_names_output() -> Vec<String> {
 #[cfg_attr(target_os = "windows", ignore)]
 #[test]
 pub fn test_unicode_directories() {
-    let command_args = vec!["-c", "/tmp/test_dir_unicode"];
-    exact_output_test(unicode_dir(), command_args);
+    let command_args = ["-c", "-B", "/tmp/test_dir_unicode"];
+    exact_stdout_test(&command_args, unicode_dir());
 }

 fn unicode_dir() -> Vec<String> {
     // The way unicode & asian characters are rendered on the terminal should make this line up
     let ubuntu = "
    0B   ┌── ラウトは難しいです!.japan│                                  █ │   0%
    0B   ├── 👩.unicode              │                                  █ │   0%
- 4.0K ┌─┴ test_dir_unicode          │██████████████████████████████████ │ 100%
+ 4.0K ┌─┴ test_dir_unicode          │███████████████████████████████████ │ 100%
 "
     .trim()
     .into();

     let mac_and_some_linux = "
    0B   ┌── ラウトは難しいです!.japan│                                 █ │   0%
    0B   ├── 👩.unicode              │                                 █ │   0%
    0B ┌─┴ test_dir_unicode          │                                 █ │   0%
 "
     .trim()
     .into();
     vec![mac_and_some_linux, ubuntu]
 }

+#[cfg_attr(target_os = "windows", ignore)]
+#[test]
+pub fn test_apparent_size() {
+    let command_args = ["-c", "-s", "-b", "/tmp/test_dir"];
+    exact_stdout_test(&command_args, apparent_size_output());
+}
+
+fn apparent_size_output() -> Vec<String> {
+    // The apparent directory sizes are too unpredictable and system dependent to try and match
+    let one_space_before = r#"
+ 0B   ┌── a_file
+ 6B   ├── hello_file
+"#
+    .trim()
+    .to_string();
+
+    let two_space_before = r#"
+  0B   ┌── a_file
+  6B   ├── hello_file
+"#
+    .trim()
+    .to_string();
+
+    vec![one_space_before, two_space_before]
+}
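The new test drives '-s' (apparent size), which reports the byte length of a file rather than the space its filesystem blocks occupy; that is why hello_file is expected to show up as 6B instead of a 4.0K block. A small standard-library illustration of the two numbers, not taken from the diff (Unix-only; the path is just an example):

    use std::fs;
    use std::io;
    use std::os::unix::fs::MetadataExt;

    fn main() -> io::Result<()> {
        let meta = fs::metadata("/tmp/test_dir/many/hello_file")?;
        // Apparent size: the length of the file contents.
        println!("apparent size: {} bytes", meta.len());
        // Disk usage: allocated 512-byte blocks, typically rounded up to a whole 4K block.
        println!("disk usage:    {} bytes", meta.blocks() * 512);
        Ok(())
    }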
+
+#[cfg_attr(target_os = "windows", ignore)]
+#[test]
+pub fn test_permission_normal() {
+    let command_args = [UNREADABLE_DIR_PATH];
+    let permission_msg =
+        r#"Did not have permissions for all directories (add --print-errors to see errors)"#
+            .trim()
+            .to_string();
+    exact_stderr_test(&command_args, permission_msg);
+}
+
+#[cfg_attr(target_os = "windows", ignore)]
+#[test]
+pub fn test_permission_flag() {
+    // add the flag to CLI
+    let command_args = ["--print-errors", UNREADABLE_DIR_PATH];
+    let permission_msg = format!(
+        "Did not have permissions for directories: {}",
+        UNREADABLE_DIR_PATH
+    );
+    exact_stderr_test(&command_args, permission_msg);
+}
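Both permission tests rely on the fact that an unprivileged process cannot read a mode-0 directory, which is what makes dust emit the warning on stderr. A minimal sketch of that underlying behaviour, assuming the suite is not run as root and using an illustrative path:

    use std::fs;
    use std::io::ErrorKind;

    fn main() {
        // Listing a directory whose mode is 0 fails for a normal user.
        match fs::read_dir("/tmp/unreadable_dir") {
            Err(e) => assert_eq!(e.kind(), ErrorKind::PermissionDenied),
            Ok(_) => println!("readable after all - probably running as root"),
        }
    }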

@@ -9,11 +9,15 @@ use std::str;
 */

 fn build_command<T: AsRef<OsStr>>(command_args: Vec<T>) -> String {
-    let mut a = &mut Command::cargo_bin("dust").unwrap();
+    let mut cmd = &mut Command::cargo_bin("dust").unwrap();
     for p in command_args {
-        a = a.arg(p);
+        cmd = cmd.arg(p);
     }
-    str::from_utf8(&a.unwrap().stdout).unwrap().into()
+    let finished = &cmd.unwrap();
+    let stderr = str::from_utf8(&finished.stderr).unwrap();
+    assert_eq!(stderr, "");
+
+    str::from_utf8(&finished.stdout).unwrap().into()
 }

 // We can at least test the file names are there
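build_command collects stdout by hand and now also asserts that stderr stays empty. The same kind of check can be expressed with assert_cmd's assertion API; a sketch of that alternative style, assuming the predicates crate is available alongside assert_cmd (it is not used by the code in this diff):

    use assert_cmd::Command;
    use predicates::prelude::*;

    #[test]
    fn sketch_substring_assertion() {
        // Build and run the dust binary, then assert on its streams directly.
        Command::cargo_bin("dust")
            .unwrap()
            .args(["-c", "tests/test_dir/"])
            .assert()
            .success()
            .stderr(predicate::str::is_empty())
            .stdout(predicate::str::contains("hello_file"));
    }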
@@ -55,27 +59,46 @@ pub fn test_d_flag_works() {
     assert!(!output.contains("hello_file"));
 }

+#[test]
+pub fn test_threads_flag_works() {
+    let output = build_command(vec!["-T", "1", "tests/test_dir/"]);
+    assert!(output.contains("hello_file"));
+}
+
 #[test]
 pub fn test_d_flag_works_and_still_recurses_down() {
     // We had a bug where running with '-d 1' would stop at the first directory and the code
     // would fail to recurse down
     let output = build_command(vec!["-d", "1", "-f", "-c", "tests/test_dir2/"]);
+    assert!(output.contains("1 ┌── dir"));
     assert!(output.contains("4 ┌─┴ test_dir2"));
 }

-// Check against directories and files whos names are substrings of each other
+// Check against directories and files whose names are substrings of each other
 #[test]
 pub fn test_ignore_dir() {
     let output = build_command(vec!["-c", "-X", "dir_substring", "tests/test_dir2/"]);
     assert!(!output.contains("dir_substring"));
 }

+#[test]
+pub fn test_ignore_all_in_file() {
+    let output = build_command(vec![
+        "-c",
+        "-I",
+        "tests/test_dir_hidden_entries/.hidden_file",
+        "tests/test_dir_hidden_entries/",
+    ]);
+    assert!(output.contains(" test_dir_hidden_entries"));
+    assert!(!output.contains(".secret"));
+}
+
 #[test]
 pub fn test_with_bad_param() {
     let mut cmd = Command::cargo_bin("dust").unwrap();
-    let stderr = cmd.arg("-").unwrap().stderr;
-    let stderr = str::from_utf8(&stderr).unwrap();
-    assert!(stderr.contains("Did not have permissions for all directories"));
+    let result = cmd.arg("bad_place").unwrap();
+    let stderr = str::from_utf8(&result.stderr).unwrap();
+    assert!(stderr.contains("No such file or directory"));
 }

 #[test]
@@ -101,22 +124,6 @@ pub fn test_number_of_files() {
     assert!(output.contains("2 ┌─┴ test_dir"));
 }

-#[cfg_attr(target_os = "windows", ignore)]
-#[test]
-pub fn test_apparent_size() {
-    // Check the '-s' Flag gives us byte sizes and that it doesn't round up to a block
-    let command_args = vec!["-c", "-s", "/tmp/test_dir"];
-    let output = build_command(command_args);
-
-    let apparent_size1 = "6B ├── hello_file│";
-    let apparent_size2 = "0B ┌── a_file";
-    assert!(output.contains(apparent_size1));
-    assert!(output.contains(apparent_size2));
-
-    let incorrect_apparent_size = "4.0K ├── hello_file";
-    assert!(!output.contains(incorrect_apparent_size));
-}
-
 #[test]
 pub fn test_show_files_by_type() {
     // Check we can list files by type
@@ -129,18 +136,75 @@ pub fn test_show_files_by_type() {
 }

 #[test]
-pub fn test_show_files_by_regex() {
+#[cfg(target_family = "unix")]
+pub fn test_show_files_only() {
+    let output = build_command(vec!["-c", "-F", "tests/test_dir"]);
+    assert!(output.contains("a_file"));
+    assert!(output.contains("hello_file"));
+    assert!(!output.contains("many"));
+}
+
+#[test]
+pub fn test_output_skip_total() {
+    let output = build_command(vec![
+        "--skip-total",
+        "tests/test_dir/many/hello_file",
+        "tests/test_dir/many/a_file",
+    ]);
+    assert!(output.contains("hello_file"));
+    assert!(!output.contains("(total)"));
+}
+
+#[test]
+pub fn test_output_screen_reader() {
+    let output = build_command(vec!["--screen-reader", "-c", "tests/test_dir/"]);
+    println!("{}", output);
+    assert!(output.contains("test_dir 0"));
+    assert!(output.contains("many 1"));
+    assert!(output.contains("hello_file 2"));
+    assert!(output.contains("a_file 2"));
+
+    // Verify no 'symbols' reported by screen reader
+    assert!(!output.contains('│'));
+
+    for block in ['█', '▓', '▒', '░'] {
+        assert!(!output.contains(block));
+    }
+}
+
+#[test]
+pub fn test_show_files_by_regex_match_lots() {
     // Check we can see '.rs' files in the tests directory
     let output = build_command(vec!["-c", "-e", "\\.rs$", "tests"]);
     assert!(output.contains(" ┌─┴ tests"));
     assert!(!output.contains("0B ┌── tests"));
     assert!(!output.contains("0B ┌─┴ tests"));
+}
+
+#[test]
+pub fn test_show_files_by_regex_match_nothing() {
     // Check there are no files named: '.match_nothing' in the tests directory
     let output = build_command(vec!["-c", "-e", "match_nothing$", "tests"]);
     assert!(output.contains("0B ┌── tests"));
 }
+
+#[test]
+pub fn test_show_files_by_regex_match_multiple() {
+    let output = build_command(vec![
+        "-c",
+        "-e",
+        "test_dir_hidden",
+        "-e",
+        "test_dir2",
+        "-n",
+        "100",
+        "tests",
+    ]);
+    assert!(output.contains("test_dir2"));
+    assert!(output.contains("test_dir_hidden"));
+    assert!(!output.contains("many")); // We do not find the 'many' folder in the 'test_dir' folder
+}

 #[test]
 pub fn test_show_files_by_invert_regex() {
     let output = build_command(vec!["-c", "-f", "-v", "e", "tests/test_dir2"]);
@@ -155,3 +219,38 @@ pub fn test_show_files_by_invert_regex() {
     let output = build_command(vec!["-c", "-f", "-v", "match_nothing$", "tests/test_dir2"]);
     assert!(output.contains("4 ┌─┴ test_dir2"));
 }
+
+#[test]
+pub fn test_show_files_by_invert_regex_match_multiple() {
+    // We ignore test_dir2 & test_dir_unicode, leaving the test_dir folder
+    // which has the 'many' folder inside
+    let output = build_command(vec![
+        "-c",
+        "-v",
+        "test_dir2",
+        "-v",
+        "test_dir_unicode",
+        "-n",
+        "100",
+        "tests",
+    ]);
+    assert!(!output.contains("test_dir2"));
+    assert!(!output.contains("test_dir_unicode"));
+    assert!(output.contains("many"));
+}
+
+#[test]
+pub fn test_no_color() {
+    let output = build_command(vec!["-c"]);
+    // Red is 31
+    assert!(!output.contains("\x1B[31m"));
+    assert!(!output.contains("\x1B[0m"));
+}
+
+#[test]
+pub fn test_force_color() {
+    let output = build_command(vec!["-C"]);
+    // Red is 31
+    assert!(output.contains("\x1B[31m"));
+    assert!(output.contains("\x1B[0m"));
+}

@@ -1,41 +1,15 @@
 use assert_cmd::Command;
-use std::cmp::max;
 use std::fs::File;
 use std::io::Write;
 use std::path::PathBuf;
 use std::str;

-use terminal_size::{terminal_size, Height, Width};
-use unicode_width::UnicodeWidthStr;
-
 use tempfile::Builder;
 use tempfile::TempDir;

 // File sizes differ on both platform and on the format of the disk.
 // Windows: `ln` is not usually an available command; creation of symbolic links requires special enhanced permissions

-fn get_width_of_terminal() -> u16 {
-    if let Some((Width(w), Height(_h))) = terminal_size() {
-        max(w, 80)
-    } else {
-        80
-    }
-}
-
-// Mac test runners create tmp files with very long names, hence it may be shortened in the output
-fn get_file_name(name: String) -> String {
-    let terminal_plus_buffer = (get_width_of_terminal() - 14) as usize;
-    if UnicodeWidthStr::width(&*name) > terminal_plus_buffer {
-        let trimmed_name = name
-            .chars()
-            .take(terminal_plus_buffer - 2)
-            .collect::<String>();
-        trimmed_name + ".."
-    } else {
-        name
-    }
-}
-
 fn build_temp_file(dir: &TempDir) -> PathBuf {
     let file_path = dir.path().join("notes.txt");
     let mut file = File::create(&file_path).unwrap();
@@ -43,6 +17,18 @@ fn build_temp_file(dir: &TempDir) -> PathBuf {
     file_path
 }

+fn link_it(link_path: PathBuf, file_path_s: &str, is_soft: bool) -> String {
+    let link_name_s = link_path.to_str().unwrap();
+    let mut c = Command::new("ln");
+    if is_soft {
+        c.arg("-s");
+    }
+    c.arg(file_path_s);
+    c.arg(link_name_s);
+    assert!(c.output().is_ok());
+    link_name_s.into()
+}
+
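The new link_it helper shells out to `ln`, which mirrors what a user would type but is also why these tests stay ignored on Windows. For comparison, the same links can be created from the standard library; a hedged sketch, not part of the diff (Unix-only for the symlink case, and the function name is made up for illustration):

    use std::fs;
    use std::io;
    use std::path::Path;

    // Illustrative std-library alternative to the ln-based link_it above.
    #[cfg(unix)]
    fn link_it_std(link_path: &Path, target: &Path, is_soft: bool) -> io::Result<()> {
        if is_soft {
            std::os::unix::fs::symlink(target, link_path) // like `ln -s target link`
        } else {
            fs::hard_link(target, link_path) // like `ln target link`
        }
    }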
 #[cfg_attr(target_os = "windows", ignore)]
 #[test]
 pub fn test_soft_sym_link() {
@@ -52,20 +38,18 @@ pub fn test_soft_sym_link() {
     let file_path_s = file.to_str().unwrap();

     let link_name = dir.path().join("the_link");
-    let link_name_s = link_name.to_str().unwrap();
-    let c = Command::new("ln")
-        .arg("-s")
-        .arg(file_path_s)
-        .arg(link_name_s)
-        .output();
-    assert!(c.is_ok());
+    let link_name_s = link_it(link_name, file_path_s, true);

-    let c = format!(" ├── {}", get_file_name(link_name_s.into()));
-    let b = format!(" ┌── {}", get_file_name(file_path_s.into()));
+    let c = format!(" ├── {}", link_name_s);
+    let b = format!(" ┌── {}", file_path_s);
     let a = format!("─┴ {}", dir_s);

     let mut cmd = Command::cargo_bin("dust").unwrap();
-    let output = cmd.arg("-p").arg("-c").arg("-s").arg(dir_s).unwrap().stdout;
+    // Mac test runners create long filenames in tmp directories
+    let output = cmd
+        .args(["-p", "-c", "-s", "-w", "999", dir_s])
+        .unwrap()
+        .stdout;
+
     let output = str::from_utf8(&output).unwrap();

@@ -83,25 +67,48 @@ pub fn test_hard_sym_link() {
     let file_path_s = file.to_str().unwrap();

     let link_name = dir.path().join("the_link");
-    let link_name_s = link_name.to_str().unwrap();
-    let c = Command::new("ln")
-        .arg(file_path_s)
-        .arg(link_name_s)
-        .output();
-    assert!(c.is_ok());
+    link_it(link_name, file_path_s, false);

-    let link_output = format!(" ┌── {}", get_file_name(link_name_s.into()));
-    let file_output = format!(" ┌── {}", get_file_name(file_path_s.into()));
+    let file_output = format!(" ┌── {}", file_path_s);
     let dirs_output = format!("─┴ {}", dir_s);

     let mut cmd = Command::cargo_bin("dust").unwrap();
-    let output = cmd.arg("-p").arg("-c").arg(dir_s).unwrap().stdout;
-    // Because this is a hard link the file and hard link look identical. Therefore
-    // we cannot guarantee which version will appear first.
+    // Mac test runners create long filenames in tmp directories
+    let output = cmd.args(["-p", "-c", "-w", "999", dir_s]).unwrap().stdout;
+
+    // The link should not appear in the output because multiple inodes are now ordered
+    // then filtered.
     let output = str::from_utf8(&output).unwrap();
     assert!(output.contains(dirs_output.as_str()));
-    assert!(output.contains(link_output.as_str()) || output.contains(file_output.as_str()));
+    assert!(output.contains(file_output.as_str()));
+}
+
+#[cfg_attr(target_os = "windows", ignore)]
+#[test]
+pub fn test_hard_sym_link_no_dup_multi_arg() {
+    let dir = Builder::new().tempdir().unwrap();
+    let dir_link = Builder::new().tempdir().unwrap();
+    let file = build_temp_file(&dir);
+    let dir_s = dir.path().to_str().unwrap();
+    let dir_link_s = dir_link.path().to_str().unwrap();
+    let file_path_s = file.to_str().unwrap();
+
+    let link_name = dir_link.path().join("the_link");
+    let link_name_s = link_it(link_name, file_path_s, false);
+
+    let mut cmd = Command::cargo_bin("dust").unwrap();
+
+    // Mac test runners create long filenames in tmp directories
+    let output = cmd
+        .args(["-p", "-c", "-w", "999", "-b", dir_link_s, dir_s])
+        .unwrap()
+        .stdout;
+
+    // The link or the file should appear but not both
+    let output = str::from_utf8(&output).unwrap();
+    let has_file_only = output.contains(file_path_s) && !output.contains(&link_name_s);
+    let has_link_only = !output.contains(file_path_s) && output.contains(&link_name_s);
+    assert!(has_file_only || has_link_only);
 }

 #[cfg_attr(target_os = "windows", ignore)]
@@ -111,17 +118,10 @@ pub fn test_recursive_sym_link() {
     let dir_s = dir.path().to_str().unwrap();

     let link_name = dir.path().join("the_link");
-    let link_name_s = link_name.to_str().unwrap();
+    let link_name_s = link_it(link_name, dir_s, true);

-    let c = Command::new("ln")
-        .arg("-s")
-        .arg(dir_s)
-        .arg(link_name_s)
-        .output();
-    assert!(c.is_ok());
-
     let a = format!("─┬ {}", dir_s);
-    let b = format!(" └── {}", get_file_name(link_name_s.into()));
+    let b = format!(" └── {}", link_name_s);

     let mut cmd = Command::cargo_bin("dust").unwrap();
     let output = cmd
@@ -129,6 +129,8 @@ pub fn test_recursive_sym_link() {
         .arg("-c")
         .arg("-r")
         .arg("-s")
+        .arg("-w")
+        .arg("999")
         .arg(dir_s)
         .unwrap()
         .stdout;