Mirror of https://github.com/bootandy/dust.git (synced 2025-12-07 05:10:40 -08:00)

Compare commits: same_dir_n ... aarch_buil (3 commits)

- 42b13da321
- 11db301347
- e52898186f
13 changes: .github/workflows/CICD.yml (vendored)
@@ -45,11 +45,6 @@ jobs:
override: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy
- name: Install wget for Windows
if: matrix.job.os == 'windows-latest'
run: choco install wget --no-progress
- name: typos-action
uses: crate-ci/typos@v1.28.4
- name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1
@@ -215,7 +210,7 @@ jobs:
echo set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
# * strip executable?
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; aarch64-unknown-linux-musl) STRIP="" ;; armv7-unknown-linux-musleabi) STRIP="" ;; arm-unknown-linux-musleabi) STRIP="" ;; esac;
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; aarch64-unknown-linux-musl) STRIP="aarch64-linux-gnueabihf-strip" ;; armv7-unknown-linux-musleabi) STRIP="" ;; arm-unknown-linux-musleabi) STRIP="" ;; esac;

echo set-output name=STRIP::${STRIP}
@@ -252,13 +247,13 @@ jobs:
with:
command: install
args: cargo-deb
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl' || matrix.job.target == 'aarch64-unknown-linux-musl'
- name: Build deb
uses: actions-rs/cargo@v1
with:
command: deb
args: --no-build --target=${{ matrix.job.target }}
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl' || matrix.job.target == 'aarch64-unknown-linux-musl'
- name: Test
uses: actions-rs/cargo@v1
with:
@@ -275,7 +270,7 @@ jobs:
with:
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}.deb
path: target/${{ matrix.job.target }}/debian
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl' || matrix.job.target == 'aarch64-unknown-linux-musl'
- name: Package
shell: bash
run: |
@@ -1,11 +0,0 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: cargo-check
stages: [commit]
- id: fmt
stages: [commit]
- id: clippy
args: [--all-targets, --all-features]
stages: [commit]
680 changes: Cargo.lock (generated)
File diff suppressed because it is too large
27 changes: Cargo.toml
@@ -1,7 +1,7 @@
[package]
name = "du-dust"
description = "A more intuitive version of du"
version = "1.1.1"
version = "0.8.6"
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
edition = "2021"
readme = "README.md"
@@ -28,7 +28,7 @@ strip = true

[dependencies]
ansi_term = "0.12"
clap = "4.4"
clap = "3.2.17"
lscolors = "0.13"
terminal_size = "0.2"
unicode-width = "0.1"
@@ -38,27 +38,20 @@ stfu8 = "0.2"
regex = "1"
config-file = "0.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
directories = "4"
sysinfo = "0.27"
ctrlc = "3.4"
chrono = "0.4"

[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
portable-atomic = "1.4"

[target.'cfg(windows)'.dependencies]
winapi-util = "0.1"
filesize = "0.2.0"

[dev-dependencies]
assert_cmd = "2"
tempfile = "=3"

[build-dependencies]
clap = "4.4"
clap_complete = "4.4"
clap_mangen = "0.2"
clap = "3.2.17"
clap_complete = "3.2.4"
clap_mangen = "0.1"

[[test]]
name = "integration"
@@ -86,16 +79,6 @@ assets = [
"usr/share/doc/du-dust/README",
"644",
],
[
"man-page/dust.1",
"usr/share/man/man1/dust.1",
"644",
],
[
"completions/dust.bash",
"usr/share/bash-completion/completions/dust",
"644",
],
]
extended-description = """\
Dust is meant to give you an instant overview of which directories are using
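The clap pins above are the heart of this compare: one side of the diff is on clap 4.4, while the aarch_buil side is still on clap 3.2.17, which is why the src/cli.rs hunk further down trades `Command<'static>`, `takes_value` and `multiple_occurrences` for `Command`, `num_args` and `ArgAction`. Below is a minimal, hedged sketch of how two of dust's flags would be declared against the clap 4 builder API, with the clap 3 spellings noted in comments; it illustrates the API difference and is not dust's actual cli.rs.

```rust
// Sketch only: clap 4 declarations for two dust-like flags.
// clap 3 equivalents are noted inline.
use clap::{value_parser, Arg, ArgAction, Command};

fn build_cli() -> Command {
    // clap 3: `pub fn build_cli() -> Command<'static>`
    Command::new("Dust")
        .about("Like du but more intuitive")
        .arg(
            Arg::new("depth")
                .short('d')
                .long("depth")
                .value_name("DEPTH")
                .value_parser(value_parser!(usize)) // typed parsing (clap 4 style)
                .num_args(1)                        // clap 3: .takes_value(true)
                .help("Depth to show"),
        )
        .arg(
            Arg::new("ignore_directory")
                .short('X')
                .long("ignore-directory")
                .value_name("PATH")
                .action(ArgAction::Append)          // clap 3: .multiple_occurrences(true)
                .help("Exclude any file or directory with this path"),
        )
}

fn main() {
    let matches = build_cli().get_matches();
    if let Some(depth) = matches.get_one::<usize>("depth") {
        println!("depth = {depth}");
    }
}
```

The same shape repeats throughout the cli.rs hunk at the bottom of this compare: flags gain `.action(clap::ArgAction::SetTrue)` or `.action(clap::ArgAction::Append)` on the clap 4 side, while the clap 3 side keeps `.takes_value(true)`, `.number_of_values(1)` and `.multiple_occurrences(true)`.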
27 changes: README.md
@@ -25,30 +25,16 @@ Because I want an easy way to see where my disk is being used.

#### 🍺 Homebrew (Linux)

- `brew install dust`

#### [Snap](https://ubuntu.com/core/services/guide/snaps-intro) Ubuntu and [supported systems](https://snapcraft.io/docs/installing-snapd)

- `snap install dust`

Note: `dust` installed through `snap` can only access files stored in the `/home` directory. See daniejstriata/dust-snap#2 for more information.
- `brew tap tgotwig/linux-dust && brew install dust`

#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)

- `pacstall -I dust-bin`

#### Anaconda (conda-forge)

- `conda install -c conda-forge dust`

#### [deb-get](https://github.com/wimpysworld/deb-get) (Debian/Ubuntu)

- `deb-get install du-dust`

#### [x-cmd](https://www.x-cmd.com/pkg/#VPContent)

- `x env use dust`

#### Windows:

- `scoop install dust`
@@ -82,14 +68,13 @@ Usage: dust -d 3 (Shows 3 levels of subdirectories)
Usage: dust -D (Show only directories (eg dust -D))
Usage: dust -F (Show only files - finds your largest files)
Usage: dust -r (reverse order of output)
Usage: dust -o si/b/kb/kib/mb/mib/gb/gib (si - prints sizes in powers of 1000. Others print size in that format).
Usage: dust -H (si print sizes in powers of 1000 instead of 1024)
Usage: dust -X ignore (ignore all files and directories with the name 'ignore')
Usage: dust -x (Only show directories on the same filesystem)
Usage: dust -b (Do not show percentages or draw ASCII bars)
Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen)
Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen])
Usage: dust -i (Do not show hidden files)
Usage: dust -c (No colors [monochrome])
Usage: dust -C (Force colors)
Usage: dust -f (Count files instead of diskspace)
Usage: dust -t (Group by filetype)
Usage: dust -z 10M (min-size, Only include files larger than 10M)
@@ -98,12 +83,8 @@ Usage: dust -v regex (Exclude files matching this regex (eg dust -v "\.png$" wou
Usage: dust -L (dereference-links - Treat sym links as directories and go into them)
Usage: dust -P (Disable the progress indicator)
Usage: dust -R (For screen readers. Removes bars/symbols. Adds new column: depth level. (May want to use -p for full path too))
Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack overflow' (default allocation: low memory=1048576, high memory=1073741824)"),
Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
Usage: dust -z 4000000 (Exclude files below size 4MB)
```
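One flag in the usage list above benefits from a concrete illustration: `dust --files0-from=FILE` reads NUL-terminated file paths, the format `find . -type f -print0` produces, and `-` means read the list from stdin (for example `find . -type f -print0 | dust --files0-from=-`). As a rough sketch of that input format only, and not of dust's own reader, a consumer looks like this:

```rust
// Sketch of consuming a NUL-terminated path list such as `find -print0` emits.
// Illustrates the --files0-from input format; this is not dust's code.
use std::io::{self, Read};

fn main() -> io::Result<()> {
    let mut buf = Vec::new();
    io::stdin().read_to_end(&mut buf)?; // `--files0-from=-` means "read stdin"

    // Paths are separated by NUL bytes, so names containing spaces or
    // newlines pass through unambiguously.
    for raw in buf.split(|&b| b == 0).filter(|p| !p.is_empty()) {
        println!("{}", String::from_utf8_lossy(raw));
    }
    Ok(())
}
```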
## Config file

@@ -1,21 +1,14 @@
# ----------- To do a release ---------

# ----------- Pre release ---------
# Compare times of runs to check no drastic slow down:
# hyperfine 'target/release/dust /home/andy'
# hyperfine 'dust /home/andy'

# ----------- Release ---------
# inc version in cargo.toml
# cargo build --release
# commit changed files
# merge to master in github
# time target/release/dust ~/dev
# time dust ~dev

# edit version in cargo.toml
# tag a commit and push (increment version in Cargo.toml first):
# git tag v0.4.5
# git push origin v0.4.5

# cargo publish to put it in crates.io

# Optional: To install locally

# To install locally [Do before pushing it]
#cargo install --path .
@@ -14,40 +14,25 @@ _dust() {
|
||||
fi
|
||||
|
||||
local context curcontext="$curcontext" state line
|
||||
_arguments "${_arguments_options[@]}" : \
|
||||
'-d+[Depth to show]:DEPTH: ' \
|
||||
'--depth=[Depth to show]:DEPTH: ' \
|
||||
'-T+[Number of threads to use]: : ' \
|
||||
'--threads=[Number of threads to use]: : ' \
|
||||
'--config=[Specify a config file to use]:FILE:_files' \
|
||||
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
|
||||
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
|
||||
'*-X+[Exclude any file or directory with this path]:PATH:_files' \
|
||||
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \
|
||||
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
|
||||
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
|
||||
'-z+[Minimum size file to include in output]:MIN_SIZE: ' \
|
||||
'--min-size=[Minimum size file to include in output]:MIN_SIZE: ' \
|
||||
'(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \
|
||||
'(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \
|
||||
'(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \
|
||||
'(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \
|
||||
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \
|
||||
'--terminal_width=[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \
|
||||
'-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
|
||||
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
|
||||
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
|
||||
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
|
||||
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => \[curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)]: : ' \
|
||||
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => \[curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)]: : ' \
|
||||
'-A+[just like -mtime, but based on file access time]: : ' \
|
||||
'--atime=[just like -mtime, but based on file access time]: : ' \
|
||||
'-y+[just like -mtime, but based on file change time]: : ' \
|
||||
'--ctime=[just like -mtime, but based on file change time]: : ' \
|
||||
'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]: :_files' \
|
||||
'*--collapse=[Keep these directories collapsed]: :_files' \
|
||||
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
|
||||
'--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
|
||||
_arguments "${_arguments_options[@]}" \
|
||||
'-d+[Depth to show]: : ' \
|
||||
'--depth=[Depth to show]: : ' \
|
||||
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]: : ' \
|
||||
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]: : ' \
|
||||
'*-X+[Exclude any file or directory with this name]: : ' \
|
||||
'*--ignore-directory=[Exclude any file or directory with this name]: : ' \
|
||||
'-z+[Minimum size file to include in output]: : ' \
|
||||
'--min-size=[Minimum size file to include in output]: : ' \
|
||||
'(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ]: : ' \
|
||||
'(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ]: : ' \
|
||||
'(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type: -e "\\.png$" ]: : ' \
|
||||
'(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type: -e "\\.png$" ]: : ' \
|
||||
'-w+[Specify width of output overriding the auto detection of terminal width]: : ' \
|
||||
'--terminal_width=[Specify width of output overriding the auto detection of terminal width]: : ' \
|
||||
'-h[Print help information]' \
|
||||
'--help[Print help information]' \
|
||||
'-V[Print version information]' \
|
||||
'--version[Print version information]' \
|
||||
'-p[Subdirectories will not have their path shortened]' \
|
||||
'--full-paths[Subdirectories will not have their path shortened]' \
|
||||
'-L[dereference sym links - Treat sym links as directories and go into them]' \
|
||||
@@ -58,16 +43,14 @@ _dust() {
|
||||
'--apparent-size[Use file length instead of blocks]' \
|
||||
'-r[Print tree upside down (biggest highest)]' \
|
||||
'--reverse[Print tree upside down (biggest highest)]' \
|
||||
'-c[No colors will be printed (Useful for commands like\: watch)]' \
|
||||
'--no-colors[No colors will be printed (Useful for commands like\: watch)]' \
|
||||
'-C[Force colors print]' \
|
||||
'--force-colors[Force colors print]' \
|
||||
'-c[No colors will be printed (Useful for commands like: watch)]' \
|
||||
'--no-colors[No colors will be printed (Useful for commands like: watch)]' \
|
||||
'-b[No percent bars or percentages will be displayed]' \
|
||||
'--no-percent-bars[No percent bars or percentages will be displayed]' \
|
||||
'-B[percent bars moved to right side of screen]' \
|
||||
'--bars-on-right[percent bars moved to right side of screen]' \
|
||||
'-R[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
|
||||
'--screen-reader[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
|
||||
'-R[For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)]' \
|
||||
'--screen-reader[For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)]' \
|
||||
'--skip-total[No total row will be displayed]' \
|
||||
'-f[Directory '\''size'\'' is number of child files instead of disk size]' \
|
||||
'--filecount[Directory '\''size'\'' is number of child files instead of disk size]' \
|
||||
@@ -75,20 +58,15 @@ _dust() {
|
||||
'--ignore_hidden[Do not display hidden files]' \
|
||||
'(-d --depth -D --only-dir)-t[show only these file types]' \
|
||||
'(-d --depth -D --only-dir)--file_types[show only these file types]' \
|
||||
'-H[print sizes in powers of 1000 (e.g., 1.1G)]' \
|
||||
'--si[print sizes in powers of 1000 (e.g., 1.1G)]' \
|
||||
'-P[Disable the progress indication.]' \
|
||||
'--no-progress[Disable the progress indication.]' \
|
||||
'--print-errors[Print path with errors.]' \
|
||||
'(-F --only-file -t --file_types)-D[Only directories will be displayed.]' \
|
||||
'(-F --only-file -t --file_types)--only-dir[Only directories will be displayed.]' \
|
||||
'(-D --only-dir)-F[Only files will be displayed. (Finds your largest files)]' \
|
||||
'(-D --only-dir)--only-file[Only files will be displayed. (Finds your largest files)]' \
|
||||
'-j[Output the directory tree as json to the current directory]' \
|
||||
'--output-json[Output the directory tree as json to the current directory]' \
|
||||
'-h[Print help]' \
|
||||
'--help[Print help]' \
|
||||
'-V[Print version]' \
|
||||
'--version[Print version]' \
|
||||
'*::params:_files' \
|
||||
'*::inputs:' \
|
||||
&& ret=0
|
||||
}
|
||||
|
||||
@@ -98,8 +76,4 @@ _dust_commands() {
|
||||
_describe -t commands 'dust commands' commands "$@"
|
||||
}
|
||||
|
||||
if [ "$funcstack[1]" = "_dust" ]; then
|
||||
_dust "$@"
|
||||
else
|
||||
compdef _dust dust
|
||||
fi
|
||||
_dust "$@"
|
||||
|
||||
@@ -23,15 +23,10 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
|
||||
'dust' {
|
||||
[CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Depth to show')
|
||||
[CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Depth to show')
|
||||
[CompletionResult]::new('-T', 'T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
|
||||
[CompletionResult]::new('--threads', 'threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
|
||||
[CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'Specify a config file to use')
|
||||
[CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
|
||||
[CompletionResult]::new('--number-of-lines', 'number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
|
||||
[CompletionResult]::new('-X', 'X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
|
||||
[CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
|
||||
[CompletionResult]::new('-I', 'I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
|
||||
[CompletionResult]::new('--ignore-all-in-file', 'ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
|
||||
[CompletionResult]::new('-X', 'X', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
|
||||
[CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
|
||||
[CompletionResult]::new('-z', 'z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
|
||||
[CompletionResult]::new('--min-size', 'min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
|
||||
[CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
|
||||
@@ -40,23 +35,13 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
|
||||
[CompletionResult]::new('--filter', 'filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
|
||||
[CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
|
||||
[CompletionResult]::new('--terminal_width', 'terminal_width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
|
||||
[CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
|
||||
[CompletionResult]::new('--output-format', 'output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
|
||||
[CompletionResult]::new('-S', 'S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
|
||||
[CompletionResult]::new('--stack-size', 'stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
|
||||
[CompletionResult]::new('-M', 'M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)')
|
||||
[CompletionResult]::new('--mtime', 'mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)')
|
||||
[CompletionResult]::new('-A', 'A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
|
||||
[CompletionResult]::new('--atime', 'atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
|
||||
[CompletionResult]::new('-y', 'y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
|
||||
[CompletionResult]::new('--ctime', 'ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
|
||||
[CompletionResult]::new('--files0-from', 'files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
|
||||
[CompletionResult]::new('--collapse', 'collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
|
||||
[CompletionResult]::new('-m', 'm', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
|
||||
[CompletionResult]::new('--filetime', 'filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
|
||||
[CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help information')
|
||||
[CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help information')
|
||||
[CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Print version information')
|
||||
[CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version information')
|
||||
[CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
|
||||
[CompletionResult]::new('--full-paths', 'full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
|
||||
[CompletionResult]::new('-L', 'L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
|
||||
[CompletionResult]::new('-L', 'L', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
|
||||
[CompletionResult]::new('--dereference-links', 'dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
|
||||
[CompletionResult]::new('-x', 'x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
|
||||
[CompletionResult]::new('--limit-filesystem', 'limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
|
||||
@@ -66,13 +51,11 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
|
||||
[CompletionResult]::new('--reverse', 'reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
|
||||
[CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
|
||||
[CompletionResult]::new('--no-colors', 'no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
|
||||
[CompletionResult]::new('-C', 'C ', [CompletionResultType]::ParameterName, 'Force colors print')
|
||||
[CompletionResult]::new('--force-colors', 'force-colors', [CompletionResultType]::ParameterName, 'Force colors print')
|
||||
[CompletionResult]::new('-b', 'b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
|
||||
[CompletionResult]::new('--no-percent-bars', 'no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
|
||||
[CompletionResult]::new('-B', 'B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
|
||||
[CompletionResult]::new('-B', 'B', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
|
||||
[CompletionResult]::new('--bars-on-right', 'bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
|
||||
[CompletionResult]::new('-R', 'R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
|
||||
[CompletionResult]::new('-R', 'R', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
|
||||
[CompletionResult]::new('--screen-reader', 'screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
|
||||
[CompletionResult]::new('--skip-total', 'skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
|
||||
[CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
|
||||
@@ -81,19 +64,14 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
|
||||
[CompletionResult]::new('--ignore_hidden', 'ignore_hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
|
||||
[CompletionResult]::new('-t', 't', [CompletionResultType]::ParameterName, 'show only these file types')
|
||||
[CompletionResult]::new('--file_types', 'file_types', [CompletionResultType]::ParameterName, 'show only these file types')
|
||||
[CompletionResult]::new('-P', 'P ', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
|
||||
[CompletionResult]::new('-H', 'H', [CompletionResultType]::ParameterName, 'print sizes in powers of 1000 (e.g., 1.1G)')
|
||||
[CompletionResult]::new('--si', 'si', [CompletionResultType]::ParameterName, 'print sizes in powers of 1000 (e.g., 1.1G)')
|
||||
[CompletionResult]::new('-P', 'P', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
|
||||
[CompletionResult]::new('--no-progress', 'no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
|
||||
[CompletionResult]::new('--print-errors', 'print-errors', [CompletionResultType]::ParameterName, 'Print path with errors.')
|
||||
[CompletionResult]::new('-D', 'D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
|
||||
[CompletionResult]::new('-D', 'D', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
|
||||
[CompletionResult]::new('--only-dir', 'only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
|
||||
[CompletionResult]::new('-F', 'F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
|
||||
[CompletionResult]::new('-F', 'F', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
|
||||
[CompletionResult]::new('--only-file', 'only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
|
||||
[CompletionResult]::new('-j', 'j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
|
||||
[CompletionResult]::new('--output-json', 'output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
|
||||
[CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help')
|
||||
[CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help')
|
||||
[CompletionResult]::new('-V', 'V ', [CompletionResultType]::ParameterName, 'Print version')
|
||||
[CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version')
|
||||
break
|
||||
}
|
||||
})
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
_dust() {
|
||||
local i cur prev opts cmd
|
||||
local i cur prev opts cmds
|
||||
COMPREPLY=()
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
@@ -8,8 +8,8 @@ _dust() {
|
||||
|
||||
for i in ${COMP_WORDS[@]}
|
||||
do
|
||||
case "${cmd},${i}" in
|
||||
",$1")
|
||||
case "${i}" in
|
||||
"$1")
|
||||
cmd="dust"
|
||||
;;
|
||||
*)
|
||||
@@ -19,7 +19,7 @@ _dust() {
|
||||
|
||||
case "${cmd}" in
|
||||
dust)
|
||||
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
|
||||
opts="-h -V -d -n -p -X -L -x -s -r -c -b -B -z -R -f -i -v -e -t -w -H -P -D -F --help --version --depth --number-of-lines --full-paths --ignore-directory --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --si --no-progress --only-dir --only-file <inputs>..."
|
||||
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
|
||||
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
|
||||
return 0
|
||||
@@ -33,29 +33,6 @@ _dust() {
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--threads)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-T)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--config)
|
||||
local oldifs
|
||||
if [ -n "${IFS+x}" ]; then
|
||||
oldifs="$IFS"
|
||||
fi
|
||||
IFS=$'\n'
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
if [ -n "${oldifs+x}" ]; then
|
||||
IFS="$oldifs"
|
||||
fi
|
||||
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
|
||||
compopt -o filenames
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
--number-of-lines)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
@@ -72,36 +49,6 @@ _dust() {
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--ignore-all-in-file)
|
||||
local oldifs
|
||||
if [ -n "${IFS+x}" ]; then
|
||||
oldifs="$IFS"
|
||||
fi
|
||||
IFS=$'\n'
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
if [ -n "${oldifs+x}" ]; then
|
||||
IFS="$oldifs"
|
||||
fi
|
||||
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
|
||||
compopt -o filenames
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
-I)
|
||||
local oldifs
|
||||
if [ -n "${IFS+x}" ]; then
|
||||
oldifs="$IFS"
|
||||
fi
|
||||
IFS=$'\n'
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
if [ -n "${oldifs+x}" ]; then
|
||||
IFS="$oldifs"
|
||||
fi
|
||||
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
|
||||
compopt -o filenames
|
||||
fi
|
||||
return 0
|
||||
;;
|
||||
--min-size)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
@@ -134,62 +81,6 @@ _dust() {
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--output-format)
|
||||
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-o)
|
||||
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--stack-size)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-S)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--mtime)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-M)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--atime)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-A)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--ctime)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-y)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--files0-from)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--collapse)
|
||||
COMPREPLY=($(compgen -f "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
--filetime)
|
||||
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
-m)
|
||||
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=()
|
||||
;;
|
||||
@@ -200,8 +91,4 @@ _dust() {
|
||||
esac
|
||||
}
|
||||
|
||||
if [[ "${BASH_VERSINFO[0]}" -eq 4 && "${BASH_VERSINFO[1]}" -ge 4 || "${BASH_VERSINFO[0]}" -gt 4 ]]; then
|
||||
complete -F _dust -o nosort -o bashdefault -o default dust
|
||||
else
|
||||
complete -F _dust -o bashdefault -o default dust
|
||||
fi
|
||||
complete -F _dust -o bashdefault -o default dust
|
||||
|
||||
@@ -20,15 +20,10 @@ set edit:completion:arg-completer[dust] = {|@words|
|
||||
&'dust'= {
|
||||
cand -d 'Depth to show'
|
||||
cand --depth 'Depth to show'
|
||||
cand -T 'Number of threads to use'
|
||||
cand --threads 'Number of threads to use'
|
||||
cand --config 'Specify a config file to use'
|
||||
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
|
||||
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
|
||||
cand -X 'Exclude any file or directory with this path'
|
||||
cand --ignore-directory 'Exclude any file or directory with this path'
|
||||
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
|
||||
cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
|
||||
cand -X 'Exclude any file or directory with this name'
|
||||
cand --ignore-directory 'Exclude any file or directory with this name'
|
||||
cand -z 'Minimum size file to include in output'
|
||||
cand --min-size 'Minimum size file to include in output'
|
||||
cand -v 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" '
|
||||
@@ -37,20 +32,10 @@ set edit:completion:arg-completer[dust] = {|@words|
|
||||
cand --filter 'Only include filepaths matching this regex. For png files type: -e "\.png$" '
|
||||
cand -w 'Specify width of output overriding the auto detection of terminal width'
|
||||
cand --terminal_width 'Specify width of output overriding the auto detection of terminal width'
|
||||
cand -o 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.'
|
||||
cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.'
|
||||
cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
|
||||
cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
|
||||
cand -M '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)'
|
||||
cand --mtime '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)'
|
||||
cand -A 'just like -mtime, but based on file access time'
|
||||
cand --atime 'just like -mtime, but based on file access time'
|
||||
cand -y 'just like -mtime, but based on file change time'
|
||||
cand --ctime 'just like -mtime, but based on file change time'
|
||||
cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
|
||||
cand --collapse 'Keep these directories collapsed'
|
||||
cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
|
||||
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
|
||||
cand -h 'Print help information'
|
||||
cand --help 'Print help information'
|
||||
cand -V 'Print version information'
|
||||
cand --version 'Print version information'
|
||||
cand -p 'Subdirectories will not have their path shortened'
|
||||
cand --full-paths 'Subdirectories will not have their path shortened'
|
||||
cand -L 'dereference sym links - Treat sym links as directories and go into them'
|
||||
@@ -63,8 +48,6 @@ set edit:completion:arg-completer[dust] = {|@words|
|
||||
cand --reverse 'Print tree upside down (biggest highest)'
|
||||
cand -c 'No colors will be printed (Useful for commands like: watch)'
|
||||
cand --no-colors 'No colors will be printed (Useful for commands like: watch)'
|
||||
cand -C 'Force colors print'
|
||||
cand --force-colors 'Force colors print'
|
||||
cand -b 'No percent bars or percentages will be displayed'
|
||||
cand --no-percent-bars 'No percent bars or percentages will be displayed'
|
||||
cand -B 'percent bars moved to right side of screen'
|
||||
@@ -78,19 +61,14 @@ set edit:completion:arg-completer[dust] = {|@words|
|
||||
cand --ignore_hidden 'Do not display hidden files'
|
||||
cand -t 'show only these file types'
|
||||
cand --file_types 'show only these file types'
|
||||
cand -H 'print sizes in powers of 1000 (e.g., 1.1G)'
|
||||
cand --si 'print sizes in powers of 1000 (e.g., 1.1G)'
|
||||
cand -P 'Disable the progress indication.'
|
||||
cand --no-progress 'Disable the progress indication.'
|
||||
cand --print-errors 'Print path with errors.'
|
||||
cand -D 'Only directories will be displayed.'
|
||||
cand --only-dir 'Only directories will be displayed.'
|
||||
cand -F 'Only files will be displayed. (Finds your largest files)'
|
||||
cand --only-file 'Only files will be displayed. (Finds your largest files)'
|
||||
cand -j 'Output the directory tree as json to the current directory'
|
||||
cand --output-json 'Output the directory tree as json to the current directory'
|
||||
cand -h 'Print help'
|
||||
cand --help 'Print help'
|
||||
cand -V 'Print version'
|
||||
cand --version 'Print version'
|
||||
}
|
||||
]
|
||||
$completions[$command]
|
||||
|
||||
@@ -1,28 +1,18 @@
|
||||
complete -c dust -s d -l depth -d 'Depth to show' -r
|
||||
complete -c dust -s T -l threads -d 'Number of threads to use' -r
|
||||
complete -c dust -l config -d 'Specify a config file to use' -r -F
|
||||
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
|
||||
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
|
||||
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
|
||||
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this name' -r
|
||||
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
|
||||
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ' -r
|
||||
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$" ' -r
|
||||
complete -c dust -s w -l terminal_width -d 'Specify width of output overriding the auto detection of terminal width' -r
|
||||
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "{si\t'',b\t'',k\t'',m\t'',g\t'',t\t'',kb\t'',mb\t'',gb\t'',tb\t''}"
|
||||
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
|
||||
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)' -r
|
||||
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
|
||||
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
|
||||
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
|
||||
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
|
||||
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "{a\t'',c\t'',m\t''}"
|
||||
complete -c dust -s h -l help -d 'Print help information'
|
||||
complete -c dust -s V -l version -d 'Print version information'
|
||||
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
|
||||
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
|
||||
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'
|
||||
complete -c dust -s s -l apparent-size -d 'Use file length instead of blocks'
|
||||
complete -c dust -s r -l reverse -d 'Print tree upside down (biggest highest)'
|
||||
complete -c dust -s c -l no-colors -d 'No colors will be printed (Useful for commands like: watch)'
|
||||
complete -c dust -s C -l force-colors -d 'Force colors print'
|
||||
complete -c dust -s b -l no-percent-bars -d 'No percent bars or percentages will be displayed'
|
||||
complete -c dust -s B -l bars-on-right -d 'percent bars moved to right side of screen'
|
||||
complete -c dust -s R -l screen-reader -d 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
|
||||
@@ -30,10 +20,7 @@ complete -c dust -l skip-total -d 'No total row will be displayed'
|
||||
complete -c dust -s f -l filecount -d 'Directory \'size\' is number of child files instead of disk size'
|
||||
complete -c dust -s i -l ignore_hidden -d 'Do not display hidden files'
|
||||
complete -c dust -s t -l file_types -d 'show only these file types'
|
||||
complete -c dust -s H -l si -d 'print sizes in powers of 1000 (e.g., 1.1G)'
|
||||
complete -c dust -s P -l no-progress -d 'Disable the progress indication.'
|
||||
complete -c dust -l print-errors -d 'Print path with errors.'
|
||||
complete -c dust -s D -l only-dir -d 'Only directories will be displayed.'
|
||||
complete -c dust -s F -l only-file -d 'Only files will be displayed. (Finds your largest files)'
|
||||
complete -c dust -s j -l output-json -d 'Output the directory tree as json to the current directory'
|
||||
complete -c dust -s h -l help -d 'Print help'
|
||||
complete -c dust -s V -l version -d 'Print version'
|
||||
|
||||
@@ -25,4 +25,4 @@ skip-total=true
ignore-hidden=true

# print sizes in powers of 1000 (e.g., 1.1G)
output-format="si"
iso=true
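The config keys above map directly onto dust's command-line flags (`skip-total`, `ignore-hidden`, `output-format`). As a hedged sketch of how such a file can be deserialized with the serde derive feature listed in Cargo.toml: the struct below and the use of the `toml` crate are illustrative assumptions, not dust's actual loader, which goes through the `config-file` crate instead.

```rust
// Hedged sketch only: deserializing a dust-style config file with serde.
// The struct name, fields shown, and the `toml` crate are illustrative.
use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
#[serde(default, rename_all = "kebab-case")] // matches keys like `skip-total`
struct Config {
    skip_total: bool,
    ignore_hidden: bool,
    output_format: Option<String>, // e.g. "si" to print sizes in powers of 1000
    iso: bool,
}

fn main() {
    let text = r#"
skip-total = true
ignore-hidden = true
output-format = "si"
iso = true
"#;
    let cfg: Config = toml::from_str(text).expect("valid TOML");
    println!("{cfg:?}");
}
```

The `rename_all = "kebab-case"` attribute is what lets hyphenated keys in the file map onto the usual snake_case Rust field names without per-field annotations.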
@@ -1,34 +1,31 @@
|
||||
.ie \n(.g .ds Aq \(aq
|
||||
.el .ds Aq '
|
||||
.TH Dust 1 "Dust 1.1.1"
|
||||
.TH Dust 1 "Dust 0.8.6"
|
||||
.SH NAME
|
||||
Dust \- Like du but more intuitive
|
||||
.SH SYNOPSIS
|
||||
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
|
||||
\fBDust\fR [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-H\fR|\fB\-\-si\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fIinputs\fR]
|
||||
.SH DESCRIPTION
|
||||
Like du but more intuitive
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-depth\fR=\fIDEPTH\fR
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
Print help information
|
||||
.TP
|
||||
\fB\-V\fR, \fB\-\-version\fR
|
||||
Print version information
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-depth\fR
|
||||
Depth to show
|
||||
.TP
|
||||
\fB\-T\fR, \fB\-\-threads\fR
|
||||
Number of threads to use
|
||||
.TP
|
||||
\fB\-\-config\fR=\fIFILE\fR
|
||||
Specify a config file to use
|
||||
.TP
|
||||
\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
|
||||
\fB\-n\fR, \fB\-\-number\-of\-lines\fR
|
||||
Number of lines of output to show. (Default is terminal_height \- 10)
|
||||
.TP
|
||||
\fB\-p\fR, \fB\-\-full\-paths\fR
|
||||
Subdirectories will not have their path shortened
|
||||
.TP
|
||||
\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
|
||||
Exclude any file or directory with this path
|
||||
.TP
|
||||
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
|
||||
Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
|
||||
\fB\-X\fR, \fB\-\-ignore\-directory\fR
|
||||
Exclude any file or directory with this name
|
||||
.TP
|
||||
\fB\-L\fR, \fB\-\-dereference\-links\fR
|
||||
dereference sym links \- Treat sym links as directories and go into them
|
||||
@@ -45,16 +42,13 @@ Print tree upside down (biggest highest)
|
||||
\fB\-c\fR, \fB\-\-no\-colors\fR
|
||||
No colors will be printed (Useful for commands like: watch)
|
||||
.TP
|
||||
\fB\-C\fR, \fB\-\-force\-colors\fR
|
||||
Force colors print
|
||||
.TP
|
||||
\fB\-b\fR, \fB\-\-no\-percent\-bars\fR
|
||||
No percent bars or percentages will be displayed
|
||||
.TP
|
||||
\fB\-B\fR, \fB\-\-bars\-on\-right\fR
|
||||
percent bars moved to right side of screen
|
||||
.TP
|
||||
\fB\-z\fR, \fB\-\-min\-size\fR=\fIMIN_SIZE\fR
|
||||
\fB\-z\fR, \fB\-\-min\-size\fR
|
||||
Minimum size file to include in output
|
||||
.TP
|
||||
\fB\-R\fR, \fB\-\-screen\-reader\fR
|
||||
@@ -69,72 +63,31 @@ Directory \*(Aqsize\*(Aq is number of child files instead of disk size
|
||||
\fB\-i\fR, \fB\-\-ignore_hidden\fR
|
||||
Do not display hidden files
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-invert\-filter\fR=\fIREGEX\fR
|
||||
\fB\-v\fR, \fB\-\-invert\-filter\fR
|
||||
Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$"
|
||||
.TP
|
||||
\fB\-e\fR, \fB\-\-filter\fR=\fIREGEX\fR
|
||||
\fB\-e\fR, \fB\-\-filter\fR
|
||||
Only include filepaths matching this regex. For png files type: \-e "\\.png$"
|
||||
.TP
|
||||
\fB\-t\fR, \fB\-\-file_types\fR
|
||||
show only these file types
|
||||
.TP
|
||||
\fB\-w\fR, \fB\-\-terminal_width\fR=\fIWIDTH\fR
|
||||
\fB\-w\fR, \fB\-\-terminal_width\fR
|
||||
Specify width of output overriding the auto detection of terminal width
|
||||
.TP
|
||||
\fB\-H\fR, \fB\-\-si\fR
|
||||
print sizes in powers of 1000 (e.g., 1.1G)
|
||||
.TP
|
||||
\fB\-P\fR, \fB\-\-no\-progress\fR
|
||||
Disable the progress indication.
|
||||
.TP
|
||||
\fB\-\-print\-errors\fR
|
||||
Print path with errors.
|
||||
.TP
|
||||
\fB\-D\fR, \fB\-\-only\-dir\fR
|
||||
Only directories will be displayed.
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-only\-file\fR
|
||||
Only files will be displayed. (Finds your largest files)
|
||||
.TP
|
||||
\fB\-o\fR, \fB\-\-output\-format\fR=\fIFORMAT\fR
|
||||
Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.
|
||||
.br
|
||||
|
||||
.br
|
||||
[\fIpossible values: \fRsi, b, k, m, g, t, kb, mb, gb, tb]
|
||||
.TP
|
||||
\fB\-S\fR, \fB\-\-stack\-size\fR=\fISTACK_SIZE\fR
|
||||
Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824)
|
||||
.TP
|
||||
\fB\-j\fR, \fB\-\-output\-json\fR
|
||||
Output the directory tree as json to the current directory
|
||||
.TP
|
||||
\fB\-M\fR, \fB\-\-mtime\fR
|
||||
+/\-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and \-n => (𝑐𝑢𝑟𝑟−𝑛, +∞)
|
||||
.TP
|
||||
\fB\-A\fR, \fB\-\-atime\fR
|
||||
just like \-mtime, but based on file access time
|
||||
.TP
|
||||
\fB\-y\fR, \fB\-\-ctime\fR
|
||||
just like \-mtime, but based on file change time
|
||||
.TP
|
||||
\fB\-\-files0\-from\fR
|
||||
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
|
||||
.TP
|
||||
\fB\-\-collapse\fR
|
||||
Keep these directories collapsed
|
||||
.TP
|
||||
\fB\-m\fR, \fB\-\-filetime\fR
|
||||
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
|
||||
.br
|
||||
|
||||
.br
|
||||
[\fIpossible values: \fRa, c, m]
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
Print help
|
||||
.TP
|
||||
\fB\-V\fR, \fB\-\-version\fR
|
||||
Print version
|
||||
.TP
|
||||
[\fIPATH\fR]
|
||||
[\fIinputs\fR]
|
||||
|
||||
.SH VERSION
|
||||
v1.1.1
|
||||
v0.8.6
|
||||
|
||||
200
src/cli.rs
200
src/cli.rs
@@ -1,169 +1,122 @@
|
||||
use clap::{builder::PossibleValue, value_parser, Arg, Command};
|
||||
use clap::{Arg, Command};
|
||||
|
||||
// For single thread mode set this variable on your command line:
|
||||
// export RAYON_NUM_THREADS=1
|
||||
|
||||
pub fn build_cli() -> Command {
|
||||
pub fn build_cli() -> Command<'static> {
|
||||
Command::new("Dust")
|
||||
.about("Like du but more intuitive")
|
||||
.version(env!("CARGO_PKG_VERSION"))
|
||||
.trailing_var_arg(true)
|
||||
.arg(
|
||||
Arg::new("depth")
|
||||
.short('d')
|
||||
.long("depth")
|
||||
.value_name("DEPTH")
|
||||
.value_parser(value_parser!(usize))
|
||||
.help("Depth to show")
|
||||
.num_args(1)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("threads")
|
||||
.short('T')
|
||||
.long("threads")
|
||||
.value_parser(value_parser!(usize))
|
||||
.help("Number of threads to use")
|
||||
.num_args(1)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("config")
|
||||
.long("config")
|
||||
.help("Specify a config file to use")
|
||||
.value_name("FILE")
|
||||
.value_hint(clap::ValueHint::FilePath)
|
||||
.value_parser(value_parser!(String))
|
||||
.num_args(1)
|
||||
.takes_value(true)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("number_of_lines")
|
||||
.short('n')
|
||||
.long("number-of-lines")
|
||||
.value_name("NUMBER")
|
||||
.value_parser(value_parser!(usize))
|
||||
.help("Number of lines of output to show. (Default is terminal_height - 10)")
|
||||
.num_args(1)
|
||||
.takes_value(true)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("display_full_paths")
|
||||
.short('p')
|
||||
.long("full-paths")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Subdirectories will not have their path shortened"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("ignore_directory")
|
||||
.short('X')
|
||||
.long("ignore-directory")
|
||||
.value_name("PATH")
|
||||
.value_hint(clap::ValueHint::AnyPath)
|
||||
.action(clap::ArgAction::Append)
|
||||
.help("Exclude any file or directory with this path"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("ignore_all_in_file")
|
||||
.short('I')
|
||||
.long("ignore-all-in-file")
|
||||
.value_name("FILE")
|
||||
.value_hint(clap::ValueHint::FilePath)
|
||||
.value_parser(value_parser!(String))
|
||||
.help("Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter"),
|
||||
.takes_value(true)
|
||||
.number_of_values(1)
|
||||
.multiple_occurrences(true)
|
||||
.help("Exclude any file or directory with this name"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("dereference_links")
|
||||
.short('L')
|
||||
.long("dereference-links")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("dereference sym links - Treat sym links as directories and go into them"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("limit_filesystem")
|
||||
.short('x')
|
||||
.long("limit-filesystem")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Only count the files and directories on the same filesystem as the supplied directory"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("display_apparent_size")
|
||||
.short('s')
|
||||
.long("apparent-size")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Use file length instead of blocks"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("reverse")
|
||||
.short('r')
|
||||
.long("reverse")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Print tree upside down (biggest highest)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("no_colors")
|
||||
.short('c')
|
||||
.long("no-colors")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("No colors will be printed (Useful for commands like: watch)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("force_colors")
|
||||
.short('C')
|
||||
.long("force-colors")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Force colors print"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("no_bars")
|
||||
.short('b')
|
||||
.long("no-percent-bars")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("No percent bars or percentages will be displayed"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("bars_on_right")
|
||||
.short('B')
|
||||
.long("bars-on-right")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("percent bars moved to right side of screen"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("min_size")
|
||||
.short('z')
|
||||
.long("min-size")
|
||||
.value_name("MIN_SIZE")
|
||||
.num_args(1)
|
||||
.takes_value(true)
|
||||
.number_of_values(1)
|
||||
.help("Minimum size file to include in output"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("screen_reader")
|
||||
.short('R')
|
||||
.long("screen-reader")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("skip_total")
|
||||
.long("skip-total")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("No total row will be displayed"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("by_filecount")
|
||||
.short('f')
|
||||
.long("filecount")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Directory 'size' is number of child files instead of disk size"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("ignore_hidden")
|
||||
.short('i') // Do not use 'h' this is used by 'help'
|
||||
.long("ignore_hidden")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Do not display hidden files"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("invert_filter")
|
||||
.short('v')
|
||||
.long("invert-filter")
|
||||
.value_name("REGEX")
|
||||
.action(clap::ArgAction::Append)
|
||||
.takes_value(true)
|
||||
.number_of_values(1)
|
||||
.multiple_occurrences(true)
|
||||
.conflicts_with("filter")
|
||||
.conflicts_with("types")
|
||||
.help("Exclude filepaths matching this regex. To ignore png files type: -v \"\\.png$\" "),
|
||||
@@ -172,8 +125,9 @@ pub fn build_cli() -> Command {
|
||||
Arg::new("filter")
|
||||
.short('e')
|
||||
.long("filter")
|
||||
.value_name("REGEX")
|
||||
.action(clap::ArgAction::Append)
|
||||
.takes_value(true)
|
||||
.number_of_values(1)
|
||||
.multiple_occurrences(true)
|
||||
.conflicts_with("types")
|
||||
.help("Only include filepaths matching this regex. For png files type: -e \"\\.png$\" "),
|
||||
)
|
||||
@@ -183,38 +137,34 @@ pub fn build_cli() -> Command {
|
||||
.long("file_types")
|
||||
.conflicts_with("depth")
|
||||
.conflicts_with("only_dir")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("show only these file types"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("width")
|
||||
.short('w')
|
||||
.long("terminal_width")
|
||||
.value_name("WIDTH")
|
||||
.value_parser(value_parser!(usize))
|
||||
.num_args(1)
|
||||
.takes_value(true)
|
||||
.number_of_values(1)
|
||||
.help("Specify width of output overriding the auto detection of terminal width"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("iso")
|
||||
.short('H')
|
||||
.long("si")
|
||||
.help("print sizes in powers of 1000 (e.g., 1.1G)")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("disable_progress")
|
||||
.short('P')
|
||||
.long("no-progress")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Disable the progress indication."),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("print_errors")
|
||||
.long("print-errors")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Print path with errors."),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("only_dir")
|
||||
.short('D')
|
||||
.long("only-dir")
|
||||
.conflicts_with("only_file")
|
||||
.conflicts_with("types")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Only directories will be displayed."),
|
||||
)
|
||||
.arg(
|
||||
@@ -222,105 +172,7 @@ pub fn build_cli() -> Command {
|
||||
.short('F')
|
||||
.long("only-file")
|
||||
.conflicts_with("only_dir")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Only files will be displayed. (Finds your largest files)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("output_format")
|
||||
.short('o')
|
||||
.long("output-format")
|
||||
.value_name("FORMAT")
|
||||
.value_parser([
|
||||
PossibleValue::new("si"),
|
||||
PossibleValue::new("b"),
|
||||
PossibleValue::new("k").alias("kib"),
|
||||
PossibleValue::new("m").alias("mib"),
|
||||
PossibleValue::new("g").alias("gib"),
|
||||
PossibleValue::new("t").alias("tib"),
|
||||
PossibleValue::new("kb"),
|
||||
PossibleValue::new("mb"),
|
||||
PossibleValue::new("gb"),
|
||||
PossibleValue::new("tb"),
|
||||
])
|
||||
.ignore_case(true)
|
||||
.help("Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("stack_size")
|
||||
.short('S')
|
||||
.long("stack-size")
|
||||
.value_name("STACK_SIZE")
|
||||
.value_parser(value_parser!(usize))
|
||||
.num_args(1)
|
||||
.help("Specify memory to use as stack size - use if you see: 'fatal runtime error: stack overflow' (default low memory=1048576, high memory=1073741824)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("params")
|
||||
.value_name("PATH")
|
||||
.value_hint(clap::ValueHint::AnyPath)
|
||||
.value_parser(value_parser!(String))
|
||||
.num_args(1..)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("output_json")
|
||||
.short('j')
|
||||
.long("output-json")
|
||||
.action(clap::ArgAction::SetTrue)
|
||||
.help("Output the directory tree as json to the current directory"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("mtime")
|
||||
.short('M')
|
||||
.long("mtime")
|
||||
.num_args(1)
|
||||
.allow_hyphen_values(true)
|
||||
.value_parser(value_parser!(String))
|
||||
.help("+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("atime")
|
||||
.short('A')
|
||||
.long("atime")
|
||||
.num_args(1)
|
||||
.allow_hyphen_values(true)
|
||||
.value_parser(value_parser!(String))
|
||||
.help("just like -mtime, but based on file access time")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("ctime")
|
||||
.short('y')
|
||||
.long("ctime")
|
||||
.num_args(1)
|
||||
.allow_hyphen_values(true)
|
||||
.value_parser(value_parser!(String))
|
||||
.help("just like -mtime, but based on file change time")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("files0_from")
|
||||
.long("files0-from")
|
||||
.value_hint(clap::ValueHint::AnyPath)
|
||||
.value_parser(value_parser!(String))
|
||||
.num_args(1)
|
||||
.help("run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("collapse")
|
||||
.long("collapse")
|
||||
.value_hint(clap::ValueHint::AnyPath)
|
||||
.value_parser(value_parser!(String))
|
||||
.action(clap::ArgAction::Append)
|
||||
.help("Keep these directories collapsed"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("filetime")
|
||||
.short('m')
|
||||
.long("filetime")
|
||||
.num_args(1)
|
||||
.value_parser([
|
||||
PossibleValue::new("a").alias("accessed"),
|
||||
PossibleValue::new("c").alias("changed"),
|
||||
PossibleValue::new("m").alias("modified"),
|
||||
])
|
||||
.help("Directory 'size' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time"),
|
||||
)
|
||||
.arg(Arg::new("inputs").multiple_occurrences(true))
|
||||
}
|
||||
|
||||
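A minimal sketch of how the newer clap 4 builder above is typically consumed. This assumes it sits next to build_cli() in the same crate (as src/main.rs would); the body is illustrative, not copied from the diff.

use clap::ArgMatches;

fn main() {
    let args: ArgMatches = build_cli().get_matches();
    // Typed values registered with value_parser!(usize) come back via get_one.
    let depth = args.get_one::<usize>("depth").copied().unwrap_or(usize::MAX);
    // ArgAction::SetTrue flags are read with get_flag.
    let json = args.get_flag("output_json");
    // ArgAction::Append options collect every occurrence.
    let ignored: Vec<&String> = args
        .get_many::<String>("ignore_directory")
        .map(|v| v.collect())
        .unwrap_or_default();
    println!("depth={depth} json={json} ignored={ignored:?}");
}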
349
src/config.rs
@@ -1,248 +1,134 @@
|
||||
use crate::node::FileTime;
|
||||
use chrono::{Local, TimeZone};
|
||||
use clap::ArgMatches;
|
||||
use config_file::FromConfigFile;
|
||||
use regex::Regex;
|
||||
use serde::Deserialize;
|
||||
use std::io::IsTerminal;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::dir_walker::Operator;
|
||||
use crate::display::get_number_format;
|
||||
|
||||
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
|
||||
use crate::display::UNITS;
|
||||
|
||||
#[derive(Deserialize, Default)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct Config {
|
||||
pub display_full_paths: Option<bool>,
|
||||
pub display_apparent_size: Option<bool>,
|
||||
pub reverse: Option<bool>,
|
||||
pub no_colors: Option<bool>,
|
||||
pub force_colors: Option<bool>,
|
||||
pub no_bars: Option<bool>,
|
||||
pub skip_total: Option<bool>,
|
||||
pub screen_reader: Option<bool>,
|
||||
pub ignore_hidden: Option<bool>,
|
||||
pub output_format: Option<String>,
|
||||
pub iso: Option<bool>,
|
||||
pub min_size: Option<String>,
|
||||
pub only_dir: Option<bool>,
|
||||
pub only_file: Option<bool>,
|
||||
pub disable_progress: Option<bool>,
|
||||
pub depth: Option<usize>,
|
||||
pub bars_on_right: Option<bool>,
|
||||
pub stack_size: Option<usize>,
|
||||
pub threads: Option<usize>,
|
||||
pub output_json: Option<bool>,
|
||||
pub print_errors: Option<bool>,
|
||||
pub files0_from: Option<String>,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn get_files_from(&self, options: &ArgMatches) -> Option<String> {
|
||||
let from_file = options.get_one::<String>("files0_from");
|
||||
match from_file {
|
||||
None => self.files0_from.as_ref().map(|x| x.to_string()),
|
||||
Some(x) => Some(x.to_string()),
|
||||
}
|
||||
}
|
||||
pub fn get_no_colors(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.no_colors || options.get_flag("no_colors")
|
||||
}
|
||||
pub fn get_force_colors(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.force_colors || options.get_flag("force_colors")
|
||||
Some(true) == self.no_colors || options.is_present("no_colors")
|
||||
}
|
||||
pub fn get_disable_progress(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.disable_progress
|
||||
|| options.get_flag("disable_progress")
|
||||
|| options.is_present("disable_progress")
|
||||
|| !std::io::stdout().is_terminal()
|
||||
}
|
||||
pub fn get_apparent_size(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.display_apparent_size || options.get_flag("display_apparent_size")
|
||||
Some(true) == self.display_apparent_size || options.is_present("display_apparent_size")
|
||||
}
|
||||
pub fn get_ignore_hidden(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.ignore_hidden || options.get_flag("ignore_hidden")
|
||||
Some(true) == self.ignore_hidden || options.is_present("ignore_hidden")
|
||||
}
|
||||
pub fn get_full_paths(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.display_full_paths || options.get_flag("display_full_paths")
|
||||
// If we are only showing files, always show full paths
|
||||
Some(true) == self.display_full_paths
|
||||
|| options.is_present("display_full_paths")
|
||||
|| self.get_only_file(options)
|
||||
}
|
||||
pub fn get_reverse(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.reverse || options.get_flag("reverse")
|
||||
Some(true) == self.reverse || options.is_present("reverse")
|
||||
}
|
||||
pub fn get_no_bars(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.no_bars || options.get_flag("no_bars")
|
||||
Some(true) == self.no_bars || options.is_present("no_bars")
|
||||
}
|
||||
pub fn get_output_format(&self, options: &ArgMatches) -> String {
|
||||
let out_fmt = options.get_one::<String>("output_format");
|
||||
(match out_fmt {
|
||||
None => match &self.output_format {
|
||||
None => "".to_string(),
|
||||
Some(x) => x.to_string(),
|
||||
},
|
||||
Some(x) => x.into(),
|
||||
})
|
||||
.to_lowercase()
|
||||
pub fn get_iso(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.iso || options.is_present("iso")
|
||||
}
|
||||
|
||||
pub fn get_filetime(&self, options: &ArgMatches) -> Option<FileTime> {
|
||||
let out_fmt = options.get_one::<String>("filetime");
|
||||
match out_fmt {
|
||||
None => None,
|
||||
Some(x) => match x.as_str() {
|
||||
"m" | "modified" => Some(FileTime::Modified),
|
||||
"a" | "accessed" => Some(FileTime::Accessed),
|
||||
"c" | "changed" => Some(FileTime::Changed),
|
||||
_ => unreachable!(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_skip_total(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.skip_total || options.get_flag("skip_total")
|
||||
Some(true) == self.skip_total || options.is_present("skip_total")
|
||||
}
|
||||
pub fn get_screen_reader(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.screen_reader || options.get_flag("screen_reader")
|
||||
Some(true) == self.screen_reader || options.is_present("screen_reader")
|
||||
}
|
||||
pub fn get_depth(&self, options: &ArgMatches) -> usize {
|
||||
if let Some(v) = options.get_one::<usize>("depth") {
|
||||
return *v;
|
||||
if let Some(v) = options.value_of("depth") {
|
||||
if let Ok(v) = v.parse::<usize>() {
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
self.depth.unwrap_or(usize::MAX)
|
||||
}
|
||||
pub fn get_min_size(&self, options: &ArgMatches) -> Option<usize> {
|
||||
let size_from_param = options.get_one::<String>("min_size");
|
||||
self._get_min_size(size_from_param)
|
||||
pub fn get_min_size(&self, options: &ArgMatches, iso: bool) -> Option<usize> {
|
||||
let size_from_param = options.value_of("min_size");
|
||||
self._get_min_size(size_from_param, iso)
|
||||
}
|
||||
fn _get_min_size(&self, min_size: Option<&String>) -> Option<usize> {
|
||||
let size_from_param = min_size.and_then(|a| convert_min_size(a));
|
||||
fn _get_min_size(&self, min_size: Option<&str>, iso: bool) -> Option<usize> {
|
||||
let size_from_param = min_size.and_then(|a| convert_min_size(a, iso));
|
||||
|
||||
if size_from_param.is_none() {
|
||||
self.min_size
|
||||
.as_ref()
|
||||
.and_then(|a| convert_min_size(a.as_ref()))
|
||||
.and_then(|a| convert_min_size(a.as_ref(), iso))
|
||||
} else {
|
||||
size_from_param
|
||||
}
|
||||
}
|
||||
pub fn get_only_dir(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.only_dir || options.get_flag("only_dir")
|
||||
}
|
||||
|
||||
pub fn get_print_errors(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.print_errors || options.get_flag("print_errors")
|
||||
Some(true) == self.only_dir || options.is_present("only_dir")
|
||||
}
|
||||
pub fn get_only_file(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.only_file || options.get_flag("only_file")
|
||||
Some(true) == self.only_file || options.is_present("only_file")
|
||||
}
|
||||
pub fn get_bars_on_right(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.bars_on_right || options.get_flag("bars_on_right")
|
||||
}
|
||||
pub fn get_custom_stack_size(&self, options: &ArgMatches) -> Option<usize> {
|
||||
let from_cmd_line = options.get_one::<usize>("stack_size");
|
||||
if from_cmd_line.is_none() {
|
||||
self.stack_size
|
||||
} else {
|
||||
from_cmd_line.copied()
|
||||
}
|
||||
}
|
||||
pub fn get_threads(&self, options: &ArgMatches) -> Option<usize> {
|
||||
let from_cmd_line = options.get_one::<usize>("threads");
|
||||
if from_cmd_line.is_none() {
|
||||
self.threads
|
||||
} else {
|
||||
from_cmd_line.copied()
|
||||
}
|
||||
}
|
||||
pub fn get_output_json(&self, options: &ArgMatches) -> bool {
|
||||
Some(true) == self.output_json || options.get_flag("output_json")
|
||||
}
|
||||
|
||||
pub fn get_modified_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
|
||||
get_filter_time_operator(
|
||||
options.get_one::<String>("mtime"),
|
||||
get_current_date_epoch_seconds(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get_accessed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
|
||||
get_filter_time_operator(
|
||||
options.get_one::<String>("atime"),
|
||||
get_current_date_epoch_seconds(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get_changed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
|
||||
get_filter_time_operator(
|
||||
options.get_one::<String>("ctime"),
|
||||
get_current_date_epoch_seconds(),
|
||||
)
|
||||
Some(true) == self.bars_on_right || options.is_present("bars_on_right")
|
||||
}
|
||||
}
|
||||
|
||||
fn get_current_date_epoch_seconds() -> i64 {
|
||||
// calculate current date epoch seconds
|
||||
let now = Local::now();
|
||||
let current_date = now.date_naive();
|
||||
fn convert_min_size(input: &str, iso: bool) -> Option<usize> {
|
||||
let chars_as_vec: Vec<char> = input.chars().collect();
|
||||
match chars_as_vec.split_last() {
|
||||
Some((last, start)) => {
|
||||
let mut starts: String = start.iter().collect::<String>();
|
||||
|
||||
let current_date_time = current_date.and_hms_opt(0, 0, 0).unwrap();
|
||||
Local
|
||||
.from_local_datetime(&current_date_time)
|
||||
.unwrap()
|
||||
.timestamp()
|
||||
}
|
||||
|
||||
fn get_filter_time_operator(
|
||||
option_value: Option<&String>,
|
||||
current_date_epoch_seconds: i64,
|
||||
) -> Option<(Operator, i64)> {
|
||||
match option_value {
|
||||
Some(val) => {
|
||||
let time = current_date_epoch_seconds
|
||||
- val
|
||||
.parse::<i64>()
|
||||
.unwrap_or_else(|_| panic!("invalid data format"))
|
||||
.abs()
|
||||
* DAY_SECONDS;
|
||||
match val.chars().next().expect("Value should not be empty") {
|
||||
'+' => Some((Operator::LessThan, time - DAY_SECONDS)),
|
||||
'-' => Some((Operator::GreaterThan, time)),
|
||||
_ => Some((Operator::Equal, time - DAY_SECONDS)),
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_min_size(input: &str) -> Option<usize> {
|
||||
let re = Regex::new(r"([0-9]+)(\w*)").unwrap();
|
||||
|
||||
if let Some(cap) = re.captures(input) {
|
||||
let (_, [digits, letters]) = cap.extract();
|
||||
|
||||
// Failure to parse should be impossible due to regex match
|
||||
let digits_as_usize: Option<usize> = digits.parse().ok();
|
||||
|
||||
match digits_as_usize {
|
||||
Some(parsed_digits) => {
|
||||
let number_format = get_number_format(&letters.to_lowercase());
|
||||
match number_format {
|
||||
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
|
||||
None => {
|
||||
if letters.is_empty() {
|
||||
Some(parsed_digits)
|
||||
} else {
|
||||
for (i, u) in UNITS.iter().rev().enumerate() {
|
||||
if Some(*u) == last.to_uppercase().next() {
|
||||
return match starts.parse::<usize>() {
|
||||
Ok(pure) => {
|
||||
let num: usize = if iso { 1000 } else { 1024 };
|
||||
let marker = pure * num.pow((i + 1) as u32);
|
||||
Some(marker)
|
||||
}
|
||||
Err(_) => {
|
||||
eprintln!("Ignoring invalid min-size: {input}");
|
||||
None
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
starts.push(*last);
|
||||
starts
|
||||
.parse()
|
||||
.map_err(|_| {
|
||||
eprintln!("Ignoring invalid min-size: {input}");
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
} else {
|
||||
None
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -253,29 +139,12 @@ fn get_config_locations(base: &Path) -> Vec<PathBuf> {
|
||||
]
|
||||
}
|
||||
|
||||
pub fn get_config(conf_path: Option<String>) -> Config {
|
||||
match conf_path {
|
||||
Some(path_str) => {
|
||||
let path = Path::new(&path_str);
|
||||
pub fn get_config() -> Config {
|
||||
if let Some(home) = directories::BaseDirs::new() {
|
||||
for path in get_config_locations(home.home_dir()) {
|
||||
if path.exists() {
|
||||
match Config::from_config_file(path) {
|
||||
Ok(config) => return config,
|
||||
Err(e) => {
|
||||
eprintln!("Ignoring invalid config file '{}': {}", &path.display(), e)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
eprintln!("Config file {:?} doesn't exists", &path.display());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
if let Some(home) = directories::BaseDirs::new() {
|
||||
for path in get_config_locations(home.home_dir()) {
|
||||
if path.exists() {
|
||||
if let Ok(config) = Config::from_config_file(&path) {
|
||||
return config;
|
||||
}
|
||||
}
|
||||
if let Ok(config) = Config::from_config_file(path) {
|
||||
return config;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -289,46 +158,30 @@ pub fn get_config(conf_path: Option<String>) -> Config {
|
||||
mod tests {
|
||||
#[allow(unused_imports)]
|
||||
use super::*;
|
||||
use chrono::{Datelike, Timelike};
|
||||
use clap::builder::PossibleValue;
|
||||
use clap::{value_parser, Arg, ArgMatches, Command};
|
||||
|
||||
#[test]
|
||||
fn test_get_current_date_epoch_seconds() {
|
||||
let epoch_seconds = get_current_date_epoch_seconds();
|
||||
let dt = Local.timestamp_opt(epoch_seconds, 0).unwrap();
|
||||
|
||||
assert_eq!(dt.hour(), 0);
|
||||
assert_eq!(dt.minute(), 0);
|
||||
assert_eq!(dt.second(), 0);
|
||||
assert_eq!(dt.date_naive().day(), Local::now().date_naive().day());
|
||||
assert_eq!(dt.date_naive().month(), Local::now().date_naive().month());
|
||||
assert_eq!(dt.date_naive().year(), Local::now().date_naive().year());
|
||||
}
|
||||
use clap::{Arg, ArgMatches, Command};
|
||||
|
||||
#[test]
|
||||
fn test_conversion() {
|
||||
assert_eq!(convert_min_size("55"), Some(55));
|
||||
assert_eq!(convert_min_size("12344321"), Some(12344321));
|
||||
assert_eq!(convert_min_size("95RUBBISH"), None);
|
||||
assert_eq!(convert_min_size("10Ki"), Some(10 * 1024));
|
||||
assert_eq!(convert_min_size("10MiB"), Some(10 * 1024usize.pow(2)));
|
||||
assert_eq!(convert_min_size("10M"), Some(10 * 1024usize.pow(2)));
|
||||
assert_eq!(convert_min_size("10Mb"), Some(10 * 1000usize.pow(2)));
|
||||
assert_eq!(convert_min_size("2Gi"), Some(2 * 1024usize.pow(3)));
|
||||
assert_eq!(convert_min_size("55", false), Some(55));
|
||||
assert_eq!(convert_min_size("12344321", false), Some(12344321));
|
||||
assert_eq!(convert_min_size("95RUBBISH", false), None);
|
||||
assert_eq!(convert_min_size("10K", false), Some(10 * 1024));
|
||||
assert_eq!(convert_min_size("10M", false), Some(10 * 1024usize.pow(2)));
|
||||
assert_eq!(convert_min_size("10M", true), Some(10 * 1000usize.pow(2)));
|
||||
assert_eq!(convert_min_size("2G", false), Some(2 * 1024usize.pow(3)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_min_size_from_config_applied_or_overridden() {
|
||||
let c = Config {
|
||||
min_size: Some("1KiB".to_owned()),
|
||||
min_size: Some("1K".to_owned()),
|
||||
..Default::default()
|
||||
};
|
||||
assert_eq!(c._get_min_size(None), Some(1024));
|
||||
assert_eq!(c._get_min_size(Some(&"2KiB".into())), Some(2048));
|
||||
assert_eq!(c._get_min_size(None, false), Some(1024));
|
||||
assert_eq!(c._get_min_size(Some("2K"), false), Some(2048));
|
||||
|
||||
assert_eq!(c._get_min_size(Some(&"1kb".into())), Some(1000));
|
||||
assert_eq!(c._get_min_size(Some(&"2KB".into())), Some(2000));
|
||||
assert_eq!(c._get_min_size(None, true), Some(1000));
|
||||
assert_eq!(c._get_min_size(Some("2K"), true), Some(2000));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -362,64 +215,8 @@ mod tests {
|
||||
|
||||
fn get_args(args: Vec<&str>) -> ArgMatches {
|
||||
Command::new("Dust")
|
||||
.arg(
|
||||
Arg::new("depth")
|
||||
.long("depth")
|
||||
.num_args(1)
|
||||
.value_parser(value_parser!(usize)),
|
||||
)
|
||||
.get_matches_from(args)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_filetime() {
|
||||
// No config and no flag.
|
||||
let c = Config::default();
|
||||
let args = get_filetime_args(vec!["dust"]);
|
||||
assert_eq!(c.get_filetime(&args), None);
|
||||
|
||||
// Config is not defined and flag is defined as access time
|
||||
let c = Config::default();
|
||||
let args = get_filetime_args(vec!["dust", "--filetime", "a"]);
|
||||
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
|
||||
|
||||
let c = Config::default();
|
||||
let args = get_filetime_args(vec!["dust", "--filetime", "accessed"]);
|
||||
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
|
||||
|
||||
// Config is not defined and flag is defined as modified time
|
||||
let c = Config::default();
|
||||
let args = get_filetime_args(vec!["dust", "--filetime", "m"]);
|
||||
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
|
||||
|
||||
let c = Config::default();
|
||||
let args = get_filetime_args(vec!["dust", "--filetime", "modified"]);
|
||||
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
|
||||
|
||||
// Config is not defined and flag is defined as changed time
|
||||
let c = Config::default();
|
||||
let args = get_filetime_args(vec!["dust", "--filetime", "c"]);
|
||||
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
|
||||
|
||||
let c = Config::default();
|
||||
let args = get_filetime_args(vec!["dust", "--filetime", "changed"]);
|
||||
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
|
||||
}
|
||||
|
||||
fn get_filetime_args(args: Vec<&str>) -> ArgMatches {
|
||||
Command::new("Dust")
|
||||
.arg(
|
||||
Arg::new("filetime")
|
||||
.short('m')
|
||||
.long("filetime")
|
||||
.num_args(1)
|
||||
.value_parser([
|
||||
PossibleValue::new("a").alias("accessed"),
|
||||
PossibleValue::new("c").alias("changed"),
|
||||
PossibleValue::new("m").alias("modified"),
|
||||
])
|
||||
.help("Directory 'size' is max filetime of child files instead of disk size. while a/c/m for accessed/changed/modified time"),
|
||||
)
|
||||
.trailing_var_arg(true)
|
||||
.arg(Arg::new("depth").long("depth").takes_value(true))
|
||||
.get_matches_from(args)
|
||||
}
|
||||
}
|
||||
|
||||
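The getters above all follow the same precedence rule: boolean options are on if either the config file or the CLI sets them, while numeric options prefer the CLI value and fall back to the config file. A stripped-down sketch of that pattern (the function names here are illustrative only):

// Boolean settings: config file OR command line can switch them on.
fn flag_enabled(from_config: Option<bool>, from_cli: bool) -> bool {
    Some(true) == from_config || from_cli
}

// Numeric settings: an explicit CLI value wins, otherwise use the config file.
fn numeric_setting(from_cli: Option<usize>, from_config: Option<usize>) -> Option<usize> {
    from_cli.or(from_config)
}

fn main() {
    assert!(flag_enabled(Some(true), false));                // set in the config file
    assert!(flag_enabled(None, true));                       // set on the command line
    assert!(!flag_enabled(Some(false), false));              // set nowhere
    assert_eq!(numeric_setting(Some(4), Some(8)), Some(4));  // CLI overrides config
    assert_eq!(numeric_setting(None, Some(8)), Some(8));     // fall back to config
}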
@@ -1,21 +1,15 @@
|
||||
use std::cmp::Ordering;
|
||||
use std::fs;
|
||||
use std::io::Error;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crate::node::Node;
|
||||
use crate::progress::Operation;
|
||||
use crate::progress::PAtomicInfo;
|
||||
use crate::progress::RuntimeErrors;
|
||||
use crate::progress::ORDERING;
|
||||
use crate::utils::is_filtered_out_due_to_file_time;
|
||||
use crate::utils::is_filtered_out_due_to_invert_regex;
|
||||
use crate::utils::is_filtered_out_due_to_regex;
|
||||
use rayon::iter::ParallelBridge;
|
||||
use rayon::prelude::ParallelIterator;
|
||||
use regex::Regex;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use std::collections::HashSet;
|
||||
@@ -23,53 +17,43 @@ use std::collections::HashSet;
|
||||
use crate::node::build_node;
|
||||
use std::fs::DirEntry;
|
||||
|
||||
use crate::node::FileTime;
|
||||
use crate::platform::get_metadata;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Operator {
|
||||
Equal = 0,
|
||||
LessThan = 1,
|
||||
GreaterThan = 2,
|
||||
}
|
||||
|
||||
pub struct WalkData<'a> {
|
||||
pub ignore_directories: HashSet<PathBuf>,
|
||||
pub filter_regex: &'a [Regex],
|
||||
pub invert_filter_regex: &'a [Regex],
|
||||
pub allowed_filesystems: HashSet<u64>,
|
||||
pub filter_modified_time: Option<(Operator, i64)>,
|
||||
pub filter_accessed_time: Option<(Operator, i64)>,
|
||||
pub filter_changed_time: Option<(Operator, i64)>,
|
||||
pub use_apparent_size: bool,
|
||||
pub by_filecount: bool,
|
||||
pub by_filetime: &'a Option<FileTime>,
|
||||
pub ignore_hidden: bool,
|
||||
pub follow_links: bool,
|
||||
pub progress_data: Arc<PAtomicInfo>,
|
||||
pub errors: Arc<Mutex<RuntimeErrors>>,
|
||||
}
|
||||
|
||||
pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
|
||||
pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: WalkData) -> Vec<Node> {
|
||||
let mut inodes = HashSet::new();
|
||||
let top_level_nodes: Vec<_> = dirs
|
||||
.into_iter()
|
||||
.filter_map(|d| {
|
||||
let prog_data = &walk_data.progress_data;
|
||||
prog_data.clear_state(&d);
|
||||
let node = walk(d, walk_data, 0)?;
|
||||
let node = walk(d, &walk_data, 0)?;
|
||||
|
||||
prog_data.state.store(Operation::PREPARING, ORDERING);
|
||||
|
||||
clean_inodes(node, &mut inodes, walk_data)
|
||||
clean_inodes(node, &mut inodes, walk_data.use_apparent_size)
|
||||
})
|
||||
.collect();
|
||||
top_level_nodes
|
||||
}
|
||||
|
||||
// Remove files which have the same inode, we don't want to double count them.
|
||||
fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
|
||||
if !walk_data.use_apparent_size {
|
||||
fn clean_inodes(
|
||||
x: Node,
|
||||
inodes: &mut HashSet<(u64, u64)>,
|
||||
use_apparent_size: bool,
|
||||
) -> Option<Node> {
|
||||
if !use_apparent_size {
|
||||
if let Some(id) = x.inode_device {
|
||||
if !inodes.insert(id) {
|
||||
return None;
|
||||
@@ -82,25 +66,12 @@ fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData)
|
||||
tmp.sort_by(sort_by_inode);
|
||||
let new_children: Vec<_> = tmp
|
||||
.into_iter()
|
||||
.filter_map(|c| clean_inodes(c, inodes, walk_data))
|
||||
.filter_map(|c| clean_inodes(c, inodes, use_apparent_size))
|
||||
.collect();
|
||||
|
||||
let actual_size = if walk_data.by_filetime.is_some() {
|
||||
// If by_filetime is Some, directory 'size' is the maximum filetime among child files instead of disk size
|
||||
new_children
|
||||
.iter()
|
||||
.map(|c| c.size)
|
||||
.chain(std::iter::once(x.size))
|
||||
.max()
|
||||
.unwrap_or(0)
|
||||
} else {
|
||||
// If by_filetime is None, directory 'size' is the sum of disk sizes or file counts of child files
|
||||
x.size + new_children.iter().map(|c| c.size).sum::<u64>()
|
||||
};
|
||||
|
||||
Some(Node {
|
||||
name: x.name,
|
||||
size: actual_size,
|
||||
size: x.size + new_children.iter().map(|c| c.size).sum::<u64>(),
|
||||
children: new_children,
|
||||
inode_device: x.inode_device,
|
||||
depth: x.depth,
|
||||
@@ -109,56 +80,31 @@ fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData)
|
||||
|
||||
fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
|
||||
// Sorting by inode is quicker than by sorting by name/size
|
||||
match (a.inode_device, b.inode_device) {
|
||||
(Some(x), Some(y)) => {
|
||||
if let Some(x) = a.inode_device {
|
||||
if let Some(y) = b.inode_device {
|
||||
if x.0 != y.0 {
|
||||
x.0.cmp(&y.0)
|
||||
return x.0.cmp(&y.0);
|
||||
} else if x.1 != y.1 {
|
||||
x.1.cmp(&y.1)
|
||||
} else {
|
||||
a.name.cmp(&b.name)
|
||||
return x.1.cmp(&y.1);
|
||||
}
|
||||
}
|
||||
(Some(_), None) => Ordering::Greater,
|
||||
(None, Some(_)) => Ordering::Less,
|
||||
(None, None) => a.name.cmp(&b.name),
|
||||
}
|
||||
a.name.cmp(&b.name)
|
||||
}
|
||||
|
||||
fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
|
||||
let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.');
|
||||
let is_ignored_path = walk_data.ignore_directories.contains(&entry.path());
|
||||
let follow_links = walk_data.follow_links && entry.file_type().is_ok_and(|ft| ft.is_symlink());
|
||||
|
||||
if !walk_data.allowed_filesystems.is_empty() {
|
||||
let size_inode_device = get_metadata(entry.path(), false, follow_links);
|
||||
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
|
||||
let size_inode_device = get_metadata(&entry.path(), false);
|
||||
|
||||
if let Some((_size, Some((_id, dev)))) = size_inode_device {
|
||||
if !walk_data.allowed_filesystems.contains(&dev) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if walk_data.filter_accessed_time.is_some()
|
||||
|| walk_data.filter_modified_time.is_some()
|
||||
|| walk_data.filter_changed_time.is_some()
|
||||
{
|
||||
let size_inode_device = get_metadata(entry.path(), false, follow_links);
|
||||
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
|
||||
if entry.path().is_file()
|
||||
&& [
|
||||
(&walk_data.filter_modified_time, modified_time),
|
||||
(&walk_data.filter_accessed_time, accessed_time),
|
||||
(&walk_data.filter_changed_time, changed_time),
|
||||
]
|
||||
.iter()
|
||||
.any(|(filter_time, actual_time)| {
|
||||
is_filtered_out_due_to_file_time(filter_time, *actual_time)
|
||||
})
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Keeping `walk_data.filter_regex.is_empty()` is important for performance reasons, it stops unnecessary work
|
||||
if !walk_data.filter_regex.is_empty()
|
||||
@@ -180,127 +126,69 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
|
||||
|
||||
fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
|
||||
let prog_data = &walk_data.progress_data;
|
||||
let errors = &walk_data.errors;
|
||||
let mut children = vec![];
|
||||
|
||||
if errors.lock().unwrap().abort {
|
||||
return None;
|
||||
}
|
||||
if let Ok(entries) = fs::read_dir(&dir) {
|
||||
children = entries
|
||||
.into_iter()
|
||||
.par_bridge()
|
||||
.filter_map(|entry| {
|
||||
if let Ok(ref entry) = entry {
|
||||
// uncommenting the below line gives simpler code but
|
||||
// rayon doesn't parallelize as well giving a 3X performance drop
|
||||
// hence we unravel the recursion a bit
|
||||
|
||||
let children = if dir.is_dir() {
|
||||
let read_dir = fs::read_dir(&dir);
|
||||
match read_dir {
|
||||
Ok(entries) => {
|
||||
entries
|
||||
.into_iter()
|
||||
.par_bridge()
|
||||
.filter_map(|entry| {
|
||||
match entry {
|
||||
Ok(ref entry) => {
|
||||
// uncommenting the below line gives simpler code but
|
||||
// rayon doesn't parallelize as well giving a 3X performance drop
|
||||
// hence we unravel the recursion a bit
|
||||
// return walk(entry.path(), walk_data, depth)
|
||||
|
||||
// return walk(entry.path(), walk_data, depth)
|
||||
|
||||
if !ignore_file(entry, walk_data) {
|
||||
if let Ok(data) = entry.file_type() {
|
||||
if data.is_dir()
|
||||
|| (walk_data.follow_links && data.is_symlink())
|
||||
{
|
||||
return walk(entry.path(), walk_data, depth + 1);
|
||||
}
|
||||
|
||||
let node = build_node(
|
||||
entry.path(),
|
||||
vec![],
|
||||
data.is_symlink(),
|
||||
data.is_file(),
|
||||
depth,
|
||||
walk_data,
|
||||
);
|
||||
|
||||
prog_data.num_files.fetch_add(1, ORDERING);
|
||||
if let Some(ref file) = node {
|
||||
prog_data
|
||||
.total_file_size
|
||||
.fetch_add(file.size, ORDERING);
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
}
|
||||
if !ignore_file(entry, walk_data) {
|
||||
if let Ok(data) = entry.file_type() {
|
||||
if data.is_dir() || (walk_data.follow_links && data.is_symlink()) {
|
||||
return walk(entry.path(), walk_data, depth + 1);
|
||||
}
|
||||
Err(ref failed) => {
|
||||
if handle_error_and_retry(failed, &dir, walk_data) {
|
||||
return walk(dir.clone(), walk_data, depth);
|
||||
}
|
||||
|
||||
let node = build_node(
|
||||
entry.path(),
|
||||
vec![],
|
||||
walk_data.filter_regex,
|
||||
walk_data.invert_filter_regex,
|
||||
walk_data.use_apparent_size,
|
||||
data.is_symlink(),
|
||||
data.is_file(),
|
||||
walk_data.by_filecount,
|
||||
depth,
|
||||
);
|
||||
|
||||
prog_data.num_files.fetch_add(1, ORDERING);
|
||||
if let Some(ref file) = node {
|
||||
prog_data.total_file_size.fetch_add(file.size, ORDERING);
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
None
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
Err(failed) => {
|
||||
if handle_error_and_retry(&failed, &dir, walk_data) {
|
||||
return walk(dir, walk_data, depth);
|
||||
}
|
||||
} else {
|
||||
vec![]
|
||||
prog_data.no_permissions.store(true, ORDERING)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if !dir.is_file() {
|
||||
let mut editable_error = errors.lock().unwrap();
|
||||
let bad_file = dir.as_os_str().to_string_lossy().into();
|
||||
editable_error.file_not_found.insert(bad_file);
|
||||
}
|
||||
vec![]
|
||||
};
|
||||
let is_symlink = if walk_data.follow_links {
|
||||
match fs::symlink_metadata(&dir) {
|
||||
Ok(metadata) => metadata.file_type().is_symlink(),
|
||||
Err(_) => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
build_node(dir, children, is_symlink, false, depth, walk_data)
|
||||
}
|
||||
|
||||
fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
|
||||
let mut editable_error = walk_data.errors.lock().unwrap();
|
||||
match failed.kind() {
|
||||
std::io::ErrorKind::PermissionDenied => {
|
||||
editable_error
|
||||
.no_permissions
|
||||
.insert(dir.to_string_lossy().into());
|
||||
}
|
||||
std::io::ErrorKind::InvalidInput => {
|
||||
editable_error
|
||||
.no_permissions
|
||||
.insert(dir.to_string_lossy().into());
|
||||
}
|
||||
std::io::ErrorKind::NotFound => {
|
||||
editable_error.file_not_found.insert(failed.to_string());
|
||||
}
|
||||
std::io::ErrorKind::Interrupted => {
|
||||
let mut editable_error = walk_data.errors.lock().unwrap();
|
||||
editable_error.interrupted_error += 1;
|
||||
if editable_error.interrupted_error > 3 {
|
||||
panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
editable_error.unknown_error.insert(failed.to_string());
|
||||
}
|
||||
None
|
||||
})
|
||||
.collect();
|
||||
} else if !dir.is_file() {
|
||||
walk_data.progress_data.no_permissions.store(true, ORDERING)
|
||||
}
|
||||
false
|
||||
build_node(
|
||||
dir,
|
||||
children,
|
||||
walk_data.filter_regex,
|
||||
walk_data.invert_filter_regex,
|
||||
walk_data.use_apparent_size,
|
||||
false,
|
||||
false,
|
||||
walk_data.by_filecount,
|
||||
depth,
|
||||
)
|
||||
}
|
||||
|
||||
mod tests {
|
||||
|
||||
#[allow(unused_imports)]
|
||||
use super::*;
|
||||
|
||||
@@ -315,43 +203,17 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn create_walker<'a>(use_apparent_size: bool) -> WalkData<'a> {
|
||||
use crate::PIndicator;
|
||||
let indicator = PIndicator::build_me();
|
||||
WalkData {
|
||||
ignore_directories: HashSet::new(),
|
||||
filter_regex: &[],
|
||||
invert_filter_regex: &[],
|
||||
allowed_filesystems: HashSet::new(),
|
||||
filter_modified_time: Some((Operator::GreaterThan, 0)),
|
||||
filter_accessed_time: Some((Operator::GreaterThan, 0)),
|
||||
filter_changed_time: Some((Operator::GreaterThan, 0)),
|
||||
use_apparent_size,
|
||||
by_filecount: false,
|
||||
by_filetime: &None,
|
||||
ignore_hidden: false,
|
||||
follow_links: false,
|
||||
progress_data: indicator.data.clone(),
|
||||
errors: Arc::new(Mutex::new(RuntimeErrors::default())),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[allow(clippy::redundant_clone)]
|
||||
fn test_should_ignore_file() {
|
||||
let mut inodes = HashSet::new();
|
||||
let n = create_node();
|
||||
let walkdata = create_walker(false);
|
||||
|
||||
// First time we insert the node
|
||||
assert_eq!(
|
||||
clean_inodes(n.clone(), &mut inodes, &walkdata),
|
||||
Some(n.clone())
|
||||
);
|
||||
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), Some(n.clone()));
|
||||
|
||||
// Second time is a duplicate - we ignore it
|
||||
assert_eq!(clean_inodes(n.clone(), &mut inodes, &walkdata), None);
|
||||
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -359,53 +221,9 @@ mod tests {
|
||||
fn test_should_not_ignore_files_if_using_apparent_size() {
|
||||
let mut inodes = HashSet::new();
|
||||
let n = create_node();
|
||||
let walkdata = create_walker(true);
|
||||
|
||||
// If using apparent size we include Nodes, even if duplicate inodes
|
||||
assert_eq!(
|
||||
clean_inodes(n.clone(), &mut inodes, &walkdata),
|
||||
Some(n.clone())
|
||||
);
|
||||
assert_eq!(
|
||||
clean_inodes(n.clone(), &mut inodes, &walkdata),
|
||||
Some(n.clone())
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_total_ordering_of_sort_by_inode() {
|
||||
use std::str::FromStr;
|
||||
|
||||
let a = Node {
|
||||
name: PathBuf::from_str("a").unwrap(),
|
||||
size: 0,
|
||||
children: vec![],
|
||||
inode_device: Some((3, 66310)),
|
||||
depth: 0,
|
||||
};
|
||||
|
||||
let b = Node {
|
||||
name: PathBuf::from_str("b").unwrap(),
|
||||
size: 0,
|
||||
children: vec![],
|
||||
inode_device: None,
|
||||
depth: 0,
|
||||
};
|
||||
|
||||
let c = Node {
|
||||
name: PathBuf::from_str("c").unwrap(),
|
||||
size: 0,
|
||||
children: vec![],
|
||||
inode_device: Some((1, 66310)),
|
||||
depth: 0,
|
||||
};
|
||||
|
||||
assert_eq!(sort_by_inode(&a, &b), Ordering::Greater);
|
||||
assert_eq!(sort_by_inode(&a, &c), Ordering::Greater);
|
||||
assert_eq!(sort_by_inode(&c, &b), Ordering::Greater);
|
||||
|
||||
assert_eq!(sort_by_inode(&b, &a), Ordering::Less);
|
||||
assert_eq!(sort_by_inode(&c, &a), Ordering::Less);
|
||||
assert_eq!(sort_by_inode(&b, &c), Ordering::Less);
|
||||
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
|
||||
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
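A hedged sketch of how the walker above is driven, assuming the newer walk_it(dirs, &walk_data) signature and a WalkData populated the same way as the create_walker() test helper; run_walk and the example paths are illustrative.

use std::collections::HashSet;
use std::path::PathBuf;

// `walk_data` would be filled in like create_walker() in the tests above.
fn run_walk(walk_data: &WalkData) -> Vec<Node> {
    // Directories passed on the command line, deduplicated via a HashSet.
    let dirs: HashSet<PathBuf> = [".", "./target"].iter().map(PathBuf::from).collect();
    // Hard-linked duplicates are removed by clean_inodes inside walk_it.
    walk_it(dirs, walk_data)
}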
298
src/display.rs
@@ -1,5 +1,4 @@
|
||||
use crate::display_node::DisplayNode;
|
||||
use crate::node::FileTime;
|
||||
|
||||
use ansi_term::Colour::Red;
|
||||
use lscolors::{LsColors, Style};
|
||||
@@ -8,31 +7,23 @@ use unicode_width::UnicodeWidthStr;
|
||||
|
||||
use stfu8::encode_u8;
|
||||
|
||||
use chrono::{DateTime, Local, TimeZone, Utc};
|
||||
use std::cmp::max;
|
||||
use std::cmp::min;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::collections::VecDeque;
|
||||
use std::fs;
|
||||
use std::hash::Hash;
|
||||
use std::iter::repeat;
|
||||
use std::os::unix::ffi::OsStrExt;
|
||||
use std::path::Path;
|
||||
use thousands::Separable;
|
||||
|
||||
pub static UNITS: [char; 5] = ['P', 'T', 'G', 'M', 'K'];
|
||||
pub static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
|
||||
static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
|
||||
const FILETIME_SHOW_LENGTH: usize = 19;
|
||||
|
||||
pub struct InitialDisplayData {
|
||||
pub short_paths: bool,
|
||||
pub is_reversed: bool,
|
||||
pub colors_on: bool,
|
||||
pub by_filecount: bool,
|
||||
pub by_filetime: Option<FileTime>,
|
||||
pub is_screen_reader: bool,
|
||||
pub output_format: String,
|
||||
pub iso: bool,
|
||||
pub bars_on_right: bool,
|
||||
}
|
||||
|
||||
@@ -42,7 +33,6 @@ pub struct DisplayData {
|
||||
pub base_size: u64,
|
||||
pub longest_string_length: usize,
|
||||
pub ls_colors: LsColors,
|
||||
pub duplicate_names: HashMap<String, u32>,
|
||||
}
|
||||
|
||||
impl DisplayData {
|
||||
@@ -140,16 +130,19 @@ pub fn draw_it(
|
||||
root_node: &DisplayNode,
|
||||
skip_total: bool,
|
||||
) {
|
||||
|
||||
let duplicate_names = check_for_dup_names(&root_node);
|
||||
let biggest = match skip_total {
|
||||
false => root_node,
|
||||
true => root_node
|
||||
.get_children_from_node(false)
|
||||
.next()
|
||||
.unwrap_or(root_node),
|
||||
};
|
||||
|
||||
let num_chars_needed_on_left_most = if idd.by_filecount {
|
||||
let max_size = root_node.size;
|
||||
let max_size = biggest.size;
|
||||
max_size.separate_with_commas().chars().count()
|
||||
} else if idd.by_filetime.is_some() {
|
||||
FILETIME_SHOW_LENGTH
|
||||
} else {
|
||||
find_biggest_size_str(root_node, &idd.output_format)
|
||||
find_biggest_size_str(root_node, idd.iso)
|
||||
};
|
||||
|
||||
assert!(
|
||||
@@ -157,12 +150,10 @@ pub fn draw_it(
|
||||
"Not enough terminal width"
|
||||
);
|
||||
|
||||
// let duplicate_dir_names = find_duplicate_names(root_node, idd.short_paths);
|
||||
|
||||
let allowed_width = terminal_width - num_chars_needed_on_left_most - 2;
|
||||
let num_indent_chars = 3;
|
||||
let longest_string_length =
|
||||
find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd, &duplicate_names);
|
||||
find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd);
|
||||
|
||||
let max_bar_length = if no_percent_bars || longest_string_length + 7 >= allowed_width {
|
||||
0
|
||||
@@ -175,10 +166,9 @@ pub fn draw_it(
|
||||
let display_data = DisplayData {
|
||||
initial: idd,
|
||||
num_chars_needed_on_left_most,
|
||||
base_size: root_node.size,
|
||||
base_size: biggest.size,
|
||||
longest_string_length,
|
||||
ls_colors: LsColors::from_env().unwrap_or_default(),
|
||||
duplicate_names
|
||||
};
|
||||
let draw_data = DrawData {
|
||||
indent: "".to_string(),
|
||||
@@ -199,89 +189,11 @@ pub fn draw_it(
|
||||
}
|
||||
}
|
||||
}
|
||||
fn check_for_dup_names(result:&DisplayNode) -> HashMap<String, u32> {
|
||||
let mut names = HashMap::new();
|
||||
let mut dup_names = HashMap::new();
|
||||
// let empty = HashSet::new();
|
||||
|
||||
let mut results = VecDeque::new();
|
||||
results.push_back((result, 0));
|
||||
|
||||
while results.len() > 0 {
|
||||
let (current, level) = results.pop_front().unwrap();
|
||||
|
||||
let mut folders = current.name.iter().rev();
|
||||
let mut s = String::new();
|
||||
|
||||
// Look at parent folder names - if they differ and we are printing them
|
||||
// we dont need the helper
|
||||
for _ in 0..level {
|
||||
s.push_str( &encode_u8(folders.next().unwrap().as_bytes()));
|
||||
}
|
||||
|
||||
if names.contains_key(&s){
|
||||
// TODO: compare s with names[s]
|
||||
// and walk back until you find a difference.
|
||||
dup_names.insert(s, level);
|
||||
} else {
|
||||
names.insert(s, vec![¤t.name]);
|
||||
}
|
||||
|
||||
current.children.iter().for_each(|node| {results.push_back((&node, level+1));});
|
||||
}
|
||||
println!("{:?}", names);
|
||||
println!("{:?}", dup_names);
|
||||
dup_names
|
||||
}
|
||||
|
||||
|
||||
pub fn get_printable_name(node: &DisplayNode, short_paths: bool, dup_names: &HashMap<String, u32>) -> String {
|
||||
let dir_name = &node.name;
|
||||
let printable_name = {
|
||||
if short_paths {
|
||||
match dir_name.parent() {
|
||||
Some(prefix) => match dir_name.strip_prefix(prefix) {
|
||||
Ok(base) => base,
|
||||
Err(_) => dir_name,
|
||||
},
|
||||
None => dir_name,
|
||||
}
|
||||
} else {
|
||||
dir_name
|
||||
}
|
||||
};
|
||||
let core = encode_u8(printable_name.display().to_string().as_bytes());
|
||||
|
||||
if dup_names.contains_key(&core) {
|
||||
let level = dup_names[&core];
|
||||
|
||||
let mut folders = node.name.iter().rev();
|
||||
folders.next();
|
||||
let mut extra = VecDeque::new();
|
||||
for _ in (0..level){
|
||||
extra.push_back( encode_u8(folders.next().unwrap().as_bytes()) );
|
||||
}
|
||||
let h = extra.iter().fold(String::new(), |acc, entry| {
|
||||
acc + entry
|
||||
});
|
||||
// let helper = extra.make_contiguous().iter().collect::<Vec<&String>>();
|
||||
// let h = helper.join("/");
|
||||
|
||||
// let mut folders = dir_name.iter().rev(); //.next().next().unwrap();
|
||||
// folders.next();
|
||||
// let par = encode_u8(folders.next().unwrap().as_bytes());
|
||||
format!("{core} ({h})")
|
||||
} else {
|
||||
core
|
||||
}
|
||||
}
|
||||
|
||||
fn find_biggest_size_str(node: &DisplayNode, output_format: &str) -> usize {
|
||||
let mut mx = human_readable_number(node.size, output_format)
|
||||
.chars()
|
||||
.count();
|
||||
fn find_biggest_size_str(node: &DisplayNode, iso: bool) -> usize {
|
||||
let mut mx = human_readable_number(node.size, iso).chars().count();
|
||||
for n in node.children.iter() {
|
||||
mx = max(mx, find_biggest_size_str(n, output_format));
|
||||
mx = max(mx, find_biggest_size_str(n, iso));
|
||||
}
|
||||
mx
|
||||
}
|
||||
@@ -291,9 +203,8 @@ fn find_longest_dir_name(
|
||||
indent: usize,
|
||||
terminal: usize,
|
||||
idd: &InitialDisplayData,
|
||||
dup_names: &HashMap<String, u32>,
|
||||
) -> usize {
|
||||
let printable_name = get_printable_name(&node, idd.short_paths, dup_names);
|
||||
let printable_name = get_printable_name(&node.name, idd.short_paths);
|
||||
|
||||
let longest = if idd.is_screen_reader {
|
||||
UnicodeWidthStr::width(&*printable_name) + 1
|
||||
@@ -307,7 +218,7 @@ fn find_longest_dir_name(
|
||||
// each none root tree drawing is 2 more chars, hence we increment indent by 2
|
||||
node.children
|
||||
.iter()
|
||||
.map(|c| find_longest_dir_name(c, indent + 2, terminal, idd, dup_names))
|
||||
.map(|c| find_longest_dir_name(c, indent + 2, terminal, idd))
|
||||
.fold(longest, max)
|
||||
}
|
||||
|
||||
@@ -362,8 +273,26 @@ fn clean_indentation_string(s: &str) -> String {
|
||||
is
|
||||
}
|
||||
|
||||
fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
|
||||
let dir_name = dir_name.as_ref();
|
||||
let printable_name = {
|
||||
if short_paths {
|
||||
match dir_name.parent() {
|
||||
Some(prefix) => match dir_name.strip_prefix(prefix) {
|
||||
Ok(base) => base,
|
||||
Err(_) => dir_name,
|
||||
},
|
||||
None => dir_name,
|
||||
}
|
||||
} else {
|
||||
dir_name
|
||||
}
|
||||
};
|
||||
encode_u8(printable_name.display().to_string().as_bytes())
|
||||
}
|
||||
|
||||
fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &DisplayData) -> String {
|
||||
let name = get_printable_name(&node, display_data.initial.short_paths, &display_data.duplicate_names);
|
||||
let name = get_printable_name(&node.name, display_data.initial.short_paths);
|
||||
let indent_and_name = format!("{indent} {name}");
|
||||
let width = UnicodeWidthStr::width(&*indent_and_name);
|
||||
|
||||
@@ -411,8 +340,6 @@ pub fn format_string(
|
||||
if display_data.initial.is_screen_reader {
|
||||
// if screen_reader then bars is 'depth'
|
||||
format!("{pretty_name} {bars} {pretty_size}{percent}")
|
||||
} else if display_data.initial.by_filetime.is_some() {
|
||||
format!("{pretty_size} {indent}{pretty_name}")
|
||||
} else {
|
||||
format!("{pretty_size} {indent} {pretty_name}{percent}")
|
||||
}
|
||||
@@ -438,7 +365,7 @@ fn get_name_percent(
|
||||
let name_and_padding = pad_or_trim_filename(node, indent, display_data);
|
||||
(percents, name_and_padding)
|
||||
} else {
|
||||
let n = get_printable_name(&node, display_data.initial.short_paths, &display_data.duplicate_names);
|
||||
let n = get_printable_name(&node.name, display_data.initial.short_paths);
|
||||
let name = maybe_trim_filename(n, indent, display_data);
|
||||
("".into(), name)
|
||||
}
|
||||
@@ -447,10 +374,8 @@ fn get_name_percent(
|
||||
fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
|
||||
let output = if display_data.initial.by_filecount {
|
||||
node.size.separate_with_commas()
|
||||
} else if display_data.initial.by_filetime.is_some() {
|
||||
get_pretty_file_modified_time(node.size as i64)
|
||||
} else {
|
||||
human_readable_number(node.size, &display_data.initial.output_format)
|
||||
human_readable_number(node.size, display_data.initial.iso)
|
||||
};
|
||||
let spaces_to_add = display_data.num_chars_needed_on_left_most - output.chars().count();
|
||||
let output = " ".repeat(spaces_to_add) + output.as_str();
|
||||
@@ -462,14 +387,6 @@ fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayD
|
||||
}
|
||||
}
|
||||
|
||||
fn get_pretty_file_modified_time(timestamp: i64) -> String {
|
||||
let datetime: DateTime<Utc> = Utc.timestamp_opt(timestamp, 0).unwrap();
|
||||
|
||||
let local_datetime = datetime.with_timezone(&Local);
|
||||
|
||||
local_datetime.format("%Y-%m-%dT%H:%M:%S").to_string()
|
||||
}
|
||||
|
||||
fn get_pretty_name(
|
||||
node: &DisplayNode,
|
||||
name_and_padding: String,
|
||||
@@ -490,51 +407,19 @@ fn get_pretty_name(
|
||||
}
|
||||
}
|
||||
|
||||
// If we are working with SI units or not
|
||||
pub fn get_type_of_thousand(output_str: &str) -> u64 {
|
||||
if output_str.is_empty() {
|
||||
1024
|
||||
} else if output_str == "si" {
|
||||
1000
|
||||
} else if output_str.contains('i') || output_str.len() == 1 {
|
||||
1024
|
||||
} else {
|
||||
1000
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_number_format(output_str: &str) -> Option<(u64, char)> {
|
||||
if output_str.starts_with('b') {
|
||||
return Some((1, 'B'));
|
||||
}
|
||||
pub fn human_readable_number(size: u64, iso: bool) -> String {
|
||||
for (i, u) in UNITS.iter().enumerate() {
|
||||
if output_str.starts_with((*u).to_ascii_lowercase()) {
|
||||
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
|
||||
return Some((marker, *u));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn human_readable_number(size: u64, output_str: &str) -> String {
|
||||
match get_number_format(output_str) {
|
||||
Some((x, u)) => {
|
||||
format!("{}{}", (size / x), u)
|
||||
}
|
||||
None => {
|
||||
for (i, u) in UNITS.iter().enumerate() {
|
||||
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
|
||||
if size >= marker {
|
||||
if size / marker < 10 {
|
||||
return format!("{:.1}{}", (size as f32 / marker as f32), u);
|
||||
} else {
|
||||
return format!("{}{}", (size / marker), u);
|
||||
}
|
||||
}
|
||||
let num: u64 = if iso { 1000 } else { 1024 };
|
||||
let marker = num.pow((UNITS.len() - i) as u32);
|
||||
if size >= marker {
|
||||
if size / marker < 10 {
|
||||
return format!("{:.1}{}", (size as f32 / marker as f32), u);
|
||||
} else {
|
||||
return format!("{}{}", (size / marker), u);
|
||||
}
|
||||
format!("{size}B")
|
||||
}
|
||||
}
|
||||
format!("{size}B")
|
||||
}
|
||||
|
||||
mod tests {
|
||||
@@ -550,9 +435,8 @@ mod tests {
|
||||
is_reversed: false,
|
||||
colors_on: false,
|
||||
by_filecount: false,
|
||||
by_filetime: None,
|
||||
is_screen_reader: false,
|
||||
output_format: "".into(),
|
||||
iso: false,
|
||||
bars_on_right: false,
|
||||
};
|
||||
DisplayData {
|
||||
@@ -619,51 +503,26 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_human_readable_number() {
|
||||
assert_eq!(human_readable_number(1, ""), "1B");
|
||||
assert_eq!(human_readable_number(956, ""), "956B");
|
||||
assert_eq!(human_readable_number(1004, ""), "1004B");
|
||||
assert_eq!(human_readable_number(1024, ""), "1.0K");
|
||||
assert_eq!(human_readable_number(1536, ""), "1.5K");
|
||||
assert_eq!(human_readable_number(1024 * 512, ""), "512K");
|
||||
assert_eq!(human_readable_number(1024 * 1024, ""), "1.0M");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
|
||||
assert_eq!(human_readable_number(1, false), "1B");
|
||||
assert_eq!(human_readable_number(956, false), "956B");
|
||||
assert_eq!(human_readable_number(1004, false), "1004B");
|
||||
assert_eq!(human_readable_number(1024, false), "1.0K");
|
||||
assert_eq!(human_readable_number(1536, false), "1.5K");
|
||||
assert_eq!(human_readable_number(1024 * 512, false), "512K");
|
||||
assert_eq!(human_readable_number(1024 * 1024, false), "1.0M");
|
||||
assert_eq!(
|
||||
human_readable_number(1024 * 1024 * 1024 * 1024 * 234, ""),
|
||||
"234T"
|
||||
human_readable_number(1024 * 1024 * 1024 - 1, false),
|
||||
"1023M"
|
||||
);
|
||||
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, false), "20G");
|
||||
assert_eq!(
|
||||
human_readable_number(1024 * 1024 * 1024 * 1024 * 1024, ""),
|
||||
"1.0P"
|
||||
human_readable_number(1024 * 1024 * 1024 * 1024, false),
|
||||
"1.0T"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_human_readable_number_si() {
|
||||
assert_eq!(human_readable_number(1024 * 100, ""), "100K");
|
||||
assert_eq!(human_readable_number(1024 * 100, "si"), "102K");
|
||||
}
|
||||
|
||||
// Refer to https://en.wikipedia.org/wiki/Byte#Multiple-byte_units
|
||||
#[test]
|
||||
fn test_human_readable_number_kb() {
|
||||
let hrn = human_readable_number;
|
||||
assert_eq!(hrn(1023, "b"), "1023B");
|
||||
assert_eq!(hrn(1000 * 1000, "bytes"), "1000000B");
|
||||
assert_eq!(hrn(1023, "kb"), "1K");
|
||||
assert_eq!(hrn(1023, "k"), "0K");
|
||||
assert_eq!(hrn(1023, "kib"), "0K");
|
||||
assert_eq!(hrn(1024, "kib"), "1K");
|
||||
assert_eq!(hrn(1024 * 512, "kib"), "512K");
|
||||
assert_eq!(hrn(1024 * 1024, "kib"), "1024K");
|
||||
assert_eq!(hrn(1024 * 1000 * 1000 * 20, "kib"), "20000000K");
|
||||
assert_eq!(hrn(1024 * 1024 * 1000 * 20, "mib"), "20000M");
|
||||
assert_eq!(hrn(1024 * 1024 * 1024 * 20, "gib"), "20G");
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn build_draw_data(disp: &DisplayData, size: u32) -> (DrawData<'_>, DisplayNode) {
|
||||
fn build_draw_data<'a>(disp: &'a DisplayData, size: u32) -> (DrawData<'a>, DisplayNode) {
|
||||
let n = DisplayNode {
|
||||
name: PathBuf::from("/short"),
|
||||
size: 2_u64.pow(size),
|
||||
@@ -715,37 +574,4 @@ mod tests {
|
||||
let bar = dd.generate_bar(&n, 5);
|
||||
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_pretty_file_modified_time() {
|
||||
// Create a timestamp for 2023-07-12 00:00:00 in local time
|
||||
let local_dt = Local.with_ymd_and_hms(2023, 7, 12, 0, 0, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
|
||||
// Format expected output
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
|
||||
// Test another timestamp
|
||||
let local_dt = Local.with_ymd_and_hms(2020, 1, 1, 12, 0, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
|
||||
// Test timestamp for epoch start (1970-01-01T00:00:00)
|
||||
let local_dt = Local.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
|
||||
// Test a future timestamp
|
||||
let local_dt = Local.with_ymd_and_hms(2030, 12, 25, 6, 30, 0).unwrap();
|
||||
let timestamp = local_dt.timestamp();
|
||||
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
|
||||
|
||||
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,5 @@
use std::cell::RefCell;
use std::path::PathBuf;

use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};

use crate::display::human_readable_number;

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct DisplayNode {
// Note: the order of fields in important here, for PartialEq and PartialOrd
@@ -29,30 +23,3 @@ impl DisplayNode {
out
}
}

// Only used for -j 'json' flag combined with -o 'output_type' flag
// Used to pass the output_type into the custom Serde serializer
thread_local! {
pub static OUTPUT_TYPE: RefCell<String> = const { RefCell::new(String::new()) };
}

/*
We need the custom Serialize incase someone uses the -o flag to pass a custom output type in
(show size in Mb / Gb etc).
Sadly this also necessitates a global variable OUTPUT_TYPE as we can not pass the output_type flag
into the serialize method
*/
impl Serialize for DisplayNode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let readable_size = OUTPUT_TYPE
.with(|output_type| human_readable_number(self.size, output_type.borrow().as_str()));
let mut state = serializer.serialize_struct("DisplayNode", 2)?;
state.serialize_field("size", &(readable_size))?;
state.serialize_field("name", &self.name)?;
state.serialize_field("children", &self.children)?;
state.end()
}
}
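The comment block above explains the awkward part of this change: Serialize::serialize has a fixed signature, so the user's -o output format has to reach the serializer through ambient thread-local state rather than an argument. A reduced, self-contained sketch of that pattern (illustrative names only, not the crate's API):

    use std::cell::RefCell;

    thread_local! {
        // Set once before serializing, read inside the Serialize impl.
        static FORMAT: RefCell<String> = RefCell::new(String::new());
    }

    fn set_format(fmt: &str) {
        FORMAT.with(|f| *f.borrow_mut() = fmt.to_string());
    }

    fn render(size: u64) -> String {
        // Stand-in for human_readable_number: only the lookup pattern matters here.
        FORMAT.with(|f| format!("{}{}", size, f.borrow().as_str()))
    }

    fn main() {
        set_format("K");
        assert_eq!(render(42), "42K");
    }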
||||
@@ -1,9 +1,7 @@
|
||||
use crate::display_node::DisplayNode;
|
||||
use crate::node::FileTime;
|
||||
use crate::node::Node;
|
||||
use std::collections::BinaryHeap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
|
||||
@@ -16,12 +14,7 @@ pub struct AggregateData {
|
||||
pub using_a_filter: bool,
|
||||
}
|
||||
|
||||
pub fn get_biggest(
|
||||
mut top_level_nodes: Vec<Node>,
|
||||
display_data: AggregateData,
|
||||
by_filetime: &Option<FileTime>,
|
||||
keep_collapsed: HashSet<PathBuf>,
|
||||
) -> Option<DisplayNode> {
|
||||
pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> Option<DisplayNode> {
|
||||
if top_level_nodes.is_empty() {
|
||||
// perhaps change this, bring back Error object?
|
||||
return None;
|
||||
@@ -31,16 +24,7 @@ pub fn get_biggest(
|
||||
let root;
|
||||
|
||||
if number_top_level_nodes > 1 {
|
||||
let size = if by_filetime.is_some() {
|
||||
top_level_nodes
|
||||
.iter()
|
||||
.map(|node| node.size)
|
||||
.max()
|
||||
.unwrap_or(0)
|
||||
} else {
|
||||
top_level_nodes.iter().map(|node| node.size).sum()
|
||||
};
|
||||
|
||||
let size = top_level_nodes.iter().map(|node| node.size).sum();
|
||||
root = Node {
|
||||
name: PathBuf::from("(total)"),
|
||||
size,
|
||||
@@ -48,7 +32,6 @@ pub fn get_biggest(
|
||||
inode_device: None,
|
||||
depth: 0,
|
||||
};
|
||||
|
||||
// Always include the base nodes if we add a 'parent' (total) node
|
||||
heap = always_add_children(&display_data, &root, heap);
|
||||
} else {
|
||||
@@ -56,20 +39,13 @@ pub fn get_biggest(
|
||||
heap = add_children(&display_data, &root, heap);
|
||||
}
|
||||
|
||||
let result = fill_remaining_lines(
|
||||
heap,
|
||||
&root,
|
||||
display_data,
|
||||
keep_collapsed,
|
||||
);
|
||||
Some(result)
|
||||
Some(fill_remaining_lines(heap, &root, display_data))
|
||||
}
|
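In the get_biggest hunks above, the synthetic "(total)" root changes meaning with the file-time mode: summing child values is right for byte sizes but nonsensical for timestamps, so the newest (maximum) child time is used instead. A tiny standalone illustration of the two aggregations:

    fn main() {
        let sizes: [u64; 3] = [4096, 1024, 512];
        let mtimes: [u64; 3] = [1_650_000_000, 1_700_000_000, 1_699_999_999];

        // Disk usage of a parent is the sum of its children...
        let total: u64 = sizes.iter().sum();
        // ...but the "time" of a parent is its newest child, never a sum.
        let newest: u64 = mtimes.iter().copied().max().unwrap_or(0);

        assert_eq!(total, 5632);
        assert_eq!(newest, 1_700_000_000);
    }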
||||
|
||||
pub fn fill_remaining_lines<'a>(
|
||||
mut heap: BinaryHeap<&'a Node>,
|
||||
root: &'a Node,
|
||||
display_data: AggregateData,
|
||||
keep_collapsed: HashSet<PathBuf>,
|
||||
) -> DisplayNode {
|
||||
let mut allowed_nodes = HashMap::new();
|
||||
|
||||
@@ -80,9 +56,7 @@ pub fn fill_remaining_lines<'a>(
|
||||
if !display_data.only_file || line.children.is_empty() {
|
||||
allowed_nodes.insert(line.name.as_path(), line);
|
||||
}
|
||||
if !keep_collapsed.contains(&line.name) {
|
||||
heap = add_children(&display_data, line, heap);
|
||||
}
|
||||
heap = add_children(&display_data, line, heap);
|
||||
}
|
||||
None => break,
|
||||
}
|
||||
@@ -140,7 +114,7 @@ fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) ->
|
||||
.map(|c| recursive_rebuilder(allowed_nodes, c))
|
||||
.collect();
|
||||
|
||||
build_display_node(new_children, current)
|
||||
build_node(new_children, current)
|
||||
}
|
||||
|
||||
// Applies all allowed nodes as children to current node
|
||||
@@ -153,14 +127,13 @@ fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> Displ
|
||||
children: vec![],
|
||||
})
|
||||
.collect::<Vec<DisplayNode>>();
|
||||
build_display_node(new_children, current)
|
||||
build_node(new_children, current)
|
||||
}
|
||||
|
||||
fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
|
||||
fn build_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
|
||||
new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
|
||||
// println!("{:?}", current.name);
|
||||
DisplayNode {
|
||||
name: PathBuf::from(current.name.display().to_string()),
|
||||
name: current.name.clone(),
|
||||
size: current.size,
|
||||
children: new_children,
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::display_node::DisplayNode;
|
||||
use crate::node::FileTime;
|
||||
use crate::node::Node;
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsStr;
|
||||
@@ -11,11 +10,7 @@ struct ExtensionNode<'a> {
|
||||
extension: Option<&'a OsStr>,
|
||||
}
|
||||
|
||||
pub fn get_all_file_types(
|
||||
top_level_nodes: &[Node],
|
||||
n: usize,
|
||||
by_filetime: &Option<FileTime>,
|
||||
) -> Option<DisplayNode> {
|
||||
pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayNode> {
|
||||
let ext_nodes = {
|
||||
let mut extension_cumulative_sizes = HashMap::new();
|
||||
build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
|
||||
@@ -49,27 +44,16 @@ pub fn get_all_file_types(
|
||||
|
||||
// ...then, aggregate the remaining nodes (if any) into a single "(others)" node
|
||||
if ext_nodes_iter.len() > 0 {
|
||||
let actual_size = if by_filetime.is_some() {
|
||||
ext_nodes_iter.map(|node| node.size).max().unwrap_or(0)
|
||||
} else {
|
||||
ext_nodes_iter.map(|node| node.size).sum()
|
||||
};
|
||||
displayed.push(DisplayNode {
|
||||
name: PathBuf::from("(others)"),
|
||||
size: actual_size,
|
||||
size: ext_nodes_iter.map(|node| node.size).sum(),
|
||||
children: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let actual_size: u64 = if by_filetime.is_some() {
|
||||
displayed.iter().map(|node| node.size).max().unwrap_or(0)
|
||||
} else {
|
||||
displayed.iter().map(|node| node.size).sum()
|
||||
};
|
||||
|
||||
let result = DisplayNode {
|
||||
name: PathBuf::from("(total)"),
|
||||
size: actual_size,
|
||||
size: displayed.iter().map(|node| node.size).sum(),
|
||||
children: displayed,
|
||||
};
|
||||
|
||||
|
||||
379 src/main.rs
@@ -11,31 +11,23 @@ mod progress;
|
||||
mod utils;
|
||||
|
||||
use crate::cli::build_cli;
|
||||
use crate::progress::RuntimeErrors;
|
||||
use clap::parser::ValuesRef;
|
||||
use dir_walker::WalkData;
|
||||
use display::InitialDisplayData;
|
||||
use filter::AggregateData;
|
||||
use progress::PIndicator;
|
||||
use regex::Error;
|
||||
use progress::ORDERING;
|
||||
use std::collections::HashSet;
|
||||
use std::env;
|
||||
use std::fs::read_to_string;
|
||||
use std::io;
|
||||
use std::panic;
|
||||
use std::process;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
use sysinfo::{System, SystemExt};
|
||||
|
||||
use self::display::draw_it;
|
||||
use clap::Values;
|
||||
use config::get_config;
|
||||
use dir_walker::walk_it;
|
||||
use display_node::OUTPUT_TYPE;
|
||||
use filter::get_biggest;
|
||||
use filter_type::get_all_file_types;
|
||||
use rayon::ThreadPoolBuildError;
|
||||
use regex::Regex;
|
||||
use std::cmp::max;
|
||||
use std::path::PathBuf;
|
||||
@@ -46,60 +38,55 @@ use utils::simplify_dir_names;
|
||||
static DEFAULT_NUMBER_OF_LINES: usize = 30;
|
||||
static DEFAULT_TERMINAL_WIDTH: usize = 80;
|
||||
|
||||
fn should_init_color(no_color: bool, force_color: bool) -> bool {
if force_color {
return true;
}
if no_color {
return false;
}
// check if NO_COLOR is set
// https://no-color.org/
if env::var_os("NO_COLOR").is_some() {
return false;
}
if terminal_size().is_none() {
// we are not in a terminal, color may not be needed
return false;
}
// we are in a terminal
||||
fn init_color(no_color: bool) -> bool {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// Required for windows 10
|
||||
// Fails to resolve for windows 8 so disable color
|
||||
match ansi_term::enable_ansi_support() {
|
||||
Ok(_) => true,
|
||||
Err(_) => {
|
||||
eprintln!("This version of Windows does not support ANSI colors");
|
||||
false
|
||||
// If no color is already set do not print a warning message
|
||||
if no_color {
|
||||
true
|
||||
} else {
|
||||
// Required for windows 10
|
||||
// Fails to resolve for windows 8 so disable color
|
||||
match ansi_term::enable_ansi_support() {
|
||||
Ok(_) => no_color,
|
||||
Err(_) => {
|
||||
eprintln!(
|
||||
"This version of Windows does not support ANSI colors, setting no_color flag"
|
||||
);
|
||||
true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
true
|
||||
no_color
|
||||
}
|
||||
}
|
||||
|
||||
fn get_height_of_terminal() -> usize {
|
||||
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
|
||||
// merged
|
||||
terminal_size()
|
||||
// Windows CI runners detect a terminal height of 0
|
||||
.map(|(_, Height(h))| max(h.into(), DEFAULT_NUMBER_OF_LINES))
|
||||
.map(|(_, Height(h))| max(h as usize, DEFAULT_NUMBER_OF_LINES))
|
||||
.unwrap_or(DEFAULT_NUMBER_OF_LINES)
|
||||
- 10
|
||||
}
|
||||
|
||||
fn get_width_of_terminal() -> usize {
|
||||
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
|
||||
// merged
|
||||
terminal_size()
|
||||
.map(|(Width(w), _)| match cfg!(windows) {
|
||||
// Windows CI runners detect a very low terminal width
|
||||
true => max(w.into(), DEFAULT_TERMINAL_WIDTH),
|
||||
false => w.into(),
|
||||
true => max(w as usize, DEFAULT_TERMINAL_WIDTH),
|
||||
false => w as usize,
|
||||
})
|
||||
.unwrap_or(DEFAULT_TERMINAL_WIDTH)
|
||||
}
|
||||
|
||||
fn get_regex_value(maybe_value: Option<ValuesRef<String>>) -> Vec<Regex> {
|
||||
fn get_regex_value(maybe_value: Option<Values>) -> Vec<Regex> {
|
||||
maybe_value
|
||||
.unwrap_or_default()
|
||||
.map(|reg| {
|
||||
@@ -113,300 +100,144 @@ fn get_regex_value(maybe_value: Option<ValuesRef<String>>) -> Vec<Regex> {
|
||||
|
||||
fn main() {
|
||||
let options = build_cli().get_matches();
|
||||
let config = get_config(options.get_one::<String>("config").cloned());
|
||||
let config = get_config();
|
||||
|
||||
let errors = RuntimeErrors::default();
|
||||
let error_listen_for_ctrlc = Arc::new(Mutex::new(errors));
|
||||
let errors_for_rayon = error_listen_for_ctrlc.clone();
|
||||
let errors_final = error_listen_for_ctrlc.clone();
|
||||
let is_in_listing = Arc::new(AtomicBool::new(false));
|
||||
let cloned_is_in_listing = Arc::clone(&is_in_listing);
|
||||
|
||||
ctrlc::set_handler(move || {
|
||||
error_listen_for_ctrlc.lock().unwrap().abort = true;
|
||||
println!("\nAborting");
|
||||
if cloned_is_in_listing.load(Ordering::Relaxed) {
|
||||
process::exit(1);
|
||||
}
|
||||
})
|
||||
.expect("Error setting Ctrl-C handler");
|
||||
|
||||
is_in_listing.store(true, Ordering::Relaxed);
|
||||
let target_dirs = match config.get_files_from(&options) {
|
||||
Some(path) => {
|
||||
if path == "-" {
|
||||
let mut targets_to_add = io::stdin()
|
||||
.lines()
|
||||
.map_while(Result::ok)
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
if targets_to_add.is_empty() {
|
||||
eprintln!("No input provided, defaulting to current directory");
|
||||
targets_to_add.push(".".to_owned());
|
||||
}
|
||||
targets_to_add
|
||||
} else {
|
||||
// read file
|
||||
match read_to_string(path) {
|
||||
Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
|
||||
Err(e) => {
|
||||
eprintln!("Error reading file: {e}");
|
||||
vec![".".to_owned()]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None => match options.get_many::<String>("params") {
|
||||
Some(values) => values.cloned().collect(),
|
||||
None => vec![".".to_owned()],
|
||||
},
|
||||
let target_dirs = match options.values_of("inputs") {
|
||||
Some(values) => values.collect(),
|
||||
None => vec!["."],
|
||||
};
|
||||
is_in_listing.store(false, Ordering::Relaxed);
|
||||
|
||||
let summarize_file_types = options.get_flag("types");
|
||||
let summarize_file_types = options.is_present("types");
|
||||
|
||||
let filter_regexs = get_regex_value(options.get_many("filter"));
|
||||
let invert_filter_regexs = get_regex_value(options.get_many("invert_filter"));
|
||||
let filter_regexs = get_regex_value(options.values_of("filter"));
|
||||
let invert_filter_regexs = get_regex_value(options.values_of("invert_filter"));
|
||||
|
||||
let terminal_width: usize = match options.get_one::<usize>("width") {
|
||||
Some(&val) => val,
|
||||
None => get_width_of_terminal(),
|
||||
};
|
||||
let terminal_width = options
|
||||
.value_of_t("width")
|
||||
.unwrap_or_else(|_| get_width_of_terminal());
|
||||
|
||||
let depth = config.get_depth(&options);
|
||||
|
||||
// If depth is set, then we set the default number_of_lines to be max
|
||||
// instead of screen height
|
||||
|
||||
let number_of_lines = match options.get_one::<usize>("number_of_lines") {
|
||||
Some(&val) => val,
|
||||
None => {
|
||||
if depth != usize::MAX {
|
||||
usize::MAX
|
||||
} else {
|
||||
get_height_of_terminal()
|
||||
}
|
||||
}
|
||||
let default_height = if depth != usize::MAX {
|
||||
usize::MAX
|
||||
} else {
|
||||
get_height_of_terminal()
|
||||
};
|
||||
|
||||
let is_colors = should_init_color(
|
||||
config.get_no_colors(&options),
|
||||
config.get_force_colors(&options),
|
||||
);
|
||||
let number_of_lines = options
|
||||
.value_of("number_of_lines")
|
||||
.and_then(|v| {
|
||||
v.parse()
|
||||
.map_err(|_| eprintln!("Ignoring bad value for number_of_lines"))
|
||||
.ok()
|
||||
})
|
||||
.unwrap_or(default_height);
|
||||
|
||||
let ignore_directories = match options.get_many::<String>("ignore_directory") {
|
||||
Some(values) => values
|
||||
.map(|v| v.as_str())
|
||||
.map(PathBuf::from)
|
||||
.collect::<Vec<PathBuf>>(),
|
||||
None => vec![],
|
||||
};
|
||||
let no_colors = init_color(config.get_no_colors(&options));
|
||||
|
||||
let ignore_from_file_result = match options.get_one::<String>("ignore_all_in_file") {
|
||||
Some(val) => read_to_string(val)
|
||||
.unwrap()
|
||||
.lines()
|
||||
.map(Regex::new)
|
||||
.collect::<Vec<Result<Regex, Error>>>(),
|
||||
None => vec![],
|
||||
};
|
||||
let ignore_from_file = ignore_from_file_result
|
||||
.into_iter()
|
||||
.filter_map(|x| x.ok())
|
||||
.collect::<Vec<Regex>>();
|
||||
let ignore_directories = options
|
||||
.values_of("ignore_directory")
|
||||
.unwrap_or_default()
|
||||
.map(PathBuf::from);
|
||||
|
||||
let invert_filter_regexs = invert_filter_regexs
|
||||
.into_iter()
|
||||
.chain(ignore_from_file)
|
||||
.collect::<Vec<Regex>>();
|
||||
|
||||
let by_filecount = options.get_flag("by_filecount");
|
||||
let by_filetime = config.get_filetime(&options);
|
||||
let limit_filesystem = options.get_flag("limit_filesystem");
|
||||
let follow_links = options.get_flag("dereference_links");
|
||||
let by_filecount = options.is_present("by_filecount");
|
||||
let limit_filesystem = options.is_present("limit_filesystem");
|
||||
let follow_links = options.is_present("dereference_links");
|
||||
|
||||
let simplified_dirs = simplify_dir_names(target_dirs);
|
||||
let allowed_filesystems = limit_filesystem
|
||||
.then(|| get_filesystem_devices(&target_dirs, follow_links))
|
||||
.then(|| get_filesystem_devices(simplified_dirs.iter()))
|
||||
.unwrap_or_default();
|
||||
let simplified_dirs = simplify_dir_names(&target_dirs);
|
||||
|
||||
let ignored_full_path: HashSet<PathBuf> = ignore_directories
|
||||
.into_iter()
|
||||
.flat_map(|x| simplified_dirs.iter().map(move |d| d.join(&x)))
|
||||
.collect();
|
||||
|
||||
let output_format = config.get_output_format(&options);
|
||||
let iso = config.get_iso(&options);
|
||||
|
||||
let ignore_hidden = config.get_ignore_hidden(&options);
|
||||
|
||||
let mut indicator = PIndicator::build_me();
|
||||
if !config.get_disable_progress(&options) {
|
||||
indicator.spawn(output_format.clone())
|
||||
indicator.spawn(iso);
|
||||
}
|
||||
|
||||
let keep_collapsed: HashSet<PathBuf> = match options.get_many::<String>("collapse") {
|
||||
Some(collapse) => {
|
||||
let mut combined_dirs = HashSet::new();
|
||||
for collapse_dir in collapse {
|
||||
for target_dir in target_dirs.iter() {
|
||||
combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
|
||||
}
|
||||
}
|
||||
combined_dirs
|
||||
}
|
||||
None => HashSet::new(),
|
||||
};
|
||||
|
||||
let filter_modified_time = config.get_modified_time_operator(&options);
|
||||
let filter_accessed_time = config.get_accessed_time_operator(&options);
|
||||
let filter_changed_time = config.get_changed_time_operator(&options);
|
||||
|
||||
let walk_data = WalkData {
|
||||
ignore_directories: ignored_full_path,
|
||||
filter_regex: &filter_regexs,
|
||||
invert_filter_regex: &invert_filter_regexs,
|
||||
allowed_filesystems,
|
||||
filter_modified_time,
|
||||
filter_accessed_time,
|
||||
filter_changed_time,
|
||||
use_apparent_size: config.get_apparent_size(&options),
|
||||
by_filecount,
|
||||
by_filetime: &by_filetime,
|
||||
ignore_hidden,
|
||||
follow_links,
|
||||
progress_data: indicator.data.clone(),
|
||||
errors: errors_for_rayon,
|
||||
};
|
||||
let threads_to_use = config.get_threads(&options);
|
||||
let stack_size = config.get_custom_stack_size(&options);
|
||||
init_rayon(&stack_size, &threads_to_use);
|
||||
|
||||
let top_level_nodes = walk_it(simplified_dirs, &walk_data);
|
||||
let result = panic::catch_unwind(|| init_rayon);
|
||||
if result.is_err() {
|
||||
eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1")
|
||||
}
|
||||
|
||||
let top_level_nodes = walk_it(simplified_dirs, walk_data);
|
||||
|
||||
let tree = match summarize_file_types {
|
||||
true => get_all_file_types(&top_level_nodes, number_of_lines, &by_filetime),
|
||||
true => get_all_file_types(&top_level_nodes, number_of_lines),
|
||||
false => {
|
||||
let agg_data = AggregateData {
|
||||
min_size: config.get_min_size(&options),
|
||||
min_size: config.get_min_size(&options, iso),
|
||||
only_dir: config.get_only_dir(&options),
|
||||
only_file: config.get_only_file(&options),
|
||||
number_of_lines,
|
||||
depth,
|
||||
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
|
||||
using_a_filter: options.values_of("filter").is_some()
|
||||
|| options.value_of("invert_filter").is_some(),
|
||||
};
|
||||
get_biggest(top_level_nodes, agg_data, &by_filetime, keep_collapsed)
|
||||
get_biggest(top_level_nodes, agg_data)
|
||||
}
|
||||
};
|
||||
|
||||
// Must have stopped indicator before we print to stderr
|
||||
let failed_permissions = indicator.data.no_permissions.load(ORDERING);
|
||||
indicator.stop();
|
||||
|
||||
if errors_final.lock().unwrap().abort {
|
||||
return;
|
||||
}
|
||||
|
||||
let final_errors = walk_data.errors.lock().unwrap();
|
||||
if !final_errors.file_not_found.is_empty() {
|
||||
let err = final_errors
|
||||
.file_not_found
|
||||
.iter()
|
||||
.map(|a| a.as_ref())
|
||||
.collect::<Vec<&str>>()
|
||||
.join(", ");
|
||||
eprintln!("No such file or directory: {}", err);
|
||||
}
|
||||
if !final_errors.no_permissions.is_empty() {
|
||||
if config.get_print_errors(&options) {
|
||||
let err = final_errors
|
||||
.no_permissions
|
||||
.iter()
|
||||
.map(|a| a.as_ref())
|
||||
.collect::<Vec<&str>>()
|
||||
.join(", ");
|
||||
eprintln!("Did not have permissions for directories: {}", err);
|
||||
} else {
|
||||
eprintln!(
|
||||
"Did not have permissions for all directories (add --print-errors to see errors)"
|
||||
);
|
||||
}
|
||||
}
|
||||
if !final_errors.unknown_error.is_empty() {
|
||||
let err = final_errors
|
||||
.unknown_error
|
||||
.iter()
|
||||
.map(|a| a.as_ref())
|
||||
.collect::<Vec<&str>>()
|
||||
.join(", ");
|
||||
eprintln!("Unknown Error: {}", err);
|
||||
// Must have stopped indicator before we print to stderr
|
||||
if failed_permissions {
|
||||
eprintln!("Did not have permissions for all directories");
|
||||
}
|
||||
|
||||
if let Some(root_node) = tree {
|
||||
if config.get_output_json(&options) {
|
||||
OUTPUT_TYPE.with(|wrapped| {
|
||||
wrapped.replace(output_format);
|
||||
});
|
||||
println!("{}", serde_json::to_string(&root_node).unwrap());
|
||||
} else {
|
||||
let idd = InitialDisplayData {
|
||||
short_paths: !config.get_full_paths(&options),
|
||||
is_reversed: !config.get_reverse(&options),
|
||||
colors_on: is_colors,
|
||||
by_filecount,
|
||||
by_filetime,
|
||||
is_screen_reader: config.get_screen_reader(&options),
|
||||
output_format,
|
||||
bars_on_right: config.get_bars_on_right(&options),
|
||||
};
|
||||
|
||||
draw_it(
|
||||
idd,
|
||||
config.get_no_bars(&options),
|
||||
terminal_width,
|
||||
&root_node,
|
||||
config.get_skip_total(&options),
|
||||
)
|
||||
}
|
||||
let idd = InitialDisplayData {
|
||||
short_paths: !config.get_full_paths(&options),
|
||||
is_reversed: !config.get_reverse(&options),
|
||||
colors_on: !no_colors,
|
||||
by_filecount,
|
||||
iso,
|
||||
is_screen_reader: config.get_screen_reader(&options),
|
||||
bars_on_right: config.get_bars_on_right(&options),
|
||||
};
|
||||
draw_it(
|
||||
idd,
|
||||
config.get_no_bars(&options),
|
||||
terminal_width,
|
||||
&root_node,
|
||||
config.get_skip_total(&options),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn init_rayon(stack_size: &Option<usize>, threads: &Option<usize>) {
|
||||
// Rayon seems to raise this error on 32-bit builds
|
||||
// The global thread pool has not been initialized.: ThreadPoolBuildError { kind: GlobalPoolAlreadyInitialized }
|
||||
if cfg!(target_pointer_width = "64") {
|
||||
let result = panic::catch_unwind(|| build_thread_pool(*stack_size, *threads));
|
||||
if result.is_err() {
|
||||
eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1")
|
||||
}
|
||||
fn init_rayon() -> Result<(), ThreadPoolBuildError> {
|
||||
let large_stack = usize::pow(1024, 3);
|
||||
let mut s = System::new();
|
||||
s.refresh_memory();
|
||||
let available = s.available_memory();
|
||||
|
||||
if available > large_stack.try_into().unwrap() {
|
||||
// Larger stack size to handle cases with lots of nested directories
|
||||
rayon::ThreadPoolBuilder::new()
|
||||
.stack_size(large_stack)
|
||||
.build_global()
|
||||
} else {
|
||||
rayon::ThreadPoolBuilder::new().build_global()
|
||||
}
|
||||
}
|
||||
|
||||
fn build_thread_pool(
|
||||
stack: Option<usize>,
|
||||
threads: Option<usize>,
|
||||
) -> Result<(), rayon::ThreadPoolBuildError> {
|
||||
let mut pool = rayon::ThreadPoolBuilder::new();
|
||||
|
||||
if let Some(thread_count) = threads {
|
||||
pool = pool.num_threads(thread_count);
|
||||
}
|
||||
|
||||
let stack_size = match stack {
|
||||
Some(s) => Some(s),
|
||||
None => {
|
||||
let large_stack = usize::pow(1024, 3);
|
||||
let mut s = System::new();
|
||||
s.refresh_memory();
|
||||
// Larger stack size if possible to handle cases with lots of nested directories
|
||||
let available = s.available_memory();
|
||||
if available > large_stack.try_into().unwrap() {
|
||||
Some(large_stack)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
};
|
||||
if let Some(stack_size_param) = stack_size {
|
||||
pool = pool.stack_size(stack_size_param);
|
||||
}
|
||||
pool.build_global()
|
||||
}
|
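build_thread_pool above only adjusts two knobs on rayon's global pool: an optional worker count and, when enough memory is available, a 1 GiB stack per thread so very deep directory trees do not overflow the default stack during recursion. A trimmed sketch of the same builder calls, assuming the rayon crate (names and values are placeholders):

    use rayon::ThreadPoolBuildError;

    // build_global() errors if a global pool was already installed, so callers
    // treat the Result as advisory rather than fatal.
    fn install_pool(threads: Option<usize>, stack: Option<usize>) -> Result<(), ThreadPoolBuildError> {
        let mut builder = rayon::ThreadPoolBuilder::new();
        if let Some(n) = threads {
            builder = builder.num_threads(n);
        }
        if let Some(bytes) = stack {
            builder = builder.stack_size(bytes);
        }
        builder.build_global()
    }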
||||
|
||||
54 src/node.rs
@@ -1,9 +1,8 @@
|
||||
use crate::dir_walker::WalkData;
|
||||
use crate::platform::get_metadata;
|
||||
use crate::utils::is_filtered_out_due_to_file_time;
|
||||
use crate::utils::is_filtered_out_due_to_invert_regex;
|
||||
use crate::utils::is_filtered_out_due_to_regex;
|
||||
|
||||
use regex::Regex;
|
||||
use std::cmp::Ordering;
|
||||
use std::path::PathBuf;
|
||||
|
||||
@@ -16,56 +15,33 @@ pub struct Node {
|
||||
pub depth: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
pub enum FileTime {
Modified,
Accessed,
Changed,
}
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn build_node(
|
||||
dir: PathBuf,
|
||||
children: Vec<Node>,
|
||||
filter_regex: &[Regex],
|
||||
invert_filter_regex: &[Regex],
|
||||
use_apparent_size: bool,
|
||||
is_symlink: bool,
|
||||
is_file: bool,
|
||||
by_filecount: bool,
|
||||
depth: usize,
|
||||
walk_data: &WalkData,
|
||||
) -> Option<Node> {
|
||||
let use_apparent_size = walk_data.use_apparent_size;
|
||||
let by_filecount = walk_data.by_filecount;
|
||||
let by_filetime = &walk_data.by_filetime;
|
||||
get_metadata(&dir, use_apparent_size).map(|data| {
|
||||
let inode_device = if is_symlink && !use_apparent_size {
|
||||
None
|
||||
} else {
|
||||
data.1
|
||||
};
|
||||
|
||||
get_metadata(
|
||||
&dir,
|
||||
use_apparent_size,
|
||||
walk_data.follow_links && is_symlink,
|
||||
)
|
||||
.map(|data| {
|
||||
let inode_device = data.1;
|
||||
|
||||
let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
|
||||
|| is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
|
||||
let size = if is_filtered_out_due_to_regex(filter_regex, &dir)
|
||||
|| is_filtered_out_due_to_invert_regex(invert_filter_regex, &dir)
|
||||
|| (is_symlink && !use_apparent_size)
|
||||
|| by_filecount && !is_file
|
||||
|| [
|
||||
(&walk_data.filter_modified_time, data.2 .0),
|
||||
(&walk_data.filter_accessed_time, data.2 .1),
|
||||
(&walk_data.filter_changed_time, data.2 .2),
|
||||
]
|
||||
.iter()
|
||||
.any(|(filter_time, actual_time)| {
|
||||
is_filtered_out_due_to_file_time(filter_time, *actual_time)
|
||||
}) {
|
||||
{
|
||||
0
|
||||
} else if by_filecount {
|
||||
1
|
||||
} else if by_filetime.is_some() {
match by_filetime {
Some(FileTime::Modified) => data.2 .0.unsigned_abs(),
Some(FileTime::Accessed) => data.2 .1.unsigned_abs(),
Some(FileTime::Changed) => data.2 .2.unsigned_abs(),
None => unreachable!(),
}
} else {
data.0
};
||||
|
||||
110 src/platform.rs
@@ -10,35 +10,15 @@ fn get_block_size() -> u64 {
|
||||
512
|
||||
}
|
||||
|
||||
type InodeAndDevice = (u64, u64);
|
||||
type FileTime = (i64, i64, i64);
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn get_metadata<P: AsRef<Path>>(
|
||||
path: P,
|
||||
use_apparent_size: bool,
|
||||
follow_links: bool,
|
||||
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
|
||||
pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
|
||||
use std::os::unix::fs::MetadataExt;
|
||||
let metadata = if follow_links {
|
||||
path.as_ref().metadata()
|
||||
} else {
|
||||
path.as_ref().symlink_metadata()
|
||||
};
|
||||
match metadata {
|
||||
match d.metadata() {
|
||||
Ok(md) => {
|
||||
if use_apparent_size {
|
||||
Some((
|
||||
md.len(),
|
||||
Some((md.ino(), md.dev())),
|
||||
(md.mtime(), md.atime(), md.ctime()),
|
||||
))
|
||||
Some((md.len(), Some((md.ino(), md.dev()))))
|
||||
} else {
|
||||
Some((
|
||||
md.blocks() * get_block_size(),
|
||||
Some((md.ino(), md.dev())),
|
||||
(md.mtime(), md.atime(), md.ctime()),
|
||||
))
|
||||
Some((md.blocks() * get_block_size(), Some((md.ino(), md.dev()))))
|
||||
}
|
||||
}
|
||||
Err(_e) => None,
|
||||
@@ -46,11 +26,7 @@ pub fn get_metadata<P: AsRef<Path>>(
|
||||
}
|
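The unix hunk above returns either the apparent length (md.len()) or the allocated size (md.blocks() multiplied by the fixed 512-byte block unit), alongside the inode/device pair used for hard-link and filesystem checks. A short standalone illustration of the two size readings, assuming a unix target:

    #[cfg(target_family = "unix")]
    fn both_sizes(path: &std::path::Path) -> std::io::Result<(u64, u64)> {
        use std::os::unix::fs::MetadataExt;
        let md = path.symlink_metadata()?;
        let apparent = md.len();         // byte length of the file contents
        let on_disk = md.blocks() * 512; // st_blocks is counted in 512-byte units
        Ok((apparent, on_disk))
    }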
||||
|
||||
#[cfg(target_family = "windows")]
|
||||
pub fn get_metadata<P: AsRef<Path>>(
|
||||
path: P,
|
||||
use_apparent_size: bool,
|
||||
follow_links: bool,
|
||||
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
|
||||
pub fn get_metadata(d: &Path, _use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
|
||||
// On windows opening the file to get size, file ID and volume can be very
|
||||
// expensive because 1) it causes a few system calls, and more importantly 2) it can cause
|
||||
// windows defender to scan the file.
|
||||
@@ -89,7 +65,7 @@ pub fn get_metadata<P: AsRef<Path>>(
|
||||
|
||||
use std::io;
|
||||
use winapi_util::Handle;
|
||||
fn handle_from_path_limited(path: &Path) -> io::Result<Handle> {
|
||||
fn handle_from_path_limited<P: AsRef<Path>>(path: P) -> io::Result<Handle> {
|
||||
use std::fs::OpenOptions;
|
||||
use std::os::windows::fs::OpenOptionsExt;
|
||||
const FILE_READ_ATTRIBUTES: u32 = 0x0080;
|
||||
@@ -114,47 +90,20 @@ pub fn get_metadata<P: AsRef<Path>>(
|
||||
Ok(Handle::from_file(file))
|
||||
}
|
||||
|
||||
fn get_metadata_expensive(
|
||||
path: &Path,
|
||||
use_apparent_size: bool,
|
||||
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
|
||||
fn get_metadata_expensive(d: &Path) -> Option<(u64, Option<(u64, u64)>)> {
|
||||
use winapi_util::file::information;
|
||||
|
||||
let h = handle_from_path_limited(path).ok()?;
|
||||
let h = handle_from_path_limited(d).ok()?;
|
||||
let info = information(&h).ok()?;
|
||||
|
||||
if use_apparent_size {
|
||||
use filesize::PathExt;
|
||||
Some((
|
||||
path.size_on_disk().ok()?,
|
||||
Some((info.file_index(), info.volume_serial_number())),
|
||||
(
|
||||
info.last_write_time().unwrap() as i64,
|
||||
info.last_access_time().unwrap() as i64,
|
||||
info.creation_time().unwrap() as i64,
|
||||
),
|
||||
))
|
||||
} else {
|
||||
Some((
|
||||
info.file_size(),
|
||||
Some((info.file_index(), info.volume_serial_number())),
|
||||
(
|
||||
info.last_write_time().unwrap() as i64,
|
||||
info.last_access_time().unwrap() as i64,
|
||||
info.creation_time().unwrap() as i64,
|
||||
),
|
||||
))
|
||||
}
|
||||
Some((
|
||||
info.file_size(),
|
||||
Some((info.file_index(), info.volume_serial_number())),
|
||||
))
|
||||
}
|
||||
|
||||
use std::os::windows::fs::MetadataExt;
|
||||
let path = path.as_ref();
|
||||
let metadata = if follow_links {
|
||||
path.metadata()
|
||||
} else {
|
||||
path.symlink_metadata()
|
||||
};
|
||||
match metadata {
|
||||
match d.metadata() {
|
||||
Ok(ref md) => {
|
||||
const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
|
||||
const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
|
||||
@@ -162,39 +111,18 @@ pub fn get_metadata<P: AsRef<Path>>(
|
||||
const FILE_ATTRIBUTE_SYSTEM: u32 = 0x04;
|
||||
const FILE_ATTRIBUTE_NORMAL: u32 = 0x80;
|
||||
const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10;
|
||||
const FILE_ATTRIBUTE_SPARSE_FILE: u32 = 0x00000200;
|
||||
const FILE_ATTRIBUTE_PINNED: u32 = 0x00080000;
|
||||
const FILE_ATTRIBUTE_UNPINNED: u32 = 0x00100000;
|
||||
const FILE_ATTRIBUTE_RECALL_ON_OPEN: u32 = 0x00040000;
|
||||
const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: u32 = 0x00400000;
|
||||
const FILE_ATTRIBUTE_OFFLINE: u32 = 0x00001000;
|
||||
// normally FILE_ATTRIBUTE_SPARSE_FILE would be enough, however Windows sometimes likes to mask it out. see: https://stackoverflow.com/q/54560454
|
||||
const IS_PROBABLY_ONEDRIVE: u32 = FILE_ATTRIBUTE_SPARSE_FILE
|
||||
| FILE_ATTRIBUTE_PINNED
|
||||
| FILE_ATTRIBUTE_UNPINNED
|
||||
| FILE_ATTRIBUTE_RECALL_ON_OPEN
|
||||
| FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS
|
||||
| FILE_ATTRIBUTE_OFFLINE;
|
||||
|
||||
let attr_filtered = md.file_attributes()
|
||||
& !(FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_READONLY | FILE_ATTRIBUTE_SYSTEM);
|
||||
if ((attr_filtered & FILE_ATTRIBUTE_ARCHIVE) != 0
|
||||
if (attr_filtered & FILE_ATTRIBUTE_ARCHIVE) != 0
|
||||
|| (attr_filtered & FILE_ATTRIBUTE_DIRECTORY) != 0
|
||||
|| md.file_attributes() == FILE_ATTRIBUTE_NORMAL)
|
||||
&& !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size)
|
||||
|| md.file_attributes() == FILE_ATTRIBUTE_NORMAL
|
||||
{
|
||||
Some((
|
||||
md.len(),
|
||||
None,
|
||||
(
|
||||
md.last_write_time() as i64,
|
||||
md.last_access_time() as i64,
|
||||
md.creation_time() as i64,
|
||||
),
|
||||
))
|
||||
Some((md.len(), None))
|
||||
} else {
|
||||
get_metadata_expensive(path, use_apparent_size)
|
||||
get_metadata_expensive(d)
|
||||
}
|
||||
}
|
||||
_ => get_metadata_expensive(path, use_apparent_size),
|
||||
_ => get_metadata_expensive(d),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
io::Write,
|
||||
path::Path,
|
||||
sync::{
|
||||
atomic::{AtomicU8, AtomicUsize, Ordering},
|
||||
atomic::{AtomicBool, AtomicU64, AtomicU8, AtomicUsize, Ordering},
|
||||
mpsc::{self, RecvTimeoutError, Sender},
|
||||
Arc, RwLock,
|
||||
},
|
||||
@@ -11,11 +10,6 @@ use std::{
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
||||
|
||||
use crate::display::human_readable_number;
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
@@ -61,6 +55,7 @@ pub struct PAtomicInfo {
|
||||
pub total_file_size: AtomicU64,
|
||||
pub state: AtomicU8,
|
||||
pub current_path: ThreadStringWrapper,
|
||||
pub no_permissions: AtomicBool,
|
||||
}
|
||||
|
||||
impl PAtomicInfo {
|
||||
@@ -73,27 +68,18 @@ impl PAtomicInfo {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct RuntimeErrors {
|
||||
pub no_permissions: HashSet<String>,
|
||||
pub file_not_found: HashSet<String>,
|
||||
pub unknown_error: HashSet<String>,
|
||||
pub interrupted_error: i32,
|
||||
pub abort: bool,
|
||||
}
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
||||
fn format_preparing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
|
||||
fn format_preparing_str(prog_char: char, data: &PAtomicInfo, is_iso: bool) -> String {
|
||||
let path_in = data.current_path.get();
|
||||
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
|
||||
let size = human_readable_number(data.total_file_size.load(ORDERING), is_iso);
|
||||
format!("Preparing: {path_in} {size} ... {prog_char}")
|
||||
}
|
||||
|
||||
fn format_indexing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
|
||||
fn format_indexing_str(prog_char: char, data: &PAtomicInfo, is_iso: bool) -> String {
|
||||
let path_in = data.current_path.get();
|
||||
let file_count = data.num_files.load(ORDERING);
|
||||
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
|
||||
let size = human_readable_number(data.total_file_size.load(ORDERING), is_iso);
|
||||
let file_str = format!("{file_count} files, {size}");
|
||||
format!("Indexing: {path_in} {file_str} ... {prog_char}")
|
||||
}
|
||||
@@ -113,7 +99,7 @@ impl PIndicator {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn spawn(&mut self, output_display: String) {
|
||||
pub fn spawn(&mut self, is_iso: bool) {
|
||||
let data = self.data.clone();
|
||||
let (stop_handler, receiver) = mpsc::channel::<()>();
|
||||
|
||||
@@ -132,8 +118,8 @@ impl PIndicator {
|
||||
let prog_char = PROGRESS_CHARS[progress_char_i];
|
||||
|
||||
msg = match data.state.load(ORDERING) {
|
||||
Operation::INDEXING => format_indexing_str(prog_char, &data, &output_display),
|
||||
Operation::PREPARING => format_preparing_str(prog_char, &data, &output_display),
|
||||
Operation::INDEXING => format_indexing_str(prog_char, &data, is_iso),
|
||||
Operation::PREPARING => format_preparing_str(prog_char, &data, is_iso),
|
||||
_ => panic!("Unknown State"),
|
||||
};
|
||||
|
||||
|
||||
59 src/utils.rs
@@ -2,16 +2,13 @@ use platform::get_metadata;
|
||||
use std::collections::HashSet;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::config::DAY_SECONDS;
|
||||
|
||||
use crate::dir_walker::Operator;
|
||||
use crate::platform;
|
||||
use regex::Regex;
|
||||
|
||||
pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
|
||||
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(dirs.len());
|
||||
pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf> {
|
||||
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(filenames.len());
|
||||
|
||||
for t in dirs {
|
||||
for t in filenames {
|
||||
let top_level_name = normalize_path(t);
|
||||
let mut can_add = true;
|
||||
let mut to_remove: Vec<PathBuf> = Vec::new();
|
||||
@@ -34,25 +31,13 @@ pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
|
||||
top_level_names
|
||||
}
|
||||
|
||||
pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P], follow_links: bool) -> HashSet<u64> {
|
||||
use std::fs;
|
||||
pub fn get_filesystem_devices<'a, P: IntoIterator<Item = &'a PathBuf>>(paths: P) -> HashSet<u64> {
|
||||
// Gets the device ids for the filesystems which are used by the argument paths
|
||||
paths
|
||||
.iter()
|
||||
.filter_map(|p| {
|
||||
let follow_links = if follow_links {
|
||||
// slow path: If dereference-links is set, then we check if the file is a symbolic link
|
||||
match fs::symlink_metadata(p) {
|
||||
Ok(metadata) => metadata.file_type().is_symlink(),
|
||||
Err(_) => false,
|
||||
}
|
||||
} else {
|
||||
false
|
||||
};
|
||||
match get_metadata(p, false, follow_links) {
|
||||
Some((_size, Some((_id, dev)), _time)) => Some(dev),
|
||||
_ => None,
|
||||
}
|
||||
.into_iter()
|
||||
.filter_map(|p| match get_metadata(p, false) {
|
||||
Some((_size, Some((_id, dev)))) => Some(dev),
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
@@ -77,20 +62,6 @@ pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_filtered_out_due_to_file_time(
filter_time: &Option<(Operator, i64)>,
actual_time: i64,
) -> bool {
match filter_time {
None => false,
Some((Operator::Equal, bound_time)) => {
!(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
}
Some((Operator::GreaterThan, bound_time)) => actual_time < *bound_time,
Some((Operator::LessThan, bound_time)) => actual_time > *bound_time,
}
}
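The predicate above reads Equal as "within the same day": a file survives only if its timestamp falls in [bound, bound + DAY_SECONDS), while GreaterThan / LessThan drop anything older / newer than the bound. A compact sketch of the same check with the operator type inlined (DAY_SECONDS assumed to be 86_400, the crate defines it in config):

    const DAY_SECONDS: i64 = 86_400;

    enum Op { Equal, GreaterThan, LessThan }

    // Returns true when the entry should be filtered out.
    fn filtered_out(filter: &Option<(Op, i64)>, actual: i64) -> bool {
        match filter {
            None => false,
            Some((Op::Equal, bound)) => !(actual >= *bound && actual < *bound + DAY_SECONDS),
            Some((Op::GreaterThan, bound)) => actual < *bound,
            Some((Op::LessThan, bound)) => actual > *bound,
        }
    }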
||||
|
||||
pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool {
|
||||
filter_regex
|
||||
.iter()
|
||||
@@ -111,15 +82,15 @@ mod tests {
|
||||
fn test_simplify_dir() {
|
||||
let mut correct = HashSet::new();
|
||||
correct.insert(PathBuf::from("a"));
|
||||
assert_eq!(simplify_dir_names(&["a"]), correct);
|
||||
assert_eq!(simplify_dir_names(vec!["a"]), correct);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_simplify_dir_rm_subdir() {
|
||||
let mut correct = HashSet::new();
|
||||
correct.insert(["a", "b"].iter().collect::<PathBuf>());
|
||||
assert_eq!(simplify_dir_names(&["a/b/c", "a/b", "a/b/d/f"]), correct);
|
||||
assert_eq!(simplify_dir_names(&["a/b", "a/b/c", "a/b/d/f"]), correct);
|
||||
assert_eq!(simplify_dir_names(vec!["a/b/c", "a/b", "a/b/d/f"]), correct);
|
||||
assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -128,7 +99,7 @@ mod tests {
|
||||
correct.insert(["a", "b"].iter().collect::<PathBuf>());
|
||||
correct.insert(PathBuf::from("c"));
|
||||
assert_eq!(
|
||||
simplify_dir_names(&[
|
||||
simplify_dir_names(vec![
|
||||
"a/b",
|
||||
"a/b//",
|
||||
"a/././b///",
|
||||
@@ -147,14 +118,14 @@ mod tests {
|
||||
correct.insert(PathBuf::from("b"));
|
||||
correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
|
||||
correct.insert(["a", "b"].iter().collect::<PathBuf>());
|
||||
assert_eq!(simplify_dir_names(&["a/b", "c/a/b/", "b"]), correct);
|
||||
assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_simplify_dir_dots() {
|
||||
let mut correct = HashSet::new();
|
||||
correct.insert(PathBuf::from("src"));
|
||||
assert_eq!(simplify_dir_names(&["src/."]), correct);
|
||||
assert_eq!(simplify_dir_names(vec!["src/."]), correct);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -162,7 +133,7 @@ mod tests {
|
||||
let mut correct = HashSet::new();
|
||||
correct.insert(PathBuf::from("src"));
|
||||
correct.insert(PathBuf::from("src_v2"));
|
||||
assert_eq!(simplify_dir_names(&["src/", "src_v2"]), correct);
|
||||
assert_eq!(simplify_dir_names(vec!["src/", "src_v2"]), correct);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -1,2 +1 @@
something
.secret
hi
@@ -1,11 +1,9 @@
|
||||
use assert_cmd::Command;
|
||||
use std::ffi::OsStr;
|
||||
use std::process::Output;
|
||||
use std::str;
|
||||
use std::sync::Once;
|
||||
use std::{io, str};
|
||||
|
||||
static INIT: Once = Once::new();
|
||||
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
|
||||
|
||||
/**
|
||||
* This file contains tests that verify the exact output of the command.
|
||||
@@ -35,59 +33,26 @@ fn copy_test_data(dir: &str) {
|
||||
.map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err));
|
||||
}
|
||||
|
||||
fn create_unreadable_directory() -> io::Result<()> {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::fs;
|
||||
use std::fs::Permissions;
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
fs::create_dir_all(UNREADABLE_DIR_PATH)?;
|
||||
fs::set_permissions(UNREADABLE_DIR_PATH, Permissions::from_mode(0))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn initialize() {
|
||||
INIT.call_once(|| {
|
||||
copy_test_data("tests/test_dir");
|
||||
copy_test_data("tests/test_dir2");
|
||||
copy_test_data("tests/test_dir_unicode");
|
||||
|
||||
if let Err(e) = create_unreadable_directory() {
|
||||
panic!("Failed to create unreadable directory: {}", e);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
|
||||
fn exact_output_test<T: AsRef<OsStr>>(valid_outputs: Vec<String>, command_args: Vec<T>) {
|
||||
initialize();
|
||||
let mut to_run = &mut Command::cargo_bin("dust").unwrap();
|
||||
|
||||
let mut a = &mut Command::cargo_bin("dust").unwrap();
|
||||
|
||||
for p in command_args {
|
||||
to_run = to_run.arg(p);
|
||||
a = a.arg(p);
|
||||
}
|
||||
to_run.unwrap()
|
||||
}
|
||||
|
||||
fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<String>) {
|
||||
let to_run = run_cmd(command_args);
|
||||
let output = str::from_utf8(&a.unwrap().stdout).unwrap().to_owned();
|
||||
|
||||
let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned();
|
||||
let will_fail = valid_stdout.iter().any(|i| stdout_output.contains(i));
|
||||
if !will_fail {
|
||||
eprintln!(
|
||||
"output(stdout):\n{}\ndoes not contain any of:\n{}",
|
||||
stdout_output,
|
||||
valid_stdout.join("\n\n")
|
||||
);
|
||||
}
|
||||
assert!(will_fail);
|
||||
}
|
||||
|
||||
fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String) {
|
||||
let to_run = run_cmd(command_args);
|
||||
|
||||
let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim();
|
||||
assert_eq!(stderr_output, valid_stderr);
|
||||
assert!(valid_outputs.iter().any(|i| output.contains(i)));
|
||||
}
|
||||
|
||||
// "windows" result data can vary by host (size seems to be variable by one byte); fix code vs test and re-enable
|
||||
@@ -95,20 +60,20 @@ fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String)
|
||||
#[test]
|
||||
pub fn test_main_basic() {
|
||||
// -c is no color mode - This makes testing much simpler
|
||||
exact_stdout_test(&["-c", "-B", "/tmp/test_dir/"], main_output());
|
||||
exact_output_test(main_output(), vec!["-c", "-B", "/tmp/test_dir/"])
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_main_multi_arg() {
|
||||
let command_args = [
|
||||
let command_args = vec![
|
||||
"-c",
|
||||
"-B",
|
||||
"/tmp/test_dir/many/",
|
||||
"/tmp/test_dir",
|
||||
"/tmp/test_dir",
|
||||
];
|
||||
exact_stdout_test(&command_args, main_output());
|
||||
exact_output_test(main_output(), command_args);
|
||||
}
|
||||
|
||||
fn main_output() -> Vec<String> {
|
||||
@@ -138,8 +103,8 @@ fn main_output() -> Vec<String> {
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_main_long_paths() {
|
||||
let command_args = ["-c", "-p", "-B", "/tmp/test_dir/"];
|
||||
exact_stdout_test(&command_args, main_output_long_paths());
|
||||
let command_args = vec!["-c", "-p", "-B", "/tmp/test_dir/"];
|
||||
exact_output_test(main_output_long_paths(), command_args);
|
||||
}
|
||||
|
||||
fn main_output_long_paths() -> Vec<String> {
|
||||
@@ -166,8 +131,8 @@ fn main_output_long_paths() -> Vec<String> {
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_substring_of_names_and_long_names() {
|
||||
let command_args = ["-c", "-B", "/tmp/test_dir2"];
|
||||
exact_stdout_test(&command_args, no_substring_of_names_output());
|
||||
let command_args = vec!["-c", "-B", "/tmp/test_dir2"];
|
||||
exact_output_test(no_substring_of_names_output(), command_args);
|
||||
}
|
||||
|
||||
fn no_substring_of_names_output() -> Vec<String> {
|
||||
@@ -200,8 +165,8 @@ fn no_substring_of_names_output() -> Vec<String> {
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_unicode_directories() {
|
||||
let command_args = ["-c", "-B", "/tmp/test_dir_unicode"];
|
||||
exact_stdout_test(&command_args, unicode_dir());
|
||||
let command_args = vec!["-c", "-B", "/tmp/test_dir_unicode"];
|
||||
exact_output_test(unicode_dir(), command_args);
|
||||
}
|
||||
|
||||
fn unicode_dir() -> Vec<String> {
|
||||
@@ -227,48 +192,18 @@ fn unicode_dir() -> Vec<String> {
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_apparent_size() {
|
||||
let command_args = ["-c", "-s", "-b", "/tmp/test_dir"];
|
||||
exact_stdout_test(&command_args, apparent_size_output());
|
||||
let command_args = vec!["-c", "-s", "-b", "/tmp/test_dir"];
|
||||
exact_output_test(apparent_size_output(), command_args);
|
||||
}
|
||||
|
||||
fn apparent_size_output() -> Vec<String> {
|
||||
// The apparent directory sizes are too unpredictable and system dependent to try and match
|
||||
let one_space_before = r#"
|
||||
0B ┌── a_file
|
||||
6B ├── hello_file
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
let two_space_before = r#"
|
||||
let files = r#"
|
||||
0B ┌── a_file
|
||||
6B ├── hello_file
|
||||
"#
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
vec![one_space_before, two_space_before]
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_permission_normal() {
|
||||
let command_args = [UNREADABLE_DIR_PATH];
|
||||
let permission_msg =
|
||||
r#"Did not have permissions for all directories (add --print-errors to see errors)"#
|
||||
.trim()
|
||||
.to_string();
|
||||
exact_stderr_test(&command_args, permission_msg);
|
||||
}
|
||||
|
||||
#[cfg_attr(target_os = "windows", ignore)]
|
||||
#[test]
|
||||
pub fn test_permission_flag() {
|
||||
// add the flag to CLI
|
||||
let command_args = ["--print-errors", UNREADABLE_DIR_PATH];
|
||||
let permission_msg = format!(
|
||||
"Did not have permissions for directories: {}",
|
||||
UNREADABLE_DIR_PATH
|
||||
);
|
||||
exact_stderr_test(&command_args, permission_msg);
|
||||
vec![files]
|
||||
}
|
||||
|
||||
@@ -59,18 +59,11 @@ pub fn test_d_flag_works() {
|
||||
assert!(!output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_threads_flag_works() {
|
||||
let output = build_command(vec!["-T", "1", "tests/test_dir/"]);
|
||||
assert!(output.contains("hello_file"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_d_flag_works_and_still_recurses_down() {
|
||||
// We had a bug where running with '-d 1' would stop at the first directory and the code
|
||||
// would fail to recurse down
|
||||
let output = build_command(vec!["-d", "1", "-f", "-c", "tests/test_dir2/"]);
|
||||
assert!(output.contains("1 ┌── dir"));
|
||||
assert!(output.contains("4 ┌─┴ test_dir2"));
|
||||
}
|
||||
|
||||
@@ -80,25 +73,14 @@ pub fn test_ignore_dir() {
|
||||
let output = build_command(vec!["-c", "-X", "dir_substring", "tests/test_dir2/"]);
|
||||
assert!(!output.contains("dir_substring"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn test_ignore_all_in_file() {
|
||||
let output = build_command(vec![
|
||||
"-c",
|
||||
"-I",
|
||||
"tests/test_dir_hidden_entries/.hidden_file",
|
||||
"tests/test_dir_hidden_entries/",
|
||||
]);
|
||||
assert!(output.contains(" test_dir_hidden_entries"));
|
||||
assert!(!output.contains(".secret"));
|
||||
}
|
||||
// Add test for multiple dirs - with -d 0 and maybe -d 1 check the
|
||||
|
||||
#[test]
|
||||
pub fn test_with_bad_param() {
|
||||
let mut cmd = Command::cargo_bin("dust").unwrap();
|
||||
let result = cmd.arg("bad_place").unwrap();
|
||||
let stderr = str::from_utf8(&result.stderr).unwrap();
|
||||
assert!(stderr.contains("No such file or directory"));
|
||||
assert!(stderr.contains("Did not have permissions for all directories"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -139,9 +121,9 @@ pub fn test_show_files_by_type() {
|
||||
#[cfg(target_family = "unix")]
|
||||
pub fn test_show_files_only() {
|
||||
let output = build_command(vec!["-c", "-F", "tests/test_dir"]);
|
||||
assert!(output.contains("a_file"));
|
||||
assert!(output.contains("hello_file"));
|
||||
assert!(!output.contains("many"));
|
||||
assert!(output.contains("tests/test_dir/many/a_file"));
|
||||
assert!(output.contains("tests/test_dir/many/hello_file"));
|
||||
assert!(!output.contains("tests/test_dir/many "));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -238,26 +220,3 @@ pub fn test_show_files_by_invert_regex_match_multiple() {
|
||||
assert!(!output.contains("test_dir_unicode"));
|
||||
assert!(output.contains("many"));
|
||||
}
|
||||
|
||||
#[test]
pub fn test_no_color() {
let output = build_command(vec!["-c"]);
// Red is 31
assert!(!output.contains("\x1B[31m"));
assert!(!output.contains("\x1B[0m"));
}

#[test]
pub fn test_force_color() {
let output = build_command(vec!["-C"]);
// Red is 31
assert!(output.contains("\x1B[31m"));
assert!(output.contains("\x1B[0m"));
}

#[test]
pub fn test_collapse() {
let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
assert!(output.contains("many"));
assert!(!output.contains("hello_file"));
}