Compare commits


3 Commits

Author      SHA1         Message                                            Date
andy.boot   88797b3f66   Fix completions.                                   2024-02-12 23:09:23 +00:00
andy.boot   13af2ff351   cargo update                                       2024-02-11 20:18:09 +00:00
wickles     b5e3f4ca2f   Fix filename completions for zsh and bash (#331)   2024-02-11 20:17:33 +00:00
                         Reapply commit accidentally reverted
34 changed files with 1144 additions and 2987 deletions

View File

@@ -45,11 +45,6 @@ jobs:
override: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy
- name: Install wget for Windows
if: matrix.job.os == 'windows-latest'
run: choco install wget --no-progress
- name: typos-action
uses: crate-ci/typos@v1.28.4
- name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1

View File

@@ -1,11 +0,0 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: cargo-check
stages: [commit]
- id: fmt
stages: [commit]
- id: clippy
args: [--all-targets, --all-features]
stages: [commit]

Cargo.lock (generated): 931 lines changed

File diff suppressed because it is too large.

View File

@@ -1,9 +1,9 @@
[package]
name = "du-dust"
description = "A more intuitive version of du"
version = "1.2.3"
version = "0.9.0"
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
edition = "2024"
edition = "2021"
readme = "README.md"
documentation = "https://github.com/bootandy/dust"
@@ -28,34 +28,28 @@ strip = true
[dependencies]
ansi_term = "0.12"
clap = { version = "4", features = ["derive"] }
clap = "4.4"
lscolors = "0.21"
lscolors = "0.13"
terminal_size = "0.4"
terminal_size = "0.2"
unicode-width = "0.2"
unicode-width = "0.1"
rayon = "1"
thousands = "0.2"
stfu8 = "0.2"
regex = "1"
config-file = "0.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
directories = "4"
sysinfo = "0.37"
sysinfo = "0.27"
ctrlc = "3"
chrono = "0.4"
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
portable-atomic = "1.4"
[target.'cfg(windows)'.dependencies]
winapi-util = "0.1"
filesize = "0.2.0"
[dev-dependencies]
assert_cmd = "2"
tempfile = "=3"
[build-dependencies]
clap = { version = "4.4", features = ["derive"] }
clap = "4.4"
clap_complete = "4.4"
clap_mangen = "0.2"
@@ -85,16 +79,6 @@ assets = [
"usr/share/doc/du-dust/README",
"644",
],
[
"man-page/dust.1",
"usr/share/man/man1/dust.1",
"644",
],
[
"completions/dust.bash",
"usr/share/bash-completion/completions/dust",
"644",
],
]
extended-description = """\
Dust is meant to give you an instant overview of which directories are using
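
One detail from the dependency hunk above: the newer side adds portable-atomic only for targets without native 64-bit atomics. A rough sketch of how such a cfg-gated fallback is commonly consumed, using illustrative names rather than dust's actual code:

```rust
// Pick an AtomicU64 implementation: std where the target has native 64-bit
// atomics, portable-atomic's polyfill otherwise (mirrors the Cargo.toml gate).
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::{AtomicU64, Ordering};
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::{AtomicU64, Ordering};

static TOTAL_BYTES: AtomicU64 = AtomicU64::new(0);

fn add_bytes(n: u64) {
    // A relaxed ordering is enough for a simple running total.
    TOTAL_BYTES.fetch_add(n, Ordering::Relaxed);
}

fn main() {
    add_bytes(4096);
    println!("total = {} bytes", TOTAL_BYTES.load(Ordering::Relaxed));
}
```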

View File

@@ -25,30 +25,16 @@ Because I want an easy way to see where my disk is being used.
#### 🍺 Homebrew (Linux)
- `brew install dust`
- `brew tap tgotwig/linux-dust && brew install dust`
#### [Snap](https://ubuntu.com/core/services/guide/snaps-intro) Ubuntu and [supported systems](https://snapcraft.io/docs/installing-snapd)
- `snap install dust`
Note: `dust` installed through `snap` can only access files stored in the `/home` directory. See daniejstriata/dust-snap#2 for more information.
#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)
- `pacstall -I dust-bin`
#### Anaconda (conda-forge)
- `conda install -c conda-forge dust`
#### [deb-get](https://github.com/wimpysworld/deb-get) (Debian/Ubuntu)
- `deb-get install du-dust`
#### [x-cmd](https://www.x-cmd.com/pkg/#VPContent)
- `x env use dust`
#### Windows:
- `scoop install dust`
@@ -82,15 +68,14 @@ Usage: dust -d 3 (Shows 3 levels of subdirectories)
Usage: dust -D (Show only directories (eg dust -D))
Usage: dust -F (Show only files - finds your largest files)
Usage: dust -r (reverse order of output)
Usage: dust -o si/b/kb/kib/mb/mib/gb/gib (si - prints sizes in powers of 1000. Others print size in that format).
Usage: dust -H (si print sizes in powers of 1000 instead of 1024)
Usage: dust -X ignore (ignore all files and directories with the name 'ignore')
Usage: dust -x (Only show directories on the same filesystem)
Usage: dust -b (Do not show percentages or draw ASCII bars)
Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen)
Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen])
Usage: dust -i (Do not show hidden files)
Usage: dust -c (No colors [monochrome])
Usage: dust -C (Force colors)
Usage: dust -f (Count files instead of diskspace)
Usage: dust -f (Count files instead of diskspace [Counts by inode, to include duplicate inodes use dust -f -s])
Usage: dust -t (Group by filetype)
Usage: dust -z 10M (min-size, Only include files larger than 10M)
Usage: dust -e regex (Only include files matching this regex (eg dust -e "\.png$" would match png files))
@@ -100,11 +85,7 @@ Usage: dust -P (Disable the progress indicator)
Usage: dust -R (For screen readers. Removes bars/symbols. Adds new column: depth level. (May want to use -p for full path too))
Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack overflow' (default allocation: low memory=1048576, high memory=1073741824)"),
Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -z 4000000 (Exclude output files/directories below size 4MB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Read NUL-terminated file paths from FILE; if FILE is '-', read from stdin)
Usage: dust --files-from=FILE (Read newline-terminated file paths from FILE; if FILE is '-', read from stdin)
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
```
## Config file
@@ -123,6 +104,6 @@ reverse=true
- [dua](https://github.com/Byron/dua-cli/)
- [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
- [dirstat-rs](https://github.com/scullionw/dirstat-rs)
- `du -d 1 -h | sort -h`
- du -d 1 -h | sort -h
Note: Apparent-size is calculated slightly differently in dust to gdu. In dust each hard link is counted as using file_length space. In gdu only the first entry is counted.
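
The apparent-size note above describes two strategies: dust counts every hard link at its file_length, while gdu counts only the first entry per inode (the usage text likewise mentions that -f counts by inode unless -s is added). A minimal sketch of the "first entry per inode" strategy, assuming a Unix target; the function and file names are illustrative, not dust's internals:

```rust
use std::collections::HashSet;
use std::fs;
use std::os::unix::fs::MetadataExt; // dev()/ino() are Unix-only
use std::path::Path;

/// Sum apparent sizes, counting each (device, inode) pair only once,
/// i.e. the "first entry" strategy the note attributes to gdu.
fn apparent_size_dedup(paths: &[&Path]) -> std::io::Result<u64> {
    let mut seen: HashSet<(u64, u64)> = HashSet::new();
    let mut total = 0;
    for p in paths {
        let md = fs::symlink_metadata(p)?;
        if seen.insert((md.dev(), md.ino())) {
            total += md.len(); // file_length (apparent size)
        }
    }
    Ok(total)
}

fn main() -> std::io::Result<()> {
    // Hypothetical paths: the second is a hard link to the first.
    let total = apparent_size_dedup(&[Path::new("a.txt"), Path::new("hardlink-to-a.txt")])?;
    println!("{total} bytes (hard links counted once)");
    Ok(())
}
```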

View File

@@ -1,4 +1,3 @@
use clap::CommandFactory;
use clap_complete::{generate_to, shells::*};
use clap_mangen::Man;
use std::fs::File;
@@ -10,7 +9,7 @@ include!("src/cli.rs");
fn main() -> Result<(), Error> {
let outdir = "completions";
let app_name = "dust";
let mut cmd = Cli::command();
let mut cmd = build_cli();
generate_to(Bash, &mut cmd, app_name, outdir)?;
generate_to(Zsh, &mut cmd, app_name, outdir)?;
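
For context, the newer side of this hunk obtains the command from a derived `Cli` type via `clap::CommandFactory` rather than a hand-written `build_cli()`. A self-contained sketch of that pattern, assuming clap's derive feature as in the build-dependencies shown earlier; the `Cli` struct below is an illustrative stand-in for the real definition that build.rs pulls in with `include!("src/cli.rs")`:

```rust
// build.rs sketch: generate shell completions at build time with clap_complete.
use clap::{CommandFactory, Parser};
use clap_complete::{generate_to, shells::{Bash, Zsh}};
use std::io::Error;

#[derive(Parser)]
#[command(name = "dust")]
struct Cli {
    /// Depth to show
    #[arg(short, long)]
    depth: Option<usize>,
}

fn main() -> Result<(), Error> {
    let outdir = "completions";
    let app_name = "dust";
    let mut cmd = Cli::command();
    generate_to(Bash, &mut cmd, app_name, outdir)?; // e.g. completions/dust.bash
    generate_to(Zsh, &mut cmd, app_name, outdir)?;  // e.g. completions/_dust
    Ok(())
}
```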

View File

@@ -1,21 +1,14 @@
# ----------- To do a release ---------
# ----------- Pre release ---------
# Compare times of runs to check no drastic slow down:
# hyperfine 'target/release/dust /home/andy'
# time target/release/dust ~/dev
# hyperfine 'dust /home/andy'
# time dust ~dev
# ----------- Release ---------
# inc version in cargo.toml
# cargo build --release
# commit changed files
# merge to master in github
# edit version in cargo.toml
# tag a commit and push (increment version in Cargo.toml first):
# git tag v0.4.5
# git push origin v0.4.5
# cargo publish to put it in crates.io
# Optional: To install locally
# To install locally [Do before pushing it]
#cargo install --path .

View File

@@ -14,63 +14,25 @@ _dust() {
fi
local context curcontext="$curcontext" state line
_arguments "${_arguments_options[@]}" : \
_arguments "${_arguments_options[@]}" \
'-d+[Depth to show]:DEPTH:_default' \
'-d+[Depth to show]: : ' \
'--depth=[Depth to show]:DEPTH:_default' \
'--depth=[Depth to show]: : ' \
'-T+[Number of threads to use]:THREADS:_default' \ '-n+[Number of lines of output to show. (Default is terminal_height - 10)]: : ' \
'--threads=[Number of threads to use]:THREADS:_default' \ '--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]: : ' \
'--config=[Specify a config file to use]:FILE:_files' \ '*-X+[Exclude any file or directory with this name]: : ' \
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \ '*--ignore-directory=[Exclude any file or directory with this name]: : ' \
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \ '-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]: : ' \
'*-X+[Exclude any file or directory with this path]:PATH:_files' \ '--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]: : ' \
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \ '-z+[Minimum size file to include in output]: : ' \
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \ '--min-size=[Minimum size file to include in output]: : ' \
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \ '(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]: : ' \
'-z+[Minimum size file to include in output]:MIN_SIZE:_default' \ '(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]: : ' \
'--min-size=[Minimum size file to include in output]:MIN_SIZE:_default' \ '(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]: : ' \
'(-e --filter -t --file-types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$"]:REGEX:_default' \ '(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]: : ' \
'(-e --filter -t --file-types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$"]:REGEX:_default' \ '-w+[Specify width of output overriding the auto detection of terminal width]: : ' \
'(-t --file-types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$"]:REGEX:_default' \ '--terminal_width=[Specify width of output overriding the auto detection of terminal width]: : ' \
'(-t --file-types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$"]:REGEX:_default' \ '-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]: : ' \
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \ '--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]: : ' \
'--terminal-width=[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
'-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size]:FORMAT:((si\:"SI prefix (powers of 1000)"
b\:"byte (B)"
k\:"kibibyte (KiB)"
m\:"mebibyte (MiB)"
g\:"gibibyte (GiB)"
t\:"tebibyte (TiB)"
kb\:"kilobyte (kB)"
mb\:"megabyte (MB)"
gb\:"gigabyte (GB)"
tb\:"terabyte (TB)"))' \
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size]:FORMAT:((si\:"SI prefix (powers of 1000)"
b\:"byte (B)"
k\:"kibibyte (KiB)"
m\:"mebibyte (MiB)"
g\:"gibibyte (GiB)"
t\:"tebibyte (TiB)"
kb\:"kilobyte (kB)"
mb\:"megabyte (MB)"
gb\:"gigabyte (GB)"
tb\:"terabyte (TB)"))' \
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]:MTIME:_default' \
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]:MTIME:_default' \
'-A+[just like -mtime, but based on file access time]:ATIME:_default' \
'--atime=[just like -mtime, but based on file access time]:ATIME:_default' \
'-y+[just like -mtime, but based on file change time]:CTIME:_default' \
'--ctime=[just like -mtime, but based on file change time]:CTIME:_default' \
'(--files-from)--files0-from=[Read NUL-terminated paths from FILE (use \`-\` for stdin)]:FILES0_FROM:_files' \
'(--files0-from)--files-from=[Read newline-terminated paths from FILE (use \`-\` for stdin)]:FILES_FROM:_files' \
'*--collapse=[Keep these directories collapsed]:COLLAPSE:_files' \
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
c\:"last changed time"
m\:"last modified time"))' \
'--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
c\:"last changed time"
m\:"last modified time"))' \
'-p[Subdirectories will not have their path shortened]' \
'--full-paths[Subdirectories will not have their path shortened]' \
'-L[dereference sym links - Treat sym links as directories and go into them]' \
@@ -83,8 +45,6 @@ m\:"last modified time"))' \
'--reverse[Print tree upside down (biggest highest)]' \
'-c[No colors will be printed (Useful for commands like\: watch)]' \
'--no-colors[No colors will be printed (Useful for commands like\: watch)]' \
'-C[Force colors print]' \
'--force-colors[Force colors print]' \
'-b[No percent bars or percentages will be displayed]' \
'--no-percent-bars[No percent bars or percentages will be displayed]' \
'-B[percent bars moved to right side of screen]' \
@@ -95,23 +55,22 @@ m\:"last modified time"))' \
'-f[Directory '\''size'\'' is number of child files instead of disk size]' \
'--filecount[Directory '\''size'\'' is number of child files instead of disk size]' \
'-i[Do not display hidden files]' \
'--ignore-hidden[Do not display hidden files]' \
'--ignore_hidden[Do not display hidden files]' \
'(-d --depth -D --only-dir)-t[show only these file types]' \
'(-d --depth -D --only-dir)--file-types[show only these file types]' \
'(-d --depth -D --only-dir)--file_types[show only these file types]' \
'-P[Disable the progress indication]' \ '-H[print sizes in powers of 1000 (e.g., 1.1G)]' \
'--no-progress[Disable the progress indication]' \ '--si[print sizes in powers of 1000 (e.g., 1.1G)]' \
'--print-errors[Print path with errors]' \ '-P[Disable the progress indication.]' \
'(-F --only-file -t --file-types)-D[Only directories will be displayed]' \ '--no-progress[Disable the progress indication.]' \
'(-F --only-file -t --file-types)--only-dir[Only directories will be displayed]' \ '(-F --only-file -t --file_types)-D[Only directories will be displayed.]' \
'(-F --only-file -t --file_types)--only-dir[Only directories will be displayed.]' \
'(-D --only-dir)-F[Only files will be displayed. (Finds your largest files)]' \
'(-D --only-dir)--only-file[Only files will be displayed. (Finds your largest files)]' \
'-j[Output the directory tree as json to the current directory]' \ '-h[Print help]' \
'--output-json[Output the directory tree as json to the current directory]' \ '--help[Print help]' \
'-h[Print help (see more with '\''--help'\'')]' \
'--help[Print help (see more with '\''--help'\'')]' \
'-V[Print version]' \
'--version[Print version]' \
'*::params -- Input files or directories:_files' \
'*::params:_files' \
&& ret=0
}

View File

@@ -21,80 +21,61 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
$completions = @(switch ($command) {
'dust' {
[CompletionResult]::new('-d', '-d', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('--depth', '--depth', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('-T', '-T ', [CompletionResultType]::ParameterName, 'Number of threads to use') [CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--threads', '--threads', [CompletionResultType]::ParameterName, 'Number of threads to use') [CompletionResult]::new('--number-of-lines', 'number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'Specify a config file to use') [CompletionResult]::new('-X', 'X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
[CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)') [CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
[CompletionResult]::new('--number-of-lines', '--number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)') [CompletionResult]::new('-I', 'I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('-X', '-X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path') [CompletionResult]::new('--ignore-all-in-file', 'ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('--ignore-directory', '--ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path') [CompletionResult]::new('-z', 'z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('-I', '-I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter') [CompletionResult]::new('--min-size', 'min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('--ignore-all-in-file', '--ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter') [CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('-z', '-z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output') [CompletionResult]::new('--invert-filter', 'invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('--min-size', '--min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output') [CompletionResult]::new('-e', 'e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"') [CompletionResult]::new('--filter', 'filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('--invert-filter', '--invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"') [CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('-e', '-e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$"') [CompletionResult]::new('--terminal_width', 'terminal_width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('--filter', '--filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$"') [CompletionResult]::new('-S', 'S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('-w', '-w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width') [CompletionResult]::new('--stack-size', 'stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('--terminal-width', '--terminal-width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width') [CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('-o', '-o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size') [CompletionResult]::new('--full-paths', 'full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('--output-format', '--output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size') [CompletionResult]::new('-L', 'L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('-S', '-S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)') [CompletionResult]::new('--dereference-links', 'dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('--stack-size', '--stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)') [CompletionResult]::new('-x', 'x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('-M', '-M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)') [CompletionResult]::new('--limit-filesystem', 'limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('--mtime', '--mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)') [CompletionResult]::new('-s', 's', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('-A', '-A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time') [CompletionResult]::new('--apparent-size', 'apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('--atime', '--atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time') [CompletionResult]::new('-r', 'r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('-y', '-y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time') [CompletionResult]::new('--reverse', 'reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('--ctime', '--ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time') [CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'Read NUL-terminated paths from FILE (use `-` for stdin)') [CompletionResult]::new('--no-colors', 'no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('--files-from', '--files-from', [CompletionResultType]::ParameterName, 'Read newline-terminated paths from FILE (use `-` for stdin)') [CompletionResult]::new('-b', 'b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('--collapse', '--collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed') [CompletionResult]::new('--no-percent-bars', 'no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('-m', '-m', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time') [CompletionResult]::new('-B', 'B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('--filetime', '--filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time') [CompletionResult]::new('--bars-on-right', 'bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('-p', '-p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened') [CompletionResult]::new('-R', 'R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--full-paths', '--full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened') [CompletionResult]::new('--screen-reader', 'screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('-L', '-L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them') [CompletionResult]::new('--skip-total', 'skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
[CompletionResult]::new('--dereference-links', '--dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them') [CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('-x', '-x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory') [CompletionResult]::new('--filecount', 'filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('--limit-filesystem', '--limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory') [CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('-s', '-s', [CompletionResultType]::ParameterName, 'Use file length instead of blocks') [CompletionResult]::new('--ignore_hidden', 'ignore_hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('--apparent-size', '--apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks') [CompletionResult]::new('-t', 't', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('-r', '-r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)') [CompletionResult]::new('--file_types', 'file_types', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('--reverse', '--reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)') [CompletionResult]::new('-H', 'H ', [CompletionResultType]::ParameterName, 'print sizes in powers of 1000 (e.g., 1.1G)')
[CompletionResult]::new('-c', '-c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)') [CompletionResult]::new('--si', 'si', [CompletionResultType]::ParameterName, 'print sizes in powers of 1000 (e.g., 1.1G)')
[CompletionResult]::new('--no-colors', '--no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)') [CompletionResult]::new('-P', 'P ', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('-C', '-C ', [CompletionResultType]::ParameterName, 'Force colors print') [CompletionResult]::new('--no-progress', 'no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--force-colors', '--force-colors', [CompletionResultType]::ParameterName, 'Force colors print') [CompletionResult]::new('-D', 'D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('-b', '-b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed') [CompletionResult]::new('--only-dir', 'only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('--no-percent-bars', '--no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed') [CompletionResult]::new('-F', 'F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('-B', '-B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen') [CompletionResult]::new('--only-file', 'only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('--bars-on-right', '--bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen') [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('-R', '-R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)') [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('--screen-reader', '--screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)') [CompletionResult]::new('-V', 'V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--skip-total', '--skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed') [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('-f', '-f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('--filecount', '--filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('--ignore-hidden', '--ignore-hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('-t', '-t', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('--file-types', '--file-types', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('-P', '-P ', [CompletionResultType]::ParameterName, 'Disable the progress indication')
[CompletionResult]::new('--no-progress', '--no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication')
[CompletionResult]::new('--print-errors', '--print-errors', [CompletionResultType]::ParameterName, 'Print path with errors')
[CompletionResult]::new('-D', '-D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed')
[CompletionResult]::new('--only-dir', '--only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed')
[CompletionResult]::new('-F', '-F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('--only-file', '--only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('--output-json', '--output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
[CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
[CompletionResult]::new('-V', '-V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--version', '--version', [CompletionResultType]::ParameterName, 'Print version')
break
}
})

View File

@@ -1,16 +1,12 @@
_dust() {
local i cur prev opts cmd
COMPREPLY=()
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then cur="${COMP_WORDS[COMP_CWORD]}"
cur="$2" prev="${COMP_WORDS[COMP_CWORD-1]}"
else
cur="${COMP_WORDS[COMP_CWORD]}"
fi
prev="$3"
cmd=""
opts=""
for i in "${COMP_WORDS[@]:0:COMP_CWORD}"
for i in ${COMP_WORDS[@]}
do
case "${cmd},${i}" in
",$1")
@@ -23,10 +19,13 @@ _dust() {
case "${cmd}" in
dust)
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore-hidden --invert-filter --filter --file-types --terminal-width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --files-from --collapse --filetime --help --version [PATH]..."
opts="-d -n -p -X -I -L -x -s -r -c -b -B -z -R -f -i -v -e -t -w -H -P -D -F -S -h -V --depth --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --si --no-progress --only-dir --only-file --stack-size --help --version [params]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
if [[ ${cur} == -* ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
elif [[ ${cur} == * ]] ; then
_filedir
return 0
fi
case "${prev}" in
--depth)
@@ -37,29 +36,6 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--threads)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-T)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--config)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--number-of-lines)
COMPREPLY=($(compgen -f "${cur}"))
return 0
@@ -77,33 +53,11 @@ _dust() {
return 0
;;
--ignore-all-in-file)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
-I)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--min-size)
@@ -130,7 +84,7 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--terminal-width)
--terminal_width)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
@@ -138,14 +92,6 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--output-format)
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
return 0
;;
-o)
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
return 0
;;
--stack-size)
COMPREPLY=($(compgen -f "${cur}"))
return 0
@@ -154,50 +100,6 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--mtime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-M)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--atime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-A)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ctime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-y)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files0-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--collapse)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--filetime)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
-m)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
*)
COMPREPLY=()
;;

View File

@@ -20,38 +20,22 @@ set edit:completion:arg-completer[dust] = {|@words|
&'dust'= {
cand -d 'Depth to show'
cand --depth 'Depth to show'
cand -T 'Number of threads to use'
cand --threads 'Number of threads to use'
cand --config 'Specify a config file to use'
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
cand -X 'Exclude any file or directory with this path'
cand -X 'Exclude any file or directory with this name'
cand --ignore-directory 'Exclude any file or directory with this path'
cand --ignore-directory 'Exclude any file or directory with this name'
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand -z 'Minimum size file to include in output'
cand --min-size 'Minimum size file to include in output'
cand -v 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"'
cand -v 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" '
cand --invert-filter 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"'
cand --invert-filter 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" '
cand -e 'Only include filepaths matching this regex. For png files type: -e "\.png$"'
cand -e 'Only include filepaths matching this regex. For png files type: -e "\.png$" '
cand --filter 'Only include filepaths matching this regex. For png files type: -e "\.png$"'
cand --filter 'Only include filepaths matching this regex. For png files type: -e "\.png$" '
cand -w 'Specify width of output overriding the auto detection of terminal width'
cand --terminal-width 'Specify width of output overriding the auto detection of terminal width'
cand --terminal_width 'Specify width of output overriding the auto detection of terminal width'
cand -o 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size'
cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size'
cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand -M '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)'
cand --mtime '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)'
cand -A 'just like -mtime, but based on file access time'
cand --atime 'just like -mtime, but based on file access time'
cand -y 'just like -mtime, but based on file change time'
cand --ctime 'just like -mtime, but based on file change time'
cand --files0-from 'Read NUL-terminated paths from FILE (use `-` for stdin)'
cand --files-from 'Read newline-terminated paths from FILE (use `-` for stdin)'
cand --collapse 'Keep these directories collapsed'
cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand -p 'Subdirectories will not have their path shortened'
cand --full-paths 'Subdirectories will not have their path shortened'
cand -L 'dereference sym links - Treat sym links as directories and go into them'
@@ -64,8 +48,6 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --reverse 'Print tree upside down (biggest highest)'
cand -c 'No colors will be printed (Useful for commands like: watch)'
cand --no-colors 'No colors will be printed (Useful for commands like: watch)'
cand -C 'Force colors print'
cand --force-colors 'Force colors print'
cand -b 'No percent bars or percentages will be displayed'
cand --no-percent-bars 'No percent bars or percentages will be displayed'
cand -B 'percent bars moved to right side of screen'
@@ -76,20 +58,19 @@ set edit:completion:arg-completer[dust] = {|@words|
cand -f 'Directory ''size'' is number of child files instead of disk size'
cand --filecount 'Directory ''size'' is number of child files instead of disk size'
cand -i 'Do not display hidden files'
cand --ignore-hidden 'Do not display hidden files'
cand --ignore_hidden 'Do not display hidden files'
cand -t 'show only these file types'
cand --file-types 'show only these file types'
cand --file_types 'show only these file types'
cand -P 'Disable the progress indication' cand -H 'print sizes in powers of 1000 (e.g., 1.1G)'
cand --no-progress 'Disable the progress indication' cand --si 'print sizes in powers of 1000 (e.g., 1.1G)'
cand --print-errors 'Print path with errors' cand -P 'Disable the progress indication.'
cand -D 'Only directories will be displayed' cand --no-progress 'Disable the progress indication.'
cand --only-dir 'Only directories will be displayed' cand -D 'Only directories will be displayed.'
cand --only-dir 'Only directories will be displayed.'
cand -F 'Only files will be displayed. (Finds your largest files)'
cand --only-file 'Only files will be displayed. (Finds your largest files)'
cand -j 'Output the directory tree as json to the current directory' cand -h 'Print help'
cand --output-json 'Output the directory tree as json to the current directory' cand --help 'Print help'
cand -h 'Print help (see more with ''--help'')'
cand --help 'Print help (see more with ''--help'')'
cand -V 'Print version'
cand --version 'Print version'
}

View File

@@ -1,51 +1,28 @@
complete -c dust -s d -l depth -d 'Depth to show' -r
complete -c dust -s T -l threads -d 'Number of threads to use' -r
complete -c dust -l config -d 'Specify a config file to use' -r -F
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this name' -r
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$"' -r
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ' -r
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$"' -r
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$" ' -r
complete -c dust -s w -l terminal-width -d 'Specify width of output overriding the auto detection of terminal width' -r
complete -c dust -s w -l terminal_width -d 'Specify width of output overriding the auto detection of terminal width' -r
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size' -r -f -a "si\t'SI prefix (powers of 1000)'
b\t'byte (B)'
k\t'kibibyte (KiB)'
m\t'mebibyte (MiB)'
g\t'gibibyte (GiB)'
t\t'tebibyte (TiB)'
kb\t'kilobyte (kB)'
mb\t'megabyte (MB)'
gb\t'gigabyte (GB)'
tb\t'terabyte (TB)'"
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)' -r
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'Read NUL-terminated paths from FILE (use `-` for stdin)' -r -F
complete -c dust -l files-from -d 'Read newline-terminated paths from FILE (use `-` for stdin)' -r -F
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "a\t'last accessed time'
c\t'last changed time'
m\t'last modified time'"
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'
complete -c dust -s s -l apparent-size -d 'Use file length instead of blocks'
complete -c dust -s r -l reverse -d 'Print tree upside down (biggest highest)'
complete -c dust -s c -l no-colors -d 'No colors will be printed (Useful for commands like: watch)'
complete -c dust -s C -l force-colors -d 'Force color output'
complete -c dust -s b -l no-percent-bars -d 'No percent bars or percentages will be displayed' complete -c dust -s b -l no-percent-bars -d 'No percent bars or percentages will be displayed'
complete -c dust -s B -l bars-on-right -d 'percent bars moved to right side of screen' complete -c dust -s B -l bars-on-right -d 'percent bars moved to right side of screen'
complete -c dust -s R -l screen-reader -d 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)' complete -c dust -s R -l screen-reader -d 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
complete -c dust -l skip-total -d 'No total row will be displayed' complete -c dust -l skip-total -d 'No total row will be displayed'
complete -c dust -s f -l filecount -d 'Directory \'size\' is number of child files instead of disk size' complete -c dust -s f -l filecount -d 'Directory \'size\' is number of child files instead of disk size'
complete -c dust -s i -l ignore-hidden -d 'Do not display hidden files' complete -c dust -s i -l ignore_hidden -d 'Do not display hidden files'
complete -c dust -s t -l file-types -d 'show only these file types' complete -c dust -s t -l file_types -d 'show only these file types'
complete -c dust -s P -l no-progress -d 'Disable the progress indication' complete -c dust -s H -l si -d 'print sizes in powers of 1000 (e.g., 1.1G)'
complete -c dust -l print-errors -d 'Print path with errors' complete -c dust -s P -l no-progress -d 'Disable the progress indication.'
complete -c dust -s D -l only-dir -d 'Only directories will be displayed' complete -c dust -s D -l only-dir -d 'Only directories will be displayed.'
complete -c dust -s F -l only-file -d 'Only files will be displayed. (Finds your largest files)' complete -c dust -s F -l only-file -d 'Only files will be displayed. (Finds your largest files)'
complete -c dust -s j -l output-json -d 'Output the directory tree as json to the current directory' complete -c dust -s h -l help -d 'Print help'
complete -c dust -s h -l help -d 'Print help (see more with \'--help\')'
complete -c dust -s V -l version -d 'Print version' complete -c dust -s V -l version -d 'Print version'

View File

@@ -25,6 +25,4 @@ skip-total=true
ignore-hidden=true ignore-hidden=true
# print sizes in powers of 1000 (e.g., 1.1G) # print sizes in powers of 1000 (e.g., 1.1G)
output-format="si" iso=true
number-of-lines=5

View File

@@ -1,33 +1,27 @@
.ie \n(.g .ds Aq \(aq .ie \n(.g .ds Aq \(aq
.el .ds Aq ' .el .ds Aq '
.TH Dust 1 "Dust 1.2.3" .TH Dust 1 "Dust 0.9.0"
.SH NAME .SH NAME
Dust \- Like du but more intuitive Dust \- Like du but more intuitive
.SH SYNOPSIS .SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore\-hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file\-types\fR] [\fB\-w\fR|\fB\-\-terminal\-width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-files\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR] \fBDust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-H\fR|\fB\-\-si\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIparams\fR]
.SH DESCRIPTION .SH DESCRIPTION
Like du but more intuitive Like du but more intuitive
.SH OPTIONS .SH OPTIONS
.TP .TP
\fB\-d\fR, \fB\-\-depth\fR \fI<DEPTH>\fR \fB\-d\fR, \fB\-\-depth\fR
Depth to show Depth to show
.TP .TP
\fB\-T\fR, \fB\-\-threads\fR \fI<THREADS>\fR \fB\-n\fR, \fB\-\-number\-of\-lines\fR
Number of threads to use
.TP
\fB\-\-config\fR \fI<FILE>\fR
Specify a config file to use
.TP
\fB\-n\fR, \fB\-\-number\-of\-lines\fR \fI<NUMBER>\fR
Number of lines of output to show. (Default is terminal_height \- 10) Number of lines of output to show. (Default is terminal_height \- 10)
.TP .TP
\fB\-p\fR, \fB\-\-full\-paths\fR \fB\-p\fR, \fB\-\-full\-paths\fR
Subdirectories will not have their path shortened Subdirectories will not have their path shortened
.TP .TP
\fB\-X\fR, \fB\-\-ignore\-directory\fR \fI<PATH>\fR \fB\-X\fR, \fB\-\-ignore\-directory\fR
Exclude any file or directory with this path Exclude any file or directory with this name
.TP .TP
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR \fI<FILE>\fR \fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR
Exclude any file or directory with a regex matching that listed in this file; the file entries will be added to the ignore regexes provided by \-\-invert_filter Exclude any file or directory with a regex matching that listed in this file; the file entries will be added to the ignore regexes provided by \-\-invert_filter
.TP .TP
\fB\-L\fR, \fB\-\-dereference\-links\fR \fB\-L\fR, \fB\-\-dereference\-links\fR
@@ -45,16 +39,13 @@ Print tree upside down (biggest highest)
\fB\-c\fR, \fB\-\-no\-colors\fR \fB\-c\fR, \fB\-\-no\-colors\fR
No colors will be printed (Useful for commands like: watch) No colors will be printed (Useful for commands like: watch)
.TP .TP
\fB\-C\fR, \fB\-\-force\-colors\fR
Force color output
.TP
\fB\-b\fR, \fB\-\-no\-percent\-bars\fR \fB\-b\fR, \fB\-\-no\-percent\-bars\fR
No percent bars or percentages will be displayed No percent bars or percentages will be displayed
.TP .TP
\fB\-B\fR, \fB\-\-bars\-on\-right\fR \fB\-B\fR, \fB\-\-bars\-on\-right\fR
percent bars moved to right side of screen percent bars moved to right side of screen
.TP .TP
\fB\-z\fR, \fB\-\-min\-size\fR \fI<MIN_SIZE>\fR \fB\-z\fR, \fB\-\-min\-size\fR
Minimum size file to include in output Minimum size file to include in output
.TP .TP
\fB\-R\fR, \fB\-\-screen\-reader\fR \fB\-R\fR, \fB\-\-screen\-reader\fR
@@ -66,108 +57,43 @@ No total row will be displayed
\fB\-f\fR, \fB\-\-filecount\fR \fB\-f\fR, \fB\-\-filecount\fR
Directory \*(Aqsize\*(Aq is number of child files instead of disk size Directory \*(Aqsize\*(Aq is number of child files instead of disk size
.TP .TP
\fB\-i\fR, \fB\-\-ignore\-hidden\fR \fB\-i\fR, \fB\-\-ignore_hidden\fR
Do not display hidden files Do not display hidden files
.TP .TP
\fB\-v\fR, \fB\-\-invert\-filter\fR \fI<REGEX>\fR \fB\-v\fR, \fB\-\-invert\-filter\fR
Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$" Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$"
.TP .TP
\fB\-e\fR, \fB\-\-filter\fR \fI<REGEX>\fR \fB\-e\fR, \fB\-\-filter\fR
Only include filepaths matching this regex. For png files type: \-e "\\.png$" Only include filepaths matching this regex. For png files type: \-e "\\.png$"
.TP .TP
\fB\-t\fR, \fB\-\-file\-types\fR \fB\-t\fR, \fB\-\-file_types\fR
show only these file types show only these file types
.TP .TP
\fB\-w\fR, \fB\-\-terminal\-width\fR \fI<WIDTH>\fR \fB\-w\fR, \fB\-\-terminal_width\fR
Specify width of output overriding the auto detection of terminal width Specify width of output overriding the auto detection of terminal width
.TP .TP
\fB\-P\fR, \fB\-\-no\-progress\fR \fB\-H\fR, \fB\-\-si\fR
Disable the progress indication print sizes in powers of 1000 (e.g., 1.1G)
.TP .TP
\fB\-\-print\-errors\fR \fB\-P\fR, \fB\-\-no\-progress\fR
Print path with errors Disable the progress indication.
.TP .TP
\fB\-D\fR, \fB\-\-only\-dir\fR \fB\-D\fR, \fB\-\-only\-dir\fR
Only directories will be displayed Only directories will be displayed.
.TP .TP
\fB\-F\fR, \fB\-\-only\-file\fR \fB\-F\fR, \fB\-\-only\-file\fR
Only files will be displayed. (Finds your largest files) Only files will be displayed. (Finds your largest files)
.TP .TP
\fB\-o\fR, \fB\-\-output\-format\fR \fI<FORMAT>\fR \fB\-S\fR, \fB\-\-stack\-size\fR
Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size
.br
.br
\fIPossible values:\fR
.RS 14
.IP \(bu 2
si: SI prefix (powers of 1000)
.IP \(bu 2
b: byte (B)
.IP \(bu 2
k: kibibyte (KiB)
.IP \(bu 2
m: mebibyte (MiB)
.IP \(bu 2
g: gibibyte (GiB)
.IP \(bu 2
t: tebibyte (TiB)
.IP \(bu 2
kb: kilobyte (kB)
.IP \(bu 2
mb: megabyte (MB)
.IP \(bu 2
gb: gigabyte (GB)
.IP \(bu 2
tb: terabyte (TB)
.RE
.TP
\fB\-S\fR, \fB\-\-stack\-size\fR \fI<STACK_SIZE>\fR
Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824) Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824)
.TP .TP
\fB\-j\fR, \fB\-\-output\-json\fR
Output the directory tree as json to the current directory
.TP
\fB\-M\fR, \fB\-\-mtime\fR \fI<MTIME>\fR
+/\-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago, days are rounded down. That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and \-n => (currn, +∞)
.TP
\fB\-A\fR, \fB\-\-atime\fR \fI<ATIME>\fR
just like \-mtime, but based on file access time
.TP
\fB\-y\fR, \fB\-\-ctime\fR \fI<CTIME>\fR
just like \-mtime, but based on file change time
.TP
\fB\-\-files0\-from\fR \fI<FILES0_FROM>\fR
Read NUL\-terminated paths from FILE (use `\-` for stdin)
.TP
\fB\-\-files\-from\fR \fI<FILES_FROM>\fR
Read newline\-terminated paths from FILE (use `\-` for stdin)
.TP
\fB\-\-collapse\fR \fI<COLLAPSE>\fR
Keep these directories collapsed
.TP
\fB\-m\fR, \fB\-\-filetime\fR \fI<FILETIME>\fR
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size, where a/c/m is last accessed/changed/modified time
.br
.br
\fIPossible values:\fR
.RS 14
.IP \(bu 2
a: last accessed time
.IP \(bu 2
c: last changed time
.IP \(bu 2
m: last modified time
.RE
.TP
\fB\-h\fR, \fB\-\-help\fR \fB\-h\fR, \fB\-\-help\fR
Print help (see a summary with \*(Aq\-h\*(Aq) Print help
.TP .TP
\fB\-V\fR, \fB\-\-version\fR \fB\-V\fR, \fB\-\-version\fR
Print version Print version
.TP .TP
[\fIPATH\fR] [\fIparams\fR]
Input files or directories
.SH VERSION .SH VERSION
v1.2.3 v0.9.0

View File

@@ -1,261 +1,206 @@
use std::fmt; use clap::{value_parser, Arg, Command};
use clap::{Parser, ValueEnum, ValueHint};
// For single thread mode set this variable on your command line: // For single thread mode set this variable on your command line:
// export RAYON_NUM_THREADS=1 // export RAYON_NUM_THREADS=1
/// Like du but more intuitive pub fn build_cli() -> Command {
#[derive(Debug, Parser)] Command::new("Dust")
#[command(name("Dust"), version)] .about("Like du but more intuitive")
pub struct Cli { .version(env!("CARGO_PKG_VERSION"))
/// Depth to show .trailing_var_arg(true)
#[arg(short, long)] .arg(
pub depth: Option<usize>, Arg::new("depth")
.short('d')
/// Number of threads to use .long("depth")
#[arg(short('T'), long)] .value_parser(value_parser!(usize))
pub threads: Option<usize>, .help("Depth to show")
.num_args(1)
/// Specify a config file to use )
#[arg(long, value_name("FILE"), value_hint(ValueHint::FilePath))] .arg(
pub config: Option<String>, Arg::new("number_of_lines")
.short('n')
/// Number of lines of output to show. (Default is terminal_height - 10) .long("number-of-lines")
#[arg(short, long, value_name("NUMBER"))] .value_parser(value_parser!(usize))
pub number_of_lines: Option<usize>, .help("Number of lines of output to show. (Default is terminal_height - 10)")
.num_args(1)
/// Subdirectories will not have their path shortened )
#[arg(short('p'), long)] .arg(
pub full_paths: bool, Arg::new("display_full_paths")
.short('p')
/// Exclude any file or directory with this path .long("full-paths")
#[arg(short('X'), long, value_name("PATH"), value_hint(ValueHint::AnyPath))] .action(clap::ArgAction::SetTrue)
pub ignore_directory: Option<Vec<String>>, .help("Subdirectories will not have their path shortened"),
)
/// Exclude any file or directory with a regex matching that listed in this .arg(
/// file; the file entries will be added to the ignore regexes provided by Arg::new("ignore_all_in_file")
/// --invert_filter .short('X')
#[arg(short('I'), long, value_name("FILE"), value_hint(ValueHint::FilePath))] .long("ignore-directory")
pub ignore_all_in_file: Option<String>, .action(clap::ArgAction::Append)
.help("Exclude any file or directory with this name"),
/// dereference sym links - Treat sym links as directories and go into them )
#[arg(short('L'), long)] .arg(
pub dereference_links: bool, Arg::new("ignore_all_in_file")
.short('I')
/// Only count the files and directories on the same filesystem as the .long("ignore-all-in-file")
/// supplied directory .value_parser(value_parser!(String))
#[arg(short('x'), long)] .help("Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter"),
pub limit_filesystem: bool, )
.arg(
/// Use file length instead of blocks Arg::new("dereference_links")
#[arg(short('s'), long)] .short('L')
pub apparent_size: bool, .long("dereference-links")
.action(clap::ArgAction::SetTrue)
/// Print tree upside down (biggest highest) .help("dereference sym links - Treat sym links as directories and go into them"),
#[arg(short, long)] )
pub reverse: bool, .arg(
Arg::new("limit_filesystem")
/// No colors will be printed (Useful for commands like: watch) .short('x')
#[arg(short('c'), long)] .long("limit-filesystem")
pub no_colors: bool, .action(clap::ArgAction::SetTrue)
.help("Only count the files and directories on the same filesystem as the supplied directory"),
/// Force color output .arg(
#[arg(short('C'), long)] .arg(
pub force_colors: bool, Arg::new("display_apparent_size")
.short('s')
/// No percent bars or percentages will be displayed .long("apparent-size")
#[arg(short('b'), long)] .action(clap::ArgAction::SetTrue)
pub no_percent_bars: bool, .help("Use file length instead of blocks"),
)
/// percent bars moved to right side of screen .arg(
#[arg(short('B'), long)] Arg::new("reverse")
pub bars_on_right: bool, .short('r')
.long("reverse")
/// Minimum size file to include in output .action(clap::ArgAction::SetTrue)
#[arg(short('z'), long)] .help("Print tree upside down (biggest highest)"),
pub min_size: Option<String>, )
.arg(
/// For screen readers. Removes bars. Adds new column: depth level (May want Arg::new("no_colors")
/// to use -p too for full path) .short('c')
#[arg(short('R'), long)] .long("no-colors")
pub screen_reader: bool, .action(clap::ArgAction::SetTrue)
.help("No colors will be printed (Useful for commands like: watch)"),
/// No total row will be displayed )
#[arg(long)] .arg(
pub skip_total: bool, Arg::new("no_bars")
.short('b')
/// Directory 'size' is number of child files instead of disk size .long("no-percent-bars")
#[arg(short, long)] .action(clap::ArgAction::SetTrue)
pub filecount: bool, .help("No percent bars or percentages will be displayed"),
)
/// Do not display hidden files .arg(
// Do not use 'h'; it is used by 'help' Arg::new("bars_on_right")
#[arg(short, long)] .short('B')
pub ignore_hidden: bool, .long("bars-on-right")
.action(clap::ArgAction::SetTrue)
/// Exclude filepaths matching this regex. To ignore png files type: -v .help("percent bars moved to right side of screen"),
/// "\.png$" )
#[arg( .arg(
short('v'), Arg::new("min_size")
long, .short('z')
value_name("REGEX"), .long("min-size")
conflicts_with("filter"), .num_args(1)
conflicts_with("file_types") .help("Minimum size file to include in output"),
)] )
pub invert_filter: Option<Vec<String>>, .arg(
Arg::new("screen_reader")
/// Only include filepaths matching this regex. For png files type: -e .short('R')
/// "\.png$" .long("screen-reader")
#[arg(short('e'), long, value_name("REGEX"), conflicts_with("file_types"))] .action(clap::ArgAction::SetTrue)
pub filter: Option<Vec<String>>, .help("For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)"),
)
/// show only these file types .arg(
#[arg(short('t'), long, conflicts_with("depth"), conflicts_with("only_dir"))] Arg::new("skip_total")
pub file_types: bool, .long("skip-total")
.action(clap::ArgAction::SetTrue)
/// Specify width of output overriding the auto detection of terminal width .help("No total row will be displayed"),
#[arg(short('w'), long, value_name("WIDTH"))] )
pub terminal_width: Option<usize>, .arg(
Arg::new("by_filecount")
/// Disable the progress indication. .short('f')
#[arg(short('P'), long)] .long("filecount")
pub no_progress: bool, .action(clap::ArgAction::SetTrue)
.help("Directory 'size' is number of child files instead of disk size"),
/// Print path with errors. )
#[arg(long)] .arg(
pub print_errors: bool, Arg::new("ignore_hidden")
.short('i') // Do not use 'h'; it is used by 'help'
/// Only directories will be displayed. .long("ignore_hidden")
#[arg( .action(clap::ArgAction::SetTrue)
short('D'), .help("Do not display hidden files"),
long, )
conflicts_with("only_file"), .arg(
conflicts_with("file_types") Arg::new("invert_filter")
)] .short('v')
pub only_dir: bool, .long("invert-filter")
.action(clap::ArgAction::Append)
/// Only files will be displayed. (Finds your largest files) .conflicts_with("filter")
#[arg(short('F'), long, conflicts_with("only_dir"))] .conflicts_with("types")
pub only_file: bool, .help("Exclude filepaths matching this regex. To ignore png files type: -v \"\\.png$\" "),
)
/// Changes output display size. si will print sizes in powers of 1000. b k .arg(
/// m g t kb mb gb tb will print the whole tree in that size. Arg::new("filter")
#[arg(short, long, value_enum, value_name("FORMAT"), ignore_case(true))] .short('e')
pub output_format: Option<OutputFormat>, .long("filter")
.action(clap::ArgAction::Append)
/// Specify memory to use as stack size - use if you see: 'fatal runtime .conflicts_with("types")
/// error: stack overflow' (default low memory=1048576, high .help("Only include filepaths matching this regex. For png files type: -e \"\\.png$\" "),
/// memory=1073741824) )
#[arg(short('S'), long)] .arg(
pub stack_size: Option<usize>, Arg::new("types")
.short('t')
/// Input files or directories. .long("file_types")
#[arg(value_name("PATH"), value_hint(ValueHint::AnyPath))] .conflicts_with("depth")
pub params: Option<Vec<String>>, .conflicts_with("only_dir")
.action(clap::ArgAction::SetTrue)
/// Output the directory tree as json to the current directory .help("show only these file types"),
#[arg(short('j'), long)] )
pub output_json: bool, .arg(
Arg::new("width")
/// +/-n matches files modified more/less than n days ago, and n matches .short('w')
/// files modified exactly n days ago, days are rounded down. That is +n => .long("terminal_width")
/// (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (currn, +∞) .num_args(1)
#[arg(short('M'), long, allow_hyphen_values(true))] .value_parser(value_parser!(usize))
pub mtime: Option<String>, .help("Specify width of output overriding the auto detection of terminal width"),
)
/// just like -mtime, but based on file access time .arg(
#[arg(short('A'), long, allow_hyphen_values(true))] Arg::new("iso")
pub atime: Option<String>, .short('H')
.long("si")
/// just like -mtime, but based on file change time .action(clap::ArgAction::SetTrue)
#[arg(short('y'), long, allow_hyphen_values(true))] .help("print sizes in powers of 1000 (e.g., 1.1G)")
pub ctime: Option<String>, )
.arg(
/// Read NUL-terminated paths from FILE (use `-` for stdin). Arg::new("disable_progress")
#[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files_from"))] .short('P')
pub files0_from: Option<String>, .long("no-progress")
.action(clap::ArgAction::SetTrue)
/// Read newline-terminated paths from FILE (use `-` for stdin). .help("Disable the progress indication."),
#[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files0_from"))] )
pub files_from: Option<String>, .arg(
Arg::new("only_dir")
/// Keep these directories collapsed .short('D')
#[arg(long, value_hint(ValueHint::AnyPath))] .long("only-dir")
pub collapse: Option<Vec<String>>, .conflicts_with("only_file")
.conflicts_with("types")
/// Directory 'size' is max filetime of child files instead of disk size, .action(clap::ArgAction::SetTrue)
/// where a/c/m is last accessed/changed/modified time .help("Only directories will be displayed."),
#[arg(short('m'), long, value_enum)] )
pub filetime: Option<FileTime>, .arg(
} Arg::new("only_file")
.short('F')
#[derive(Clone, Copy, Debug, ValueEnum)] .long("only-file")
#[value(rename_all = "lower")] .conflicts_with("only_dir")
pub enum OutputFormat { .action(clap::ArgAction::SetTrue)
/// SI prefix (powers of 1000) .help("Only files will be displayed. (Finds your largest files)"),
SI, )
.arg(
/// byte (B) Arg::new("stack_size")
B, .short('S')
.long("stack-size")
/// kibibyte (KiB) .num_args(1)
#[value(name = "k", alias("kib"))] .value_parser(value_parser!(usize))
KiB, .help("Specify memory to use as stack size - use if you see: 'fatal runtime error: stack overflow' (default low memory=1048576, high memory=1073741824)"),
)
/// mebibyte (MiB) .arg(Arg::new("params").num_args(1..)
#[value(name = "m", alias("mib"))] .value_parser(value_parser!(String)))
MiB,
/// gibibyte (GiB)
#[value(name = "g", alias("gib"))]
GiB,
/// tebibyte (TiB)
#[value(name = "t", alias("tib"))]
TiB,
/// kilobyte (kB)
KB,
/// megabyte (MB)
MB,
/// gigabyte (GB)
GB,
/// terabyte (TB)
TB,
}
impl fmt::Display for OutputFormat {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::SI => write!(f, "si"),
Self::B => write!(f, "b"),
Self::KiB => write!(f, "k"),
Self::MiB => write!(f, "m"),
Self::GiB => write!(f, "g"),
Self::TiB => write!(f, "t"),
Self::KB => write!(f, "kb"),
Self::MB => write!(f, "mb"),
Self::GB => write!(f, "gb"),
Self::TB => write!(f, "tb"),
}
}
}
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum FileTime {
/// last accessed time
#[value(name = "a", alias("accessed"))]
Accessed,
/// last changed time
#[value(name = "c", alias("changed"))]
Changed,
/// last modified time
#[value(name = "m", alias("modified"))]
Modified,
} }
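A minimal sketch of how the derive-based Cli above would typically be consumed from a main function. This wiring is an assumption for illustration only, not part of the diff; it presumes the Cli struct and OutputFormat enum shown above are in scope.

use clap::Parser;

// Hypothetical main(): illustration only, not from the dust sources.
fn main() {
    let cli = Cli::parse();
    if let Some(fmt) = cli.output_format {
        // OutputFormat implements Display, so it can be printed directly.
        eprintln!("output format: {fmt}");
    }
    for path in cli.params.unwrap_or_default() {
        println!("would scan {path}");
    }
}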

View File

@@ -1,30 +1,25 @@
use crate::node::FileTime; use clap::ArgMatches;
use chrono::{Local, TimeZone};
use config_file::FromConfigFile; use config_file::FromConfigFile;
use regex::Regex;
use serde::Deserialize; use serde::Deserialize;
use std::io::IsTerminal;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use crate::cli::Cli; use crate::display::UNITS;
use crate::dir_walker::Operator;
use crate::display::get_number_format;
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
#[derive(Deserialize, Default)] #[derive(Deserialize, Default)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
#[serde(deny_unknown_fields)]
pub struct Config { pub struct Config {
pub display_full_paths: Option<bool>, pub display_full_paths: Option<bool>,
pub display_apparent_size: Option<bool>, pub display_apparent_size: Option<bool>,
pub reverse: Option<bool>, pub reverse: Option<bool>,
pub no_colors: Option<bool>, pub no_colors: Option<bool>,
pub force_colors: Option<bool>,
pub no_bars: Option<bool>, pub no_bars: Option<bool>,
pub skip_total: Option<bool>, pub skip_total: Option<bool>,
pub screen_reader: Option<bool>, pub screen_reader: Option<bool>,
pub ignore_hidden: Option<bool>, pub ignore_hidden: Option<bool>,
pub output_format: Option<String>, pub iso: Option<bool>,
pub min_size: Option<String>, pub min_size: Option<String>,
pub only_dir: Option<bool>, pub only_dir: Option<bool>,
pub only_file: Option<bool>, pub only_file: Option<bool>,
@@ -32,248 +27,131 @@ pub struct Config {
pub depth: Option<usize>, pub depth: Option<usize>,
pub bars_on_right: Option<bool>, pub bars_on_right: Option<bool>,
pub stack_size: Option<usize>, pub stack_size: Option<usize>,
pub threads: Option<usize>,
pub output_json: Option<bool>,
pub print_errors: Option<bool>,
pub files0_from: Option<String>,
pub number_of_lines: Option<usize>,
pub files_from: Option<String>,
} }
impl Config { impl Config {
pub fn get_files0_from(&self, options: &Cli) -> Option<String> { pub fn get_no_colors(&self, options: &ArgMatches) -> bool {
let from_file = &options.files0_from; Some(true) == self.no_colors || options.get_flag("no_colors")
match from_file {
None => self.files0_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
} }
pub fn get_disable_progress(&self, options: &ArgMatches) -> bool {
pub fn get_files_from(&self, options: &Cli) -> Option<String> { Some(true) == self.disable_progress
let from_file = &options.files_from; || options.get_flag("disable_progress")
match from_file { || !std::io::stdout().is_terminal()
None => self.files_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
} }
pub fn get_no_colors(&self, options: &Cli) -> bool { pub fn get_apparent_size(&self, options: &ArgMatches) -> bool {
Some(true) == self.no_colors || options.no_colors Some(true) == self.display_apparent_size || options.get_flag("display_apparent_size")
} }
pub fn get_force_colors(&self, options: &Cli) -> bool { pub fn get_ignore_hidden(&self, options: &ArgMatches) -> bool {
Some(true) == self.force_colors || options.force_colors Some(true) == self.ignore_hidden || options.get_flag("ignore_hidden")
} }
pub fn get_disable_progress(&self, options: &Cli) -> bool { pub fn get_full_paths(&self, options: &ArgMatches) -> bool {
Some(true) == self.disable_progress || options.no_progress // If we are only showing files, always show full paths
Some(true) == self.display_full_paths
|| options.get_flag("display_full_paths")
|| self.get_only_file(options)
} }
pub fn get_apparent_size(&self, options: &Cli) -> bool { pub fn get_reverse(&self, options: &ArgMatches) -> bool {
Some(true) == self.display_apparent_size || options.apparent_size Some(true) == self.reverse || options.get_flag("reverse")
} }
pub fn get_ignore_hidden(&self, options: &Cli) -> bool { pub fn get_no_bars(&self, options: &ArgMatches) -> bool {
Some(true) == self.ignore_hidden || options.ignore_hidden Some(true) == self.no_bars || options.get_flag("no_bars")
} }
pub fn get_full_paths(&self, options: &Cli) -> bool { pub fn get_iso(&self, options: &ArgMatches) -> bool {
Some(true) == self.display_full_paths || options.full_paths Some(true) == self.iso || options.get_flag("iso")
} }
pub fn get_reverse(&self, options: &Cli) -> bool { pub fn get_skip_total(&self, options: &ArgMatches) -> bool {
Some(true) == self.reverse || options.reverse Some(true) == self.skip_total || options.get_flag("skip_total")
} }
pub fn get_no_bars(&self, options: &Cli) -> bool { pub fn get_screen_reader(&self, options: &ArgMatches) -> bool {
Some(true) == self.no_bars || options.no_percent_bars Some(true) == self.screen_reader || options.get_flag("screen_reader")
} }
pub fn get_output_format(&self, options: &Cli) -> String { pub fn get_depth(&self, options: &ArgMatches) -> usize {
let out_fmt = options.output_format; if let Some(v) = options.get_one::<usize>("depth") {
(match out_fmt { return *v;
None => match &self.output_format {
None => "".to_string(),
Some(x) => x.to_string(),
},
Some(x) => x.to_string(),
})
.to_lowercase()
}
pub fn get_filetime(&self, options: &Cli) -> Option<FileTime> {
options.filetime.map(FileTime::from)
}
pub fn get_skip_total(&self, options: &Cli) -> bool {
Some(true) == self.skip_total || options.skip_total
}
pub fn get_screen_reader(&self, options: &Cli) -> bool {
Some(true) == self.screen_reader || options.screen_reader
}
pub fn get_depth(&self, options: &Cli) -> usize {
if let Some(v) = options.depth {
return v;
} }
self.depth.unwrap_or(usize::MAX) self.depth.unwrap_or(usize::MAX)
} }
pub fn get_min_size(&self, options: &Cli) -> Option<usize> { pub fn get_min_size(&self, options: &ArgMatches, iso: bool) -> Option<usize> {
let size_from_param = options.min_size.as_ref(); let size_from_param = options.get_one::<String>("min_size");
self._get_min_size(size_from_param) self._get_min_size(size_from_param, iso)
} }
fn _get_min_size(&self, min_size: Option<&String>) -> Option<usize> { fn _get_min_size(&self, min_size: Option<&String>, iso: bool) -> Option<usize> {
let size_from_param = min_size.and_then(|a| convert_min_size(a)); let size_from_param = min_size.and_then(|a| convert_min_size(a, iso));
if size_from_param.is_none() { if size_from_param.is_none() {
self.min_size self.min_size
.as_ref() .as_ref()
.and_then(|a| convert_min_size(a.as_ref())) .and_then(|a| convert_min_size(a.as_ref(), iso))
} else { } else {
size_from_param size_from_param
} }
} }
pub fn get_only_dir(&self, options: &Cli) -> bool { pub fn get_only_dir(&self, options: &ArgMatches) -> bool {
Some(true) == self.only_dir || options.only_dir Some(true) == self.only_dir || options.get_flag("only_dir")
} }
pub fn get_only_file(&self, options: &ArgMatches) -> bool {
pub fn get_print_errors(&self, options: &Cli) -> bool { Some(true) == self.only_file || options.get_flag("only_file")
Some(true) == self.print_errors || options.print_errors
} }
pub fn get_only_file(&self, options: &Cli) -> bool { pub fn get_bars_on_right(&self, options: &ArgMatches) -> bool {
Some(true) == self.only_file || options.only_file Some(true) == self.bars_on_right || options.get_flag("bars_on_right")
} }
pub fn get_bars_on_right(&self, options: &Cli) -> bool { pub fn get_custom_stack_size(&self, options: &ArgMatches) -> Option<usize> {
Some(true) == self.bars_on_right || options.bars_on_right let from_cmd_line = options.get_one::<usize>("stack_size");
}
pub fn get_custom_stack_size(&self, options: &Cli) -> Option<usize> {
let from_cmd_line = options.stack_size;
if from_cmd_line.is_none() { if from_cmd_line.is_none() {
self.stack_size self.stack_size
} else { } else {
from_cmd_line from_cmd_line.copied()
} }
} }
pub fn get_threads(&self, options: &Cli) -> Option<usize> {
let from_cmd_line = options.threads;
if from_cmd_line.is_none() {
self.threads
} else {
from_cmd_line
}
}
pub fn get_output_json(&self, options: &Cli) -> bool {
Some(true) == self.output_json || options.output_json
}
pub fn get_number_of_lines(&self, options: &Cli) -> Option<usize> {
let from_cmd_line = options.number_of_lines;
if from_cmd_line.is_none() {
self.number_of_lines
} else {
from_cmd_line
}
}
pub fn get_modified_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
get_filter_time_operator(options.mtime.as_ref(), get_current_date_epoch_seconds())
}
pub fn get_accessed_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
get_filter_time_operator(options.atime.as_ref(), get_current_date_epoch_seconds())
}
pub fn get_changed_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
get_filter_time_operator(options.ctime.as_ref(), get_current_date_epoch_seconds())
}
} }
fn get_current_date_epoch_seconds() -> i64 { fn convert_min_size(input: &str, iso: bool) -> Option<usize> {
// calculate current date epoch seconds let chars_as_vec: Vec<char> = input.chars().collect();
let now = Local::now(); match chars_as_vec.split_last() {
let current_date = now.date_naive(); Some((last, start)) => {
let mut starts: String = start.iter().collect::<String>();
let current_date_time = current_date.and_hms_opt(0, 0, 0).unwrap(); for (i, u) in UNITS.iter().rev().enumerate() {
Local if Some(*u) == last.to_uppercase().next() {
.from_local_datetime(&current_date_time) return match starts.parse::<usize>() {
.unwrap() Ok(pure) => {
.timestamp() let num: usize = if iso { 1000 } else { 1024 };
} let marker = pure * num.pow((i + 1) as u32);
Some(marker)
fn get_filter_time_operator( }
option_value: Option<&String>, Err(_) => {
current_date_epoch_seconds: i64, eprintln!("Ignoring invalid min-size: {input}");
) -> Option<(Operator, i64)> { None
match option_value { }
Some(val) => { };
let time = current_date_epoch_seconds }
- val
.parse::<i64>()
.unwrap_or_else(|_| panic!("invalid data format"))
.abs()
* DAY_SECONDS;
match val.chars().next().expect("Value should not be empty") {
'+' => Some((Operator::LessThan, time - DAY_SECONDS)),
'-' => Some((Operator::GreaterThan, time)),
_ => Some((Operator::Equal, time - DAY_SECONDS)),
} }
starts.push(*last);
starts
.parse()
.map_err(|_| {
eprintln!("Ignoring invalid min-size: {input}");
})
.ok()
} }
None => None, None => None,
} }
} }
fn convert_min_size(input: &str) -> Option<usize> { fn get_config_locations(base: &Path) -> Vec<PathBuf> {
let re = Regex::new(r"([0-9]+)(\w*)").unwrap();
if let Some(cap) = re.captures(input) {
let (_, [digits, letters]) = cap.extract();
// Failure to parse should be impossible due to regex match
let digits_as_usize: Option<usize> = digits.parse().ok();
match digits_as_usize {
Some(parsed_digits) => {
let number_format = get_number_format(&letters.to_lowercase());
match number_format {
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
None => {
if letters.is_empty() {
Some(parsed_digits)
} else {
eprintln!("Ignoring invalid min-size: {input}");
None
}
}
}
}
None => None,
}
} else {
None
}
}
fn get_config_locations(base: PathBuf) -> Vec<PathBuf> {
vec![ vec![
base.join(".dust.toml"), base.join(".dust.toml"),
base.join(".config").join("dust").join("config.toml"), base.join(".config").join("dust").join("config.toml"),
] ]
} }
pub fn get_config(conf_path: Option<&String>) -> Config { pub fn get_config() -> Config {
match conf_path { if let Some(home) = directories::BaseDirs::new() {
Some(path_str) => { for path in get_config_locations(home.home_dir()) {
let path = Path::new(path_str);
if path.exists() { if path.exists() {
match Config::from_config_file(path) { if let Ok(config) = Config::from_config_file(path) {
Ok(config) => return config, return config;
Err(e) => {
eprintln!("Ignoring invalid config file '{}': {}", &path.display(), e)
}
}
} else {
eprintln!("Config file {:?} doesn't exists", &path.display());
}
}
None => {
if let Some(home) = std::env::home_dir() {
for path in get_config_locations(home) {
if path.exists()
&& let Ok(config) = Config::from_config_file(&path)
{
return config;
}
} }
} }
} }
@@ -287,45 +165,30 @@ pub fn get_config(conf_path: Option<&String>) -> Config {
mod tests { mod tests {
#[allow(unused_imports)] #[allow(unused_imports)]
use super::*; use super::*;
use chrono::{Datelike, Timelike}; use clap::{value_parser, Arg, ArgMatches, Command};
use clap::Parser;
#[test]
fn test_get_current_date_epoch_seconds() {
let epoch_seconds = get_current_date_epoch_seconds();
let dt = Local.timestamp_opt(epoch_seconds, 0).unwrap();
assert_eq!(dt.hour(), 0);
assert_eq!(dt.minute(), 0);
assert_eq!(dt.second(), 0);
assert_eq!(dt.date_naive().day(), Local::now().date_naive().day());
assert_eq!(dt.date_naive().month(), Local::now().date_naive().month());
assert_eq!(dt.date_naive().year(), Local::now().date_naive().year());
}
#[test] #[test]
fn test_conversion() { fn test_conversion() {
assert_eq!(convert_min_size("55"), Some(55)); assert_eq!(convert_min_size("55", false), Some(55));
assert_eq!(convert_min_size("12344321"), Some(12344321)); assert_eq!(convert_min_size("12344321", false), Some(12344321));
assert_eq!(convert_min_size("95RUBBISH"), None); assert_eq!(convert_min_size("95RUBBISH", false), None);
assert_eq!(convert_min_size("10Ki"), Some(10 * 1024)); assert_eq!(convert_min_size("10K", false), Some(10 * 1024));
assert_eq!(convert_min_size("10MiB"), Some(10 * 1024usize.pow(2))); assert_eq!(convert_min_size("10M", false), Some(10 * 1024usize.pow(2)));
assert_eq!(convert_min_size("10M"), Some(10 * 1024usize.pow(2))); assert_eq!(convert_min_size("10M", true), Some(10 * 1000usize.pow(2)));
assert_eq!(convert_min_size("10Mb"), Some(10 * 1000usize.pow(2))); assert_eq!(convert_min_size("2G", false), Some(2 * 1024usize.pow(3)));
assert_eq!(convert_min_size("2Gi"), Some(2 * 1024usize.pow(3)));
} }
#[test] #[test]
fn test_min_size_from_config_applied_or_overridden() { fn test_min_size_from_config_applied_or_overridden() {
let c = Config { let c = Config {
min_size: Some("1KiB".to_owned()), min_size: Some("1K".to_owned()),
..Default::default() ..Default::default()
}; };
assert_eq!(c._get_min_size(None), Some(1024)); assert_eq!(c._get_min_size(None, false), Some(1024));
assert_eq!(c._get_min_size(Some(&"2KiB".into())), Some(2048)); assert_eq!(c._get_min_size(Some(&"2K".into()), false), Some(2048));
assert_eq!(c._get_min_size(Some(&"1kb".into())), Some(1000)); assert_eq!(c._get_min_size(None, true), Some(1000));
assert_eq!(c._get_min_size(Some(&"2KB".into())), Some(2000)); assert_eq!(c._get_min_size(Some(&"2K".into()), true), Some(2000));
} }
#[test] #[test]
@@ -357,75 +220,14 @@ mod tests {
assert_eq!(c.get_depth(&args), 5); assert_eq!(c.get_depth(&args), 5);
} }
fn get_args(args: Vec<&str>) -> Cli { fn get_args(args: Vec<&str>) -> ArgMatches {
Cli::parse_from(args) Command::new("Dust")
} .arg(
Arg::new("depth")
#[test] .long("depth")
fn test_get_filetime() { .num_args(1)
// No config and no flag. .value_parser(value_parser!(usize)),
let c = Config::default(); )
let args = get_filetime_args(vec!["dust"]); .get_matches_from(args)
assert_eq!(c.get_filetime(&args), None);
// Config is not defined and flag is defined as access time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "a"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "accessed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
// Config is not defined and flag is defined as modified time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "m"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "modified"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
// Config is not defined and flag is defined as changed time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "c"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "changed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
}
fn get_filetime_args(args: Vec<&str>) -> Cli {
Cli::parse_from(args)
}
#[test]
fn test_get_number_of_lines() {
// No config and no flag.
let c = Config::default();
let args = get_args(vec![]);
assert_eq!(c.get_number_of_lines(&args), None);
// Config is not defined and flag is defined.
let c = Config::default();
let args = get_args(vec!["dust", "--number-of-lines", "5"]);
assert_eq!(c.get_number_of_lines(&args), Some(5));
// Config is defined and flag is not defined.
let c = Config {
number_of_lines: Some(3),
..Default::default()
};
let args = get_args(vec![]);
assert_eq!(c.get_number_of_lines(&args), Some(3));
// Both config and flag are defined.
let c = Config {
number_of_lines: Some(3),
..Default::default()
};
let args = get_args(vec!["dust", "--number-of-lines", "5"]);
assert_eq!(c.get_number_of_lines(&args), Some(5));
} }
} }
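To make the day-based filter semantics above concrete, here is a small test sketch. It is not part of the diff and assumes Operator, DAY_SECONDS, get_current_date_epoch_seconds and get_filter_time_operator from the module above are in scope.

// Sketch of a unit test pinning down the +n / n / -n mapping implemented by
// get_filter_time_operator, with `today` being local midnight in epoch seconds.
#[test]
fn mtime_plus_minus_mapping_sketch() {
    let today = get_current_date_epoch_seconds();
    // "+2": strictly older than two whole days.
    assert!(matches!(
        get_filter_time_operator(Some(&"+2".to_string()), today),
        Some((Operator::LessThan, t)) if t == today - 3 * DAY_SECONDS
    ));
    // "-2": modified within the last two days.
    assert!(matches!(
        get_filter_time_operator(Some(&"-2".to_string()), today),
        Some((Operator::GreaterThan, t)) if t == today - 2 * DAY_SECONDS
    ));
    // "2": modified exactly two days ago (days rounded down).
    assert!(matches!(
        get_filter_time_operator(Some(&"2".to_string()), today),
        Some((Operator::Equal, t)) if t == today - 3 * DAY_SECONDS
    ));
}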

View File

@@ -1,21 +1,17 @@
use std::cmp::Ordering;
use std::fs; use std::fs;
use std::io::Error;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use crate::node::Node; use crate::node::Node;
use crate::progress::ORDERING;
use crate::progress::Operation; use crate::progress::Operation;
use crate::progress::PAtomicInfo; use crate::progress::PAtomicInfo;
use crate::progress::RuntimeErrors; use crate::progress::RuntimeErrors;
use crate::utils::is_filtered_out_due_to_file_time; use crate::progress::ORDERING;
use crate::utils::is_filtered_out_due_to_invert_regex; use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex; use crate::utils::is_filtered_out_due_to_regex;
use rayon::iter::ParallelBridge; use rayon::iter::ParallelBridge;
use rayon::prelude::ParallelIterator; use rayon::prelude::ParallelIterator;
use regex::Regex; use regex::Regex;
use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::collections::HashSet; use std::collections::HashSet;
@@ -23,27 +19,14 @@ use std::collections::HashSet;
use crate::node::build_node; use crate::node::build_node;
use std::fs::DirEntry; use std::fs::DirEntry;
use crate::node::FileTime;
use crate::platform::get_metadata; use crate::platform::get_metadata;
#[derive(Debug)]
pub enum Operator {
Equal = 0,
LessThan = 1,
GreaterThan = 2,
}
pub struct WalkData<'a> { pub struct WalkData<'a> {
pub ignore_directories: HashSet<PathBuf>, pub ignore_directories: HashSet<PathBuf>,
pub filter_regex: &'a [Regex], pub filter_regex: &'a [Regex],
pub invert_filter_regex: &'a [Regex], pub invert_filter_regex: &'a [Regex],
pub allowed_filesystems: HashSet<u64>, pub allowed_filesystems: HashSet<u64>,
pub filter_modified_time: Option<(Operator, i64)>,
pub filter_accessed_time: Option<(Operator, i64)>,
pub filter_changed_time: Option<(Operator, i64)>,
pub use_apparent_size: bool, pub use_apparent_size: bool,
pub by_filecount: bool, pub by_filecount: bool,
pub by_filetime: &'a Option<FileTime>,
pub ignore_hidden: bool, pub ignore_hidden: bool,
pub follow_links: bool, pub follow_links: bool,
pub progress_data: Arc<PAtomicInfo>, pub progress_data: Arc<PAtomicInfo>,
@@ -61,19 +44,24 @@ pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
prog_data.state.store(Operation::PREPARING, ORDERING); prog_data.state.store(Operation::PREPARING, ORDERING);
clean_inodes(node, &mut inodes, walk_data) clean_inodes(node, &mut inodes, walk_data.use_apparent_size)
}) })
.collect(); .collect();
top_level_nodes top_level_nodes
} }
// Remove files which have the same inode; we don't want to double count them. // Remove files which have the same inode; we don't want to double count them.
fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> { fn clean_inodes(
if !walk_data.use_apparent_size x: Node,
&& let Some(id) = x.inode_device inodes: &mut HashSet<(u64, u64)>,
&& !inodes.insert(id) use_apparent_size: bool,
{ ) -> Option<Node> {
return None; if !use_apparent_size {
if let Some(id) = x.inode_device {
if !inodes.insert(id) {
return None;
}
}
} }
// Sort Nodes so iteration order is predictable // Sort Nodes so iteration order is predictable
@@ -81,25 +69,12 @@ fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData)
tmp.sort_by(sort_by_inode); tmp.sort_by(sort_by_inode);
let new_children: Vec<_> = tmp let new_children: Vec<_> = tmp
.into_iter() .into_iter()
.filter_map(|c| clean_inodes(c, inodes, walk_data)) .filter_map(|c| clean_inodes(c, inodes, use_apparent_size))
.collect(); .collect();
let actual_size = if walk_data.by_filetime.is_some() {
// If by_filetime is Some, directory 'size' is the maximum filetime among child files instead of disk size
new_children
.iter()
.map(|c| c.size)
.chain(std::iter::once(x.size))
.max()
.unwrap_or(0)
} else {
// If by_filetime is None, directory 'size' is the sum of disk sizes or file counts of child files
x.size + new_children.iter().map(|c| c.size).sum::<u64>()
};
Some(Node { Some(Node {
name: x.name, name: x.name,
size: actual_size, size: x.size + new_children.iter().map(|c| c.size).sum::<u64>(),
children: new_children, children: new_children,
inode_device: x.inode_device, inode_device: x.inode_device,
depth: x.depth, depth: x.depth,
@@ -108,77 +83,29 @@ fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData)
fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering { fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
// Sorting by inode is quicker than by sorting by name/size // Sorting by inode is quicker than by sorting by name/size
match (a.inode_device, b.inode_device) { if let Some(x) = a.inode_device {
(Some(x), Some(y)) => { if let Some(y) = b.inode_device {
if x.0 != y.0 { if x.0 != y.0 {
x.0.cmp(&y.0) return x.0.cmp(&y.0);
} else if x.1 != y.1 { } else if x.1 != y.1 {
x.1.cmp(&y.1) return x.1.cmp(&y.1);
} else {
a.name.cmp(&b.name)
} }
} }
(Some(_), None) => Ordering::Greater,
(None, Some(_)) => Ordering::Less,
(None, None) => a.name.cmp(&b.name),
} }
} a.name.cmp(&b.name)
// Check if `path` is inside an ignored directory
fn is_ignored_path(path: &Path, walk_data: &WalkData) -> bool {
if walk_data.ignore_directories.contains(path) {
return true;
}
// Entry is inside an ignored absolute path
// Absolute paths should be canonicalized before being added to `WalkData.ignore_directories`
for ignored_path in walk_data.ignore_directories.iter() {
if !ignored_path.is_absolute() {
continue;
}
let absolute_entry_path = std::fs::canonicalize(path).unwrap_or_default();
if absolute_entry_path.starts_with(ignored_path) {
return true;
}
}
false
} }
fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool { fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
if is_ignored_path(&entry.path(), walk_data) {
return true;
}
let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.'); let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.');
let follow_links = walk_data.follow_links && entry.file_type().is_ok_and(|ft| ft.is_symlink()); let is_ignored_path = walk_data.ignore_directories.contains(&entry.path());
if !walk_data.allowed_filesystems.is_empty() { if !walk_data.allowed_filesystems.is_empty() {
let size_inode_device = get_metadata(entry.path(), false, follow_links); let size_inode_device = get_metadata(&entry.path(), false);
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device
&& !walk_data.allowed_filesystems.contains(&dev) if let Some((_size, Some((_id, dev)))) = size_inode_device {
{ if !walk_data.allowed_filesystems.contains(&dev) {
return true; return true;
} }
}
if walk_data.filter_accessed_time.is_some()
|| walk_data.filter_modified_time.is_some()
|| walk_data.filter_changed_time.is_some()
{
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device
&& entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
} }
} }
@@ -197,7 +124,7 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
return true; return true;
} }
is_dot_file && walk_data.ignore_hidden (is_dot_file && walk_data.ignore_hidden) || is_ignored_path
} }
fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> { fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
@@ -212,17 +139,15 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
.into_iter() .into_iter()
.par_bridge() .par_bridge()
.filter_map(|entry| { .filter_map(|entry| {
match entry { if let Ok(ref entry) = entry {
Ok(ref entry) => { // uncommenting the below line gives simpler code but
// uncommenting the below line gives simpler code but // rayon doesn't parallelize as well giving a 3X performance drop
// rayon doesn't parallelize as well giving a 3X performance drop // hence we unravel the recursion a bit
// hence we unravel the recursion a bit
// return walk(entry.path(), walk_data, depth) // return walk(entry.path(), walk_data, depth)
if !ignore_file(entry, walk_data) if !ignore_file(entry, walk_data) {
&& let Ok(data) = entry.file_type() if let Ok(data) = entry.file_type() {
{
if data.is_dir() if data.is_dir()
|| (walk_data.follow_links && data.is_symlink()) || (walk_data.follow_links && data.is_symlink())
{ {
@@ -232,10 +157,13 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
let node = build_node( let node = build_node(
entry.path(), entry.path(),
vec![], vec![],
walk_data.filter_regex,
walk_data.invert_filter_regex,
walk_data.use_apparent_size,
data.is_symlink(), data.is_symlink(),
data.is_file(), data.is_file(),
walk_data.by_filecount,
depth, depth,
walk_data,
); );
prog_data.num_files.fetch_add(1, ORDERING); prog_data.num_files.fetch_add(1, ORDERING);
@@ -246,22 +174,28 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
return node; return node;
} }
} }
Err(ref failed) => { } else {
if handle_error_and_retry(failed, &dir, walk_data) { let mut editable_error = errors.lock().unwrap();
return walk(dir.clone(), walk_data, depth); editable_error.no_permissions = true
}
}
} }
None None
}) })
.collect() .collect()
} }
Err(failed) => { Err(failed) => {
if handle_error_and_retry(&failed, &dir, walk_data) { let mut editable_error = errors.lock().unwrap();
return walk(dir, walk_data, depth); match failed.kind() {
} else { std::io::ErrorKind::PermissionDenied => {
vec![] editable_error.no_permissions = true;
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
} }
vec![]
} }
} }
} else { } else {
@@ -272,52 +206,20 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
} }
vec![] vec![]
}; };
let is_symlink = if walk_data.follow_links { build_node(
match fs::symlink_metadata(&dir) { dir,
Ok(metadata) => metadata.file_type().is_symlink(), children,
Err(_) => false, walk_data.filter_regex,
} walk_data.invert_filter_regex,
} else { walk_data.use_apparent_size,
false false,
}; false,
build_node(dir, children, is_symlink, false, depth, walk_data) walk_data.by_filecount,
} depth,
)
fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
let mut editable_error = walk_data.errors.lock().unwrap();
match failed.kind() {
std::io::ErrorKind::PermissionDenied => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::InvalidInput => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
std::io::ErrorKind::Interrupted => {
editable_error.interrupted_error += 1;
// This does happen on some systems. It was set to 3 but sometimes dust runs would exceed this
// However, if there is no limit, this results in infinite retries and dust never finishes
if editable_error.interrupted_error > 999 {
panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
} else {
return true;
}
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
}
false
} }
mod tests { mod tests {
#[allow(unused_imports)] #[allow(unused_imports)]
use super::*; use super::*;
@@ -332,43 +234,17 @@ mod tests {
} }
} }
#[cfg(test)]
fn create_walker<'a>(use_apparent_size: bool) -> WalkData<'a> {
use crate::PIndicator;
let indicator = PIndicator::build_me();
WalkData {
ignore_directories: HashSet::new(),
filter_regex: &[],
invert_filter_regex: &[],
allowed_filesystems: HashSet::new(),
filter_modified_time: Some((Operator::GreaterThan, 0)),
filter_accessed_time: Some((Operator::GreaterThan, 0)),
filter_changed_time: Some((Operator::GreaterThan, 0)),
use_apparent_size,
by_filecount: false,
by_filetime: &None,
ignore_hidden: false,
follow_links: false,
progress_data: indicator.data.clone(),
errors: Arc::new(Mutex::new(RuntimeErrors::default())),
}
}
#[test] #[test]
#[allow(clippy::redundant_clone)] #[allow(clippy::redundant_clone)]
fn test_should_ignore_file() { fn test_should_ignore_file() {
let mut inodes = HashSet::new(); let mut inodes = HashSet::new();
let n = create_node(); let n = create_node();
let walkdata = create_walker(false);
// First time we insert the node // First time we insert the node
assert_eq!( assert_eq!(clean_inodes(n.clone(), &mut inodes, false), Some(n.clone()));
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
// Second time is a duplicate - we ignore it // Second time is a duplicate - we ignore it
assert_eq!(clean_inodes(n.clone(), &mut inodes, &walkdata), None); assert_eq!(clean_inodes(n.clone(), &mut inodes, false), None);
} }
#[test] #[test]
@@ -376,53 +252,9 @@ mod tests {
fn test_should_not_ignore_files_if_using_apparent_size() { fn test_should_not_ignore_files_if_using_apparent_size() {
let mut inodes = HashSet::new(); let mut inodes = HashSet::new();
let n = create_node(); let n = create_node();
let walkdata = create_walker(true);
// If using apparent size we include Nodes, even if duplicate inodes // If using apparent size we include Nodes, even if duplicate inodes
assert_eq!( assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
clean_inodes(n.clone(), &mut inodes, &walkdata), assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
Some(n.clone())
);
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
}
#[test]
fn test_total_ordering_of_sort_by_inode() {
use std::str::FromStr;
let a = Node {
name: PathBuf::from_str("a").unwrap(),
size: 0,
children: vec![],
inode_device: Some((3, 66310)),
depth: 0,
};
let b = Node {
name: PathBuf::from_str("b").unwrap(),
size: 0,
children: vec![],
inode_device: None,
depth: 0,
};
let c = Node {
name: PathBuf::from_str("c").unwrap(),
size: 0,
children: vec![],
inode_device: Some((1, 66310)),
depth: 0,
};
assert_eq!(sort_by_inode(&a, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&a, &c), Ordering::Greater);
assert_eq!(sort_by_inode(&c, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&b, &a), Ordering::Less);
assert_eq!(sort_by_inode(&c, &a), Ordering::Less);
assert_eq!(sort_by_inode(&b, &c), Ordering::Less);
} }
} }
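One practical note on is_ignored_path above: an absolute ignore path only matches the starts_with check if it was canonicalized before being inserted into WalkData.ignore_directories. Below is a self-contained sketch of that preparation step; the add_ignored helper is hypothetical and not part of dust.

use std::collections::HashSet;
use std::fs;
use std::path::PathBuf;

// Hypothetical helper: canonicalize absolute --ignore-directory arguments
// before inserting them into the set consulted by is_ignored_path.
fn add_ignored(dirs: &mut HashSet<PathBuf>, raw: &str) {
    let p = PathBuf::from(raw);
    if p.is_absolute() {
        // Fall back to the raw path if canonicalization fails (e.g. the directory is missing).
        dirs.insert(fs::canonicalize(&p).unwrap_or(p));
    } else {
        dirs.insert(p);
    }
}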

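The aggregation rule in clean_inodes above can also be restated as a tiny standalone function, shown here purely as an illustration (not from the diff): with by_filetime set, a directory's 'size' is the newest filetime among itself and its children rather than a sum of sizes.

// Standalone illustration of the size aggregation chosen in clean_inodes.
fn dir_value(own: u64, children: &[u64], by_filetime: bool) -> u64 {
    if by_filetime {
        // Directory 'size' = newest filetime among itself and its children.
        children.iter().copied().chain(std::iter::once(own)).max().unwrap_or(0)
    } else {
        // Directory 'size' = own size plus the sum of child sizes.
        own + children.iter().sum::<u64>()
    }
}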
View File

@@ -1,5 +1,4 @@
use crate::display_node::DisplayNode; use crate::display_node::DisplayNode;
use crate::node::FileTime;
use ansi_term::Colour::Red; use ansi_term::Colour::Red;
use lscolors::{LsColors, Style}; use lscolors::{LsColors, Style};
@@ -8,26 +7,23 @@ use unicode_width::UnicodeWidthStr;
use stfu8::encode_u8; use stfu8::encode_u8;
use chrono::{DateTime, Local, TimeZone, Utc};
use std::cmp::max; use std::cmp::max;
use std::cmp::min; use std::cmp::min;
use std::fs; use std::fs;
use std::iter::repeat_n; use std::iter::repeat;
use std::path::Path; use std::path::Path;
use thousands::Separable; use thousands::Separable;
pub static UNITS: [char; 5] = ['P', 'T', 'G', 'M', 'K']; pub static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' ']; static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
const FILETIME_SHOW_LENGTH: usize = 19;
pub struct InitialDisplayData { pub struct InitialDisplayData {
pub short_paths: bool, pub short_paths: bool,
pub is_reversed: bool, pub is_reversed: bool,
pub colors_on: bool, pub colors_on: bool,
pub by_filecount: bool, pub by_filecount: bool,
pub by_filetime: Option<FileTime>,
pub is_screen_reader: bool, pub is_screen_reader: bool,
pub output_format: String, pub iso: bool,
pub bars_on_right: bool, pub bars_on_right: bool,
} }
@@ -71,7 +67,11 @@ impl DisplayData {
fn percent_size(&self, node: &DisplayNode) -> f32 { fn percent_size(&self, node: &DisplayNode) -> f32 {
let result = node.size as f32 / self.base_size as f32; let result = node.size as f32 / self.base_size as f32;
if result.is_normal() { result } else { 0.0 } if result.is_normal() {
result
} else {
0.0
}
} }
} }
@@ -125,18 +125,24 @@ impl DrawData<'_> {
pub fn draw_it( pub fn draw_it(
idd: InitialDisplayData, idd: InitialDisplayData,
root_node: &DisplayNode,
no_percent_bars: bool, no_percent_bars: bool,
terminal_width: usize, terminal_width: usize,
root_node: &DisplayNode,
skip_total: bool, skip_total: bool,
) { ) {
let biggest = match skip_total {
false => root_node,
true => root_node
.get_children_from_node(false)
.next()
.unwrap_or(root_node),
};
let num_chars_needed_on_left_most = if idd.by_filecount { let num_chars_needed_on_left_most = if idd.by_filecount {
let max_size = root_node.size; let max_size = biggest.size;
max_size.separate_with_commas().chars().count() max_size.separate_with_commas().chars().count()
} else if idd.by_filetime.is_some() {
FILETIME_SHOW_LENGTH
} else { } else {
find_biggest_size_str(root_node, &idd.output_format) find_biggest_size_str(root_node, idd.iso)
}; };
assert!( assert!(
@@ -155,12 +161,12 @@ pub fn draw_it(
allowed_width - longest_string_length - 7 allowed_width - longest_string_length - 7
}; };
let first_size_bar = repeat_n(BLOCKS[0], max_bar_length).collect(); let first_size_bar = repeat(BLOCKS[0]).take(max_bar_length).collect();
let display_data = DisplayData { let display_data = DisplayData {
initial: idd, initial: idd,
num_chars_needed_on_left_most, num_chars_needed_on_left_most,
base_size: root_node.size, base_size: biggest.size,
longest_string_length, longest_string_length,
ls_colors: LsColors::from_env().unwrap_or_default(), ls_colors: LsColors::from_env().unwrap_or_default(),
}; };
@@ -184,12 +190,10 @@ pub fn draw_it(
} }
} }
fn find_biggest_size_str(node: &DisplayNode, output_format: &str) -> usize { fn find_biggest_size_str(node: &DisplayNode, iso: bool) -> usize {
let mut mx = human_readable_number(node.size, output_format) let mut mx = human_readable_number(node.size, iso).chars().count();
.chars()
.count();
for n in node.children.iter() { for n in node.children.iter() {
mx = max(mx, find_biggest_size_str(n, output_format)); mx = max(mx, find_biggest_size_str(n, iso));
} }
mx mx
} }
@@ -269,7 +273,7 @@ fn clean_indentation_string(s: &str) -> String {
is is
} }
pub fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String { fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
let dir_name = dir_name.as_ref(); let dir_name = dir_name.as_ref();
let printable_name = { let printable_name = {
if short_paths { if short_paths {
@@ -298,9 +302,12 @@ fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &Display
); );
// Add spaces after the filename so we can draw the % used bar chart. // Add spaces after the filename so we can draw the % used bar chart.
name + " " let name_and_padding = name
.repeat(display_data.longest_string_length - width) + " "
.as_str() .repeat(display_data.longest_string_length - width)
.as_str();
name_and_padding
} }
fn maybe_trim_filename(name_in: String, indent: &str, display_data: &DisplayData) -> String { fn maybe_trim_filename(name_in: String, indent: &str, display_data: &DisplayData) -> String {
@@ -333,8 +340,6 @@ pub fn format_string(
if display_data.initial.is_screen_reader { if display_data.initial.is_screen_reader {
// if screen_reader then bars is 'depth' // if screen_reader then bars is 'depth'
format!("{pretty_name} {bars} {pretty_size}{percent}") format!("{pretty_name} {bars} {pretty_size}{percent}")
} else if display_data.initial.by_filetime.is_some() {
format!("{pretty_size} {indent}{pretty_name}")
} else { } else {
format!("{pretty_size} {indent} {pretty_name}{percent}") format!("{pretty_size} {indent} {pretty_name}{percent}")
} }
@@ -369,10 +374,8 @@ fn get_name_percent(
fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String { fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
let output = if display_data.initial.by_filecount { let output = if display_data.initial.by_filecount {
node.size.separate_with_commas() node.size.separate_with_commas()
} else if display_data.initial.by_filetime.is_some() {
get_pretty_file_modified_time(node.size as i64)
} else { } else {
human_readable_number(node.size, &display_data.initial.output_format) human_readable_number(node.size, display_data.initial.iso)
}; };
let spaces_to_add = display_data.num_chars_needed_on_left_most - output.chars().count(); let spaces_to_add = display_data.num_chars_needed_on_left_most - output.chars().count();
let output = " ".repeat(spaces_to_add) + output.as_str(); let output = " ".repeat(spaces_to_add) + output.as_str();
@@ -384,14 +387,6 @@ fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayD
} }
} }
fn get_pretty_file_modified_time(timestamp: i64) -> String {
let datetime: DateTime<Utc> = Utc.timestamp_opt(timestamp, 0).unwrap();
let local_datetime = datetime.with_timezone(&Local);
local_datetime.format("%Y-%m-%dT%H:%M:%S").to_string()
}
fn get_pretty_name( fn get_pretty_name(
node: &DisplayNode, node: &DisplayNode,
name_and_padding: String, name_and_padding: String,
@@ -403,7 +398,7 @@ fn get_pretty_name(
.ls_colors .ls_colors
.style_for_path_with_metadata(&node.name, meta_result.as_ref().ok()); .style_for_path_with_metadata(&node.name, meta_result.as_ref().ok());
let ansi_style = directory_color let ansi_style = directory_color
.map(Style::to_nu_ansi_term_style) .map(Style::to_ansi_term_style)
.unwrap_or_default(); .unwrap_or_default();
let out = ansi_style.paint(name_and_padding); let out = ansi_style.paint(name_and_padding);
format!("{out}") format!("{out}")
@@ -412,54 +407,19 @@ fn get_pretty_name(
} }
} }
// If we are working with SI units or not pub fn human_readable_number(size: u64, iso: bool) -> String {
pub fn get_type_of_thousand(output_str: &str) -> u64 {
if output_str.is_empty() {
1024
} else if output_str == "si" {
1000
} else if output_str.contains('i') || output_str.len() == 1 {
1024
} else {
1000
}
}
pub fn get_number_format(output_str: &str) -> Option<(u64, char)> {
if output_str.starts_with('b') {
return Some((1, 'B'));
}
for (i, u) in UNITS.iter().enumerate() { for (i, u) in UNITS.iter().enumerate() {
if output_str.starts_with((*u).to_ascii_lowercase()) { let num: u64 = if iso { 1000 } else { 1024 };
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32); let marker = num.pow((UNITS.len() - i) as u32);
return Some((marker, *u)); if size >= marker {
} if size / marker < 10 {
} return format!("{:.1}{}", (size as f32 / marker as f32), u);
None } else {
} return format!("{}{}", (size / marker), u);
pub fn human_readable_number(size: u64, output_str: &str) -> String {
if output_str == "count" {
return size.to_string();
};
match get_number_format(output_str) {
Some((x, u)) => {
format!("{}{}", (size / x), u)
}
None => {
for (i, u) in UNITS.iter().enumerate() {
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
if size >= marker {
if size / marker < 10 {
return format!("{:.1}{}", (size as f32 / marker as f32), u);
} else {
return format!("{}{}", (size / marker), u);
}
}
} }
format!("{size}B")
} }
} }
format!("{size}B")
} }
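To summarise the new formatting helpers: get_number_format fixes the unit when the output string names one (a leading 'b' means raw bytes), and get_type_of_thousand decides whether a thousand is 1024 or 1000, with 'si' and plain decimal suffixes like 'kb' being decimal, while an empty string, a bare letter like 'k', or an 'i' suffix like 'kib' stays binary. A rough standalone illustration of that mapping (not the project's code):

// Illustration only: the divisor implied by a few output-format strings,
// mirroring get_type_of_thousand above.
fn divisor_for(format: &str) -> u64 {
    match format {
        "si" | "kb" | "mb" | "gb" => 1000, // decimal (SI) units
        _ => 1024,                         // "", "k", "kib", "mib", ... stay binary
    }
}

fn main() {
    assert_eq!(102_400 / divisor_for("si"), 102); // rendered as "102K"
    assert_eq!(102_400 / divisor_for("kib"), 100); // rendered as "100K"
}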
mod tests { mod tests {
@@ -475,9 +435,8 @@ mod tests {
is_reversed: false, is_reversed: false,
colors_on: false, colors_on: false,
by_filecount: false, by_filecount: false,
by_filetime: None,
is_screen_reader: false, is_screen_reader: false,
output_format: "".into(), iso: false,
bars_on_right: false, bars_on_right: false,
}; };
DisplayData { DisplayData {
@@ -542,66 +501,34 @@ mod tests {
assert_eq!(s, "short 3 4.0K 100%"); assert_eq!(s, "short 3 4.0K 100%");
} }
#[test]
fn test_machine_readable_filecount() {
assert_eq!(human_readable_number(1, "count"), "1");
assert_eq!(human_readable_number(1000, "count"), "1000");
assert_eq!(human_readable_number(1024, "count"), "1024");
}
#[test] #[test]
fn test_human_readable_number() { fn test_human_readable_number() {
assert_eq!(human_readable_number(1, ""), "1B"); assert_eq!(human_readable_number(1, false), "1B");
assert_eq!(human_readable_number(956, ""), "956B"); assert_eq!(human_readable_number(956, false), "956B");
assert_eq!(human_readable_number(1004, ""), "1004B"); assert_eq!(human_readable_number(1004, false), "1004B");
assert_eq!(human_readable_number(1024, ""), "1.0K"); assert_eq!(human_readable_number(1024, false), "1.0K");
assert_eq!(human_readable_number(1536, ""), "1.5K"); assert_eq!(human_readable_number(1536, false), "1.5K");
assert_eq!(human_readable_number(1024 * 512, ""), "512K"); assert_eq!(human_readable_number(1024 * 512, false), "512K");
assert_eq!(human_readable_number(1024 * 1024, ""), "1.0M"); assert_eq!(human_readable_number(1024 * 1024, false), "1.0M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
assert_eq!( assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 234, ""), human_readable_number(1024 * 1024 * 1024 - 1, false),
"234T" "1023M"
); );
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, false), "20G");
assert_eq!( assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 1024, ""), human_readable_number(1024 * 1024 * 1024 * 1024, false),
"1.0P" "1.0T"
); );
} }
#[test]
fn test_human_readable_number_si() {
assert_eq!(human_readable_number(1024 * 100, ""), "100K");
assert_eq!(human_readable_number(1024 * 100, "si"), "102K");
}
// Refer to https://en.wikipedia.org/wiki/Byte#Multiple-byte_units
#[test]
fn test_human_readable_number_kb() {
let hrn = human_readable_number;
assert_eq!(hrn(1023, "b"), "1023B");
assert_eq!(hrn(1000 * 1000, "bytes"), "1000000B");
assert_eq!(hrn(1023, "kb"), "1K");
assert_eq!(hrn(1023, "k"), "0K");
assert_eq!(hrn(1023, "kib"), "0K");
assert_eq!(hrn(1024, "kib"), "1K");
assert_eq!(hrn(1024 * 512, "kib"), "512K");
assert_eq!(hrn(1024 * 1024, "kib"), "1024K");
assert_eq!(hrn(1024 * 1000 * 1000 * 20, "kib"), "20000000K");
assert_eq!(hrn(1024 * 1024 * 1000 * 20, "mib"), "20000M");
assert_eq!(hrn(1024 * 1024 * 1024 * 20, "gib"), "20G");
}
#[cfg(test)] #[cfg(test)]
fn build_draw_data(disp: &DisplayData, size: u32) -> (DrawData<'_>, DisplayNode) { fn build_draw_data<'a>(disp: &'a DisplayData, size: u32) -> (DrawData<'a>, DisplayNode) {
let n = DisplayNode { let n = DisplayNode {
name: PathBuf::from("/short"), name: PathBuf::from("/short"),
size: 2_u64.pow(size), size: 2_u64.pow(size),
children: vec![], children: vec![],
}; };
let first_size_bar = repeat_n(BLOCKS[0], 13).collect(); let first_size_bar = repeat(BLOCKS[0]).take(13).collect();
let dd = DrawData { let dd = DrawData {
indent: "".into(), indent: "".into(),
percent_bar: first_size_bar, percent_bar: first_size_bar,
@@ -647,37 +574,4 @@ mod tests {
let bar = dd.generate_bar(&n, 5); let bar = dd.generate_bar(&n, 5);
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓"); assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
} }
#[test]
fn test_get_pretty_file_modified_time() {
// Create a timestamp for 2023-07-12 00:00:00 in local time
let local_dt = Local.with_ymd_and_hms(2023, 7, 12, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
// Format expected output
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test another timestamp
let local_dt = Local.with_ymd_and_hms(2020, 1, 1, 12, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test timestamp for epoch start (1970-01-01T00:00:00)
let local_dt = Local.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test a future timestamp
let local_dt = Local.with_ymd_and_hms(2030, 12, 25, 6, 30, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
}
} }

View File

@@ -1,11 +1,5 @@
use std::cell::RefCell;
use std::path::PathBuf; use std::path::PathBuf;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
use crate::display::human_readable_number;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct DisplayNode { pub struct DisplayNode {
// Note: the order of fields is important here, for PartialEq and PartialOrd
@@ -29,30 +23,3 @@ impl DisplayNode {
out out
} }
} }
// Only used for -j 'json' flag combined with -o 'output_type' flag
// Used to pass the output_type into the custom Serde serializer
thread_local! {
pub static OUTPUT_TYPE: RefCell<String> = const { RefCell::new(String::new()) };
}
/*
We need the custom Serialize in case someone uses the -o flag to pass in a custom output type
(show size in MB / GB etc.).
Sadly this also necessitates a global variable, OUTPUT_TYPE, as we cannot pass the output_type flag
into the serialize method.
*/
impl Serialize for DisplayNode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let readable_size = OUTPUT_TYPE
.with(|output_type| human_readable_number(self.size, output_type.borrow().as_str()));
let mut state = serializer.serialize_struct("DisplayNode", 2)?;
state.serialize_field("size", &(readable_size))?;
state.serialize_field("name", &self.name)?;
state.serialize_field("children", &self.children)?;
state.end()
}
}
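A standalone sketch of the pattern that comment describes, a thread-local format string consulted from a custom Serialize impl, using made-up types (Item, FORMAT, render) rather than the project's DisplayNode; it assumes serde and serde_json are available, as they are in this diff's Cargo.toml:

use serde::ser::{Serialize, SerializeStruct, Serializer};
use std::cell::RefCell;

thread_local! {
    static FORMAT: RefCell<String> = RefCell::new(String::new());
}

struct Item {
    size: u64,
}

impl Serialize for Item {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Read the per-thread format that was set before serde_json was called.
        let pretty = FORMAT.with(|f| render(self.size, &f.borrow()));
        let mut state = serializer.serialize_struct("Item", 1)?;
        state.serialize_field("size", &pretty)?;
        state.end()
    }
}

fn render(size: u64, format: &str) -> String {
    if format == "mb" {
        format!("{}M", size / 1_000_000)
    } else {
        size.to_string()
    }
}

fn main() {
    FORMAT.with(|f| *f.borrow_mut() = "mb".to_string());
    // Prints {"size":"3M"} because the serializer consulted the thread-local.
    println!("{}", serde_json::to_string(&Item { size: 3_000_000 }).unwrap());
}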

View File

@@ -1,12 +1,7 @@
use stfu8::encode_u8;
use crate::display::get_printable_name;
use crate::display_node::DisplayNode; use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node; use crate::node::Node;
use std::collections::BinaryHeap; use std::collections::BinaryHeap;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@@ -17,58 +12,40 @@ pub struct AggregateData {
pub number_of_lines: usize, pub number_of_lines: usize,
pub depth: usize, pub depth: usize,
pub using_a_filter: bool, pub using_a_filter: bool,
pub short_paths: bool,
} }
pub fn get_biggest( pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> Option<DisplayNode> {
top_level_nodes: Vec<Node>, if top_level_nodes.is_empty() {
display_data: AggregateData, // perhaps change this, bring back Error object?
by_filetime: &Option<FileTime>, return None;
keep_collapsed: HashSet<PathBuf>, }
) -> DisplayNode {
let mut heap = BinaryHeap::new(); let mut heap = BinaryHeap::new();
let number_top_level_nodes = top_level_nodes.len(); let number_top_level_nodes = top_level_nodes.len();
let root; let root;
if number_top_level_nodes == 0 { if number_top_level_nodes > 1 {
root = total_node_builder(0, vec![]) let size = top_level_nodes.iter().map(|node| node.size).sum();
} else if number_top_level_nodes > 1 { root = Node {
let size = if by_filetime.is_some() { name: PathBuf::from("(total)"),
top_level_nodes size,
.iter() children: top_level_nodes,
.map(|node| node.size) inode_device: None,
.max() depth: 0,
.unwrap_or(0)
} else {
top_level_nodes.iter().map(|node| node.size).sum()
}; };
// Always include the base nodes if we add a 'parent' (total) node
let nodes = handle_duplicate_top_level_names(top_level_nodes, display_data.short_paths);
root = total_node_builder(size, nodes);
heap = always_add_children(&display_data, &root, heap); heap = always_add_children(&display_data, &root, heap);
} else { } else {
root = top_level_nodes.into_iter().next().unwrap(); root = top_level_nodes.into_iter().next().unwrap();
heap = add_children(&display_data, &root, heap); heap = add_children(&display_data, &root, heap);
} }
fill_remaining_lines(heap, &root, display_data, keep_collapsed) Some(fill_remaining_lines(heap, &root, display_data))
}
fn total_node_builder(size: u64, children: Vec<Node>) -> Node {
Node {
name: PathBuf::from("(total)"),
size,
children,
inode_device: None,
depth: 0,
}
} }
pub fn fill_remaining_lines<'a>( pub fn fill_remaining_lines<'a>(
mut heap: BinaryHeap<&'a Node>, mut heap: BinaryHeap<&'a Node>,
root: &'a Node, root: &'a Node,
display_data: AggregateData, display_data: AggregateData,
keep_collapsed: HashSet<PathBuf>,
) -> DisplayNode { ) -> DisplayNode {
let mut allowed_nodes = HashMap::new(); let mut allowed_nodes = HashMap::new();
@@ -76,14 +53,10 @@ pub fn fill_remaining_lines<'a>(
let line = heap.pop(); let line = heap.pop();
match line { match line {
Some(line) => { Some(line) => {
// If we are not doing only_file OR if we are doing
// only_file and it has no children (i.e. it is a file, not a dir)
if !display_data.only_file || line.children.is_empty() { if !display_data.only_file || line.children.is_empty() {
allowed_nodes.insert(line.name.as_path(), line); allowed_nodes.insert(line.name.as_path(), line);
} }
if !keep_collapsed.contains(&line.name) { heap = add_children(&display_data, line, heap);
heap = add_children(&display_data, line, heap);
}
} }
None => break, None => break,
} }
@@ -141,7 +114,7 @@ fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) ->
.map(|c| recursive_rebuilder(allowed_nodes, c)) .map(|c| recursive_rebuilder(allowed_nodes, c))
.collect(); .collect();
build_display_node(new_children, current) build_node(new_children, current)
} }
// Applies all allowed nodes as children to current node // Applies all allowed nodes as children to current node
@@ -154,10 +127,10 @@ fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> Displ
children: vec![], children: vec![],
}) })
.collect::<Vec<DisplayNode>>(); .collect::<Vec<DisplayNode>>();
build_display_node(new_children, current) build_node(new_children, current)
} }
fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode { fn build_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse()); new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
DisplayNode { DisplayNode {
name: current.name.clone(), name: current.name.clone(),
@@ -165,57 +138,3 @@ fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> Dis
children: new_children, children: new_children,
} }
} }
fn names_have_dup(top_level_nodes: &Vec<Node>) -> bool {
let mut stored = HashSet::new();
for node in top_level_nodes {
let name = get_printable_name(&node.name, true);
if stored.contains(&name) {
return true;
}
stored.insert(name);
}
false
}
fn handle_duplicate_top_level_names(top_level_nodes: Vec<Node>, short_paths: bool) -> Vec<Node> {
// If we have top level names that are the same - we need to tweak them:
if short_paths && names_have_dup(&top_level_nodes) {
let mut new_top_nodes = top_level_nodes.clone();
let mut dir_walk_up_count = 0;
while names_have_dup(&new_top_nodes) && dir_walk_up_count < 10 {
dir_walk_up_count += 1;
let mut newer = vec![];
for node in new_top_nodes.iter() {
let mut folders = node.name.iter().rev();
// Get parent folder (if second time round get grandparent and so on)
for _ in 0..dir_walk_up_count {
folders.next();
}
match folders.next() {
// Add (parent_name) to path of Node
Some(data) => {
let parent = encode_u8(data.as_encoded_bytes());
let current_node = node.name.display();
let n = Node {
name: PathBuf::from(format!("{current_node}({parent})")),
size: node.size,
children: node.children.clone(),
inode_device: node.inode_device,
depth: node.depth,
};
newer.push(n)
}
// Node does not have a parent
None => newer.push(node.clone()),
}
}
new_top_nodes = newer;
}
new_top_nodes
} else {
top_level_nodes
}
}
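To make the intent concrete: asking dust to scan a/src and b/src would show two identical rows named src when short paths are on, so handle_duplicate_top_level_names appends parent components until the names differ. A rough standalone sketch of a single walk-up step (the real code retries with grandparents and preserves the other Node fields):

use std::path::{Path, PathBuf};

// Append the parent component's name, e.g. "a/src" -> "a/src(a)", so two
// top-level entries that would both display as "src" become distinguishable.
fn disambiguate(path: &Path) -> PathBuf {
    let parent = path
        .iter()
        .rev()
        .nth(1)
        .map(|p| p.to_string_lossy().into_owned())
        .unwrap_or_default();
    PathBuf::from(format!("{}({})", path.display(), parent))
}

fn main() {
    assert_eq!(disambiguate(Path::new("a/src")), PathBuf::from("a/src(a)"));
    assert_eq!(disambiguate(Path::new("b/src")), PathBuf::from("b/src(b)"));
}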

View File

@@ -1,5 +1,4 @@
use crate::display_node::DisplayNode; use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node; use crate::node::Node;
use std::collections::HashMap; use std::collections::HashMap;
use std::ffi::OsStr; use std::ffi::OsStr;
@@ -11,11 +10,7 @@ struct ExtensionNode<'a> {
extension: Option<&'a OsStr>, extension: Option<&'a OsStr>,
} }
pub fn get_all_file_types( pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayNode> {
top_level_nodes: &[Node],
n: usize,
by_filetime: &Option<FileTime>,
) -> DisplayNode {
let ext_nodes = { let ext_nodes = {
let mut extension_cumulative_sizes = HashMap::new(); let mut extension_cumulative_sizes = HashMap::new();
build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes); build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
@@ -49,29 +44,20 @@ pub fn get_all_file_types(
// ...then, aggregate the remaining nodes (if any) into a single "(others)" node // ...then, aggregate the remaining nodes (if any) into a single "(others)" node
if ext_nodes_iter.len() > 0 { if ext_nodes_iter.len() > 0 {
let actual_size = if by_filetime.is_some() {
ext_nodes_iter.map(|node| node.size).max().unwrap_or(0)
} else {
ext_nodes_iter.map(|node| node.size).sum()
};
displayed.push(DisplayNode { displayed.push(DisplayNode {
name: PathBuf::from("(others)"), name: PathBuf::from("(others)"),
size: actual_size, size: ext_nodes_iter.map(|node| node.size).sum(),
children: vec![], children: vec![],
}); });
} }
let actual_size: u64 = if by_filetime.is_some() { let result = DisplayNode {
displayed.iter().map(|node| node.size).max().unwrap_or(0) name: PathBuf::from("(total)"),
} else { size: displayed.iter().map(|node| node.size).sum(),
displayed.iter().map(|node| node.size).sum() children: displayed,
}; };
DisplayNode { Some(result)
name: PathBuf::from("(total)"),
size: actual_size,
children: displayed,
}
} }
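The behavioural change in this file is the aggregate used for the file-time modes: when node sizes hold timestamps rather than byte counts, the (others) and (total) rows take the maximum, i.e. the newest time, because summing mtimes would produce a meaningless date. A tiny illustration with made-up timestamps:

// In the file-time modes the "size" of a group is its newest timestamp.
fn main() {
    let mtimes: [u64; 3] = [1_650_000_000, 1_700_000_000, 1_690_000_000];
    let aggregate = mtimes.iter().copied().max().unwrap_or(0);
    assert_eq!(aggregate, 1_700_000_000); // not the nonsensical sum of all three
}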
fn build_by_all_file_types<'a>( fn build_by_all_file_types<'a>(

View File

@@ -10,101 +10,88 @@ mod platform;
mod progress; mod progress;
mod utils; mod utils;
use crate::cli::Cli; use crate::cli::build_cli;
use crate::config::Config;
use crate::display_node::DisplayNode;
use crate::progress::RuntimeErrors; use crate::progress::RuntimeErrors;
use clap::Parser; use clap::parser::ValuesRef;
use dir_walker::WalkData; use dir_walker::WalkData;
use display::InitialDisplayData; use display::InitialDisplayData;
use filter::AggregateData; use filter::AggregateData;
use progress::PIndicator; use progress::PIndicator;
use regex::Error; use regex::Error;
use std::collections::HashSet; use std::collections::HashSet;
use std::env; use std::fs::read_to_string;
use std::fs::{read, read_to_string};
use std::io;
use std::io::Read;
use std::panic; use std::panic;
use std::process; use std::process;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use sysinfo::System; use sysinfo::{System, SystemExt};
use utils::canonicalize_absolute_path;
use self::display::draw_it; use self::display::draw_it;
use config::get_config; use config::get_config;
use dir_walker::walk_it; use dir_walker::walk_it;
use display_node::OUTPUT_TYPE;
use filter::get_biggest; use filter::get_biggest;
use filter_type::get_all_file_types; use filter_type::get_all_file_types;
use regex::Regex; use regex::Regex;
use std::cmp::max; use std::cmp::max;
use std::path::PathBuf; use std::path::PathBuf;
use terminal_size::{Height, Width, terminal_size}; use terminal_size::{terminal_size, Height, Width};
use utils::get_filesystem_devices; use utils::get_filesystem_devices;
use utils::simplify_dir_names; use utils::simplify_dir_names;
static DEFAULT_NUMBER_OF_LINES: usize = 30; static DEFAULT_NUMBER_OF_LINES: usize = 30;
static DEFAULT_TERMINAL_WIDTH: usize = 80; static DEFAULT_TERMINAL_WIDTH: usize = 80;
fn should_init_color(no_color: bool, force_color: bool) -> bool { fn init_color(no_color: bool) -> bool {
if force_color {
return true;
}
if no_color {
return false;
}
// check if NO_COLOR is set
// https://no-color.org/
if env::var_os("NO_COLOR").is_some() {
return false;
}
if terminal_size().is_none() {
// we are not in a terminal, color may not be needed
return false;
}
// we are in a terminal
#[cfg(windows)] #[cfg(windows)]
{ {
// Required for windows 10 // If no color is already set do not print a warning message
// Fails to resolve for windows 8 so disable color if no_color {
match ansi_term::enable_ansi_support() { true
Ok(_) => true, } else {
Err(_) => { // Required for windows 10
eprintln!("This version of Windows does not support ANSI colors"); // Fails to resolve for windows 8 so disable color
false match ansi_term::enable_ansi_support() {
Ok(_) => no_color,
Err(_) => {
eprintln!(
"This version of Windows does not support ANSI colors, setting no_color flag"
);
true
}
} }
} }
} }
#[cfg(not(windows))] #[cfg(not(windows))]
{ {
true no_color
} }
} }
fn get_height_of_terminal() -> usize { fn get_height_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size() terminal_size()
// Windows CI runners detect a terminal height of 0 // Windows CI runners detect a terminal height of 0
.map(|(_, Height(h))| max(h.into(), DEFAULT_NUMBER_OF_LINES)) .map(|(_, Height(h))| max(h as usize, DEFAULT_NUMBER_OF_LINES))
.unwrap_or(DEFAULT_NUMBER_OF_LINES) .unwrap_or(DEFAULT_NUMBER_OF_LINES)
- 10 - 10
} }
fn get_width_of_terminal() -> usize { fn get_width_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size() terminal_size()
.map(|(Width(w), _)| match cfg!(windows) { .map(|(Width(w), _)| match cfg!(windows) {
// Windows CI runners detect a very low terminal width // Windows CI runners detect a very low terminal width
true => max(w.into(), DEFAULT_TERMINAL_WIDTH), true => max(w as usize, DEFAULT_TERMINAL_WIDTH),
false => w.into(), false => w as usize,
}) })
.unwrap_or(DEFAULT_TERMINAL_WIDTH) .unwrap_or(DEFAULT_TERMINAL_WIDTH)
} }
fn get_regex_value(maybe_value: Option<&Vec<String>>) -> Vec<Regex> { fn get_regex_value(maybe_value: Option<ValuesRef<String>>) -> Vec<Regex> {
maybe_value maybe_value
.unwrap_or(&Vec::new()) .unwrap_or_default()
.iter()
.map(|reg| { .map(|reg| {
Regex::new(reg).unwrap_or_else(|err| { Regex::new(reg).unwrap_or_else(|err| {
eprintln!("Ignoring bad value for regex {err:?}"); eprintln!("Ignoring bad value for regex {err:?}");
@@ -115,37 +102,21 @@ fn get_regex_value(maybe_value: Option<&Vec<String>>) -> Vec<Regex> {
} }
fn main() { fn main() {
let options = Cli::parse(); let options = build_cli().get_matches();
let config = get_config(options.config.as_ref()); let config = get_config();
let errors = RuntimeErrors::default(); let target_dirs = match options.get_many::<String>("params") {
let error_listen_for_ctrlc = Arc::new(Mutex::new(errors)); Some(values) => values.map(|v| v.as_str()).collect::<Vec<&str>>(),
let errors_for_rayon = error_listen_for_ctrlc.clone(); None => vec!["."],
ctrlc::set_handler(move || {
println!("\nAborting");
process::exit(1);
})
.expect("Error setting Ctrl-C handler");
let target_dirs = if let Some(path) = config.get_files0_from(&options) {
read_paths_from_source(&path, true)
} else if let Some(path) = config.get_files_from(&options) {
read_paths_from_source(&path, false)
} else {
match options.params {
Some(ref values) => values.clone(),
None => vec![".".to_owned()],
}
}; };
let summarize_file_types = options.file_types; let summarize_file_types = options.get_flag("types");
let filter_regexs = get_regex_value(options.filter.as_ref()); let filter_regexs = get_regex_value(options.get_many("filter"));
let invert_filter_regexs = get_regex_value(options.invert_filter.as_ref()); let invert_filter_regexs = get_regex_value(options.get_many("invert_filter"));
let terminal_width: usize = match options.terminal_width { let terminal_width: usize = match options.get_one::<usize>("width") {
Some(val) => val, Some(&val) => val,
None => get_width_of_terminal(), None => get_width_of_terminal(),
}; };
@@ -154,8 +125,8 @@ fn main() {
// If depth is set, then we set the default number_of_lines to be max // If depth is set, then we set the default number_of_lines to be max
// instead of screen height // instead of screen height
let number_of_lines = match config.get_number_of_lines(&options) { let number_of_lines = match options.get_one::<usize>("number_of_lines") {
Some(val) => val, Some(&val) => val,
None => { None => {
if depth != usize::MAX { if depth != usize::MAX {
usize::MAX usize::MAX
@@ -165,22 +136,18 @@ fn main() {
} }
}; };
let is_colors = should_init_color( let no_colors = init_color(config.get_no_colors(&options));
config.get_no_colors(&options),
config.get_force_colors(&options),
);
let ignore_directories = match options.ignore_directory { let ignore_directories = match options.get_many::<String>("ignore_directory") {
Some(ref values) => values Some(values) => values
.iter() .map(|v| v.as_str())
.map(PathBuf::from) .map(PathBuf::from)
.map(canonicalize_absolute_path)
.collect::<Vec<PathBuf>>(), .collect::<Vec<PathBuf>>(),
None => vec![], None => vec![],
}; };
let ignore_from_file_result = match options.ignore_all_in_file { let ignore_from_file_result = match options.get_one::<String>("ignore_all_in_file") {
Some(ref val) => read_to_string(val) Some(val) => read_to_string(val)
.unwrap() .unwrap()
.lines() .lines()
.map(Regex::new) .map(Regex::new)
@@ -197,158 +164,66 @@ fn main() {
.chain(ignore_from_file) .chain(ignore_from_file)
.collect::<Vec<Regex>>(); .collect::<Vec<Regex>>();
let by_filecount = options.filecount; let by_filecount = options.get_flag("by_filecount");
let by_filetime = config.get_filetime(&options); let limit_filesystem = options.get_flag("limit_filesystem");
let limit_filesystem = options.limit_filesystem; let follow_links = options.get_flag("dereference_links");
let follow_links = options.dereference_links;
let allowed_filesystems = if limit_filesystem { let simplified_dirs = simplify_dir_names(target_dirs);
get_filesystem_devices(&target_dirs, follow_links) let allowed_filesystems = limit_filesystem
} else { .then(|| get_filesystem_devices(simplified_dirs.iter()))
Default::default() .unwrap_or_default();
};
let simplified_dirs = simplify_dir_names(&target_dirs);
let ignored_full_path: HashSet<PathBuf> = ignore_directories let ignored_full_path: HashSet<PathBuf> = ignore_directories
.into_iter() .into_iter()
.flat_map(|x| simplified_dirs.iter().map(move |d| d.join(&x))) .flat_map(|x| simplified_dirs.iter().map(move |d| d.join(&x)))
.collect(); .collect();
let output_format = config.get_output_format(&options); let iso = config.get_iso(&options);
let ignore_hidden = config.get_ignore_hidden(&options); let ignore_hidden = config.get_ignore_hidden(&options);
let mut indicator = PIndicator::build_me(); let mut indicator = PIndicator::build_me();
if !config.get_disable_progress(&options) { if !config.get_disable_progress(&options) {
indicator.spawn(output_format.clone()) indicator.spawn(iso);
} }
let keep_collapsed: HashSet<PathBuf> = match options.collapse {
Some(ref collapse) => {
let mut combined_dirs = HashSet::new();
for collapse_dir in collapse {
for target_dir in target_dirs.iter() {
combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
}
}
combined_dirs
}
None => HashSet::new(),
};
let filter_modified_time = config.get_modified_time_operator(&options);
let filter_accessed_time = config.get_accessed_time_operator(&options);
let filter_changed_time = config.get_changed_time_operator(&options);
let walk_data = WalkData { let walk_data = WalkData {
ignore_directories: ignored_full_path, ignore_directories: ignored_full_path,
filter_regex: &filter_regexs, filter_regex: &filter_regexs,
invert_filter_regex: &invert_filter_regexs, invert_filter_regex: &invert_filter_regexs,
allowed_filesystems, allowed_filesystems,
filter_modified_time,
filter_accessed_time,
filter_changed_time,
use_apparent_size: config.get_apparent_size(&options), use_apparent_size: config.get_apparent_size(&options),
by_filecount, by_filecount,
by_filetime: &by_filetime,
ignore_hidden, ignore_hidden,
follow_links, follow_links,
progress_data: indicator.data.clone(), progress_data: indicator.data.clone(),
errors: errors_for_rayon, errors: Arc::new(Mutex::new(RuntimeErrors::default())),
};
let stack_size = config.get_custom_stack_size(&options);
init_rayon(&stack_size);
let top_level_nodes = walk_it(simplified_dirs, &walk_data);
let tree = match summarize_file_types {
true => get_all_file_types(&top_level_nodes, number_of_lines),
false => {
let agg_data = AggregateData {
min_size: config.get_min_size(&options, iso),
only_dir: config.get_only_dir(&options),
only_file: config.get_only_file(&options),
number_of_lines,
depth,
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
};
get_biggest(top_level_nodes, agg_data)
}
}; };
let threads_to_use = config.get_threads(&options); // Must have stopped indicator before we print to stderr
let stack_size = config.get_custom_stack_size(&options); indicator.stop();
init_rayon(&stack_size, &threads_to_use).install(|| { let final_errors = walk_data.errors.lock().unwrap();
let top_level_nodes = walk_it(simplified_dirs, &walk_data); let failed_permissions = final_errors.no_permissions;
let tree = match summarize_file_types {
true => get_all_file_types(&top_level_nodes, number_of_lines, walk_data.by_filetime),
false => {
let agg_data = AggregateData {
min_size: config.get_min_size(&options),
only_dir: config.get_only_dir(&options),
only_file: config.get_only_file(&options),
number_of_lines,
depth,
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
short_paths: !config.get_full_paths(&options),
};
get_biggest(
top_level_nodes,
agg_data,
walk_data.by_filetime,
keep_collapsed,
)
}
};
// Must have stopped indicator before we print to stderr
indicator.stop();
let print_errors = config.get_print_errors(&options);
let final_errors = walk_data.errors.lock().unwrap();
print_any_errors(print_errors, &final_errors);
if tree.children.is_empty() && !final_errors.file_not_found.is_empty() {
std::process::exit(1)
} else {
print_output(
config,
options,
tree,
walk_data.by_filecount,
is_colors,
terminal_width,
)
}
});
}
fn print_output(
config: Config,
options: Cli,
tree: DisplayNode,
by_filecount: bool,
is_colors: bool,
terminal_width: usize,
) {
let output_format = config.get_output_format(&options);
if config.get_output_json(&options) {
OUTPUT_TYPE.with(|wrapped| {
if by_filecount {
wrapped.replace("count".to_string());
} else {
wrapped.replace(output_format);
}
});
println!("{}", serde_json::to_string(&tree).unwrap());
} else {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
by_filetime: config.get_filetime(&options),
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
draw_it(
idd,
&tree,
config.get_no_bars(&options),
terminal_width,
config.get_skip_total(&options),
)
}
}
fn print_any_errors(print_errors: bool, final_errors: &RuntimeErrors) {
if !final_errors.file_not_found.is_empty() { if !final_errors.file_not_found.is_empty() {
let err = final_errors let err = final_errors
.file_not_found .file_not_found
@@ -356,22 +231,10 @@ fn print_any_errors(print_errors: bool, final_errors: &RuntimeErrors) {
.map(|a| a.as_ref()) .map(|a| a.as_ref())
.collect::<Vec<&str>>() .collect::<Vec<&str>>()
.join(", "); .join(", ");
eprintln!("No such file or directory: {err}"); eprintln!("No such file or directory: {}", err);
} }
if !final_errors.no_permissions.is_empty() { if failed_permissions {
if print_errors { eprintln!("Did not have permissions for all directories");
let err = final_errors
.no_permissions
.iter()
.map(|a| a.as_ref())
.collect::<Vec<&str>>()
.join(", ");
eprintln!("Did not have permissions for directories: {err}");
} else {
eprintln!(
"Did not have permissions for all directories (add --print-errors to see errors)"
);
}
} }
if !final_errors.unknown_error.is_empty() { if !final_errors.unknown_error.is_empty() {
let err = final_errors let err = final_errors
@@ -380,105 +243,57 @@ fn print_any_errors(print_errors: bool, final_errors: &RuntimeErrors) {
.map(|a| a.as_ref()) .map(|a| a.as_ref())
.collect::<Vec<&str>>() .collect::<Vec<&str>>()
.join(", "); .join(", ");
eprintln!("Unknown Error: {err}"); eprintln!("Unknown Error: {}", err);
}
if let Some(root_node) = tree {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: !no_colors,
by_filecount,
iso,
is_screen_reader: config.get_screen_reader(&options),
bars_on_right: config.get_bars_on_right(&options),
};
draw_it(
idd,
config.get_no_bars(&options),
terminal_width,
&root_node,
config.get_skip_total(&options),
)
} }
} }
fn read_paths_from_source(path: &str, null_terminated: bool) -> Vec<String> { fn init_rayon(stack_size: &Option<usize>) {
let from_stdin = path == "-"; // Rayon seems to raise this error on 32-bit builds
// The global thread pool has not been initialized.: ThreadPoolBuildError { kind: GlobalPoolAlreadyInitialized }
if cfg!(target_pointer_width = "64") {
let result = panic::catch_unwind(|| {
match stack_size {
Some(n) => rayon::ThreadPoolBuilder::new()
.stack_size(*n)
.build_global(),
None => {
let large_stack = usize::pow(1024, 3);
let mut s = System::new();
s.refresh_memory();
let available = s.available_memory();
let result: Result<Vec<String>, Option<String>> = (|| { if available > large_stack.try_into().unwrap() {
// 1) read bytes // Larger stack size to handle cases with lots of nested directories
let bytes = if from_stdin { rayon::ThreadPoolBuilder::new()
let mut b = Vec::new(); .stack_size(large_stack)
io::stdin().lock().read_to_end(&mut b).map_err(|_| None)?; .build_global()
b } else {
} else { rayon::ThreadPoolBuilder::new().build_global()
read(path).map_err(|e| Some(e.to_string()))? }
};
let text = std::str::from_utf8(&bytes).map_err(|e| {
if from_stdin {
None
} else {
Some(e.to_string())
}
})?;
let items: Vec<String> = if null_terminated {
text.split('\0')
.filter(|s| !s.is_empty())
.map(str::to_owned)
.collect()
} else {
text.lines().map(str::to_owned).collect()
};
if from_stdin && items.is_empty() {
return Err(None);
}
Ok(items)
})();
match result {
Ok(v) => v,
Err(None) => {
eprintln!("No files provided, defaulting to current directory");
vec![".".to_owned()]
}
Err(Some(msg)) => {
eprintln!("Failed to read file: {msg}");
vec![".".to_owned()]
}
}
}
fn init_rayon(stack: &Option<usize>, threads: &Option<usize>) -> rayon::ThreadPool {
let stack_size = match stack {
Some(s) => Some(*s),
None => {
// Do not increase the stack size on a 32 bit system, it will fail
if cfg!(target_pointer_width = "32") {
None
} else {
let large_stack = usize::pow(1024, 3);
let mut sys = System::new_all();
sys.refresh_memory();
// Larger stack size if possible to handle cases with lots of nested directories
let available = sys.available_memory();
if available > (large_stack * threads.unwrap_or(1)).try_into().unwrap() {
Some(large_stack)
} else {
None
} }
} }
} });
}; if result.is_err() {
eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1")
match build_thread_pool(stack_size, threads) {
Ok(pool) => pool,
Err(err) => {
eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1");
if stack.is_none() && stack_size.is_some() {
// stack parameter was none, try with default stack size
if let Ok(pool) = build_thread_pool(None, threads) {
eprintln!("WARNING: not using large stack size, got error: {err}");
return pool;
}
}
panic!("{err}");
} }
} }
} }
fn build_thread_pool(
stack_size: Option<usize>,
threads: &Option<usize>,
) -> Result<rayon::ThreadPool, rayon::ThreadPoolBuildError> {
let mut pool_builder = rayon::ThreadPoolBuilder::new();
if let Some(stack_size_param) = stack_size {
pool_builder = pool_builder.stack_size(stack_size_param);
}
if let Some(thread_count) = threads {
pool_builder = pool_builder.num_threads(*thread_count);
}
pool_builder.build()
}
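read_paths_from_source above takes target paths from a file, or from stdin when the path is '-', and splits them either on NUL bytes (matching what find ... -print0 emits) or on newlines. A standalone sketch of just the splitting step:

// Split a blob of paths the same two ways the helper above does:
// NUL-terminated entries versus one path per line.
fn split_paths(text: &str, null_terminated: bool) -> Vec<String> {
    if null_terminated {
        text.split('\0')
            .filter(|s| !s.is_empty())
            .map(str::to_owned)
            .collect()
    } else {
        text.lines().map(str::to_owned).collect()
    }
}

fn main() {
    assert_eq!(split_paths("a\0b\0", true), vec!["a", "b"]);
    assert_eq!(split_paths("a\nb\n", false), vec!["a", "b"]);
}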

View File

@@ -1,9 +1,8 @@
use crate::dir_walker::WalkData;
use crate::platform::get_metadata; use crate::platform::get_metadata;
use crate::utils::is_filtered_out_due_to_file_time;
use crate::utils::is_filtered_out_due_to_invert_regex; use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex; use crate::utils::is_filtered_out_due_to_regex;
use regex::Regex;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::path::PathBuf; use std::path::PathBuf;
@@ -16,66 +15,33 @@ pub struct Node {
pub depth: usize, pub depth: usize,
} }
#[derive(Debug, PartialEq)]
pub enum FileTime {
Modified,
Accessed,
Changed,
}
impl From<crate::cli::FileTime> for FileTime {
fn from(time: crate::cli::FileTime) -> Self {
match time {
crate::cli::FileTime::Modified => Self::Modified,
crate::cli::FileTime::Accessed => Self::Accessed,
crate::cli::FileTime::Changed => Self::Changed,
}
}
}
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn build_node( pub fn build_node(
dir: PathBuf, dir: PathBuf,
children: Vec<Node>, children: Vec<Node>,
filter_regex: &[Regex],
invert_filter_regex: &[Regex],
use_apparent_size: bool,
is_symlink: bool, is_symlink: bool,
is_file: bool, is_file: bool,
by_filecount: bool,
depth: usize, depth: usize,
walk_data: &WalkData,
) -> Option<Node> { ) -> Option<Node> {
let use_apparent_size = walk_data.use_apparent_size; get_metadata(&dir, use_apparent_size).map(|data| {
let by_filecount = walk_data.by_filecount; let inode_device = if is_symlink && !use_apparent_size {
let by_filetime = &walk_data.by_filetime; None
} else {
data.1
};
get_metadata( let size = if is_filtered_out_due_to_regex(filter_regex, &dir)
&dir, || is_filtered_out_due_to_invert_regex(invert_filter_regex, &dir)
use_apparent_size, || (is_symlink && !use_apparent_size)
walk_data.follow_links && is_symlink,
)
.map(|data| {
let inode_device = data.1;
let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
|| is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
|| by_filecount && !is_file || by_filecount && !is_file
|| [ {
(&walk_data.filter_modified_time, data.2.0),
(&walk_data.filter_accessed_time, data.2.1),
(&walk_data.filter_changed_time, data.2.2),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
}) {
0 0
} else if by_filecount { } else if by_filecount {
1 1
} else if by_filetime.is_some() {
match by_filetime {
Some(FileTime::Modified) => data.2.0.unsigned_abs(),
Some(FileTime::Accessed) => data.2.1.unsigned_abs(),
Some(FileTime::Changed) => data.2.2.unsigned_abs(),
None => unreachable!(),
}
} else { } else {
data.0 data.0
}; };

View File

@@ -10,52 +10,15 @@ fn get_block_size() -> u64 {
512 512
} }
type InodeAndDevice = (u64, u64);
type FileTime = (i64, i64, i64);
#[cfg(target_family = "unix")] #[cfg(target_family = "unix")]
pub fn get_metadata<P: AsRef<Path>>( pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
use std::os::unix::fs::MetadataExt; use std::os::unix::fs::MetadataExt;
let metadata = if follow_links { match d.metadata() {
path.as_ref().metadata()
} else {
path.as_ref().symlink_metadata()
};
match metadata {
Ok(md) => { Ok(md) => {
let file_size = md.len();
if use_apparent_size { if use_apparent_size {
Some(( Some((md.len(), Some((md.ino(), md.dev()))))
file_size,
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
} else { } else {
// On NTFS mounts, the reported block count can be unexpectedly large. Some((md.blocks() * get_block_size(), Some((md.ino(), md.dev()))))
// To avoid overestimating disk usage, cap the allocated size to what the
// file should occupy based on the file system I/O block size (blksize).
// Related: https://github.com/bootandy/dust/issues/295
let blksize = md.blksize();
let target_size = file_size.div_ceil(blksize) * blksize;
let reported_size = md.blocks() * get_block_size();
// File systems can pre-allocate more space for a file than what would be necessary
let pre_allocation_buffer = blksize * 65536;
let max_size = target_size + pre_allocation_buffer;
let allocated_size = if reported_size > max_size {
target_size
} else {
reported_size
};
Some((
allocated_size,
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
} }
} }
Err(_e) => None, Err(_e) => None,
@@ -63,11 +26,7 @@ pub fn get_metadata<P: AsRef<Path>>(
} }
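A worked sketch of the NTFS cap above, with assumed numbers: the reported block-based size is trusted only up to the file length rounded up to the I/O block size plus a generous pre-allocation allowance, and anything larger falls back to the rounded-up length.

// Mirror of the capping arithmetic above (illustration, not the project's code).
fn capped_size(file_size: u64, blksize: u64, reported_size: u64) -> u64 {
    let target_size = file_size.div_ceil(blksize) * blksize;
    let max_size = target_size + blksize * 65536; // pre-allocation allowance
    if reported_size > max_size {
        target_size
    } else {
        reported_size
    }
}

fn main() {
    // A 1 MiB file on a 4 KiB-block filesystem whose block count claims 10 GiB.
    assert_eq!(capped_size(1 << 20, 4096, 10 << 30), 1 << 20);
    // A sane report (one extra block) is kept unchanged.
    assert_eq!(capped_size(1 << 20, 4096, (1 << 20) + 4096), (1 << 20) + 4096);
}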
#[cfg(target_family = "windows")] #[cfg(target_family = "windows")]
pub fn get_metadata<P: AsRef<Path>>( pub fn get_metadata(d: &Path, _use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
// On windows opening the file to get size, file ID and volume can be very // On windows opening the file to get size, file ID and volume can be very
// expensive because 1) it causes a few system calls, and more importantly 2) it can cause // expensive because 1) it causes a few system calls, and more importantly 2) it can cause
// windows defender to scan the file. // windows defender to scan the file.
@@ -106,7 +65,7 @@ pub fn get_metadata<P: AsRef<Path>>(
use std::io; use std::io;
use winapi_util::Handle; use winapi_util::Handle;
fn handle_from_path_limited(path: &Path) -> io::Result<Handle> { fn handle_from_path_limited<P: AsRef<Path>>(path: P) -> io::Result<Handle> {
use std::fs::OpenOptions; use std::fs::OpenOptions;
use std::os::windows::fs::OpenOptionsExt; use std::os::windows::fs::OpenOptionsExt;
const FILE_READ_ATTRIBUTES: u32 = 0x0080; const FILE_READ_ATTRIBUTES: u32 = 0x0080;
@@ -131,47 +90,20 @@ pub fn get_metadata<P: AsRef<Path>>(
Ok(Handle::from_file(file)) Ok(Handle::from_file(file))
} }
fn get_metadata_expensive( fn get_metadata_expensive(d: &Path) -> Option<(u64, Option<(u64, u64)>)> {
path: &Path,
use_apparent_size: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
use winapi_util::file::information; use winapi_util::file::information;
let h = handle_from_path_limited(path).ok()?; let h = handle_from_path_limited(d).ok()?;
let info = information(&h).ok()?; let info = information(&h).ok()?;
if use_apparent_size { Some((
use filesize::PathExt; info.file_size(),
Some(( Some((info.file_index(), info.volume_serial_number())),
path.size_on_disk().ok()?, ))
Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
))
} else {
Some((
info.file_size(),
Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
))
}
} }
use std::os::windows::fs::MetadataExt; use std::os::windows::fs::MetadataExt;
let path = path.as_ref(); match d.metadata() {
let metadata = if follow_links {
path.metadata()
} else {
path.symlink_metadata()
};
match metadata {
Ok(ref md) => { Ok(ref md) => {
const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20; const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
const FILE_ATTRIBUTE_READONLY: u32 = 0x01; const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
@@ -179,39 +111,18 @@ pub fn get_metadata<P: AsRef<Path>>(
const FILE_ATTRIBUTE_SYSTEM: u32 = 0x04; const FILE_ATTRIBUTE_SYSTEM: u32 = 0x04;
const FILE_ATTRIBUTE_NORMAL: u32 = 0x80; const FILE_ATTRIBUTE_NORMAL: u32 = 0x80;
const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10; const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10;
const FILE_ATTRIBUTE_SPARSE_FILE: u32 = 0x00000200;
const FILE_ATTRIBUTE_PINNED: u32 = 0x00080000;
const FILE_ATTRIBUTE_UNPINNED: u32 = 0x00100000;
const FILE_ATTRIBUTE_RECALL_ON_OPEN: u32 = 0x00040000;
const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: u32 = 0x00400000;
const FILE_ATTRIBUTE_OFFLINE: u32 = 0x00001000;
// Normally FILE_ATTRIBUTE_SPARSE_FILE would be enough; however, Windows sometimes likes to mask it out. See: https://stackoverflow.com/q/54560454
const IS_PROBABLY_ONEDRIVE: u32 = FILE_ATTRIBUTE_SPARSE_FILE
| FILE_ATTRIBUTE_PINNED
| FILE_ATTRIBUTE_UNPINNED
| FILE_ATTRIBUTE_RECALL_ON_OPEN
| FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS
| FILE_ATTRIBUTE_OFFLINE;
let attr_filtered = md.file_attributes() let attr_filtered = md.file_attributes()
& !(FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_READONLY | FILE_ATTRIBUTE_SYSTEM); & !(FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_READONLY | FILE_ATTRIBUTE_SYSTEM);
if ((attr_filtered & FILE_ATTRIBUTE_ARCHIVE) != 0 if (attr_filtered & FILE_ATTRIBUTE_ARCHIVE) != 0
|| (attr_filtered & FILE_ATTRIBUTE_DIRECTORY) != 0 || (attr_filtered & FILE_ATTRIBUTE_DIRECTORY) != 0
|| md.file_attributes() == FILE_ATTRIBUTE_NORMAL) || md.file_attributes() == FILE_ATTRIBUTE_NORMAL
&& !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size)
{ {
Some(( Some((md.len(), None))
md.len(),
None,
(
md.last_write_time() as i64,
md.last_access_time() as i64,
md.creation_time() as i64,
),
))
} else { } else {
get_metadata_expensive(path, use_apparent_size) get_metadata_expensive(d)
} }
} }
_ => get_metadata_expensive(path, use_apparent_size), _ => get_metadata_expensive(d),
} }
} }

View File

@@ -3,19 +3,14 @@ use std::{
io::Write, io::Write,
path::Path, path::Path,
sync::{ sync::{
Arc, RwLock, atomic::{AtomicU64, AtomicU8, AtomicUsize, Ordering},
atomic::{AtomicU8, AtomicUsize, Ordering},
mpsc::{self, RecvTimeoutError, Sender}, mpsc::{self, RecvTimeoutError, Sender},
Arc, RwLock,
}, },
thread::JoinHandle, thread::JoinHandle,
time::Duration, time::Duration,
}; };
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
use crate::display::human_readable_number; use crate::display::human_readable_number;
/* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */
@@ -75,24 +70,23 @@ impl PAtomicInfo {
#[derive(Default)] #[derive(Default)]
pub struct RuntimeErrors { pub struct RuntimeErrors {
pub no_permissions: HashSet<String>, pub no_permissions: bool,
pub file_not_found: HashSet<String>, pub file_not_found: HashSet<String>,
pub unknown_error: HashSet<String>, pub unknown_error: HashSet<String>,
pub interrupted_error: i32,
} }
/* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */
fn format_preparing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String { fn format_preparing_str(prog_char: char, data: &PAtomicInfo, is_iso: bool) -> String {
let path_in = data.current_path.get(); let path_in = data.current_path.get();
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display); let size = human_readable_number(data.total_file_size.load(ORDERING), is_iso);
format!("Preparing: {path_in} {size} ... {prog_char}") format!("Preparing: {path_in} {size} ... {prog_char}")
} }
fn format_indexing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String { fn format_indexing_str(prog_char: char, data: &PAtomicInfo, is_iso: bool) -> String {
let path_in = data.current_path.get(); let path_in = data.current_path.get();
let file_count = data.num_files.load(ORDERING); let file_count = data.num_files.load(ORDERING);
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display); let size = human_readable_number(data.total_file_size.load(ORDERING), is_iso);
let file_str = format!("{file_count} files, {size}"); let file_str = format!("{file_count} files, {size}");
format!("Indexing: {path_in} {file_str} ... {prog_char}") format!("Indexing: {path_in} {file_str} ... {prog_char}")
} }
@@ -112,13 +106,13 @@ impl PIndicator {
} }
} }
pub fn spawn(&mut self, output_display: String) { pub fn spawn(&mut self, is_iso: bool) {
let data = self.data.clone(); let data = self.data.clone();
let (stop_handler, receiver) = mpsc::channel::<()>(); let (stop_handler, receiver) = mpsc::channel::<()>();
let time_info_thread = std::thread::spawn(move || { let time_info_thread = std::thread::spawn(move || {
let mut progress_char_i: usize = 0; let mut progress_char_i: usize = 0;
let mut stderr = std::io::stderr(); let mut stdout = std::io::stdout();
let mut msg = "".to_string(); let mut msg = "".to_string();
// While the timeout triggers we go round the loop // While the timeout triggers we go round the loop
@@ -127,27 +121,24 @@ impl PIndicator {
receiver.recv_timeout(Duration::from_millis(SPINNER_SLEEP_TIME)) receiver.recv_timeout(Duration::from_millis(SPINNER_SLEEP_TIME))
{ {
// Clear the text written by 'write!'& Return at the start of line // Clear the text written by 'write!'& Return at the start of line
let clear = format!("\r{:width$}", " ", width = msg.len()); print!("\r{:width$}", " ", width = msg.len());
write!(stderr, "{clear}").unwrap();
let prog_char = PROGRESS_CHARS[progress_char_i]; let prog_char = PROGRESS_CHARS[progress_char_i];
msg = match data.state.load(ORDERING) { msg = match data.state.load(ORDERING) {
Operation::INDEXING => format_indexing_str(prog_char, &data, &output_display), Operation::INDEXING => format_indexing_str(prog_char, &data, is_iso),
Operation::PREPARING => format_preparing_str(prog_char, &data, &output_display), Operation::PREPARING => format_preparing_str(prog_char, &data, is_iso),
_ => panic!("Unknown State"), _ => panic!("Unknown State"),
}; };
write!(stderr, "\r{msg}").unwrap(); write!(stdout, "\r{msg}").unwrap();
stderr.flush().unwrap(); stdout.flush().unwrap();
progress_char_i += 1; progress_char_i += 1;
progress_char_i %= PROGRESS_CHARS_LEN; progress_char_i %= PROGRESS_CHARS_LEN;
} }
print!("\r{:width$}", " ", width = msg.len());
let clear = format!("\r{:width$}", " ", width = msg.len()); print!("\r");
write!(stderr, "{clear}").unwrap(); stdout.flush().unwrap();
write!(stderr, "\r").unwrap();
stderr.flush().unwrap();
}); });
self.thread = Some((stop_handler, time_info_thread)) self.thread = Some((stop_handler, time_info_thread))
} }

View File

@@ -2,16 +2,13 @@ use platform::get_metadata;
use std::collections::HashSet; use std::collections::HashSet;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::config::DAY_SECONDS;
use crate::dir_walker::Operator;
use crate::platform; use crate::platform;
use regex::Regex; use regex::Regex;
pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> { pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf> {
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(dirs.len()); let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(filenames.len());
for t in dirs { for t in filenames {
let top_level_name = normalize_path(t); let top_level_name = normalize_path(t);
let mut can_add = true; let mut can_add = true;
let mut to_remove: Vec<PathBuf> = Vec::new(); let mut to_remove: Vec<PathBuf> = Vec::new();
@@ -34,25 +31,13 @@ pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
top_level_names top_level_names
} }
pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P], follow_links: bool) -> HashSet<u64> { pub fn get_filesystem_devices<'a, P: IntoIterator<Item = &'a PathBuf>>(paths: P) -> HashSet<u64> {
use std::fs;
// Gets the device ids for the filesystems which are used by the argument paths // Gets the device ids for the filesystems which are used by the argument paths
paths paths
.iter() .into_iter()
.filter_map(|p| { .filter_map(|p| match get_metadata(p, false) {
let follow_links = if follow_links { Some((_size, Some((_id, dev)))) => Some(dev),
// slow path: If dereference-links is set, then we check if the file is a symbolic link _ => None,
match fs::symlink_metadata(p) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
} else {
false
};
match get_metadata(p, false, follow_links) {
Some((_size, Some((_id, dev)), _time)) => Some(dev),
_ => None,
}
}) })
.collect() .collect()
} }
@@ -67,17 +52,6 @@ pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
path.as_ref().components().collect() path.as_ref().components().collect()
} }
// Canonicalize the path only if it is an absolute path
pub fn canonicalize_absolute_path(path: PathBuf) -> PathBuf {
if !path.is_absolute() {
return path;
}
match std::fs::canonicalize(&path) {
Ok(canonicalized_path) => canonicalized_path,
Err(_) => path,
}
}
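A self-contained re-statement of the helper above, to make its asymmetry explicit: relative arguments are reported exactly as typed, while absolute arguments are resolved (symlinks, "..") so two spellings of the same root are counted once. The fallback to the unmodified input on error mirrors the original match:

use std::path::PathBuf;

fn canonicalize_absolute_path(path: PathBuf) -> PathBuf {
    if !path.is_absolute() {
        return path;
    }
    // Resolves symlinks and "..", falling back to the input if resolution fails.
    std::fs::canonicalize(&path).unwrap_or(path)
}

fn main() {
    // Relative path: returned untouched.
    assert_eq!(
        canonicalize_absolute_path(PathBuf::from("src/./lib")),
        PathBuf::from("src/./lib")
    );
    // Absolute path: resolved against the real filesystem (result is host-dependent).
    println!("{:?}", canonicalize_absolute_path(PathBuf::from("/var/../tmp")));
}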
pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool { pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool {
if filter_regex.is_empty() { if filter_regex.is_empty() {
false false
@@ -88,20 +62,6 @@ pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool
} }
} }
pub fn is_filtered_out_due_to_file_time(
filter_time: &Option<(Operator, i64)>,
actual_time: i64,
) -> bool {
match filter_time {
None => false,
Some((Operator::Equal, bound_time)) => {
!(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
}
Some((Operator::GreaterThan, bound_time)) => actual_time < *bound_time,
Some((Operator::LessThan, bound_time)) => actual_time > *bound_time,
}
}
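Note what Equal means in the filter above: not an exact timestamp match but "falls inside the one-day window starting at the bound". A small sketch of just that branch (DAY_SECONDS is assumed to be 86 400 here; in the real code it comes from crate::config):

// Assumed value; the real constant lives in crate::config.
const DAY_SECONDS: i64 = 24 * 60 * 60;

// Mirror of the Operator::Equal arm: keep the entry only if its timestamp
// lands within one day after the bound.
fn filtered_out_equal(bound_time: i64, actual_time: i64) -> bool {
    !(actual_time >= bound_time && actual_time < bound_time + DAY_SECONDS)
}

fn main() {
    let midnight = 1_700_000_000; // arbitrary reference timestamp
    assert!(!filtered_out_equal(midnight, midnight + 3_600)); // same day: kept
    assert!(filtered_out_equal(midnight, midnight + DAY_SECONDS)); // next day: filtered out
}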
pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool { pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool {
filter_regex filter_regex
.iter() .iter()
@@ -122,15 +82,15 @@ mod tests {
fn test_simplify_dir() { fn test_simplify_dir() {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(PathBuf::from("a")); correct.insert(PathBuf::from("a"));
assert_eq!(simplify_dir_names(&["a"]), correct); assert_eq!(simplify_dir_names(vec!["a"]), correct);
} }
#[test] #[test]
fn test_simplify_dir_rm_subdir() { fn test_simplify_dir_rm_subdir() {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(["a", "b"].iter().collect::<PathBuf>()); correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(&["a/b/c", "a/b", "a/b/d/f"]), correct); assert_eq!(simplify_dir_names(vec!["a/b/c", "a/b", "a/b/d/f"]), correct);
assert_eq!(simplify_dir_names(&["a/b", "a/b/c", "a/b/d/f"]), correct); assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct);
} }
#[test] #[test]
@@ -139,7 +99,7 @@ mod tests {
correct.insert(["a", "b"].iter().collect::<PathBuf>()); correct.insert(["a", "b"].iter().collect::<PathBuf>());
correct.insert(PathBuf::from("c")); correct.insert(PathBuf::from("c"));
assert_eq!( assert_eq!(
simplify_dir_names(&[ simplify_dir_names(vec![
"a/b", "a/b",
"a/b//", "a/b//",
"a/././b///", "a/././b///",
@@ -158,14 +118,14 @@ mod tests {
correct.insert(PathBuf::from("b")); correct.insert(PathBuf::from("b"));
correct.insert(["c", "a", "b"].iter().collect::<PathBuf>()); correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
correct.insert(["a", "b"].iter().collect::<PathBuf>()); correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(&["a/b", "c/a/b/", "b"]), correct); assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct);
} }
#[test] #[test]
fn test_simplify_dir_dots() { fn test_simplify_dir_dots() {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(PathBuf::from("src")); correct.insert(PathBuf::from("src"));
assert_eq!(simplify_dir_names(&["src/."]), correct); assert_eq!(simplify_dir_names(vec!["src/."]), correct);
} }
#[test] #[test]
@@ -173,7 +133,7 @@ mod tests {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(PathBuf::from("src")); correct.insert(PathBuf::from("src"));
correct.insert(PathBuf::from("src_v2")); correct.insert(PathBuf::from("src_v2"));
assert_eq!(simplify_dir_names(&["src/", "src_v2"]), correct); assert_eq!(simplify_dir_names(vec!["src/", "src_v2"]), correct);
} }
#[test] #[test]

View File

@@ -1,2 +0,0 @@
tests/test_dir_files_from/a_file
tests/test_dir_files_from/hello_file

View File

@@ -1 +0,0 @@
hello

View File

@@ -1,11 +1,9 @@
use assert_cmd::Command; use assert_cmd::Command;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::process::Output; use std::str;
use std::sync::Once; use std::sync::Once;
use std::{io, str};
static INIT: Once = Once::new(); static INIT: Once = Once::new();
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
/** /**
* This file contains tests that verify the exact output of the command. * This file contains tests that verify the exact output of the command.
@@ -35,61 +33,34 @@ fn copy_test_data(dir: &str) {
.map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err)); .map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err));
} }
fn create_unreadable_directory() -> io::Result<()> {
#[cfg(unix)]
{
use std::fs;
use std::fs::Permissions;
use std::os::unix::fs::PermissionsExt;
fs::create_dir_all(UNREADABLE_DIR_PATH)?;
fs::set_permissions(UNREADABLE_DIR_PATH, Permissions::from_mode(0))?;
}
Ok(())
}
fn initialize() { fn initialize() {
INIT.call_once(|| { INIT.call_once(|| {
copy_test_data("tests/test_dir"); copy_test_data("tests/test_dir");
copy_test_data("tests/test_dir2"); copy_test_data("tests/test_dir2");
copy_test_data("tests/test_dir_unicode"); copy_test_data("tests/test_dir_unicode");
if let Err(e) = create_unreadable_directory() {
panic!("Failed to create unreadable directory: {}", e);
}
}); });
} }
fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output { fn exact_output_test<T: AsRef<OsStr>>(valid_outputs: Vec<String>, command_args: Vec<T>) {
initialize(); initialize();
let mut to_run = &mut Command::cargo_bin("dust").unwrap();
// Hide progress bar let mut a = &mut Command::cargo_bin("dust").unwrap();
to_run.arg("-P");
for p in command_args { for p in command_args {
to_run = to_run.arg(p); a = a.arg(p);
} }
to_run.unwrap()
}
fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<String>) { let output = str::from_utf8(&a.unwrap().stdout).unwrap().to_owned();
let to_run = run_cmd(command_args);
let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned(); let will_fail = valid_outputs.iter().any(|i| output.contains(i));
let will_fail = valid_stdout.iter().any(|i| stdout_output.contains(i));
if !will_fail { if !will_fail {
eprintln!( eprintln!(
"output(stdout):\n{}\ndoes not contain any of:\n{}", "output:\n{}\ndoes not contain any of:\n{}",
stdout_output, output,
valid_stdout.join("\n\n") valid_outputs.join("\n\n")
); );
} }
assert!(will_fail); assert!(will_fail)
}
fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String) {
let to_run = run_cmd(command_args);
let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim();
assert_eq!(stderr_output, valid_stderr);
} }
// "windows" result data can vary by host (size seems to be variable by one byte); fix code vs test and re-enable // "windows" result data can vary by host (size seems to be variable by one byte); fix code vs test and re-enable
@@ -97,20 +68,20 @@ fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String)
#[test] #[test]
pub fn test_main_basic() { pub fn test_main_basic() {
// -c is no color mode - This makes testing much simpler // -c is no color mode - This makes testing much simpler
exact_stdout_test(&["-c", "-B", "/tmp/test_dir/"], main_output()); exact_output_test(main_output(), vec!["-c", "-B", "/tmp/test_dir/"])
} }
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_main_multi_arg() { pub fn test_main_multi_arg() {
let command_args = [ let command_args = vec![
"-c", "-c",
"-B", "-B",
"/tmp/test_dir/many/", "/tmp/test_dir/many/",
"/tmp/test_dir", "/tmp/test_dir",
"/tmp/test_dir", "/tmp/test_dir",
]; ];
exact_stdout_test(&command_args, main_output()); exact_output_test(main_output(), command_args);
} }
fn main_output() -> Vec<String> { fn main_output() -> Vec<String> {
@@ -140,8 +111,8 @@ fn main_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_main_long_paths() { pub fn test_main_long_paths() {
let command_args = ["-c", "-p", "-B", "/tmp/test_dir/"]; let command_args = vec!["-c", "-p", "-B", "/tmp/test_dir/"];
exact_stdout_test(&command_args, main_output_long_paths()); exact_output_test(main_output_long_paths(), command_args);
} }
fn main_output_long_paths() -> Vec<String> { fn main_output_long_paths() -> Vec<String> {
@@ -168,8 +139,8 @@ fn main_output_long_paths() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_substring_of_names_and_long_names() { pub fn test_substring_of_names_and_long_names() {
let command_args = ["-c", "-B", "/tmp/test_dir2"]; let command_args = vec!["-c", "-B", "/tmp/test_dir2"];
exact_stdout_test(&command_args, no_substring_of_names_output()); exact_output_test(no_substring_of_names_output(), command_args);
} }
fn no_substring_of_names_output() -> Vec<String> { fn no_substring_of_names_output() -> Vec<String> {
@@ -202,8 +173,8 @@ fn no_substring_of_names_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_unicode_directories() { pub fn test_unicode_directories() {
let command_args = ["-c", "-B", "/tmp/test_dir_unicode"]; let command_args = vec!["-c", "-B", "/tmp/test_dir_unicode"];
exact_stdout_test(&command_args, unicode_dir()); exact_output_test(unicode_dir(), command_args);
} }
fn unicode_dir() -> Vec<String> { fn unicode_dir() -> Vec<String> {
@@ -229,8 +200,8 @@ fn unicode_dir() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_apparent_size() { pub fn test_apparent_size() {
let command_args = ["-c", "-s", "-b", "/tmp/test_dir"]; let command_args = vec!["-c", "-s", "-b", "/tmp/test_dir"];
exact_stdout_test(&command_args, apparent_size_output()); exact_output_test(apparent_size_output(), command_args);
} }
fn apparent_size_output() -> Vec<String> { fn apparent_size_output() -> Vec<String> {
@@ -251,26 +222,3 @@ fn apparent_size_output() -> Vec<String> {
vec![one_space_before, two_space_before] vec![one_space_before, two_space_before]
} }
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_permission_normal() {
let command_args = [UNREADABLE_DIR_PATH];
let permission_msg =
r#"Did not have permissions for all directories (add --print-errors to see errors)"#
.trim()
.to_string();
exact_stderr_test(&command_args, permission_msg);
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_permission_flag() {
// add the flag to CLI
let command_args = ["--print-errors", UNREADABLE_DIR_PATH];
let permission_msg = format!(
"Did not have permissions for directories: {}",
UNREADABLE_DIR_PATH
);
exact_stderr_test(&command_args, permission_msg);
}

View File

@@ -10,9 +10,6 @@ use std::str;
fn build_command<T: AsRef<OsStr>>(command_args: Vec<T>) -> String { fn build_command<T: AsRef<OsStr>>(command_args: Vec<T>) -> String {
let mut cmd = &mut Command::cargo_bin("dust").unwrap(); let mut cmd = &mut Command::cargo_bin("dust").unwrap();
// Hide progress bar
cmd = cmd.arg("-P");
for p in command_args { for p in command_args {
cmd = cmd.arg(p); cmd = cmd.arg(p);
} }
@@ -62,20 +59,6 @@ pub fn test_d_flag_works() {
assert!(!output.contains("hello_file")); assert!(!output.contains("hello_file"));
} }
#[test]
pub fn test_d0_works_on_multiple() {
// We should see the top level directory but not the sub dirs / files:
let output = build_command(vec!["-d", "0", "tests/test_dir/", "tests/test_dir2"]);
assert!(output.contains("test_dir "));
assert!(output.contains("test_dir2"));
}
#[test]
pub fn test_threads_flag_works() {
let output = build_command(vec!["-T", "1", "tests/test_dir/"]);
assert!(output.contains("hello_file"));
}
#[test] #[test]
pub fn test_d_flag_works_and_still_recurses_down() { pub fn test_d_flag_works_and_still_recurses_down() {
// We had a bug where running with '-d 1' would stop at the first directory and the code // We had a bug where running with '-d 1' would stop at the first directory and the code
@@ -104,60 +87,10 @@ pub fn test_ignore_all_in_file() {
assert!(!output.contains(".secret")); assert!(!output.contains(".secret"));
} }
#[test]
pub fn test_files_from_flag_file() {
let output = build_command(vec![
"--files-from",
"tests/test_dir_files_from/files_from.txt",
]);
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_files0_from_flag_file() {
let output = build_command(vec![
"--files0-from",
"tests/test_dir_files_from/files0_from.txt",
]);
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_files_from_flag_stdin() {
let mut cmd = Command::cargo_bin("dust").unwrap();
cmd.arg("-P").arg("--files-from").arg("-");
let input = b"tests/test_dir_files_from/a_file\ntests/test_dir_files_from/hello_file\n";
cmd.write_stdin(input.as_ref());
let finished = &cmd.unwrap();
let stderr = std::str::from_utf8(&finished.stderr).unwrap();
assert_eq!(stderr, "");
let output = std::str::from_utf8(&finished.stdout).unwrap();
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_files0_from_flag_stdin() {
let mut cmd = Command::cargo_bin("dust").unwrap();
cmd.arg("-P").arg("--files0-from").arg("-");
let input = b"tests/test_dir_files_from/a_file\0tests/test_dir_files_from/hello_file\0";
cmd.write_stdin(input.as_ref());
let finished = &cmd.unwrap();
let stderr = std::str::from_utf8(&finished.stderr).unwrap();
assert_eq!(stderr, "");
let output = std::str::from_utf8(&finished.stdout).unwrap();
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
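The NUL-separated format fed to --files0-from above is the same shape produced by find's -print0. A small, self-contained sketch of splitting such input (the helper name is made up for illustration):

fn split_files0(input: &[u8]) -> Vec<String> {
    // NUL-terminated entries, as produced by `find . -print0`; the trailing NUL
    // yields an empty slice that we drop.
    input
        .split(|b| *b == 0)
        .filter(|entry| !entry.is_empty())
        .map(|entry| String::from_utf8_lossy(entry).into_owned())
        .collect()
}

fn main() {
    let input = b"tests/test_dir_files_from/a_file\0tests/test_dir_files_from/hello_file\0";
    assert_eq!(split_files0(input).len(), 2);
}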
#[test] #[test]
pub fn test_with_bad_param() { pub fn test_with_bad_param() {
let mut cmd = Command::cargo_bin("dust").unwrap(); let mut cmd = Command::cargo_bin("dust").unwrap();
cmd.arg("-P").arg("bad_place"); let result = cmd.arg("bad_place").unwrap();
let output_error = cmd.unwrap_err();
let result = output_error.as_output().unwrap();
let stderr = str::from_utf8(&result.stderr).unwrap(); let stderr = str::from_utf8(&result.stderr).unwrap();
assert!(stderr.contains("No such file or directory")); assert!(stderr.contains("No such file or directory"));
} }
@@ -200,9 +133,9 @@ pub fn test_show_files_by_type() {
#[cfg(target_family = "unix")] #[cfg(target_family = "unix")]
pub fn test_show_files_only() { pub fn test_show_files_only() {
let output = build_command(vec!["-c", "-F", "tests/test_dir"]); let output = build_command(vec!["-c", "-F", "tests/test_dir"]);
assert!(output.contains("a_file")); assert!(output.contains("tests/test_dir/many/a_file"));
assert!(output.contains("hello_file")); assert!(output.contains("tests/test_dir/many/hello_file"));
assert!(!output.contains("many")); assert!(!output.contains("tests/test_dir/many "));
} }
#[test] #[test]
@@ -299,42 +232,3 @@ pub fn test_show_files_by_invert_regex_match_multiple() {
assert!(!output.contains("test_dir_unicode")); assert!(!output.contains("test_dir_unicode"));
assert!(output.contains("many")); assert!(output.contains("many"));
} }
#[test]
pub fn test_no_color() {
let output = build_command(vec!["-c"]);
// Red is 31
assert!(!output.contains("\x1B[31m"));
assert!(!output.contains("\x1B[0m"));
}
#[test]
pub fn test_force_color() {
let output = build_command(vec!["-C"]);
// Red is 31
assert!(output.contains("\x1B[31m"));
assert!(output.contains("\x1B[0m"));
}
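For reference, the bytes asserted in the two colour tests are plain ANSI SGR sequences: ESC[31m sets the foreground to red and ESC[0m resets all attributes. A one-line check on any ANSI-capable terminal:

fn main() {
    // \x1B is ESC; 31 = red foreground, 0 = reset.
    println!("\x1B[31mthis prints red\x1B[0m and this prints normally");
}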
#[test]
pub fn test_collapse() {
let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
assert!(output.contains("many"));
assert!(!output.contains("hello_file"));
}
#[test]
pub fn test_handle_duplicate_names() {
// Check that even if we run on multiple directories with the same name
// we still show the distinct parent dir in the output
let output = build_command(vec![
"tests/test_dir_matching/dave/dup_name",
"tests/test_dir_matching/andy/dup_name",
"ci",
]);
assert!(output.contains("andy"));
assert!(output.contains("dave"));
assert!(output.contains("ci"));
assert!(output.contains("dup_name"));
assert!(!output.contains("test_dir_matching"));
}