Mirror of https://github.com/bootandy/dust.git (synced 2025-12-05 20:40:11 -08:00)

Compare commits: 21 commits
| Author | SHA1 | Date |
|---|---|---|
| | 62bf1e14de | |
| | de2d748a88 | |
| | 509d51e872 | |
| | f98b841d23 | |
| | 67d23e80ff | |
| | 968377eebd | |
| | 96e04fe168 | |
| | 7974e2eaf0 | |
| | 14efddfd05 | |
| | 4e83421da6 | |
| | 901bc3895a | |
| | 3cce61f854 | |
| | 222cd83ff3 | |
| | 76b9f32859 | |
| | 17662e8ff1 | |
| | 9cc557cada | |
| | 81722b695d | |
| | 51dc167345 | |
| | 9b5f6d6c5a | |
| | 74ffd78901 | |
| | 9b2dc4655d | |
803  Cargo.lock (generated; file diff suppressed because it is too large)

15  Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "du-dust"
 description = "A more intuitive version of du"
-version = "1.2.1"
+version = "1.2.3"
 authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
 edition = "2024"
 readme = "README.md"
@@ -28,10 +28,10 @@ strip = true
 
 [dependencies]
 ansi_term = "0.12"
-clap = { version = "4.4", features = ["derive"] }
-lscolors = "0.13"
-terminal_size = "0.2"
-unicode-width = "0.1"
+clap = { version = "4", features = ["derive"] }
+lscolors = "0.21"
+terminal_size = "0.4"
+unicode-width = "0.2"
 rayon = "1"
 thousands = "0.2"
 stfu8 = "0.2"
@@ -39,9 +39,8 @@ regex = "1"
 config-file = "0.2"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
-directories = "4"
-sysinfo = "0.27"
-ctrlc = "3.4"
+sysinfo = "0.37"
+ctrlc = "3"
 chrono = "0.4"
 
 [target.'cfg(not(target_has_atomic = "64"))'.dependencies]
@@ -90,7 +90,7 @@ Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen)
 Usage: dust -i (Do not show hidden files)
 Usage: dust -c (No colors [monochrome])
 Usage: dust -C (Force colors)
-Usage: dust -f (Count files instead of diskspace)
+Usage: dust -f (Count files instead of diskspace [Counts by inode, to include duplicate inodes use dust -f -s])
 Usage: dust -t (Group by filetype)
 Usage: dust -z 10M (min-size, Only include files larger than 10M)
 Usage: dust -e regex (Only include files matching this regex (eg dust -e "\.png$" would match png files))
@@ -102,7 +102,8 @@ Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack
 Usage: dust --skip-total (No total row will be displayed)
 Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
 Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
-Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
+Usage: dust --files0-from=FILE (Read NUL-terminated file paths from FILE; if FILE is '-', read from stdin)
+Usage: dust --files-from=FILE (Read newline-terminated file paths from FILE; if FILE is '-', read from stdin)
 Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
 ```
 
@@ -122,6 +123,6 @@ reverse=true
 - [dua](https://github.com/Byron/dua-cli/)
 - [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
 - [dirstat-rs](https://github.com/scullionw/dirstat-rs)
-- du -d 1 -h | sort -h
+- `du -d 1 -h | sort -h`
 
 Note: Apparent-size is calculated slightly differently in dust to gdu. In dust each hard link is counted as using file_length space. In gdu only the first entry is counted.
@@ -62,7 +62,8 @@ tb\:"terabyte (TB)"))' \
 '--atime=[just like -mtime, but based on file access time]:ATIME:_default' \
 '-y+[just like -mtime, but based on file change time]:CTIME:_default' \
 '--ctime=[just like -mtime, but based on file change time]:CTIME:_default' \
-'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]:FILES0_FROM:_files' \
+'(--files-from)--files0-from=[Read NUL-terminated paths from FILE (use \`-\` for stdin)]:FILES0_FROM:_files' \
+'(--files0-from)--files-from=[Read newline-terminated paths from FILE (use \`-\` for stdin)]:FILES_FROM:_files' \
 '*--collapse=[Keep these directories collapsed]:COLLAPSE:_files' \
 '-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
 c\:"last changed time"
@@ -50,7 +50,8 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
 [CompletionResult]::new('--atime', '--atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
 [CompletionResult]::new('-y', '-y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
 [CompletionResult]::new('--ctime', '--ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
-[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
+[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'Read NUL-terminated paths from FILE (use `-` for stdin)')
+[CompletionResult]::new('--files-from', '--files-from', [CompletionResultType]::ParameterName, 'Read newline-terminated paths from FILE (use `-` for stdin)')
 [CompletionResult]::new('--collapse', '--collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
 [CompletionResult]::new('-m', '-m', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
 [CompletionResult]::new('--filetime', '--filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
@@ -1,12 +1,16 @@
 _dust() {
     local i cur prev opts cmd
     COMPREPLY=()
-    cur="${COMP_WORDS[COMP_CWORD]}"
-    prev="${COMP_WORDS[COMP_CWORD-1]}"
+    if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
+        cur="$2"
+    else
+        cur="${COMP_WORDS[COMP_CWORD]}"
+    fi
+    prev="$3"
     cmd=""
     opts=""
 
-    for i in ${COMP_WORDS[@]}
+    for i in "${COMP_WORDS[@]:0:COMP_CWORD}"
     do
         case "${cmd},${i}" in
            ",$1")
@@ -19,7 +23,7 @@ _dust() {
 
     case "${cmd}" in
        dust)
-            opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore-hidden --invert-filter --filter --file-types --terminal-width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
+            opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore-hidden --invert-filter --filter --file-types --terminal-width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --files-from --collapse --filetime --help --version [PATH]..."
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
@@ -178,6 +182,10 @@ _dust() {
                COMPREPLY=($(compgen -f "${cur}"))
                return 0
                ;;
+            --files-from)
+                COMPREPLY=($(compgen -f "${cur}"))
+                return 0
+                ;;
            --collapse)
                COMPREPLY=($(compgen -f "${cur}"))
                return 0
@@ -47,7 +47,8 @@ set edit:completion:arg-completer[dust] = {|@words|
 cand --atime 'just like -mtime, but based on file access time'
 cand -y 'just like -mtime, but based on file change time'
 cand --ctime 'just like -mtime, but based on file change time'
-cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
+cand --files0-from 'Read NUL-terminated paths from FILE (use `-` for stdin)'
+cand --files-from 'Read newline-terminated paths from FILE (use `-` for stdin)'
 cand --collapse 'Keep these directories collapsed'
 cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
 cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
@@ -22,7 +22,8 @@ complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - us
 complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (𝑐𝑢𝑟𝑟−𝑛, +∞)' -r
 complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
 complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
-complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
+complete -c dust -l files0-from -d 'Read NUL-terminated paths from FILE (use `-` for stdin)' -r -F
+complete -c dust -l files-from -d 'Read newline-terminated paths from FILE (use `-` for stdin)' -r -F
 complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
 complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "a\t'last accessed time'
 c\t'last changed time'
@@ -25,4 +25,6 @@ skip-total=true
 ignore-hidden=true
 
+# print sizes in powers of 1000 (e.g., 1.1G)
 output-format="si"
 
+number-of-lines=5
@@ -1,33 +1,33 @@
 .ie \n(.g .ds Aq \(aq
 .el .ds Aq '
-.TH Dust 1 "Dust 1.2.1"
+.TH Dust 1 "Dust 1.2.3"
 .SH NAME
 Dust \- Like du but more intuitive
 .SH SYNOPSIS
-\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore\-hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file\-types\fR] [\fB\-w\fR|\fB\-\-terminal\-width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
+\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore\-hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file\-types\fR] [\fB\-w\fR|\fB\-\-terminal\-width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-files\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
 .SH DESCRIPTION
 Like du but more intuitive
 .SH OPTIONS
 .TP
-\fB\-d\fR, \fB\-\-depth\fR=\fIDEPTH\fR
+\fB\-d\fR, \fB\-\-depth\fR \fI<DEPTH>\fR
 Depth to show
 .TP
-\fB\-T\fR, \fB\-\-threads\fR=\fITHREADS\fR
+\fB\-T\fR, \fB\-\-threads\fR \fI<THREADS>\fR
 Number of threads to use
 .TP
-\fB\-\-config\fR=\fIFILE\fR
+\fB\-\-config\fR \fI<FILE>\fR
 Specify a config file to use
 .TP
-\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
+\fB\-n\fR, \fB\-\-number\-of\-lines\fR \fI<NUMBER>\fR
 Number of lines of output to show. (Default is terminal_height \- 10)
 .TP
 \fB\-p\fR, \fB\-\-full\-paths\fR
 Subdirectories will not have their path shortened
 .TP
-\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
+\fB\-X\fR, \fB\-\-ignore\-directory\fR \fI<PATH>\fR
 Exclude any file or directory with this path
 .TP
-\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
+\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR \fI<FILE>\fR
 Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
 .TP
 \fB\-L\fR, \fB\-\-dereference\-links\fR
@@ -54,7 +54,7 @@ No percent bars or percentages will be displayed
 \fB\-B\fR, \fB\-\-bars\-on\-right\fR
 percent bars moved to right side of screen
 .TP
-\fB\-z\fR, \fB\-\-min\-size\fR=\fIMIN_SIZE\fR
+\fB\-z\fR, \fB\-\-min\-size\fR \fI<MIN_SIZE>\fR
 Minimum size file to include in output
 .TP
 \fB\-R\fR, \fB\-\-screen\-reader\fR
@@ -69,16 +69,16 @@ Directory \*(Aqsize\*(Aq is number of child files instead of disk size
 \fB\-i\fR, \fB\-\-ignore\-hidden\fR
 Do not display hidden files
 .TP
-\fB\-v\fR, \fB\-\-invert\-filter\fR=\fIREGEX\fR
+\fB\-v\fR, \fB\-\-invert\-filter\fR \fI<REGEX>\fR
 Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$"
 .TP
-\fB\-e\fR, \fB\-\-filter\fR=\fIREGEX\fR
+\fB\-e\fR, \fB\-\-filter\fR \fI<REGEX>\fR
 Only include filepaths matching this regex. For png files type: \-e "\\.png$"
 .TP
 \fB\-t\fR, \fB\-\-file\-types\fR
 show only these file types
 .TP
-\fB\-w\fR, \fB\-\-terminal\-width\fR=\fIWIDTH\fR
+\fB\-w\fR, \fB\-\-terminal\-width\fR \fI<WIDTH>\fR
 Specify width of output overriding the auto detection of terminal width
 .TP
 \fB\-P\fR, \fB\-\-no\-progress\fR
@@ -93,7 +93,7 @@ Only directories will be displayed
 \fB\-F\fR, \fB\-\-only\-file\fR
 Only files will be displayed. (Finds your largest files)
 .TP
-\fB\-o\fR, \fB\-\-output\-format\fR=\fIFORMAT\fR
+\fB\-o\fR, \fB\-\-output\-format\fR \fI<FORMAT>\fR
 Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size
 .br
 
@@ -122,28 +122,31 @@ gb: gigabyte (GB)
 tb: terabyte (TB)
 .RE
 .TP
-\fB\-S\fR, \fB\-\-stack\-size\fR=\fISTACK_SIZE\fR
+\fB\-S\fR, \fB\-\-stack\-size\fR \fI<STACK_SIZE>\fR
 Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824)
 .TP
 \fB\-j\fR, \fB\-\-output\-json\fR
 Output the directory tree as json to the current directory
 .TP
-\fB\-M\fR, \fB\-\-mtime\fR=\fIMTIME\fR
+\fB\-M\fR, \fB\-\-mtime\fR \fI<MTIME>\fR
 +/\-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and \-n => (𝑐𝑢𝑟𝑟−𝑛, +∞)
 .TP
-\fB\-A\fR, \fB\-\-atime\fR=\fIATIME\fR
+\fB\-A\fR, \fB\-\-atime\fR \fI<ATIME>\fR
 just like \-mtime, but based on file access time
 .TP
-\fB\-y\fR, \fB\-\-ctime\fR=\fICTIME\fR
+\fB\-y\fR, \fB\-\-ctime\fR \fI<CTIME>\fR
 just like \-mtime, but based on file change time
 .TP
-\fB\-\-files0\-from\fR=\fIFILES0_FROM\fR
-run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
+\fB\-\-files0\-from\fR \fI<FILES0_FROM>\fR
+Read NUL\-terminated paths from FILE (use `\-` for stdin)
 .TP
-\fB\-\-collapse\fR=\fICOLLAPSE\fR
+\fB\-\-files\-from\fR \fI<FILES_FROM>\fR
+Read newline\-terminated paths from FILE (use `\-` for stdin)
+.TP
+\fB\-\-collapse\fR \fI<COLLAPSE>\fR
 Keep these directories collapsed
 .TP
-\fB\-m\fR, \fB\-\-filetime\fR=\fIFILETIME\fR
+\fB\-m\fR, \fB\-\-filetime\fR \fI<FILETIME>\fR
 Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
 .br
 
@@ -167,4 +170,4 @@ Print version
 [\fIPATH\fR]
 Input files or directories
 .SH VERSION
-v1.2.1
+v1.2.3
@@ -172,11 +172,14 @@ pub struct Cli {
     #[arg(short('y'), long, allow_hyphen_values(true))]
     pub ctime: Option<String>,
 
-    /// run dust on NUL-terminated file names specified in file; if argument is
-    /// -, then read names from standard input
-    #[arg(long, value_hint(ValueHint::AnyPath))]
+    /// Read NUL-terminated paths from FILE (use `-` for stdin).
+    #[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files_from"))]
     pub files0_from: Option<String>,
 
+    /// Read newline-terminated paths from FILE (use `-` for stdin).
+    #[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files0_from"))]
+    pub files_from: Option<String>,
+
     /// Keep these directories collapsed
     #[arg(long, value_hint(ValueHint::AnyPath))]
     pub collapse: Option<Vec<String>>,
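The hunk above uses clap's `conflicts_with` so that `--files0-from` and `--files-from` cannot be combined. A minimal, self-contained sketch of that pattern (hypothetical `Demo` struct, not the project's real `Cli`), assuming clap 4 with the `derive` feature as listed in Cargo.toml:

```rust
use clap::Parser;

/// Demo of mutually exclusive input-list flags.
#[derive(Parser, Debug)]
struct Demo {
    /// Read NUL-terminated paths from FILE (use `-` for stdin).
    #[arg(long, conflicts_with = "files_from")]
    files0_from: Option<String>,

    /// Read newline-terminated paths from FILE (use `-` for stdin).
    #[arg(long, conflicts_with = "files0_from")]
    files_from: Option<String>,
}

fn main() {
    // `demo --files0-from a.txt --files-from b.txt` is rejected by clap with a
    // "cannot be used with" usage error; either flag on its own parses fine.
    let demo = Demo::parse();
    println!("{demo:?}");
}
```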
@@ -36,16 +36,26 @@ pub struct Config {
     pub output_json: Option<bool>,
     pub print_errors: Option<bool>,
     pub files0_from: Option<String>,
+    pub number_of_lines: Option<usize>,
+    pub files_from: Option<String>,
 }
 
 impl Config {
-    pub fn get_files_from(&self, options: &Cli) -> Option<String> {
+    pub fn get_files0_from(&self, options: &Cli) -> Option<String> {
         let from_file = &options.files0_from;
         match from_file {
             None => self.files0_from.as_ref().map(|x| x.to_string()),
             Some(x) => Some(x.to_string()),
         }
     }
+
+    pub fn get_files_from(&self, options: &Cli) -> Option<String> {
+        let from_file = &options.files_from;
+        match from_file {
+            None => self.files_from.as_ref().map(|x| x.to_string()),
+            Some(x) => Some(x.to_string()),
+        }
+    }
     pub fn get_no_colors(&self, options: &Cli) -> bool {
         Some(true) == self.no_colors || options.no_colors
     }
@@ -147,6 +157,15 @@ impl Config {
         Some(true) == self.output_json || options.output_json
     }
 
+    pub fn get_number_of_lines(&self, options: &Cli) -> Option<usize> {
+        let from_cmd_line = options.number_of_lines;
+        if from_cmd_line.is_none() {
+            self.number_of_lines
+        } else {
+            from_cmd_line
+        }
+    }
+
     pub fn get_modified_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
         get_filter_time_operator(options.mtime.as_ref(), get_current_date_epoch_seconds())
     }
@@ -225,7 +244,7 @@ fn convert_min_size(input: &str) -> Option<usize> {
     }
 }
 
-fn get_config_locations(base: &Path) -> Vec<PathBuf> {
+fn get_config_locations(base: PathBuf) -> Vec<PathBuf> {
     vec![
         base.join(".dust.toml"),
         base.join(".config").join("dust").join("config.toml"),
@@ -248,12 +267,12 @@ pub fn get_config(conf_path: Option<&String>) -> Config {
             }
         }
         None => {
-            if let Some(home) = directories::BaseDirs::new() {
-                for path in get_config_locations(home.home_dir()) {
-                    if path.exists() {
-                        if let Ok(config) = Config::from_config_file(&path) {
-                            return config;
-                        }
+            if let Some(home) = std::env::home_dir() {
+                for path in get_config_locations(home) {
+                    if path.exists()
+                        && let Ok(config) = Config::from_config_file(&path)
+                    {
+                        return config;
                     }
                 }
             }
@@ -380,4 +399,33 @@ mod tests {
     fn get_filetime_args(args: Vec<&str>) -> Cli {
         Cli::parse_from(args)
     }
+
+    #[test]
+    fn test_get_number_of_lines() {
+        // No config and no flag.
+        let c = Config::default();
+        let args = get_args(vec![]);
+        assert_eq!(c.get_number_of_lines(&args), None);
+
+        // Config is not defined and flag is defined.
+        let c = Config::default();
+        let args = get_args(vec!["dust", "--number-of-lines", "5"]);
+        assert_eq!(c.get_number_of_lines(&args), Some(5));
+
+        // Config is defined and flag is not defined.
+        let c = Config {
+            number_of_lines: Some(3),
+            ..Default::default()
+        };
+        let args = get_args(vec![]);
+        assert_eq!(c.get_number_of_lines(&args), Some(3));
+
+        // Both config and flag are defined.
+        let c = Config {
+            number_of_lines: Some(3),
+            ..Default::default()
+        };
+        let args = get_args(vec!["dust", "--number-of-lines", "5"]);
+        assert_eq!(c.get_number_of_lines(&args), Some(5));
+    }
 }
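The new `get_number_of_lines` getter follows the same precedence rule the other getters use: a value given on the command line wins over one from the config file. A hedged sketch with hypothetical, simplified types (not the real `Config`/`Cli`), showing the same rule written with `Option::or`:

```rust
// Hypothetical demo types; only the precedence logic mirrors the diff above.
struct ConfigDemo {
    number_of_lines: Option<usize>,
}

struct CliDemo {
    number_of_lines: Option<usize>,
}

fn effective_number_of_lines(config: &ConfigDemo, cli: &CliDemo) -> Option<usize> {
    // Same outcome as the if/else in get_number_of_lines: CLI first, then config.
    cli.number_of_lines.or(config.number_of_lines)
}

fn main() {
    let config = ConfigDemo { number_of_lines: Some(3) };
    // CLI value set: it wins.
    assert_eq!(
        effective_number_of_lines(&config, &CliDemo { number_of_lines: Some(5) }),
        Some(5)
    );
    // CLI value unset: fall back to the config file.
    assert_eq!(
        effective_number_of_lines(&config, &CliDemo { number_of_lines: None }),
        Some(3)
    );
}
```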
@@ -69,12 +69,11 @@ pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
 
 // Remove files which have the same inode, we don't want to double count them.
 fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
-    if !walk_data.use_apparent_size {
-        if let Some(id) = x.inode_device {
-            if !inodes.insert(id) {
-                return None;
-            }
-        }
+    if !walk_data.use_apparent_size
+        && let Some(id) = x.inode_device
+        && !inodes.insert(id)
+    {
+        return None;
     }
 
     // Sort Nodes so iteration order is predictable
@@ -156,10 +155,10 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
 
     if !walk_data.allowed_filesystems.is_empty() {
         let size_inode_device = get_metadata(entry.path(), false, follow_links);
-        if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
-            if !walk_data.allowed_filesystems.contains(&dev) {
-                return true;
-            }
+        if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device
+            && !walk_data.allowed_filesystems.contains(&dev)
+        {
+            return true;
         }
     }
     if walk_data.filter_accessed_time.is_some()
@@ -167,20 +166,19 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
         || walk_data.filter_changed_time.is_some()
     {
         let size_inode_device = get_metadata(entry.path(), false, follow_links);
-        if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
-            if entry.path().is_file()
-                && [
-                    (&walk_data.filter_modified_time, modified_time),
-                    (&walk_data.filter_accessed_time, accessed_time),
-                    (&walk_data.filter_changed_time, changed_time),
-                ]
-                .iter()
-                .any(|(filter_time, actual_time)| {
-                    is_filtered_out_due_to_file_time(filter_time, *actual_time)
-                })
-            {
-                return true;
-            }
+        if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device
+            && entry.path().is_file()
+            && [
+                (&walk_data.filter_modified_time, modified_time),
+                (&walk_data.filter_accessed_time, accessed_time),
+                (&walk_data.filter_changed_time, changed_time),
+            ]
+            .iter()
+            .any(|(filter_time, actual_time)| {
+                is_filtered_out_due_to_file_time(filter_time, *actual_time)
+            })
+        {
+            return true;
         }
     }
 
@@ -222,32 +220,30 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
 
                 // return walk(entry.path(), walk_data, depth)
 
-                if !ignore_file(entry, walk_data) {
-                    if let Ok(data) = entry.file_type() {
-                        if data.is_dir()
-                            || (walk_data.follow_links && data.is_symlink())
-                        {
-                            return walk(entry.path(), walk_data, depth + 1);
-                        }
-
-                        let node = build_node(
-                            entry.path(),
-                            vec![],
-                            data.is_symlink(),
-                            data.is_file(),
-                            depth,
-                            walk_data,
-                        );
-
-                        prog_data.num_files.fetch_add(1, ORDERING);
-                        if let Some(ref file) = node {
-                            prog_data
-                                .total_file_size
-                                .fetch_add(file.size, ORDERING);
-                        }
-
-                        return node;
-                    }
+                if !ignore_file(entry, walk_data)
+                    && let Ok(data) = entry.file_type()
+                {
+                    if data.is_dir()
+                        || (walk_data.follow_links && data.is_symlink())
+                    {
+                        return walk(entry.path(), walk_data, depth + 1);
+                    }
+
+                    let node = build_node(
+                        entry.path(),
+                        vec![],
+                        data.is_symlink(),
+                        data.is_file(),
+                        depth,
+                        walk_data,
+                    );
+
+                    prog_data.num_files.fetch_add(1, ORDERING);
+                    if let Some(ref file) = node {
+                        prog_data.total_file_size.fetch_add(file.size, ORDERING);
+                    }
+
+                    return node;
                 }
             }
             Err(ref failed) => {
@@ -305,7 +301,9 @@ fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> b
         }
         std::io::ErrorKind::Interrupted => {
             editable_error.interrupted_error += 1;
-            if editable_error.interrupted_error > 3 {
+            // This does happen on some systems. It was set to 3 but sometimes dust runs would exceed this
+            // However, if there is no limit this results in infinite retrys and dust never finishes
+            if editable_error.interrupted_error > 999 {
                 panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
             } else {
                 return true;
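The refactors above collapse nested `if`/`if let` blocks into let-chains, a feature that relies on the `edition = "2024"` declared in Cargo.toml. A minimal illustrative sketch of the pattern (hypothetical function, not dust code) that mirrors the shape of the `clean_inodes` change:

```rust
use std::collections::HashSet;

// Illustrative only: collapse `if !a { if let Some(id) = b { if !c { return None } } }`
// into a single let-chain, as the clean_inodes refactor above does.
// Requires edition = "2024" for the `if let ... && ...` chain.
fn record_once(seen: &mut HashSet<u64>, id: Option<u64>, skip_dedup: bool) -> Option<u64> {
    if !skip_dedup
        && let Some(id) = id
        && !seen.insert(id)
    {
        // Already seen this id: drop it, mirroring the inode dedup above.
        return None;
    }
    id
}

fn main() {
    let mut seen = HashSet::new();
    assert_eq!(record_once(&mut seen, Some(7), false), Some(7));
    assert_eq!(record_once(&mut seen, Some(7), false), None); // duplicate removed
    assert_eq!(record_once(&mut seen, Some(7), true), Some(7)); // dedup disabled
}
```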
@@ -125,9 +125,9 @@ impl DrawData<'_> {
 
 pub fn draw_it(
     idd: InitialDisplayData,
+    root_node: &DisplayNode,
     no_percent_bars: bool,
     terminal_width: usize,
-    root_node: &DisplayNode,
     skip_total: bool,
 ) {
     let num_chars_needed_on_left_most = if idd.by_filecount {
@@ -403,7 +403,7 @@ fn get_pretty_name(
         .ls_colors
         .style_for_path_with_metadata(&node.name, meta_result.as_ref().ok());
     let ansi_style = directory_color
-        .map(Style::to_ansi_term_style)
+        .map(Style::to_nu_ansi_term_style)
         .unwrap_or_default();
     let out = ansi_style.paint(name_and_padding);
     format!("{out}")
@@ -439,6 +439,9 @@ pub fn get_number_format(output_str: &str) -> Option<(u64, char)> {
 }
 
 pub fn human_readable_number(size: u64, output_str: &str) -> String {
+    if output_str == "count" {
+        return size.to_string();
+    };
     match get_number_format(output_str) {
         Some((x, u)) => {
             format!("{}{}", (size / x), u)
@@ -539,6 +542,13 @@ mod tests {
         assert_eq!(s, "short 3 4.0K 100%");
     }
 
+    #[test]
+    fn test_machine_readable_filecount() {
+        assert_eq!(human_readable_number(1, "count"), "1");
+        assert_eq!(human_readable_number(1000, "count"), "1000");
+        assert_eq!(human_readable_number(1024, "count"), "1024");
+    }
+
     #[test]
     fn test_human_readable_number() {
         assert_eq!(human_readable_number(1, ""), "1B");
@@ -25,16 +25,14 @@ pub fn get_biggest(
     display_data: AggregateData,
     by_filetime: &Option<FileTime>,
     keep_collapsed: HashSet<PathBuf>,
-) -> Option<DisplayNode> {
-    if top_level_nodes.is_empty() {
-        // perhaps change this, bring back Error object?
-        return None;
-    }
+) -> DisplayNode {
     let mut heap = BinaryHeap::new();
     let number_top_level_nodes = top_level_nodes.len();
     let root;
 
-    if number_top_level_nodes > 1 {
+    if number_top_level_nodes == 0 {
+        root = total_node_builder(0, vec![])
+    } else if number_top_level_nodes > 1 {
         let size = if by_filetime.is_some() {
             top_level_nodes
                 .iter()
@@ -46,28 +44,24 @@ pub fn get_biggest(
         };
 
         let nodes = handle_duplicate_top_level_names(top_level_nodes, display_data.short_paths);
 
-        root = Node {
-            name: PathBuf::from("(total)"),
-            size,
-            children: nodes,
-            inode_device: None,
-            depth: 0,
-        };
-
         // Always include the base nodes if we add a 'parent' (total) node
+        root = total_node_builder(size, nodes);
         heap = always_add_children(&display_data, &root, heap);
     } else {
         root = top_level_nodes.into_iter().next().unwrap();
         heap = add_children(&display_data, &root, heap);
     }
 
-    Some(fill_remaining_lines(
-        heap,
-        &root,
-        display_data,
-        keep_collapsed,
-    ))
+    fill_remaining_lines(heap, &root, display_data, keep_collapsed)
 }
+
+fn total_node_builder(size: u64, children: Vec<Node>) -> Node {
+    Node {
+        name: PathBuf::from("(total)"),
+        size,
+        children,
+        inode_device: None,
+        depth: 0,
+    }
+}
 
 pub fn fill_remaining_lines<'a>(
@@ -15,7 +15,7 @@ pub fn get_all_file_types(
     top_level_nodes: &[Node],
     n: usize,
     by_filetime: &Option<FileTime>,
-) -> Option<DisplayNode> {
+) -> DisplayNode {
     let ext_nodes = {
         let mut extension_cumulative_sizes = HashMap::new();
         build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
@@ -67,13 +67,11 @@ pub fn get_all_file_types(
         displayed.iter().map(|node| node.size).sum()
     };
 
-    let result = DisplayNode {
+    DisplayNode {
         name: PathBuf::from("(total)"),
         size: actual_size,
         children: displayed,
-    };
-
-    Some(result)
+    }
 }
 
 fn build_by_all_file_types<'a>(
304  src/main.rs
@@ -11,6 +11,8 @@ mod progress;
 mod utils;
 
+use crate::cli::Cli;
 use crate::config::Config;
+use crate::display_node::DisplayNode;
 use crate::progress::RuntimeErrors;
 use clap::Parser;
 use dir_walker::WalkData;
@@ -20,13 +22,14 @@ use progress::PIndicator;
 use regex::Error;
 use std::collections::HashSet;
 use std::env;
-use std::fs::read_to_string;
+use std::fs::{read, read_to_string};
 use std::io;
+use std::io::Read;
 use std::panic;
 use std::process;
 use std::sync::Arc;
 use std::sync::Mutex;
-use sysinfo::{System, SystemExt};
+use sysinfo::System;
 use utils::canonicalize_absolute_path;
 
 use self::display::draw_it;
@@ -125,34 +128,15 @@ fn main() {
     })
     .expect("Error setting Ctrl-C handler");
 
-    let target_dirs = match config.get_files_from(&options) {
-        Some(path) => {
-            if path == "-" {
-                let mut targets_to_add = io::stdin()
-                    .lines()
-                    .map_while(Result::ok)
-                    .collect::<Vec<String>>();
-
-                if targets_to_add.is_empty() {
-                    eprintln!("No input provided, defaulting to current directory");
-                    targets_to_add.push(".".to_owned());
-                }
-                targets_to_add
-            } else {
-                // read file
-                match read_to_string(path) {
-                    Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
-                    Err(e) => {
-                        eprintln!("Error reading file: {e}");
-                        vec![".".to_owned()]
-                    }
-                }
-            }
-        }
-        None => match options.params {
+    let target_dirs = if let Some(path) = config.get_files0_from(&options) {
+        read_paths_from_source(&path, true)
+    } else if let Some(path) = config.get_files_from(&options) {
+        read_paths_from_source(&path, false)
+    } else {
+        match options.params {
             Some(ref values) => values.clone(),
             None => vec![".".to_owned()],
-        },
+        }
     };
 
     let summarize_file_types = options.file_types;
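The new `target_dirs` logic delegates to `read_paths_from_source` (defined in a later hunk of this file), and the boolean it passes only selects how the input is split. A minimal sketch of those two splitting modes, mirroring the `split('\0')` and `lines()` branch inside that function:

```rust
// Sketch only: the same two list formats the --files0-from / --files-from flags accept.
fn split_paths(text: &str, null_terminated: bool) -> Vec<String> {
    if null_terminated {
        // --files0-from: NUL-terminated entries, e.g. as produced by `find -print0`.
        text.split('\0')
            .filter(|s| !s.is_empty())
            .map(str::to_owned)
            .collect()
    } else {
        // --files-from: one path per line.
        text.lines().map(str::to_owned).collect()
    }
}

fn main() {
    assert_eq!(split_paths("a\0b\0", true), vec!["a", "b"]);
    assert_eq!(split_paths("a\nb\n", false), vec!["a", "b"]);
}
```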
@@ -170,7 +154,7 @@ fn main() {
     // If depth is set, then we set the default number_of_lines to be max
     // instead of screen height
 
-    let number_of_lines = match options.number_of_lines {
+    let number_of_lines = match config.get_number_of_lines(&options) {
         Some(val) => val,
         None => {
             if depth != usize::MAX {
@@ -218,9 +202,12 @@ fn main() {
     let limit_filesystem = options.limit_filesystem;
     let follow_links = options.dereference_links;
 
-    let allowed_filesystems = limit_filesystem
-        .then(|| get_filesystem_devices(&target_dirs, follow_links))
-        .unwrap_or_default();
+    let allowed_filesystems = if limit_filesystem {
+        get_filesystem_devices(&target_dirs, follow_links)
+    } else {
+        Default::default()
+    };
 
     let simplified_dirs = simplify_dir_names(&target_dirs);
 
     let ignored_full_path: HashSet<PathBuf> = ignore_directories
@@ -270,32 +257,98 @@ fn main() {
         progress_data: indicator.data.clone(),
         errors: errors_for_rayon,
     };
 
     let threads_to_use = config.get_threads(&options);
     let stack_size = config.get_custom_stack_size(&options);
-    init_rayon(&stack_size, &threads_to_use);
-
-    let top_level_nodes = walk_it(simplified_dirs, &walk_data);
-
-    let tree = match summarize_file_types {
-        true => get_all_file_types(&top_level_nodes, number_of_lines, &by_filetime),
-        false => {
-            let agg_data = AggregateData {
-                min_size: config.get_min_size(&options),
-                only_dir: config.get_only_dir(&options),
-                only_file: config.get_only_file(&options),
-                number_of_lines,
-                depth,
-                using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
-                short_paths: !config.get_full_paths(&options),
-            };
-            get_biggest(top_level_nodes, agg_data, &by_filetime, keep_collapsed)
-        }
-    };
-
-    // Must have stopped indicator before we print to stderr
-    indicator.stop();
+    init_rayon(&stack_size, &threads_to_use).install(|| {
+        let top_level_nodes = walk_it(simplified_dirs, &walk_data);
+
+        let tree = match summarize_file_types {
+            true => get_all_file_types(&top_level_nodes, number_of_lines, walk_data.by_filetime),
+            false => {
+                let agg_data = AggregateData {
+                    min_size: config.get_min_size(&options),
+                    only_dir: config.get_only_dir(&options),
+                    only_file: config.get_only_file(&options),
+                    number_of_lines,
+                    depth,
+                    using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
+                    short_paths: !config.get_full_paths(&options),
+                };
+                get_biggest(
+                    top_level_nodes,
+                    agg_data,
+                    walk_data.by_filetime,
+                    keep_collapsed,
+                )
+            }
+        };
+
+        // Must have stopped indicator before we print to stderr
+        indicator.stop();
+
+        let print_errors = config.get_print_errors(&options);
+        let final_errors = walk_data.errors.lock().unwrap();
+        print_any_errors(print_errors, &final_errors);
+
+        if tree.children.is_empty() && !final_errors.file_not_found.is_empty() {
+            std::process::exit(1)
+        } else {
+            print_output(
+                config,
+                options,
+                tree,
+                walk_data.by_filecount,
+                is_colors,
+                terminal_width,
+            )
+        }
+    });
+}
+
+fn print_output(
+    config: Config,
+    options: Cli,
+    tree: DisplayNode,
+    by_filecount: bool,
+    is_colors: bool,
+    terminal_width: usize,
+) {
     let output_format = config.get_output_format(&options);
 
-    let final_errors = walk_data.errors.lock().unwrap();
+    if config.get_output_json(&options) {
+        OUTPUT_TYPE.with(|wrapped| {
+            if by_filecount {
+                wrapped.replace("count".to_string());
+            } else {
+                wrapped.replace(output_format);
+            }
+        });
+        println!("{}", serde_json::to_string(&tree).unwrap());
+    } else {
+        let idd = InitialDisplayData {
+            short_paths: !config.get_full_paths(&options),
+            is_reversed: !config.get_reverse(&options),
+            colors_on: is_colors,
+            by_filecount,
+            by_filetime: config.get_filetime(&options),
+            is_screen_reader: config.get_screen_reader(&options),
+            output_format,
+            bars_on_right: config.get_bars_on_right(&options),
+        };
+
+        draw_it(
+            idd,
+            &tree,
+            config.get_no_bars(&options),
+            terminal_width,
+            config.get_skip_total(&options),
+        )
+    }
+}
+
+fn print_any_errors(print_errors: bool, final_errors: &RuntimeErrors) {
     if !final_errors.file_not_found.is_empty() {
         let err = final_errors
             .file_not_found
@@ -303,17 +356,17 @@
             .map(|a| a.as_ref())
             .collect::<Vec<&str>>()
             .join(", ");
-        eprintln!("No such file or directory: {}", err);
+        eprintln!("No such file or directory: {err}");
     }
     if !final_errors.no_permissions.is_empty() {
-        if config.get_print_errors(&options) {
+        if print_errors {
             let err = final_errors
                 .no_permissions
                 .iter()
                 .map(|a| a.as_ref())
                 .collect::<Vec<&str>>()
                 .join(", ");
-            eprintln!("Did not have permissions for directories: {}", err);
+            eprintln!("Did not have permissions for directories: {err}");
         } else {
             eprintln!(
                 "Did not have permissions for all directories (add --print-errors to see errors)"
@@ -327,76 +380,105 @@
             .map(|a| a.as_ref())
             .collect::<Vec<&str>>()
             .join(", ");
-        eprintln!("Unknown Error: {}", err);
+        eprintln!("Unknown Error: {err}");
     }
 }
 
-    if let Some(root_node) = tree {
-        if config.get_output_json(&options) {
-            OUTPUT_TYPE.with(|wrapped| {
-                wrapped.replace(output_format);
-            });
-            println!("{}", serde_json::to_string(&root_node).unwrap());
+fn read_paths_from_source(path: &str, null_terminated: bool) -> Vec<String> {
+    let from_stdin = path == "-";
+
+    let result: Result<Vec<String>, Option<String>> = (|| {
+        // 1) read bytes
+        let bytes = if from_stdin {
+            let mut b = Vec::new();
+            io::stdin().lock().read_to_end(&mut b).map_err(|_| None)?;
+            b
         } else {
-            let idd = InitialDisplayData {
-                short_paths: !config.get_full_paths(&options),
-                is_reversed: !config.get_reverse(&options),
-                colors_on: is_colors,
-                by_filecount,
-                by_filetime,
-                is_screen_reader: config.get_screen_reader(&options),
-                output_format,
-                bars_on_right: config.get_bars_on_right(&options),
-            };
+            read(path).map_err(|e| Some(e.to_string()))?
+        };
 
-            draw_it(
-                idd,
-                config.get_no_bars(&options),
-                terminal_width,
-                &root_node,
-                config.get_skip_total(&options),
-            )
+        let text = std::str::from_utf8(&bytes).map_err(|e| {
+            if from_stdin {
+                None
+            } else {
+                Some(e.to_string())
+            }
+        })?;
+        let items: Vec<String> = if null_terminated {
+            text.split('\0')
+                .filter(|s| !s.is_empty())
+                .map(str::to_owned)
+                .collect()
+        } else {
+            text.lines().map(str::to_owned).collect()
+        };
+        if from_stdin && items.is_empty() {
+            return Err(None);
+        }
+        Ok(items)
+    })();
+
+    match result {
+        Ok(v) => v,
+        Err(None) => {
+            eprintln!("No files provided, defaulting to current directory");
+            vec![".".to_owned()]
+        }
+        Err(Some(msg)) => {
+            eprintln!("Failed to read file: {msg}");
+            vec![".".to_owned()]
         }
     }
 }
 
-fn init_rayon(stack_size: &Option<usize>, threads: &Option<usize>) {
-    // Rayon seems to raise this error on 32-bit builds
-    // The global thread pool has not been initialized.: ThreadPoolBuildError { kind: GlobalPoolAlreadyInitialized }
-    if cfg!(target_pointer_width = "64") {
-        let result = panic::catch_unwind(|| build_thread_pool(*stack_size, *threads));
-        if result.is_err() {
-            eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1")
+fn init_rayon(stack: &Option<usize>, threads: &Option<usize>) -> rayon::ThreadPool {
+    let stack_size = match stack {
+        Some(s) => Some(*s),
+        None => {
+            // Do not increase the stack size on a 32 bit system, it will fail
+            if cfg!(target_pointer_width = "32") {
+                None
+            } else {
+                let large_stack = usize::pow(1024, 3);
+                let mut sys = System::new_all();
+                sys.refresh_memory();
+                // Larger stack size if possible to handle cases with lots of nested directories
+                let available = sys.available_memory();
+                if available > (large_stack * threads.unwrap_or(1)).try_into().unwrap() {
+                    Some(large_stack)
+                } else {
+                    None
+                }
+            }
+        }
+    };
+
+    match build_thread_pool(stack_size, threads) {
+        Ok(pool) => pool,
+        Err(err) => {
+            eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1");
+            if stack.is_none() && stack_size.is_some() {
+                // stack parameter was none, try with default stack size
+                if let Ok(pool) = build_thread_pool(None, threads) {
+                    eprintln!("WARNING: not using large stack size, got error: {err}");
+                    return pool;
+                }
+            }
+            panic!("{err}");
         }
     }
 }
 
 fn build_thread_pool(
-    stack: Option<usize>,
-    threads: Option<usize>,
-) -> Result<(), rayon::ThreadPoolBuildError> {
-    let mut pool = rayon::ThreadPoolBuilder::new();
-
-    if let Some(thread_count) = threads {
-        pool = pool.num_threads(thread_count);
-    }
-
-    let stack_size = match stack {
-        Some(s) => Some(s),
-        None => {
-            let large_stack = usize::pow(1024, 3);
-            let mut s = System::new();
-            s.refresh_memory();
-            // Larger stack size if possible to handle cases with lots of nested directories
-            let available = s.available_memory();
-            if available > large_stack.try_into().unwrap() {
-                Some(large_stack)
-            } else {
-                None
-            }
-        }
-    };
+    stack_size: Option<usize>,
+    threads: &Option<usize>,
+) -> Result<rayon::ThreadPool, rayon::ThreadPoolBuildError> {
+    let mut pool_builder = rayon::ThreadPoolBuilder::new();
     if let Some(stack_size_param) = stack_size {
-        pool = pool.stack_size(stack_size_param);
+        pool_builder = pool_builder.stack_size(stack_size_param);
     }
-    pool.build_global()
+    if let Some(thread_count) = threads {
+        pool_builder = pool_builder.num_threads(*thread_count);
+    }
+    pool_builder.build()
 }
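The reworked `init_rayon` above now returns a `rayon::ThreadPool` that `main` runs its work on via `install`, instead of configuring the global pool with `build_global`. A minimal, self-contained sketch of that builder-plus-install pattern (illustrative thread and stack values only, not dust's actual sizing logic):

```rust
use rayon::prelude::*;

fn main() {
    // Build a dedicated pool; num_threads/stack_size stand in for the values
    // dust derives from its config and available memory.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(2)
        .stack_size(8 * 1024 * 1024)
        .build()
        .expect("Problem initializing rayon, try: export RAYON_NUM_THREADS=1");

    // Everything spawned inside `install` runs on this pool, mirroring how
    // main wraps walk_it and the display code in the diff above.
    let total: u64 = pool.install(|| (0..1_000u64).into_par_iter().sum());
    assert_eq!(total, 499_500);
}
```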
0  tests/test_dir_files_from/a_file (new file)

BIN  tests/test_dir_files_from/files0_from.txt (new binary file, not shown)

2  tests/test_dir_files_from/files_from.txt (new file)

@@ -0,0 +1,2 @@
+tests/test_dir_files_from/a_file
+tests/test_dir_files_from/hello_file

1  tests/test_dir_files_from/hello_file (new file)

@@ -0,0 +1 @@
+hello
@@ -62,6 +62,8 @@ fn initialize() {
 fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
     initialize();
     let mut to_run = &mut Command::cargo_bin("dust").unwrap();
+    // Hide progress bar
+    to_run.arg("-P");
     for p in command_args {
         to_run = to_run.arg(p);
     }
@@ -62,6 +62,14 @@ pub fn test_d_flag_works() {
     assert!(!output.contains("hello_file"));
 }
 
+#[test]
+pub fn test_d0_works_on_multiple() {
+    // We should see the top level directory but not the sub dirs / files:
+    let output = build_command(vec!["-d", "0", "tests/test_dir/", "tests/test_dir2"]);
+    assert!(output.contains("test_dir "));
+    assert!(output.contains("test_dir2"));
+}
+
 #[test]
 pub fn test_threads_flag_works() {
     let output = build_command(vec!["-T", "1", "tests/test_dir/"]);
@@ -96,10 +104,60 @@ pub fn test_ignore_all_in_file() {
     assert!(!output.contains(".secret"));
 }
 
+#[test]
+pub fn test_files_from_flag_file() {
+    let output = build_command(vec![
+        "--files-from",
+        "tests/test_dir_files_from/files_from.txt",
+    ]);
+    assert!(output.contains("a_file"));
+    assert!(output.contains("hello_file"));
+}
+
+#[test]
+pub fn test_files0_from_flag_file() {
+    let output = build_command(vec![
+        "--files0-from",
+        "tests/test_dir_files_from/files0_from.txt",
+    ]);
+    assert!(output.contains("a_file"));
+    assert!(output.contains("hello_file"));
+}
+
+#[test]
+pub fn test_files_from_flag_stdin() {
+    let mut cmd = Command::cargo_bin("dust").unwrap();
+    cmd.arg("-P").arg("--files-from").arg("-");
+    let input = b"tests/test_dir_files_from/a_file\ntests/test_dir_files_from/hello_file\n";
+    cmd.write_stdin(input.as_ref());
+    let finished = &cmd.unwrap();
+    let stderr = std::str::from_utf8(&finished.stderr).unwrap();
+    assert_eq!(stderr, "");
+    let output = std::str::from_utf8(&finished.stdout).unwrap();
+    assert!(output.contains("a_file"));
+    assert!(output.contains("hello_file"));
+}
+
+#[test]
+pub fn test_files0_from_flag_stdin() {
+    let mut cmd = Command::cargo_bin("dust").unwrap();
+    cmd.arg("-P").arg("--files0-from").arg("-");
+    let input = b"tests/test_dir_files_from/a_file\0tests/test_dir_files_from/hello_file\0";
+    cmd.write_stdin(input.as_ref());
+    let finished = &cmd.unwrap();
+    let stderr = std::str::from_utf8(&finished.stderr).unwrap();
+    assert_eq!(stderr, "");
+    let output = std::str::from_utf8(&finished.stdout).unwrap();
+    assert!(output.contains("a_file"));
+    assert!(output.contains("hello_file"));
+}
+
 #[test]
 pub fn test_with_bad_param() {
     let mut cmd = Command::cargo_bin("dust").unwrap();
-    let result = cmd.arg("bad_place").unwrap();
+    cmd.arg("-P").arg("bad_place");
+    let output_error = cmd.unwrap_err();
+    let result = output_error.as_output().unwrap();
     let stderr = str::from_utf8(&result.stderr).unwrap();
     assert!(stderr.contains("No such file or directory"));
 }