Compare commits


35 Commits

Author SHA1 Message Date
andy.boot
759658ee96 feat: Handle duplicate dir names better
If we run `dust /usr/*/Trash`, we see several 'Trash' directories in the
output but do not know which user they belong to.

With this fix, if we see duplicate names in a directory we also display
the parent directory name.
2025-02-07 20:15:11 +00:00
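A minimal sketch of the disambiguation idea described above, assuming a hypothetical helper `display_name` and a precomputed set of duplicated names (not dust's actual code):

use std::collections::HashSet;
use std::path::Path;

// If a file name occurs more than once in the output, fall back to
// "parent/name" so entries such as several 'Trash' directories stay distinguishable.
fn display_name(path: &Path, duplicated_names: &HashSet<String>) -> String {
    let name = match path.file_name() {
        Some(n) => n.to_string_lossy().into_owned(),
        None => return path.display().to_string(),
    };
    if duplicated_names.contains(&name) {
        if let Some(parent) = path.parent().and_then(|p| p.file_name()) {
            return format!("{}/{}", parent.to_string_lossy(), name);
        }
    }
    name
}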
andy.boot
a962b80eec deps: cargo update 2025-02-06 00:30:39 +00:00
andy.boot
01c0aaeade feat: New --collapse flag
--collapse keeps the given directory collapsed in the output instead of expanding it.
2025-01-27 22:00:08 +00:00
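A small sketch of how a display routine might honour such a flag, assuming a hypothetical `should_expand` helper and a list of collapsed paths (not the actual dust implementation):

use std::path::{Path, PathBuf};

// A directory whose path or name matches a --collapse argument is printed
// but its children are not expanded.
fn should_expand(dir: &Path, collapsed: &[PathBuf]) -> bool {
    !collapsed
        .iter()
        .any(|c| dir.ends_with(c) || dir.file_name() == c.file_name())
}

For example, should_expand(Path::new("./app/node_modules"), &[PathBuf::from("node_modules")]) returns false, so that subtree stays collapsed.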
andy.boot
6cbd736e11 fix: Bars in --skip-total flag
Previously we calculated the % by taking the longest bar. If you use
--skip-total, the longest bar is not the total, so we need to sum up all the
children of root to work out what the largest size is.
2025-01-26 11:22:37 +00:00
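A sketch of the corrected reference size, using a simplified stand-in for dust's Node type (the function name and shape here are illustrative, not the real code):

// Simplified stand-in for the tree node, for illustration only.
struct Node {
    size: u64,
    children: Vec<Node>,
}

// With --skip-total the 100% reference can no longer be the (suppressed)
// total row, so it becomes the sum of the root's direct children.
fn bar_reference_size(root: &Node, skip_total: bool) -> u64 {
    if skip_total {
        root.children.iter().map(|c| c.size).sum()
    } else {
        root.size
    }
}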
andy.boot
8e087e09da fix: Handle Interrupted Error
Rust may throw Interrupted errors while scanning the filesystem. These may
be retried:
https://doc.rust-lang.org/std/io/enum.ErrorKind.html#variant.Interrupted
2025-01-26 11:22:04 +00:00
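A minimal retry wrapper illustrating the pattern the commit points at (a generic sketch, not dust's actual code): operations that fail with ErrorKind::Interrupted are simply retried.

use std::io;

// Retry an io operation for as long as it fails with ErrorKind::Interrupted.
fn retry_if_interrupted<T>(mut op: impl FnMut() -> io::Result<T>) -> io::Result<T> {
    loop {
        match op() {
            Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
            other => return other,
        }
    }
}

Usage might look like retry_if_interrupted(|| std::fs::metadata("/some/path")).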
andy.boot
9ba0b6d1d0 feat: Support -o flag for JSON output
requested in: https://github.com/bootandy/dust/issues/450
2025-01-17 20:48:56 +00:00
andy.boot
775d841840 style: clippy 2025-01-15 21:37:29 +00:00
n4n5
609fc1e760 clippy 2025-01-15 19:14:00 +00:00
n4n5
eeb686562d push config option 2025-01-15 19:14:00 +00:00
n4n5
e0eaeccc0b add wget install 2025-01-15 19:08:18 +00:00
n4n5
2e56a261e0 clippy 2025-01-15 19:08:18 +00:00
n4n5
bfe7323b20 fix typo Operator 2025-01-15 19:08:18 +00:00
janbridley
1372815007 Format src/display.rs 2024-11-08 22:50:03 +00:00
janbridley
7c9e2f1833 Enable pretty format for petabyte data 2024-11-08 22:50:03 +00:00
Camille Louédoc-Eyriès
1d40ca0870 docs(readme): warn about snap-dust limitations 2024-10-17 23:03:35 +01:00
Yukai Chou
86b2bd944c refactor: simplify get_height_of_terminal() and get_width... 2024-09-16 22:01:10 +01:00
andy.boot
b63608604a docs: Update README.md 2024-09-03 21:57:36 +01:00
andy.boot
b24fab720d deps: cargo update 2024-09-03 21:54:20 +01:00
wugeer
d81b9065a1 fix: man-page and completions missing in debian package 2024-09-03 21:49:23 +01:00
andy.boot
38c4d23732 docs: Update README.md
include snap
2024-08-21 18:53:55 +01:00
wugeer
99bf0fc041 docs: update sample config.toml 2024-08-21 18:47:15 +01:00
wugeer
75d0566949 feat: use pre-commit hooks to standardize commit messages 2024-08-21 18:46:20 +01:00
NoisyCoil
489d9ada44 fix: 64-bit atomics for platforms with no 64-bit atomics
Closes: #423
2024-08-09 19:28:20 +01:00
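One common way to go with the Cargo.toml gate added further down in this diff ([target.'cfg(not(target_has_atomic = "64"))'.dependencies] portable-atomic) is a conditional import; this is an assumption about the shape of the fix, not a quote of it:

// Use std's 64-bit atomic where the target supports it, otherwise fall back
// to the portable-atomic crate pulled in for such targets.
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;

// Hypothetical counter, for illustration only.
static TOTAL_SCANNED_BYTES: AtomicU64 = AtomicU64::new(0);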
wugeer
f48fcc790a feat: support Dust tree by age 2024-08-07 19:31:22 +01:00
wugeer
733117d0f6 fix: retrieve metadata for symbolic links without following them
Previously, the function get_metadata in platform.rs used `fs::metadata`, which follows symbolic links
and returns metadata for the target file. This caused issue #421 (du / dust disagreement)
when trying to determine properties of the symbolic link itself.
2024-07-31 00:16:59 +01:00
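The core distinction the commit relies on, as a small standalone sketch (dust's real get_metadata returns a richer tuple; this simplified signature is illustrative):

use std::fs::{self, Metadata};
use std::io;
use std::path::Path;

// fs::metadata follows symbolic links and reports on the target;
// fs::symlink_metadata reports on the link itself.
fn link_aware_metadata(path: &Path, follow_links: bool) -> io::Result<Metadata> {
    if follow_links {
        fs::metadata(path)
    } else {
        fs::symlink_metadata(path)
    }
}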
andy.boot
dbd18f90e7 docs: update release procedure
Previously I accidentally tagged a release before building it, leading
to an out-of-date Cargo.lock file.
2024-07-17 19:35:44 +01:00
andy.boot
dad88ad660 version: increment version 2024-07-17 19:35:44 +01:00
andy.boot
00a7c410a0 ci: fix warning in windows build 2024-07-17 19:35:44 +01:00
andy.boot
1ab0b2f531 refactor: rename variable 2024-07-17 19:35:44 +01:00
andy.boot
c09073151d fix: perf issues with v1.1.0
Bring performance back
2024-07-17 19:16:46 +01:00
andy.boot
b4a517a096 version: update version
Also update docs, so I don't partially update a version number again
2024-07-16 22:48:27 +01:00
andy.boot
e654d30f9d version: increment version 2024-07-16 22:05:17 +01:00
Maksim Bondarenkov
4fc1897678 deps: update winapi-util to 0.1.8 (#410) 2024-07-03 21:41:52 +01:00
andy.boot
08b9c756ee fix: total_ordering of sort_by_inode
Previously, sort_by_inode could result in an unstable ordering. This change
ensures we will always have a reliable total ordering.
2024-06-30 21:57:58 +01:00
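A toy illustration of the total-ordering idea (the real sort_by_inode compares Node values; the tuple key here is purely for demonstration): comparing on a full (inode, device, name) key means distinct entries never tie, so the sort result is deterministic.

use std::cmp::Ordering;

// Compare on the whole key so two distinct entries never compare as Equal.
fn compare_entries(a: &(u64, u64, String), b: &(u64, u64, String)) -> Ordering {
    (a.0, a.1, &a.2).cmp(&(b.0, b.1, &b.2))
}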
n4n5
394231683d feat: Add flag to read from file/stdin (#405)
* from standard

* improve

* happy clippy

* explicit arg option

* fix problem
2024-06-27 23:51:47 +01:00
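A minimal sketch of reading NUL-terminated names from a file, or from stdin when the argument is -, in the spirit of the flag described above (hypothetical helper, not the code in this PR):

use std::io::{self, Read};

// Read the whole source, split on NUL bytes, and drop empty entries.
fn read_nul_separated(source: &str) -> io::Result<Vec<String>> {
    let mut buf = Vec::new();
    if source == "-" {
        io::stdin().read_to_end(&mut buf)?;
    } else {
        buf = std::fs::read(source)?;
    }
    Ok(buf
        .split(|b| *b == 0)
        .filter(|part| !part.is_empty())
        .map(|part| String::from_utf8_lossy(part).into_owned())
        .collect())
}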
29 changed files with 1170 additions and 455 deletions

View File

@@ -45,6 +45,11 @@ jobs:
override: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy
- name: Install wget for Windows
if: matrix.job.os == 'windows-latest'
run: choco install wget --no-progress
- name: typos-action
uses: crate-ci/typos@v1.28.4
- name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1

11
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,11 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: cargo-check
stages: [commit]
- id: fmt
stages: [commit]
- id: clippy
args: [--all-targets, --all-features]
stages: [commit]

462
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -1,7 +1,7 @@
[package]
name = "du-dust"
description = "A more intuitive version of du"
version = "1.0.0"
version = "1.1.1"
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
edition = "2021"
readme = "README.md"
@@ -44,6 +44,9 @@ sysinfo = "0.27"
ctrlc = "3.4"
chrono = "0.4"
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
portable-atomic = "1.4"
[target.'cfg(windows)'.dependencies]
winapi-util = "0.1"
filesize = "0.2.0"
@@ -83,6 +86,16 @@ assets = [
"usr/share/doc/du-dust/README",
"644",
],
[
"man-page/dust.1",
"usr/share/man/man1/dust.1",
"644",
],
[
"completions/dust.bash",
"usr/share/bash-completion/completions/dust",
"644",
],
]
extended-description = """\
Dust is meant to give you an instant overview of which directories are using

View File

@@ -27,11 +27,17 @@ Because I want an easy way to see where my disk is being used.
- `brew install dust`
#### [Snap](https://ubuntu.com/core/services/guide/snaps-intro) Ubuntu and [supported systems](https://snapcraft.io/docs/installing-snapd)
- `snap install dust`
Note: `dust` installed through `snap` can only access files stored in the `/home` directory. See daniejstriata/dust-snap#2 for more information.
#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)
- `pacstall -I dust-bin`
### Anaconda (conda-forge)
#### Anaconda (conda-forge)
- `conda install -c conda-forge dust`
@@ -39,6 +45,10 @@ Because I want an easy way to see where my disk is being used.
- `deb-get install du-dust`
#### [x-cmd](https://www.x-cmd.com/pkg/#VPContent)
- `x env use dust`
#### Windows:
- `scoop install dust`
@@ -92,6 +102,8 @@ Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack
Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
```
## Config file

View File

@@ -1,14 +1,21 @@
# ----------- To do a release ---------
# Compare times of runs to check no drastic slow down:
# time target/release/dust ~/dev
# time dust ~dev
# edit version in cargo.toml
# ----------- Pre release ---------
# Compare times of runs to check no drastic slow down:
# hyperfine 'target/release/dust /home/andy'
# hyperfine 'dust /home/andy'
# ----------- Release ---------
# inc version in cargo.toml
# cargo build --release
# commit changed files
# merge to master in github
# tag a commit and push (increment version in Cargo.toml first):
# git tag v0.4.5
# git push origin v0.4.5
# cargo publish to put it in crates.io
# To install locally [Do before pushing it]
# Optional: To install locally
#cargo install --path .

View File

@@ -14,35 +14,40 @@ _dust() {
fi
local context curcontext="$curcontext" state line
_arguments "${_arguments_options[@]}" \
'-d+[Depth to show]:DEPTH: ' \
'--depth=[Depth to show]:DEPTH: ' \
'-T+[Number of threads to use]: : ' \
'--threads=[Number of threads to use]: : ' \
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \
'*-X+[Exclude any file or directory with this name]:PATH:_files' \
'*--ignore-directory=[Exclude any file or directory with this name]:PATH:_files' \
_arguments "${_arguments_options[@]}" : \
'-d+[Depth to show]:DEPTH:_default' \
'--depth=[Depth to show]:DEPTH:_default' \
'-T+[Number of threads to use]: :_default' \
'--threads=[Number of threads to use]: :_default' \
'--config=[Specify a config file to use]:FILE:_files' \
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'*-X+[Exclude any file or directory with this path]:PATH:_files' \
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'-z+[Minimum size file to include in output]:MIN_SIZE: ' \
'--min-size=[Minimum size file to include in output]:MIN_SIZE: ' \
'(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \
'(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \
'(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \
'(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \
'--terminal_width=[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \
'-z+[Minimum size file to include in output]:MIN_SIZE:_default' \
'--min-size=[Minimum size file to include in output]:MIN_SIZE:_default' \
'(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX:_default' \
'(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX:_default' \
'(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX:_default' \
'(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX:_default' \
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
'--terminal_width=[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
'-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: : ' \
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: : ' \
'-A+[just like -mtime, but based on file access time]: : ' \
'--atime=[just like -mtime, but based on file access time]: : ' \
'-y+[just like -mtime, but based on file change time]: : ' \
'--ctime=[just like -mtime, but based on file change time]: : ' \
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: :_default' \
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: :_default' \
'-A+[just like -mtime, but based on file access time]: :_default' \
'--atime=[just like -mtime, but based on file access time]: :_default' \
'-y+[just like -mtime, but based on file change time]: :_default' \
'--ctime=[just like -mtime, but based on file change time]: :_default' \
'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]: :_files' \
'*--collapse=[Keep these directories collapsed]: :_files' \
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
'--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
'-p[Subdirectories will not have their path shortened]' \
'--full-paths[Subdirectories will not have their path shortened]' \
'-L[dereference sym links - Treat sym links as directories and go into them]' \

View File

@@ -21,74 +21,79 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
$completions = @(switch ($command) {
'dust' {
[CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('-T', 'T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--threads', 'threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--number-of-lines', 'number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('-X', 'X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
[CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name')
[CompletionResult]::new('-I', 'I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('--ignore-all-in-file', 'ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('-z', 'z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('--min-size', 'min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('--invert-filter', 'invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('-e', 'e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('--filter', 'filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('--terminal_width', 'terminal_width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('--output-format', 'output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('-S', 'S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('--stack-size', 'stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('-M', 'M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('--mtime', 'mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('-A', 'A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('--atime', 'atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('-y', 'y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--ctime', 'ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('--full-paths', 'full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('-L', 'L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('--dereference-links', 'dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('-x', 'x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('--limit-filesystem', 'limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('-s', 's', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('--apparent-size', 'apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('-r', 'r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('--reverse', 'reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('--no-colors', 'no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('-C', 'C ', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('--force-colors', 'force-colors', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('-b', 'b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('--no-percent-bars', 'no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('-B', 'B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('--bars-on-right', 'bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('-R', 'R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--screen-reader', 'screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--skip-total', 'skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
[CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('--filecount', 'filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('--ignore_hidden', 'ignore_hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('-t', 't', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('--file_types', 'file_types', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('-P', 'P ', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--no-progress', 'no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--print-errors', 'print-errors', [CompletionResultType]::ParameterName, 'Print path with errors.')
[CompletionResult]::new('-D', 'D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('--only-dir', 'only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('-F', 'F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('--only-file', 'only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('-j', 'j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('--output-json', 'output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('-V', 'V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('-d', '-d', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('--depth', '--depth', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('-T', '-T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--threads', '--threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'Specify a config file to use')
[CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--number-of-lines', '--number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('-X', '-X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('--ignore-directory', '--ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('-I', '-I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('--ignore-all-in-file', '--ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('-z', '-z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('--min-size', '--min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('--invert-filter', '--invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('-e', '-e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('--filter', '--filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('-w', '-w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('--terminal_width', '--terminal_width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('-o', '-o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('--output-format', '--output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('-S', '-S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('--stack-size', '--stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('-M', '-M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('--mtime', '--mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('-A', '-A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('--atime', '--atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('-y', '-y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--ctime', '--ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
[CompletionResult]::new('--collapse', '--collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
[CompletionResult]::new('-m', '-m', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('--filetime', '--filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('-p', '-p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('--full-paths', '--full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('-L', '-L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('--dereference-links', '--dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('-x', '-x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('--limit-filesystem', '--limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('-s', '-s', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('--apparent-size', '--apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('-r', '-r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('--reverse', '--reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('-c', '-c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('--no-colors', '--no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('-C', '-C ', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('--force-colors', '--force-colors', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('-b', '-b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('--no-percent-bars', '--no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('-B', '-B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('--bars-on-right', '--bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('-R', '-R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--screen-reader', '--screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--skip-total', '--skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
[CompletionResult]::new('-f', '-f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('--filecount', '--filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('--ignore_hidden', '--ignore_hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('-t', '-t', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('--file_types', '--file_types', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('-P', '-P ', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--no-progress', '--no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--print-errors', '--print-errors', [CompletionResultType]::ParameterName, 'Print path with errors.')
[CompletionResult]::new('-D', '-D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('--only-dir', '--only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('-F', '-F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('--only-file', '--only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('--output-json', '--output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('-V', '-V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--version', '--version', [CompletionResultType]::ParameterName, 'Print version')
break
}
})

View File

@@ -19,7 +19,7 @@ _dust() {
case "${cmd}" in
dust)
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -h -V --depth --threads --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --help --version [PATH]..."
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
@@ -41,6 +41,21 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--config)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--number-of-lines)
COMPREPLY=($(compgen -f "${cur}"))
return 0
@@ -59,12 +74,12 @@ _dust() {
;;
--ignore-all-in-file)
local oldifs
if [[ -v IFS ]]; then
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [[ -v oldifs ]]; then
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
@@ -74,12 +89,12 @@ _dust() {
;;
-I)
local oldifs
if [[ -v IFS ]]; then
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [[ -v oldifs ]]; then
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
@@ -159,6 +174,22 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files0-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--collapse)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--filetime)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
-m)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
*)
COMPREPLY=()
;;

View File

@@ -22,10 +22,11 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --depth 'Depth to show'
cand -T 'Number of threads to use'
cand --threads 'Number of threads to use'
cand --config 'Specify a config file to use'
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
cand -X 'Exclude any file or directory with this name'
cand --ignore-directory 'Exclude any file or directory with this name'
cand -X 'Exclude any file or directory with this path'
cand --ignore-directory 'Exclude any file or directory with this path'
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand -z 'Minimum size file to include in output'
@@ -46,6 +47,10 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --atime 'just like -mtime, but based on file access time'
cand -y 'just like -mtime, but based on file change time'
cand --ctime 'just like -mtime, but based on file change time'
cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
cand --collapse 'Keep these directories collapsed'
cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand -p 'Subdirectories will not have their path shortened'
cand --full-paths 'Subdirectories will not have their path shortened'
cand -L 'dereference sym links - Treat sym links as directories and go into them'

View File

@@ -1,17 +1,32 @@
complete -c dust -s d -l depth -d 'Depth to show' -r
complete -c dust -s T -l threads -d 'Number of threads to use' -r
complete -c dust -l config -d 'Specify a config file to use' -r -F
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this name' -r -F
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ' -r
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$" ' -r
complete -c dust -s w -l terminal_width -d 'Specify width of output overriding the auto detection of terminal width' -r
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "{si '',b '',k '',m '',g '',t '',kb '',mb '',gb '',tb ''}"
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "si\t''
b\t''
k\t''
m\t''
g\t''
t\t''
kb\t''
mb\t''
gb\t''
tb\t''"
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)' -r
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "a\t''
c\t''
m\t''"
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'

View File

@@ -25,4 +25,4 @@ skip-total=true
ignore-hidden=true
# print sizes in powers of 1000 (e.g., 1.1G)
iso=true
output-format="si"

View File

@@ -1,10 +1,10 @@
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.TH Dust 1 "Dust 1.0.0"
.TH Dust 1 "Dust 1.1.1"
.SH NAME
Dust \- Like du but more intuitive
.SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
.SH DESCRIPTION
Like du but more intuitive
.SH OPTIONS
@@ -15,6 +15,9 @@ Depth to show
\fB\-T\fR, \fB\-\-threads\fR
Number of threads to use
.TP
\fB\-\-config\fR=\fIFILE\fR
Specify a config file to use
.TP
\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
Number of lines of output to show. (Default is terminal_height \- 10)
.TP
@@ -22,7 +25,7 @@ Number of lines of output to show. (Default is terminal_height \- 10)
Subdirectories will not have their path shortened
.TP
\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
Exclude any file or directory with this name
Exclude any file or directory with this path
.TP
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
@@ -112,6 +115,19 @@ just like \-mtime, but based on file access time
\fB\-y\fR, \fB\-\-ctime\fR
just like \-mtime, but based on file change time
.TP
\fB\-\-files0\-from\fR
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
.TP
\fB\-\-collapse\fR
Keep these directories collapsed
.TP
\fB\-m\fR, \fB\-\-filetime\fR
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
.br
.br
[\fIpossible values: \fRa, c, m]
.TP
\fB\-h\fR, \fB\-\-help\fR
Print help
.TP
@@ -121,4 +137,4 @@ Print version
[\fIPATH\fR]
.SH VERSION
v1.0.0
v1.1.1

View File

@@ -24,6 +24,15 @@ pub fn build_cli() -> Command {
.help("Number of threads to use")
.num_args(1)
)
.arg(
Arg::new("config")
.long("config")
.help("Specify a config file to use")
.value_name("FILE")
.value_hint(clap::ValueHint::FilePath)
.value_parser(value_parser!(String))
.num_args(1)
)
.arg(
Arg::new("number_of_lines")
.short('n')
@@ -47,7 +56,7 @@ pub fn build_cli() -> Command {
.value_name("PATH")
.value_hint(clap::ValueHint::AnyPath)
.action(clap::ArgAction::Append)
.help("Exclude any file or directory with this name"),
.help("Exclude any file or directory with this path"),
)
.arg(
Arg::new("ignore_all_in_file")
@@ -286,4 +295,32 @@ pub fn build_cli() -> Command {
.value_parser(value_parser!(String))
.help("just like -mtime, but based on file change time")
)
.arg(
Arg::new("files0_from")
.long("files0-from")
.value_hint(clap::ValueHint::AnyPath)
.value_parser(value_parser!(String))
.num_args(1)
.help("run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input"),
)
.arg(
Arg::new("collapse")
.long("collapse")
.value_hint(clap::ValueHint::AnyPath)
.value_parser(value_parser!(String))
.action(clap::ArgAction::Append)
.help("Keep these directories collapsed"),
)
.arg(
Arg::new("filetime")
.short('m')
.long("filetime")
.num_args(1)
.value_parser([
PossibleValue::new("a").alias("accessed"),
PossibleValue::new("c").alias("changed"),
PossibleValue::new("m").alias("modified"),
])
.help("Directory 'size' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time"),
)
}

View File

@@ -1,3 +1,4 @@
use crate::node::FileTime;
use chrono::{Local, TimeZone};
use clap::ArgMatches;
use config_file::FromConfigFile;
@@ -7,14 +8,13 @@ use std::io::IsTerminal;
use std::path::Path;
use std::path::PathBuf;
use crate::dir_walker::Operater;
use crate::dir_walker::Operator;
use crate::display::get_number_format;
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
#[derive(Deserialize, Default)]
#[serde(rename_all = "kebab-case")]
#[serde(deny_unknown_fields)]
pub struct Config {
pub display_full_paths: Option<bool>,
pub display_apparent_size: Option<bool>,
@@ -36,9 +36,17 @@ pub struct Config {
pub threads: Option<usize>,
pub output_json: Option<bool>,
pub print_errors: Option<bool>,
pub files0_from: Option<String>,
}
impl Config {
pub fn get_files_from(&self, options: &ArgMatches) -> Option<String> {
let from_file = options.get_one::<String>("files0_from");
match from_file {
None => self.files0_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
}
pub fn get_no_colors(&self, options: &ArgMatches) -> bool {
Some(true) == self.no_colors || options.get_flag("no_colors")
}
@@ -76,6 +84,20 @@ impl Config {
})
.to_lowercase()
}
pub fn get_filetime(&self, options: &ArgMatches) -> Option<FileTime> {
let out_fmt = options.get_one::<String>("filetime");
match out_fmt {
None => None,
Some(x) => match x.as_str() {
"m" | "modified" => Some(FileTime::Modified),
"a" | "accessed" => Some(FileTime::Accessed),
"c" | "changed" => Some(FileTime::Changed),
_ => unreachable!(),
},
}
}
pub fn get_skip_total(&self, options: &ArgMatches) -> bool {
Some(true) == self.skip_total || options.get_flag("skip_total")
}
@@ -137,21 +159,21 @@ impl Config {
Some(true) == self.output_json || options.get_flag("output_json")
}
pub fn get_modified_time_operator(&self, options: &ArgMatches) -> (Operater, i64) {
pub fn get_modified_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("mtime"),
get_current_date_epoch_seconds(),
)
}
pub fn get_accessed_time_operator(&self, options: &ArgMatches) -> (Operater, i64) {
pub fn get_accessed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("atime"),
get_current_date_epoch_seconds(),
)
}
pub fn get_created_time_operator(&self, options: &ArgMatches) -> (Operater, i64) {
pub fn get_changed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("ctime"),
get_current_date_epoch_seconds(),
@@ -160,7 +182,7 @@ impl Config {
}
fn get_current_date_epoch_seconds() -> i64 {
// calcurate current date epoch seconds
// calculate current date epoch seconds
let now = Local::now();
let current_date = now.date_naive();
@@ -174,7 +196,7 @@ fn get_current_date_epoch_seconds() -> i64 {
fn get_filter_time_operator(
option_value: Option<&String>,
current_date_epoch_seconds: i64,
) -> (Operater, i64) {
) -> Option<(Operator, i64)> {
match option_value {
Some(val) => {
let time = current_date_epoch_seconds
@@ -184,12 +206,12 @@ fn get_filter_time_operator(
.abs()
* DAY_SECONDS;
match val.chars().next().expect("Value should not be empty") {
'+' => (Operater::LessThan, time - DAY_SECONDS),
'-' => (Operater::GreaterThan, time),
_ => (Operater::Equal, time - DAY_SECONDS),
'+' => Some((Operator::LessThan, time - DAY_SECONDS)),
'-' => Some((Operator::GreaterThan, time)),
_ => Some((Operator::Equal, time - DAY_SECONDS)),
}
}
None => (Operater::GreaterThan, 0),
None => None,
}
}
@@ -208,7 +230,7 @@ fn convert_min_size(input: &str) -> Option<usize> {
match number_format {
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
None => {
if letters.eq("") {
if letters.is_empty() {
Some(parsed_digits)
} else {
eprintln!("Ignoring invalid min-size: {input}");
@@ -231,12 +253,29 @@ fn get_config_locations(base: &Path) -> Vec<PathBuf> {
]
}
pub fn get_config() -> Config {
if let Some(home) = directories::BaseDirs::new() {
for path in get_config_locations(home.home_dir()) {
pub fn get_config(conf_path: Option<String>) -> Config {
match conf_path {
Some(path_str) => {
let path = Path::new(&path_str);
if path.exists() {
if let Ok(config) = Config::from_config_file(path) {
return config;
match Config::from_config_file(path) {
Ok(config) => return config,
Err(e) => {
eprintln!("Ignoring invalid config file '{}': {}", &path.display(), e)
}
}
} else {
eprintln!("Config file {:?} doesn't exists", &path.display());
}
}
None => {
if let Some(home) = directories::BaseDirs::new() {
for path in get_config_locations(home.home_dir()) {
if path.exists() {
if let Ok(config) = Config::from_config_file(&path) {
return config;
}
}
}
}
}
@@ -251,6 +290,7 @@ mod tests {
#[allow(unused_imports)]
use super::*;
use chrono::{Datelike, Timelike};
use clap::builder::PossibleValue;
use clap::{value_parser, Arg, ArgMatches, Command};
#[test]
@@ -330,4 +370,56 @@ mod tests {
)
.get_matches_from(args)
}
#[test]
fn test_get_filetime() {
// No config and no flag.
let c = Config::default();
let args = get_filetime_args(vec!["dust"]);
assert_eq!(c.get_filetime(&args), None);
// Config is not defined and flag is defined as access time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "a"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "accessed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
// Config is not defined and flag is defined as modified time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "m"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "modified"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
// Config is not defined and flag is defined as changed time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "c"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "changed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
}
fn get_filetime_args(args: Vec<&str>) -> ArgMatches {
Command::new("Dust")
.arg(
Arg::new("filetime")
.short('m')
.long("filetime")
.num_args(1)
.value_parser([
PossibleValue::new("a").alias("accessed"),
PossibleValue::new("c").alias("changed"),
PossibleValue::new("m").alias("modified"),
])
.help("Directory 'size' is max filetime of child files instead of disk size. while a/c/m for accessed/changed/modified time"),
)
.get_matches_from(args)
}
}

View File

@@ -1,5 +1,6 @@
use std::cmp::Ordering;
use std::fs;
use std::io::Error;
use std::sync::Arc;
use std::sync::Mutex;
@@ -14,6 +15,7 @@ use crate::utils::is_filtered_out_due_to_regex;
use rayon::iter::ParallelBridge;
use rayon::prelude::ParallelIterator;
use regex::Regex;
use std::path::Path;
use std::path::PathBuf;
use std::collections::HashSet;
@@ -21,10 +23,11 @@ use std::collections::HashSet;
use crate::node::build_node;
use std::fs::DirEntry;
use crate::node::FileTime;
use crate::platform::get_metadata;
#[derive(Debug)]
pub enum Operater {
pub enum Operator {
Equal = 0,
LessThan = 1,
GreaterThan = 2,
@@ -35,11 +38,12 @@ pub struct WalkData<'a> {
pub filter_regex: &'a [Regex],
pub invert_filter_regex: &'a [Regex],
pub allowed_filesystems: HashSet<u64>,
pub filter_modified_time: (Operater, i64),
pub filter_accessed_time: (Operater, i64),
pub filter_changed_time: (Operater, i64),
pub filter_modified_time: Option<(Operator, i64)>,
pub filter_accessed_time: Option<(Operator, i64)>,
pub filter_changed_time: Option<(Operator, i64)>,
pub use_apparent_size: bool,
pub by_filecount: bool,
pub by_filetime: &'a Option<FileTime>,
pub ignore_hidden: bool,
pub follow_links: bool,
pub progress_data: Arc<PAtomicInfo>,
@@ -57,19 +61,15 @@ pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
prog_data.state.store(Operation::PREPARING, ORDERING);
clean_inodes(node, &mut inodes, walk_data.use_apparent_size)
clean_inodes(node, &mut inodes, walk_data)
})
.collect();
top_level_nodes
}
// Remove files which have the same inode, we don't want to double count them.
fn clean_inodes(
x: Node,
inodes: &mut HashSet<(u64, u64)>,
use_apparent_size: bool,
) -> Option<Node> {
if !use_apparent_size {
fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
if !walk_data.use_apparent_size {
if let Some(id) = x.inode_device {
if !inodes.insert(id) {
return None;
@@ -82,12 +82,25 @@ fn clean_inodes(
tmp.sort_by(sort_by_inode);
let new_children: Vec<_> = tmp
.into_iter()
.filter_map(|c| clean_inodes(c, inodes, use_apparent_size))
.filter_map(|c| clean_inodes(c, inodes, walk_data))
.collect();
let actual_size = if walk_data.by_filetime.is_some() {
// If by_filetime is Some, directory 'size' is the maximum filetime among child files instead of disk size
new_children
.iter()
.map(|c| c.size)
.chain(std::iter::once(x.size))
.max()
.unwrap_or(0)
} else {
// If by_filetime is None, directory 'size' is the sum of disk sizes or file counts of child files
x.size + new_children.iter().map(|c| c.size).sum::<u64>()
};
Some(Node {
name: x.name,
size: x.size + new_children.iter().map(|c| c.size).sum::<u64>(),
size: actual_size,
children: new_children,
inode_device: x.inode_device,
depth: x.depth,
@@ -115,28 +128,35 @@ fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.');
let is_ignored_path = walk_data.ignore_directories.contains(&entry.path());
let follow_links = walk_data.follow_links && entry.file_type().is_ok_and(|ft| ft.is_symlink());
let size_inode_device = get_metadata(entry.path(), false);
if let Some((_size, Some((_id, dev)), (modified_time, accessed_time, changed_time))) =
size_inode_device
{
if !walk_data.allowed_filesystems.is_empty()
&& !walk_data.allowed_filesystems.contains(&dev)
{
return true;
if !walk_data.allowed_filesystems.is_empty() {
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
if !walk_data.allowed_filesystems.contains(&dev) {
return true;
}
}
if entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
}
if walk_data.filter_accessed_time.is_some()
|| walk_data.filter_modified_time.is_some()
|| walk_data.filter_changed_time.is_some()
{
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
if entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
}
}
}
@@ -211,8 +231,9 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
}
}
Err(ref failed) => {
let mut editable_error = errors.lock().unwrap();
editable_error.no_permissions.insert(failed.to_string());
if handle_error_and_retry(failed, &dir, walk_data) {
return walk(dir.clone(), walk_data, depth);
}
}
}
None
@@ -220,21 +241,11 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
.collect()
}
Err(failed) => {
let mut editable_error = errors.lock().unwrap();
match failed.kind() {
std::io::ErrorKind::PermissionDenied => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
if handle_error_and_retry(&failed, &dir, walk_data) {
return walk(dir, walk_data, depth);
} else {
vec![]
}
vec![]
}
}
} else {
@@ -245,7 +256,47 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
}
vec![]
};
build_node(dir, children, false, false, depth, walk_data)
let is_symlink = if walk_data.follow_links {
match fs::symlink_metadata(&dir) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
} else {
false
};
build_node(dir, children, is_symlink, false, depth, walk_data)
}
fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
let mut editable_error = walk_data.errors.lock().unwrap();
match failed.kind() {
std::io::ErrorKind::PermissionDenied => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::InvalidInput => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
std::io::ErrorKind::Interrupted => {
editable_error.interrupted_error += 1;
if editable_error.interrupted_error > 3 {
panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
} else {
return true;
}
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
}
false
}
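The new `handle_error_and_retry` treats `ErrorKind::Interrupted` as retryable and gives up after a few occurrences. A minimal sketch of the same idea in isolation, using a bounded retry loop rather than dust's recursive re-walk; `read_dir_with_retry` is an illustrative helper, not part of the crate.

```rust
use std::fs::{self, ReadDir};
use std::io;
use std::path::Path;

// Retry a directory read a bounded number of times when the OS reports
// ErrorKind::Interrupted; surface any other error immediately.
fn read_dir_with_retry(path: &Path, max_retries: u32) -> io::Result<ReadDir> {
    let mut attempts = 0;
    loop {
        match fs::read_dir(path) {
            Ok(rd) => return Ok(rd),
            Err(e) if e.kind() == io::ErrorKind::Interrupted && attempts < max_retries => {
                attempts += 1;
            }
            Err(e) => return Err(e),
        }
    }
}
```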
mod tests {
@@ -264,17 +315,43 @@ mod tests {
}
}
#[cfg(test)]
fn create_walker<'a>(use_apparent_size: bool) -> WalkData<'a> {
use crate::PIndicator;
let indicator = PIndicator::build_me();
WalkData {
ignore_directories: HashSet::new(),
filter_regex: &[],
invert_filter_regex: &[],
allowed_filesystems: HashSet::new(),
filter_modified_time: Some((Operator::GreaterThan, 0)),
filter_accessed_time: Some((Operator::GreaterThan, 0)),
filter_changed_time: Some((Operator::GreaterThan, 0)),
use_apparent_size,
by_filecount: false,
by_filetime: &None,
ignore_hidden: false,
follow_links: false,
progress_data: indicator.data.clone(),
errors: Arc::new(Mutex::new(RuntimeErrors::default())),
}
}
#[test]
#[allow(clippy::redundant_clone)]
fn test_should_ignore_file() {
let mut inodes = HashSet::new();
let n = create_node();
let walkdata = create_walker(false);
// First time we insert the node
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), Some(n.clone()));
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
// Second time is a duplicate - we ignore it
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), None);
assert_eq!(clean_inodes(n.clone(), &mut inodes, &walkdata), None);
}
#[test]
@@ -282,10 +359,17 @@ mod tests {
fn test_should_not_ignore_files_if_using_apparent_size() {
let mut inodes = HashSet::new();
let n = create_node();
let walkdata = create_walker(true);
// If using apparent size we include Nodes, even if they share an inode
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone()));
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
}
#[test]


@@ -1,4 +1,5 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use ansi_term::Colour::Red;
use lscolors::{LsColors, Style};
@@ -7,6 +8,7 @@ use unicode_width::UnicodeWidthStr;
use stfu8::encode_u8;
use chrono::{DateTime, Local, TimeZone, Utc};
use std::cmp::max;
use std::cmp::min;
use std::fs;
@@ -14,14 +16,16 @@ use std::iter::repeat;
use std::path::Path;
use thousands::Separable;
pub static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
pub static UNITS: [char; 5] = ['P', 'T', 'G', 'M', 'K'];
static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
const FILETIME_SHOW_LENGTH: usize = 19;
pub struct InitialDisplayData {
pub short_paths: bool,
pub is_reversed: bool,
pub colors_on: bool,
pub by_filecount: bool,
pub by_filetime: Option<FileTime>,
pub is_screen_reader: bool,
pub output_format: String,
pub bars_on_right: bool,
@@ -130,17 +134,11 @@ pub fn draw_it(
root_node: &DisplayNode,
skip_total: bool,
) {
let biggest = match skip_total {
false => root_node,
true => root_node
.get_children_from_node(false)
.next()
.unwrap_or(root_node),
};
let num_chars_needed_on_left_most = if idd.by_filecount {
let max_size = biggest.size;
let max_size = root_node.size;
max_size.separate_with_commas().chars().count()
} else if idd.by_filetime.is_some() {
FILETIME_SHOW_LENGTH
} else {
find_biggest_size_str(root_node, &idd.output_format)
};
@@ -166,7 +164,7 @@ pub fn draw_it(
let display_data = DisplayData {
initial: idd,
num_chars_needed_on_left_most,
base_size: biggest.size,
base_size: root_node.size,
longest_string_length,
ls_colors: LsColors::from_env().unwrap_or_default(),
};
@@ -275,7 +273,7 @@ fn clean_indentation_string(s: &str) -> String {
is
}
fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
pub fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
let dir_name = dir_name.as_ref();
let printable_name = {
if short_paths {
@@ -342,6 +340,8 @@ pub fn format_string(
if display_data.initial.is_screen_reader {
// if screen_reader then bars is 'depth'
format!("{pretty_name} {bars} {pretty_size}{percent}")
} else if display_data.initial.by_filetime.is_some() {
format!("{pretty_size} {indent}{pretty_name}")
} else {
format!("{pretty_size} {indent} {pretty_name}{percent}")
}
@@ -376,6 +376,8 @@ fn get_name_percent(
fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
let output = if display_data.initial.by_filecount {
node.size.separate_with_commas()
} else if display_data.initial.by_filetime.is_some() {
get_pretty_file_modified_time(node.size as i64)
} else {
human_readable_number(node.size, &display_data.initial.output_format)
};
@@ -389,6 +391,14 @@ fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayD
}
}
fn get_pretty_file_modified_time(timestamp: i64) -> String {
let datetime: DateTime<Utc> = Utc.timestamp_opt(timestamp, 0).unwrap();
let local_datetime = datetime.with_timezone(&Local);
local_datetime.format("%Y-%m-%dT%H:%M:%S").to_string()
}
fn get_pretty_name(
node: &DisplayNode,
name_and_padding: String,
@@ -469,6 +479,7 @@ mod tests {
is_reversed: false,
colors_on: false,
by_filecount: false,
by_filetime: None,
is_screen_reader: false,
output_format: "".into(),
bars_on_right: false,
@@ -547,6 +558,14 @@ mod tests {
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 234, ""),
"234T"
);
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 1024, ""),
"1.0P"
);
}
#[test]
@@ -625,4 +644,37 @@ mod tests {
let bar = dd.generate_bar(&n, 5);
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
}
#[test]
fn test_get_pretty_file_modified_time() {
// Create a timestamp for 2023-07-12 00:00:00 in local time
let local_dt = Local.with_ymd_and_hms(2023, 7, 12, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
// Format expected output
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test another timestamp
let local_dt = Local.with_ymd_and_hms(2020, 1, 1, 12, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test timestamp for epoch start (1970-01-01T00:00:00)
let local_dt = Local.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test a future timestamp
let local_dt = Local.with_ymd_and_hms(2030, 12, 25, 6, 30, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
}
}
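Adding 'P' to `UNITS` is what makes the petabyte assertions above pass. Here is a sketch of the base-1024 scaling rule those tests imply, written with an explicit threshold table rather than the crate's `UNITS` array; `pretty_size` and `SCALE` are illustrative names only.

```rust
const SCALE: [(char, u64); 5] = [
    ('P', 1u64 << 50),
    ('T', 1u64 << 40),
    ('G', 1u64 << 30),
    ('M', 1u64 << 20),
    ('K', 1u64 << 10),
];

// One decimal place below ten of a unit ("1.0P"), whole numbers from ten up
// ("234T"), and plain bytes below 1K.
fn pretty_size(size: u64) -> String {
    for (unit, threshold) in SCALE {
        if size >= threshold {
            return if size / threshold < 10 {
                format!("{:.1}{}", size as f64 / threshold as f64, unit)
            } else {
                format!("{}{}", size / threshold, unit)
            };
        }
    }
    format!("{size}B")
}
```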


@@ -1,8 +1,12 @@
use std::cell::RefCell;
use std::path::PathBuf;
use serde::Serialize;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Serialize)]
use crate::display::human_readable_number;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct DisplayNode {
// Note: the order of fields is important here, for PartialEq and PartialOrd
pub size: u64,
@@ -25,3 +29,30 @@ impl DisplayNode {
out
}
}
// Only used for -j 'json' flag combined with -o 'output_type' flag
// Used to pass the output_type into the custom Serde serializer
thread_local! {
pub static OUTPUT_TYPE: RefCell<String> = const { RefCell::new(String::new()) };
}
/*
We need the custom Serialize in case someone uses the -o flag to pass a custom output type in
(show size in MB / GB etc).
Sadly this also necessitates a global variable OUTPUT_TYPE, as we cannot pass the output_type flag
into the serialize method.
*/
impl Serialize for DisplayNode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let readable_size = OUTPUT_TYPE
.with(|output_type| human_readable_number(self.size, output_type.borrow().as_str()));
let mut state = serializer.serialize_struct("DisplayNode", 2)?;
state.serialize_field("size", &(readable_size))?;
state.serialize_field("name", &self.name)?;
state.serialize_field("children", &self.children)?;
state.end()
}
}
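The block comment above explains why the output format has to reach `serialize` through a thread-local instead of a parameter. A self-contained sketch of that pattern (serde plus serde_json as dependencies); `Entry` and `FORMAT` are illustrative stand-ins for `DisplayNode` and `OUTPUT_TYPE`.

```rust
use serde::ser::{Serialize, SerializeStruct, Serializer};
use std::cell::RefCell;

thread_local! {
    // Set once before serializing; read inside every serialize() call on this thread.
    static FORMAT: RefCell<String> = const { RefCell::new(String::new()) };
}

struct Entry {
    size: u64,
    name: String,
}

impl Serialize for Entry {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Render the size using whatever format the caller stashed in the thread-local.
        let pretty = FORMAT.with(|f| format!("{}{}", self.size, f.borrow()));
        let mut state = serializer.serialize_struct("Entry", 2)?;
        state.serialize_field("size", &pretty)?;
        state.serialize_field("name", &self.name)?;
        state.end()
    }
}

fn main() {
    FORMAT.with(|f| f.replace("K".into()));
    let entry = Entry { size: 42, name: "docs".into() };
    // Prints {"size":"42K","name":"docs"}
    println!("{}", serde_json::to_string(&entry).unwrap());
}
```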


@@ -1,7 +1,12 @@
use stfu8::encode_u8;
use crate::display::get_printable_name;
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node;
use std::collections::BinaryHeap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
@@ -12,9 +17,15 @@ pub struct AggregateData {
pub number_of_lines: usize,
pub depth: usize,
pub using_a_filter: bool,
pub short_paths: bool,
}
pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> Option<DisplayNode> {
pub fn get_biggest(
top_level_nodes: Vec<Node>,
display_data: AggregateData,
by_filetime: &Option<FileTime>,
keep_collapsed: HashSet<PathBuf>,
) -> Option<DisplayNode> {
if top_level_nodes.is_empty() {
// perhaps change this, bring back Error object?
return None;
@@ -24,14 +35,26 @@ pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> O
let root;
if number_top_level_nodes > 1 {
let size = top_level_nodes.iter().map(|node| node.size).sum();
let size = if by_filetime.is_some() {
top_level_nodes
.iter()
.map(|node| node.size)
.max()
.unwrap_or(0)
} else {
top_level_nodes.iter().map(|node| node.size).sum()
};
let nodes = handle_duplicate_top_level_names(top_level_nodes, display_data.short_paths);
root = Node {
name: PathBuf::from("(total)"),
size,
children: top_level_nodes,
children: nodes,
inode_device: None,
depth: 0,
};
// Always include the base nodes if we add a 'parent' (total) node
heap = always_add_children(&display_data, &root, heap);
} else {
@@ -39,13 +62,19 @@ pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> O
heap = add_children(&display_data, &root, heap);
}
Some(fill_remaining_lines(heap, &root, display_data))
Some(fill_remaining_lines(
heap,
&root,
display_data,
keep_collapsed,
))
}
pub fn fill_remaining_lines<'a>(
mut heap: BinaryHeap<&'a Node>,
root: &'a Node,
display_data: AggregateData,
keep_collapsed: HashSet<PathBuf>,
) -> DisplayNode {
let mut allowed_nodes = HashMap::new();
@@ -53,10 +82,14 @@ pub fn fill_remaining_lines<'a>(
let line = heap.pop();
match line {
Some(line) => {
// If we are not doing only_file OR if we are doing
// only_file and it has no children (ie is a file not a dir)
if !display_data.only_file || line.children.is_empty() {
allowed_nodes.insert(line.name.as_path(), line);
}
heap = add_children(&display_data, line, heap);
if !keep_collapsed.contains(&line.name) {
heap = add_children(&display_data, line, heap);
}
}
None => break,
}
@@ -114,7 +147,7 @@ fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) ->
.map(|c| recursive_rebuilder(allowed_nodes, c))
.collect();
build_node(new_children, current)
build_display_node(new_children, current)
}
// Applies all allowed nodes as children to current node
@@ -127,10 +160,10 @@ fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> Displ
children: vec![],
})
.collect::<Vec<DisplayNode>>();
build_node(new_children, current)
build_display_node(new_children, current)
}
fn build_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
DisplayNode {
name: current.name.clone(),
@@ -138,3 +171,57 @@ fn build_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode
children: new_children,
}
}
fn names_have_dup(top_level_nodes: &Vec<Node>) -> bool {
let mut stored = HashSet::new();
for node in top_level_nodes {
let name = get_printable_name(&node.name, true);
if stored.contains(&name) {
return true;
}
stored.insert(name);
}
false
}
fn handle_duplicate_top_level_names(top_level_nodes: Vec<Node>, short_paths: bool) -> Vec<Node> {
// If we have top level names that are the same - we need to tweak them:
if short_paths && names_have_dup(&top_level_nodes) {
let mut new_top_nodes = top_level_nodes.clone();
let mut dir_walk_up_count = 0;
while names_have_dup(&new_top_nodes) && dir_walk_up_count < 10 {
dir_walk_up_count += 1;
let mut newer = vec![];
for node in new_top_nodes.iter() {
let mut folders = node.name.iter().rev();
// Get parent folder (if second time round get grandparent and so on)
for _ in 0..dir_walk_up_count {
folders.next();
}
match folders.next() {
// Add (parent_name) to path of Node
Some(data) => {
let parent = encode_u8(data.as_encoded_bytes());
let current_node = node.name.display();
let n = Node {
name: PathBuf::from(format!("{current_node}({parent})")),
size: node.size,
children: node.children.clone(),
inode_device: node.inode_device,
depth: node.depth,
};
newer.push(n)
}
// Node does not have a parent
None => newer.push(node.clone()),
}
}
new_top_nodes = newer;
}
new_top_nodes
} else {
top_level_nodes
}
}
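`handle_duplicate_top_level_names` keeps walking up parent directories until colliding labels become distinct. A simplified, self-contained sketch of the first step of that idea (one parent level only; `disambiguate` is an illustrative helper): two `dup_name` arguments under different parents would come out as `dup_name(andy)` and `dup_name(dave)`.

```rust
use std::collections::HashSet;
use std::path::PathBuf;

// Label each path by its file name, appending "(parent)" only where the bare
// name collides with another entry.
fn disambiguate(paths: &[PathBuf]) -> Vec<String> {
    let mut seen = HashSet::new();
    let mut dups = HashSet::new();
    for p in paths {
        let name = p.file_name().map(|n| n.to_string_lossy().into_owned()).unwrap_or_default();
        if !seen.insert(name.clone()) {
            dups.insert(name);
        }
    }
    paths
        .iter()
        .map(|p| {
            let name = p.file_name().map(|n| n.to_string_lossy().into_owned()).unwrap_or_default();
            match (dups.contains(&name), p.parent().and_then(|d| d.file_name())) {
                (true, Some(parent)) => format!("{}({})", name, parent.to_string_lossy()),
                _ => name,
            }
        })
        .collect()
}
```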


@@ -1,4 +1,5 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node;
use std::collections::HashMap;
use std::ffi::OsStr;
@@ -10,7 +11,11 @@ struct ExtensionNode<'a> {
extension: Option<&'a OsStr>,
}
pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayNode> {
pub fn get_all_file_types(
top_level_nodes: &[Node],
n: usize,
by_filetime: &Option<FileTime>,
) -> Option<DisplayNode> {
let ext_nodes = {
let mut extension_cumulative_sizes = HashMap::new();
build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
@@ -44,16 +49,27 @@ pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayN
// ...then, aggregate the remaining nodes (if any) into a single "(others)" node
if ext_nodes_iter.len() > 0 {
let actual_size = if by_filetime.is_some() {
ext_nodes_iter.map(|node| node.size).max().unwrap_or(0)
} else {
ext_nodes_iter.map(|node| node.size).sum()
};
displayed.push(DisplayNode {
name: PathBuf::from("(others)"),
size: ext_nodes_iter.map(|node| node.size).sum(),
size: actual_size,
children: vec![],
});
}
let actual_size: u64 = if by_filetime.is_some() {
displayed.iter().map(|node| node.size).max().unwrap_or(0)
} else {
displayed.iter().map(|node| node.size).sum()
};
let result = DisplayNode {
name: PathBuf::from("(total)"),
size: displayed.iter().map(|node| node.size).sum(),
size: actual_size,
children: displayed,
};
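This file and `filter.rs` above repeat the same branch: when `--filetime` is active the stored "sizes" are timestamps, so a parent takes the newest child value instead of a sum. Distilled into a standalone sketch with illustrative names:

```rust
// When sizes actually hold timestamps (--filetime), a parent's "size" is the
// newest child value; otherwise it is the plain sum of the children.
fn aggregate(child_sizes: &[u64], by_filetime: bool) -> u64 {
    if by_filetime {
        child_sizes.iter().copied().max().unwrap_or(0)
    } else {
        child_sizes.iter().sum()
    }
}

#[cfg(test)]
mod aggregate_tests {
    use super::aggregate;

    #[test]
    fn max_for_filetime_sum_otherwise() {
        assert_eq!(aggregate(&[10, 30, 20], true), 30);
        assert_eq!(aggregate(&[10, 30, 20], false), 60);
    }
}
```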


@@ -21,8 +21,11 @@ use regex::Error;
use std::collections::HashSet;
use std::env;
use std::fs::read_to_string;
use std::io;
use std::panic;
use std::process;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::sync::Arc;
use std::sync::Mutex;
use sysinfo::{System, SystemExt};
@@ -30,6 +33,7 @@ use sysinfo::{System, SystemExt};
use self::display::draw_it;
use config::get_config;
use dir_walker::walk_it;
use display_node::OUTPUT_TYPE;
use filter::get_biggest;
use filter_type::get_all_file_types;
use regex::Regex;
@@ -78,23 +82,19 @@ fn should_init_color(no_color: bool, force_color: bool) -> bool {
}
fn get_height_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size()
// Windows CI runners detect a terminal height of 0
.map(|(_, Height(h))| max(h as usize, DEFAULT_NUMBER_OF_LINES))
.map(|(_, Height(h))| max(h.into(), DEFAULT_NUMBER_OF_LINES))
.unwrap_or(DEFAULT_NUMBER_OF_LINES)
- 10
}
fn get_width_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size()
.map(|(Width(w), _)| match cfg!(windows) {
// Windows CI runners detect a very low terminal width
true => max(w as usize, DEFAULT_TERMINAL_WIDTH),
false => w as usize,
true => max(w.into(), DEFAULT_TERMINAL_WIDTH),
false => w.into(),
})
.unwrap_or(DEFAULT_TERMINAL_WIDTH)
}
@@ -113,23 +113,55 @@ fn get_regex_value(maybe_value: Option<ValuesRef<String>>) -> Vec<Regex> {
fn main() {
let options = build_cli().get_matches();
let config = get_config();
let config = get_config(options.get_one::<String>("config").cloned());
let errors = RuntimeErrors::default();
let error_listen_for_ctrlc = Arc::new(Mutex::new(errors));
let errors_for_rayon = error_listen_for_ctrlc.clone();
let errors_final = error_listen_for_ctrlc.clone();
let is_in_listing = Arc::new(AtomicBool::new(false));
let cloned_is_in_listing = Arc::clone(&is_in_listing);
ctrlc::set_handler(move || {
error_listen_for_ctrlc.lock().unwrap().abort = true;
println!("\nAborting");
if cloned_is_in_listing.load(Ordering::Relaxed) {
process::exit(1);
}
})
.expect("Error setting Ctrl-C handler");
let target_dirs = match options.get_many::<String>("params") {
Some(values) => values.map(|v| v.as_str()).collect::<Vec<&str>>(),
None => vec!["."],
is_in_listing.store(true, Ordering::Relaxed);
let target_dirs = match config.get_files_from(&options) {
Some(path) => {
if path == "-" {
let mut targets_to_add = io::stdin()
.lines()
.map_while(Result::ok)
.collect::<Vec<String>>();
if targets_to_add.is_empty() {
eprintln!("No input provided, defaulting to current directory");
targets_to_add.push(".".to_owned());
}
targets_to_add
} else {
// read file
match read_to_string(path) {
Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
Err(e) => {
eprintln!("Error reading file: {e}");
vec![".".to_owned()]
}
}
}
}
None => match options.get_many::<String>("params") {
Some(values) => values.cloned().collect(),
None => vec![".".to_owned()],
},
};
is_in_listing.store(false, Ordering::Relaxed);
let summarize_file_types = options.get_flag("types");
@@ -189,11 +221,12 @@ fn main() {
.collect::<Vec<Regex>>();
let by_filecount = options.get_flag("by_filecount");
let by_filetime = config.get_filetime(&options);
let limit_filesystem = options.get_flag("limit_filesystem");
let follow_links = options.get_flag("dereference_links");
let allowed_filesystems = limit_filesystem
.then(|| get_filesystem_devices(&target_dirs))
.then(|| get_filesystem_devices(&target_dirs, follow_links))
.unwrap_or_default();
let simplified_dirs = simplify_dir_names(&target_dirs);
@@ -211,9 +244,22 @@ fn main() {
indicator.spawn(output_format.clone())
}
let keep_collapsed: HashSet<PathBuf> = match options.get_many::<String>("collapse") {
Some(collapse) => {
let mut combined_dirs = HashSet::new();
for collapse_dir in collapse {
for target_dir in target_dirs.iter() {
combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
}
}
combined_dirs
}
None => HashSet::new(),
};
let filter_modified_time = config.get_modified_time_operator(&options);
let filter_accessed_time = config.get_accessed_time_operator(&options);
let filter_changed_time = config.get_created_time_operator(&options);
let filter_changed_time = config.get_changed_time_operator(&options);
let walk_data = WalkData {
ignore_directories: ignored_full_path,
@@ -225,6 +271,7 @@ fn main() {
filter_changed_time,
use_apparent_size: config.get_apparent_size(&options),
by_filecount,
by_filetime: &by_filetime,
ignore_hidden,
follow_links,
progress_data: indicator.data.clone(),
@@ -237,7 +284,7 @@ fn main() {
let top_level_nodes = walk_it(simplified_dirs, &walk_data);
let tree = match summarize_file_types {
true => get_all_file_types(&top_level_nodes, number_of_lines),
true => get_all_file_types(&top_level_nodes, number_of_lines, &by_filetime),
false => {
let agg_data = AggregateData {
min_size: config.get_min_size(&options),
@@ -246,8 +293,9 @@ fn main() {
number_of_lines,
depth,
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
short_paths: !config.get_full_paths(&options),
};
get_biggest(top_level_nodes, agg_data)
get_biggest(top_level_nodes, agg_data, &by_filetime, keep_collapsed)
}
};
@@ -294,19 +342,23 @@ fn main() {
}
if let Some(root_node) = tree {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
if config.get_output_json(&options) {
OUTPUT_TYPE.with(|wrapped| {
wrapped.replace(output_format);
});
println!("{}", serde_json::to_string(&root_node).unwrap());
} else {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
by_filetime,
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
draw_it(
idd,
config.get_no_bars(&options),


@@ -16,6 +16,13 @@ pub struct Node {
pub depth: usize,
}
#[derive(Debug, PartialEq)]
pub enum FileTime {
Modified,
Accessed,
Changed,
}
#[allow(clippy::too_many_arguments)]
pub fn build_node(
dir: PathBuf,
@@ -27,17 +34,18 @@ pub fn build_node(
) -> Option<Node> {
let use_apparent_size = walk_data.use_apparent_size;
let by_filecount = walk_data.by_filecount;
let by_filetime = &walk_data.by_filetime;
get_metadata(&dir, use_apparent_size).map(|data| {
let inode_device = if is_symlink && !use_apparent_size {
None
} else {
data.1
};
get_metadata(
&dir,
use_apparent_size,
walk_data.follow_links && is_symlink,
)
.map(|data| {
let inode_device = data.1;
let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
|| is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
|| (is_symlink && !use_apparent_size)
|| by_filecount && !is_file
|| [
(&walk_data.filter_modified_time, data.2 .0),
@@ -51,6 +59,13 @@ pub fn build_node(
0
} else if by_filecount {
1
} else if by_filetime.is_some() {
match by_filetime {
Some(FileTime::Modified) => data.2 .0.unsigned_abs(),
Some(FileTime::Accessed) => data.2 .1.unsigned_abs(),
Some(FileTime::Changed) => data.2 .2.unsigned_abs(),
None => unreachable!(),
}
} else {
data.0
};
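The `data.2 .0` / `.1` / `.2` accesses above index into the `(modified, accessed, changed)` tuple that `get_metadata` returns, and `unsigned_abs` folds the `i64` timestamp into the node's `u64` size slot. The same selection written out as a small standalone helper to make the tuple positions explicit (`filetime_as_size` is illustrative, not dust's API):

```rust
#[derive(Debug, PartialEq)]
enum FileTime {
    Modified,
    Accessed,
    Changed,
}

// times is (modified, accessed, changed); the chosen timestamp becomes the
// value stored in the node's "size" field.
fn filetime_as_size(which: &FileTime, times: (i64, i64, i64)) -> u64 {
    match which {
        FileTime::Modified => times.0.unsigned_abs(),
        FileTime::Accessed => times.1.unsigned_abs(),
        FileTime::Changed => times.2.unsigned_abs(),
    }
}
```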


@@ -17,9 +17,15 @@ type FileTime = (i64, i64, i64);
pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
use std::os::unix::fs::MetadataExt;
match path.as_ref().metadata() {
let metadata = if follow_links {
path.as_ref().metadata()
} else {
path.as_ref().symlink_metadata()
};
match metadata {
Ok(md) => {
if use_apparent_size {
Some((
@@ -43,6 +49,7 @@ pub fn get_metadata<P: AsRef<Path>>(
pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
// On windows opening the file to get size, file ID and volume can be very
// expensive because 1) it causes a few system calls, and more importantly 2) it can cause
@@ -142,7 +149,12 @@ pub fn get_metadata<P: AsRef<Path>>(
use std::os::windows::fs::MetadataExt;
let path = path.as_ref();
match path.metadata() {
let metadata = if follow_links {
path.metadata()
} else {
path.symlink_metadata()
};
match metadata {
Ok(ref md) => {
const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
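The platform change above comes down to choosing between two std calls: `fs::metadata` follows a symbolic link and describes its target, while `fs::symlink_metadata` describes the link itself. A minimal sketch of that choice; `link_aware_len` is an illustrative helper, not dust's API.

```rust
use std::fs;
use std::io;
use std::path::Path;

// fs::metadata follows a symlink and reports the target's size;
// fs::symlink_metadata reports the link itself.
fn link_aware_len(path: &Path, follow_links: bool) -> io::Result<u64> {
    let md = if follow_links {
        fs::metadata(path)?
    } else {
        fs::symlink_metadata(path)?
    };
    Ok(md.len())
}
```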


@@ -3,7 +3,7 @@ use std::{
io::Write,
path::Path,
sync::{
atomic::{AtomicU64, AtomicU8, AtomicUsize, Ordering},
atomic::{AtomicU8, AtomicUsize, Ordering},
mpsc::{self, RecvTimeoutError, Sender},
Arc, RwLock,
},
@@ -11,6 +11,11 @@ use std::{
time::Duration,
};
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
use crate::display::human_readable_number;
/* -------------------------------------------------------------------------- */
@@ -73,6 +78,7 @@ pub struct RuntimeErrors {
pub no_permissions: HashSet<String>,
pub file_not_found: HashSet<String>,
pub unknown_error: HashSet<String>,
pub interrupted_error: i32,
pub abort: bool,
}
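The cfg gate above pulls in `portable-atomic`'s `AtomicU64` only on targets without native 64-bit atomics; call sites do not change because that crate mirrors std's API. A sketch of the pattern around a hypothetical counter type, assuming a `portable-atomic` dependency guarded by the same cfg:

```rust
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;

// Identical code on both backends; only the import differs per target.
pub struct ByteCounter {
    total: AtomicU64,
}

impl ByteCounter {
    pub fn new() -> Self {
        ByteCounter { total: AtomicU64::new(0) }
    }

    pub fn add(&self, n: u64) {
        self.total.fetch_add(n, Ordering::Relaxed);
    }

    pub fn get(&self) -> u64 {
        self.total.load(Ordering::Relaxed)
    }
}
```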


@@ -4,7 +4,7 @@ use std::path::{Path, PathBuf};
use crate::config::DAY_SECONDS;
use crate::dir_walker::Operater;
use crate::dir_walker::Operator;
use crate::platform;
use regex::Regex;
@@ -34,13 +34,25 @@ pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
top_level_names
}
pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P]) -> HashSet<u64> {
pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P], follow_links: bool) -> HashSet<u64> {
use std::fs;
// Gets the device ids for the filesystems which are used by the argument paths
paths
.iter()
.filter_map(|p| match get_metadata(p, false) {
Some((_size, Some((_id, dev)), _time)) => Some(dev),
_ => None,
.filter_map(|p| {
let follow_links = if follow_links {
// slow path: If dereference-links is set, then we check if the file is a symbolic link
match fs::symlink_metadata(p) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
} else {
false
};
match get_metadata(p, false, follow_links) {
Some((_size, Some((_id, dev)), _time)) => Some(dev),
_ => None,
}
})
.collect()
}
@@ -65,13 +77,17 @@ pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool
}
}
pub fn is_filtered_out_due_to_file_time(filter_time: &(Operater, i64), actual_time: i64) -> bool {
pub fn is_filtered_out_due_to_file_time(
filter_time: &Option<(Operator, i64)>,
actual_time: i64,
) -> bool {
match filter_time {
(Operater::Equal, bound_time) => {
None => false,
Some((Operator::Equal, bound_time)) => {
!(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
}
(Operater::GreaterThan, bound_time) => actual_time < *bound_time,
(Operater::LessThan, bound_time) => actual_time > *bound_time,
Some((Operator::GreaterThan, bound_time)) => actual_time < *bound_time,
Some((Operator::LessThan, bound_time)) => actual_time > *bound_time,
}
}
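Wrapping the time filters in `Option` expresses "no filter set" directly instead of through a sentinel tuple. A standalone sketch of the matching rule with the same semantics as above: `Equal` keeps times inside a one-day window starting at the bound, while `GreaterThan`/`LessThan` keep times on the corresponding side of it (`Operator` and `DAY_SECONDS` here are local stand-ins for the crate's definitions).

```rust
const DAY_SECONDS: i64 = 24 * 60 * 60;

enum Operator {
    Equal,
    LessThan,
    GreaterThan,
}

// Returns true when the entry should be dropped; None means "no filter, keep everything".
fn filtered_out(filter: &Option<(Operator, i64)>, actual: i64) -> bool {
    match filter {
        None => false,
        Some((Operator::Equal, bound)) => !(actual >= *bound && actual < *bound + DAY_SECONDS),
        Some((Operator::GreaterThan, bound)) => actual < *bound,
        Some((Operator::LessThan, bound)) => actual > *bound,
    }
}

#[cfg(test)]
mod filter_tests {
    use super::*;

    #[test]
    fn equal_keeps_a_one_day_window() {
        let f = Some((Operator::Equal, 1_000));
        assert!(!filtered_out(&f, 1_000)); // inside the window: kept
        assert!(filtered_out(&f, 1_000 + DAY_SECONDS)); // past the window: dropped
        assert!(filtered_out(&Some((Operator::GreaterThan, 500)), 499)); // too old: dropped
        assert!(!filtered_out(&None, 0)); // no filter: always kept
    }
}
```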


@@ -2,7 +2,7 @@ use assert_cmd::Command;
use std::ffi::OsStr;
use std::process::Output;
use std::sync::Once;
use std::{fs, io, str};
use std::{io, str};
static INIT: Once = Once::new();
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
@@ -38,6 +38,7 @@ fn copy_test_data(dir: &str) {
fn create_unreadable_directory() -> io::Result<()> {
#[cfg(unix)]
{
use std::fs;
use std::fs::Permissions;
use std::os::unix::fs::PermissionsExt;
fs::create_dir_all(UNREADABLE_DIR_PATH)?;


@@ -254,3 +254,26 @@ pub fn test_force_color() {
assert!(output.contains("\x1B[31m"));
assert!(output.contains("\x1B[0m"));
}
#[test]
pub fn test_collapse() {
let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
assert!(output.contains("many"));
assert!(!output.contains("hello_file"));
}
#[test]
pub fn test_handle_duplicate_names() {
// Check that even if we run on multiple directories with the same name
// we still show the distinct parent dir in the output
let output = build_command(vec![
"tests/test_dir_matching/dave/dup_name",
"tests/test_dir_matching/andy/dup_name",
"ci",
]);
assert!(output.contains("andy"));
assert!(output.contains("dave"));
assert!(output.contains("ci"));
assert!(output.contains("dup_name"));
assert!(!output.contains("test_dir_matching"));
}