Compare commits

..

42 Commits

Author SHA1 Message Date
andy.boot
759658ee96 feat: Handle duplicate dir names better
If we run `dust /usr/*/Trash`
We see several 'Trash' directories in the output but do not know which
user they belong to.

This fix means if we see duplicate names in a directory we will display
the parent directory name as well
2025-02-07 20:15:11 +00:00
andy.boot
a962b80eec deps: cargo update 2025-02-06 00:30:39 +00:00
andy.boot
01c0aaeade feat: New --collapse flag
--collapse will keep that directory collapsed and will not expand it.
2025-01-27 22:00:08 +00:00
andy.boot
6cbd736e11 fix: Bars in --skip-total flag
Before we calculated the % by taking the longest bar. If you use
--skip-total the longest bar is not the total. We need to sum up all the
children of root to work out what the largest size is.
2025-01-26 11:22:37 +00:00
andy.boot
8e087e09da fix: Handle Interrupted Error
Rust may throw Interrupted errors while scanning filesystem. These may
be retried:
https://doc.rust-lang.org/std/io/enum.ErrorKind.html#variant.Interrupted
2025-01-26 11:22:04 +00:00
andy.boot
9ba0b6d1d0 feat: Support -o flag for JSON output
requested in: https://github.com/bootandy/dust/issues/450
2025-01-17 20:48:56 +00:00
andy.boot
775d841840 style: clippy 2025-01-15 21:37:29 +00:00
n4n5
609fc1e760 clippy 2025-01-15 19:14:00 +00:00
n4n5
eeb686562d push config option 2025-01-15 19:14:00 +00:00
n4n5
e0eaeccc0b add wget install 2025-01-15 19:08:18 +00:00
n4n5
2e56a261e0 clippy 2025-01-15 19:08:18 +00:00
n4n5
bfe7323b20 fix typo Operator 2025-01-15 19:08:18 +00:00
janbridley
1372815007 Format src/display.rs 2024-11-08 22:50:03 +00:00
janbridley
7c9e2f1833 Enable pretty format for petabyte data 2024-11-08 22:50:03 +00:00
Camille Louédoc-Eyriès
1d40ca0870 docs(readme): warn about snap-dust limitations 2024-10-17 23:03:35 +01:00
Yukai Chou
86b2bd944c refactor: simplify get_height_of_terminal() and get_width... 2024-09-16 22:01:10 +01:00
andy.boot
b63608604a docs: Update README.md 2024-09-03 21:57:36 +01:00
andy.boot
b24fab720d deps: cargo update 2024-09-03 21:54:20 +01:00
wugeer
d81b9065a1 fix: man-page and completions missing in debian package 2024-09-03 21:49:23 +01:00
andy.boot
38c4d23732 docs: Update README.md
include snap
2024-08-21 18:53:55 +01:00
wugeer
99bf0fc041 docs: update sample config.toml 2024-08-21 18:47:15 +01:00
wugeer
75d0566949 feat: use pre-commit hooks to standardize commit messages 2024-08-21 18:46:20 +01:00
NoisyCoil
489d9ada44 fix: 64-bit atomics for platforms with no 64-bit atomics
Closes: #423
2024-08-09 19:28:20 +01:00
wugeer
f48fcc790a feat: support Dust tree by age 2024-08-07 19:31:22 +01:00
wugeer
733117d0f6 fix: retrieve metadata for symbolic links without following them
Previously, the function get_metadata in platform.rs used `fs::metadata` which follows symbolic links
and returns metadata for the target file. This caused issue #421: du / dust disagreement
when trying to determine properties of the symbolic link itself
2024-07-31 00:16:59 +01:00
andy.boot
dbd18f90e7 docs: update release procedure
Previously I accidentally tagged a release before building it leading
to an out of date Cargo.lock file.
2024-07-17 19:35:44 +01:00
andy.boot
dad88ad660 version: increment version 2024-07-17 19:35:44 +01:00
andy.boot
00a7c410a0 ci: fix warning in windows build 2024-07-17 19:35:44 +01:00
andy.boot
1ab0b2f531 refactor: rename variable 2024-07-17 19:35:44 +01:00
andy.boot
c09073151d fix: perf issues with v1.1.0
Bring performance back
2024-07-17 19:16:46 +01:00
andy.boot
b4a517a096 version: update version
Also update docs, so I don't partially update a version number again
2024-07-16 22:48:27 +01:00
andy.boot
e654d30f9d version: increment version 2024-07-16 22:05:17 +01:00
Maksim Bondarenkov
4fc1897678 deps: update winapi-util to 0.1.8 (#410) 2024-07-03 21:41:52 +01:00
andy.boot
08b9c756ee fix: total_ordering of sort_by_inode
Before sort_by_inode could result in unstable ordering. This change
ensures we will always have a reliable total ordering
2024-06-30 21:57:58 +01:00
n4n5
394231683d feat: Add flag to read from file/stdin (#405)
* from standard

* improve

* happy clippy

* explicit arg option

* fix problem
2024-06-27 23:51:47 +01:00
Jan Chren ~rindeal
a06a001886 test_exact_output: simplify array handling 2024-06-27 21:47:18 +01:00
Jan Chren ~rindeal
fd9e97bcfa test_exact_output: refactor unreadable directory handling 2024-06-27 21:47:18 +01:00
Jan Chren ~rindeal
3ed95ee399 streamline func APIs processing target_dirs 2024-06-27 21:47:18 +01:00
Jan Chren ~rindeal
58c9f6d509 Fix -x option behavior
This PR addresses an issue where `target_dirs` were being optimized out
before being passed to the `get_filesystem_devices()` function.
The changes in this PR ensure that `target_dirs` are passed directly
to `get_filesystem_devices()`, and only then are they simplified.

Example of a command that currently doesn't work correctly:
```sh
dust -x $(findmnt -S tmpfs -o target -n)
```

It should show the usage of all tmpfs mounts, but it currently shows
just the root mountpoints like `/run`, since all the mountpoints
nested under it are optimized out.
2024-06-27 21:47:18 +01:00
wugeer
3f2f7a8bb2 Formatting 2024-06-27 21:11:11 +01:00
andy.boot
b7176cf887 tests: Cleanup test_exact_output 2024-06-24 23:36:09 +01:00
wugeer
d65f41097e feat: Added the ability to filter the corresponding files based on the access time, modify time, and change time of the file for statistics 2024-06-24 23:35:25 +01:00
29 changed files with 1487 additions and 485 deletions

View File

@@ -45,6 +45,11 @@ jobs:
override: true override: true
profile: minimal # minimal component installation (ie, no documentation) profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy components: rustfmt, clippy
- name: Install wget for Windows
if: matrix.job.os == 'windows-latest'
run: choco install wget --no-progress
- name: typos-action
uses: crate-ci/typos@v1.28.4
- name: "`fmt` testing" - name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1

11
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,11 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: cargo-check
stages: [commit]
- id: fmt
stages: [commit]
- id: clippy
args: [--all-targets, --all-features]
stages: [commit]

466
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
[package] [package]
name = "du-dust" name = "du-dust"
description = "A more intuitive version of du" description = "A more intuitive version of du"
version = "1.0.0" version = "1.1.1"
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"] authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
edition = "2021" edition = "2021"
readme = "README.md" readme = "README.md"
@@ -44,6 +44,9 @@ sysinfo = "0.27"
ctrlc = "3.4" ctrlc = "3.4"
chrono = "0.4" chrono = "0.4"
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
portable-atomic = "1.4"
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
winapi-util = "0.1" winapi-util = "0.1"
filesize = "0.2.0" filesize = "0.2.0"
@@ -83,6 +86,16 @@ assets = [
"usr/share/doc/du-dust/README", "usr/share/doc/du-dust/README",
"644", "644",
], ],
[
"man-page/dust.1",
"usr/share/man/man1/dust.1",
"644",
],
[
"completions/dust.bash",
"usr/share/bash-completion/completions/dust",
"644",
],
] ]
extended-description = """\ extended-description = """\
Dust is meant to give you an instant overview of which directories are using Dust is meant to give you an instant overview of which directories are using

View File

@@ -27,11 +27,17 @@ Because I want an easy way to see where my disk is being used.
- `brew install dust` - `brew install dust`
#### [Snap](https://ubuntu.com/core/services/guide/snaps-intro) Ubuntu and [supported systems](https://snapcraft.io/docs/installing-snapd)
- `snap install dust`
Note: `dust` installed through `snap` can only access files stored in the `/home` directory. See daniejstriata/dust-snap#2 for more information.
#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu) #### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)
- `pacstall -I dust-bin` - `pacstall -I dust-bin`
### Anaconda (conda-forge) #### Anaconda (conda-forge)
- `conda install -c conda-forge dust` - `conda install -c conda-forge dust`
@@ -39,6 +45,10 @@ Because I want an easy way to see where my disk is being used.
- `deb-get install du-dust` - `deb-get install du-dust`
#### [x-cmd](https://www.x-cmd.com/pkg/#VPContent)
- `x env use dust`
#### Windows: #### Windows:
- `scoop install dust` - `scoop install dust`
@@ -92,6 +102,8 @@ Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack
Usage: dust --skip-total (No total row will be displayed) Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB) Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq) Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
``` ```
## Config file ## Config file

View File

@@ -1,14 +1,21 @@
# ----------- To do a release --------- # ----------- To do a release ---------
# Compare times of runs to check no drastic slow down:
# time target/release/dust ~/dev
# time dust ~dev
# edit version in cargo.toml # ----------- Pre release ---------
# Compare times of runs to check no drastic slow down:
# hyperfine 'target/release/dust /home/andy'
# hyperfine 'dust /home/andy'
# ----------- Release ---------
# inc version in cargo.toml
# cargo build --release
# commit changed files
# merge to master in github
# tag a commit and push (increment version in Cargo.toml first): # tag a commit and push (increment version in Cargo.toml first):
# git tag v0.4.5 # git tag v0.4.5
# git push origin v0.4.5 # git push origin v0.4.5
# cargo publish to put it in crates.io # cargo publish to put it in crates.io
# To install locally [Do before pushing it] # Optional: To install locally
#cargo install --path . #cargo install --path .

View File

@@ -14,29 +14,40 @@ _dust() {
fi fi
local context curcontext="$curcontext" state line local context curcontext="$curcontext" state line
_arguments "${_arguments_options[@]}" \ _arguments "${_arguments_options[@]}" : \
'-d+[Depth to show]:DEPTH: ' \ '-d+[Depth to show]:DEPTH:_default' \
'--depth=[Depth to show]:DEPTH: ' \ '--depth=[Depth to show]:DEPTH:_default' \
'-T+[Number of threads to use]: : ' \ '-T+[Number of threads to use]: :_default' \
'--threads=[Number of threads to use]: : ' \ '--threads=[Number of threads to use]: :_default' \
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \ '--config=[Specify a config file to use]:FILE:_files' \
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER: ' \ '-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'*-X+[Exclude any file or directory with this name]:PATH:_files' \ '--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'*--ignore-directory=[Exclude any file or directory with this name]:PATH:_files' \ '*-X+[Exclude any file or directory with this path]:PATH:_files' \
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \ '-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \ '--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'-z+[Minimum size file to include in output]:MIN_SIZE: ' \ '-z+[Minimum size file to include in output]:MIN_SIZE:_default' \
'--min-size=[Minimum size file to include in output]:MIN_SIZE: ' \ '--min-size=[Minimum size file to include in output]:MIN_SIZE:_default' \
'(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \ '(-e --filter -t --file_types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX:_default' \
'(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX: ' \ '(-e --filter -t --file_types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$" ]:REGEX:_default' \
'(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \ '(-t --file_types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX:_default' \
'(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX: ' \ '(-t --file_types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$" ]:REGEX:_default' \
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \ '-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
'--terminal_width=[Specify width of output overriding the auto detection of terminal width]:WIDTH: ' \ '--terminal_width=[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
'-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \ '-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \ '--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.]:FORMAT:(si b k m g t kb mb gb tb)' \
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \ '-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE: ' \ '--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: :_default' \
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]: :_default' \
'-A+[just like -mtime, but based on file access time]: :_default' \
'--atime=[just like -mtime, but based on file access time]: :_default' \
'-y+[just like -mtime, but based on file change time]: :_default' \
'--ctime=[just like -mtime, but based on file change time]: :_default' \
'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]: :_files' \
'*--collapse=[Keep these directories collapsed]: :_files' \
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
'--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
'-p[Subdirectories will not have their path shortened]' \ '-p[Subdirectories will not have their path shortened]' \
'--full-paths[Subdirectories will not have their path shortened]' \ '--full-paths[Subdirectories will not have their path shortened]' \
'-L[dereference sym links - Treat sym links as directories and go into them]' \ '-L[dereference sym links - Treat sym links as directories and go into them]' \

View File

@@ -21,68 +21,79 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
$completions = @(switch ($command) { $completions = @(switch ($command) {
'dust' { 'dust' {
[CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Depth to show') [CompletionResult]::new('-d', '-d', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Depth to show') [CompletionResult]::new('--depth', '--depth', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('-T', 'T ', [CompletionResultType]::ParameterName, 'Number of threads to use') [CompletionResult]::new('-T', '-T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--threads', 'threads', [CompletionResultType]::ParameterName, 'Number of threads to use') [CompletionResult]::new('--threads', '--threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)') [CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'Specify a config file to use')
[CompletionResult]::new('--number-of-lines', 'number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)') [CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('-X', 'X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name') [CompletionResult]::new('--number-of-lines', '--number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--ignore-directory', 'ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this name') [CompletionResult]::new('-X', '-X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('-I', 'I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter') [CompletionResult]::new('--ignore-directory', '--ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('--ignore-all-in-file', 'ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter') [CompletionResult]::new('-I', '-I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('-z', 'z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output') [CompletionResult]::new('--ignore-all-in-file', '--ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('--min-size', 'min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output') [CompletionResult]::new('-z', '-z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ') [CompletionResult]::new('--min-size', '--min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('--invert-filter', 'invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ') [CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('-e', 'e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ') [CompletionResult]::new('--invert-filter', '--invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$" ')
[CompletionResult]::new('--filter', 'filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ') [CompletionResult]::new('-e', '-e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width') [CompletionResult]::new('--filter', '--filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$" ')
[CompletionResult]::new('--terminal_width', 'terminal_width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width') [CompletionResult]::new('-w', '-w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.') [CompletionResult]::new('--terminal_width', '--terminal_width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('--output-format', 'output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.') [CompletionResult]::new('-o', '-o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('-S', 'S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)') [CompletionResult]::new('--output-format', '--output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.')
[CompletionResult]::new('--stack-size', 'stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)') [CompletionResult]::new('-S', '-S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened') [CompletionResult]::new('--stack-size', '--stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('--full-paths', 'full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened') [CompletionResult]::new('-M', '-M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('-L', 'L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them') [CompletionResult]::new('--mtime', '--mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('--dereference-links', 'dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them') [CompletionResult]::new('-A', '-A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('-x', 'x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory') [CompletionResult]::new('--atime', '--atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('--limit-filesystem', 'limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory') [CompletionResult]::new('-y', '-y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('-s', 's', [CompletionResultType]::ParameterName, 'Use file length instead of blocks') [CompletionResult]::new('--ctime', '--ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--apparent-size', 'apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks') [CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
[CompletionResult]::new('-r', 'r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)') [CompletionResult]::new('--collapse', '--collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
[CompletionResult]::new('--reverse', 'reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)') [CompletionResult]::new('-m', '-m', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)') [CompletionResult]::new('--filetime', '--filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('--no-colors', 'no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)') [CompletionResult]::new('-p', '-p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('-C', 'C ', [CompletionResultType]::ParameterName, 'Force colors print') [CompletionResult]::new('--full-paths', '--full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('--force-colors', 'force-colors', [CompletionResultType]::ParameterName, 'Force colors print') [CompletionResult]::new('-L', '-L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('-b', 'b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed') [CompletionResult]::new('--dereference-links', '--dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('--no-percent-bars', 'no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed') [CompletionResult]::new('-x', '-x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('-B', 'B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen') [CompletionResult]::new('--limit-filesystem', '--limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('--bars-on-right', 'bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen') [CompletionResult]::new('-s', '-s', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('-R', 'R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)') [CompletionResult]::new('--apparent-size', '--apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('--screen-reader', 'screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)') [CompletionResult]::new('-r', '-r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('--skip-total', 'skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed') [CompletionResult]::new('--reverse', '--reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size') [CompletionResult]::new('-c', '-c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('--filecount', 'filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size') [CompletionResult]::new('--no-colors', '--no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'Do not display hidden files') [CompletionResult]::new('-C', '-C ', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('--ignore_hidden', 'ignore_hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files') [CompletionResult]::new('--force-colors', '--force-colors', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('-t', 't', [CompletionResultType]::ParameterName, 'show only these file types') [CompletionResult]::new('-b', '-b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('--file_types', 'file_types', [CompletionResultType]::ParameterName, 'show only these file types') [CompletionResult]::new('--no-percent-bars', '--no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('-P', 'P ', [CompletionResultType]::ParameterName, 'Disable the progress indication.') [CompletionResult]::new('-B', '-B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('--no-progress', 'no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication.') [CompletionResult]::new('--bars-on-right', '--bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('--print-errors', 'print-errors', [CompletionResultType]::ParameterName, 'Print path with errors.') [CompletionResult]::new('-R', '-R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('-D', 'D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed.') [CompletionResult]::new('--screen-reader', '--screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--only-dir', 'only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed.') [CompletionResult]::new('--skip-total', '--skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
[CompletionResult]::new('-F', 'F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)') [CompletionResult]::new('-f', '-f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('--only-file', 'only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)') [CompletionResult]::new('--filecount', '--filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('-j', 'j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory') [CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('--output-json', 'output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory') [CompletionResult]::new('--ignore_hidden', '--ignore_hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('-t', '-t', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help') [CompletionResult]::new('--file_types', '--file_types', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('-V', 'V ', [CompletionResultType]::ParameterName, 'Print version') [CompletionResult]::new('-P', '-P ', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version') [CompletionResult]::new('--no-progress', '--no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication.')
[CompletionResult]::new('--print-errors', '--print-errors', [CompletionResultType]::ParameterName, 'Print path with errors.')
[CompletionResult]::new('-D', '-D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('--only-dir', '--only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed.')
[CompletionResult]::new('-F', '-F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('--only-file', '--only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('--output-json', '--output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help')
[CompletionResult]::new('-V', '-V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--version', '--version', [CompletionResultType]::ParameterName, 'Print version')
break break
} }
}) })

View File

@@ -19,7 +19,7 @@ _dust() {
case "${cmd}" in case "${cmd}" in
dust) dust)
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -h -V --depth --threads --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --help --version [PATH]..." opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0 return 0
@@ -41,6 +41,21 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
return 0 return 0
;; ;;
--config)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--number-of-lines) --number-of-lines)
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
return 0 return 0
@@ -59,12 +74,12 @@ _dust() {
;; ;;
--ignore-all-in-file) --ignore-all-in-file)
local oldifs local oldifs
if [[ -v IFS ]]; then if [ -n "${IFS+x}" ]; then
oldifs="$IFS" oldifs="$IFS"
fi fi
IFS=$'\n' IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
if [[ -v oldifs ]]; then if [ -n "${oldifs+x}" ]; then
IFS="$oldifs" IFS="$oldifs"
fi fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
@@ -74,12 +89,12 @@ _dust() {
;; ;;
-I) -I)
local oldifs local oldifs
if [[ -v IFS ]]; then if [ -n "${IFS+x}" ]; then
oldifs="$IFS" oldifs="$IFS"
fi fi
IFS=$'\n' IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
if [[ -v oldifs ]]; then if [ -n "${oldifs+x}" ]; then
IFS="$oldifs" IFS="$oldifs"
fi fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
@@ -135,6 +150,46 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}")) COMPREPLY=($(compgen -f "${cur}"))
return 0 return 0
;; ;;
--mtime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-M)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--atime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-A)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ctime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-y)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files0-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--collapse)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--filetime)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
-m)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
*) *)
COMPREPLY=() COMPREPLY=()
;; ;;

View File

@@ -22,10 +22,11 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --depth 'Depth to show' cand --depth 'Depth to show'
cand -T 'Number of threads to use' cand -T 'Number of threads to use'
cand --threads 'Number of threads to use' cand --threads 'Number of threads to use'
cand --config 'Specify a config file to use'
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)' cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)' cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
cand -X 'Exclude any file or directory with this name' cand -X 'Exclude any file or directory with this path'
cand --ignore-directory 'Exclude any file or directory with this name' cand --ignore-directory 'Exclude any file or directory with this path'
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' cand --ignore-all-in-file 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
cand -z 'Minimum size file to include in output' cand -z 'Minimum size file to include in output'
@@ -40,6 +41,16 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.'
cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)' cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)' cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand -M '+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (currn, +∞)'
cand --mtime '+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (currn, +∞)'
cand -A 'just like -mtime, but based on file access time'
cand --atime 'just like -mtime, but based on file access time'
cand -y 'just like -mtime, but based on file change time'
cand --ctime 'just like -mtime, but based on file change time'
cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
cand --collapse 'Keep these directories collapsed'
cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand -p 'Subdirectories will not have their path shortened' cand -p 'Subdirectories will not have their path shortened'
cand --full-paths 'Subdirectories will not have their path shortened' cand --full-paths 'Subdirectories will not have their path shortened'
cand -L 'dereference sym links - Treat sym links as directories and go into them' cand -L 'dereference sym links - Treat sym links as directories and go into them'

View File

@@ -1,14 +1,32 @@
complete -c dust -s d -l depth -d 'Depth to show' -r complete -c dust -s d -l depth -d 'Depth to show' -r
complete -c dust -s T -l threads -d 'Number of threads to use' -r complete -c dust -s T -l threads -d 'Number of threads to use' -r
complete -c dust -l config -d 'Specify a config file to use' -r -F
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this name' -r -F complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ' -r complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$" ' -r
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$" ' -r complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$" ' -r
complete -c dust -s w -l terminal_width -d 'Specify width of output overriding the auto detection of terminal width' -r complete -c dust -s w -l terminal_width -d 'Specify width of output overriding the auto detection of terminal width' -r
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "{si '',b '',k '',m '',g '',t '',kb '',mb '',gb '',tb ''}" complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size.' -r -f -a "si\t''
b\t''
k\t''
m\t''
g\t''
t\t''
kb\t''
mb\t''
gb\t''
tb\t''"
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (currn, +∞)' -r
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "a\t''
c\t''
m\t''"
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened' complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them' complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory' complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'

View File

@@ -25,4 +25,4 @@ skip-total=true
ignore-hidden=true ignore-hidden=true
# print sizes in powers of 1000 (e.g., 1.1G) # print sizes in powers of 1000 (e.g., 1.1G)
iso=true output-format="si"

View File

@@ -1,10 +1,10 @@
.ie \n(.g .ds Aq \(aq .ie \n(.g .ds Aq \(aq
.el .ds Aq ' .el .ds Aq '
.TH Dust 1 "Dust 1.0.0" .TH Dust 1 "Dust 1.1.1"
.SH NAME .SH NAME
Dust \- Like du but more intuitive Dust \- Like du but more intuitive
.SH SYNOPSIS .SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR] \fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] 
[\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
.SH DESCRIPTION .SH DESCRIPTION
Like du but more intuitive Like du but more intuitive
.SH OPTIONS .SH OPTIONS
@@ -15,6 +15,9 @@ Depth to show
\fB\-T\fR, \fB\-\-threads\fR \fB\-T\fR, \fB\-\-threads\fR
Number of threads to use Number of threads to use
.TP .TP
\fB\-\-config\fR=\fIFILE\fR
Specify a config file to use
.TP
\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR \fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
Number of lines of output to show. (Default is terminal_height \- 10) Number of lines of output to show. (Default is terminal_height \- 10)
.TP .TP
@@ -22,7 +25,7 @@ Number of lines of output to show. (Default is terminal_height \- 10)
Subdirectories will not have their path shortened Subdirectories will not have their path shortened
.TP .TP
\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR \fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
Exclude any file or directory with this name Exclude any file or directory with this path
.TP .TP
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR \fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
@@ -103,6 +106,28 @@ Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error:
\fB\-j\fR, \fB\-\-output\-json\fR \fB\-j\fR, \fB\-\-output\-json\fR
Output the directory tree as json to the current directory Output the directory tree as json to the current directory
.TP .TP
\fB\-M\fR, \fB\-\-mtime\fR
+/\-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and \-n => (currn, +∞)
.TP
\fB\-A\fR, \fB\-\-atime\fR
just like \-mtime, but based on file access time
.TP
\fB\-y\fR, \fB\-\-ctime\fR
just like \-mtime, but based on file change time
.TP
\fB\-\-files0\-from\fR
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
.TP
\fB\-\-collapse\fR
Keep these directories collapsed
.TP
\fB\-m\fR, \fB\-\-filetime\fR
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
.br
.br
[\fIpossible values: \fRa, c, m]
.TP
\fB\-h\fR, \fB\-\-help\fR \fB\-h\fR, \fB\-\-help\fR
Print help Print help
.TP .TP
@@ -112,4 +137,4 @@ Print version
[\fIPATH\fR] [\fIPATH\fR]
.SH VERSION .SH VERSION
v1.0.0 v1.1.1

View File

@@ -24,6 +24,15 @@ pub fn build_cli() -> Command {
.help("Number of threads to use") .help("Number of threads to use")
.num_args(1) .num_args(1)
) )
.arg(
Arg::new("config")
.long("config")
.help("Specify a config file to use")
.value_name("FILE")
.value_hint(clap::ValueHint::FilePath)
.value_parser(value_parser!(String))
.num_args(1)
)
.arg( .arg(
Arg::new("number_of_lines") Arg::new("number_of_lines")
.short('n') .short('n')
@@ -47,7 +56,7 @@ pub fn build_cli() -> Command {
.value_name("PATH") .value_name("PATH")
.value_hint(clap::ValueHint::AnyPath) .value_hint(clap::ValueHint::AnyPath)
.action(clap::ArgAction::Append) .action(clap::ArgAction::Append)
.help("Exclude any file or directory with this name"), .help("Exclude any file or directory with this path"),
) )
.arg( .arg(
Arg::new("ignore_all_in_file") Arg::new("ignore_all_in_file")
@@ -259,4 +268,59 @@ pub fn build_cli() -> Command {
.action(clap::ArgAction::SetTrue) .action(clap::ArgAction::SetTrue)
.help("Output the directory tree as json to the current directory"), .help("Output the directory tree as json to the current directory"),
) )
.arg(
Arg::new("mtime")
.short('M')
.long("mtime")
.num_args(1)
.allow_hyphen_values(true)
.value_parser(value_parser!(String))
        .help("+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (currn, +∞)")
)
.arg(
Arg::new("atime")
.short('A')
.long("atime")
.num_args(1)
.allow_hyphen_values(true)
.value_parser(value_parser!(String))
.help("just like -mtime, but based on file access time")
)
.arg(
Arg::new("ctime")
.short('y')
.long("ctime")
.num_args(1)
.allow_hyphen_values(true)
.value_parser(value_parser!(String))
.help("just like -mtime, but based on file change time")
)
.arg(
Arg::new("files0_from")
.long("files0-from")
.value_hint(clap::ValueHint::AnyPath)
.value_parser(value_parser!(String))
.num_args(1)
.help("run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input"),
)
.arg(
Arg::new("collapse")
.long("collapse")
.value_hint(clap::ValueHint::AnyPath)
.value_parser(value_parser!(String))
.action(clap::ArgAction::Append)
.help("Keep these directories collapsed"),
)
.arg(
Arg::new("filetime")
.short('m')
.long("filetime")
.num_args(1)
.value_parser([
PossibleValue::new("a").alias("accessed"),
PossibleValue::new("c").alias("changed"),
PossibleValue::new("m").alias("modified"),
])
.help("Directory 'size' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time"),
)
} }

View File

@@ -1,3 +1,5 @@
use crate::node::FileTime;
use chrono::{Local, TimeZone};
use clap::ArgMatches; use clap::ArgMatches;
use config_file::FromConfigFile; use config_file::FromConfigFile;
use regex::Regex; use regex::Regex;
@@ -6,11 +8,13 @@ use std::io::IsTerminal;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use crate::dir_walker::Operator;
use crate::display::get_number_format; use crate::display::get_number_format;
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
#[derive(Deserialize, Default)] #[derive(Deserialize, Default)]
#[serde(rename_all = "kebab-case")] #[serde(rename_all = "kebab-case")]
#[serde(deny_unknown_fields)]
pub struct Config { pub struct Config {
pub display_full_paths: Option<bool>, pub display_full_paths: Option<bool>,
pub display_apparent_size: Option<bool>, pub display_apparent_size: Option<bool>,
@@ -32,9 +36,17 @@ pub struct Config {
pub threads: Option<usize>, pub threads: Option<usize>,
pub output_json: Option<bool>, pub output_json: Option<bool>,
pub print_errors: Option<bool>, pub print_errors: Option<bool>,
pub files0_from: Option<String>,
} }
impl Config { impl Config {
pub fn get_files_from(&self, options: &ArgMatches) -> Option<String> {
let from_file = options.get_one::<String>("files0_from");
match from_file {
None => self.files0_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
}
pub fn get_no_colors(&self, options: &ArgMatches) -> bool { pub fn get_no_colors(&self, options: &ArgMatches) -> bool {
Some(true) == self.no_colors || options.get_flag("no_colors") Some(true) == self.no_colors || options.get_flag("no_colors")
} }
@@ -72,6 +84,20 @@ impl Config {
}) })
.to_lowercase() .to_lowercase()
} }
pub fn get_filetime(&self, options: &ArgMatches) -> Option<FileTime> {
let out_fmt = options.get_one::<String>("filetime");
match out_fmt {
None => None,
Some(x) => match x.as_str() {
"m" | "modified" => Some(FileTime::Modified),
"a" | "accessed" => Some(FileTime::Accessed),
"c" | "changed" => Some(FileTime::Changed),
_ => unreachable!(),
},
}
}
pub fn get_skip_total(&self, options: &ArgMatches) -> bool { pub fn get_skip_total(&self, options: &ArgMatches) -> bool {
Some(true) == self.skip_total || options.get_flag("skip_total") Some(true) == self.skip_total || options.get_flag("skip_total")
} }
@@ -132,6 +158,61 @@ impl Config {
pub fn get_output_json(&self, options: &ArgMatches) -> bool { pub fn get_output_json(&self, options: &ArgMatches) -> bool {
Some(true) == self.output_json || options.get_flag("output_json") Some(true) == self.output_json || options.get_flag("output_json")
} }
pub fn get_modified_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("mtime"),
get_current_date_epoch_seconds(),
)
}
pub fn get_accessed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("atime"),
get_current_date_epoch_seconds(),
)
}
pub fn get_changed_time_operator(&self, options: &ArgMatches) -> Option<(Operator, i64)> {
get_filter_time_operator(
options.get_one::<String>("ctime"),
get_current_date_epoch_seconds(),
)
}
}
fn get_current_date_epoch_seconds() -> i64 {
// calculate current date epoch seconds
let now = Local::now();
let current_date = now.date_naive();
let current_date_time = current_date.and_hms_opt(0, 0, 0).unwrap();
Local
.from_local_datetime(&current_date_time)
.unwrap()
.timestamp()
}
fn get_filter_time_operator(
option_value: Option<&String>,
current_date_epoch_seconds: i64,
) -> Option<(Operator, i64)> {
match option_value {
Some(val) => {
let time = current_date_epoch_seconds
- val
.parse::<i64>()
.unwrap_or_else(|_| panic!("invalid data format"))
.abs()
* DAY_SECONDS;
match val.chars().next().expect("Value should not be empty") {
'+' => Some((Operator::LessThan, time - DAY_SECONDS)),
'-' => Some((Operator::GreaterThan, time)),
_ => Some((Operator::Equal, time - DAY_SECONDS)),
}
}
None => None,
}
} }
fn convert_min_size(input: &str) -> Option<usize> { fn convert_min_size(input: &str) -> Option<usize> {
@@ -149,7 +230,7 @@ fn convert_min_size(input: &str) -> Option<usize> {
match number_format { match number_format {
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)), Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
None => { None => {
if letters.eq("") { if letters.is_empty() {
Some(parsed_digits) Some(parsed_digits)
} else { } else {
eprintln!("Ignoring invalid min-size: {input}"); eprintln!("Ignoring invalid min-size: {input}");
@@ -172,12 +253,29 @@ fn get_config_locations(base: &Path) -> Vec<PathBuf> {
] ]
} }
pub fn get_config() -> Config { pub fn get_config(conf_path: Option<String>) -> Config {
if let Some(home) = directories::BaseDirs::new() { match conf_path {
for path in get_config_locations(home.home_dir()) { Some(path_str) => {
let path = Path::new(&path_str);
if path.exists() { if path.exists() {
if let Ok(config) = Config::from_config_file(path) { match Config::from_config_file(path) {
return config; Ok(config) => return config,
Err(e) => {
eprintln!("Ignoring invalid config file '{}': {}", &path.display(), e)
}
}
} else {
eprintln!("Config file {:?} doesn't exist", &path.display());
}
}
None => {
if let Some(home) = directories::BaseDirs::new() {
for path in get_config_locations(home.home_dir()) {
if path.exists() {
if let Ok(config) = Config::from_config_file(&path) {
return config;
}
}
} }
} }
} }
@@ -191,8 +289,23 @@ pub fn get_config() -> Config {
mod tests { mod tests {
#[allow(unused_imports)] #[allow(unused_imports)]
use super::*; use super::*;
use chrono::{Datelike, Timelike};
use clap::builder::PossibleValue;
use clap::{value_parser, Arg, ArgMatches, Command}; use clap::{value_parser, Arg, ArgMatches, Command};
#[test]
fn test_get_current_date_epoch_seconds() {
    // The helper must return midnight (00:00:00) of today's local date.
    let midnight = Local
        .timestamp_opt(get_current_date_epoch_seconds(), 0)
        .unwrap();
    assert_eq!(midnight.hour(), 0);
    assert_eq!(midnight.minute(), 0);
    assert_eq!(midnight.second(), 0);
    // The date portion must match the current local date.
    let today = Local::now().date_naive();
    assert_eq!(midnight.date_naive().day(), today.day());
    assert_eq!(midnight.date_naive().month(), today.month());
    assert_eq!(midnight.date_naive().year(), today.year());
}
#[test] #[test]
fn test_conversion() { fn test_conversion() {
assert_eq!(convert_min_size("55"), Some(55)); assert_eq!(convert_min_size("55"), Some(55));
@@ -257,4 +370,56 @@ mod tests {
) )
.get_matches_from(args) .get_matches_from(args)
} }
#[test]
fn test_get_filetime() {
    // With neither a config value nor a flag, no filetime mode is selected.
    let c = Config::default();
    let args = get_filetime_args(vec!["dust"]);
    assert_eq!(c.get_filetime(&args), None);

    // Each flag spelling (short form and long alias) selects its variant.
    let assert_flag = |flag: &str, expected: FileTime| {
        let c = Config::default();
        let args = get_filetime_args(vec!["dust", "--filetime", flag]);
        assert_eq!(c.get_filetime(&args), Some(expected));
    };
    assert_flag("a", FileTime::Accessed);
    assert_flag("accessed", FileTime::Accessed);
    assert_flag("m", FileTime::Modified);
    assert_flag("modified", FileTime::Modified);
    assert_flag("c", FileTime::Changed);
    assert_flag("changed", FileTime::Changed);
}
/// Builds an `ArgMatches` fixture containing only the `--filetime` option,
/// mirroring the real CLI definition, parsed from the supplied argv.
fn get_filetime_args(args: Vec<&str>) -> ArgMatches {
    let filetime_arg = Arg::new("filetime")
        .short('m')
        .long("filetime")
        .num_args(1)
        .value_parser([
            PossibleValue::new("a").alias("accessed"),
            PossibleValue::new("c").alias("changed"),
            PossibleValue::new("m").alias("modified"),
        ])
        .help("Directory 'size' is max filetime of child files instead of disk size. while a/c/m for accessed/changed/modified time");
    Command::new("Dust").arg(filetime_arg).get_matches_from(args)
}
} }

View File

@@ -1,4 +1,6 @@
use std::cmp::Ordering;
use std::fs; use std::fs;
use std::io::Error;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
@@ -7,11 +9,13 @@ use crate::progress::Operation;
use crate::progress::PAtomicInfo; use crate::progress::PAtomicInfo;
use crate::progress::RuntimeErrors; use crate::progress::RuntimeErrors;
use crate::progress::ORDERING; use crate::progress::ORDERING;
use crate::utils::is_filtered_out_due_to_file_time;
use crate::utils::is_filtered_out_due_to_invert_regex; use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex; use crate::utils::is_filtered_out_due_to_regex;
use rayon::iter::ParallelBridge; use rayon::iter::ParallelBridge;
use rayon::prelude::ParallelIterator; use rayon::prelude::ParallelIterator;
use regex::Regex; use regex::Regex;
use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::collections::HashSet; use std::collections::HashSet;
@@ -19,14 +23,27 @@ use std::collections::HashSet;
use crate::node::build_node; use crate::node::build_node;
use std::fs::DirEntry; use std::fs::DirEntry;
use crate::node::FileTime;
use crate::platform::get_metadata; use crate::platform::get_metadata;
/// Comparison operator for the time-based filters (modified/accessed/changed).
/// Derived from the leading character of the user-supplied filter value:
/// '+' maps to `LessThan`, '-' to `GreaterThan`, anything else to `Equal`.
#[derive(Debug)]
pub enum Operator {
    Equal = 0,
    LessThan = 1,
    GreaterThan = 2,
}
pub struct WalkData<'a> { pub struct WalkData<'a> {
pub ignore_directories: HashSet<PathBuf>, pub ignore_directories: HashSet<PathBuf>,
pub filter_regex: &'a [Regex], pub filter_regex: &'a [Regex],
pub invert_filter_regex: &'a [Regex], pub invert_filter_regex: &'a [Regex],
pub allowed_filesystems: HashSet<u64>, pub allowed_filesystems: HashSet<u64>,
pub filter_modified_time: Option<(Operator, i64)>,
pub filter_accessed_time: Option<(Operator, i64)>,
pub filter_changed_time: Option<(Operator, i64)>,
pub use_apparent_size: bool, pub use_apparent_size: bool,
pub by_filecount: bool, pub by_filecount: bool,
pub by_filetime: &'a Option<FileTime>,
pub ignore_hidden: bool, pub ignore_hidden: bool,
pub follow_links: bool, pub follow_links: bool,
pub progress_data: Arc<PAtomicInfo>, pub progress_data: Arc<PAtomicInfo>,
@@ -44,19 +61,15 @@ pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
prog_data.state.store(Operation::PREPARING, ORDERING); prog_data.state.store(Operation::PREPARING, ORDERING);
clean_inodes(node, &mut inodes, walk_data.use_apparent_size) clean_inodes(node, &mut inodes, walk_data)
}) })
.collect(); .collect();
top_level_nodes top_level_nodes
} }
// Remove files which have the same inode, we don't want to double count them. // Remove files which have the same inode, we don't want to double count them.
fn clean_inodes( fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
x: Node, if !walk_data.use_apparent_size {
inodes: &mut HashSet<(u64, u64)>,
use_apparent_size: bool,
) -> Option<Node> {
if !use_apparent_size {
if let Some(id) = x.inode_device { if let Some(id) = x.inode_device {
if !inodes.insert(id) { if !inodes.insert(id) {
return None; return None;
@@ -69,12 +82,25 @@ fn clean_inodes(
tmp.sort_by(sort_by_inode); tmp.sort_by(sort_by_inode);
let new_children: Vec<_> = tmp let new_children: Vec<_> = tmp
.into_iter() .into_iter()
.filter_map(|c| clean_inodes(c, inodes, use_apparent_size)) .filter_map(|c| clean_inodes(c, inodes, walk_data))
.collect(); .collect();
let actual_size = if walk_data.by_filetime.is_some() {
// If by_filetime is Some, directory 'size' is the maximum filetime among child files instead of disk size
new_children
.iter()
.map(|c| c.size)
.chain(std::iter::once(x.size))
.max()
.unwrap_or(0)
} else {
// If by_filetime is None, directory 'size' is the sum of disk sizes or file counts of child files
x.size + new_children.iter().map(|c| c.size).sum::<u64>()
};
Some(Node { Some(Node {
name: x.name, name: x.name,
size: x.size + new_children.iter().map(|c| c.size).sum::<u64>(), size: actual_size,
children: new_children, children: new_children,
inode_device: x.inode_device, inode_device: x.inode_device,
depth: x.depth, depth: x.depth,
@@ -83,31 +109,56 @@ fn clean_inodes(
fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering { fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
// Sorting by inode is quicker than by sorting by name/size // Sorting by inode is quicker than by sorting by name/size
if let Some(x) = a.inode_device { match (a.inode_device, b.inode_device) {
if let Some(y) = b.inode_device { (Some(x), Some(y)) => {
if x.0 != y.0 { if x.0 != y.0 {
return x.0.cmp(&y.0); x.0.cmp(&y.0)
} else if x.1 != y.1 { } else if x.1 != y.1 {
return x.1.cmp(&y.1); x.1.cmp(&y.1)
} else {
a.name.cmp(&b.name)
} }
} }
(Some(_), None) => Ordering::Greater,
(None, Some(_)) => Ordering::Less,
(None, None) => a.name.cmp(&b.name),
} }
a.name.cmp(&b.name)
} }
fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool { fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.'); let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.');
let is_ignored_path = walk_data.ignore_directories.contains(&entry.path()); let is_ignored_path = walk_data.ignore_directories.contains(&entry.path());
let follow_links = walk_data.follow_links && entry.file_type().is_ok_and(|ft| ft.is_symlink());
if !walk_data.allowed_filesystems.is_empty() { if !walk_data.allowed_filesystems.is_empty() {
let size_inode_device = get_metadata(&entry.path(), false); let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
if let Some((_size, Some((_id, dev)))) = size_inode_device {
if !walk_data.allowed_filesystems.contains(&dev) { if !walk_data.allowed_filesystems.contains(&dev) {
return true; return true;
} }
} }
} }
if walk_data.filter_accessed_time.is_some()
|| walk_data.filter_modified_time.is_some()
|| walk_data.filter_changed_time.is_some()
{
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
if entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
}
}
}
// Keeping `walk_data.filter_regex.is_empty()` is important for performance reasons, it stops unnecessary work // Keeping `walk_data.filter_regex.is_empty()` is important for performance reasons, it stops unnecessary work
if !walk_data.filter_regex.is_empty() if !walk_data.filter_regex.is_empty()
@@ -130,6 +181,7 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> { fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
let prog_data = &walk_data.progress_data; let prog_data = &walk_data.progress_data;
let errors = &walk_data.errors; let errors = &walk_data.errors;
if errors.lock().unwrap().abort { if errors.lock().unwrap().abort {
return None; return None;
} }
@@ -161,13 +213,10 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
let node = build_node( let node = build_node(
entry.path(), entry.path(),
vec![], vec![],
walk_data.filter_regex,
walk_data.invert_filter_regex,
walk_data.use_apparent_size,
data.is_symlink(), data.is_symlink(),
data.is_file(), data.is_file(),
walk_data.by_filecount,
depth, depth,
walk_data,
); );
prog_data.num_files.fetch_add(1, ORDERING); prog_data.num_files.fetch_add(1, ORDERING);
@@ -182,8 +231,9 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
} }
} }
Err(ref failed) => { Err(ref failed) => {
let mut editable_error = errors.lock().unwrap(); if handle_error_and_retry(failed, &dir, walk_data) {
editable_error.no_permissions.insert(failed.to_string()); return walk(dir.clone(), walk_data, depth);
}
} }
} }
None None
@@ -191,21 +241,11 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
.collect() .collect()
} }
Err(failed) => { Err(failed) => {
let mut editable_error = errors.lock().unwrap(); if handle_error_and_retry(&failed, &dir, walk_data) {
match failed.kind() { return walk(dir, walk_data, depth);
std::io::ErrorKind::PermissionDenied => { } else {
editable_error vec![]
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
} }
vec![]
} }
} }
} else { } else {
@@ -216,20 +256,51 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
} }
vec![] vec![]
}; };
build_node( let is_symlink = if walk_data.follow_links {
dir, match fs::symlink_metadata(&dir) {
children, Ok(metadata) => metadata.file_type().is_symlink(),
walk_data.filter_regex, Err(_) => false,
walk_data.invert_filter_regex, }
walk_data.use_apparent_size, } else {
false, false
false, };
walk_data.by_filecount, build_node(dir, children, is_symlink, false, depth, walk_data)
depth, }
)
fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
let mut editable_error = walk_data.errors.lock().unwrap();
match failed.kind() {
std::io::ErrorKind::PermissionDenied => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::InvalidInput => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
std::io::ErrorKind::Interrupted => {
let mut editable_error = walk_data.errors.lock().unwrap();
editable_error.interrupted_error += 1;
if editable_error.interrupted_error > 3 {
panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
} else {
return true;
}
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
}
false
} }
mod tests { mod tests {
#[allow(unused_imports)] #[allow(unused_imports)]
use super::*; use super::*;
@@ -244,17 +315,43 @@ mod tests {
} }
} }
/// Builds a `WalkData` fixture for the tests below: no regex filters, no
/// allowed-filesystem restriction, no progress UI, and the given
/// `use_apparent_size` setting.
#[cfg(test)]
fn create_walker<'a>(use_apparent_size: bool) -> WalkData<'a> {
    use crate::PIndicator;
    let indicator = PIndicator::build_me();
    WalkData {
        ignore_directories: HashSet::new(),
        filter_regex: &[],
        invert_filter_regex: &[],
        allowed_filesystems: HashSet::new(),
        // Time filters set to (GreaterThan, 0) — presumably matches all real
        // files; TODO confirm against is_filtered_out_due_to_file_time.
        filter_modified_time: Some((Operator::GreaterThan, 0)),
        filter_accessed_time: Some((Operator::GreaterThan, 0)),
        filter_changed_time: Some((Operator::GreaterThan, 0)),
        use_apparent_size,
        by_filecount: false,
        by_filetime: &None,
        ignore_hidden: false,
        follow_links: false,
        progress_data: indicator.data.clone(),
        errors: Arc::new(Mutex::new(RuntimeErrors::default())),
    }
}
#[test] #[test]
#[allow(clippy::redundant_clone)] #[allow(clippy::redundant_clone)]
fn test_should_ignore_file() { fn test_should_ignore_file() {
let mut inodes = HashSet::new(); let mut inodes = HashSet::new();
let n = create_node(); let n = create_node();
let walkdata = create_walker(false);
// First time we insert the node // First time we insert the node
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), Some(n.clone())); assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
// Second time is a duplicate - we ignore it // Second time is a duplicate - we ignore it
assert_eq!(clean_inodes(n.clone(), &mut inodes, false), None); assert_eq!(clean_inodes(n.clone(), &mut inodes, &walkdata), None);
} }
#[test] #[test]
@@ -262,9 +359,53 @@ mod tests {
fn test_should_not_ignore_files_if_using_apparent_size() { fn test_should_not_ignore_files_if_using_apparent_size() {
let mut inodes = HashSet::new(); let mut inodes = HashSet::new();
let n = create_node(); let n = create_node();
let walkdata = create_walker(true);
// If using apparent size we include Nodes, even if duplicate inodes // If using apparent size we include Nodes, even if duplicate inodes
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone())); assert_eq!(
assert_eq!(clean_inodes(n.clone(), &mut inodes, true), Some(n.clone())); clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
}
#[test]
fn test_total_ordering_of_sort_by_inode() {
use std::str::FromStr;
let a = Node {
name: PathBuf::from_str("a").unwrap(),
size: 0,
children: vec![],
inode_device: Some((3, 66310)),
depth: 0,
};
let b = Node {
name: PathBuf::from_str("b").unwrap(),
size: 0,
children: vec![],
inode_device: None,
depth: 0,
};
let c = Node {
name: PathBuf::from_str("c").unwrap(),
size: 0,
children: vec![],
inode_device: Some((1, 66310)),
depth: 0,
};
assert_eq!(sort_by_inode(&a, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&a, &c), Ordering::Greater);
assert_eq!(sort_by_inode(&c, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&b, &a), Ordering::Less);
assert_eq!(sort_by_inode(&c, &a), Ordering::Less);
assert_eq!(sort_by_inode(&b, &c), Ordering::Less);
} }
} }

View File

@@ -1,4 +1,5 @@
use crate::display_node::DisplayNode; use crate::display_node::DisplayNode;
use crate::node::FileTime;
use ansi_term::Colour::Red; use ansi_term::Colour::Red;
use lscolors::{LsColors, Style}; use lscolors::{LsColors, Style};
@@ -7,6 +8,7 @@ use unicode_width::UnicodeWidthStr;
use stfu8::encode_u8; use stfu8::encode_u8;
use chrono::{DateTime, Local, TimeZone, Utc};
use std::cmp::max; use std::cmp::max;
use std::cmp::min; use std::cmp::min;
use std::fs; use std::fs;
@@ -14,14 +16,16 @@ use std::iter::repeat;
use std::path::Path; use std::path::Path;
use thousands::Separable; use thousands::Separable;
pub static UNITS: [char; 4] = ['T', 'G', 'M', 'K']; pub static UNITS: [char; 5] = ['P', 'T', 'G', 'M', 'K'];
static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' ']; static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
const FILETIME_SHOW_LENGTH: usize = 19;
pub struct InitialDisplayData { pub struct InitialDisplayData {
pub short_paths: bool, pub short_paths: bool,
pub is_reversed: bool, pub is_reversed: bool,
pub colors_on: bool, pub colors_on: bool,
pub by_filecount: bool, pub by_filecount: bool,
pub by_filetime: Option<FileTime>,
pub is_screen_reader: bool, pub is_screen_reader: bool,
pub output_format: String, pub output_format: String,
pub bars_on_right: bool, pub bars_on_right: bool,
@@ -130,17 +134,11 @@ pub fn draw_it(
root_node: &DisplayNode, root_node: &DisplayNode,
skip_total: bool, skip_total: bool,
) { ) {
let biggest = match skip_total {
false => root_node,
true => root_node
.get_children_from_node(false)
.next()
.unwrap_or(root_node),
};
let num_chars_needed_on_left_most = if idd.by_filecount { let num_chars_needed_on_left_most = if idd.by_filecount {
let max_size = biggest.size; let max_size = root_node.size;
max_size.separate_with_commas().chars().count() max_size.separate_with_commas().chars().count()
} else if idd.by_filetime.is_some() {
FILETIME_SHOW_LENGTH
} else { } else {
find_biggest_size_str(root_node, &idd.output_format) find_biggest_size_str(root_node, &idd.output_format)
}; };
@@ -166,7 +164,7 @@ pub fn draw_it(
let display_data = DisplayData { let display_data = DisplayData {
initial: idd, initial: idd,
num_chars_needed_on_left_most, num_chars_needed_on_left_most,
base_size: biggest.size, base_size: root_node.size,
longest_string_length, longest_string_length,
ls_colors: LsColors::from_env().unwrap_or_default(), ls_colors: LsColors::from_env().unwrap_or_default(),
}; };
@@ -275,7 +273,7 @@ fn clean_indentation_string(s: &str) -> String {
is is
} }
fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String { pub fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
let dir_name = dir_name.as_ref(); let dir_name = dir_name.as_ref();
let printable_name = { let printable_name = {
if short_paths { if short_paths {
@@ -342,6 +340,8 @@ pub fn format_string(
if display_data.initial.is_screen_reader { if display_data.initial.is_screen_reader {
// if screen_reader then bars is 'depth' // if screen_reader then bars is 'depth'
format!("{pretty_name} {bars} {pretty_size}{percent}") format!("{pretty_name} {bars} {pretty_size}{percent}")
} else if display_data.initial.by_filetime.is_some() {
format!("{pretty_size} {indent}{pretty_name}")
} else { } else {
format!("{pretty_size} {indent} {pretty_name}{percent}") format!("{pretty_size} {indent} {pretty_name}{percent}")
} }
@@ -376,6 +376,8 @@ fn get_name_percent(
fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String { fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
let output = if display_data.initial.by_filecount { let output = if display_data.initial.by_filecount {
node.size.separate_with_commas() node.size.separate_with_commas()
} else if display_data.initial.by_filetime.is_some() {
get_pretty_file_modified_time(node.size as i64)
} else { } else {
human_readable_number(node.size, &display_data.initial.output_format) human_readable_number(node.size, &display_data.initial.output_format)
}; };
@@ -389,6 +391,14 @@ fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayD
} }
} }
/// Renders a unix timestamp (seconds since the epoch) as a local-time
/// string of the form "2023-07-12T00:00:00".
fn get_pretty_file_modified_time(timestamp: i64) -> String {
    let utc_time: DateTime<Utc> = Utc.timestamp_opt(timestamp, 0).unwrap();
    utc_time
        .with_timezone(&Local)
        .format("%Y-%m-%dT%H:%M:%S")
        .to_string()
}
fn get_pretty_name( fn get_pretty_name(
node: &DisplayNode, node: &DisplayNode,
name_and_padding: String, name_and_padding: String,
@@ -469,6 +479,7 @@ mod tests {
is_reversed: false, is_reversed: false,
colors_on: false, colors_on: false,
by_filecount: false, by_filecount: false,
by_filetime: None,
is_screen_reader: false, is_screen_reader: false,
output_format: "".into(), output_format: "".into(),
bars_on_right: false, bars_on_right: false,
@@ -547,6 +558,14 @@ mod tests {
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M"); assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G"); assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T"); assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 234, ""),
"234T"
);
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 1024, ""),
"1.0P"
);
} }
#[test] #[test]
@@ -625,4 +644,37 @@ mod tests {
let bar = dd.generate_bar(&n, 5); let bar = dd.generate_bar(&n, 5);
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓"); assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
} }
#[test]
fn test_get_pretty_file_modified_time() {
    // (year, month, day, hour, minute, second) cases: a fixed date, a past
    // date, the local epoch start, and a future date.
    let cases = [
        (2023, 7, 12, 0, 0, 0),
        (2020, 1, 1, 12, 0, 0),
        (1970, 1, 1, 0, 0, 0),
        (2030, 12, 25, 6, 30, 0),
    ];
    for (y, mo, d, h, mi, s) in cases {
        let local_dt = Local.with_ymd_and_hms(y, mo, d, h, mi, s).unwrap();
        let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
        assert_eq!(
            get_pretty_file_modified_time(local_dt.timestamp()),
            expected_output
        );
    }
}
} }

View File

@@ -1,8 +1,12 @@
use std::cell::RefCell;
use std::path::PathBuf; use std::path::PathBuf;
use serde::Serialize; use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Serialize)] use crate::display::human_readable_number;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct DisplayNode { pub struct DisplayNode {
// Note: the order of fields in important here, for PartialEq and PartialOrd // Note: the order of fields in important here, for PartialEq and PartialOrd
pub size: u64, pub size: u64,
@@ -25,3 +29,30 @@ impl DisplayNode {
out out
} }
} }
// Only used for -j 'json' flag combined with -o 'output_type' flag
// Used to pass the output_type into the custom Serde serializer
thread_local! {
    pub static OUTPUT_TYPE: RefCell<String> = const { RefCell::new(String::new()) };
}

/*
We need the custom Serialize in case someone uses the -o flag to pass a custom output type in
(show size in Mb / Gb etc).
Sadly this also necessitates a thread-local OUTPUT_TYPE as we can not pass the output_type flag
into the serialize method
*/
impl Serialize for DisplayNode {
    /// Serializes the node with its size rendered as a human-readable string
    /// honouring the `-o` output format stashed in `OUTPUT_TYPE`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let readable_size = OUTPUT_TYPE
            .with(|output_type| human_readable_number(self.size, output_type.borrow().as_str()));
        // BUG FIX: three fields are emitted below, so the serde length hint
        // must be 3 (was 2). serde_json ignores the hint, but length-checked
        // formats would reject the extra field.
        let mut state = serializer.serialize_struct("DisplayNode", 3)?;
        state.serialize_field("size", &(readable_size))?;
        state.serialize_field("name", &self.name)?;
        state.serialize_field("children", &self.children)?;
        state.end()
    }
}

View File

@@ -1,7 +1,12 @@
use stfu8::encode_u8;
use crate::display::get_printable_name;
use crate::display_node::DisplayNode; use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node; use crate::node::Node;
use std::collections::BinaryHeap; use std::collections::BinaryHeap;
use std::collections::HashMap; use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
@@ -12,9 +17,15 @@ pub struct AggregateData {
pub number_of_lines: usize, pub number_of_lines: usize,
pub depth: usize, pub depth: usize,
pub using_a_filter: bool, pub using_a_filter: bool,
pub short_paths: bool,
} }
pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> Option<DisplayNode> { pub fn get_biggest(
top_level_nodes: Vec<Node>,
display_data: AggregateData,
by_filetime: &Option<FileTime>,
keep_collapsed: HashSet<PathBuf>,
) -> Option<DisplayNode> {
if top_level_nodes.is_empty() { if top_level_nodes.is_empty() {
// perhaps change this, bring back Error object? // perhaps change this, bring back Error object?
return None; return None;
@@ -24,14 +35,26 @@ pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> O
let root; let root;
if number_top_level_nodes > 1 { if number_top_level_nodes > 1 {
let size = top_level_nodes.iter().map(|node| node.size).sum(); let size = if by_filetime.is_some() {
top_level_nodes
.iter()
.map(|node| node.size)
.max()
.unwrap_or(0)
} else {
top_level_nodes.iter().map(|node| node.size).sum()
};
let nodes = handle_duplicate_top_level_names(top_level_nodes, display_data.short_paths);
root = Node { root = Node {
name: PathBuf::from("(total)"), name: PathBuf::from("(total)"),
size, size,
children: top_level_nodes, children: nodes,
inode_device: None, inode_device: None,
depth: 0, depth: 0,
}; };
// Always include the base nodes if we add a 'parent' (total) node // Always include the base nodes if we add a 'parent' (total) node
heap = always_add_children(&display_data, &root, heap); heap = always_add_children(&display_data, &root, heap);
} else { } else {
@@ -39,13 +62,19 @@ pub fn get_biggest(top_level_nodes: Vec<Node>, display_data: AggregateData) -> O
heap = add_children(&display_data, &root, heap); heap = add_children(&display_data, &root, heap);
} }
Some(fill_remaining_lines(heap, &root, display_data)) Some(fill_remaining_lines(
heap,
&root,
display_data,
keep_collapsed,
))
} }
pub fn fill_remaining_lines<'a>( pub fn fill_remaining_lines<'a>(
mut heap: BinaryHeap<&'a Node>, mut heap: BinaryHeap<&'a Node>,
root: &'a Node, root: &'a Node,
display_data: AggregateData, display_data: AggregateData,
keep_collapsed: HashSet<PathBuf>,
) -> DisplayNode { ) -> DisplayNode {
let mut allowed_nodes = HashMap::new(); let mut allowed_nodes = HashMap::new();
@@ -53,10 +82,14 @@ pub fn fill_remaining_lines<'a>(
let line = heap.pop(); let line = heap.pop();
match line { match line {
Some(line) => { Some(line) => {
// If we are not doing only_file OR if we are doing
// only_file and it has no children (ie is a file not a dir)
if !display_data.only_file || line.children.is_empty() { if !display_data.only_file || line.children.is_empty() {
allowed_nodes.insert(line.name.as_path(), line); allowed_nodes.insert(line.name.as_path(), line);
} }
heap = add_children(&display_data, line, heap); if !keep_collapsed.contains(&line.name) {
heap = add_children(&display_data, line, heap);
}
} }
None => break, None => break,
} }
@@ -114,7 +147,7 @@ fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) ->
.map(|c| recursive_rebuilder(allowed_nodes, c)) .map(|c| recursive_rebuilder(allowed_nodes, c))
.collect(); .collect();
build_node(new_children, current) build_display_node(new_children, current)
} }
// Applies all allowed nodes as children to current node // Applies all allowed nodes as children to current node
@@ -127,10 +160,10 @@ fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> Displ
children: vec![], children: vec![],
}) })
.collect::<Vec<DisplayNode>>(); .collect::<Vec<DisplayNode>>();
build_node(new_children, current) build_display_node(new_children, current)
} }
fn build_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode { fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse()); new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
DisplayNode { DisplayNode {
name: current.name.clone(), name: current.name.clone(),
@@ -138,3 +171,57 @@ fn build_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode
children: new_children, children: new_children,
} }
} }
/// Returns `true` if two or more top-level nodes would display with the same
/// printable short name (e.g. `dust /usr/*/Trash` yielding several 'Trash').
/// Takes `&[Node]` rather than `&Vec<Node>` (clippy::ptr_arg); callers passing
/// `&Vec<Node>` still work via deref coercion.
fn names_have_dup(top_level_nodes: &[Node]) -> bool {
    let mut seen = HashSet::new();
    // HashSet::insert returns false on a duplicate, so one lookup suffices
    // (the original did a `contains` followed by an `insert`).
    top_level_nodes
        .iter()
        .any(|node| !seen.insert(get_printable_name(&node.name, true)))
}
/// Disambiguates top-level nodes that would display with identical short
/// names (e.g. `dust /usr/*/Trash`) by appending an ancestor directory name,
/// producing names like `Trash(bob)`. If duplicates remain, it walks further
/// up (grandparent, great-grandparent, ...), appending one more ancestor per
/// pass, giving up after 10 levels. Only applies when `short_paths` is on;
/// otherwise the nodes are returned unchanged.
fn handle_duplicate_top_level_names(top_level_nodes: Vec<Node>, short_paths: bool) -> Vec<Node> {
    // If we have top level names that are the same - we need to tweak them:
    if short_paths && names_have_dup(&top_level_nodes) {
        let mut new_top_nodes = top_level_nodes.clone();
        let mut dir_walk_up_count = 0;
        while names_have_dup(&new_top_nodes) && dir_walk_up_count < 10 {
            dir_walk_up_count += 1;
            let mut newer = vec![];
            for node in new_top_nodes.iter() {
                // Iterate path components from deepest to shallowest.
                let mut folders = node.name.iter().rev();
                // Get parent folder (if second time round get grandparent and so on)
                for _ in 0..dir_walk_up_count {
                    folders.next();
                }
                match folders.next() {
                    // Add (parent_name) to path of Node
                    Some(data) => {
                        // encode_u8 keeps the ancestor printable even when the
                        // component is not valid UTF-8.
                        let parent = encode_u8(data.as_encoded_bytes());
                        let current_node = node.name.display();
                        let n = Node {
                            name: PathBuf::from(format!("{current_node}({parent})")),
                            size: node.size,
                            children: node.children.clone(),
                            inode_device: node.inode_device,
                            depth: node.depth,
                        };
                        newer.push(n)
                    }
                    // Node does not have a parent
                    None => newer.push(node.clone()),
                }
            }
            new_top_nodes = newer;
        }
        new_top_nodes
    } else {
        top_level_nodes
    }
}

View File

@@ -1,4 +1,5 @@
use crate::display_node::DisplayNode; use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node; use crate::node::Node;
use std::collections::HashMap; use std::collections::HashMap;
use std::ffi::OsStr; use std::ffi::OsStr;
@@ -10,7 +11,11 @@ struct ExtensionNode<'a> {
extension: Option<&'a OsStr>, extension: Option<&'a OsStr>,
} }
pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayNode> { pub fn get_all_file_types(
top_level_nodes: &[Node],
n: usize,
by_filetime: &Option<FileTime>,
) -> Option<DisplayNode> {
let ext_nodes = { let ext_nodes = {
let mut extension_cumulative_sizes = HashMap::new(); let mut extension_cumulative_sizes = HashMap::new();
build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes); build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
@@ -44,16 +49,27 @@ pub fn get_all_file_types(top_level_nodes: &[Node], n: usize) -> Option<DisplayN
// ...then, aggregate the remaining nodes (if any) into a single "(others)" node // ...then, aggregate the remaining nodes (if any) into a single "(others)" node
if ext_nodes_iter.len() > 0 { if ext_nodes_iter.len() > 0 {
let actual_size = if by_filetime.is_some() {
ext_nodes_iter.map(|node| node.size).max().unwrap_or(0)
} else {
ext_nodes_iter.map(|node| node.size).sum()
};
displayed.push(DisplayNode { displayed.push(DisplayNode {
name: PathBuf::from("(others)"), name: PathBuf::from("(others)"),
size: ext_nodes_iter.map(|node| node.size).sum(), size: actual_size,
children: vec![], children: vec![],
}); });
} }
let actual_size: u64 = if by_filetime.is_some() {
displayed.iter().map(|node| node.size).max().unwrap_or(0)
} else {
displayed.iter().map(|node| node.size).sum()
};
let result = DisplayNode { let result = DisplayNode {
name: PathBuf::from("(total)"), name: PathBuf::from("(total)"),
size: displayed.iter().map(|node| node.size).sum(), size: actual_size,
children: displayed, children: displayed,
}; };

View File

@@ -21,8 +21,11 @@ use regex::Error;
use std::collections::HashSet; use std::collections::HashSet;
use std::env; use std::env;
use std::fs::read_to_string; use std::fs::read_to_string;
use std::io;
use std::panic; use std::panic;
use std::process; use std::process;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::sync::Arc; use std::sync::Arc;
use std::sync::Mutex; use std::sync::Mutex;
use sysinfo::{System, SystemExt}; use sysinfo::{System, SystemExt};
@@ -30,6 +33,7 @@ use sysinfo::{System, SystemExt};
use self::display::draw_it; use self::display::draw_it;
use config::get_config; use config::get_config;
use dir_walker::walk_it; use dir_walker::walk_it;
use display_node::OUTPUT_TYPE;
use filter::get_biggest; use filter::get_biggest;
use filter_type::get_all_file_types; use filter_type::get_all_file_types;
use regex::Regex; use regex::Regex;
@@ -78,23 +82,19 @@ fn should_init_color(no_color: bool, force_color: bool) -> bool {
} }
fn get_height_of_terminal() -> usize { fn get_height_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size() terminal_size()
// Windows CI runners detect a terminal height of 0 // Windows CI runners detect a terminal height of 0
.map(|(_, Height(h))| max(h as usize, DEFAULT_NUMBER_OF_LINES)) .map(|(_, Height(h))| max(h.into(), DEFAULT_NUMBER_OF_LINES))
.unwrap_or(DEFAULT_NUMBER_OF_LINES) .unwrap_or(DEFAULT_NUMBER_OF_LINES)
- 10 - 10
} }
fn get_width_of_terminal() -> usize { fn get_width_of_terminal() -> usize {
// Simplify once https://github.com/eminence/terminal-size/pull/41 is
// merged
terminal_size() terminal_size()
.map(|(Width(w), _)| match cfg!(windows) { .map(|(Width(w), _)| match cfg!(windows) {
// Windows CI runners detect a very low terminal width // Windows CI runners detect a very low terminal width
true => max(w as usize, DEFAULT_TERMINAL_WIDTH), true => max(w.into(), DEFAULT_TERMINAL_WIDTH),
false => w as usize, false => w.into(),
}) })
.unwrap_or(DEFAULT_TERMINAL_WIDTH) .unwrap_or(DEFAULT_TERMINAL_WIDTH)
} }
@@ -113,23 +113,55 @@ fn get_regex_value(maybe_value: Option<ValuesRef<String>>) -> Vec<Regex> {
fn main() { fn main() {
let options = build_cli().get_matches(); let options = build_cli().get_matches();
let config = get_config(); let config = get_config(options.get_one::<String>("config").cloned());
let errors = RuntimeErrors::default(); let errors = RuntimeErrors::default();
let error_listen_for_ctrlc = Arc::new(Mutex::new(errors)); let error_listen_for_ctrlc = Arc::new(Mutex::new(errors));
let errors_for_rayon = error_listen_for_ctrlc.clone(); let errors_for_rayon = error_listen_for_ctrlc.clone();
let errors_final = error_listen_for_ctrlc.clone(); let errors_final = error_listen_for_ctrlc.clone();
let is_in_listing = Arc::new(AtomicBool::new(false));
let cloned_is_in_listing = Arc::clone(&is_in_listing);
ctrlc::set_handler(move || { ctrlc::set_handler(move || {
error_listen_for_ctrlc.lock().unwrap().abort = true; error_listen_for_ctrlc.lock().unwrap().abort = true;
println!("\nAborting"); println!("\nAborting");
if cloned_is_in_listing.load(Ordering::Relaxed) {
process::exit(1);
}
}) })
.expect("Error setting Ctrl-C handler"); .expect("Error setting Ctrl-C handler");
let target_dirs = match options.get_many::<String>("params") { is_in_listing.store(true, Ordering::Relaxed);
Some(values) => values.map(|v| v.as_str()).collect::<Vec<&str>>(), let target_dirs = match config.get_files_from(&options) {
None => vec!["."], Some(path) => {
if path == "-" {
let mut targets_to_add = io::stdin()
.lines()
.map_while(Result::ok)
.collect::<Vec<String>>();
if targets_to_add.is_empty() {
eprintln!("No input provided, defaulting to current directory");
targets_to_add.push(".".to_owned());
}
targets_to_add
} else {
// read file
match read_to_string(path) {
Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
Err(e) => {
eprintln!("Error reading file: {e}");
vec![".".to_owned()]
}
}
}
}
None => match options.get_many::<String>("params") {
Some(values) => values.cloned().collect(),
None => vec![".".to_owned()],
},
}; };
is_in_listing.store(false, Ordering::Relaxed);
let summarize_file_types = options.get_flag("types"); let summarize_file_types = options.get_flag("types");
@@ -189,13 +221,14 @@ fn main() {
.collect::<Vec<Regex>>(); .collect::<Vec<Regex>>();
let by_filecount = options.get_flag("by_filecount"); let by_filecount = options.get_flag("by_filecount");
let by_filetime = config.get_filetime(&options);
let limit_filesystem = options.get_flag("limit_filesystem"); let limit_filesystem = options.get_flag("limit_filesystem");
let follow_links = options.get_flag("dereference_links"); let follow_links = options.get_flag("dereference_links");
let simplified_dirs = simplify_dir_names(target_dirs);
let allowed_filesystems = limit_filesystem let allowed_filesystems = limit_filesystem
.then(|| get_filesystem_devices(simplified_dirs.iter())) .then(|| get_filesystem_devices(&target_dirs, follow_links))
.unwrap_or_default(); .unwrap_or_default();
let simplified_dirs = simplify_dir_names(&target_dirs);
let ignored_full_path: HashSet<PathBuf> = ignore_directories let ignored_full_path: HashSet<PathBuf> = ignore_directories
.into_iter() .into_iter()
@@ -211,13 +244,34 @@ fn main() {
indicator.spawn(output_format.clone()) indicator.spawn(output_format.clone())
} }
let keep_collapsed: HashSet<PathBuf> = match options.get_many::<String>("collapse") {
Some(collapse) => {
let mut combined_dirs = HashSet::new();
for collapse_dir in collapse {
for target_dir in target_dirs.iter() {
combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
}
}
combined_dirs
}
None => HashSet::new(),
};
let filter_modified_time = config.get_modified_time_operator(&options);
let filter_accessed_time = config.get_accessed_time_operator(&options);
let filter_changed_time = config.get_changed_time_operator(&options);
let walk_data = WalkData { let walk_data = WalkData {
ignore_directories: ignored_full_path, ignore_directories: ignored_full_path,
filter_regex: &filter_regexs, filter_regex: &filter_regexs,
invert_filter_regex: &invert_filter_regexs, invert_filter_regex: &invert_filter_regexs,
allowed_filesystems, allowed_filesystems,
filter_modified_time,
filter_accessed_time,
filter_changed_time,
use_apparent_size: config.get_apparent_size(&options), use_apparent_size: config.get_apparent_size(&options),
by_filecount, by_filecount,
by_filetime: &by_filetime,
ignore_hidden, ignore_hidden,
follow_links, follow_links,
progress_data: indicator.data.clone(), progress_data: indicator.data.clone(),
@@ -230,7 +284,7 @@ fn main() {
let top_level_nodes = walk_it(simplified_dirs, &walk_data); let top_level_nodes = walk_it(simplified_dirs, &walk_data);
let tree = match summarize_file_types { let tree = match summarize_file_types {
true => get_all_file_types(&top_level_nodes, number_of_lines), true => get_all_file_types(&top_level_nodes, number_of_lines, &by_filetime),
false => { false => {
let agg_data = AggregateData { let agg_data = AggregateData {
min_size: config.get_min_size(&options), min_size: config.get_min_size(&options),
@@ -239,8 +293,9 @@ fn main() {
number_of_lines, number_of_lines,
depth, depth,
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(), using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
short_paths: !config.get_full_paths(&options),
}; };
get_biggest(top_level_nodes, agg_data) get_biggest(top_level_nodes, agg_data, &by_filetime, keep_collapsed)
} }
}; };
@@ -287,19 +342,23 @@ fn main() {
} }
if let Some(root_node) = tree { if let Some(root_node) = tree {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
if config.get_output_json(&options) { if config.get_output_json(&options) {
OUTPUT_TYPE.with(|wrapped| {
wrapped.replace(output_format);
});
println!("{}", serde_json::to_string(&root_node).unwrap()); println!("{}", serde_json::to_string(&root_node).unwrap());
} else { } else {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
by_filetime,
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
draw_it( draw_it(
idd, idd,
config.get_no_bars(&options), config.get_no_bars(&options),

View File

@@ -1,8 +1,9 @@
use crate::dir_walker::WalkData;
use crate::platform::get_metadata; use crate::platform::get_metadata;
use crate::utils::is_filtered_out_due_to_file_time;
use crate::utils::is_filtered_out_due_to_invert_regex; use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex; use crate::utils::is_filtered_out_due_to_regex;
use regex::Regex;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::path::PathBuf; use std::path::PathBuf;
@@ -15,33 +16,56 @@ pub struct Node {
pub depth: usize, pub depth: usize,
} }
#[derive(Debug, PartialEq)]
pub enum FileTime {
Modified,
Accessed,
Changed,
}
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn build_node( pub fn build_node(
dir: PathBuf, dir: PathBuf,
children: Vec<Node>, children: Vec<Node>,
filter_regex: &[Regex],
invert_filter_regex: &[Regex],
use_apparent_size: bool,
is_symlink: bool, is_symlink: bool,
is_file: bool, is_file: bool,
by_filecount: bool,
depth: usize, depth: usize,
walk_data: &WalkData,
) -> Option<Node> { ) -> Option<Node> {
get_metadata(&dir, use_apparent_size).map(|data| { let use_apparent_size = walk_data.use_apparent_size;
let inode_device = if is_symlink && !use_apparent_size { let by_filecount = walk_data.by_filecount;
None let by_filetime = &walk_data.by_filetime;
} else {
data.1
};
let size = if is_filtered_out_due_to_regex(filter_regex, &dir) get_metadata(
|| is_filtered_out_due_to_invert_regex(invert_filter_regex, &dir) &dir,
|| (is_symlink && !use_apparent_size) use_apparent_size,
walk_data.follow_links && is_symlink,
)
.map(|data| {
let inode_device = data.1;
let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
|| is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
|| by_filecount && !is_file || by_filecount && !is_file
{ || [
(&walk_data.filter_modified_time, data.2 .0),
(&walk_data.filter_accessed_time, data.2 .1),
(&walk_data.filter_changed_time, data.2 .2),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
}) {
0 0
} else if by_filecount { } else if by_filecount {
1 1
} else if by_filetime.is_some() {
match by_filetime {
Some(FileTime::Modified) => data.2 .0.unsigned_abs(),
Some(FileTime::Accessed) => data.2 .1.unsigned_abs(),
Some(FileTime::Changed) => data.2 .2.unsigned_abs(),
None => unreachable!(),
}
} else { } else {
data.0 data.0
}; };

View File

@@ -10,15 +10,35 @@ fn get_block_size() -> u64 {
512 512
} }
type InodeAndDevice = (u64, u64);
type FileTime = (i64, i64, i64);
#[cfg(target_family = "unix")] #[cfg(target_family = "unix")]
pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> { pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
use std::os::unix::fs::MetadataExt; use std::os::unix::fs::MetadataExt;
match d.metadata() { let metadata = if follow_links {
path.as_ref().metadata()
} else {
path.as_ref().symlink_metadata()
};
match metadata {
Ok(md) => { Ok(md) => {
if use_apparent_size { if use_apparent_size {
Some((md.len(), Some((md.ino(), md.dev())))) Some((
md.len(),
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
} else { } else {
Some((md.blocks() * get_block_size(), Some((md.ino(), md.dev())))) Some((
md.blocks() * get_block_size(),
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
} }
} }
Err(_e) => None, Err(_e) => None,
@@ -26,7 +46,11 @@ pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u
} }
#[cfg(target_family = "windows")] #[cfg(target_family = "windows")]
pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> { pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
// On windows opening the file to get size, file ID and volume can be very // On windows opening the file to get size, file ID and volume can be very
// expensive because 1) it causes a few system calls, and more importantly 2) it can cause // expensive because 1) it causes a few system calls, and more importantly 2) it can cause
// windows defender to scan the file. // windows defender to scan the file.
@@ -65,7 +89,7 @@ pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u
use std::io; use std::io;
use winapi_util::Handle; use winapi_util::Handle;
fn handle_from_path_limited<P: AsRef<Path>>(path: P) -> io::Result<Handle> { fn handle_from_path_limited(path: &Path) -> io::Result<Handle> {
use std::fs::OpenOptions; use std::fs::OpenOptions;
use std::os::windows::fs::OpenOptionsExt; use std::os::windows::fs::OpenOptionsExt;
const FILE_READ_ATTRIBUTES: u32 = 0x0080; const FILE_READ_ATTRIBUTES: u32 = 0x0080;
@@ -91,30 +115,46 @@ pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u
} }
fn get_metadata_expensive( fn get_metadata_expensive(
d: &Path, path: &Path,
use_apparent_size: bool, use_apparent_size: bool,
) -> Option<(u64, Option<(u64, u64)>)> { ) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
use winapi_util::file::information; use winapi_util::file::information;
let h = handle_from_path_limited(d).ok()?; let h = handle_from_path_limited(path).ok()?;
let info = information(&h).ok()?; let info = information(&h).ok()?;
if use_apparent_size { if use_apparent_size {
use filesize::PathExt; use filesize::PathExt;
Some(( Some((
d.size_on_disk().ok()?, path.size_on_disk().ok()?,
Some((info.file_index(), info.volume_serial_number())), Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
)) ))
} else { } else {
Some(( Some((
info.file_size(), info.file_size(),
Some((info.file_index(), info.volume_serial_number())), Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
)) ))
} }
} }
use std::os::windows::fs::MetadataExt; use std::os::windows::fs::MetadataExt;
match d.metadata() { let path = path.as_ref();
let metadata = if follow_links {
path.metadata()
} else {
path.symlink_metadata()
};
match metadata {
Ok(ref md) => { Ok(ref md) => {
const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20; const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
const FILE_ATTRIBUTE_READONLY: u32 = 0x01; const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
@@ -142,11 +182,19 @@ pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u
|| md.file_attributes() == FILE_ATTRIBUTE_NORMAL) || md.file_attributes() == FILE_ATTRIBUTE_NORMAL)
&& !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size) && !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size)
{ {
Some((md.len(), None)) Some((
md.len(),
None,
(
md.last_write_time() as i64,
md.last_access_time() as i64,
md.creation_time() as i64,
),
))
} else { } else {
get_metadata_expensive(d, use_apparent_size) get_metadata_expensive(path, use_apparent_size)
} }
} }
_ => get_metadata_expensive(d, use_apparent_size), _ => get_metadata_expensive(path, use_apparent_size),
} }
} }

View File

@@ -3,7 +3,7 @@ use std::{
io::Write, io::Write,
path::Path, path::Path,
sync::{ sync::{
atomic::{AtomicU64, AtomicU8, AtomicUsize, Ordering}, atomic::{AtomicU8, AtomicUsize, Ordering},
mpsc::{self, RecvTimeoutError, Sender}, mpsc::{self, RecvTimeoutError, Sender},
Arc, RwLock, Arc, RwLock,
}, },
@@ -11,6 +11,11 @@ use std::{
time::Duration, time::Duration,
}; };
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
use crate::display::human_readable_number; use crate::display::human_readable_number;
/* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */
@@ -73,6 +78,7 @@ pub struct RuntimeErrors {
pub no_permissions: HashSet<String>, pub no_permissions: HashSet<String>,
pub file_not_found: HashSet<String>, pub file_not_found: HashSet<String>,
pub unknown_error: HashSet<String>, pub unknown_error: HashSet<String>,
pub interrupted_error: i32,
pub abort: bool, pub abort: bool,
} }

View File

@@ -2,13 +2,16 @@ use platform::get_metadata;
use std::collections::HashSet; use std::collections::HashSet;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use crate::config::DAY_SECONDS;
use crate::dir_walker::Operator;
use crate::platform; use crate::platform;
use regex::Regex; use regex::Regex;
pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf> { pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(filenames.len()); let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(dirs.len());
for t in filenames { for t in dirs {
let top_level_name = normalize_path(t); let top_level_name = normalize_path(t);
let mut can_add = true; let mut can_add = true;
let mut to_remove: Vec<PathBuf> = Vec::new(); let mut to_remove: Vec<PathBuf> = Vec::new();
@@ -31,13 +34,25 @@ pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf>
top_level_names top_level_names
} }
pub fn get_filesystem_devices<'a, P: IntoIterator<Item = &'a PathBuf>>(paths: P) -> HashSet<u64> { pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P], follow_links: bool) -> HashSet<u64> {
use std::fs;
// Gets the device ids for the filesystems which are used by the argument paths // Gets the device ids for the filesystems which are used by the argument paths
paths paths
.into_iter() .iter()
.filter_map(|p| match get_metadata(p, false) { .filter_map(|p| {
Some((_size, Some((_id, dev)))) => Some(dev), let follow_links = if follow_links {
_ => None, // slow path: If dereference-links is set, then we check if the file is a symbolic link
match fs::symlink_metadata(p) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
} else {
false
};
match get_metadata(p, false, follow_links) {
Some((_size, Some((_id, dev)), _time)) => Some(dev),
_ => None,
}
}) })
.collect() .collect()
} }
@@ -62,6 +77,20 @@ pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool
} }
} }
pub fn is_filtered_out_due_to_file_time(
filter_time: &Option<(Operator, i64)>,
actual_time: i64,
) -> bool {
match filter_time {
None => false,
Some((Operator::Equal, bound_time)) => {
!(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
}
Some((Operator::GreaterThan, bound_time)) => actual_time < *bound_time,
Some((Operator::LessThan, bound_time)) => actual_time > *bound_time,
}
}
pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool { pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool {
filter_regex filter_regex
.iter() .iter()
@@ -82,15 +111,15 @@ mod tests {
fn test_simplify_dir() { fn test_simplify_dir() {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(PathBuf::from("a")); correct.insert(PathBuf::from("a"));
assert_eq!(simplify_dir_names(vec!["a"]), correct); assert_eq!(simplify_dir_names(&["a"]), correct);
} }
#[test] #[test]
fn test_simplify_dir_rm_subdir() { fn test_simplify_dir_rm_subdir() {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(["a", "b"].iter().collect::<PathBuf>()); correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(vec!["a/b/c", "a/b", "a/b/d/f"]), correct); assert_eq!(simplify_dir_names(&["a/b/c", "a/b", "a/b/d/f"]), correct);
assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct); assert_eq!(simplify_dir_names(&["a/b", "a/b/c", "a/b/d/f"]), correct);
} }
#[test] #[test]
@@ -99,7 +128,7 @@ mod tests {
correct.insert(["a", "b"].iter().collect::<PathBuf>()); correct.insert(["a", "b"].iter().collect::<PathBuf>());
correct.insert(PathBuf::from("c")); correct.insert(PathBuf::from("c"));
assert_eq!( assert_eq!(
simplify_dir_names(vec![ simplify_dir_names(&[
"a/b", "a/b",
"a/b//", "a/b//",
"a/././b///", "a/././b///",
@@ -118,14 +147,14 @@ mod tests {
correct.insert(PathBuf::from("b")); correct.insert(PathBuf::from("b"));
correct.insert(["c", "a", "b"].iter().collect::<PathBuf>()); correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
correct.insert(["a", "b"].iter().collect::<PathBuf>()); correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct); assert_eq!(simplify_dir_names(&["a/b", "c/a/b/", "b"]), correct);
} }
#[test] #[test]
fn test_simplify_dir_dots() { fn test_simplify_dir_dots() {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(PathBuf::from("src")); correct.insert(PathBuf::from("src"));
assert_eq!(simplify_dir_names(vec!["src/."]), correct); assert_eq!(simplify_dir_names(&["src/."]), correct);
} }
#[test] #[test]
@@ -133,7 +162,7 @@ mod tests {
let mut correct = HashSet::new(); let mut correct = HashSet::new();
correct.insert(PathBuf::from("src")); correct.insert(PathBuf::from("src"));
correct.insert(PathBuf::from("src_v2")); correct.insert(PathBuf::from("src_v2"));
assert_eq!(simplify_dir_names(vec!["src/", "src_v2"]), correct); assert_eq!(simplify_dir_names(&["src/", "src_v2"]), correct);
} }
#[test] #[test]

View File

@@ -1,10 +1,11 @@
use assert_cmd::Command; use assert_cmd::Command;
use std::ffi::OsStr; use std::ffi::OsStr;
use std::process::Output; use std::process::Output;
use std::str;
use std::sync::Once; use std::sync::Once;
use std::{io, str};
static INIT: Once = Once::new(); static INIT: Once = Once::new();
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
/** /**
* This file contains tests that verify the exact output of the command. * This file contains tests that verify the exact output of the command.
@@ -34,21 +35,31 @@ fn copy_test_data(dir: &str) {
.map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err)); .map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err));
} }
fn create_unreadable_directory() -> io::Result<()> {
#[cfg(unix)]
{
use std::fs;
use std::fs::Permissions;
use std::os::unix::fs::PermissionsExt;
fs::create_dir_all(UNREADABLE_DIR_PATH)?;
fs::set_permissions(UNREADABLE_DIR_PATH, Permissions::from_mode(0))?;
}
Ok(())
}
fn initialize() { fn initialize() {
INIT.call_once(|| { INIT.call_once(|| {
copy_test_data("tests/test_dir"); copy_test_data("tests/test_dir");
copy_test_data("tests/test_dir2"); copy_test_data("tests/test_dir2");
copy_test_data("tests/test_dir_unicode"); copy_test_data("tests/test_dir_unicode");
Command::new("sh") if let Err(e) = create_unreadable_directory() {
.arg("-c") panic!("Failed to create unreadable directory: {}", e);
.arg("mkdir -p /tmp/unreadable_folder && chmod 000 /tmp/unreadable_folder") }
.output()
.unwrap();
}); });
} }
fn run_cmd<T: AsRef<OsStr>>(command_args: Vec<T>) -> Output { fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
initialize(); initialize();
let mut to_run = &mut Command::cargo_bin("dust").unwrap(); let mut to_run = &mut Command::cargo_bin("dust").unwrap();
for p in command_args { for p in command_args {
@@ -57,7 +68,7 @@ fn run_cmd<T: AsRef<OsStr>>(command_args: Vec<T>) -> Output {
to_run.unwrap() to_run.unwrap()
} }
fn exact_stdout_test<T: AsRef<OsStr>>(command_args: Vec<T>, valid_stdout: Vec<String>) { fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<String>) {
let to_run = run_cmd(command_args); let to_run = run_cmd(command_args);
let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned(); let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned();
@@ -72,7 +83,7 @@ fn exact_stdout_test<T: AsRef<OsStr>>(command_args: Vec<T>, valid_stdout: Vec<St
assert!(will_fail); assert!(will_fail);
} }
fn exact_stderr_test<T: AsRef<OsStr>>(command_args: Vec<T>, valid_stderr: String) { fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String) {
let to_run = run_cmd(command_args); let to_run = run_cmd(command_args);
let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim(); let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim();
@@ -84,20 +95,20 @@ fn exact_stderr_test<T: AsRef<OsStr>>(command_args: Vec<T>, valid_stderr: String
#[test] #[test]
pub fn test_main_basic() { pub fn test_main_basic() {
// -c is no color mode - This makes testing much simpler // -c is no color mode - This makes testing much simpler
exact_stdout_test(vec!["-c", "-B", "/tmp/test_dir/"], main_output()); exact_stdout_test(&["-c", "-B", "/tmp/test_dir/"], main_output());
} }
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_main_multi_arg() { pub fn test_main_multi_arg() {
let command_args = vec![ let command_args = [
"-c", "-c",
"-B", "-B",
"/tmp/test_dir/many/", "/tmp/test_dir/many/",
"/tmp/test_dir", "/tmp/test_dir",
"/tmp/test_dir", "/tmp/test_dir",
]; ];
exact_stdout_test(command_args, main_output()); exact_stdout_test(&command_args, main_output());
} }
fn main_output() -> Vec<String> { fn main_output() -> Vec<String> {
@@ -127,8 +138,8 @@ fn main_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_main_long_paths() { pub fn test_main_long_paths() {
let command_args = vec!["-c", "-p", "-B", "/tmp/test_dir/"]; let command_args = ["-c", "-p", "-B", "/tmp/test_dir/"];
exact_stdout_test(command_args, main_output_long_paths()); exact_stdout_test(&command_args, main_output_long_paths());
} }
fn main_output_long_paths() -> Vec<String> { fn main_output_long_paths() -> Vec<String> {
@@ -155,8 +166,8 @@ fn main_output_long_paths() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_substring_of_names_and_long_names() { pub fn test_substring_of_names_and_long_names() {
let command_args = vec!["-c", "-B", "/tmp/test_dir2"]; let command_args = ["-c", "-B", "/tmp/test_dir2"];
exact_stdout_test(command_args, no_substring_of_names_output()); exact_stdout_test(&command_args, no_substring_of_names_output());
} }
fn no_substring_of_names_output() -> Vec<String> { fn no_substring_of_names_output() -> Vec<String> {
@@ -189,8 +200,8 @@ fn no_substring_of_names_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_unicode_directories() { pub fn test_unicode_directories() {
let command_args = vec!["-c", "-B", "/tmp/test_dir_unicode"]; let command_args = ["-c", "-B", "/tmp/test_dir_unicode"];
exact_stdout_test(command_args, unicode_dir()); exact_stdout_test(&command_args, unicode_dir());
} }
fn unicode_dir() -> Vec<String> { fn unicode_dir() -> Vec<String> {
@@ -216,8 +227,8 @@ fn unicode_dir() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_apparent_size() { pub fn test_apparent_size() {
let command_args = vec!["-c", "-s", "-b", "/tmp/test_dir"]; let command_args = ["-c", "-s", "-b", "/tmp/test_dir"];
exact_stdout_test(command_args, apparent_size_output()); exact_stdout_test(&command_args, apparent_size_output());
} }
fn apparent_size_output() -> Vec<String> { fn apparent_size_output() -> Vec<String> {
@@ -242,21 +253,22 @@ fn apparent_size_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_permission_normal() { pub fn test_permission_normal() {
let command_args = vec!["/tmp/unreadable_folder"]; let command_args = [UNREADABLE_DIR_PATH];
let permission_msg = let permission_msg =
r#"Did not have permissions for all directories (add --print-errors to see errors)"# r#"Did not have permissions for all directories (add --print-errors to see errors)"#
.trim() .trim()
.to_string(); .to_string();
exact_stderr_test(command_args, permission_msg); exact_stderr_test(&command_args, permission_msg);
} }
#[cfg_attr(target_os = "windows", ignore)] #[cfg_attr(target_os = "windows", ignore)]
#[test] #[test]
pub fn test_permission_flag() { pub fn test_permission_flag() {
// add the flag to CLI // add the flag to CLI
let command_args = vec!["--print-errors", "/tmp/unreadable_folder"]; let command_args = ["--print-errors", UNREADABLE_DIR_PATH];
let permission_msg = r#"Did not have permissions for directories: /tmp/unreadable_folder"# let permission_msg = format!(
.trim() "Did not have permissions for directories: {}",
.to_string(); UNREADABLE_DIR_PATH
exact_stderr_test(command_args, permission_msg); );
exact_stderr_test(&command_args, permission_msg);
} }

View File

@@ -254,3 +254,26 @@ pub fn test_force_color() {
assert!(output.contains("\x1B[31m")); assert!(output.contains("\x1B[31m"));
assert!(output.contains("\x1B[0m")); assert!(output.contains("\x1B[0m"));
} }
#[test]
pub fn test_collapse() {
let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
assert!(output.contains("many"));
assert!(!output.contains("hello_file"));
}
#[test]
pub fn test_handle_duplicate_names() {
// Check that even if we run on a multiple directories with the same name
// we still show the distinct parent dir in the output
let output = build_command(vec![
"tests/test_dir_matching/dave/dup_name",
"tests/test_dir_matching/andy/dup_name",
"ci",
]);
assert!(output.contains("andy"));
assert!(output.contains("dave"));
assert!(output.contains("ci"));
assert!(output.contains("dup_name"));
assert!(!output.contains("test_dir_matching"));
}