Compare commits


2 Commits

Author | SHA1 | Message | Date
andy.boot | d033dd28d3 | version: increment version | 2025-07-29 22:56:09 +01:00
andy.boot | 48c9e54678 | feat: Return 1 if no dirs are found (A better error code) | 2025-07-29 22:56:09 +01:00
24 changed files with 966 additions and 719 deletions

View File

@@ -1,49 +1,359 @@
name: CICD
on:
push:
tags:
- 'v*'
pull_request:
# spell-checker:ignore CICD CODECOV MSVC MacOS Peltoche SHAs buildable clippy esac fakeroot gnueabihf halium libssl mkdir musl popd printf pushd rustfmt softprops toolchain
env:
PROJECT_NAME: dust
PROJECT_DESC: "du + rust = dust"
PROJECT_AUTH: "bootandy"
RUST_MIN_SRV: "1.31.0"
on: [push, pull_request]
jobs:
build-musl-deb:
runs-on: ubuntu-latest
style:
name: Style
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
- { os: ubuntu-latest }
- { os: macos-latest }
- { os: windows-latest }
steps:
- uses: actions/checkout@v4
- name: Install musl tools
- uses: actions/checkout@v1
- name: Initialize workflow variables
id: vars
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y musl-tools
- name: Install Rust with musl target
# 'windows-latest' `cargo fmt` is bugged for this project (see reasons @ GH:rust-lang/rustfmt #3324, #3590, #3688 ; waiting for repair)
JOB_DO_FORMAT_TESTING="true"
case ${{ matrix.job.os }} in windows-latest) unset JOB_DO_FORMAT_TESTING ;; esac;
echo set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING:-<empty>/false}
echo ::set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING}
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: x86_64-unknown-linux-musl
override: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy
- name: Install wget for Windows
if: matrix.job.os == 'windows-latest'
run: choco install wget --no-progress
- name: typos-action
uses: crate-ci/typos@v1.28.4
- name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- name: "`clippy` testing"
if: success() || failure() # run regardless of prior step ("`fmt` testing") success/failure
uses: actions-rs/cargo@v1
with:
command: clippy
args: ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings
- name: Build with musl
run: cargo build --release --target x86_64-unknown-linux-musl
min_version:
name: MinSRV # Minimum supported rust version
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ env.RUST_MIN_SRV }}
profile: minimal # minimal component installation (ie, no documentation)
- name: Test
uses: actions-rs/cargo@v1
with:
command: test
- name: Install cargo-deb
run: cargo install cargo-deb
- name: Create .deb package
build:
name: Build
runs-on: ${{ matrix.job.os }}
strategy:
fail-fast: false
matrix:
job:
# { os, target, cargo-options, features, use-cross, toolchain }
- {
os: ubuntu-latest,
target: aarch64-unknown-linux-gnu,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: aarch64-unknown-linux-musl,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: arm-unknown-linux-gnueabihf,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: arm-unknown-linux-musleabi,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: i686-unknown-linux-gnu,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: i686-unknown-linux-musl,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: x86_64-unknown-linux-gnu,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: x86_64-unknown-linux-musl,
use-cross: use-cross,
}
- { os: macos-latest, target: x86_64-apple-darwin }
- { os: windows-latest, target: i686-pc-windows-gnu }
- { os: windows-latest, target: i686-pc-windows-msvc }
- { os: windows-latest, target: x86_64-pc-windows-gnu } ## !maint: [rivy; 2020-01-21] may break due to rust bug; follow possible solution from GH:rust-lang/rust#47048 (refs: GH:rust-lang/rust#47048 , GH:rust-lang/rust#53454 , GH:bike-barn/hermit#172 )
- { os: windows-latest, target: x86_64-pc-windows-msvc }
steps:
- uses: actions/checkout@v1
- name: Install any prerequisites
shell: bash
run: |
cargo deb --target x86_64-unknown-linux-musl --no-build
case ${{ matrix.job.target }} in
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install binutils-aarch64-linux-gnu ;;
esac
- name: Initialize workflow variables
id: vars
shell: bash
run: |
# toolchain
TOOLCHAIN="stable" ## default to "stable" toolchain
# * specify alternate TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: <https://github.com/rust-lang/rust/issues/47048>, <https://github.com/rust-lang/rust/issues/53454>, <https://github.com/rust-lang/cargo/issues/6754>)
case ${{ matrix.job.target }} in *-pc-windows-gnu) TOOLCHAIN="stable-${{ matrix.job.target }}" ;; esac;
# * use requested TOOLCHAIN if specified
if [ -n "${{ matrix.job.toolchain }}" ]; then TOOLCHAIN="${{ matrix.job.toolchain }}" ; fi
echo set-output name=TOOLCHAIN::${TOOLCHAIN}
echo ::set-output name=TOOLCHAIN::${TOOLCHAIN}
# staging directory
STAGING='_staging'
echo set-output name=STAGING::${STAGING}
echo ::set-output name=STAGING::${STAGING}
# determine EXE suffix
EXE_suffix="" ; case ${{ matrix.job.target }} in *-pc-windows-*) EXE_suffix=".exe" ;; esac;
echo set-output name=EXE_suffix::${EXE_suffix}
echo ::set-output name=EXE_suffix::${EXE_suffix}
# parse commit reference info
REF_NAME=${GITHUB_REF#refs/*/}
unset REF_BRANCH ; case ${GITHUB_REF} in refs/heads/*) REF_BRANCH=${GITHUB_REF#refs/heads/} ;; esac;
unset REF_TAG ; case ${GITHUB_REF} in refs/tags/*) REF_TAG=${GITHUB_REF#refs/tags/} ;; esac;
REF_SHAS=${GITHUB_SHA:0:8}
echo set-output name=REF_NAME::${REF_NAME}
echo set-output name=REF_BRANCH::${REF_BRANCH}
echo set-output name=REF_TAG::${REF_TAG}
echo set-output name=REF_SHAS::${REF_SHAS}
echo ::set-output name=REF_NAME::${REF_NAME}
echo ::set-output name=REF_BRANCH::${REF_BRANCH}
echo ::set-output name=REF_TAG::${REF_TAG}
echo ::set-output name=REF_SHAS::${REF_SHAS}
# parse target
unset TARGET_ARCH ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) TARGET_ARCH=arm ;; aarch-*) TARGET_ARCH=aarch64 ;; i686-*) TARGET_ARCH=i686 ;; x86_64-*) TARGET_ARCH=x86_64 ;; esac;
echo set-output name=TARGET_ARCH::${TARGET_ARCH}
echo ::set-output name=TARGET_ARCH::${TARGET_ARCH}
unset TARGET_OS ; case ${{ matrix.job.target }} in *-linux-*) TARGET_OS=linux ;; *-apple-*) TARGET_OS=macos ;; *-windows-*) TARGET_OS=windows ;; esac;
echo set-output name=TARGET_OS::${TARGET_OS}
echo ::set-output name=TARGET_OS::${TARGET_OS}
# package name
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
PKG_BASENAME=${PROJECT_NAME}-${REF_TAG:-$REF_SHAS}-${{ matrix.job.target }}
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
echo set-output name=PKG_suffix::${PKG_suffix}
echo set-output name=PKG_BASENAME::${PKG_BASENAME}
echo set-output name=PKG_NAME::${PKG_NAME}
echo ::set-output name=PKG_suffix::${PKG_suffix}
echo ::set-output name=PKG_BASENAME::${PKG_BASENAME}
echo ::set-output name=PKG_NAME::${PKG_NAME}
# deployable tag? (ie, leading "vM" or "M"; M == version number)
unset DEPLOY ; if [[ $REF_TAG =~ ^[vV]?[0-9].* ]]; then DEPLOY='true' ; fi
echo set-output name=DEPLOY::${DEPLOY:-<empty>/false}
echo ::set-output name=DEPLOY::${DEPLOY}
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
# * CARGO_USE_CROSS (truthy)
CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac;
echo set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS:-<empty>/false}
echo ::set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS}
# # * `arm` cannot be tested on ubuntu-* hosts (b/c testing is currently primarily done via comparison of target outputs with built-in outputs and the `arm` target is not executable on the host)
JOB_DO_TESTING="true"
case ${{ matrix.job.target }} in arm-*|aarch64-*) unset JOB_DO_TESTING ;; esac;
echo set-output name=JOB_DO_TESTING::${JOB_DO_TESTING:-<empty>/false}
echo ::set-output name=JOB_DO_TESTING::${JOB_DO_TESTING}
# # * test only binary for arm-type targets
unset CARGO_TEST_OPTIONS
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-*|aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
echo set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
# * strip executable?
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; aarch64-unknown-linux-musl) STRIP="" ;; armv7-unknown-linux-musleabi) STRIP="" ;; arm-unknown-linux-musleabi) STRIP="" ;; esac;
- name: Upload .deb artifact
uses: actions/upload-artifact@v4
echo set-output name=STRIP::${STRIP}
echo ::set-output name=STRIP::${STRIP}
- name: Create all needed build/work directories
shell: bash
run: |
mkdir -p '${{ steps.vars.outputs.STAGING }}'
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
- name: rust toolchain ~ install
uses: actions-rs/toolchain@v1
with:
name: musl-deb-package
path: target/x86_64-unknown-linux-musl/debian/*.deb
- name: Release
toolchain: ${{ steps.vars.outputs.TOOLCHAIN }}
target: ${{ matrix.job.target }}
override: true
profile: minimal # minimal component installation (ie, no documentation)
- name: Info
shell: bash
run: |
gcc --version || true
rustup -V
rustup toolchain list
rustup default
cargo -V
rustc -V
- name: Build
uses: actions-rs/cargo@v1
with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: build
args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
- name: Install cargo-deb
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-deb
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
- name: Build deb
uses: actions-rs/cargo@v1
with:
command: deb
args: --no-build --target=${{ matrix.job.target }}
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
- name: Test
uses: actions-rs/cargo@v1
with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: test
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
- name: Archive executable artifacts
uses: actions/upload-artifact@master
with:
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}
path: target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}
- name: Archive deb artifacts
uses: actions/upload-artifact@master
with:
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}.deb
path: target/${{ matrix.job.target }}/debian
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
- name: Package
shell: bash
run: |
# binary
cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
# `strip` binary (if needed)
if [ -n "${{ steps.vars.outputs.STRIP }}" ]; then "${{ steps.vars.outputs.STRIP }}" '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' ; fi
# README and LICENSE
cp README.md '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
cp LICENSE '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
# base compressed package
pushd '${{ steps.vars.outputs.STAGING }}/' >/dev/null
case ${{ matrix.job.target }} in
*-pc-windows-*) 7z -y a '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* | tail -2 ;;
*) tar czf '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* ;;
esac;
popd >/dev/null
- name: Publish
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
if: steps.vars.outputs.DEPLOY
with:
files: target/x86_64-unknown-linux-musl/debian/*.deb
files: |
${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_NAME }}
target/${{ matrix.job.target }}/debian/*.deb
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
## fix! [rivy; 2020-22-01] `cargo tarpaulin` is unable to test this repo at the moment; alternate recipe or another testing framework?
# coverage:
# name: Code Coverage
# runs-on: ${{ matrix.job.os }}
# strategy:
# fail-fast: true
# matrix:
# # job: [ { os: ubuntu-latest }, { os: macos-latest }, { os: windows-latest } ]
# job: [ { os: ubuntu-latest } ] ## cargo-tarpaulin is currently only available on linux
# steps:
# - uses: actions/checkout@v1
# # - name: Reattach HEAD ## may be needed for accurate code coverage info
# # run: git checkout ${{ github.head_ref }}
# - name: Initialize workflow variables
# id: vars
# shell: bash
# run: |
# # staging directory
# STAGING='_staging'
# echo set-output name=STAGING::${STAGING}
# echo ::set-output name=STAGING::${STAGING}
# # check for CODECOV_TOKEN availability (work-around for inaccessible 'secrets' object for 'if'; see <https://github.community/t5/GitHub-Actions/jobs-lt-job-id-gt-if-does-not-work-with-env-secrets/m-p/38549>)
# unset HAS_CODECOV_TOKEN
# if [ -n $CODECOV_TOKEN ]; then HAS_CODECOV_TOKEN='true' ; fi
# echo set-output name=HAS_CODECOV_TOKEN::${HAS_CODECOV_TOKEN}
# echo ::set-output name=HAS_CODECOV_TOKEN::${HAS_CODECOV_TOKEN}
# env:
# CODECOV_TOKEN: "${{ secrets.CODECOV_TOKEN }}"
# - name: Create all needed build/work directories
# shell: bash
# run: |
# mkdir -p '${{ steps.vars.outputs.STAGING }}/work'
# - name: Install required packages
# run: |
# sudo apt-get -y install libssl-dev
# pushd '${{ steps.vars.outputs.STAGING }}/work' >/dev/null
# wget --no-verbose https://github.com/xd009642/tarpaulin/releases/download/0.9.3/cargo-tarpaulin-0.9.3-travis.tar.gz
# tar xf cargo-tarpaulin-0.9.3-travis.tar.gz
# cp cargo-tarpaulin "$(dirname -- "$(which cargo)")"/
# popd >/dev/null
# - name: Generate coverage
# run: |
# cargo tarpaulin --out Xml
# - name: Upload coverage results (CodeCov.io)
# # CODECOV_TOKEN (aka, "Repository Upload Token" for REPO from CodeCov.io) ## set via REPO/Settings/Secrets
# # if: secrets.CODECOV_TOKEN (not supported {yet?}; see <https://github.community/t5/GitHub-Actions/jobs-lt-job-id-gt-if-does-not-work-with-env-secrets/m-p/38549>)
# if: steps.vars.outputs.HAS_CODECOV_TOKEN
# run: |
# # CodeCov.io
# cargo tarpaulin --out Xml
# bash <(curl -s https://codecov.io/bash)
# env:
# CODECOV_TOKEN: "${{ secrets.CODECOV_TOKEN }}"

Cargo.lock (generated): 819 changed lines
File diff suppressed because it is too large.

View File

@@ -27,11 +27,11 @@ lto = true
strip = true
[dependencies]
clap = { version = "4", features = ["derive"] }
lscolors = "0.21"
nu-ansi-term = "0.50"
terminal_size = "0.4"
unicode-width = "0.2"
ansi_term = "0.12"
clap = { version = "4.4", features = ["derive"] }
lscolors = "0.13"
terminal_size = "0.2"
unicode-width = "0.1"
rayon = "1"
thousands = "0.2"
stfu8 = "0.2"
@@ -39,8 +39,9 @@ regex = "1"
config-file = "0.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sysinfo = "0.37"
ctrlc = "3"
directories = "4"
sysinfo = "0.27"
ctrlc = "3.4"
chrono = "0.4"
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]

View File

@@ -13,17 +13,6 @@ Because I want an easy way to see where my disk is being used.
![Example](media/snap.png)
Study the above picture.
* We see `target` has 1.8G
* `target/debug` is the same size as `target` - so we know nearly all the disk usage of the 1.5G is in this folder
* `target/debug/deps` this is 1.2G - Note the bar jumps down to 70% to indicate that most disk usage is here but not all.
* `target/debug/deps/dust-e78c9f87a17f24f3` - This is the largest file in this folder, but it is only 46M - Note the bar jumps down to 3% to indicate the file is small.
From here we can conclude:
* `target/debug/deps` takes the majority of the space in `target` and that `target/debug/deps` has a large number of relatively small files.
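The bar percentages quoted in the bullets above are just size ratios; below is a minimal sketch (not part of the README excerpt) that recomputes them from the approximate figures mentioned there (1.8G, 1.2G, 46M):

```rust
// Illustrative only: recompute the quoted bar percentages from the
// approximate sizes given in the bullets above.
fn main() {
    let target_gib = 1.8_f64;              // target
    let deps_gib = 1.2_f64;                // target/debug/deps
    let largest_file_gib = 46.0 / 1024.0;  // largest file in deps (46M)

    // roughly 67%, shown as ~70% on the bar
    println!("deps / target:       {:.0}%", 100.0 * deps_gib / target_gib);
    // roughly 4%, shown as ~3% on the bar
    println!("largest file / deps: {:.0}%", 100.0 * largest_file_gib / deps_gib);
}
```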
## Install
#### Cargo <a href="https://repology.org/project/du-dust/versions"><img src="https://repology.org/badge/vertical-allrepos/du-dust.svg" alt="Packaging status" align="right"></a>
@@ -80,8 +69,6 @@ Dust will list a slightly-less-than-the-terminal-height number of the biggest su
The different colors on the bars: These represent the combined tree hierarchy & disk usage. The shades of grey are used to indicate which parent folder a subfolder belongs to. For instance, look at the above screenshot. `.steam` is a folder taking 44% of the space. From the `.steam` bar is a light grey line that goes up. All these folders are inside `.steam` so if you delete `.steam` all that stuff will be gone too.
If you are new to the tool I recommend to try tweaking the `-n` parameter. `dust -n 10`, `dust -n 50`.
## Usage
```
@@ -115,8 +102,7 @@ Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack
Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Read NUL-terminated file paths from FILE; if FILE is '-', read from stdin)
Usage: dust --files-from=FILE (Read newline-terminated file paths from FILE; if FILE is '-', read from stdin)
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
```
@@ -136,16 +122,6 @@ reverse=true
- [dua](https://github.com/Byron/dua-cli/)
- [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
- [dirstat-rs](https://github.com/scullionw/dirstat-rs)
- `du -d 1 -h | sort -h`
## Why to use Dust over the Alternatives
Dust simply Does The Right Thing when handling lots of small files & directories. Dust keeps the output simple by only showing large entries.
Tools like ncdu & baobab, give you a view of directory sizes but you have no idea where the largest files are. For example directory A could have a size larger than directory B, but in fact the largest file is in B and not A. Finding this out via these other tools is not trivial whereas Dust will show the large file clearly in the tree hierarchy
Dust will not count hard links multiple times (unless you want to `-s`).
Typing `dust -n 90` will show you your 90 largest entries. `-n` is not quite like `head -n` or `tail -n`, dust is intelligent and chooses the largest entries
- du -d 1 -h | sort -h
Note: Apparent-size is calculated slightly differently in dust to gdu. In dust each hard link is counted as using file_length space. In gdu only the first entry is counted.
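A minimal sketch of that hard-link accounting difference, assuming made-up figures (one 10 MiB file reachable through three hard links):

```rust
// Hypothetical figures only: one 10 MiB file with 3 hard links pointing at it.
fn main() {
    let file_len: u64 = 10 * 1024 * 1024; // file length in bytes
    let hard_links: u64 = 3;

    // dust (apparent-size mode): each hard link contributes file_len
    let dust_total = file_len * hard_links;
    // gdu: only the first directory entry for the inode is counted
    let gdu_total = file_len;

    println!("dust: {dust_total} B, gdu: {gdu_total} B");
}
```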

View File

@@ -20,8 +20,8 @@ _dust() {
'-T+[Number of threads to use]:THREADS:_default' \
'--threads=[Number of threads to use]:THREADS:_default' \
'--config=[Specify a config file to use]:FILE:_files' \
'-n+[Display the '\''n'\'' largest entries. (Default is terminal_height)]:NUMBER:_default' \
'--number-of-lines=[Display the '\''n'\'' largest entries. (Default is terminal_height)]:NUMBER:_default' \
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'*-X+[Exclude any file or directory with this path]:PATH:_files' \
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
@@ -62,8 +62,7 @@ tb\:"terabyte (TB)"))' \
'--atime=[just like -mtime, but based on file access time]:ATIME:_default' \
'-y+[just like -mtime, but based on file change time]:CTIME:_default' \
'--ctime=[just like -mtime, but based on file change time]:CTIME:_default' \
'(--files-from)--files0-from=[Read NUL-terminated paths from FILE (use \`-\` for stdin)]:FILES0_FROM:_files' \
'(--files0-from)--files-from=[Read newline-terminated paths from FILE (use \`-\` for stdin)]:FILES_FROM:_files' \
'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]:FILES0_FROM:_files' \
'*--collapse=[Keep these directories collapsed]:COLLAPSE:_files' \
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
c\:"last changed time"

View File

@@ -26,8 +26,8 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
[CompletionResult]::new('-T', '-T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--threads', '--threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'Specify a config file to use')
[CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Display the ''n'' largest entries. (Default is terminal_height)')
[CompletionResult]::new('--number-of-lines', '--number-of-lines', [CompletionResultType]::ParameterName, 'Display the ''n'' largest entries. (Default is terminal_height)')
[CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--number-of-lines', '--number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('-X', '-X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('--ignore-directory', '--ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('-I', '-I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
@@ -50,8 +50,7 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
[CompletionResult]::new('--atime', '--atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('-y', '-y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--ctime', '--ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'Read NUL-terminated paths from FILE (use `-` for stdin)')
[CompletionResult]::new('--files-from', '--files-from', [CompletionResultType]::ParameterName, 'Read newline-terminated paths from FILE (use `-` for stdin)')
[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
[CompletionResult]::new('--collapse', '--collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
[CompletionResult]::new('-m', '-m', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('--filetime', '--filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')

View File

@@ -23,7 +23,7 @@ _dust() {
case "${cmd}" in
dust)
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore-hidden --invert-filter --filter --file-types --terminal-width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --files-from --collapse --filetime --help --version [PATH]..."
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore-hidden --invert-filter --filter --file-types --terminal-width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
@@ -182,10 +182,6 @@ _dust() {
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--collapse)
COMPREPLY=($(compgen -f "${cur}"))
return 0

View File

@@ -23,8 +23,8 @@ set edit:completion:arg-completer[dust] = {|@words|
cand -T 'Number of threads to use'
cand --threads 'Number of threads to use'
cand --config 'Specify a config file to use'
cand -n 'Display the ''n'' largest entries. (Default is terminal_height)'
cand --number-of-lines 'Display the ''n'' largest entries. (Default is terminal_height)'
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
cand -X 'Exclude any file or directory with this path'
cand --ignore-directory 'Exclude any file or directory with this path'
cand -I 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter'
@@ -47,8 +47,7 @@ set edit:completion:arg-completer[dust] = {|@words|
cand --atime 'just like -mtime, but based on file access time'
cand -y 'just like -mtime, but based on file change time'
cand --ctime 'just like -mtime, but based on file change time'
cand --files0-from 'Read NUL-terminated paths from FILE (use `-` for stdin)'
cand --files-from 'Read newline-terminated paths from FILE (use `-` for stdin)'
cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
cand --collapse 'Keep these directories collapsed'
cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'

View File

@@ -1,7 +1,7 @@
complete -c dust -s d -l depth -d 'Depth to show' -r
complete -c dust -s T -l threads -d 'Number of threads to use' -r
complete -c dust -l config -d 'Specify a config file to use' -r -F
complete -c dust -s n -l number-of-lines -d 'Display the \'n\' largest entries. (Default is terminal_height)' -r
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter' -r -F
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
@@ -22,8 +22,7 @@ complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - us
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)' -r
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'Read NUL-terminated paths from FILE (use `-` for stdin)' -r -F
complete -c dust -l files-from -d 'Read newline-terminated paths from FILE (use `-` for stdin)' -r -F
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "a\t'last accessed time'
c\t'last changed time'

View File

@@ -25,6 +25,4 @@ skip-total=true
ignore-hidden=true
# print sizes in powers of 1000 (e.g., 1.1G)
output-format="si"
number-of-lines=5
output-format="si"

View File

@@ -4,30 +4,30 @@
.SH NAME
Dust \- Like du but more intuitive
.SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore\-hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file\-types\fR] [\fB\-w\fR|\fB\-\-terminal\-width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-files\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore\-hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file\-types\fR] [\fB\-w\fR|\fB\-\-terminal\-width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
.SH DESCRIPTION
Like du but more intuitive
.SH OPTIONS
.TP
\fB\-d\fR, \fB\-\-depth\fR \fI<DEPTH>\fR
\fB\-d\fR, \fB\-\-depth\fR=\fIDEPTH\fR
Depth to show
.TP
\fB\-T\fR, \fB\-\-threads\fR \fI<THREADS>\fR
\fB\-T\fR, \fB\-\-threads\fR=\fITHREADS\fR
Number of threads to use
.TP
\fB\-\-config\fR \fI<FILE>\fR
\fB\-\-config\fR=\fIFILE\fR
Specify a config file to use
.TP
\fB\-n\fR, \fB\-\-number\-of\-lines\fR \fI<NUMBER>\fR
Display the \*(Aqn\*(Aq largest entries. (Default is terminal_height)
\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
Number of lines of output to show. (Default is terminal_height \- 10)
.TP
\fB\-p\fR, \fB\-\-full\-paths\fR
Subdirectories will not have their path shortened
.TP
\fB\-X\fR, \fB\-\-ignore\-directory\fR \fI<PATH>\fR
\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
Exclude any file or directory with this path
.TP
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR \fI<FILE>\fR
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by \-\-invert_filter
.TP
\fB\-L\fR, \fB\-\-dereference\-links\fR
@@ -54,7 +54,7 @@ No percent bars or percentages will be displayed
\fB\-B\fR, \fB\-\-bars\-on\-right\fR
percent bars moved to right side of screen
.TP
\fB\-z\fR, \fB\-\-min\-size\fR \fI<MIN_SIZE>\fR
\fB\-z\fR, \fB\-\-min\-size\fR=\fIMIN_SIZE\fR
Minimum size file to include in output
.TP
\fB\-R\fR, \fB\-\-screen\-reader\fR
@@ -69,16 +69,16 @@ Directory \*(Aqsize\*(Aq is number of child files instead of disk size
\fB\-i\fR, \fB\-\-ignore\-hidden\fR
Do not display hidden files
.TP
\fB\-v\fR, \fB\-\-invert\-filter\fR \fI<REGEX>\fR
\fB\-v\fR, \fB\-\-invert\-filter\fR=\fIREGEX\fR
Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$"
.TP
\fB\-e\fR, \fB\-\-filter\fR \fI<REGEX>\fR
\fB\-e\fR, \fB\-\-filter\fR=\fIREGEX\fR
Only include filepaths matching this regex. For png files type: \-e "\\.png$"
.TP
\fB\-t\fR, \fB\-\-file\-types\fR
show only these file types
.TP
\fB\-w\fR, \fB\-\-terminal\-width\fR \fI<WIDTH>\fR
\fB\-w\fR, \fB\-\-terminal\-width\fR=\fIWIDTH\fR
Specify width of output overriding the auto detection of terminal width
.TP
\fB\-P\fR, \fB\-\-no\-progress\fR
@@ -93,7 +93,7 @@ Only directories will be displayed
\fB\-F\fR, \fB\-\-only\-file\fR
Only files will be displayed. (Finds your largest files)
.TP
\fB\-o\fR, \fB\-\-output\-format\fR \fI<FORMAT>\fR
\fB\-o\fR, \fB\-\-output\-format\fR=\fIFORMAT\fR
Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size
.br
@@ -122,31 +122,28 @@ gb: gigabyte (GB)
tb: terabyte (TB)
.RE
.TP
\fB\-S\fR, \fB\-\-stack\-size\fR \fI<STACK_SIZE>\fR
\fB\-S\fR, \fB\-\-stack\-size\fR=\fISTACK_SIZE\fR
Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824)
.TP
\fB\-j\fR, \fB\-\-output\-json\fR
Output the directory tree as json to the current directory
.TP
\fB\-M\fR, \fB\-\-mtime\fR \fI<MTIME>\fR
\fB\-M\fR, \fB\-\-mtime\fR=\fIMTIME\fR
+/\-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and \-n => (𝑐𝑢𝑟𝑟𝑛, +∞)
.TP
\fB\-A\fR, \fB\-\-atime\fR \fI<ATIME>\fR
\fB\-A\fR, \fB\-\-atime\fR=\fIATIME\fR
just like \-mtime, but based on file access time
.TP
\fB\-y\fR, \fB\-\-ctime\fR \fI<CTIME>\fR
\fB\-y\fR, \fB\-\-ctime\fR=\fICTIME\fR
just like \-mtime, but based on file change time
.TP
\fB\-\-files0\-from\fR \fI<FILES0_FROM>\fR
Read NUL\-terminated paths from FILE (use `\-` for stdin)
\fB\-\-files0\-from\fR=\fIFILES0_FROM\fR
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
.TP
\fB\-\-files\-from\fR \fI<FILES_FROM>\fR
Read newline\-terminated paths from FILE (use `\-` for stdin)
.TP
\fB\-\-collapse\fR \fI<COLLAPSE>\fR
\fB\-\-collapse\fR=\fICOLLAPSE\fR
Keep these directories collapsed
.TP
\fB\-m\fR, \fB\-\-filetime\fR \fI<FILETIME>\fR
\fB\-m\fR, \fB\-\-filetime\fR=\fIFILETIME\fR
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
.br

Binary file not shown (Before: 107 KiB, After: 61 KiB).

View File

@@ -21,7 +21,7 @@ pub struct Cli {
#[arg(long, value_name("FILE"), value_hint(ValueHint::FilePath))]
pub config: Option<String>,
/// Display the 'n' largest entries. (Default is terminal_height)
/// Number of lines of output to show. (Default is terminal_height - 10)
#[arg(short, long, value_name("NUMBER"))]
pub number_of_lines: Option<usize>,
@@ -172,14 +172,11 @@ pub struct Cli {
#[arg(short('y'), long, allow_hyphen_values(true))]
pub ctime: Option<String>,
/// Read NUL-terminated paths from FILE (use `-` for stdin).
#[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files_from"))]
/// run dust on NUL-terminated file names specified in file; if argument is
/// -, then read names from standard input
#[arg(long, value_hint(ValueHint::AnyPath))]
pub files0_from: Option<String>,
/// Read newline-terminated paths from FILE (use `-` for stdin).
#[arg(long, value_hint(ValueHint::AnyPath), conflicts_with("files0_from"))]
pub files_from: Option<String>,
/// Keep these directories collapsed
#[arg(long, value_hint(ValueHint::AnyPath))]
pub collapse: Option<Vec<String>>,

View File

@@ -36,26 +36,16 @@ pub struct Config {
pub output_json: Option<bool>,
pub print_errors: Option<bool>,
pub files0_from: Option<String>,
pub number_of_lines: Option<usize>,
pub files_from: Option<String>,
}
impl Config {
pub fn get_files0_from(&self, options: &Cli) -> Option<String> {
pub fn get_files_from(&self, options: &Cli) -> Option<String> {
let from_file = &options.files0_from;
match from_file {
None => self.files0_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
}
pub fn get_files_from(&self, options: &Cli) -> Option<String> {
let from_file = &options.files_from;
match from_file {
None => self.files_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
}
pub fn get_no_colors(&self, options: &Cli) -> bool {
Some(true) == self.no_colors || options.no_colors
}
@@ -157,15 +147,6 @@ impl Config {
Some(true) == self.output_json || options.output_json
}
pub fn get_number_of_lines(&self, options: &Cli) -> Option<usize> {
let from_cmd_line = options.number_of_lines;
if from_cmd_line.is_none() {
self.number_of_lines
} else {
from_cmd_line
}
}
pub fn get_modified_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
get_filter_time_operator(options.mtime.as_ref(), get_current_date_epoch_seconds())
}
@@ -244,7 +225,7 @@ fn convert_min_size(input: &str) -> Option<usize> {
}
}
fn get_config_locations(base: PathBuf) -> Vec<PathBuf> {
fn get_config_locations(base: &Path) -> Vec<PathBuf> {
vec![
base.join(".dust.toml"),
base.join(".config").join("dust").join("config.toml"),
@@ -267,12 +248,12 @@ pub fn get_config(conf_path: Option<&String>) -> Config {
}
}
None => {
if let Some(home) = std::env::home_dir() {
for path in get_config_locations(home) {
if path.exists()
&& let Ok(config) = Config::from_config_file(&path)
{
return config;
if let Some(home) = directories::BaseDirs::new() {
for path in get_config_locations(home.home_dir()) {
if path.exists() {
if let Ok(config) = Config::from_config_file(&path) {
return config;
}
}
}
}
@@ -399,33 +380,4 @@ mod tests {
fn get_filetime_args(args: Vec<&str>) -> Cli {
Cli::parse_from(args)
}
#[test]
fn test_get_number_of_lines() {
// No config and no flag.
let c = Config::default();
let args = get_args(vec![]);
assert_eq!(c.get_number_of_lines(&args), None);
// Config is not defined and flag is defined.
let c = Config::default();
let args = get_args(vec!["dust", "--number-of-lines", "5"]);
assert_eq!(c.get_number_of_lines(&args), Some(5));
// Config is defined and flag is not defined.
let c = Config {
number_of_lines: Some(3),
..Default::default()
};
let args = get_args(vec![]);
assert_eq!(c.get_number_of_lines(&args), Some(3));
// Both config and flag are defined.
let c = Config {
number_of_lines: Some(3),
..Default::default()
};
let args = get_args(vec!["dust", "--number-of-lines", "5"]);
assert_eq!(c.get_number_of_lines(&args), Some(5));
}
}

View File

@@ -69,11 +69,12 @@ pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
// Remove files which have the same inode, we don't want to double count them.
fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
if !walk_data.use_apparent_size
&& let Some(id) = x.inode_device
&& !inodes.insert(id)
{
return None;
if !walk_data.use_apparent_size {
if let Some(id) = x.inode_device {
if !inodes.insert(id) {
return None;
}
}
}
// Sort Nodes so iteration order is predictable
@@ -155,10 +156,10 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
if !walk_data.allowed_filesystems.is_empty() {
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device
&& !walk_data.allowed_filesystems.contains(&dev)
{
return true;
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
if !walk_data.allowed_filesystems.contains(&dev) {
return true;
}
}
}
if walk_data.filter_accessed_time.is_some()
@@ -166,19 +167,20 @@ fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
|| walk_data.filter_changed_time.is_some()
{
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device
&& entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
if entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
}
}
}
@@ -220,30 +222,32 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
// return walk(entry.path(), walk_data, depth)
if !ignore_file(entry, walk_data)
&& let Ok(data) = entry.file_type()
{
if data.is_dir()
|| (walk_data.follow_links && data.is_symlink())
{
return walk(entry.path(), walk_data, depth + 1);
if !ignore_file(entry, walk_data) {
if let Ok(data) = entry.file_type() {
if data.is_dir()
|| (walk_data.follow_links && data.is_symlink())
{
return walk(entry.path(), walk_data, depth + 1);
}
let node = build_node(
entry.path(),
vec![],
data.is_symlink(),
data.is_file(),
depth,
walk_data,
);
prog_data.num_files.fetch_add(1, ORDERING);
if let Some(ref file) = node {
prog_data
.total_file_size
.fetch_add(file.size, ORDERING);
}
return node;
}
let node = build_node(
entry.path(),
vec![],
data.is_symlink(),
data.is_file(),
depth,
walk_data,
);
prog_data.num_files.fetch_add(1, ORDERING);
if let Some(ref file) = node {
prog_data.total_file_size.fetch_add(file.size, ORDERING);
}
return node;
}
}
Err(ref failed) => {

View File

@@ -1,8 +1,8 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use ansi_term::Colour::Red;
use lscolors::{LsColors, Style};
use nu_ansi_term::Color::Red;
use unicode_width::UnicodeWidthStr;
@@ -403,7 +403,7 @@ fn get_pretty_name(
.ls_colors
.style_for_path_with_metadata(&node.name, meta_result.as_ref().ok());
let ansi_style = directory_color
.map(Style::to_nu_ansi_term_style)
.map(Style::to_ansi_term_style)
.unwrap_or_default();
let out = ansi_style.paint(name_and_padding);
format!("{out}")
@@ -439,9 +439,6 @@ pub fn get_number_format(output_str: &str) -> Option<(u64, char)> {
}
pub fn human_readable_number(size: u64, output_str: &str) -> String {
if output_str == "count" {
return size.to_string();
};
match get_number_format(output_str) {
Some((x, u)) => {
format!("{}{}", (size / x), u)
@@ -542,13 +539,6 @@ mod tests {
assert_eq!(s, "short 3 4.0K 100%");
}
#[test]
fn test_machine_readable_filecount() {
assert_eq!(human_readable_number(1, "count"), "1");
assert_eq!(human_readable_number(1000, "count"), "1000");
assert_eq!(human_readable_number(1024, "count"), "1024");
}
#[test]
fn test_human_readable_number() {
assert_eq!(human_readable_number(1, ""), "1B");

View File

@@ -22,14 +22,13 @@ use progress::PIndicator;
use regex::Error;
use std::collections::HashSet;
use std::env;
use std::fs::{read, read_to_string};
use std::fs::read_to_string;
use std::io;
use std::io::Read;
use std::panic;
use std::process;
use std::sync::Arc;
use std::sync::Mutex;
use sysinfo::System;
use sysinfo::{System, SystemExt};
use utils::canonicalize_absolute_path;
use self::display::draw_it;
@@ -69,7 +68,7 @@ fn should_init_color(no_color: bool, force_color: bool) -> bool {
{
// Required for windows 10
// Fails to resolve for windows 8 so disable color
match nu_ansi_term::enable_ansi_support() {
match ansi_term::enable_ansi_support() {
Ok(_) => true,
Err(_) => {
eprintln!("This version of Windows does not support ANSI colors");
@@ -128,15 +127,34 @@ fn main() {
})
.expect("Error setting Ctrl-C handler");
let target_dirs = if let Some(path) = config.get_files0_from(&options) {
read_paths_from_source(&path, true)
} else if let Some(path) = config.get_files_from(&options) {
read_paths_from_source(&path, false)
} else {
match options.params {
let target_dirs = match config.get_files_from(&options) {
Some(path) => {
if path == "-" {
let mut targets_to_add = io::stdin()
.lines()
.map_while(Result::ok)
.collect::<Vec<String>>();
if targets_to_add.is_empty() {
eprintln!("No input provided, defaulting to current directory");
targets_to_add.push(".".to_owned());
}
targets_to_add
} else {
// read file
match read_to_string(path) {
Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
Err(e) => {
eprintln!("Error reading file: {e}");
vec![".".to_owned()]
}
}
}
}
None => match options.params {
Some(ref values) => values.clone(),
None => vec![".".to_owned()],
}
},
};
let summarize_file_types = options.file_types;
@@ -154,7 +172,7 @@ fn main() {
// If depth is set, then we set the default number_of_lines to be max
// instead of screen height
let number_of_lines = match config.get_number_of_lines(&options) {
let number_of_lines = match options.number_of_lines {
Some(val) => val,
None => {
if depth != usize::MAX {
@@ -319,11 +337,7 @@ fn print_output(
if config.get_output_json(&options) {
OUTPUT_TYPE.with(|wrapped| {
if by_filecount {
wrapped.replace("count".to_string());
} else {
wrapped.replace(output_format);
}
wrapped.replace(output_format);
});
println!("{}", serde_json::to_string(&tree).unwrap());
} else {
@@ -384,53 +398,6 @@ fn print_any_errors(print_errors: bool, final_errors: &RuntimeErrors) {
}
}
fn read_paths_from_source(path: &str, null_terminated: bool) -> Vec<String> {
let from_stdin = path == "-";
let result: Result<Vec<String>, Option<String>> = (|| {
// 1) read bytes
let bytes = if from_stdin {
let mut b = Vec::new();
io::stdin().lock().read_to_end(&mut b).map_err(|_| None)?;
b
} else {
read(path).map_err(|e| Some(e.to_string()))?
};
let text = std::str::from_utf8(&bytes).map_err(|e| {
if from_stdin {
None
} else {
Some(e.to_string())
}
})?;
let items: Vec<String> = if null_terminated {
text.split('\0')
.filter(|s| !s.is_empty())
.map(str::to_owned)
.collect()
} else {
text.lines().map(str::to_owned).collect()
};
if from_stdin && items.is_empty() {
return Err(None);
}
Ok(items)
})();
match result {
Ok(v) => v,
Err(None) => {
eprintln!("No files provided, defaulting to current directory");
vec![".".to_owned()]
}
Err(Some(msg)) => {
eprintln!("Failed to read file: {msg}");
vec![".".to_owned()]
}
}
}
fn init_rayon(stack: &Option<usize>, threads: &Option<usize>) -> rayon::ThreadPool {
let stack_size = match stack {
Some(s) => Some(*s),
@@ -440,10 +407,10 @@ fn init_rayon(stack: &Option<usize>, threads: &Option<usize>) -> rayon::ThreadPo
None
} else {
let large_stack = usize::pow(1024, 3);
let mut sys = System::new_all();
sys.refresh_memory();
let mut s = System::new();
s.refresh_memory();
// Larger stack size if possible to handle cases with lots of nested directories
let available = sys.available_memory();
let available = s.available_memory();
if available > (large_stack * threads.unwrap_or(1)).try_into().unwrap() {
Some(large_stack)
} else {

View File

@@ -1,2 +0,0 @@
tests/test_dir_files_from/a_file
tests/test_dir_files_from/hello_file

View File

@@ -1 +0,0 @@
hello

View File

@@ -1,4 +1,4 @@
use assert_cmd::{Command, cargo_bin_cmd};
use assert_cmd::Command;
use std::ffi::OsStr;
use std::process::Output;
use std::sync::Once;
@@ -61,11 +61,9 @@ fn initialize() {
fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
initialize();
let mut to_run = cargo_bin_cmd!("dust");
// Hide progress bar
to_run.arg("-P");
let mut to_run = &mut Command::cargo_bin("dust").unwrap();
for p in command_args {
to_run.arg(p);
to_run = to_run.arg(p);
}
to_run.unwrap()
}

View File

@@ -1,4 +1,4 @@
use assert_cmd::cargo_bin_cmd;
use assert_cmd::Command;
use std::ffi::OsStr;
use std::str;
@@ -9,16 +9,17 @@ use std::str;
*/
fn build_command<T: AsRef<OsStr>>(command_args: Vec<T>) -> String {
let mut cmd = cargo_bin_cmd!("dust");
let mut cmd = &mut Command::cargo_bin("dust").unwrap();
// Hide progress bar
cmd.arg("-P");
cmd = cmd.arg("-P");
for p in command_args {
cmd.arg(p);
cmd = cmd.arg(p);
}
let finished = &cmd.unwrap();
assert_eq!(str::from_utf8(&finished.stderr).unwrap(), "");
let stderr = str::from_utf8(&finished.stderr).unwrap();
assert_eq!(stderr, "");
str::from_utf8(&finished.stdout).unwrap().into()
}
@@ -103,57 +104,9 @@ pub fn test_ignore_all_in_file() {
assert!(!output.contains(".secret"));
}
#[test]
pub fn test_files_from_flag_file() {
let output = build_command(vec![
"--files-from",
"tests/test_dir_files_from/files_from.txt",
]);
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_files0_from_flag_file() {
let output = build_command(vec![
"--files0-from",
"tests/test_dir_files_from/files0_from.txt",
]);
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_files_from_flag_stdin() {
let mut cmd = cargo_bin_cmd!("dust");
cmd.arg("-P").arg("--files-from").arg("-");
let input = b"tests/test_dir_files_from/a_file\ntests/test_dir_files_from/hello_file\n";
cmd.write_stdin(input.as_ref());
let finished = &cmd.unwrap();
let stderr = std::str::from_utf8(&finished.stderr).unwrap();
assert_eq!(stderr, "");
let output = std::str::from_utf8(&finished.stdout).unwrap();
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_files0_from_flag_stdin() {
let mut cmd = cargo_bin_cmd!("dust");
cmd.arg("-P").arg("--files0-from").arg("-");
let input = b"tests/test_dir_files_from/a_file\0tests/test_dir_files_from/hello_file\0";
cmd.write_stdin(input.as_ref());
let finished = &cmd.unwrap();
let stderr = std::str::from_utf8(&finished.stderr).unwrap();
assert_eq!(stderr, "");
let output = std::str::from_utf8(&finished.stdout).unwrap();
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_with_bad_param() {
let mut cmd = cargo_bin_cmd!("dust");
let mut cmd = Command::cargo_bin("dust").unwrap();
cmd.arg("-P").arg("bad_place");
let output_error = cmd.unwrap_err();
let result = output_error.as_output().unwrap();

View File

@@ -1,4 +1,4 @@
use assert_cmd::{Command, cargo_bin_cmd};
use assert_cmd::Command;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
@@ -44,7 +44,7 @@ pub fn test_soft_sym_link() {
let b = format!(" ┌── {}", file_path_s);
let a = format!("─┴ {}", dir_s);
let mut cmd = cargo_bin_cmd!("dust");
let mut cmd = Command::cargo_bin("dust").unwrap();
// Mac test runners create long filenames in tmp directories
let output = cmd
.args(["-p", "-c", "-s", "-w", "999", dir_s])
@@ -72,7 +72,7 @@ pub fn test_hard_sym_link() {
let file_output = format!(" ┌── {}", file_path_s);
let dirs_output = format!("─┴ {}", dir_s);
let mut cmd = cargo_bin_cmd!("dust");
let mut cmd = Command::cargo_bin("dust").unwrap();
// Mac test runners create long filenames in tmp directories
let output = cmd.args(["-p", "-c", "-w", "999", dir_s]).unwrap().stdout;
@@ -96,7 +96,7 @@ pub fn test_hard_sym_link_no_dup_multi_arg() {
let link_name = dir_link.path().join("the_link");
let link_name_s = link_it(link_name, file_path_s, false);
let mut cmd = cargo_bin_cmd!("dust");
let mut cmd = Command::cargo_bin("dust").unwrap();
// Mac test runners create long filenames in tmp directories
let output = cmd
@@ -123,7 +123,7 @@ pub fn test_recursive_sym_link() {
let a = format!("─┬ {}", dir_s);
let b = format!(" └── {}", link_name_s);
let mut cmd = cargo_bin_cmd!("dust");
let mut cmd = Command::cargo_bin("dust").unwrap();
let output = cmd
.arg("-p")
.arg("-c")