Compare commits


5 Commits

Author     SHA1        Message                                       Date
andy.boot  e17e569c82  hack                                          2021-09-20 08:15:16 +01:00
andy.boot  4e3c50177b  hack                                          2021-09-19 15:43:40 +01:00
andy.boot  73bdb328f2  hack                                          2021-09-19 15:03:24 +01:00
andy.boot  68648178b3  hack                                          2021-09-19 14:44:31 +01:00
andy.boot  3c2149b431  release hacking: drop version number back to 0.7.1, curious if i can reuse it when testing  2021-09-19 14:37:38 +01:00
35 changed files with 1350 additions and 5024 deletions


@@ -22,61 +22,56 @@ jobs:
- { os: macos-latest }
- { os: windows-latest }
steps:
- uses: actions/checkout@v1
- name: Initialize workflow variables
id: vars
shell: bash
run: |
# 'windows-latest' `cargo fmt` is bugged for this project (see reasons @ GH:rust-lang/rustfmt #3324, #3590, #3688 ; waiting for repair)
JOB_DO_FORMAT_TESTING="true"
case ${{ matrix.job.os }} in windows-latest) unset JOB_DO_FORMAT_TESTING ;; esac;
echo set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING:-<empty>/false}
echo ::set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING}
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy
- name: Install wget for Windows
if: matrix.job.os == 'windows-latest'
run: choco install wget --no-progress
- name: typos-action
uses: crate-ci/typos@v1.28.4
- name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- name: "`clippy` testing"
if: success() || failure() # run regardless of prior step ("`fmt` testing") success/failure
uses: actions-rs/cargo@v1
with:
command: clippy
args: ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings
- uses: actions/checkout@v1
- name: Initialize workflow variables
id: vars
shell: bash
run: |
# 'windows-latest' `cargo fmt` is bugged for this project (see reasons @ GH:rust-lang/rustfmt #3324, #3590, #3688 ; waiting for repair)
JOB_DO_FORMAT_TESTING="true"
case ${{ matrix.job.os }} in windows-latest) unset JOB_DO_FORMAT_TESTING ;; esac;
echo set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING:-<empty>/false}
echo ::set-output name=JOB_DO_FORMAT_TESTING::${JOB_DO_FORMAT_TESTING}
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
- name: Install `rust` toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
profile: minimal # minimal component installation (ie, no documentation)
components: rustfmt, clippy
- name: "`fmt` testing"
if: steps.vars.outputs.JOB_DO_FORMAT_TESTING
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- name: "`clippy` testing"
if: success() || failure() # run regardless of prior step ("`fmt` testing") success/failure
uses: actions-rs/cargo@v1
with:
command: clippy
args: ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }} -- -D warnings
min_version:
name: MinSRV # Minimum supported rust version
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ env.RUST_MIN_SRV }}
profile: minimal # minimal component installation (ie, no documentation)
- name: Test
uses: actions-rs/cargo@v1
with:
command: test
- uses: actions/checkout@v1
- name: Install `rust` toolchain (v${{ env.RUST_MIN_SRV }})
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ env.RUST_MIN_SRV }}
profile: minimal # minimal component installation (ie, no documentation)
- name: Test
uses: actions-rs/cargo@v1
with:
command: test
build:
name: Build
@@ -86,223 +81,186 @@ jobs:
matrix:
job:
# { os, target, cargo-options, features, use-cross, toolchain }
- {
os: ubuntu-latest,
target: aarch64-unknown-linux-gnu,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: aarch64-unknown-linux-musl,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: arm-unknown-linux-gnueabihf,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: arm-unknown-linux-musleabi,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: i686-unknown-linux-gnu,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: i686-unknown-linux-musl,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: x86_64-unknown-linux-gnu,
use-cross: use-cross,
}
- {
os: ubuntu-latest,
target: x86_64-unknown-linux-musl,
use-cross: use-cross,
}
- { os: macos-latest, target: x86_64-apple-darwin }
- { os: windows-latest, target: i686-pc-windows-gnu }
- { os: windows-latest, target: i686-pc-windows-msvc }
- { os: windows-latest, target: x86_64-pc-windows-gnu } ## !maint: [rivy; 2020-01-21] may break due to rust bug; follow possible solution from GH:rust-lang/rust#47048 (refs: GH:rust-lang/rust#47048 , GH:rust-lang/rust#53454 , GH:bike-barn/hermit#172 )
- { os: windows-latest, target: x86_64-pc-windows-msvc }
- { os: ubuntu-latest , target: arm-unknown-linux-gnueabihf , use-cross: use-cross }
- { os: ubuntu-20.04 , target: i686-unknown-linux-gnu , use-cross: use-cross }
- { os: ubuntu-20.04 , target: i686-unknown-linux-musl , use-cross: use-cross }
- { os: ubuntu-20.04 , target: x86_64-unknown-linux-gnu , use-cross: use-cross }
- { os: ubuntu-20.04 , target: x86_64-unknown-linux-musl , use-cross: use-cross }
- { os: ubuntu-18.04 , target: x86_64-unknown-linux-gnu , use-cross: use-cross }
- { os: macos-latest , target: x86_64-apple-darwin }
- { os: windows-latest , target: i686-pc-windows-gnu }
- { os: windows-latest , target: i686-pc-windows-msvc }
- { os: windows-latest , target: x86_64-pc-windows-gnu } ## !maint: [rivy; 2020-01-21] may break due to rust bug; follow possible solution from GH:rust-lang/rust#47048 (refs: GH:rust-lang/rust#47048 , GH:rust-lang/rust#53454 , GH:bike-barn/hermit#172 )
- { os: windows-latest , target: x86_64-pc-windows-msvc }
steps:
- uses: actions/checkout@v1
- name: Install any prerequisites
shell: bash
run: |
case ${{ matrix.job.target }} in
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
aarch64-unknown-linux-gnu) sudo apt-get -y update ; sudo apt-get -y install binutils-aarch64-linux-gnu ;;
esac
- name: Initialize workflow variables
id: vars
shell: bash
run: |
# toolchain
TOOLCHAIN="stable" ## default to "stable" toolchain
# * specify alternate TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: <https://github.com/rust-lang/rust/issues/47048>, <https://github.com/rust-lang/rust/issues/53454>, <https://github.com/rust-lang/cargo/issues/6754>)
case ${{ matrix.job.target }} in *-pc-windows-gnu) TOOLCHAIN="stable-${{ matrix.job.target }}" ;; esac;
# * use requested TOOLCHAIN if specified
if [ -n "${{ matrix.job.toolchain }}" ]; then TOOLCHAIN="${{ matrix.job.toolchain }}" ; fi
echo set-output name=TOOLCHAIN::${TOOLCHAIN}
echo ::set-output name=TOOLCHAIN::${TOOLCHAIN}
# staging directory
STAGING='_staging'
echo set-output name=STAGING::${STAGING}
echo ::set-output name=STAGING::${STAGING}
# determine EXE suffix
EXE_suffix="" ; case ${{ matrix.job.target }} in *-pc-windows-*) EXE_suffix=".exe" ;; esac;
echo set-output name=EXE_suffix::${EXE_suffix}
echo ::set-output name=EXE_suffix::${EXE_suffix}
# parse commit reference info
REF_NAME=${GITHUB_REF#refs/*/}
unset REF_BRANCH ; case ${GITHUB_REF} in refs/heads/*) REF_BRANCH=${GITHUB_REF#refs/heads/} ;; esac;
unset REF_TAG ; case ${GITHUB_REF} in refs/tags/*) REF_TAG=${GITHUB_REF#refs/tags/} ;; esac;
REF_SHAS=${GITHUB_SHA:0:8}
echo set-output name=REF_NAME::${REF_NAME}
echo set-output name=REF_BRANCH::${REF_BRANCH}
echo set-output name=REF_TAG::${REF_TAG}
echo set-output name=REF_SHAS::${REF_SHAS}
echo ::set-output name=REF_NAME::${REF_NAME}
echo ::set-output name=REF_BRANCH::${REF_BRANCH}
echo ::set-output name=REF_TAG::${REF_TAG}
echo ::set-output name=REF_SHAS::${REF_SHAS}
# parse target
unset TARGET_ARCH ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) TARGET_ARCH=arm ;; aarch-*) TARGET_ARCH=aarch64 ;; i686-*) TARGET_ARCH=i686 ;; x86_64-*) TARGET_ARCH=x86_64 ;; esac;
echo set-output name=TARGET_ARCH::${TARGET_ARCH}
echo ::set-output name=TARGET_ARCH::${TARGET_ARCH}
unset TARGET_OS ; case ${{ matrix.job.target }} in *-linux-*) TARGET_OS=linux ;; *-apple-*) TARGET_OS=macos ;; *-windows-*) TARGET_OS=windows ;; esac;
echo set-output name=TARGET_OS::${TARGET_OS}
echo ::set-output name=TARGET_OS::${TARGET_OS}
# package name
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
PKG_BASENAME=${PROJECT_NAME}-${REF_TAG:-$REF_SHAS}-${{ matrix.job.target }}
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
echo set-output name=PKG_suffix::${PKG_suffix}
echo set-output name=PKG_BASENAME::${PKG_BASENAME}
echo set-output name=PKG_NAME::${PKG_NAME}
echo ::set-output name=PKG_suffix::${PKG_suffix}
echo ::set-output name=PKG_BASENAME::${PKG_BASENAME}
echo ::set-output name=PKG_NAME::${PKG_NAME}
# deployable tag? (ie, leading "vM" or "M"; M == version number)
unset DEPLOY ; if [[ $REF_TAG =~ ^[vV]?[0-9].* ]]; then DEPLOY='true' ; fi
echo set-output name=DEPLOY::${DEPLOY:-<empty>/false}
echo ::set-output name=DEPLOY::${DEPLOY}
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
# * CARGO_USE_CROSS (truthy)
CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac;
echo set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS:-<empty>/false}
echo ::set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS}
# # * `arm` cannot be tested on ubuntu-* hosts (b/c testing is currently primarily done via comparison of target outputs with built-in outputs and the `arm` target is not executable on the host)
JOB_DO_TESTING="true"
case ${{ matrix.job.target }} in arm-*|aarch64-*) unset JOB_DO_TESTING ;; esac;
echo set-output name=JOB_DO_TESTING::${JOB_DO_TESTING:-<empty>/false}
echo ::set-output name=JOB_DO_TESTING::${JOB_DO_TESTING}
# # * test only binary for arm-type targets
unset CARGO_TEST_OPTIONS
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-*|aarch64-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
echo set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
# * strip executable?
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; aarch64-unknown-linux-gnu) STRIP="aarch64-linux-gnu-strip" ;; aarch64-unknown-linux-musl) STRIP="" ;; armv7-unknown-linux-musleabi) STRIP="" ;; arm-unknown-linux-musleabi) STRIP="" ;; esac;
echo set-output name=STRIP::${STRIP}
echo ::set-output name=STRIP::${STRIP}
- name: Create all needed build/work directories
shell: bash
run: |
mkdir -p '${{ steps.vars.outputs.STAGING }}'
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
- name: rust toolchain ~ install
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ steps.vars.outputs.TOOLCHAIN }}
target: ${{ matrix.job.target }}
override: true
profile: minimal # minimal component installation (ie, no documentation)
- name: Info
shell: bash
run: |
gcc --version || true
rustup -V
rustup toolchain list
rustup default
cargo -V
rustc -V
- name: Build
uses: actions-rs/cargo@v1
with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: build
args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
- name: Install cargo-deb
uses: actions-rs/cargo@v1
with:
- uses: actions/checkout@v1
- name: Install any prerequisites
shell: bash
run: |
case ${{ matrix.job.target }} in
arm-unknown-linux-gnueabihf) sudo apt-get -y update ; sudo apt-get -y install gcc-arm-linux-gnueabihf ;;
esac
- name: Initialize workflow variables
id: vars
shell: bash
run: |
# toolchain
TOOLCHAIN="stable" ## default to "stable" toolchain
# * specify alternate TOOLCHAIN for *-pc-windows-gnu targets; gnu targets on Windows are broken for the standard *-pc-windows-msvc toolchain (refs: <https://github.com/rust-lang/rust/issues/47048>, <https://github.com/rust-lang/rust/issues/53454>, <https://github.com/rust-lang/cargo/issues/6754>)
case ${{ matrix.job.target }} in *-pc-windows-gnu) TOOLCHAIN="stable-${{ matrix.job.target }}" ;; esac;
# * use requested TOOLCHAIN if specified
if [ -n "${{ matrix.job.toolchain }}" ]; then TOOLCHAIN="${{ matrix.job.toolchain }}" ; fi
echo set-output name=TOOLCHAIN::${TOOLCHAIN}
echo ::set-output name=TOOLCHAIN::${TOOLCHAIN}
# staging directory
STAGING='_staging'
echo set-output name=STAGING::${STAGING}
echo ::set-output name=STAGING::${STAGING}
# determine EXE suffix
EXE_suffix="" ; case ${{ matrix.job.target }} in *-pc-windows-*) EXE_suffix=".exe" ;; esac;
echo set-output name=EXE_suffix::${EXE_suffix}
echo ::set-output name=EXE_suffix::${EXE_suffix}
# parse commit reference info
REF_NAME=${GITHUB_REF#refs/*/}
unset REF_BRANCH ; case ${GITHUB_REF} in refs/heads/*) REF_BRANCH=${GITHUB_REF#refs/heads/} ;; esac;
unset REF_TAG ; case ${GITHUB_REF} in refs/tags/*) REF_TAG=${GITHUB_REF#refs/tags/} ;; esac;
REF_SHAS=${GITHUB_SHA:0:8}
echo set-output name=REF_NAME::${REF_NAME}
echo set-output name=REF_BRANCH::${REF_BRANCH}
echo set-output name=REF_TAG::${REF_TAG}
echo set-output name=REF_SHAS::${REF_SHAS}
echo ::set-output name=REF_NAME::${REF_NAME}
echo ::set-output name=REF_BRANCH::${REF_BRANCH}
echo ::set-output name=REF_TAG::${REF_TAG}
echo ::set-output name=REF_SHAS::${REF_SHAS}
# parse target
unset TARGET_ARCH ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) TARGET_ARCH=arm ;; i686-*) TARGET_ARCH=i686 ;; x86_64-*) TARGET_ARCH=x86_64 ;; esac;
echo set-output name=TARGET_ARCH::${TARGET_ARCH}
echo ::set-output name=TARGET_ARCH::${TARGET_ARCH}
unset TARGET_OS ; case ${{ matrix.job.target }} in *-linux-*) TARGET_OS=linux ;; *-apple-*) TARGET_OS=macos ;; *-windows-*) TARGET_OS=windows ;; esac;
echo set-output name=TARGET_OS::${TARGET_OS}
echo ::set-output name=TARGET_OS::${TARGET_OS}
# package name
PKG_suffix=".tar.gz" ; case ${{ matrix.job.target }} in *-pc-windows-*) PKG_suffix=".zip" ;; esac;
PKG_BASENAME=${PROJECT_NAME}-${REF_TAG:-$REF_SHAS}-${{ matrix.job.target }}
PKG_NAME=${PKG_BASENAME}${PKG_suffix}
echo set-output name=PKG_suffix::${PKG_suffix}
echo set-output name=PKG_BASENAME::${PKG_BASENAME}
echo set-output name=PKG_NAME::${PKG_NAME}
echo ::set-output name=PKG_suffix::${PKG_suffix}
echo ::set-output name=PKG_BASENAME::${PKG_BASENAME}
echo ::set-output name=PKG_NAME::${PKG_NAME}
# deployable tag? (ie, leading "vM" or "M"; M == version number)
unset DEPLOY ; if [[ $REF_TAG =~ ^[vV]?[0-9].* ]]; then DEPLOY='true' ; fi
echo set-output name=DEPLOY::${DEPLOY:-<empty>/false}
echo ::set-output name=DEPLOY::${DEPLOY}
# target-specific options
# * CARGO_FEATURES_OPTION
CARGO_FEATURES_OPTION='' ;
if [ -n "${{ matrix.job.features }}" ]; then CARGO_FEATURES_OPTION='--features "${{ matrix.job.features }}"' ; fi
echo set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
echo ::set-output name=CARGO_FEATURES_OPTION::${CARGO_FEATURES_OPTION}
# * CARGO_USE_CROSS (truthy)
CARGO_USE_CROSS='true' ; case '${{ matrix.job.use-cross }}' in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac;
echo set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS:-<empty>/false}
echo ::set-output name=CARGO_USE_CROSS::${CARGO_USE_CROSS}
# # * `arm` cannot be tested on ubuntu-* hosts (b/c testing is currently primarily done via comparison of target outputs with built-in outputs and the `arm` target is not executable on the host)
JOB_DO_TESTING="true"
case ${{ matrix.job.target }} in arm-*) unset JOB_DO_TESTING ;; esac;
echo set-output name=JOB_DO_TESTING::${JOB_DO_TESTING:-<empty>/false}
echo ::set-output name=JOB_DO_TESTING::${JOB_DO_TESTING}
# # * test only binary for arm-type targets
unset CARGO_TEST_OPTIONS
unset CARGO_TEST_OPTIONS ; case ${{ matrix.job.target }} in arm-*) CARGO_TEST_OPTIONS="--bin ${PROJECT_NAME}" ;; esac;
echo set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
echo ::set-output name=CARGO_TEST_OPTIONS::${CARGO_TEST_OPTIONS}
# * strip executable?
STRIP="strip" ; case ${{ matrix.job.target }} in arm-unknown-linux-gnueabihf) STRIP="arm-linux-gnueabihf-strip" ;; *-pc-windows-msvc) STRIP="" ;; esac;
echo set-output name=STRIP::${STRIP}
echo ::set-output name=STRIP::${STRIP}
- name: Create all needed build/work directories
shell: bash
run: |
mkdir -p '${{ steps.vars.outputs.STAGING }}'
mkdir -p '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}'
- name: rust toolchain ~ install
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ steps.vars.outputs.TOOLCHAIN }}
target: ${{ matrix.job.target }}
override: true
profile: minimal # minimal component installation (ie, no documentation)
- name: Info
shell: bash
run: |
gcc --version || true
rustup -V
rustup toolchain list
rustup default
cargo -V
rustc -V
- name: Build
uses: actions-rs/cargo@v1
with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: build
args: --release --target=${{ matrix.job.target }} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
- name: Install cargo-deb
uses: actions-rs/cargo@v1
with:
command: install
args: cargo-deb
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
- name: Build deb
uses: actions-rs/cargo@v1
with:
if: ${{ contains(matrix.job.target, 'musl') }}
- name: Build deb
uses: actions-rs/cargo@v1
with:
command: deb
args: --no-build --target=${{ matrix.job.target }}
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
- name: Test
uses: actions-rs/cargo@v1
with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: test
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
- name: Archive executable artifacts
uses: actions/upload-artifact@master
with:
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}
path: target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}
- name: Archive deb artifacts
uses: actions/upload-artifact@master
with:
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}.deb
path: target/${{ matrix.job.target }}/debian
if: matrix.job.target == 'i686-unknown-linux-musl' || matrix.job.target == 'x86_64-unknown-linux-musl'
- name: Package
shell: bash
run: |
# binary
cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
# `strip` binary (if needed)
if [ -n "${{ steps.vars.outputs.STRIP }}" ]; then "${{ steps.vars.outputs.STRIP }}" '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' ; fi
# README and LICENSE
cp README.md '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
cp LICENSE '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
# base compressed package
pushd '${{ steps.vars.outputs.STAGING }}/' >/dev/null
case ${{ matrix.job.target }} in
*-pc-windows-*) 7z -y a '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* | tail -2 ;;
*) tar czf '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* ;;
esac;
popd >/dev/null
- name: Publish
uses: softprops/action-gh-release@v1
if: steps.vars.outputs.DEPLOY
with:
files: |
${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_NAME }}
target/${{ matrix.job.target }}/debian/*.deb
if: ${{ contains(matrix.job.target, 'musl') }}
- name: Test
uses: actions-rs/cargo@v1
with:
use-cross: ${{ steps.vars.outputs.CARGO_USE_CROSS }}
command: test
args: --target=${{ matrix.job.target }} ${{ steps.vars.outputs.CARGO_TEST_OPTIONS}} ${{ matrix.job.cargo-options }} ${{ steps.vars.outputs.CARGO_FEATURES_OPTION }}
- name: Archive executable artifacts
uses: actions/upload-artifact@master
with:
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}
path: target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}
- name: Archive deb artifacts
uses: actions/upload-artifact@master
with:
name: ${{ env.PROJECT_NAME }}-${{ matrix.job.target }}.deb
path: target/${{ matrix.job.target }}/debian
if: ${{ contains(matrix.job.target, 'musl') }}
- name: Package
shell: bash
run: |
# binary
cp 'target/${{ matrix.job.target }}/release/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
# `strip` binary (if needed)
if [ -n "${{ steps.vars.outputs.STRIP }}" ]; then "${{ steps.vars.outputs.STRIP }}" '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/${{ env.PROJECT_NAME }}${{ steps.vars.outputs.EXE_suffix }}' ; fi
# README and LICENSE
cp README.md '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
cp LICENSE '${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_BASENAME }}/'
# base compressed package
pushd '${{ steps.vars.outputs.STAGING }}/' >/dev/null
case ${{ matrix.job.target }} in
*-pc-windows-*) 7z -y a '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* | tail -2 ;;
*) tar czf '${{ steps.vars.outputs.PKG_NAME }}' '${{ steps.vars.outputs.PKG_BASENAME }}'/* ;;
esac;
popd >/dev/null
- name: Publish
uses: softprops/action-gh-release@v1
if: steps.vars.outputs.DEPLOY
with:
files: |
${{ steps.vars.outputs.STAGING }}/${{ steps.vars.outputs.PKG_NAME }}
target/${{ matrix.job.target }}/debian/*.deb
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
## fix! [rivy; 2020-22-01] `cargo tarpaulin` is unable to test this repo at the moment; alternate recipe or another testing framework?
# coverage:
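The "Initialize workflow variables" steps above compute their settings in plain shell and pass them to later steps via the `::set-output` workflow command (since deprecated in favour of writing to `$GITHUB_OUTPUT`). A minimal local sketch of the two trickier pieces of that parsing, the deploy-tag check and the truthy `use-cross` flag, with made-up example values:
```
#!/usr/bin/env bash
# Stand-in values for illustration only; in CI these come from the GitHub context.
GITHUB_REF="refs/tags/v0.7.1"
use_cross="use-cross"

# Deployable tag? (leading "vM" or "M"; M == version number)
unset REF_TAG DEPLOY
case ${GITHUB_REF} in refs/tags/*) REF_TAG=${GITHUB_REF#refs/tags/} ;; esac
if [[ $REF_TAG =~ ^[vV]?[0-9].* ]]; then DEPLOY='true'; fi
echo "DEPLOY=${DEPLOY:-<empty>/false}"                    # -> DEPLOY=true

# CARGO_USE_CROSS is truthy: '', 0, f, false, n, no all switch it off.
CARGO_USE_CROSS='true'
case "${use_cross}" in ''|0|f|false|n|no) unset CARGO_USE_CROSS ;; esac
echo "CARGO_USE_CROSS=${CARGO_USE_CROSS:-<empty>/false}"  # -> CARGO_USE_CROSS=true
```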

.gitignore (vendored): 5 lines changed

@@ -6,7 +6,4 @@
**/*.rs.bk
*.swp
.vscode/*
*.idea/*
#ignore macos files
.DS_Store
*.idea/*


@@ -1,11 +0,0 @@
repos:
- repo: https://github.com/doublify/pre-commit-rust
rev: v1.0
hooks:
- id: cargo-check
stages: [commit]
- id: fmt
stages: [commit]
- id: clippy
args: [--all-targets, --all-features]
stages: [commit]
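This is pre-commit's standard `.pre-commit-config.yaml` format; if the hooks are in use, the usual way to enable and exercise them locally is roughly the following (assuming `pre-commit` itself is installed, e.g. via `pip install pre-commit`):
```
pre-commit install          # register the commit-stage hooks defined above with git
pre-commit run --all-files  # run cargo-check, fmt and clippy once over the whole repo
```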

Cargo.lock (generated): 1196 lines changed

File diff suppressed because it is too large.


@@ -1,9 +1,9 @@
[package]
name = "du-dust"
description = "A more intuitive version of du"
version = "1.2.3"
version = "0.7.1"
authors = ["bootandy <bootandy@gmail.com>", "nebkor <code@ardent.nebcorp.com>"]
edition = "2024"
edition = "2018"
readme = "README.md"
documentation = "https://github.com/bootandy/dust"
@@ -21,81 +21,34 @@ travis-ci = { repository = "https://travis-ci.org/bootandy/dust" }
name = "dust"
path = "src/main.rs"
[profile.release]
codegen-units = 1
lto = true
strip = true
[dependencies]
ansi_term = "0.12"
clap = { version = "4.4", features = ["derive"] }
lscolors = "0.13"
terminal_size = "0.2"
clap = { version = "=2.33", features = ["wrap_help"] }
lscolors = "0.7"
terminal_size = "0.1"
unicode-width = "0.1"
rayon = "1"
rayon="1"
thousands = "0.2"
stfu8 = "0.2"
regex = "1"
config-file = "0.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
directories = "4"
sysinfo = "0.27"
ctrlc = "3.4"
chrono = "0.4"
[target.'cfg(not(target_has_atomic = "64"))'.dependencies]
portable-atomic = "1.4"
[target.'cfg(windows)'.dependencies]
winapi-util = "0.1"
filesize = "0.2.0"
[dev-dependencies]
assert_cmd = "2"
assert_cmd = "1"
tempfile = "=3"
[build-dependencies]
clap = { version = "4.4", features = ["derive"] }
clap_complete = "4.4"
clap_mangen = "0.2"
[[test]]
name = "integration"
path = "tests/tests.rs"
[package.metadata.binstall]
pkg-url = "{ repo }/releases/download/v{ version }/dust-v{ version }-{ target }{ archive-suffix }"
bin-dir = "dust-v{ version }-{ target }/{ bin }{ binary-ext }"
[package.metadata.deb]
section = "utils"
assets = [
[
"target/release/dust",
"usr/bin/",
"755",
],
[
"LICENSE",
"usr/share/doc/du-dust/",
"644",
],
[
"README.md",
"usr/share/doc/du-dust/README",
"644",
],
[
"man-page/dust.1",
"usr/share/man/man1/dust.1",
"644",
],
[
"completions/dust.bash",
"usr/share/bash-completion/completions/dust",
"644",
],
["target/release/dust", "usr/bin/", "755"],
["LICENSE", "usr/share/doc/du-dust/", "644"],
["README.md", "usr/share/doc/du-dust/README", "644"],
]
extended-description = """\
Dust is meant to give you an instant overview of which directories are using


@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [2023] [andrew boot]
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.


@@ -1,5 +1,5 @@
[![Build Status](https://github.com/bootandy/dust/actions/workflows/CICD.yml/badge.svg)](https://github.com/bootandy/dust/actions)
[![Build Status](https://travis-ci.org/bootandy/dust.svg?branch=master)](https://travis-ci.org/bootandy/dust)
# Dust
@@ -10,56 +10,27 @@ du + rust = dust. Like du but more intuitive.
Because I want an easy way to see where my disk is being used.
# Demo
![Example](media/snap.png)
## Install
#### Cargo <a href="https://repology.org/project/du-dust/versions"><img src="https://repology.org/badge/vertical-allrepos/du-dust.svg" alt="Packaging status" align="right"></a>
- `cargo install du-dust`
* `cargo install du-dust`
#### 🍺 Homebrew (Mac OS)
- `brew install dust`
* `brew install dust`
#### 🍺 Homebrew (Linux)
- `brew install dust`
#### [Snap](https://ubuntu.com/core/services/guide/snaps-intro) Ubuntu and [supported systems](https://snapcraft.io/docs/installing-snapd)
- `snap install dust`
Note: `dust` installed through `snap` can only access files stored in the `/home` directory. See daniejstriata/dust-snap#2 for more information.
#### [Pacstall](https://github.com/pacstall/pacstall) (Debian/Ubuntu)
- `pacstall -I dust-bin`
#### Anaconda (conda-forge)
- `conda install -c conda-forge dust`
#### [deb-get](https://github.com/wimpysworld/deb-get) (Debian/Ubuntu)
- `deb-get install du-dust`
#### [x-cmd](https://www.x-cmd.com/pkg/#VPContent)
- `x env use dust`
#### Windows:
- `scoop install dust`
- Windows GNU version - works
- Windows MSVC - requires: [VCRUNTIME140.dll](https://docs.microsoft.com/en-gb/cpp/windows/latest-supported-vc-redist?view=msvc-170)
* `brew tap tgotwig/linux-dust && brew install dust`
#### Download
- Download Linux/Mac binary from [Releases](https://github.com/bootandy/dust/releases)
- unzip file: `tar -xvf _downloaded_file.tar.gz`
- move file to executable path: `sudo mv dust /usr/local/bin/`
* Download Linux/Mac binary from [Releases](https://github.com/bootandy/dust/releases)
* unzip file: `tar -xvf _downloaded_file.tar.gz`
* move file to executable path: `sudo mv dust /usr/local/bin/`
## Overview
@@ -77,51 +48,27 @@ Usage: dust <dir>
Usage: dust <dir> <another_dir> <and_more>
Usage: dust -p (full-path - Show fullpath of the subdirectories)
Usage: dust -s (apparent-size - shows the length of the file as opposed to the amount of disk space it uses)
Usage: dust -n 30 (Shows 30 directories instead of the default [default is terminal height])
Usage: dust -d 3 (Shows 3 levels of subdirectories)
Usage: dust -D (Show only directories (eg dust -D))
Usage: dust -F (Show only files - finds your largest files)
Usage: dust -r (reverse order of output)
Usage: dust -o si/b/kb/kib/mb/mib/gb/gib (si - prints sizes in powers of 1000. Others print size in that format).
Usage: dust -n 30 (shows 30 directories instead of the default [default is terminal height])
Usage: dust -d 3 (shows 3 levels of subdirectories)
Usage: dust -r (reverse order of output)
Usage: dust -X ignore (ignore all files and directories with the name 'ignore')
Usage: dust -x (Only show directories on the same filesystem)
Usage: dust -b (Do not show percentages or draw ASCII bars)
Usage: dust -B (--bars-on-right - Percent bars moved to right side of screen)
Usage: dust -i (Do not show hidden files)
Usage: dust -x (only show directories on the same filesystem)
Usage: dust -b (do not show percentages or draw ASCII bars)
Usage: dust -i (do not show hidden files)
Usage: dust -c (No colors [monochrome])
Usage: dust -C (Force colors)
Usage: dust -f (Count files instead of diskspace [Counts by inode, to include duplicate inodes use dust -f -s])
Usage: dust -t (Group by filetype)
Usage: dust -z 10M (min-size, Only include files larger than 10M)
Usage: dust -e regex (Only include files matching this regex (eg dust -e "\.png$" would match png files))
Usage: dust -v regex (Exclude files matching this regex (eg dust -v "\.png$" would ignore png files))
Usage: dust -L (dereference-links - Treat sym links as directories and go into them)
Usage: dust -P (Disable the progress indicator)
Usage: dust -R (For screen readers. Removes bars/symbols. Adds new column: depth level. (May want to use -p for full path too))
Usage: dust -S (Custom Stack size - Use if you see: 'fatal runtime error: stack overflow' (default allocation: low memory=1048576, high memory=1073741824)"),
Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
Usage: dust -f (Count files instead of diskspace)
Usage: dust -t Group by filetype
Usage: dust -e regex Only include files matching this regex (eg dust -e "\.png$" would match png files)
```
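A few combined invocations, following the flags documented above (the directory paths are placeholders):
```
dust -d 3 -n 30 ~/projects      # limit to 3 levels of subdirectories, show 30 rows
dust -s -e "\.png$" ~/Pictures  # apparent size, only paths matching the regex
dust -r -X target .             # reverse the output order and skip anything named 'target'
```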
## Config file
Dust has a config file where the above options can be set.
Either: `~/.config/dust/config.toml` or `~/.dust.toml`
```
$ cat ~/.config/dust/config.toml
reverse=true
```
## Alternatives
- [NCDU](https://dev.yorhel.nl/ncdu)
- [dutree](https://github.com/nachoparker/dutree)
- [dua](https://github.com/Byron/dua-cli/)
- [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
- [dirstat-rs](https://github.com/scullionw/dirstat-rs)
- du -d 1 -h | sort -h
* [NCDU](https://dev.yorhel.nl/ncdu)
* [dutree](https://github.com/nachoparker/dutree)
* [dua](https://github.com/Byron/dua-cli/)
* [pdu](https://github.com/KSXGitHub/parallel-disk-usage)
* [dirstat-rs](https://github.com/scullionw/dirstat-rs)
* du -d 1 -h | sort -h
Note: Apparent-size is calculated slightly differently in dust than in gdu. In dust each hard link is counted as using file_length space. In gdu only the first entry is counted.
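For instance, with a single 1 MiB file and one hard link to it (a hypothetical layout), dust's apparent size counts the length once per link, while inode-deduplicating tools count it once:
```
mkdir -p /tmp/hardlink-demo && cd /tmp/hardlink-demo
dd if=/dev/zero of=original bs=1M count=1   # one 1 MiB file
ln original copy                            # hard link: same inode, same data
dust -s .   # apparent size counts both links  -> roughly 2 MiB
du -sh .    # de-duplicates by inode           -> roughly 1 MiB
```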


@@ -1,28 +0,0 @@
use clap::CommandFactory;
use clap_complete::{generate_to, shells::*};
use clap_mangen::Man;
use std::fs::File;
use std::io::Error;
use std::path::Path;
include!("src/cli.rs");
fn main() -> Result<(), Error> {
let outdir = "completions";
let app_name = "dust";
let mut cmd = Cli::command();
generate_to(Bash, &mut cmd, app_name, outdir)?;
generate_to(Zsh, &mut cmd, app_name, outdir)?;
generate_to(Fish, &mut cmd, app_name, outdir)?;
generate_to(PowerShell, &mut cmd, app_name, outdir)?;
generate_to(Elvish, &mut cmd, app_name, outdir)?;
let file = Path::new("man-page").join("dust.1");
std::fs::create_dir_all("man-page")?;
let mut file = File::create(file)?;
Man::new(cmd).render(&mut file)?;
Ok(())
}
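This script runs automatically as part of the build; a sketch of producing and inspecting the generated artifacts locally (output locations follow the `outdir` and `man-page` paths above, file names assume clap_complete's defaults):
```
cargo build              # triggers build.rs, regenerating completions/ and man-page/dust.1
ls completions/          # e.g. dust.bash, _dust, dust.fish, _dust.ps1, dust.elv
man ./man-page/dust.1    # preview the generated man page
```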


@@ -1,21 +1,10 @@
# ----------- To do a release ---------
# ----------- Pre release ---------
# Compare times of runs to check no drastic slow down:
# hyperfine 'target/release/dust /home/andy'
# hyperfine 'dust /home/andy'
# ----------- Release ---------
# inc version in cargo.toml
# cargo build --release
# commit changed files
# merge to master in github
# edit version in cargo.toml
# tag a commit and push (increment version in Cargo.toml first):
# git tag v0.4.5
# git push origin v0.4.5
# cargo publish to put it in crates.io
# Optional: To install locally
# To install locally [Do before pushing it]
#cargo install --path .
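A compact sketch of the pre-release timing check described above (the benchmark path is the author's example; substitute any reasonably large directory):
```
cargo build --release
hyperfine 'target/release/dust /home/andy' 'dust /home/andy'   # new build vs installed binary
```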


@@ -1,127 +0,0 @@
#compdef dust
autoload -U is-at-least
_dust() {
typeset -A opt_args
typeset -a _arguments_options
local ret=1
if is-at-least 5.2; then
_arguments_options=(-s -S -C)
else
_arguments_options=(-s -C)
fi
local context curcontext="$curcontext" state line
_arguments "${_arguments_options[@]}" : \
'-d+[Depth to show]:DEPTH:_default' \
'--depth=[Depth to show]:DEPTH:_default' \
'-T+[Number of threads to use]:THREADS:_default' \
'--threads=[Number of threads to use]:THREADS:_default' \
'--config=[Specify a config file to use]:FILE:_files' \
'-n+[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'--number-of-lines=[Number of lines of output to show. (Default is terminal_height - 10)]:NUMBER:_default' \
'*-X+[Exclude any file or directory with this path]:PATH:_files' \
'*--ignore-directory=[Exclude any file or directory with this path]:PATH:_files' \
'-I+[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'--ignore-all-in-file=[Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter]:FILE:_files' \
'-z+[Minimum size file to include in output]:MIN_SIZE:_default' \
'--min-size=[Minimum size file to include in output]:MIN_SIZE:_default' \
'(-e --filter -t --file-types)*-v+[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$"]:REGEX:_default' \
'(-e --filter -t --file-types)*--invert-filter=[Exclude filepaths matching this regex. To ignore png files type\: -v "\\.png\$"]:REGEX:_default' \
'(-t --file-types)*-e+[Only include filepaths matching this regex. For png files type\: -e "\\.png\$"]:REGEX:_default' \
'(-t --file-types)*--filter=[Only include filepaths matching this regex. For png files type\: -e "\\.png\$"]:REGEX:_default' \
'-w+[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
'--terminal-width=[Specify width of output overriding the auto detection of terminal width]:WIDTH:_default' \
'-o+[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size]:FORMAT:((si\:"SI prefix (powers of 1000)"
b\:"byte (B)"
k\:"kibibyte (KiB)"
m\:"mebibyte (MiB)"
g\:"gibibyte (GiB)"
t\:"tebibyte (TiB)"
kb\:"kilobyte (kB)"
mb\:"megabyte (MB)"
gb\:"gigabyte (GB)"
tb\:"terabyte (TB)"))' \
'--output-format=[Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size]:FORMAT:((si\:"SI prefix (powers of 1000)"
b\:"byte (B)"
k\:"kibibyte (KiB)"
m\:"mebibyte (MiB)"
g\:"gibibyte (GiB)"
t\:"tebibyte (TiB)"
kb\:"kilobyte (kB)"
mb\:"megabyte (MB)"
gb\:"gigabyte (GB)"
tb\:"terabyte (TB)"))' \
'-S+[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'--stack-size=[Specify memory to use as stack size - use if you see\: '\''fatal runtime error\: stack overflow'\'' (default low memory=1048576, high memory=1073741824)]:STACK_SIZE:_default' \
'-M+[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]:MTIME:_default' \
'--mtime=[+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => \[curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)]:MTIME:_default' \
'-A+[just like -mtime, but based on file access time]:ATIME:_default' \
'--atime=[just like -mtime, but based on file access time]:ATIME:_default' \
'-y+[just like -mtime, but based on file change time]:CTIME:_default' \
'--ctime=[just like -mtime, but based on file change time]:CTIME:_default' \
'--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]:FILES0_FROM:_files' \
'*--collapse=[Keep these directories collapsed]:COLLAPSE:_files' \
'-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
c\:"last changed time"
m\:"last modified time"))' \
'--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]:FILETIME:((a\:"last accessed time"
c\:"last changed time"
m\:"last modified time"))' \
'-p[Subdirectories will not have their path shortened]' \
'--full-paths[Subdirectories will not have their path shortened]' \
'-L[dereference sym links - Treat sym links as directories and go into them]' \
'--dereference-links[dereference sym links - Treat sym links as directories and go into them]' \
'-x[Only count the files and directories on the same filesystem as the supplied directory]' \
'--limit-filesystem[Only count the files and directories on the same filesystem as the supplied directory]' \
'-s[Use file length instead of blocks]' \
'--apparent-size[Use file length instead of blocks]' \
'-r[Print tree upside down (biggest highest)]' \
'--reverse[Print tree upside down (biggest highest)]' \
'-c[No colors will be printed (Useful for commands like\: watch)]' \
'--no-colors[No colors will be printed (Useful for commands like\: watch)]' \
'-C[Force colors print]' \
'--force-colors[Force colors print]' \
'-b[No percent bars or percentages will be displayed]' \
'--no-percent-bars[No percent bars or percentages will be displayed]' \
'-B[percent bars moved to right side of screen]' \
'--bars-on-right[percent bars moved to right side of screen]' \
'-R[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
'--screen-reader[For screen readers. Removes bars. Adds new column\: depth level (May want to use -p too for full path)]' \
'--skip-total[No total row will be displayed]' \
'-f[Directory '\''size'\'' is number of child files instead of disk size]' \
'--filecount[Directory '\''size'\'' is number of child files instead of disk size]' \
'-i[Do not display hidden files]' \
'--ignore-hidden[Do not display hidden files]' \
'(-d --depth -D --only-dir)-t[show only these file types]' \
'(-d --depth -D --only-dir)--file-types[show only these file types]' \
'-P[Disable the progress indication]' \
'--no-progress[Disable the progress indication]' \
'--print-errors[Print path with errors]' \
'(-F --only-file -t --file-types)-D[Only directories will be displayed]' \
'(-F --only-file -t --file-types)--only-dir[Only directories will be displayed]' \
'(-D --only-dir)-F[Only files will be displayed. (Finds your largest files)]' \
'(-D --only-dir)--only-file[Only files will be displayed. (Finds your largest files)]' \
'-j[Output the directory tree as json to the current directory]' \
'--output-json[Output the directory tree as json to the current directory]' \
'-h[Print help (see more with '\''--help'\'')]' \
'--help[Print help (see more with '\''--help'\'')]' \
'-V[Print version]' \
'--version[Print version]' \
'*::params -- Input files or directories:_files' \
&& ret=0
}
(( $+functions[_dust_commands] )) ||
_dust_commands() {
local commands; commands=()
_describe -t commands 'dust commands' commands "$@"
}
if [ "$funcstack[1]" = "_dust" ]; then
_dust "$@"
else
compdef _dust dust
fi


@@ -1,103 +0,0 @@
using namespace System.Management.Automation
using namespace System.Management.Automation.Language
Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
param($wordToComplete, $commandAst, $cursorPosition)
$commandElements = $commandAst.CommandElements
$command = @(
'dust'
for ($i = 1; $i -lt $commandElements.Count; $i++) {
$element = $commandElements[$i]
if ($element -isnot [StringConstantExpressionAst] -or
$element.StringConstantType -ne [StringConstantType]::BareWord -or
$element.Value.StartsWith('-') -or
$element.Value -eq $wordToComplete) {
break
}
$element.Value
}) -join ';'
$completions = @(switch ($command) {
'dust' {
[CompletionResult]::new('-d', '-d', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('--depth', '--depth', [CompletionResultType]::ParameterName, 'Depth to show')
[CompletionResult]::new('-T', '-T ', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--threads', '--threads', [CompletionResultType]::ParameterName, 'Number of threads to use')
[CompletionResult]::new('--config', '--config', [CompletionResultType]::ParameterName, 'Specify a config file to use')
[CompletionResult]::new('-n', '-n', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('--number-of-lines', '--number-of-lines', [CompletionResultType]::ParameterName, 'Number of lines of output to show. (Default is terminal_height - 10)')
[CompletionResult]::new('-X', '-X ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('--ignore-directory', '--ignore-directory', [CompletionResultType]::ParameterName, 'Exclude any file or directory with this path')
[CompletionResult]::new('-I', '-I ', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('--ignore-all-in-file', '--ignore-all-in-file', [CompletionResultType]::ParameterName, 'Exclude any file or directory with a regex matching that listed in this file, the file entries will be added to the ignore regexs provided by --invert_filter')
[CompletionResult]::new('-z', '-z', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('--min-size', '--min-size', [CompletionResultType]::ParameterName, 'Minimum size file to include in output')
[CompletionResult]::new('-v', '-v', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"')
[CompletionResult]::new('--invert-filter', '--invert-filter', [CompletionResultType]::ParameterName, 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"')
[CompletionResult]::new('-e', '-e', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$"')
[CompletionResult]::new('--filter', '--filter', [CompletionResultType]::ParameterName, 'Only include filepaths matching this regex. For png files type: -e "\.png$"')
[CompletionResult]::new('-w', '-w', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('--terminal-width', '--terminal-width', [CompletionResultType]::ParameterName, 'Specify width of output overriding the auto detection of terminal width')
[CompletionResult]::new('-o', '-o', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size')
[CompletionResult]::new('--output-format', '--output-format', [CompletionResultType]::ParameterName, 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size')
[CompletionResult]::new('-S', '-S ', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('--stack-size', '--stack-size', [CompletionResultType]::ParameterName, 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)')
[CompletionResult]::new('-M', '-M ', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('--mtime', '--mtime', [CompletionResultType]::ParameterName, '+/-n matches files modified more/less than n days ago , and n matches files modified exactly n days ago, days are rounded down.That is +n => (−∞, curr(n+1)), n => [curr(n+1), currn), and -n => (𝑐𝑢𝑟𝑟𝑛, +∞)')
[CompletionResult]::new('-A', '-A ', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('--atime', '--atime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file access time')
[CompletionResult]::new('-y', '-y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--ctime', '--ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
[CompletionResult]::new('--files0-from', '--files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
[CompletionResult]::new('--collapse', '--collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
[CompletionResult]::new('-m', '-m', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('--filetime', '--filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
[CompletionResult]::new('-p', '-p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('--full-paths', '--full-paths', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
[CompletionResult]::new('-L', '-L ', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('--dereference-links', '--dereference-links', [CompletionResultType]::ParameterName, 'dereference sym links - Treat sym links as directories and go into them')
[CompletionResult]::new('-x', '-x', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('--limit-filesystem', '--limit-filesystem', [CompletionResultType]::ParameterName, 'Only count the files and directories on the same filesystem as the supplied directory')
[CompletionResult]::new('-s', '-s', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('--apparent-size', '--apparent-size', [CompletionResultType]::ParameterName, 'Use file length instead of blocks')
[CompletionResult]::new('-r', '-r', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('--reverse', '--reverse', [CompletionResultType]::ParameterName, 'Print tree upside down (biggest highest)')
[CompletionResult]::new('-c', '-c', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('--no-colors', '--no-colors', [CompletionResultType]::ParameterName, 'No colors will be printed (Useful for commands like: watch)')
[CompletionResult]::new('-C', '-C ', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('--force-colors', '--force-colors', [CompletionResultType]::ParameterName, 'Force colors print')
[CompletionResult]::new('-b', '-b', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('--no-percent-bars', '--no-percent-bars', [CompletionResultType]::ParameterName, 'No percent bars or percentages will be displayed')
[CompletionResult]::new('-B', '-B ', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('--bars-on-right', '--bars-on-right', [CompletionResultType]::ParameterName, 'percent bars moved to right side of screen')
[CompletionResult]::new('-R', '-R ', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--screen-reader', '--screen-reader', [CompletionResultType]::ParameterName, 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)')
[CompletionResult]::new('--skip-total', '--skip-total', [CompletionResultType]::ParameterName, 'No total row will be displayed')
[CompletionResult]::new('-f', '-f', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('--filecount', '--filecount', [CompletionResultType]::ParameterName, 'Directory ''size'' is number of child files instead of disk size')
[CompletionResult]::new('-i', '-i', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('--ignore-hidden', '--ignore-hidden', [CompletionResultType]::ParameterName, 'Do not display hidden files')
[CompletionResult]::new('-t', '-t', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('--file-types', '--file-types', [CompletionResultType]::ParameterName, 'show only these file types')
[CompletionResult]::new('-P', '-P ', [CompletionResultType]::ParameterName, 'Disable the progress indication')
[CompletionResult]::new('--no-progress', '--no-progress', [CompletionResultType]::ParameterName, 'Disable the progress indication')
[CompletionResult]::new('--print-errors', '--print-errors', [CompletionResultType]::ParameterName, 'Print path with errors')
[CompletionResult]::new('-D', '-D ', [CompletionResultType]::ParameterName, 'Only directories will be displayed')
[CompletionResult]::new('--only-dir', '--only-dir', [CompletionResultType]::ParameterName, 'Only directories will be displayed')
[CompletionResult]::new('-F', '-F ', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('--only-file', '--only-file', [CompletionResultType]::ParameterName, 'Only files will be displayed. (Finds your largest files)')
[CompletionResult]::new('-j', '-j', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('--output-json', '--output-json', [CompletionResultType]::ParameterName, 'Output the directory tree as json to the current directory')
[CompletionResult]::new('-h', '-h', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
[CompletionResult]::new('--help', '--help', [CompletionResultType]::ParameterName, 'Print help (see more with ''--help'')')
[CompletionResult]::new('-V', '-V ', [CompletionResultType]::ParameterName, 'Print version')
[CompletionResult]::new('--version', '--version', [CompletionResultType]::ParameterName, 'Print version')
break
}
})
$completions.Where{ $_.CompletionText -like "$wordToComplete*" } |
Sort-Object -Property ListItemText
}


@@ -1,211 +0,0 @@
_dust() {
local i cur prev opts cmd
COMPREPLY=()
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
cur="$2"
else
cur="${COMP_WORDS[COMP_CWORD]}"
fi
prev="$3"
cmd=""
opts=""
for i in "${COMP_WORDS[@]:0:COMP_CWORD}"
do
case "${cmd},${i}" in
",$1")
cmd="dust"
;;
*)
;;
esac
done
case "${cmd}" in
dust)
opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore-hidden --invert-filter --filter --file-types --terminal-width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
fi
case "${prev}" in
--depth)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-d)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--threads)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-T)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--config)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--number-of-lines)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-n)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ignore-directory)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-X)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ignore-all-in-file)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
-I)
local oldifs
if [ -n "${IFS+x}" ]; then
oldifs="$IFS"
fi
IFS=$'\n'
COMPREPLY=($(compgen -f "${cur}"))
if [ -n "${oldifs+x}" ]; then
IFS="$oldifs"
fi
if [[ "${BASH_VERSINFO[0]}" -ge 4 ]]; then
compopt -o filenames
fi
return 0
;;
--min-size)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-z)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--invert-filter)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-v)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--filter)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-e)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--terminal-width)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-w)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--output-format)
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
return 0
;;
-o)
COMPREPLY=($(compgen -W "si b k m g t kb mb gb tb" -- "${cur}"))
return 0
;;
--stack-size)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-S)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--mtime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-M)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--atime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-A)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--ctime)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
-y)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--files0-from)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--collapse)
COMPREPLY=($(compgen -f "${cur}"))
return 0
;;
--filetime)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
-m)
COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
return 0
;;
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
return 0
;;
esac
}
if [[ "${BASH_VERSINFO[0]}" -eq 4 && "${BASH_VERSINFO[1]}" -ge 4 || "${BASH_VERSINFO[0]}" -gt 4 ]]; then
complete -F _dust -o nosort -o bashdefault -o default dust
else
complete -F _dust -o bashdefault -o default dust
fi
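A minimal way to try this completion script in the current shell, or to install it to the same location the .deb package uses (per the Cargo.toml assets above):
```
source completions/dust.bash   # enable dust tab-completion for this bash session
sudo cp completions/dust.bash /usr/share/bash-completion/completions/dust
```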


@@ -1,97 +0,0 @@
use builtin;
use str;
set edit:completion:arg-completer[dust] = {|@words|
fn spaces {|n|
builtin:repeat $n ' ' | str:join ''
}
fn cand {|text desc|
edit:complex-candidate $text &display=$text' '(spaces (- 14 (wcswidth $text)))$desc
}
var command = 'dust'
for word $words[1..-1] {
if (str:has-prefix $word '-') {
break
}
set command = $command';'$word
}
var completions = [
&'dust'= {
cand -d 'Depth to show'
cand --depth 'Depth to show'
cand -T 'Number of threads to use'
cand --threads 'Number of threads to use'
cand --config 'Specify a config file to use'
cand -n 'Number of lines of output to show. (Default is terminal_height - 10)'
cand --number-of-lines 'Number of lines of output to show. (Default is terminal_height - 10)'
cand -X 'Exclude any file or directory with this path'
cand --ignore-directory 'Exclude any file or directory with this path'
cand -I 'Exclude any file or directory matching a regex listed in this file; the file entries will be added to the ignore regexes provided by --invert_filter'
cand --ignore-all-in-file 'Exclude any file or directory matching a regex listed in this file; the file entries will be added to the ignore regexes provided by --invert_filter'
cand -z 'Minimum size file to include in output'
cand --min-size 'Minimum size file to include in output'
cand -v 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"'
cand --invert-filter 'Exclude filepaths matching this regex. To ignore png files type: -v "\.png$"'
cand -e 'Only include filepaths matching this regex. For png files type: -e "\.png$"'
cand --filter 'Only include filepaths matching this regex. For png files type: -e "\.png$"'
cand -w 'Specify width of output overriding the auto detection of terminal width'
cand --terminal-width 'Specify width of output overriding the auto detection of terminal width'
cand -o 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size'
cand --output-format 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size'
cand -S 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand --stack-size 'Specify memory to use as stack size - use if you see: ''fatal runtime error: stack overflow'' (default low memory=1048576, high memory=1073741824)'
cand -M '+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is, +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (curr−n, +∞)'
cand --mtime '+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is, +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (curr−n, +∞)'
cand -A 'just like -mtime, but based on file access time'
cand --atime 'just like -mtime, but based on file access time'
cand -y 'just like -mtime, but based on file change time'
cand --ctime 'just like -mtime, but based on file change time'
cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
cand --collapse 'Keep these directories collapsed'
cand -m 'Directory ''size'' is max filetime of child files instead of disk size, where a/c/m selects last accessed/changed/modified time'
cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size, where a/c/m selects last accessed/changed/modified time'
cand -p 'Subdirectories will not have their path shortened'
cand --full-paths 'Subdirectories will not have their path shortened'
cand -L 'dereference sym links - Treat sym links as directories and go into them'
cand --dereference-links 'dereference sym links - Treat sym links as directories and go into them'
cand -x 'Only count the files and directories on the same filesystem as the supplied directory'
cand --limit-filesystem 'Only count the files and directories on the same filesystem as the supplied directory'
cand -s 'Use file length instead of blocks'
cand --apparent-size 'Use file length instead of blocks'
cand -r 'Print tree upside down (biggest highest)'
cand --reverse 'Print tree upside down (biggest highest)'
cand -c 'No colors will be printed (Useful for commands like: watch)'
cand --no-colors 'No colors will be printed (Useful for commands like: watch)'
cand -C 'Force colors print'
cand --force-colors 'Force colors print'
cand -b 'No percent bars or percentages will be displayed'
cand --no-percent-bars 'No percent bars or percentages will be displayed'
cand -B 'percent bars moved to right side of screen'
cand --bars-on-right 'percent bars moved to right side of screen'
cand -R 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
cand --screen-reader 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
cand --skip-total 'No total row will be displayed'
cand -f 'Directory ''size'' is number of child files instead of disk size'
cand --filecount 'Directory ''size'' is number of child files instead of disk size'
cand -i 'Do not display hidden files'
cand --ignore-hidden 'Do not display hidden files'
cand -t 'show only these file types'
cand --file-types 'show only these file types'
cand -P 'Disable the progress indication'
cand --no-progress 'Disable the progress indication'
cand --print-errors 'Print path with errors'
cand -D 'Only directories will be displayed'
cand --only-dir 'Only directories will be displayed'
cand -F 'Only files will be displayed. (Finds your largest files)'
cand --only-file 'Only files will be displayed. (Finds your largest files)'
cand -j 'Output the directory tree as json to the current directory'
cand --output-json 'Output the directory tree as json to the current directory'
cand -h 'Print help (see more with ''--help'')'
cand --help 'Print help (see more with ''--help'')'
cand -V 'Print version'
cand --version 'Print version'
}
]
$completions[$command]
}

View File

@@ -1,50 +0,0 @@
complete -c dust -s d -l depth -d 'Depth to show' -r
complete -c dust -s T -l threads -d 'Number of threads to use' -r
complete -c dust -l config -d 'Specify a config file to use' -r -F
complete -c dust -s n -l number-of-lines -d 'Number of lines of output to show. (Default is terminal_height - 10)' -r
complete -c dust -s X -l ignore-directory -d 'Exclude any file or directory with this path' -r -F
complete -c dust -s I -l ignore-all-in-file -d 'Exclude any file or directory matching a regex listed in this file; the file entries will be added to the ignore regexes provided by --invert_filter' -r -F
complete -c dust -s z -l min-size -d 'Minimum size file to include in output' -r
complete -c dust -s v -l invert-filter -d 'Exclude filepaths matching this regex. To ignore png files type: -v "\\.png$"' -r
complete -c dust -s e -l filter -d 'Only include filepaths matching this regex. For png files type: -e "\\.png$"' -r
complete -c dust -s w -l terminal-width -d 'Specify width of output overriding the auto detection of terminal width' -r
complete -c dust -s o -l output-format -d 'Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size' -r -f -a "si\t'SI prefix (powers of 1000)'
b\t'byte (B)'
k\t'kibibyte (KiB)'
m\t'mebibyte (MiB)'
g\t'gibibyte (GiB)'
t\t'tebibyte (TiB)'
kb\t'kilobyte (kB)'
mb\t'megabyte (MB)'
gb\t'gigabyte (GB)'
tb\t'terabyte (TB)'"
complete -c dust -s S -l stack-size -d 'Specify memory to use as stack size - use if you see: \'fatal runtime error: stack overflow\' (default low memory=1048576, high memory=1073741824)' -r
complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is, +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (curr−n, +∞)' -r
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size, where a/c/m selects last accessed/changed/modified time' -r -f -a "a\t'last accessed time'
c\t'last changed time'
m\t'last modified time'"
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
complete -c dust -s x -l limit-filesystem -d 'Only count the files and directories on the same filesystem as the supplied directory'
complete -c dust -s s -l apparent-size -d 'Use file length instead of blocks'
complete -c dust -s r -l reverse -d 'Print tree upside down (biggest highest)'
complete -c dust -s c -l no-colors -d 'No colors will be printed (Useful for commands like: watch)'
complete -c dust -s C -l force-colors -d 'Force colors print'
complete -c dust -s b -l no-percent-bars -d 'No percent bars or percentages will be displayed'
complete -c dust -s B -l bars-on-right -d 'percent bars moved to right side of screen'
complete -c dust -s R -l screen-reader -d 'For screen readers. Removes bars. Adds new column: depth level (May want to use -p too for full path)'
complete -c dust -l skip-total -d 'No total row will be displayed'
complete -c dust -s f -l filecount -d 'Directory \'size\' is number of child files instead of disk size'
complete -c dust -s i -l ignore-hidden -d 'Do not display hidden files'
complete -c dust -s t -l file-types -d 'show only these file types'
complete -c dust -s P -l no-progress -d 'Disable the progress indication'
complete -c dust -l print-errors -d 'Print path with errors'
complete -c dust -s D -l only-dir -d 'Only directories will be displayed'
complete -c dust -s F -l only-file -d 'Only files will be displayed. (Finds your largest files)'
complete -c dust -s j -l output-json -d 'Output the directory tree as json to the current directory'
complete -c dust -s h -l help -d 'Print help (see more with \'--help\')'
complete -c dust -s V -l version -d 'Print version'

View File

@@ -1,28 +0,0 @@
# Sample Config file, works with toml and yaml
# Place in either:
# ~/.config/dust/config.toml
# ~/.dust.toml
# Print tree upside down (biggest highest)
reverse=true
# Subdirectories will not have their path shortened
display-full-paths=true
# Use file length instead of blocks
display-apparent-size=true
# No colors will be printed
no-colors=true
# No percent bars or percentages will be displayed
no-bars=true
# No total row will be displayed
skip-total=true
# Do not display hidden files
ignore-hidden=true
# print sizes in powers of 1000 (e.g., 1.1G)
output-format="si"

View File

@@ -1,170 +0,0 @@
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.TH Dust 1 "Dust 1.2.3"
.SH NAME
Dust \- Like du but more intuitive
.SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore\-hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file\-types\fR] [\fB\-w\fR|\fB\-\-terminal\-width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
.SH DESCRIPTION
Like du but more intuitive
.SH OPTIONS
.TP
\fB\-d\fR, \fB\-\-depth\fR=\fIDEPTH\fR
Depth to show
.TP
\fB\-T\fR, \fB\-\-threads\fR=\fITHREADS\fR
Number of threads to use
.TP
\fB\-\-config\fR=\fIFILE\fR
Specify a config file to use
.TP
\fB\-n\fR, \fB\-\-number\-of\-lines\fR=\fINUMBER\fR
Number of lines of output to show. (Default is terminal_height \- 10)
.TP
\fB\-p\fR, \fB\-\-full\-paths\fR
Subdirectories will not have their path shortened
.TP
\fB\-X\fR, \fB\-\-ignore\-directory\fR=\fIPATH\fR
Exclude any file or directory with this path
.TP
\fB\-I\fR, \fB\-\-ignore\-all\-in\-file\fR=\fIFILE\fR
Exclude any file or directory matching a regex listed in this file; the file entries will be added to the ignore regexes provided by \-\-invert_filter
.TP
\fB\-L\fR, \fB\-\-dereference\-links\fR
dereference sym links \- Treat sym links as directories and go into them
.TP
\fB\-x\fR, \fB\-\-limit\-filesystem\fR
Only count the files and directories on the same filesystem as the supplied directory
.TP
\fB\-s\fR, \fB\-\-apparent\-size\fR
Use file length instead of blocks
.TP
\fB\-r\fR, \fB\-\-reverse\fR
Print tree upside down (biggest highest)
.TP
\fB\-c\fR, \fB\-\-no\-colors\fR
No colors will be printed (Useful for commands like: watch)
.TP
\fB\-C\fR, \fB\-\-force\-colors\fR
Force colors print
.TP
\fB\-b\fR, \fB\-\-no\-percent\-bars\fR
No percent bars or percentages will be displayed
.TP
\fB\-B\fR, \fB\-\-bars\-on\-right\fR
percent bars moved to right side of screen
.TP
\fB\-z\fR, \fB\-\-min\-size\fR=\fIMIN_SIZE\fR
Minimum size file to include in output
.TP
\fB\-R\fR, \fB\-\-screen\-reader\fR
For screen readers. Removes bars. Adds new column: depth level (May want to use \-p too for full path)
.TP
\fB\-\-skip\-total\fR
No total row will be displayed
.TP
\fB\-f\fR, \fB\-\-filecount\fR
Directory \*(Aqsize\*(Aq is number of child files instead of disk size
.TP
\fB\-i\fR, \fB\-\-ignore\-hidden\fR
Do not display hidden files
.TP
\fB\-v\fR, \fB\-\-invert\-filter\fR=\fIREGEX\fR
Exclude filepaths matching this regex. To ignore png files type: \-v "\\.png$"
.TP
\fB\-e\fR, \fB\-\-filter\fR=\fIREGEX\fR
Only include filepaths matching this regex. For png files type: \-e "\\.png$"
.TP
\fB\-t\fR, \fB\-\-file\-types\fR
show only these file types
.TP
\fB\-w\fR, \fB\-\-terminal\-width\fR=\fIWIDTH\fR
Specify width of output overriding the auto detection of terminal width
.TP
\fB\-P\fR, \fB\-\-no\-progress\fR
Disable the progress indication
.TP
\fB\-\-print\-errors\fR
Print path with errors
.TP
\fB\-D\fR, \fB\-\-only\-dir\fR
Only directories will be displayed
.TP
\fB\-F\fR, \fB\-\-only\-file\fR
Only files will be displayed. (Finds your largest files)
.TP
\fB\-o\fR, \fB\-\-output\-format\fR=\fIFORMAT\fR
Changes output display size. si will print sizes in powers of 1000. b k m g t kb mb gb tb will print the whole tree in that size
.br
.br
\fIPossible values:\fR
.RS 14
.IP \(bu 2
si: SI prefix (powers of 1000)
.IP \(bu 2
b: byte (B)
.IP \(bu 2
k: kibibyte (KiB)
.IP \(bu 2
m: mebibyte (MiB)
.IP \(bu 2
g: gibibyte (GiB)
.IP \(bu 2
t: tebibyte (TiB)
.IP \(bu 2
kb: kilobyte (kB)
.IP \(bu 2
mb: megabyte (MB)
.IP \(bu 2
gb: gigabyte (GB)
.IP \(bu 2
tb: terabyte (TB)
.RE
.TP
\fB\-S\fR, \fB\-\-stack\-size\fR=\fISTACK_SIZE\fR
Specify memory to use as stack size \- use if you see: \*(Aqfatal runtime error: stack overflow\*(Aq (default low memory=1048576, high memory=1073741824)
.TP
\fB\-j\fR, \fB\-\-output\-json\fR
Output the directory tree as json to the current directory
.TP
\fB\-M\fR, \fB\-\-mtime\fR=\fIMTIME\fR
+/\-n matches files modified more/less than n days ago, and n matches files modified exactly n days ago; days are rounded down. That is, +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and \-n => (curr−n, +∞)
.TP
\fB\-A\fR, \fB\-\-atime\fR=\fIATIME\fR
just like \-mtime, but based on file access time
.TP
\fB\-y\fR, \fB\-\-ctime\fR=\fICTIME\fR
just like \-mtime, but based on file change time
.TP
\fB\-\-files0\-from\fR=\fIFILES0_FROM\fR
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
.TP
\fB\-\-collapse\fR=\fICOLLAPSE\fR
Keep these directories collapsed
.TP
\fB\-m\fR, \fB\-\-filetime\fR=\fIFILETIME\fR
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size, where a/c/m selects last accessed/changed/modified time
.br
.br
\fIPossible values:\fR
.RS 14
.IP \(bu 2
a: last accessed time
.IP \(bu 2
c: last changed time
.IP \(bu 2
m: last modified time
.RE
.TP
\fB\-h\fR, \fB\-\-help\fR
Print help (see a summary with \*(Aq\-h\*(Aq)
.TP
\fB\-V\fR, \fB\-\-version\fR
Print version
.TP
[\fIPATH\fR]
Input files or directories
.SH VERSION
v1.2.3

View File

@@ -1,258 +0,0 @@
use std::fmt;
use clap::{Parser, ValueEnum, ValueHint};
// For single thread mode set this variable on your command line:
// export RAYON_NUM_THREADS=1
/// Like du but more intuitive
#[derive(Debug, Parser)]
#[command(name("Dust"), version)]
pub struct Cli {
/// Depth to show
#[arg(short, long)]
pub depth: Option<usize>,
/// Number of threads to use
#[arg(short('T'), long)]
pub threads: Option<usize>,
/// Specify a config file to use
#[arg(long, value_name("FILE"), value_hint(ValueHint::FilePath))]
pub config: Option<String>,
/// Number of lines of output to show. (Default is terminal_height - 10)
#[arg(short, long, value_name("NUMBER"))]
pub number_of_lines: Option<usize>,
/// Subdirectories will not have their path shortened
#[arg(short('p'), long)]
pub full_paths: bool,
/// Exclude any file or directory with this path
#[arg(short('X'), long, value_name("PATH"), value_hint(ValueHint::AnyPath))]
pub ignore_directory: Option<Vec<String>>,
/// Exclude any file or directory matching a regex listed in this file;
/// the file entries will be added to the ignore regexes provided by
/// --invert_filter
#[arg(short('I'), long, value_name("FILE"), value_hint(ValueHint::FilePath))]
pub ignore_all_in_file: Option<String>,
/// dereference sym links - Treat sym links as directories and go into them
#[arg(short('L'), long)]
pub dereference_links: bool,
/// Only count the files and directories on the same filesystem as the
/// supplied directory
#[arg(short('x'), long)]
pub limit_filesystem: bool,
/// Use file length instead of blocks
#[arg(short('s'), long)]
pub apparent_size: bool,
/// Print tree upside down (biggest highest)
#[arg(short, long)]
pub reverse: bool,
/// No colors will be printed (Useful for commands like: watch)
#[arg(short('c'), long)]
pub no_colors: bool,
/// Force colors print
#[arg(short('C'), long)]
pub force_colors: bool,
/// No percent bars or percentages will be displayed
#[arg(short('b'), long)]
pub no_percent_bars: bool,
/// percent bars moved to right side of screen
#[arg(short('B'), long)]
pub bars_on_right: bool,
/// Minimum size file to include in output
#[arg(short('z'), long)]
pub min_size: Option<String>,
/// For screen readers. Removes bars. Adds new column: depth level (May want
/// to use -p too for full path)
#[arg(short('R'), long)]
pub screen_reader: bool,
/// No total row will be displayed
#[arg(long)]
pub skip_total: bool,
/// Directory 'size' is number of child files instead of disk size
#[arg(short, long)]
pub filecount: bool,
/// Do not display hidden files
// Do not use 'h'; it is used by 'help'
#[arg(short, long)]
pub ignore_hidden: bool,
/// Exclude filepaths matching this regex. To ignore png files type: -v
/// "\.png$"
#[arg(
short('v'),
long,
value_name("REGEX"),
conflicts_with("filter"),
conflicts_with("file_types")
)]
pub invert_filter: Option<Vec<String>>,
/// Only include filepaths matching this regex. For png files type: -e
/// "\.png$"
#[arg(short('e'), long, value_name("REGEX"), conflicts_with("file_types"))]
pub filter: Option<Vec<String>>,
/// show only these file types
#[arg(short('t'), long, conflicts_with("depth"), conflicts_with("only_dir"))]
pub file_types: bool,
/// Specify width of output overriding the auto detection of terminal width
#[arg(short('w'), long, value_name("WIDTH"))]
pub terminal_width: Option<usize>,
/// Disable the progress indication.
#[arg(short('P'), long)]
pub no_progress: bool,
/// Print path with errors.
#[arg(long)]
pub print_errors: bool,
/// Only directories will be displayed.
#[arg(
short('D'),
long,
conflicts_with("only_file"),
conflicts_with("file_types")
)]
pub only_dir: bool,
/// Only files will be displayed. (Finds your largest files)
#[arg(short('F'), long, conflicts_with("only_dir"))]
pub only_file: bool,
/// Changes output display size. si will print sizes in powers of 1000. b k
/// m g t kb mb gb tb will print the whole tree in that size.
#[arg(short, long, value_enum, value_name("FORMAT"), ignore_case(true))]
pub output_format: Option<OutputFormat>,
/// Specify memory to use as stack size - use if you see: 'fatal runtime
/// error: stack overflow' (default low memory=1048576, high
/// memory=1073741824)
#[arg(short('S'), long)]
pub stack_size: Option<usize>,
/// Input files or directories.
#[arg(value_name("PATH"), value_hint(ValueHint::AnyPath))]
pub params: Option<Vec<String>>,
/// Output the directory tree as json to the current directory
#[arg(short('j'), long)]
pub output_json: bool,
/// +/-n matches files modified more/less than n days ago, and n matches
/// files modified exactly n days ago; days are rounded down. That is,
/// +n => (−∞, curr−(n+1)), n => [curr−(n+1), curr−n), and -n => (curr−n, +∞)
#[arg(short('M'), long, allow_hyphen_values(true))]
pub mtime: Option<String>,
/// just like -mtime, but based on file access time
#[arg(short('A'), long, allow_hyphen_values(true))]
pub atime: Option<String>,
/// just like -mtime, but based on file change time
#[arg(short('y'), long, allow_hyphen_values(true))]
pub ctime: Option<String>,
/// run dust on NUL-terminated file names specified in file; if argument is
/// -, then read names from standard input
#[arg(long, value_hint(ValueHint::AnyPath))]
pub files0_from: Option<String>,
/// Keep these directories collapsed
#[arg(long, value_hint(ValueHint::AnyPath))]
pub collapse: Option<Vec<String>>,
/// Directory 'size' is max filetime of child files instead of disk size,
/// where a/c/m selects last accessed/changed/modified time
#[arg(short('m'), long, value_enum)]
pub filetime: Option<FileTime>,
}
#[derive(Clone, Copy, Debug, ValueEnum)]
#[value(rename_all = "lower")]
pub enum OutputFormat {
/// SI prefix (powers of 1000)
SI,
/// byte (B)
B,
/// kibibyte (KiB)
#[value(name = "k", alias("kib"))]
KiB,
/// mebibyte (MiB)
#[value(name = "m", alias("mib"))]
MiB,
/// gibibyte (GiB)
#[value(name = "g", alias("gib"))]
GiB,
/// tebibyte (TiB)
#[value(name = "t", alias("tib"))]
TiB,
/// kilobyte (kB)
KB,
/// megabyte (MB)
MB,
/// gigabyte (GB)
GB,
/// terabyte (TB)
TB,
}
impl fmt::Display for OutputFormat {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::SI => write!(f, "si"),
Self::B => write!(f, "b"),
Self::KiB => write!(f, "k"),
Self::MiB => write!(f, "m"),
Self::GiB => write!(f, "g"),
Self::TiB => write!(f, "t"),
Self::KB => write!(f, "kb"),
Self::MB => write!(f, "mb"),
Self::GB => write!(f, "gb"),
Self::TB => write!(f, "tb"),
}
}
}
#[derive(Clone, Copy, Debug, ValueEnum)]
pub enum FileTime {
/// last accessed time
#[value(name = "a", alias("accessed"))]
Accessed,
/// last changed time
#[value(name = "c", alias("changed"))]
Changed,
/// last modified time
#[value(name = "m", alias("modified"))]
Modified,
}
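// A hedged usage sketch (not part of the original cli.rs): parsing a command
// line through the derive above. The flag values and the path are arbitrary
// examples chosen for illustration.
#[allow(dead_code)]
fn parse_example() {
    let cli = Cli::parse_from(["dust", "-d", "3", "--output-format", "si", "some/dir"]);
    // `-d 3` fills `depth`, the value enum accepts the lowercase name "si",
    // and the trailing positional lands in `params`.
    assert_eq!(cli.depth, Some(3));
    assert!(matches!(cli.output_format, Some(OutputFormat::SI)));
    assert_eq!(cli.params, Some(vec!["some/dir".to_string()]));
}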

View File

@@ -1,383 +0,0 @@
use crate::node::FileTime;
use chrono::{Local, TimeZone};
use config_file::FromConfigFile;
use regex::Regex;
use serde::Deserialize;
use std::path::Path;
use std::path::PathBuf;
use crate::cli::Cli;
use crate::dir_walker::Operator;
use crate::display::get_number_format;
pub static DAY_SECONDS: i64 = 24 * 60 * 60;
#[derive(Deserialize, Default)]
#[serde(rename_all = "kebab-case")]
pub struct Config {
pub display_full_paths: Option<bool>,
pub display_apparent_size: Option<bool>,
pub reverse: Option<bool>,
pub no_colors: Option<bool>,
pub force_colors: Option<bool>,
pub no_bars: Option<bool>,
pub skip_total: Option<bool>,
pub screen_reader: Option<bool>,
pub ignore_hidden: Option<bool>,
pub output_format: Option<String>,
pub min_size: Option<String>,
pub only_dir: Option<bool>,
pub only_file: Option<bool>,
pub disable_progress: Option<bool>,
pub depth: Option<usize>,
pub bars_on_right: Option<bool>,
pub stack_size: Option<usize>,
pub threads: Option<usize>,
pub output_json: Option<bool>,
pub print_errors: Option<bool>,
pub files0_from: Option<String>,
}
impl Config {
pub fn get_files_from(&self, options: &Cli) -> Option<String> {
let from_file = &options.files0_from;
match from_file {
None => self.files0_from.as_ref().map(|x| x.to_string()),
Some(x) => Some(x.to_string()),
}
}
pub fn get_no_colors(&self, options: &Cli) -> bool {
Some(true) == self.no_colors || options.no_colors
}
pub fn get_force_colors(&self, options: &Cli) -> bool {
Some(true) == self.force_colors || options.force_colors
}
pub fn get_disable_progress(&self, options: &Cli) -> bool {
Some(true) == self.disable_progress || options.no_progress
}
pub fn get_apparent_size(&self, options: &Cli) -> bool {
Some(true) == self.display_apparent_size || options.apparent_size
}
pub fn get_ignore_hidden(&self, options: &Cli) -> bool {
Some(true) == self.ignore_hidden || options.ignore_hidden
}
pub fn get_full_paths(&self, options: &Cli) -> bool {
Some(true) == self.display_full_paths || options.full_paths
}
pub fn get_reverse(&self, options: &Cli) -> bool {
Some(true) == self.reverse || options.reverse
}
pub fn get_no_bars(&self, options: &Cli) -> bool {
Some(true) == self.no_bars || options.no_percent_bars
}
pub fn get_output_format(&self, options: &Cli) -> String {
let out_fmt = options.output_format;
(match out_fmt {
None => match &self.output_format {
None => "".to_string(),
Some(x) => x.to_string(),
},
Some(x) => x.to_string(),
})
.to_lowercase()
}
pub fn get_filetime(&self, options: &Cli) -> Option<FileTime> {
options.filetime.map(FileTime::from)
}
pub fn get_skip_total(&self, options: &Cli) -> bool {
Some(true) == self.skip_total || options.skip_total
}
pub fn get_screen_reader(&self, options: &Cli) -> bool {
Some(true) == self.screen_reader || options.screen_reader
}
pub fn get_depth(&self, options: &Cli) -> usize {
if let Some(v) = options.depth {
return v;
}
self.depth.unwrap_or(usize::MAX)
}
pub fn get_min_size(&self, options: &Cli) -> Option<usize> {
let size_from_param = options.min_size.as_ref();
self._get_min_size(size_from_param)
}
fn _get_min_size(&self, min_size: Option<&String>) -> Option<usize> {
let size_from_param = min_size.and_then(|a| convert_min_size(a));
if size_from_param.is_none() {
self.min_size
.as_ref()
.and_then(|a| convert_min_size(a.as_ref()))
} else {
size_from_param
}
}
pub fn get_only_dir(&self, options: &Cli) -> bool {
Some(true) == self.only_dir || options.only_dir
}
pub fn get_print_errors(&self, options: &Cli) -> bool {
Some(true) == self.print_errors || options.print_errors
}
pub fn get_only_file(&self, options: &Cli) -> bool {
Some(true) == self.only_file || options.only_file
}
pub fn get_bars_on_right(&self, options: &Cli) -> bool {
Some(true) == self.bars_on_right || options.bars_on_right
}
pub fn get_custom_stack_size(&self, options: &Cli) -> Option<usize> {
let from_cmd_line = options.stack_size;
if from_cmd_line.is_none() {
self.stack_size
} else {
from_cmd_line
}
}
pub fn get_threads(&self, options: &Cli) -> Option<usize> {
let from_cmd_line = options.threads;
if from_cmd_line.is_none() {
self.threads
} else {
from_cmd_line
}
}
pub fn get_output_json(&self, options: &Cli) -> bool {
Some(true) == self.output_json || options.output_json
}
pub fn get_modified_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
get_filter_time_operator(options.mtime.as_ref(), get_current_date_epoch_seconds())
}
pub fn get_accessed_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
get_filter_time_operator(options.atime.as_ref(), get_current_date_epoch_seconds())
}
pub fn get_changed_time_operator(&self, options: &Cli) -> Option<(Operator, i64)> {
get_filter_time_operator(options.ctime.as_ref(), get_current_date_epoch_seconds())
}
}
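// Illustrative sketch (not part of the original config.rs): the
// "command-line flag overrides config file" pattern that the getters above
// all follow, shown here for --no-colors.
#[allow(dead_code)]
fn no_colors_precedence_example() {
    use clap::Parser;
    let from_config = Config {
        no_colors: Some(true),
        ..Default::default()
    };
    // Set only in the config file: the getter reports true.
    assert!(from_config.get_no_colors(&Cli::parse_from(["dust"])));
    // Set only on the command line: the getter also reports true.
    assert!(Config::default().get_no_colors(&Cli::parse_from(["dust", "--no-colors"])));
}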
fn get_current_date_epoch_seconds() -> i64 {
// calculate current date epoch seconds
let now = Local::now();
let current_date = now.date_naive();
let current_date_time = current_date.and_hms_opt(0, 0, 0).unwrap();
Local
.from_local_datetime(&current_date_time)
.unwrap()
.timestamp()
}
fn get_filter_time_operator(
option_value: Option<&String>,
current_date_epoch_seconds: i64,
) -> Option<(Operator, i64)> {
match option_value {
Some(val) => {
let time = current_date_epoch_seconds
- val
.parse::<i64>()
.unwrap_or_else(|_| panic!("invalid data format"))
.abs()
* DAY_SECONDS;
match val.chars().next().expect("Value should not be empty") {
'+' => Some((Operator::LessThan, time - DAY_SECONDS)),
'-' => Some((Operator::GreaterThan, time)),
_ => Some((Operator::Equal, time - DAY_SECONDS)),
}
}
None => None,
}
}
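// Worked example (not part of the original config.rs) of how the +n / n / -n
// forms accepted by --mtime, --atime and --ctime map onto (Operator, threshold)
// pairs. The epoch value is an arbitrary stand-in for "midnight today".
#[allow(dead_code)]
fn filter_time_operator_examples() {
    let midnight: i64 = 1_700_000_000;
    // "+3": strictly older than three whole days -> (−∞, midnight − 4 days)
    assert!(matches!(
        get_filter_time_operator(Some(&"+3".to_string()), midnight),
        Some((Operator::LessThan, t)) if t == midnight - 4 * DAY_SECONDS
    ));
    // "3": exactly three days old, rounded down -> [midnight − 4 days, midnight − 3 days)
    assert!(matches!(
        get_filter_time_operator(Some(&"3".to_string()), midnight),
        Some((Operator::Equal, t)) if t == midnight - 4 * DAY_SECONDS
    ));
    // "-3": newer than three days -> (midnight − 3 days, +∞)
    assert!(matches!(
        get_filter_time_operator(Some(&"-3".to_string()), midnight),
        Some((Operator::GreaterThan, t)) if t == midnight - 3 * DAY_SECONDS
    ));
}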
fn convert_min_size(input: &str) -> Option<usize> {
let re = Regex::new(r"([0-9]+)(\w*)").unwrap();
if let Some(cap) = re.captures(input) {
let (_, [digits, letters]) = cap.extract();
// Failure to parse should be impossible due to regex match
let digits_as_usize: Option<usize> = digits.parse().ok();
match digits_as_usize {
Some(parsed_digits) => {
let number_format = get_number_format(&letters.to_lowercase());
match number_format {
Some((multiple, _)) => Some(parsed_digits * (multiple as usize)),
None => {
if letters.is_empty() {
Some(parsed_digits)
} else {
eprintln!("Ignoring invalid min-size: {input}");
None
}
}
}
}
None => None,
}
} else {
None
}
}
fn get_config_locations(base: &Path) -> Vec<PathBuf> {
vec![
base.join(".dust.toml"),
base.join(".config").join("dust").join("config.toml"),
]
}
pub fn get_config(conf_path: Option<&String>) -> Config {
match conf_path {
Some(path_str) => {
let path = Path::new(path_str);
if path.exists() {
match Config::from_config_file(path) {
Ok(config) => return config,
Err(e) => {
eprintln!("Ignoring invalid config file '{}': {}", &path.display(), e)
}
}
} else {
eprintln!("Config file {:?} doesn't exist", &path.display());
}
}
None => {
if let Some(home) = directories::BaseDirs::new() {
for path in get_config_locations(home.home_dir()) {
if path.exists() {
if let Ok(config) = Config::from_config_file(&path) {
return config;
}
}
}
}
}
}
Config {
..Default::default()
}
}
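// Hedged usage sketch (not part of the original config.rs): how `get_config`
// resolves a config file like the sample shown earlier in this diff. The
// path below is hypothetical.
#[allow(dead_code)]
fn get_config_example() {
    // Explicit path, as passed via `--config`; falls back to the default
    // Config if the file is missing or fails to parse.
    let explicit = get_config(Some(&"/tmp/dust-config.toml".to_string()));
    // No path: search ~/.dust.toml and ~/.config/dust/config.toml, otherwise
    // return Config::default().
    let implicit = get_config(None);
    let _ = (explicit.reverse, implicit.reverse);
}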
#[cfg(test)]
mod tests {
#[allow(unused_imports)]
use super::*;
use chrono::{Datelike, Timelike};
use clap::Parser;
#[test]
fn test_get_current_date_epoch_seconds() {
let epoch_seconds = get_current_date_epoch_seconds();
let dt = Local.timestamp_opt(epoch_seconds, 0).unwrap();
assert_eq!(dt.hour(), 0);
assert_eq!(dt.minute(), 0);
assert_eq!(dt.second(), 0);
assert_eq!(dt.date_naive().day(), Local::now().date_naive().day());
assert_eq!(dt.date_naive().month(), Local::now().date_naive().month());
assert_eq!(dt.date_naive().year(), Local::now().date_naive().year());
}
#[test]
fn test_conversion() {
assert_eq!(convert_min_size("55"), Some(55));
assert_eq!(convert_min_size("12344321"), Some(12344321));
assert_eq!(convert_min_size("95RUBBISH"), None);
assert_eq!(convert_min_size("10Ki"), Some(10 * 1024));
assert_eq!(convert_min_size("10MiB"), Some(10 * 1024usize.pow(2)));
assert_eq!(convert_min_size("10M"), Some(10 * 1024usize.pow(2)));
assert_eq!(convert_min_size("10Mb"), Some(10 * 1000usize.pow(2)));
assert_eq!(convert_min_size("2Gi"), Some(2 * 1024usize.pow(3)));
}
#[test]
fn test_min_size_from_config_applied_or_overridden() {
let c = Config {
min_size: Some("1KiB".to_owned()),
..Default::default()
};
assert_eq!(c._get_min_size(None), Some(1024));
assert_eq!(c._get_min_size(Some(&"2KiB".into())), Some(2048));
assert_eq!(c._get_min_size(Some(&"1kb".into())), Some(1000));
assert_eq!(c._get_min_size(Some(&"2KB".into())), Some(2000));
}
#[test]
fn test_get_depth() {
// No config and no flag.
let c = Config::default();
let args = get_args(vec![]);
assert_eq!(c.get_depth(&args), usize::MAX);
// Config is not defined and flag is defined.
let c = Config::default();
let args = get_args(vec!["dust", "--depth", "5"]);
assert_eq!(c.get_depth(&args), 5);
// Config is defined and flag is not defined.
let c = Config {
depth: Some(3),
..Default::default()
};
let args = get_args(vec![]);
assert_eq!(c.get_depth(&args), 3);
// Both config and flag are defined.
let c = Config {
depth: Some(3),
..Default::default()
};
let args = get_args(vec!["dust", "--depth", "5"]);
assert_eq!(c.get_depth(&args), 5);
}
fn get_args(args: Vec<&str>) -> Cli {
Cli::parse_from(args)
}
#[test]
fn test_get_filetime() {
// No config and no flag.
let c = Config::default();
let args = get_filetime_args(vec!["dust"]);
assert_eq!(c.get_filetime(&args), None);
// Config is not defined and flag is defined as access time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "a"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "accessed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Accessed));
// Config is not defined and flag is defined as modified time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "m"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "modified"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Modified));
// Config is not defined and flag is defined as changed time
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "c"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
let c = Config::default();
let args = get_filetime_args(vec!["dust", "--filetime", "changed"]);
assert_eq!(c.get_filetime(&args), Some(FileTime::Changed));
}
fn get_filetime_args(args: Vec<&str>) -> Cli {
Cli::parse_from(args)
}
}

View File

@@ -1,327 +1,167 @@
use std::cmp::Ordering;
use std::fs;
use std::io::Error;
use std::sync::Arc;
use std::sync::Mutex;
use crate::node::Node;
use crate::progress::ORDERING;
use crate::progress::Operation;
use crate::progress::PAtomicInfo;
use crate::progress::RuntimeErrors;
use crate::utils::is_filtered_out_due_to_file_time;
use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex;
use rayon::iter::ParallelBridge;
use rayon::prelude::ParallelIterator;
use regex::Regex;
use std::path::Path;
use std::path::PathBuf;
use std::sync::atomic;
use std::sync::atomic::AtomicBool;
use std::collections::HashSet;
use crate::node::build_node;
use std::fs::DirEntry;
use crate::node::FileTime;
use crate::platform::get_metadata;
#[derive(Debug)]
pub enum Operator {
Equal = 0,
LessThan = 1,
GreaterThan = 2,
}
pub struct WalkData<'a> {
pub struct WalkData {
pub ignore_directories: HashSet<PathBuf>,
pub filter_regex: &'a [Regex],
pub invert_filter_regex: &'a [Regex],
pub filter_regex: Option<Regex>,
pub invert_filter_regex: Option<Regex>,
pub allowed_filesystems: HashSet<u64>,
pub filter_modified_time: Option<(Operator, i64)>,
pub filter_accessed_time: Option<(Operator, i64)>,
pub filter_changed_time: Option<(Operator, i64)>,
pub use_apparent_size: bool,
pub by_filecount: bool,
pub by_filetime: &'a Option<FileTime>,
pub ignore_hidden: bool,
pub follow_links: bool,
pub progress_data: Arc<PAtomicInfo>,
pub errors: Arc<Mutex<RuntimeErrors>>,
}
pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: &WalkData) -> Vec<Node> {
let mut inodes = HashSet::new();
pub fn walk_it(dirs: HashSet<PathBuf>, walk_data: WalkData) -> (Vec<Node>, bool) {
let permissions_flag = AtomicBool::new(false);
let top_level_nodes: Vec<_> = dirs
.into_iter()
.filter_map(|d| {
let prog_data = &walk_data.progress_data;
prog_data.clear_state(&d);
let node = walk(d, walk_data, 0)?;
prog_data.state.store(Operation::PREPARING, ORDERING);
clean_inodes(node, &mut inodes, walk_data)
let n = walk(d, &permissions_flag, &walk_data);
match n {
Some(n) => {
let mut inodes: HashSet<(u64, u64)> = HashSet::new();
clean_inodes(n, &mut inodes, walk_data.use_apparent_size)
}
None => None,
}
})
.collect();
top_level_nodes
(top_level_nodes, permissions_flag.into_inner())
}
// Remove files which have the same inode; we don't want to double count them.
fn clean_inodes(x: Node, inodes: &mut HashSet<(u64, u64)>, walk_data: &WalkData) -> Option<Node> {
if !walk_data.use_apparent_size {
fn clean_inodes(
x: Node,
inodes: &mut HashSet<(u64, u64)>,
use_apparent_size: bool,
) -> Option<Node> {
if !use_apparent_size {
if let Some(id) = x.inode_device {
if !inodes.insert(id) {
if inodes.contains(&id) {
return None;
}
inodes.insert(id);
}
}
// Sort Nodes so iteration order is predictable
let mut tmp: Vec<_> = x.children;
tmp.sort_by(sort_by_inode);
let new_children: Vec<_> = tmp
let new_children: Vec<_> = x
.children
.into_iter()
.filter_map(|c| clean_inodes(c, inodes, walk_data))
.filter_map(|c| clean_inodes(c, inodes, use_apparent_size))
.collect();
let actual_size = if walk_data.by_filetime.is_some() {
// If by_filetime is Some, directory 'size' is the maximum filetime among child files instead of disk size
new_children
.iter()
.map(|c| c.size)
.chain(std::iter::once(x.size))
.max()
.unwrap_or(0)
} else {
// If by_filetime is None, directory 'size' is the sum of disk sizes or file counts of child files
x.size + new_children.iter().map(|c| c.size).sum::<u64>()
};
Some(Node {
return Some(Node {
name: x.name,
size: actual_size,
size: x.size + new_children.iter().map(|c| c.size).sum::<u64>(),
children: new_children,
inode_device: x.inode_device,
depth: x.depth,
})
}
fn sort_by_inode(a: &Node, b: &Node) -> std::cmp::Ordering {
// Sorting by inode is quicker than sorting by name/size
match (a.inode_device, b.inode_device) {
(Some(x), Some(y)) => {
if x.0 != y.0 {
x.0.cmp(&y.0)
} else if x.1 != y.1 {
x.1.cmp(&y.1)
} else {
a.name.cmp(&b.name)
}
}
(Some(_), None) => Ordering::Greater,
(None, Some(_)) => Ordering::Less,
(None, None) => a.name.cmp(&b.name),
}
}
// Check if `path` is inside an ignored directory
fn is_ignored_path(path: &Path, walk_data: &WalkData) -> bool {
if walk_data.ignore_directories.contains(path) {
return true;
}
// Entry is inside an ignored absolute path
// Absolute paths should be canonicalized before being added to `WalkData.ignore_directories`
for ignored_path in walk_data.ignore_directories.iter() {
if !ignored_path.is_absolute() {
continue;
}
let absolute_entry_path = std::fs::canonicalize(path).unwrap_or_default();
if absolute_entry_path.starts_with(ignored_path) {
return true;
}
}
false
});
}
fn ignore_file(entry: &DirEntry, walk_data: &WalkData) -> bool {
if is_ignored_path(&entry.path(), walk_data) {
return true;
}
let is_dot_file = entry.file_name().to_str().unwrap_or("").starts_with('.');
let follow_links = walk_data.follow_links && entry.file_type().is_ok_and(|ft| ft.is_symlink());
let is_ignored_path = walk_data.ignore_directories.contains(&entry.path());
if !walk_data.allowed_filesystems.is_empty() {
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_size, Some((_id, dev)), _gunk)) = size_inode_device {
let size_inode_device = get_metadata(&entry.path(), false);
if let Some((_size, Some((_id, dev)))) = size_inode_device {
if !walk_data.allowed_filesystems.contains(&dev) {
return true;
}
}
}
if walk_data.filter_accessed_time.is_some()
|| walk_data.filter_modified_time.is_some()
|| walk_data.filter_changed_time.is_some()
{
let size_inode_device = get_metadata(entry.path(), false, follow_links);
if let Some((_, _, (modified_time, accessed_time, changed_time))) = size_inode_device {
if entry.path().is_file()
&& [
(&walk_data.filter_modified_time, modified_time),
(&walk_data.filter_accessed_time, accessed_time),
(&walk_data.filter_changed_time, changed_time),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
})
{
return true;
}
}
}
// Keeping `walk_data.filter_regex.is_empty()` is important for performance reasons, it stops unnecessary work
if !walk_data.filter_regex.is_empty()
// Keeping `walk_data.filter_regex.is_some()` is important for performance reasons, it stops unnecessary work
if walk_data.filter_regex.is_some()
&& entry.path().is_file()
&& is_filtered_out_due_to_regex(walk_data.filter_regex, &entry.path())
&& is_filtered_out_due_to_regex(&walk_data.filter_regex, &entry.path())
{
return true;
}
if !walk_data.invert_filter_regex.is_empty()
if walk_data.invert_filter_regex.is_some()
&& entry.path().is_file()
&& is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &entry.path())
&& is_filtered_out_due_to_invert_regex(&walk_data.invert_filter_regex, &entry.path())
{
return true;
}
is_dot_file && walk_data.ignore_hidden
(is_dot_file && walk_data.ignore_hidden) || is_ignored_path
}
fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
let prog_data = &walk_data.progress_data;
let errors = &walk_data.errors;
fn walk(dir: PathBuf, permissions_flag: &AtomicBool, walk_data: &WalkData) -> Option<Node> {
let mut children = vec![];
let children = if dir.is_dir() {
let read_dir = fs::read_dir(&dir);
match read_dir {
Ok(entries) => {
entries
.into_iter()
.par_bridge()
.filter_map(|entry| {
match entry {
Ok(ref entry) => {
// uncommenting the below line gives simpler code but
// rayon doesn't parallelize as well giving a 3X performance drop
// hence we unravel the recursion a bit
if let Ok(entries) = fs::read_dir(dir.clone()) {
children = entries
.into_iter()
.par_bridge()
.filter_map(|entry| {
if let Ok(ref entry) = entry {
// uncommenting the below line gives simpler code but
// rayon doesn't parallelise as well giving a 3X performance drop
// hence we unravel the recursion a bit
// return walk(entry.path(), walk_data, depth)
// return walk(entry.path(), permissions_flag, ignore_directories, allowed_filesystems, use_apparent_size, by_filecount, ignore_hidden);
if !ignore_file(entry, walk_data) {
if let Ok(data) = entry.file_type() {
if data.is_dir()
|| (walk_data.follow_links && data.is_symlink())
{
return walk(entry.path(), walk_data, depth + 1);
}
let node = build_node(
entry.path(),
vec![],
data.is_symlink(),
data.is_file(),
depth,
walk_data,
);
prog_data.num_files.fetch_add(1, ORDERING);
if let Some(ref file) = node {
prog_data
.total_file_size
.fetch_add(file.size, ORDERING);
}
return node;
}
}
}
Err(ref failed) => {
if handle_error_and_retry(failed, &dir, walk_data) {
return walk(dir.clone(), walk_data, depth);
}
if !ignore_file(entry, walk_data) {
if let Ok(data) = entry.file_type() {
if data.is_dir() && !data.is_symlink() {
return walk(entry.path(), permissions_flag, walk_data);
}
return build_node(
entry.path(),
vec![],
&walk_data.filter_regex,
&walk_data.invert_filter_regex,
walk_data.use_apparent_size,
data.is_symlink(),
data.is_file(),
walk_data.by_filecount,
);
}
None
})
.collect()
}
Err(failed) => {
if handle_error_and_retry(&failed, &dir, walk_data) {
return walk(dir, walk_data, depth);
}
} else {
vec![]
permissions_flag.store(true, atomic::Ordering::Relaxed);
}
}
}
None
})
.collect();
} else {
if !dir.is_file() {
let mut editable_error = errors.lock().unwrap();
let bad_file = dir.as_os_str().to_string_lossy().into();
editable_error.file_not_found.insert(bad_file);
}
vec![]
};
let is_symlink = if walk_data.follow_links {
match fs::symlink_metadata(&dir) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
} else {
false
};
build_node(dir, children, is_symlink, false, depth, walk_data)
}
fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
let mut editable_error = walk_data.errors.lock().unwrap();
match failed.kind() {
std::io::ErrorKind::PermissionDenied => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::InvalidInput => {
editable_error
.no_permissions
.insert(dir.to_string_lossy().into());
}
std::io::ErrorKind::NotFound => {
editable_error.file_not_found.insert(failed.to_string());
}
std::io::ErrorKind::Interrupted => {
editable_error.interrupted_error += 1;
// This does happen on some systems. It was set to 3 but sometimes dust runs would exceed this
// However, if there is no limit this results in infinite retries and dust never finishes
if editable_error.interrupted_error > 999 {
panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
} else {
return true;
}
}
_ => {
editable_error.unknown_error.insert(failed.to_string());
}
permissions_flag.store(true, atomic::Ordering::Relaxed);
}
false
build_node(
dir,
children,
&walk_data.filter_regex,
&walk_data.invert_filter_regex,
walk_data.use_apparent_size,
false,
false,
walk_data.by_filecount,
)
}
mod tests {
#[allow(unused_imports)]
use super::*;
@@ -332,101 +172,28 @@ mod tests {
size: 10,
children: vec![],
inode_device: Some((5, 6)),
depth: 0,
}
}
#[cfg(test)]
fn create_walker<'a>(use_apparent_size: bool) -> WalkData<'a> {
use crate::PIndicator;
let indicator = PIndicator::build_me();
WalkData {
ignore_directories: HashSet::new(),
filter_regex: &[],
invert_filter_regex: &[],
allowed_filesystems: HashSet::new(),
filter_modified_time: Some((Operator::GreaterThan, 0)),
filter_accessed_time: Some((Operator::GreaterThan, 0)),
filter_changed_time: Some((Operator::GreaterThan, 0)),
use_apparent_size,
by_filecount: false,
by_filetime: &None,
ignore_hidden: false,
follow_links: false,
progress_data: indicator.data.clone(),
errors: Arc::new(Mutex::new(RuntimeErrors::default())),
}
}
#[test]
#[allow(clippy::redundant_clone)]
fn test_should_ignore_file() {
let mut inodes = HashSet::new();
let n = create_node();
let walkdata = create_walker(false);
// First time we insert the node
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
assert!(clean_inodes(n.clone(), &mut inodes, false) == Some(n.clone()));
// Second time is a duplicate - we ignore it
assert_eq!(clean_inodes(n.clone(), &mut inodes, &walkdata), None);
assert!(clean_inodes(n.clone(), &mut inodes, false) == None);
}
#[test]
#[allow(clippy::redundant_clone)]
fn test_should_not_ignore_files_if_using_apparent_size() {
let mut inodes = HashSet::new();
let n = create_node();
let walkdata = create_walker(true);
// If using apparent size we include Nodes, even if they have duplicate inodes
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
assert_eq!(
clean_inodes(n.clone(), &mut inodes, &walkdata),
Some(n.clone())
);
}
#[test]
fn test_total_ordering_of_sort_by_inode() {
use std::str::FromStr;
let a = Node {
name: PathBuf::from_str("a").unwrap(),
size: 0,
children: vec![],
inode_device: Some((3, 66310)),
depth: 0,
};
let b = Node {
name: PathBuf::from_str("b").unwrap(),
size: 0,
children: vec![],
inode_device: None,
depth: 0,
};
let c = Node {
name: PathBuf::from_str("c").unwrap(),
size: 0,
children: vec![],
inode_device: Some((1, 66310)),
depth: 0,
};
assert_eq!(sort_by_inode(&a, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&a, &c), Ordering::Greater);
assert_eq!(sort_by_inode(&c, &b), Ordering::Greater);
assert_eq!(sort_by_inode(&b, &a), Ordering::Less);
assert_eq!(sort_by_inode(&c, &a), Ordering::Less);
assert_eq!(sort_by_inode(&b, &c), Ordering::Less);
assert!(clean_inodes(n.clone(), &mut inodes, true) == Some(n.clone()));
assert!(clean_inodes(n.clone(), &mut inodes, true) == Some(n.clone()));
}
}

View File

@@ -1,38 +1,29 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
extern crate ansi_term;
use ansi_term::Colour::Red;
use crate::display_node::DisplayNode;
use self::ansi_term::Colour::Red;
use lscolors::{LsColors, Style};
use unicode_width::UnicodeWidthStr;
use stfu8::encode_u8;
use chrono::{DateTime, Local, TimeZone, Utc};
use std::cmp::max;
use std::cmp::min;
use std::fs;
use std::iter::repeat_n;
use std::iter::repeat;
use std::path::Path;
use thousands::Separable;
pub static UNITS: [char; 5] = ['P', 'T', 'G', 'M', 'K'];
static UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
static BLOCKS: [char; 5] = ['█', '▓', '▒', '░', ' '];
const FILETIME_SHOW_LENGTH: usize = 19;
pub struct InitialDisplayData {
pub struct DisplayData {
pub short_paths: bool,
pub is_reversed: bool,
pub colors_on: bool,
pub by_filecount: bool,
pub by_filetime: Option<FileTime>,
pub is_screen_reader: bool,
pub output_format: String,
pub bars_on_right: bool,
}
pub struct DisplayData {
pub initial: InitialDisplayData,
pub num_chars_needed_on_left_most: usize,
pub base_size: u64,
pub longest_string_length: usize,
@@ -41,7 +32,7 @@ pub struct DisplayData {
impl DisplayData {
fn get_tree_chars(&self, was_i_last: bool, has_children: bool) -> &'static str {
match (self.initial.is_reversed, was_i_last, has_children) {
match (self.is_reversed, was_i_last, has_children) {
(true, true, true) => "┌─┴",
(true, true, false) => "┌──",
(true, false, true) => "├─┴",
@@ -54,7 +45,7 @@ impl DisplayData {
}
fn is_biggest(&self, num_siblings: usize, max_siblings: u64) -> bool {
if self.initial.is_reversed {
if self.is_reversed {
num_siblings == (max_siblings - 1) as usize
} else {
num_siblings == 0
@@ -62,7 +53,7 @@ impl DisplayData {
}
fn is_last(&self, num_siblings: usize, max_siblings: u64) -> bool {
if self.initial.is_reversed {
if self.is_reversed {
num_siblings == 0
} else {
num_siblings == (max_siblings - 1) as usize
@@ -71,7 +62,11 @@ impl DisplayData {
fn percent_size(&self, node: &DisplayNode) -> f32 {
let result = node.size as f32 / self.base_size as f32;
if result.is_normal() { result } else { 0.0 }
if result.is_normal() {
result
} else {
0.0
}
}
}
@@ -89,23 +84,14 @@ impl DrawData<'_> {
// TODO: can we test this?
fn generate_bar(&self, node: &DisplayNode, level: usize) -> String {
if self.display_data.initial.is_screen_reader {
return level.to_string();
}
let chars_in_bar = self.percent_bar.chars().count();
let num_bars = chars_in_bar as f32 * self.display_data.percent_size(node);
let mut num_not_my_bar = (chars_in_bar as i32) - num_bars as i32;
let mut new_bar = "".to_string();
let idx = 5 - level.clamp(1, 4);
let idx = 5 - min(4, max(1, level));
let itr: Box<dyn Iterator<Item = char>> = if self.display_data.initial.bars_on_right {
Box::new(self.percent_bar.chars())
} else {
Box::new(self.percent_bar.chars().rev())
};
for c in itr {
for c in self.percent_bar.chars() {
num_not_my_bar -= 1;
if num_not_my_bar <= 0 {
new_bar.push(BLOCKS[0]);
@@ -115,50 +101,50 @@ impl DrawData<'_> {
new_bar.push(c);
}
}
if self.display_data.initial.bars_on_right {
new_bar
} else {
new_bar.chars().rev().collect()
}
new_bar
}
}
#[allow(clippy::too_many_arguments)]
pub fn draw_it(
idd: InitialDisplayData,
root_node: &DisplayNode,
no_percent_bars: bool,
use_full_path: bool,
is_reversed: bool,
no_colors: bool,
no_percents: bool,
terminal_width: usize,
skip_total: bool,
by_filecount: bool,
option_root_node: Option<DisplayNode>,
) {
let num_chars_needed_on_left_most = if idd.by_filecount {
if option_root_node.is_none() {
return;
}
let root_node = option_root_node.unwrap();
let num_chars_needed_on_left_most = if by_filecount {
let max_size = root_node.size;
max_size.separate_with_commas().chars().count()
} else if idd.by_filetime.is_some() {
FILETIME_SHOW_LENGTH
} else {
find_biggest_size_str(root_node, &idd.output_format)
5 // Under normal usage we need 5 chars to display the size of a directory
};
assert!(
terminal_width > num_chars_needed_on_left_most + 2,
"Not enough terminal width"
);
let allowed_width = terminal_width - num_chars_needed_on_left_most - 2;
let terminal_width = terminal_width - 9 - num_chars_needed_on_left_most;
let num_indent_chars = 3;
let longest_string_length =
find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd);
find_longest_dir_name(&root_node, num_indent_chars, terminal_width, !use_full_path);
let max_bar_length = if no_percent_bars || longest_string_length + 7 >= allowed_width {
let max_bar_length = if no_percents || longest_string_length >= terminal_width as usize {
0
} else {
allowed_width - longest_string_length - 7
terminal_width as usize - longest_string_length
};
let first_size_bar = repeat_n(BLOCKS[0], max_bar_length).collect();
let first_size_bar = repeat(BLOCKS[0]).take(max_bar_length).collect::<String>();
let display_data = DisplayData {
initial: idd,
short_paths: !use_full_path,
is_reversed,
colors_on: !no_colors,
by_filecount,
num_chars_needed_on_left_most,
base_size: root_node.size,
longest_string_length,
@@ -169,69 +155,48 @@ pub fn draw_it(
percent_bar: first_size_bar,
display_data: &display_data,
};
if !skip_total {
display_node(root_node, &draw_data, true, true);
} else {
for (count, c) in root_node
.get_children_from_node(draw_data.display_data.initial.is_reversed)
.enumerate()
{
let is_biggest = display_data.is_biggest(count, root_node.num_siblings());
let was_i_last = display_data.is_last(count, root_node.num_siblings());
display_node(c, &draw_data, is_biggest, was_i_last);
}
}
}
fn find_biggest_size_str(node: &DisplayNode, output_format: &str) -> usize {
let mut mx = human_readable_number(node.size, output_format)
.chars()
.count();
for n in node.children.iter() {
mx = max(mx, find_biggest_size_str(n, output_format));
}
mx
display_node(root_node, &draw_data, true, true);
}
fn find_longest_dir_name(
node: &DisplayNode,
indent: usize,
terminal: usize,
idd: &InitialDisplayData,
long_paths: bool,
) -> usize {
let printable_name = get_printable_name(&node.name, idd.short_paths);
let longest = if idd.is_screen_reader {
UnicodeWidthStr::width(&*printable_name) + 1
} else {
min(
UnicodeWidthStr::width(&*printable_name) + 1 + indent,
terminal,
)
};
let printable_name = get_printable_name(&node.name, long_paths);
let longest = min(
UnicodeWidthStr::width(&*printable_name) + 1 + indent,
terminal,
);
// each non-root tree drawing is 2 more chars, hence we increment indent by 2
node.children
.iter()
.map(|c| find_longest_dir_name(c, indent + 2, terminal, idd))
.map(|c| find_longest_dir_name(c, indent + 2, terminal, long_paths))
.fold(longest, max)
}
fn display_node(node: &DisplayNode, draw_data: &DrawData, is_biggest: bool, is_last: bool) {
fn display_node(node: DisplayNode, draw_data: &DrawData, is_biggest: bool, is_last: bool) {
// hacky way of working out how deep we are in the tree
let indent = draw_data.get_new_indent(!node.children.is_empty(), is_last);
let level = ((indent.chars().count() - 1) / 2) - 1;
let bar_text = draw_data.generate_bar(node, level);
let bar_text = draw_data.generate_bar(&node, level);
let to_print = format_string(node, &indent, &bar_text, is_biggest, draw_data.display_data);
let to_print = format_string(
&node,
&*indent,
&*bar_text,
is_biggest,
draw_data.display_data,
);
if !draw_data.display_data.initial.is_reversed {
println!("{to_print}")
if !draw_data.display_data.is_reversed {
println!("{}", to_print)
}
let dd = DrawData {
indent: clean_indentation_string(&indent),
indent: clean_indentation_string(&*indent),
percent_bar: bar_text,
display_data: draw_data.display_data,
};
@@ -239,7 +204,7 @@ fn display_node(node: &DisplayNode, draw_data: &DrawData, is_biggest: bool, is_l
let num_siblings = node.num_siblings();
for (count, c) in node
.get_children_from_node(draw_data.display_data.initial.is_reversed)
.get_children_from_node(draw_data.display_data.is_reversed)
.enumerate()
{
let is_biggest = dd.display_data.is_biggest(count, num_siblings);
@@ -247,8 +212,8 @@ fn display_node(node: &DisplayNode, draw_data: &DrawData, is_biggest: bool, is_l
display_node(c, &dd, is_biggest, was_i_last);
}
if draw_data.display_data.initial.is_reversed {
println!("{to_print}")
if draw_data.display_data.is_reversed {
println!("{}", to_print)
}
}
@@ -269,10 +234,10 @@ fn clean_indentation_string(s: &str) -> String {
is
}
pub fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
fn get_printable_name<P: AsRef<Path>>(dir_name: &P, long_paths: bool) -> String {
let dir_name = dir_name.as_ref();
let printable_name = {
if short_paths {
if long_paths {
match dir_name.parent() {
Some(prefix) => match dir_name.strip_prefix(prefix) {
Ok(base) => base,
@@ -288,31 +253,27 @@ pub fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> St
}
fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &DisplayData) -> String {
let name = get_printable_name(&node.name, display_data.initial.short_paths);
let indent_and_name = format!("{indent} {name}");
let name = get_printable_name(&node.name, display_data.short_paths);
let indent_and_name = format!("{} {}", indent, name);
let width = UnicodeWidthStr::width(&*indent_and_name);
assert!(
display_data.longest_string_length >= width,
"Terminal width not wide enough to draw directory tree"
);
assert!(display_data.longest_string_length >= width);
// Add spaces after the filename so we can draw the % used bar chart.
name + " "
.repeat(display_data.longest_string_length - width)
.as_str()
let name_and_padding = name
+ " "
.repeat(display_data.longest_string_length - width)
.as_str();
maybe_trim_filename(name_and_padding, display_data)
}
fn maybe_trim_filename(name_in: String, indent: &str, display_data: &DisplayData) -> String {
let indent_length = UnicodeWidthStr::width(indent);
assert!(
display_data.longest_string_length >= indent_length + 2,
"Terminal width not wide enough to draw directory tree"
);
let max_size = display_data.longest_string_length - indent_length;
if UnicodeWidthStr::width(&*name_in) > max_size {
let name = name_in.chars().take(max_size - 2).collect::<String>();
fn maybe_trim_filename(name_in: String, display_data: &DisplayData) -> String {
if UnicodeWidthStr::width(&*name_in) > display_data.longest_string_length {
let name = name_in
.chars()
.take(display_data.longest_string_length - 2)
.collect::<String>();
name + ".."
} else {
name_in
@@ -322,22 +283,14 @@ fn maybe_trim_filename(name_in: String, indent: &str, display_data: &DisplayData
pub fn format_string(
node: &DisplayNode,
indent: &str,
bars: &str,
percent_bar: &str,
is_biggest: bool,
display_data: &DisplayData,
) -> String {
let (percent, name_and_padding) = get_name_percent(node, indent, bars, display_data);
let (percents, name_and_padding) = get_name_percent(node, indent, percent_bar, display_data);
let pretty_size = get_pretty_size(node, is_biggest, display_data);
let pretty_name = get_pretty_name(node, name_and_padding, display_data);
// we can clean this and the method below somehow, not sure yet
if display_data.initial.is_screen_reader {
// if screen_reader then bars is 'depth'
format!("{pretty_name} {bars} {pretty_size}{percent}")
} else if display_data.initial.by_filetime.is_some() {
format!("{pretty_size} {indent}{pretty_name}")
} else {
format!("{pretty_size} {indent} {pretty_name}{percent}")
}
format!("{} {} {}{}", pretty_size, indent, pretty_name, percents)
}
fn get_name_percent(
@@ -346,117 +299,66 @@ fn get_name_percent(
bar_chart: &str,
display_data: &DisplayData,
) -> (String, String) {
if display_data.initial.is_screen_reader {
let percent = display_data.percent_size(node) * 100.0;
let percent_size_str = format!("{percent:.0}%");
let percents = format!(" {percent_size_str:>4}",);
let name = pad_or_trim_filename(node, "", display_data);
(percents, name)
// Bar chart being empty may come from either config or the screen not being wide enough
} else if !bar_chart.is_empty() {
let percent = display_data.percent_size(node) * 100.0;
let percent_size_str = format!("{percent:.0}%");
let percents = format!("{bar_chart}{percent_size_str:>4}");
if !bar_chart.is_empty() {
let percent_size_str = format!("{:.0}%", display_data.percent_size(node) * 100.0);
let percents = format!("{}{:>4}", bar_chart, percent_size_str);
let name_and_padding = pad_or_trim_filename(node, indent, display_data);
(percents, name_and_padding)
} else {
let n = get_printable_name(&node.name, display_data.initial.short_paths);
let name = maybe_trim_filename(n, indent, display_data);
let n = get_printable_name(&node.name, display_data.short_paths);
let name = maybe_trim_filename(n, display_data);
("".into(), name)
}
}
fn get_pretty_size(node: &DisplayNode, is_biggest: bool, display_data: &DisplayData) -> String {
let output = if display_data.initial.by_filecount {
node.size.separate_with_commas()
} else if display_data.initial.by_filetime.is_some() {
get_pretty_file_modified_time(node.size as i64)
let output = if display_data.by_filecount {
let size_as_str = node.size.separate_with_commas();
let spaces_to_add =
display_data.num_chars_needed_on_left_most - size_as_str.chars().count();
size_as_str + " ".repeat(spaces_to_add).as_str()
} else {
human_readable_number(node.size, &display_data.initial.output_format)
format!("{:>5}", human_readable_number(node.size))
};
let spaces_to_add = display_data.num_chars_needed_on_left_most - output.chars().count();
let output = " ".repeat(spaces_to_add) + output.as_str();
if is_biggest && display_data.initial.colors_on {
if is_biggest && display_data.colors_on {
format!("{}", Red.paint(output))
} else {
output
}
}
fn get_pretty_file_modified_time(timestamp: i64) -> String {
let datetime: DateTime<Utc> = Utc.timestamp_opt(timestamp, 0).unwrap();
let local_datetime = datetime.with_timezone(&Local);
local_datetime.format("%Y-%m-%dT%H:%M:%S").to_string()
}
fn get_pretty_name(
node: &DisplayNode,
name_and_padding: String,
display_data: &DisplayData,
) -> String {
if display_data.initial.colors_on {
let meta_result = fs::metadata(&node.name);
if display_data.colors_on {
let meta_result = fs::metadata(node.name.clone());
let directory_color = display_data
.ls_colors
.style_for_path_with_metadata(&node.name, meta_result.as_ref().ok());
.style_for_path_with_metadata(node.name.clone(), meta_result.as_ref().ok());
let ansi_style = directory_color
.map(Style::to_ansi_term_style)
.unwrap_or_default();
let out = ansi_style.paint(name_and_padding);
format!("{out}")
format!("{}", ansi_style.paint(name_and_padding))
} else {
name_and_padding
}
}
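Editor's note: the padding/trimming logic above (pad_or_trim_filename / maybe_trim_filename) is hard to follow across the two versions interleaved in this diff. Below is a minimal, self-contained sketch of the same idea, assuming the unicode-width crate and mostly single-column characters; fit_to and the width of 10 are illustrative names, not taken from the diff.
use unicode_width::UnicodeWidthStr;
// Illustrative sketch: truncate or pad a name to a fixed display width.
fn fit_to(name: &str, width: usize) -> String {
    let w = UnicodeWidthStr::width(name);
    if w > width {
        // like maybe_trim_filename above: keep the leading chars and append ".."
        name.chars().take(width - 2).collect::<String>() + ".."
    } else {
        // like pad_or_trim_filename above: pad so the percent bar starts in a fixed column
        name.to_string() + &" ".repeat(width - w)
    }
}
fn main() {
    assert_eq!(fit_to("short", 10), "short     ");
    assert_eq!(fit_to("a_very_long_name", 10), "a_very_l..");
}
The real functions additionally assert that the terminal is wide enough before padding.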
// Whether we are working with SI units or not
pub fn get_type_of_thousand(output_str: &str) -> u64 {
if output_str.is_empty() {
1024
} else if output_str == "si" {
1000
} else if output_str.contains('i') || output_str.len() == 1 {
1024
} else {
1000
}
}
pub fn get_number_format(output_str: &str) -> Option<(u64, char)> {
if output_str.starts_with('b') {
return Some((1, 'B'));
}
fn human_readable_number(size: u64) -> String {
for (i, u) in UNITS.iter().enumerate() {
if output_str.starts_with((*u).to_ascii_lowercase()) {
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
return Some((marker, *u));
}
}
None
}
pub fn human_readable_number(size: u64, output_str: &str) -> String {
match get_number_format(output_str) {
Some((x, u)) => {
format!("{}{}", (size / x), u)
}
None => {
for (i, u) in UNITS.iter().enumerate() {
let marker = get_type_of_thousand(output_str).pow((UNITS.len() - i) as u32);
if size >= marker {
if size / marker < 10 {
return format!("{:.1}{}", (size as f32 / marker as f32), u);
} else {
return format!("{}{}", (size / marker), u);
}
}
let marker = 1024u64.pow((UNITS.len() - i) as u32);
if size >= marker {
if size / marker < 10 {
return format!("{:.1}{}", (size as f32 / marker as f32), u);
} else {
return format!("{}{}", (size / marker), u);
}
format!("{size}B")
}
}
return format!("{}B", size);
}
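Editor's note: the interleaved old/new bodies above can obscure the basic rounding scheme. This is a minimal sketch of the 1024-based fallback branch (the path taken when no -o output format is given); UNITS largest-first follows from the `UNITS.len() - i` exponent, and the function name is illustrative.
// Illustrative sketch of the 1024-based formatting used by the fallback branch above.
fn human_readable_1024(size: u64) -> String {
    const UNITS: [char; 4] = ['T', 'G', 'M', 'K'];
    for (i, u) in UNITS.iter().enumerate() {
        let marker = 1024u64.pow((UNITS.len() - i) as u32);
        if size >= marker {
            // one decimal place below 10 units, whole units above
            return if size / marker < 10 {
                format!("{:.1}{}", size as f64 / marker as f64, u)
            } else {
                format!("{}{}", size / marker, u)
            };
        }
    }
    format!("{}B", size)
}
fn main() {
    assert_eq!(human_readable_1024(1536), "1.5K");
    assert_eq!(human_readable_1024(1024 * 1024), "1.0M");
    assert_eq!(human_readable_1024(999), "999B");
}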
mod tests {
@@ -467,20 +369,13 @@ mod tests {
#[cfg(test)]
fn get_fake_display_data(longest_string_length: usize) -> DisplayData {
let initial = InitialDisplayData {
DisplayData {
short_paths: true,
is_reversed: false,
colors_on: false,
by_filecount: false,
by_filetime: None,
is_screen_reader: false,
output_format: "".into(),
bars_on_right: false,
};
DisplayData {
initial,
num_chars_needed_on_left_most: 5,
base_size: 2_u64.pow(12), // 4.0K
base_size: 1,
longest_string_length,
ls_colors: LsColors::from_env().unwrap_or_default(),
}
@@ -496,9 +391,14 @@ mod tests {
let indent = "┌─┴";
let percent_bar = "";
let is_biggest = false;
let data = get_fake_display_data(20);
let s = format_string(&n, indent, percent_bar, is_biggest, &data);
let s = format_string(
&n,
indent,
percent_bar,
is_biggest,
&get_fake_display_data(6),
);
assert_eq!(s, " 4.0K ┌─┴ short");
}
@@ -514,160 +414,25 @@ mod tests {
let percent_bar = "";
let is_biggest = false;
let data = get_fake_display_data(64);
let s = format_string(&n, indent, percent_bar, is_biggest, &data);
let dd = get_fake_display_data(64);
let s = format_string(&n, indent, percent_bar, is_biggest, &dd);
assert_eq!(
s,
" 4.0K ┌─┴ very_long_name_longer_than_the_eighty_character_limit_very_.."
" 4.0K ┌─┴ very_long_name_longer_than_the_eighty_character_limit_very_lon.."
);
}
#[test]
fn test_format_str_screen_reader() {
let n = DisplayNode {
name: PathBuf::from("/short"),
size: 2_u64.pow(12), // This is 4.0K
children: vec![],
};
let indent = "";
let percent_bar = "3";
let is_biggest = false;
let mut data = get_fake_display_data(20);
data.initial.is_screen_reader = true;
let s = format_string(&n, indent, percent_bar, is_biggest, &data);
assert_eq!(s, "short 3 4.0K 100%");
}
#[test]
fn test_human_readable_number() {
assert_eq!(human_readable_number(1, ""), "1B");
assert_eq!(human_readable_number(956, ""), "956B");
assert_eq!(human_readable_number(1004, ""), "1004B");
assert_eq!(human_readable_number(1024, ""), "1.0K");
assert_eq!(human_readable_number(1536, ""), "1.5K");
assert_eq!(human_readable_number(1024 * 512, ""), "512K");
assert_eq!(human_readable_number(1024 * 1024, ""), "1.0M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1, ""), "1023M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20, ""), "20G");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024, ""), "1.0T");
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 234, ""),
"234T"
);
assert_eq!(
human_readable_number(1024 * 1024 * 1024 * 1024 * 1024, ""),
"1.0P"
);
}
#[test]
fn test_human_readable_number_si() {
assert_eq!(human_readable_number(1024 * 100, ""), "100K");
assert_eq!(human_readable_number(1024 * 100, "si"), "102K");
}
// Refer to https://en.wikipedia.org/wiki/Byte#Multiple-byte_units
#[test]
fn test_human_readable_number_kb() {
let hrn = human_readable_number;
assert_eq!(hrn(1023, "b"), "1023B");
assert_eq!(hrn(1000 * 1000, "bytes"), "1000000B");
assert_eq!(hrn(1023, "kb"), "1K");
assert_eq!(hrn(1023, "k"), "0K");
assert_eq!(hrn(1023, "kib"), "0K");
assert_eq!(hrn(1024, "kib"), "1K");
assert_eq!(hrn(1024 * 512, "kib"), "512K");
assert_eq!(hrn(1024 * 1024, "kib"), "1024K");
assert_eq!(hrn(1024 * 1000 * 1000 * 20, "kib"), "20000000K");
assert_eq!(hrn(1024 * 1024 * 1000 * 20, "mib"), "20000M");
assert_eq!(hrn(1024 * 1024 * 1024 * 20, "gib"), "20G");
}
#[cfg(test)]
fn build_draw_data(disp: &DisplayData, size: u32) -> (DrawData<'_>, DisplayNode) {
let n = DisplayNode {
name: PathBuf::from("/short"),
size: 2_u64.pow(size),
children: vec![],
};
let first_size_bar = repeat_n(BLOCKS[0], 13).collect();
let dd = DrawData {
indent: "".into(),
percent_bar: first_size_bar,
display_data: disp,
};
(dd, n)
}
#[test]
fn test_draw_data() {
let disp = &get_fake_display_data(20);
let (dd, n) = build_draw_data(disp, 12);
let bar = dd.generate_bar(&n, 1);
assert_eq!(bar, "█████████████");
}
#[test]
fn test_draw_data2() {
let disp = &get_fake_display_data(20);
let (dd, n) = build_draw_data(disp, 11);
let bar = dd.generate_bar(&n, 2);
assert_eq!(bar, "███████░░░░░░");
}
#[test]
fn test_draw_data3() {
let mut disp = get_fake_display_data(20);
let (dd, n) = build_draw_data(&disp, 11);
let bar = dd.generate_bar(&n, 3);
assert_eq!(bar, "███████▒▒▒▒▒▒");
disp.initial.bars_on_right = true;
let (dd, n) = build_draw_data(&disp, 11);
let bar = dd.generate_bar(&n, 3);
assert_eq!(bar, "▒▒▒▒▒▒███████")
}
#[test]
fn test_draw_data4() {
let disp = &get_fake_display_data(20);
let (dd, n) = build_draw_data(disp, 10);
// After 4 we have no more levels of shading so 4+ is the same
let bar = dd.generate_bar(&n, 4);
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
let bar = dd.generate_bar(&n, 5);
assert_eq!(bar, "████▓▓▓▓▓▓▓▓▓");
}
#[test]
fn test_get_pretty_file_modified_time() {
// Create a timestamp for 2023-07-12 00:00:00 in local time
let local_dt = Local.with_ymd_and_hms(2023, 7, 12, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
// Format expected output
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test another timestamp
let local_dt = Local.with_ymd_and_hms(2020, 1, 1, 12, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test timestamp for epoch start (1970-01-01T00:00:00)
let local_dt = Local.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
// Test a future timestamp
let local_dt = Local.with_ymd_and_hms(2030, 12, 25, 6, 30, 0).unwrap();
let timestamp = local_dt.timestamp();
let expected_output = local_dt.format("%Y-%m-%dT%H:%M:%S").to_string();
assert_eq!(get_pretty_file_modified_time(timestamp), expected_output);
assert_eq!(human_readable_number(1), "1B");
assert_eq!(human_readable_number(956), "956B");
assert_eq!(human_readable_number(1004), "1004B");
assert_eq!(human_readable_number(1024), "1.0K");
assert_eq!(human_readable_number(1536), "1.5K");
assert_eq!(human_readable_number(1024 * 512), "512K");
assert_eq!(human_readable_number(1024 * 1024), "1.0M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 - 1), "1023M");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 20), "20G");
assert_eq!(human_readable_number(1024 * 1024 * 1024 * 1024), "1.0T");
}
}

View File

@@ -1,58 +1,47 @@
use std::cell::RefCell;
use std::cmp::Ordering;
use std::path::PathBuf;
use serde::ser::SerializeStruct;
use serde::{Serialize, Serializer};
use crate::display::human_readable_number;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
#[derive(Debug, Eq, Clone)]
pub struct DisplayNode {
// Note: the order of fields is important here, for PartialEq and PartialOrd
pub name: PathBuf, //todo: consider moving to a string?
pub size: u64,
pub name: PathBuf,
pub children: Vec<DisplayNode>,
}
impl Ord for DisplayNode {
fn cmp(&self, other: &Self) -> Ordering {
if self.size == other.size {
self.name.cmp(&other.name)
} else {
self.size.cmp(&other.size)
}
}
}
impl PartialOrd for DisplayNode {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl PartialEq for DisplayNode {
fn eq(&self, other: &Self) -> bool {
self.name == other.name && self.size == other.size && self.children == other.children
}
}
impl DisplayNode {
pub fn num_siblings(&self) -> u64 {
self.children.len() as u64
}
pub fn get_children_from_node(&self, is_reversed: bool) -> impl Iterator<Item = &DisplayNode> {
pub fn get_children_from_node(&self, is_reversed: bool) -> impl Iterator<Item = DisplayNode> {
// we box to avoid the clippy lint warning
let out: Box<dyn Iterator<Item = &DisplayNode>> = if is_reversed {
Box::new(self.children.iter().rev())
let out: Box<dyn Iterator<Item = DisplayNode>> = if is_reversed {
Box::new(self.children.clone().into_iter().rev())
} else {
Box::new(self.children.iter())
Box::new(self.children.clone().into_iter())
};
out
}
}
// Only used for -j 'json' flag combined with -o 'output_type' flag
// Used to pass the output_type into the custom Serde serializer
thread_local! {
pub static OUTPUT_TYPE: RefCell<String> = const { RefCell::new(String::new()) };
}
/*
We need the custom Serialize in case someone uses the -o flag to pass a custom output type in
(show size in MB / GB etc.).
Sadly this also necessitates a global variable OUTPUT_TYPE, as we cannot pass the output_type flag
into the serialize method
*/
impl Serialize for DisplayNode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let readable_size = OUTPUT_TYPE
.with(|output_type| human_readable_number(self.size, output_type.borrow().as_str()));
let mut state = serializer.serialize_struct("DisplayNode", 2)?;
state.serialize_field("size", &(readable_size))?;
state.serialize_field("name", &self.name)?;
state.serialize_field("children", &self.children)?;
state.end()
}
}
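Editor's note: a stripped-down sketch of the thread_local-plus-custom-Serializer pattern the comment above describes, using serde and serde_json (assumed available as dependencies). The type and field names here are illustrative, not dust's.
// Illustrative sketch, not dust's types: stash a flag in a thread_local so a
// custom Serialize impl can read it, since serialize() takes no extra arguments.
use std::cell::RefCell;
use serde::ser::{Serialize, SerializeStruct, Serializer};
thread_local! {
    static UNIT: RefCell<String> = RefCell::new(String::new());
}
struct Size(u64);
impl Serialize for Size {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // read the flag the caller stored before serializing
        let unit = UNIT.with(|u| u.borrow().clone());
        let text = if unit == "kb" {
            format!("{}K", self.0 / 1000)
        } else {
            format!("{}B", self.0)
        };
        let mut state = serializer.serialize_struct("Size", 1)?;
        state.serialize_field("size", &text)?;
        state.end()
    }
}
fn main() {
    UNIT.with(|u| *u.borrow_mut() = "kb".to_string());
    // prints {"size":"4K"}
    println!("{}", serde_json::to_string(&Size(4096)).unwrap());
}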

View File

@@ -1,221 +1,174 @@
use stfu8::encode_u8;
use crate::display::get_printable_name;
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node;
use std::collections::BinaryHeap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;
pub struct AggregateData {
pub min_size: Option<usize>,
pub only_dir: bool,
pub only_file: bool,
pub number_of_lines: usize,
pub depth: usize,
pub using_a_filter: bool,
pub short_paths: bool,
pub fn get_by_depth(top_level_nodes: Vec<Node>, n: usize) -> Option<DisplayNode> {
if top_level_nodes.is_empty() {
// perhaps change this, bring back Error object?
return None;
}
let root = get_new_root(top_level_nodes);
Some(build_by_depth(&root, n - 1))
}
pub fn get_biggest(
top_level_nodes: Vec<Node>,
display_data: AggregateData,
by_filetime: &Option<FileTime>,
keep_collapsed: HashSet<PathBuf>,
) -> DisplayNode {
n: usize,
using_a_filter: bool,
) -> Option<DisplayNode> {
if top_level_nodes.is_empty() {
// perhaps change this, bring back Error object?
return None;
}
let mut heap = BinaryHeap::new();
let number_top_level_nodes = top_level_nodes.len();
let root;
let root = get_new_root(top_level_nodes);
let mut allowed_nodes = HashSet::new();
if number_top_level_nodes == 0 {
root = total_node_builder(0, vec![])
} else if number_top_level_nodes > 1 {
let size = if by_filetime.is_some() {
top_level_nodes
.iter()
.map(|node| node.size)
.max()
.unwrap_or(0)
} else {
top_level_nodes.iter().map(|node| node.size).sum()
};
allowed_nodes.insert(&root.name);
heap = add_children(using_a_filter, &root, heap);
let nodes = handle_duplicate_top_level_names(top_level_nodes, display_data.short_paths);
root = total_node_builder(size, nodes);
heap = always_add_children(&display_data, &root, heap);
} else {
root = top_level_nodes.into_iter().next().unwrap();
heap = add_children(&display_data, &root, heap);
}
fill_remaining_lines(heap, &root, display_data, keep_collapsed)
}
fn total_node_builder(size: u64, children: Vec<Node>) -> Node {
Node {
name: PathBuf::from("(total)"),
size,
children,
inode_device: None,
depth: 0,
}
}
pub fn fill_remaining_lines<'a>(
mut heap: BinaryHeap<&'a Node>,
root: &'a Node,
display_data: AggregateData,
keep_collapsed: HashSet<PathBuf>,
) -> DisplayNode {
let mut allowed_nodes = HashMap::new();
while allowed_nodes.len() < display_data.number_of_lines {
for _ in number_top_level_nodes..n {
let line = heap.pop();
match line {
Some(line) => {
// If we are not doing only_file OR if we are doing
// only_file and it has no children (ie is a file not a dir)
if !display_data.only_file || line.children.is_empty() {
allowed_nodes.insert(line.name.as_path(), line);
}
if !keep_collapsed.contains(&line.name) {
heap = add_children(&display_data, line, heap);
}
allowed_nodes.insert(&line.name);
heap = add_children(using_a_filter, line, heap);
}
None => break,
}
}
recursive_rebuilder(&allowed_nodes, &root)
}
if display_data.only_file {
flat_rebuilder(allowed_nodes, root)
pub fn get_all_file_types(top_level_nodes: Vec<Node>, n: usize) -> Option<DisplayNode> {
let mut map: HashMap<String, DisplayNode> = HashMap::new();
build_by_all_file_types(top_level_nodes, &mut map);
let mut by_types: Vec<DisplayNode> = map.into_iter().map(|(_k, v)| v).collect();
by_types.sort();
by_types.reverse();
let displayed = if by_types.len() <= n {
by_types
} else {
recursive_rebuilder(&allowed_nodes, root)
}
let (displayed, rest) = by_types.split_at(if n > 1 { n - 1 } else { 1 });
let remaining = DisplayNode {
name: PathBuf::from("(others)"),
size: rest.iter().map(|a| a.size).sum(),
children: vec![],
};
let mut displayed = displayed.to_vec();
displayed.push(remaining);
displayed
};
let result = DisplayNode {
name: PathBuf::from("(total)"),
size: displayed.iter().map(|a| a.size).sum(),
children: displayed,
};
Some(result)
}
fn add_children<'a>(
display_data: &AggregateData,
file_or_folder: &'a Node,
heap: BinaryHeap<&'a Node>,
) -> BinaryHeap<&'a Node> {
if display_data.depth > file_or_folder.depth {
always_add_children(display_data, file_or_folder, heap)
} else {
heap
}
}
fn always_add_children<'a>(
display_data: &AggregateData,
file_or_folder: &'a Node,
using_a_filter: bool,
line: &'a Node,
mut heap: BinaryHeap<&'a Node>,
) -> BinaryHeap<&'a Node> {
heap.extend(
file_or_folder
.children
.iter()
.filter(|c| match display_data.min_size {
Some(ms) => c.size > ms as u64,
None => !display_data.using_a_filter || c.name.is_file() || c.size > 0,
})
.filter(|c| {
if display_data.only_dir {
c.name.is_dir()
} else {
true
}
}),
);
if using_a_filter {
line.children.iter().for_each(|c| {
if c.name.is_file() || c.size > 0 {
heap.push(c)
}
});
} else {
line.children.iter().for_each(|c| heap.push(c));
}
heap
}
// Finds children of current; if they are in allowed_nodes, adds them as children to a new DisplayNode
fn recursive_rebuilder(allowed_nodes: &HashMap<&Path, &Node>, current: &Node) -> DisplayNode {
let new_children: Vec<_> = current
.children
.iter()
.filter(|c| allowed_nodes.contains_key(c.name.as_path()))
.map(|c| recursive_rebuilder(allowed_nodes, c))
.collect();
build_display_node(new_children, current)
fn build_by_all_file_types(top_level_nodes: Vec<Node>, counter: &mut HashMap<String, DisplayNode>) {
for node in top_level_nodes {
if node.name.is_file() {
let ext = node.name.extension();
let key: String = match ext {
Some(e) => ".".to_string() + &e.to_string_lossy(),
None => "(no extension)".into(),
};
let mut display_node = counter.entry(key.clone()).or_insert(DisplayNode {
name: PathBuf::from(key),
size: 0,
children: vec![],
});
display_node.size += node.size;
}
build_by_all_file_types(node.children, counter)
}
}
// Applies all allowed nodes as children to current node
fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> DisplayNode {
let new_children: Vec<DisplayNode> = allowed_nodes
.into_values()
.map(|v| DisplayNode {
name: v.name.clone(),
size: v.size,
children: vec![],
})
.collect::<Vec<DisplayNode>>();
build_display_node(new_children, current)
}
fn build_by_depth(node: &Node, depth: usize) -> DisplayNode {
let new_children = {
if depth == 0 {
vec![]
} else {
let mut new_children: Vec<_> = node
.children
.iter()
.map(|c| build_by_depth(c, depth - 1))
.collect();
new_children.sort();
new_children.reverse();
new_children
}
};
fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
DisplayNode {
name: current.name.clone(),
size: current.size,
name: node.name.clone(),
size: node.size,
children: new_children,
}
}
fn names_have_dup(top_level_nodes: &Vec<Node>) -> bool {
let mut stored = HashSet::new();
for node in top_level_nodes {
let name = get_printable_name(&node.name, true);
if stored.contains(&name) {
return true;
fn get_new_root(top_level_nodes: Vec<Node>) -> Node {
if top_level_nodes.len() > 1 {
let total_size = top_level_nodes.iter().map(|node| node.size).sum();
Node {
name: PathBuf::from("(total)"),
size: total_size,
children: top_level_nodes,
inode_device: None,
}
stored.insert(name);
}
false
}
fn handle_duplicate_top_level_names(top_level_nodes: Vec<Node>, short_paths: bool) -> Vec<Node> {
// If we have top level names that are the same - we need to tweak them:
if short_paths && names_have_dup(&top_level_nodes) {
let mut new_top_nodes = top_level_nodes.clone();
let mut dir_walk_up_count = 0;
while names_have_dup(&new_top_nodes) && dir_walk_up_count < 10 {
dir_walk_up_count += 1;
let mut newer = vec![];
for node in new_top_nodes.iter() {
let mut folders = node.name.iter().rev();
// Get parent folder (if second time round get grandparent and so on)
for _ in 0..dir_walk_up_count {
folders.next();
}
match folders.next() {
// Add (parent_name) to path of Node
Some(data) => {
let parent = encode_u8(data.as_encoded_bytes());
let current_node = node.name.display();
let n = Node {
name: PathBuf::from(format!("{current_node}({parent})")),
size: node.size,
children: node.children.clone(),
inode_device: node.inode_device,
depth: node.depth,
};
newer.push(n)
}
// Node does not have a parent
None => newer.push(node.clone()),
}
}
new_top_nodes = newer;
}
new_top_nodes
} else {
top_level_nodes
top_level_nodes.into_iter().next().unwrap()
}
}
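Editor's note: a simplified sketch of the duplicate-name handling described above. The real code walks further up the tree on repeated clashes and STFU-8-encodes the parent; this version only looks one level up and uses toy paths.
// Illustrative sketch: when two top-level entries would print the same short
// name, append "(parent)" to disambiguate them.
use std::collections::HashSet;
use std::path::{Path, PathBuf};
fn short_name(p: &Path) -> String {
    p.file_name()
        .map(|n| n.to_string_lossy().into_owned())
        .unwrap_or_default()
}
fn disambiguate(paths: &[PathBuf]) -> Vec<String> {
    let shorts: Vec<String> = paths.iter().map(|p| short_name(p)).collect();
    let unique: HashSet<String> = shorts.iter().cloned().collect();
    let has_dup = unique.len() != shorts.len();
    paths
        .iter()
        .zip(shorts)
        .map(|(p, s)| {
            if has_dup {
                // walk one level up and show that parent in parentheses
                let parent = p
                    .parent()
                    .and_then(Path::file_name)
                    .map(|n| n.to_string_lossy().into_owned())
                    .unwrap_or_default();
                format!("{s}({parent})")
            } else {
                s
            }
        })
        .collect()
}
fn main() {
    let paths = [PathBuf::from("/home/andy/src"), PathBuf::from("/work/andy2/src")];
    assert_eq!(disambiguate(&paths), ["src(andy)", "src(andy2)"]);
}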
fn recursive_rebuilder<'a>(
allowed_nodes: &'a HashSet<&PathBuf>,
current: &Node,
) -> Option<DisplayNode> {
let mut new_children: Vec<_> = current
.children
.iter()
.filter_map(|c| {
if allowed_nodes.contains(&c.name) {
recursive_rebuilder(allowed_nodes, c)
} else {
None
}
})
.collect();
new_children.sort();
new_children.reverse();
let newnode = DisplayNode {
name: current.name.clone(),
size: current.size,
children: new_children,
};
Some(newnode)
}
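Editor's note: both versions of get_biggest above share the same selection loop: keep a max-heap of candidates ordered by size, pop the biggest, record it, then push its children so they compete for the remaining lines. A toy, self-contained sketch (Item is illustrative, not dust's Node):
use std::collections::BinaryHeap;
// size is the first field, so the derived Ord orders the heap by size first.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct Item {
    size: u64,
    name: &'static str,
    children: Vec<Item>,
}
fn top_n(root: &Item, n: usize) -> Vec<&'static str> {
    let mut heap: BinaryHeap<&Item> = root.children.iter().collect();
    let mut picked = Vec::new();
    while picked.len() < n {
        match heap.pop() {
            Some(item) => {
                picked.push(item.name);
                // children of the picked entry now compete for the remaining slots
                heap.extend(item.children.iter());
            }
            None => break,
        }
    }
    picked
}
fn main() {
    let root = Item {
        size: 10,
        name: "root",
        children: vec![
            Item {
                size: 7,
                name: "big_dir",
                children: vec![Item { size: 6, name: "big_file", children: vec![] }],
            },
            Item { size: 3, name: "small_dir", children: vec![] },
        ],
    };
    assert_eq!(top_n(&root, 3), ["big_dir", "big_file", "small_dir"]);
}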

View File

@@ -1,89 +0,0 @@
use crate::display_node::DisplayNode;
use crate::node::FileTime;
use crate::node::Node;
use std::collections::HashMap;
use std::ffi::OsStr;
use std::path::PathBuf;
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct ExtensionNode<'a> {
size: u64,
extension: Option<&'a OsStr>,
}
pub fn get_all_file_types(
top_level_nodes: &[Node],
n: usize,
by_filetime: &Option<FileTime>,
) -> DisplayNode {
let ext_nodes = {
let mut extension_cumulative_sizes = HashMap::new();
build_by_all_file_types(top_level_nodes, &mut extension_cumulative_sizes);
let mut extension_cumulative_sizes: Vec<ExtensionNode<'_>> = extension_cumulative_sizes
.iter()
.map(|(&extension, &size)| ExtensionNode { extension, size })
.collect();
extension_cumulative_sizes.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
extension_cumulative_sizes
};
let mut ext_nodes_iter = ext_nodes.iter();
// First, collect the first N - 1 nodes...
let mut displayed: Vec<DisplayNode> = ext_nodes_iter
.by_ref()
.take(if n > 1 { n - 1 } else { 1 })
.map(|node| DisplayNode {
name: PathBuf::from(
node.extension
.map(|ext| format!(".{}", ext.to_string_lossy()))
.unwrap_or_else(|| "(no extension)".to_owned()),
),
size: node.size,
children: vec![],
})
.collect();
// ...then, aggregate the remaining nodes (if any) into a single "(others)" node
if ext_nodes_iter.len() > 0 {
let actual_size = if by_filetime.is_some() {
ext_nodes_iter.map(|node| node.size).max().unwrap_or(0)
} else {
ext_nodes_iter.map(|node| node.size).sum()
};
displayed.push(DisplayNode {
name: PathBuf::from("(others)"),
size: actual_size,
children: vec![],
});
}
let actual_size: u64 = if by_filetime.is_some() {
displayed.iter().map(|node| node.size).max().unwrap_or(0)
} else {
displayed.iter().map(|node| node.size).sum()
};
DisplayNode {
name: PathBuf::from("(total)"),
size: actual_size,
children: displayed,
}
}
fn build_by_all_file_types<'a>(
top_level_nodes: &'a [Node],
counter: &mut HashMap<Option<&'a OsStr>, u64>,
) {
for node in top_level_nodes {
if node.name.is_file() {
let ext = node.name.extension();
let cumulative_size = counter.entry(ext).or_default();
*cumulative_size += node.size;
}
build_by_all_file_types(&node.children, counter)
}
}
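Editor's note: a compact sketch of the per-extension roll-up both versions above implement: sum sizes into a map keyed by extension, sort descending, keep the first n - 1 buckets and fold the rest into "(others)". Inputs are toy values and the "(total)" wrapper node is omitted.
use std::collections::HashMap;
use std::path::Path;
// Illustrative sketch: group file sizes by extension and cap the number of rows.
fn by_extension(files: &[(&str, u64)], n: usize) -> Vec<(String, u64)> {
    let mut sums: HashMap<String, u64> = HashMap::new();
    for &(name, size) in files {
        let key = Path::new(name)
            .extension()
            .map(|e| format!(".{}", e.to_string_lossy()))
            .unwrap_or_else(|| "(no extension)".into());
        *sums.entry(key).or_default() += size;
    }
    let mut rows: Vec<(String, u64)> = sums.into_iter().collect();
    rows.sort_by(|a, b| b.1.cmp(&a.1));
    if rows.len() > n {
        let keep = if n > 1 { n - 1 } else { 1 };
        let rest: u64 = rows[keep..].iter().map(|r| r.1).sum();
        rows.truncate(keep);
        rows.push(("(others)".to_string(), rest));
    }
    rows
}
fn main() {
    let files = [("a.png", 30), ("b.png", 20), ("c.rs", 15), ("d.txt", 8), ("Makefile", 5)];
    // prints [(".png", 50), (".rs", 15), ("(others)", 13)]
    println!("{:?}", by_extension(&files, 3));
}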

View File

@@ -1,451 +1,317 @@
mod cli;
mod config;
#[macro_use]
extern crate clap;
extern crate rayon;
extern crate regex;
extern crate unicode_width;
use std::collections::HashSet;
use std::process;
use self::display::draw_it;
use clap::{App, AppSettings, Arg};
use dir_walker::walk_it;
use dir_walker::WalkData;
use filter::{get_all_file_types, get_biggest, get_by_depth};
use regex::Regex;
use std::cmp::max;
use std::path::PathBuf;
use terminal_size::{terminal_size, Height, Width};
use utils::get_filesystem_devices;
use utils::simplify_dir_names;
mod dir_walker;
mod display;
mod display_node;
mod filter;
mod filter_type;
mod node;
mod platform;
mod progress;
mod utils;
use crate::cli::Cli;
use crate::config::Config;
use crate::display_node::DisplayNode;
use crate::progress::RuntimeErrors;
use clap::Parser;
use dir_walker::WalkData;
use display::InitialDisplayData;
use filter::AggregateData;
use progress::PIndicator;
use regex::Error;
use std::collections::HashSet;
use std::env;
use std::fs::read_to_string;
use std::io;
use std::panic;
use std::process;
use std::sync::Arc;
use std::sync::Mutex;
use sysinfo::{System, SystemExt};
use utils::canonicalize_absolute_path;
use self::display::draw_it;
use config::get_config;
use dir_walker::walk_it;
use display_node::OUTPUT_TYPE;
use filter::get_biggest;
use filter_type::get_all_file_types;
use regex::Regex;
use std::cmp::max;
use std::path::PathBuf;
use terminal_size::{Height, Width, terminal_size};
use utils::get_filesystem_devices;
use utils::simplify_dir_names;
static DEFAULT_NUMBER_OF_LINES: usize = 30;
static DEFAULT_TERMINAL_WIDTH: usize = 80;
fn should_init_color(no_color: bool, force_color: bool) -> bool {
if force_color {
return true;
}
#[cfg(windows)]
fn init_color(no_color: bool) -> bool {
// If no_color is already set, do not print a warning message
if no_color {
return false;
}
// check if NO_COLOR is set
// https://no-color.org/
if env::var_os("NO_COLOR").is_some() {
return false;
}
if terminal_size().is_none() {
// we are not in a terminal, color may not be needed
return false;
}
// we are in a terminal
#[cfg(windows)]
{
true
} else {
// Required for windows 10
// Fails to resolve for windows 8 so disable color
match ansi_term::enable_ansi_support() {
Ok(_) => true,
Ok(_) => no_color,
Err(_) => {
eprintln!("This version of Windows does not support ANSI colors");
false
eprintln!(
"This version of Windows does not support ANSI colors, setting no_color flag"
);
true
}
}
}
#[cfg(not(windows))]
{
true
}
}
#[cfg(not(windows))]
fn init_color(no_color: bool) -> bool {
no_color
}
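Editor's note: a sketch of the color decision described by the comments above, assuming only std; std's IsTerminal stands in for the terminal_size() probe, and the Windows ANSI-enabling step is omitted.
use std::env;
use std::io::IsTerminal;
// Illustrative sketch: decide whether to emit ANSI colors.
fn want_color(no_color_flag: bool, force_color_flag: bool) -> bool {
    if force_color_flag {
        return true;
    }
    // honour the NO_COLOR convention: https://no-color.org/
    if no_color_flag || env::var_os("NO_COLOR").is_some() {
        return false;
    }
    // not attached to a terminal (piped / redirected): skip colors
    std::io::stdout().is_terminal()
}
fn main() {
    println!("colors on: {}", want_color(false, false));
}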
fn get_height_of_terminal() -> usize {
terminal_size()
// Windows CI runners detect a terminal height of 0
.map(|(_, Height(h))| max(h.into(), DEFAULT_NUMBER_OF_LINES))
.unwrap_or(DEFAULT_NUMBER_OF_LINES)
- 10
// Windows CI runners detect a terminal height of 0
if let Some((Width(_w), Height(h))) = terminal_size() {
max(h as usize, DEFAULT_NUMBER_OF_LINES) - 10
} else {
DEFAULT_NUMBER_OF_LINES - 10
}
}
#[cfg(windows)]
fn get_width_of_terminal() -> usize {
terminal_size()
.map(|(Width(w), _)| match cfg!(windows) {
// Windows CI runners detect a very low terminal width
true => max(w.into(), DEFAULT_TERMINAL_WIDTH),
false => w.into(),
})
.unwrap_or(DEFAULT_TERMINAL_WIDTH)
// Windows CI runners detect a very low terminal width
if let Some((Width(w), Height(_h))) = terminal_size() {
max(w as usize, DEFAULT_TERMINAL_WIDTH)
} else {
DEFAULT_TERMINAL_WIDTH
}
}
fn get_regex_value(maybe_value: Option<&Vec<String>>) -> Vec<Regex> {
maybe_value
.unwrap_or(&Vec::new())
.iter()
.map(|reg| {
Regex::new(reg).unwrap_or_else(|err| {
eprintln!("Ignoring bad value for regex {err:?}");
process::exit(1)
})
})
.collect()
#[cfg(not(windows))]
fn get_width_of_terminal() -> usize {
if let Some((Width(w), Height(_h))) = terminal_size() {
w as usize
} else {
DEFAULT_TERMINAL_WIDTH
}
}
fn get_regex_value(maybe_value: Option<&str>) -> Option<Regex> {
match maybe_value {
Some(v) => match Regex::new(v) {
Ok(r) => Some(r),
Err(e) => {
eprintln!("Ignoring bad value for regex {:?}", e);
process::exit(1);
}
},
None => None,
}
}
fn main() {
let options = Cli::parse();
let config = get_config(options.config.as_ref());
let default_height = get_height_of_terminal();
let def_num_str = default_height.to_string();
let errors = RuntimeErrors::default();
let error_listen_for_ctrlc = Arc::new(Mutex::new(errors));
let errors_for_rayon = error_listen_for_ctrlc.clone();
let options = App::new("Dust")
.about("Like du but more intuitive")
.version(crate_version!())
.setting(AppSettings::TrailingVarArg)
.arg(
Arg::with_name("depth")
.short("d")
.long("depth")
.help("Depth to show")
.takes_value(true)
.conflicts_with("number_of_lines"),
)
.arg(
Arg::with_name("number_of_lines")
.short("n")
.long("number-of-lines")
.help("Number of lines of output to show. This is Height, (but h is help)")
.takes_value(true)
.default_value(def_num_str.as_ref()),
)
.arg(
Arg::with_name("display_full_paths")
.short("p")
.long("full-paths")
.help("Subdirectories will not have their path shortened"),
)
.arg(
Arg::with_name("ignore_directory")
.short("X")
.long("ignore-directory")
.takes_value(true)
.number_of_values(1)
.multiple(true)
.help("Exclude any file or directory with this name"),
)
.arg(
Arg::with_name("limit_filesystem")
.short("x")
.long("limit-filesystem")
.help("Only count the files and directories on the same filesystem as the supplied directory"),
)
.arg(
Arg::with_name("display_apparent_size")
.short("s")
.long("apparent-size")
.help("Use file length instead of blocks"),
)
.arg(
Arg::with_name("reverse")
.short("r")
.long("reverse")
.help("Print tree upside down (biggest highest)"),
)
.arg(
Arg::with_name("no_colors")
.short("c")
.long("no-colors")
.help("No colors will be printed (normally largest directories are colored)"),
)
.arg(
Arg::with_name("no_bars")
.short("b")
.long("no-percent-bars")
.help("No percent bars or percentages will be displayed"),
)
.arg(
Arg::with_name("by_filecount")
.short("f")
.long("filecount")
.help("Directory 'size' is number of child files/dirs not disk size"),
)
.arg(
Arg::with_name("ignore_hidden")
.short("i") // Do not use 'h' this is used by 'help'
.long("ignore_hidden") //TODO: fix change - -> _
.help("Do not display hidden files"),
)
.arg(
Arg::with_name("invert_filter")
.short("v")
.long("invert-filter")
.takes_value(true)
.number_of_values(1)
.multiple(true)
.conflicts_with("filter")
.conflicts_with("types")
.conflicts_with("depth")
.help("Exclude files matching this regex. To ignore png files type: -v \"\\.png$\" "),
)
.arg(
Arg::with_name("filter")
.short("e")
.long("filter")
.takes_value(true)
.number_of_values(1)
.multiple(true)
.conflicts_with("types")
.conflicts_with("depth")
.help("Only include files matching this regex. For png files type: -e \"\\.png$\" "),
)
.arg(
Arg::with_name("types")
.short("t")
.long("file_types")
.conflicts_with("depth")
.help("show only these file types"),
)
.arg(
Arg::with_name("width")
.short("w")
.long("terminal_width")
.takes_value(true)
.number_of_values(1)
.help("Specify width of output overriding the auto detection of terminal width"),
)
.arg(Arg::with_name("inputs").multiple(true).default_value("."))
.get_matches();
ctrlc::set_handler(move || {
println!("\nAborting");
process::exit(1);
})
.expect("Error setting Ctrl-C handler");
let target_dirs = options
.values_of("inputs")
.expect("Should be a default value here")
.collect();
let target_dirs = match config.get_files_from(&options) {
Some(path) => {
if path == "-" {
let mut targets_to_add = io::stdin()
.lines()
.map_while(Result::ok)
.collect::<Vec<String>>();
let summarize_file_types = options.is_present("types");
if targets_to_add.is_empty() {
eprintln!("No input provided, defaulting to current directory");
targets_to_add.push(".".to_owned());
}
targets_to_add
} else {
// read file
match read_to_string(path) {
Ok(file_content) => file_content.lines().map(|x| x.to_string()).collect(),
Err(e) => {
eprintln!("Error reading file: {e}");
vec![".".to_owned()]
}
}
}
}
None => match options.params {
Some(ref values) => values.clone(),
None => vec![".".to_owned()],
},
};
let maybe_filter = get_regex_value(options.value_of("filter"));
let maybe_invert_filter = get_regex_value(options.value_of("invert_filter"));
let summarize_file_types = options.file_types;
let filter_regexs = get_regex_value(options.filter.as_ref());
let invert_filter_regexs = get_regex_value(options.invert_filter.as_ref());
let terminal_width: usize = match options.terminal_width {
Some(val) => val,
None => get_width_of_terminal(),
};
let depth = config.get_depth(&options);
// If depth is set, then we set the default number_of_lines to be max
// instead of screen height
let number_of_lines = match options.number_of_lines {
Some(val) => val,
None => {
if depth != usize::MAX {
usize::MAX
} else {
get_height_of_terminal()
}
let number_of_lines = match value_t!(options.value_of("number_of_lines"), usize) {
Ok(v) => v,
Err(_) => {
eprintln!("Ignoring bad value for number_of_lines");
default_height
}
};
let is_colors = should_init_color(
config.get_no_colors(&options),
config.get_force_colors(&options),
);
let ignore_directories = match options.ignore_directory {
Some(ref values) => values
.iter()
.map(PathBuf::from)
.map(canonicalize_absolute_path)
.collect::<Vec<PathBuf>>(),
None => vec![],
let terminal_width = match value_t!(options.value_of("width"), usize) {
Ok(v) => v,
Err(_) => get_width_of_terminal(),
};
let ignore_from_file_result = match options.ignore_all_in_file {
Some(ref val) => read_to_string(val)
.unwrap()
.lines()
.map(Regex::new)
.collect::<Vec<Result<Regex, Error>>>(),
None => vec![],
let depth = options.value_of("depth").and_then(|depth| {
depth
.parse::<usize>()
.map(|v| v + 1)
.map_err(|_| eprintln!("Ignoring bad value for depth"))
.ok()
});
let no_colors = init_color(options.is_present("no_colors"));
let use_apparent_size = options.is_present("display_apparent_size");
let ignore_directories: Vec<PathBuf> = options
.values_of("ignore_directory")
.map(|i| i.map(PathBuf::from).collect())
.unwrap_or_default();
let by_filecount = options.is_present("by_filecount");
let ignore_hidden = options.is_present("ignore_hidden");
let limit_filesystem = options.is_present("limit_filesystem");
let simplified_dirs = simplify_dir_names(target_dirs);
let allowed_filesystems = {
if limit_filesystem {
get_filesystem_devices(simplified_dirs.iter())
} else {
HashSet::new()
}
};
let ignore_from_file = ignore_from_file_result
.into_iter()
.filter_map(|x| x.ok())
.collect::<Vec<Regex>>();
let invert_filter_regexs = invert_filter_regexs
.into_iter()
.chain(ignore_from_file)
.collect::<Vec<Regex>>();
let by_filecount = options.filecount;
let by_filetime = config.get_filetime(&options);
let limit_filesystem = options.limit_filesystem;
let follow_links = options.dereference_links;
let allowed_filesystems = if limit_filesystem {
get_filesystem_devices(&target_dirs, follow_links)
} else {
Default::default()
};
let simplified_dirs = simplify_dir_names(&target_dirs);
let ignored_full_path: HashSet<PathBuf> = ignore_directories
.into_iter()
.flat_map(|x| simplified_dirs.iter().map(move |d| d.join(&x)))
.flat_map(|x| simplified_dirs.iter().map(move |d| d.join(x.clone())))
.collect();
let output_format = config.get_output_format(&options);
let ignore_hidden = config.get_ignore_hidden(&options);
let mut indicator = PIndicator::build_me();
if !config.get_disable_progress(&options) {
indicator.spawn(output_format.clone())
}
let keep_collapsed: HashSet<PathBuf> = match options.collapse {
Some(ref collapse) => {
let mut combined_dirs = HashSet::new();
for collapse_dir in collapse {
for target_dir in target_dirs.iter() {
combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
}
}
combined_dirs
}
None => HashSet::new(),
};
let filter_modified_time = config.get_modified_time_operator(&options);
let filter_accessed_time = config.get_accessed_time_operator(&options);
let filter_changed_time = config.get_changed_time_operator(&options);
let walk_data = WalkData {
ignore_directories: ignored_full_path,
filter_regex: &filter_regexs,
invert_filter_regex: &invert_filter_regexs,
filter_regex: maybe_filter,
invert_filter_regex: maybe_invert_filter,
allowed_filesystems,
filter_modified_time,
filter_accessed_time,
filter_changed_time,
use_apparent_size: config.get_apparent_size(&options),
use_apparent_size,
by_filecount,
by_filetime: &by_filetime,
ignore_hidden,
follow_links,
progress_data: indicator.data.clone(),
errors: errors_for_rayon,
};
let threads_to_use = config.get_threads(&options);
let stack_size = config.get_custom_stack_size(&options);
let (top_level_nodes, has_errors) = walk_it(simplified_dirs, walk_data);
init_rayon(&stack_size, &threads_to_use).install(|| {
let top_level_nodes = walk_it(simplified_dirs, &walk_data);
let tree = match summarize_file_types {
true => get_all_file_types(&top_level_nodes, number_of_lines, walk_data.by_filetime),
false => {
let agg_data = AggregateData {
min_size: config.get_min_size(&options),
only_dir: config.get_only_dir(&options),
only_file: config.get_only_file(&options),
number_of_lines,
depth,
using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
short_paths: !config.get_full_paths(&options),
};
get_biggest(
top_level_nodes,
agg_data,
walk_data.by_filetime,
keep_collapsed,
)
}
};
// Must have stopped indicator before we print to stderr
indicator.stop();
let print_errors = config.get_print_errors(&options);
let final_errors = walk_data.errors.lock().unwrap();
print_any_errors(print_errors, &final_errors);
if tree.children.is_empty() && !final_errors.file_not_found.is_empty() {
std::process::exit(1)
} else {
print_output(
config,
options,
tree,
walk_data.by_filecount,
is_colors,
terminal_width,
)
}
});
}
fn print_output(
config: Config,
options: Cli,
tree: DisplayNode,
by_filecount: bool,
is_colors: bool,
terminal_width: usize,
) {
let output_format = config.get_output_format(&options);
if config.get_output_json(&options) {
OUTPUT_TYPE.with(|wrapped| {
wrapped.replace(output_format);
});
println!("{}", serde_json::to_string(&tree).unwrap());
} else {
let idd = InitialDisplayData {
short_paths: !config.get_full_paths(&options),
is_reversed: !config.get_reverse(&options),
colors_on: is_colors,
by_filecount,
by_filetime: config.get_filetime(&options),
is_screen_reader: config.get_screen_reader(&options),
output_format,
bars_on_right: config.get_bars_on_right(&options),
};
draw_it(
idd,
&tree,
config.get_no_bars(&options),
terminal_width,
config.get_skip_total(&options),
)
}
}
fn print_any_errors(print_errors: bool, final_errors: &RuntimeErrors) {
if !final_errors.file_not_found.is_empty() {
let err = final_errors
.file_not_found
.iter()
.map(|a| a.as_ref())
.collect::<Vec<&str>>()
.join(", ");
eprintln!("No such file or directory: {err}");
}
if !final_errors.no_permissions.is_empty() {
if print_errors {
let err = final_errors
.no_permissions
.iter()
.map(|a| a.as_ref())
.collect::<Vec<&str>>()
.join(", ");
eprintln!("Did not have permissions for directories: {err}");
} else {
eprintln!(
"Did not have permissions for all directories (add --print-errors to see errors)"
);
}
}
if !final_errors.unknown_error.is_empty() {
let err = final_errors
.unknown_error
.iter()
.map(|a| a.as_ref())
.collect::<Vec<&str>>()
.join(", ");
eprintln!("Unknown Error: {err}");
}
}
fn init_rayon(stack: &Option<usize>, threads: &Option<usize>) -> rayon::ThreadPool {
let stack_size = match stack {
Some(s) => Some(*s),
None => {
// Do not increase the stack size on a 32 bit system, it will fail
if cfg!(target_pointer_width = "32") {
None
} else {
let large_stack = usize::pow(1024, 3);
let mut s = System::new();
s.refresh_memory();
// Larger stack size if possible to handle cases with lots of nested directories
let available = s.available_memory();
if available > (large_stack * threads.unwrap_or(1)).try_into().unwrap() {
Some(large_stack)
} else {
None
}
}
let tree = {
match (depth, summarize_file_types) {
(_, true) => get_all_file_types(top_level_nodes, number_of_lines),
(Some(depth), _) => get_by_depth(top_level_nodes, depth),
(_, _) => get_biggest(
top_level_nodes,
number_of_lines,
options.values_of("filter").is_some()
|| options.value_of("invert_filter").is_some(),
),
}
};
match build_thread_pool(stack_size, threads) {
Ok(pool) => pool,
Err(err) => {
eprintln!("Problem initializing rayon, try: export RAYON_NUM_THREADS=1");
if stack.is_none() && stack_size.is_some() {
// stack parameter was none, try with default stack size
if let Ok(pool) = build_thread_pool(None, threads) {
eprintln!("WARNING: not using large stack size, got error: {err}");
return pool;
}
}
panic!("{err}");
}
if options.is_present("filter") {
println!("Filtering by: {}", options.value_of("filter").unwrap());
}
}
fn build_thread_pool(
stack_size: Option<usize>,
threads: &Option<usize>,
) -> Result<rayon::ThreadPool, rayon::ThreadPoolBuildError> {
let mut pool_builder = rayon::ThreadPoolBuilder::new();
if let Some(stack_size_param) = stack_size {
pool_builder = pool_builder.stack_size(stack_size_param);
}
if let Some(thread_count) = threads {
pool_builder = pool_builder.num_threads(*thread_count);
}
pool_builder.build()
if has_errors {
eprintln!("Did not have permissions for all directories");
}
draw_it(
options.is_present("display_full_paths"),
!options.is_present("reverse"),
no_colors,
options.is_present("no_bars"),
terminal_width,
by_filecount,
tree,
);
}
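Editor's note: a sketch of the rayon pool construction mirroring build_thread_pool above, with a small usage example; rayon is assumed as a dependency and the sysinfo memory probe from init_rayon is left out.
// Illustrative sketch: build a rayon pool with optional stack size and thread count.
fn build_pool(
    stack_size: Option<usize>,
    threads: Option<usize>,
) -> Result<rayon::ThreadPool, rayon::ThreadPoolBuildError> {
    let mut builder = rayon::ThreadPoolBuilder::new();
    if let Some(bytes) = stack_size {
        builder = builder.stack_size(bytes); // larger stacks help with deeply nested dirs
    }
    if let Some(count) = threads {
        builder = builder.num_threads(count);
    }
    builder.build()
}
fn main() {
    // rayon defaults apply for any option left as None
    let pool = build_pool(None, Some(4)).expect("failed to build thread pool");
    let sum: u64 = pool.install(|| (1..=1_000u64).sum());
    assert_eq!(sum, 500_500);
}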

View File

@@ -1,9 +1,8 @@
use crate::dir_walker::WalkData;
use crate::platform::get_metadata;
use crate::utils::is_filtered_out_due_to_file_time;
use crate::utils::is_filtered_out_due_to_invert_regex;
use crate::utils::is_filtered_out_due_to_regex;
use regex::Regex;
use std::cmp::Ordering;
use std::path::PathBuf;
@@ -13,81 +12,48 @@ pub struct Node {
pub size: u64,
pub children: Vec<Node>,
pub inode_device: Option<(u64, u64)>,
pub depth: usize,
}
#[derive(Debug, PartialEq)]
pub enum FileTime {
Modified,
Accessed,
Changed,
}
impl From<crate::cli::FileTime> for FileTime {
fn from(time: crate::cli::FileTime) -> Self {
match time {
crate::cli::FileTime::Modified => Self::Modified,
crate::cli::FileTime::Accessed => Self::Accessed,
crate::cli::FileTime::Changed => Self::Changed,
}
}
}
#[allow(clippy::too_many_arguments)]
pub fn build_node(
dir: PathBuf,
children: Vec<Node>,
filter_regex: &Option<Regex>,
invert_filter_regex: &Option<Regex>,
use_apparent_size: bool,
is_symlink: bool,
is_file: bool,
depth: usize,
walk_data: &WalkData,
by_filecount: bool,
) -> Option<Node> {
let use_apparent_size = walk_data.use_apparent_size;
let by_filecount = walk_data.by_filecount;
let by_filetime = &walk_data.by_filetime;
match get_metadata(&dir, use_apparent_size) {
Some(data) => {
let inode_device = if is_symlink && !use_apparent_size {
None
} else {
data.1
};
get_metadata(
&dir,
use_apparent_size,
walk_data.follow_links && is_symlink,
)
.map(|data| {
let inode_device = data.1;
let size = if is_filtered_out_due_to_regex(filter_regex, &dir)
|| is_filtered_out_due_to_invert_regex(invert_filter_regex, &dir)
|| (is_symlink && !use_apparent_size)
|| by_filecount && !is_file
{
0
} else if by_filecount {
1
} else {
data.0
};
let size = if is_filtered_out_due_to_regex(walk_data.filter_regex, &dir)
|| is_filtered_out_due_to_invert_regex(walk_data.invert_filter_regex, &dir)
|| by_filecount && !is_file
|| [
(&walk_data.filter_modified_time, data.2.0),
(&walk_data.filter_accessed_time, data.2.1),
(&walk_data.filter_changed_time, data.2.2),
]
.iter()
.any(|(filter_time, actual_time)| {
is_filtered_out_due_to_file_time(filter_time, *actual_time)
}) {
0
} else if by_filecount {
1
} else if by_filetime.is_some() {
match by_filetime {
Some(FileTime::Modified) => data.2.0.unsigned_abs(),
Some(FileTime::Accessed) => data.2.1.unsigned_abs(),
Some(FileTime::Changed) => data.2.2.unsigned_abs(),
None => unreachable!(),
}
} else {
data.0
};
Node {
name: dir,
size,
children,
inode_device,
depth,
Some(Node {
name: dir,
size,
children,
inode_device,
})
}
})
None => None,
}
}
impl PartialEq for Node {
@@ -98,10 +64,11 @@ impl PartialEq for Node {
impl Ord for Node {
fn cmp(&self, other: &Self) -> Ordering {
self.size
.cmp(&other.size)
.then_with(|| self.name.cmp(&other.name))
.then_with(|| self.children.cmp(&other.children))
if self.size == other.size {
self.name.cmp(&other.name)
} else {
self.size.cmp(&other.size)
}
}
}

View File

@@ -5,57 +5,20 @@ use std::path::Path;
#[cfg(target_family = "unix")]
fn get_block_size() -> u64 {
// All os specific implementations of MetadataExt seem to define a block as 512 bytes
// All os specific implementations of MetatdataExt seem to define a block as 512 bytes
// https://doc.rust-lang.org/std/os/linux/fs/trait.MetadataExt.html#tymethod.st_blocks
512
}
type InodeAndDevice = (u64, u64);
type FileTime = (i64, i64, i64);
#[cfg(target_family = "unix")]
pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
pub fn get_metadata(d: &Path, use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
use std::os::unix::fs::MetadataExt;
let metadata = if follow_links {
path.as_ref().metadata()
} else {
path.as_ref().symlink_metadata()
};
match metadata {
match d.metadata() {
Ok(md) => {
let file_size = md.len();
if use_apparent_size {
Some((
file_size,
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
Some((md.len(), Some((md.ino(), md.dev()))))
} else {
// On NTFS mounts, the reported block count can be unexpectedly large.
// To avoid overestimating disk usage, cap the allocated size to what the
// file should occupy based on the file system I/O block size (blksize).
// Related: https://github.com/bootandy/dust/issues/295
let blksize = md.blksize();
let target_size = file_size.div_ceil(blksize) * blksize;
let reported_size = md.blocks() * get_block_size();
// File systems can pre-allocate more space for a file than what would be necessary
let pre_allocation_buffer = blksize * 65536;
let max_size = target_size + pre_allocation_buffer;
let allocated_size = if reported_size > max_size {
target_size
} else {
reported_size
};
Some((
allocated_size,
Some((md.ino(), md.dev())),
(md.mtime(), md.atime(), md.ctime()),
))
Some((md.blocks() * get_block_size(), Some((md.ino(), md.dev()))))
}
}
Err(_e) => None,
@@ -63,11 +26,7 @@ pub fn get_metadata<P: AsRef<Path>>(
}
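Editor's note: a worked example of the NTFS size-capping rule explained in the comment above, with made-up numbers (4 KiB I/O block size); this is arithmetic only, not dust's code.
// Illustrative sketch of the capping rule: trust st_blocks unless it exceeds the
// rounded-up file size plus a tolerated pre-allocation buffer.
fn capped_allocated_size(file_size: u64, blksize: u64, blocks_512: u64) -> u64 {
    let target_size = file_size.div_ceil(blksize) * blksize; // what the file should need
    let reported_size = blocks_512 * 512;                    // what st_blocks claims
    let pre_allocation_buffer = blksize * 65536;             // tolerated over-allocation
    if reported_size > target_size + pre_allocation_buffer {
        target_size // block count looks bogus (e.g. an NTFS mount): fall back to target
    } else {
        reported_size
    }
}
fn main() {
    // a 10 KiB file that a quirky mount reports as ~1 GiB of blocks gets capped
    let capped = capped_allocated_size(10 * 1024, 4096, 2 * 1024 * 1024);
    assert_eq!(capped, 12 * 1024); // 10 KiB rounded up to three 4 KiB blocks
}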
#[cfg(target_family = "windows")]
pub fn get_metadata<P: AsRef<Path>>(
path: P,
use_apparent_size: bool,
follow_links: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
pub fn get_metadata(d: &Path, _use_apparent_size: bool) -> Option<(u64, Option<(u64, u64)>)> {
// On Windows, opening the file to get size, file ID and volume can be very
// expensive because 1) it causes a few system calls, and more importantly 2) it can cause
// Windows Defender to scan the file.
@@ -106,7 +65,7 @@ pub fn get_metadata<P: AsRef<Path>>(
use std::io;
use winapi_util::Handle;
fn handle_from_path_limited(path: &Path) -> io::Result<Handle> {
fn handle_from_path_limited<P: AsRef<Path>>(path: P) -> io::Result<Handle> {
use std::fs::OpenOptions;
use std::os::windows::fs::OpenOptionsExt;
const FILE_READ_ATTRIBUTES: u32 = 0x0080;
@@ -131,87 +90,39 @@ pub fn get_metadata<P: AsRef<Path>>(
Ok(Handle::from_file(file))
}
fn get_metadata_expensive(
path: &Path,
use_apparent_size: bool,
) -> Option<(u64, Option<InodeAndDevice>, FileTime)> {
fn get_metadata_expensive(d: &Path) -> Option<(u64, Option<(u64, u64)>)> {
use winapi_util::file::information;
let h = handle_from_path_limited(path).ok()?;
let h = handle_from_path_limited(d).ok()?;
let info = information(&h).ok()?;
if use_apparent_size {
use filesize::PathExt;
Some((
path.size_on_disk().ok()?,
Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
))
} else {
Some((
info.file_size(),
Some((info.file_index(), info.volume_serial_number())),
(
info.last_write_time().unwrap() as i64,
info.last_access_time().unwrap() as i64,
info.creation_time().unwrap() as i64,
),
))
}
Some((
info.file_size(),
Some((info.file_index(), info.volume_serial_number())),
))
}
use std::os::windows::fs::MetadataExt;
let path = path.as_ref();
let metadata = if follow_links {
path.metadata()
} else {
path.symlink_metadata()
};
match metadata {
match d.metadata() {
Ok(ref md) => {
const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20;
const FILE_ATTRIBUTE_READONLY: u32 = 0x01;
const FILE_ATTRIBUTE_HIDDEN: u32 = 0x02;
const FILE_ATTRIBUTE_SYSTEM: u32 = 0x04;
const FILE_ATTRIBUTE_NORMAL: u32 = 0x80;
const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10;
const FILE_ATTRIBUTE_SPARSE_FILE: u32 = 0x00000200;
const FILE_ATTRIBUTE_PINNED: u32 = 0x00080000;
const FILE_ATTRIBUTE_UNPINNED: u32 = 0x00100000;
const FILE_ATTRIBUTE_RECALL_ON_OPEN: u32 = 0x00040000;
const FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS: u32 = 0x00400000;
const FILE_ATTRIBUTE_OFFLINE: u32 = 0x00001000;
// normally FILE_ATTRIBUTE_SPARSE_FILE would be enough, however Windows sometimes likes to mask it out. see: https://stackoverflow.com/q/54560454
const IS_PROBABLY_ONEDRIVE: u32 = FILE_ATTRIBUTE_SPARSE_FILE
| FILE_ATTRIBUTE_PINNED
| FILE_ATTRIBUTE_UNPINNED
| FILE_ATTRIBUTE_RECALL_ON_OPEN
| FILE_ATTRIBUTE_RECALL_ON_DATA_ACCESS
| FILE_ATTRIBUTE_OFFLINE;
const FILE_ATTRIBUTE_ARCHIVE: u32 = 0x20u32;
const FILE_ATTRIBUTE_READONLY: u32 = 0x1u32;
const FILE_ATTRIBUTE_HIDDEN: u32 = 0x2u32;
const FILE_ATTRIBUTE_SYSTEM: u32 = 0x4u32;
const FILE_ATTRIBUTE_NORMAL: u32 = 0x80u32;
const FILE_ATTRIBUTE_DIRECTORY: u32 = 0x10u32;
let attr_filtered = md.file_attributes()
& !(FILE_ATTRIBUTE_HIDDEN | FILE_ATTRIBUTE_READONLY | FILE_ATTRIBUTE_SYSTEM);
if ((attr_filtered & FILE_ATTRIBUTE_ARCHIVE) != 0
|| (attr_filtered & FILE_ATTRIBUTE_DIRECTORY) != 0
|| md.file_attributes() == FILE_ATTRIBUTE_NORMAL)
&& !((attr_filtered & IS_PROBABLY_ONEDRIVE != 0) && use_apparent_size)
if attr_filtered == FILE_ATTRIBUTE_ARCHIVE
|| attr_filtered == FILE_ATTRIBUTE_DIRECTORY
|| md.file_attributes() == FILE_ATTRIBUTE_NORMAL
{
Some((
md.len(),
None,
(
md.last_write_time() as i64,
md.last_access_time() as i64,
md.creation_time() as i64,
),
))
Some((md.len(), None))
} else {
get_metadata_expensive(path, use_apparent_size)
get_metadata_expensive(d)
}
}
_ => get_metadata_expensive(path, use_apparent_size),
_ => get_metadata_expensive(d),
}
}

View File

@@ -1,161 +0,0 @@
use std::{
collections::HashSet,
io::Write,
path::Path,
sync::{
Arc, RwLock,
atomic::{AtomicU8, AtomicUsize, Ordering},
mpsc::{self, RecvTimeoutError, Sender},
},
thread::JoinHandle,
time::Duration,
};
#[cfg(not(target_has_atomic = "64"))]
use portable_atomic::AtomicU64;
#[cfg(target_has_atomic = "64")]
use std::sync::atomic::AtomicU64;
use crate::display::human_readable_number;
/* -------------------------------------------------------------------------- */
pub const ORDERING: Ordering = Ordering::Relaxed;
const SPINNER_SLEEP_TIME: u64 = 100;
const PROGRESS_CHARS: [char; 4] = ['-', '\\', '|', '/'];
const PROGRESS_CHARS_LEN: usize = PROGRESS_CHARS.len();
pub trait ThreadSyncTrait<T> {
fn set(&self, val: T);
fn get(&self) -> T;
}
#[derive(Default)]
pub struct ThreadStringWrapper {
inner: RwLock<String>,
}
impl ThreadSyncTrait<String> for ThreadStringWrapper {
fn set(&self, val: String) {
*self.inner.write().unwrap() = val;
}
fn get(&self) -> String {
(*self.inner.read().unwrap()).clone()
}
}
/* -------------------------------------------------------------------------- */
// creating an enum this way allows for simpler syntax compared to a Mutex or an RwLock
#[allow(non_snake_case)]
pub mod Operation {
pub const INDEXING: u8 = 0;
pub const PREPARING: u8 = 1;
}
#[derive(Default)]
pub struct PAtomicInfo {
pub num_files: AtomicUsize,
pub total_file_size: AtomicU64,
pub state: AtomicU8,
pub current_path: ThreadStringWrapper,
}
impl PAtomicInfo {
pub fn clear_state(&self, dir: &Path) {
self.state.store(Operation::INDEXING, ORDERING);
let dir_name = dir.to_string_lossy().to_string();
self.current_path.set(dir_name);
self.total_file_size.store(0, ORDERING);
self.num_files.store(0, ORDERING);
}
}
#[derive(Default)]
pub struct RuntimeErrors {
pub no_permissions: HashSet<String>,
pub file_not_found: HashSet<String>,
pub unknown_error: HashSet<String>,
pub interrupted_error: i32,
}
/* -------------------------------------------------------------------------- */
fn format_preparing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
let path_in = data.current_path.get();
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
format!("Preparing: {path_in} {size} ... {prog_char}")
}
fn format_indexing_str(prog_char: char, data: &PAtomicInfo, output_display: &str) -> String {
let path_in = data.current_path.get();
let file_count = data.num_files.load(ORDERING);
let size = human_readable_number(data.total_file_size.load(ORDERING), output_display);
let file_str = format!("{file_count} files, {size}");
format!("Indexing: {path_in} {file_str} ... {prog_char}")
}
pub struct PIndicator {
pub thread: Option<(Sender<()>, JoinHandle<()>)>,
pub data: Arc<PAtomicInfo>,
}
impl PIndicator {
pub fn build_me() -> Self {
Self {
thread: None,
data: Arc::new(PAtomicInfo {
..Default::default()
}),
}
}
pub fn spawn(&mut self, output_display: String) {
let data = self.data.clone();
let (stop_handler, receiver) = mpsc::channel::<()>();
let time_info_thread = std::thread::spawn(move || {
let mut progress_char_i: usize = 0;
let mut stderr = std::io::stderr();
let mut msg = "".to_string();
// While the timeout triggers we go round the loop
// If we disconnect or the sender sends its message we exit the while loop
while let Err(RecvTimeoutError::Timeout) =
receiver.recv_timeout(Duration::from_millis(SPINNER_SLEEP_TIME))
{
// Clear the text written by 'write!' & return to the start of the line
let clear = format!("\r{:width$}", " ", width = msg.len());
write!(stderr, "{clear}").unwrap();
let prog_char = PROGRESS_CHARS[progress_char_i];
msg = match data.state.load(ORDERING) {
Operation::INDEXING => format_indexing_str(prog_char, &data, &output_display),
Operation::PREPARING => format_preparing_str(prog_char, &data, &output_display),
_ => panic!("Unknown State"),
};
write!(stderr, "\r{msg}").unwrap();
stderr.flush().unwrap();
progress_char_i += 1;
progress_char_i %= PROGRESS_CHARS_LEN;
}
let clear = format!("\r{:width$}", " ", width = msg.len());
write!(stderr, "{clear}").unwrap();
write!(stderr, "\r").unwrap();
stderr.flush().unwrap();
});
self.thread = Some((stop_handler, time_info_thread))
}
pub fn stop(self) {
if let Some((stop_handler, thread)) = self.thread {
stop_handler.send(()).unwrap();
thread.join().unwrap();
}
}
}
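// Lifecycle sketch (hypothetical caller, assuming the API above): build the
// indicator before walking, spawn it with the chosen size format, and stop it
// once results are ready so the spinner line is cleared from stderr.
#[allow(dead_code)]
fn run_with_indicator() {
    let mut indicator = PIndicator::build_me();
    indicator.spawn("si".to_string()); // "si" is a hypothetical output_display value
    // ... walk the directories, updating indicator.data as files are found ...
    indicator.data.state.store(Operation::PREPARING, ORDERING);
    indicator.stop();
}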

View File

@@ -2,19 +2,16 @@ use platform::get_metadata;
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use crate::config::DAY_SECONDS;
use crate::dir_walker::Operator;
use crate::platform;
use regex::Regex;
pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(dirs.len());
pub fn simplify_dir_names<P: AsRef<Path>>(filenames: Vec<P>) -> HashSet<PathBuf> {
let mut top_level_names: HashSet<PathBuf> = HashSet::with_capacity(filenames.len());
let mut to_remove: Vec<PathBuf> = Vec::with_capacity(filenames.len());
for t in dirs {
for t in filenames {
let top_level_name = normalize_path(t);
let mut can_add = true;
let mut to_remove: Vec<PathBuf> = Vec::new();
for tt in top_level_names.iter() {
if is_a_parent_of(&top_level_name, tt) {
@@ -23,9 +20,9 @@ pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
can_add = false;
}
}
for r in to_remove {
top_level_names.remove(&r);
}
to_remove.sort_unstable();
top_level_names.retain(|tr| to_remove.binary_search(tr).is_err());
to_remove.clear();
if can_add {
top_level_names.insert(top_level_name);
}
@@ -34,24 +31,17 @@ pub fn simplify_dir_names<P: AsRef<Path>>(dirs: &[P]) -> HashSet<PathBuf> {
top_level_names
}
pub fn get_filesystem_devices<P: AsRef<Path>>(paths: &[P], follow_links: bool) -> HashSet<u64> {
use std::fs;
pub fn get_filesystem_devices<'a, P: IntoIterator<Item = &'a PathBuf>>(paths: P) -> HashSet<u64> {
// Gets the device ids for the filesystems which are used by the argument paths
paths
.iter()
.into_iter()
.filter_map(|p| {
let follow_links = if follow_links {
// slow path: If dereference-links is set, then we check if the file is a symbolic link
match fs::symlink_metadata(p) {
Ok(metadata) => metadata.file_type().is_symlink(),
Err(_) => false,
}
let meta = get_metadata(p, false);
if let Some((_size, Some((_id, dev)))) = meta {
Some(dev)
} else {
false
};
match get_metadata(p, false, follow_links) {
Some((_size, Some((_id, dev)), _time)) => Some(dev),
_ => None,
None
}
})
.collect()
@@ -64,48 +54,21 @@ pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
// 3. removing trailing extra separators and '.' ("current directory") path segments
// * `Path.components()` does all the above work; ref: <https://doc.rust-lang.org/std/path/struct.Path.html#method.components>
// 4. changing to os preferred separator (automatically done by recollecting components back into a PathBuf)
path.as_ref().components().collect()
path.as_ref().components().collect::<PathBuf>()
}
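// Worked example (illustrative): Path::components() drops "." segments and
// redundant separators, so inputs like the test case "a/././b///" below
// normalize to the same PathBuf as "a/b".
#[allow(dead_code)]
fn normalize_path_example() {
    let expected: PathBuf = ["a", "b"].iter().collect();
    assert_eq!(normalize_path("a/././b///"), expected);
}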
// Canonicalize the path only if it is an absolute path
pub fn canonicalize_absolute_path(path: PathBuf) -> PathBuf {
if !path.is_absolute() {
return path;
}
match std::fs::canonicalize(&path) {
Ok(canonicalized_path) => canonicalized_path,
Err(_) => path,
}
}
pub fn is_filtered_out_due_to_regex(filter_regex: &[Regex], dir: &Path) -> bool {
if filter_regex.is_empty() {
false
} else {
filter_regex
.iter()
.all(|f| !f.is_match(&dir.as_os_str().to_string_lossy()))
}
}
pub fn is_filtered_out_due_to_file_time(
filter_time: &Option<(Operator, i64)>,
actual_time: i64,
) -> bool {
match filter_time {
pub fn is_filtered_out_due_to_regex(filter_regex: &Option<Regex>, dir: &Path) -> bool {
match filter_regex {
Some(fr) => !fr.is_match(&dir.as_os_str().to_string_lossy()),
None => false,
Some((Operator::Equal, bound_time)) => {
!(actual_time >= *bound_time && actual_time < *bound_time + DAY_SECONDS)
}
Some((Operator::GreaterThan, bound_time)) => actual_time < *bound_time,
Some((Operator::LessThan, bound_time)) => actual_time > *bound_time,
}
}
pub fn is_filtered_out_due_to_invert_regex(filter_regex: &[Regex], dir: &Path) -> bool {
filter_regex
.iter()
.any(|f| f.is_match(&dir.as_os_str().to_string_lossy()))
pub fn is_filtered_out_due_to_invert_regex(filter_regex: &Option<Regex>, dir: &Path) -> bool {
match filter_regex {
Some(fr) => fr.is_match(&dir.as_os_str().to_string_lossy()),
None => false,
}
}
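// Semantics sketch (hypothetical example values, assuming the &[Regex]
// signatures in this hunk): with include regexes ("-e") a path survives if any
// pattern matches, i.e. it is filtered out only when none match; with invert
// regexes ("-v") a path is dropped as soon as any pattern matches.
#[allow(dead_code)]
fn regex_filter_example() {
    let only_rust = vec![Regex::new(r"\.rs$").unwrap()];
    assert!(!is_filtered_out_due_to_regex(&only_rust, Path::new("src/main.rs")));
    assert!(is_filtered_out_due_to_regex(&only_rust, Path::new("README.md")));
    assert!(is_filtered_out_due_to_invert_regex(
        &only_rust,
        Path::new("src/main.rs")
    ));
}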
fn is_a_parent_of<P: AsRef<Path>>(parent: P, child: P) -> bool {
@@ -122,15 +85,14 @@ mod tests {
fn test_simplify_dir() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("a"));
assert_eq!(simplify_dir_names(&["a"]), correct);
assert_eq!(simplify_dir_names(vec!["a"]), correct);
}
#[test]
fn test_simplify_dir_rm_subdir() {
let mut correct = HashSet::new();
correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(&["a/b/c", "a/b", "a/b/d/f"]), correct);
assert_eq!(simplify_dir_names(&["a/b", "a/b/c", "a/b/d/f"]), correct);
assert_eq!(simplify_dir_names(vec!["a/b", "a/b/c", "a/b/d/f"]), correct);
}
#[test]
@@ -139,7 +101,7 @@ mod tests {
correct.insert(["a", "b"].iter().collect::<PathBuf>());
correct.insert(PathBuf::from("c"));
assert_eq!(
simplify_dir_names(&[
simplify_dir_names(vec![
"a/b",
"a/b//",
"a/././b///",
@@ -158,14 +120,14 @@ mod tests {
correct.insert(PathBuf::from("b"));
correct.insert(["c", "a", "b"].iter().collect::<PathBuf>());
correct.insert(["a", "b"].iter().collect::<PathBuf>());
assert_eq!(simplify_dir_names(&["a/b", "c/a/b/", "b"]), correct);
assert_eq!(simplify_dir_names(vec!["a/b", "c/a/b/", "b"]), correct);
}
#[test]
fn test_simplify_dir_dots() {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("src"));
assert_eq!(simplify_dir_names(&["src/."]), correct);
assert_eq!(simplify_dir_names(vec!["src/."]), correct);
}
#[test]
@@ -173,7 +135,7 @@ mod tests {
let mut correct = HashSet::new();
correct.insert(PathBuf::from("src"));
correct.insert(PathBuf::from("src_v2"));
assert_eq!(simplify_dir_names(&["src/", "src_v2"]), correct);
assert_eq!(simplify_dir_names(vec!["src/", "src_v2"]), correct);
}
#[test]

View File

@@ -1,2 +1 @@
something
.secret
hi

View File

@@ -1,11 +1,11 @@
use assert_cmd::Command;
use std::ffi::OsStr;
use std::process::Output;
use std::str;
use std::sync::Once;
use std::{io, str};
static INIT: Once = Once::new();
static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
mod tests_symlinks;
/**
* This file contains tests that verify the exact output of the command.
@@ -19,32 +19,23 @@ static UNREADABLE_DIR_PATH: &str = "/tmp/unreadable_dir";
/// Copy to /tmp dir - we assume that the formatting of the /tmp partition
/// is consistent. If the tests fail, your /tmp filesystem probably differs
fn copy_test_data(dir: &str) {
// First remove the existing directory - just in case it is there and has incorrect data
// First remove the existing directory - just incase it is there and has incorrect data
let last_slash = dir.rfind('/').unwrap();
let last_part_of_dir = dir.chars().skip(last_slash).collect::<String>();
let _ = Command::new("rm")
match Command::new("rm")
.arg("-rf")
.arg("/tmp/".to_owned() + &*last_part_of_dir)
.ok();
let _ = Command::new("cp")
.arg("-r")
.arg(dir)
.arg("/tmp/")
.ok()
.map_err(|err| eprintln!("Error copying directory for test setup\n{:?}", err));
}
fn create_unreadable_directory() -> io::Result<()> {
#[cfg(unix)]
{
use std::fs;
use std::fs::Permissions;
use std::os::unix::fs::PermissionsExt;
fs::create_dir_all(UNREADABLE_DIR_PATH)?;
fs::set_permissions(UNREADABLE_DIR_PATH, Permissions::from_mode(0))?;
}
Ok(())
Ok(_) => {}
Err(_) => {}
};
match Command::new("cp").arg("-r").arg(dir).arg("/tmp/").ok() {
Ok(_) => {}
Err(err) => {
eprintln!("Error copying directory {:?}", err);
}
};
}
fn initialize() {
@@ -52,42 +43,21 @@ fn initialize() {
copy_test_data("tests/test_dir");
copy_test_data("tests/test_dir2");
copy_test_data("tests/test_dir_unicode");
if let Err(e) = create_unreadable_directory() {
panic!("Failed to create unreadable directory: {}", e);
}
});
}
fn run_cmd<T: AsRef<OsStr>>(command_args: &[T]) -> Output {
fn exact_output_test<T: AsRef<OsStr>>(valid_outputs: Vec<String>, command_args: Vec<T>) {
initialize();
let mut to_run = &mut Command::cargo_bin("dust").unwrap();
let mut a = &mut Command::cargo_bin("dust").unwrap();
for p in command_args {
to_run = to_run.arg(p);
a = a.arg(p);
}
to_run.unwrap()
}
let output: String = str::from_utf8(&a.unwrap().stdout).unwrap().into();
fn exact_stdout_test<T: AsRef<OsStr>>(command_args: &[T], valid_stdout: Vec<String>) {
let to_run = run_cmd(command_args);
let stdout_output = str::from_utf8(&to_run.stdout).unwrap().to_owned();
let will_fail = valid_stdout.iter().any(|i| stdout_output.contains(i));
if !will_fail {
eprintln!(
"output(stdout):\n{}\ndoes not contain any of:\n{}",
stdout_output,
valid_stdout.join("\n\n")
);
}
assert!(will_fail);
}
fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String) {
let to_run = run_cmd(command_args);
let stderr_output = str::from_utf8(&to_run.stderr).unwrap().trim();
assert_eq!(stderr_output, valid_stderr);
assert!(valid_outputs
.iter()
.fold(false, |sum, i| sum || output.contains(i)));
}
// "windows" result data can vary by host (size seems to be variable by one byte); fix code vs test and re-enable
@@ -95,39 +65,38 @@ fn exact_stderr_test<T: AsRef<OsStr>>(command_args: &[T], valid_stderr: String)
#[test]
pub fn test_main_basic() {
// -c is no color mode - This makes testing much simpler
exact_stdout_test(&["-c", "-B", "/tmp/test_dir/"], main_output());
exact_output_test(main_output(), vec!["-c", "/tmp/test_dir/"])
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_main_multi_arg() {
let command_args = [
let command_args = vec![
"-c",
"-B",
"/tmp/test_dir/many/",
"/tmp/test_dir",
"/tmp/test_dir",
];
exact_stdout_test(&command_args, main_output());
exact_output_test(main_output(), command_args);
}
fn main_output() -> Vec<String> {
// Some Linux distros (currently thought to be Manjaro and Arch) produce this output,
// although it probably depends on how the drive is formatted
let mac_and_some_linux = r#"
0B ┌── a_file │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── hello_file│████████████████████████████████████████████████ │ 100%
4.0K ┌─┴ many │████████████████████████████████████████████████ │ 100%
4.0K ┌─┴ test_dir │████████████████████████████████████████████████ │ 100%
"#
0B ┌── a_file │░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── hello_file│████████████████████████████████████████████████ │ 100%
4.0K ┌─┴ many │████████████████████████████████████████████████ │ 100%
4.0K ┌─┴ test_dir │████████████████████████████████████████████████ │ 100%
"#
.trim()
.to_string();
let ubuntu = r#"
0B ┌── a_file │ ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── hello_file│ ░░░░░░░░░░░░░░░░█████████████████ │ 33%
8.0K ┌─┴ many │ █████████████████████████████████ │ 67%
12K ┌─┴ test_dir │████████████████████████████████████████████████ │ 100%
0B ┌── a_file │ ░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── hello_file│ ░░░░░░░░░░░░░░░░█████████████████ │ 33%
8.0K ┌─┴ many │ █████████████████████████████████ │ 67%
12K ┌─┴ test_dir │████████████████████████████████████████████████ │ 100%
"#
.trim()
.to_string();
@@ -138,59 +107,59 @@ fn main_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_main_long_paths() {
let command_args = ["-c", "-p", "-B", "/tmp/test_dir/"];
exact_stdout_test(&command_args, main_output_long_paths());
let command_args = vec!["-c", "-p", "/tmp/test_dir/"];
exact_output_test(main_output_long_paths(), command_args);
}
fn main_output_long_paths() -> Vec<String> {
let mac_and_some_linux = r#"
0B ┌── /tmp/test_dir/many/a_file │░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── /tmp/test_dir/many/hello_file│█████████████████████████████ │ 100%
4.0K ┌─┴ /tmp/test_dir/many │█████████████████████████████ │ 100%
4.0K ┌─┴ /tmp/test_dir │█████████████████████████████ │ 100%
0B ┌── /tmp/test_dir/many/a_file │░░░░░░░░░░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── /tmp/test_dir/many/hello_file│█████████████████████████████ │ 100%
4.0K ┌─┴ /tmp/test_dir/many │█████████████████████████████ │ 100%
4.0K ┌─┴ /tmp/test_dir │█████████████████████████████ │ 100%
"#
.trim()
.to_string();
let ubuntu = r#"
0B ┌── /tmp/test_dir/many/a_file │ ░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── /tmp/test_dir/many/hello_file│ ░░░░░░░░░░██████████ │ 33%
8.0K ┌─┴ /tmp/test_dir/many │ ████████████████████ │ 67%
12K ┌─┴ /tmp/test_dir │█████████████████████████████ │ 100%
0B ┌── /tmp/test_dir/many/a_file │ ░░░░░░░░░░░░░░░░░░░█ │ 0%
4.0K ├── /tmp/test_dir/many/hello_file│ ░░░░░░░░░░██████████ │ 33%
8.0K ┌─┴ /tmp/test_dir/many │ ████████████████████ │ 67%
12K ┌─┴ /tmp/test_dir │█████████████████████████████ │ 100%
"#
.trim()
.to_string();
vec![mac_and_some_linux, ubuntu]
}
// Check against directories and files whose names are substrings of each other
// Check against directories and files whos names are substrings of each other
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_substring_of_names_and_long_names() {
let command_args = ["-c", "-B", "/tmp/test_dir2"];
exact_stdout_test(&command_args, no_substring_of_names_output());
let command_args = vec!["-c", "/tmp/test_dir2"];
exact_output_test(no_substring_of_names_output(), command_args);
}
fn no_substring_of_names_output() -> Vec<String> {
let ubuntu = "
0B ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_goes..
4.0K ├── dir_name_clash
4.0K │ ┌── hello
8.0K ├─┴ dir
4.0K │ ┌── hello
8.0K ├─┴ dir_substring
24K ┌─┴ test_dir2
0B ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_g..
4.0K ├── dir_name_clash
4.0K │ ┌── hello
8.0K ├─┴ dir
4.0K │ ┌── hello
8.0K ├─┴ dir_substring
24K ┌─┴ test_dir2
"
.trim()
.into();
let mac_and_some_linux = "
0B ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_goes..
4.0K │ ┌── hello
4.0K ├─┴ dir
4.0K ├── dir_name_clash
4.0K │ ┌── hello
4.0K ├─┴ dir_substring
12K ┌─┴ test_dir2
0B ┌── long_dir_name_what_a_very_long_dir_name_what_happens_when_this_g..
4.0K │ ┌── hello
4.0K ├─┴ dir
4.0K ├── dir_name_clash
4.0K │ ┌── hello
4.0K ├─┴ dir_substring
12K ┌─┴ test_dir2
"
.trim()
.into();
@@ -200,75 +169,26 @@ fn no_substring_of_names_output() -> Vec<String> {
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_unicode_directories() {
let command_args = ["-c", "-B", "/tmp/test_dir_unicode"];
exact_stdout_test(&command_args, unicode_dir());
let command_args = vec!["-c", "/tmp/test_dir_unicode"];
exact_output_test(unicode_dir(), command_args);
}
fn unicode_dir() -> Vec<String> {
// The way Unicode & Asian characters are rendered on the terminal should make this line up
let ubuntu = "
0B ┌── ラウトは難しいです!.japan│ █ │ 0%
0B ├── 👩.unicode │ █ │ 0%
4.0K ┌─┴ test_dir_unicode │██████████████████████████████████ │ 100%
0B ┌── ラウトは難しいです!.japan│ █ │ 0%
0B ├── 👩.unicode │ █ │ 0%
4.0K ┌─┴ test_dir_unicode │██████████████████████████████████ │ 100%
"
.trim()
.into();
let mac_and_some_linux = "
0B ┌── ラウトは難しいです!.japan│ █ │ 0%
0B ├── 👩.unicode │ █ │ 0%
0B ┌─┴ test_dir_unicode │ █ │ 0%
0B ┌── ラウトは難しいです!.japan│ █ │ 0%
0B ├── 👩.unicode │ █ │ 0%
0B ┌─┴ test_dir_unicode │ █ │ 0%
"
.trim()
.into();
vec![mac_and_some_linux, ubuntu]
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_apparent_size() {
let command_args = ["-c", "-s", "-b", "/tmp/test_dir"];
exact_stdout_test(&command_args, apparent_size_output());
}
fn apparent_size_output() -> Vec<String> {
// The apparent directory sizes are too unpredictable and system-dependent to try to match
let one_space_before = r#"
0B ┌── a_file
6B ├── hello_file
"#
.trim()
.to_string();
let two_space_before = r#"
0B ┌── a_file
6B ├── hello_file
"#
.trim()
.to_string();
vec![one_space_before, two_space_before]
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_permission_normal() {
let command_args = [UNREADABLE_DIR_PATH];
let permission_msg =
r#"Did not have permissions for all directories (add --print-errors to see errors)"#
.trim()
.to_string();
exact_stderr_test(&command_args, permission_msg);
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_permission_flag() {
// add the flag to CLI
let command_args = ["--print-errors", UNREADABLE_DIR_PATH];
let permission_msg = format!(
"Did not have permissions for directories: {}",
UNREADABLE_DIR_PATH
);
exact_stderr_test(&command_args, permission_msg);
}

View File

@@ -9,18 +9,11 @@ use std::str;
*/
fn build_command<T: AsRef<OsStr>>(command_args: Vec<T>) -> String {
let mut cmd = &mut Command::cargo_bin("dust").unwrap();
// Hide progress bar
cmd = cmd.arg("-P");
let mut a = &mut Command::cargo_bin("dust").unwrap();
for p in command_args {
cmd = cmd.arg(p);
a = a.arg(p);
}
let finished = &cmd.unwrap();
let stderr = str::from_utf8(&finished.stderr).unwrap();
assert_eq!(stderr, "");
str::from_utf8(&finished.stdout).unwrap().into()
str::from_utf8(&a.unwrap().stdout).unwrap().into()
}
// We can at least test the file names are there
@@ -62,56 +55,27 @@ pub fn test_d_flag_works() {
assert!(!output.contains("hello_file"));
}
#[test]
pub fn test_d0_works_on_multiple() {
// We should see the top level directory but not the sub dirs / files:
let output = build_command(vec!["-d", "0", "tests/test_dir/", "tests/test_dir2"]);
assert!(output.contains("test_dir "));
assert!(output.contains("test_dir2"));
}
#[test]
pub fn test_threads_flag_works() {
let output = build_command(vec!["-T", "1", "tests/test_dir/"]);
assert!(output.contains("hello_file"));
}
#[test]
pub fn test_d_flag_works_and_still_recurses_down() {
// We had a bug where running with '-d 1' would stop at the first directory and the code
// would fail to recurse down
let output = build_command(vec!["-d", "1", "-f", "-c", "tests/test_dir2/"]);
assert!(output.contains("1 ┌── dir"));
assert!(output.contains("4 ┌─┴ test_dir2"));
}
// Check against directories and files whose names are substrings of each other
// Check against directories and files whos names are substrings of each other
#[test]
pub fn test_ignore_dir() {
let output = build_command(vec!["-c", "-X", "dir_substring", "tests/test_dir2/"]);
assert!(!output.contains("dir_substring"));
}
#[test]
pub fn test_ignore_all_in_file() {
let output = build_command(vec![
"-c",
"-I",
"tests/test_dir_hidden_entries/.hidden_file",
"tests/test_dir_hidden_entries/",
]);
assert!(output.contains(" test_dir_hidden_entries"));
assert!(!output.contains(".secret"));
}
#[test]
pub fn test_with_bad_param() {
let mut cmd = Command::cargo_bin("dust").unwrap();
cmd.arg("-P").arg("bad_place");
let output_error = cmd.unwrap_err();
let result = output_error.as_output().unwrap();
let stderr = str::from_utf8(&result.stderr).unwrap();
assert!(stderr.contains("No such file or directory"));
let stderr = cmd.arg("-").unwrap().stderr;
let stderr = str::from_utf8(&stderr).unwrap();
assert!(stderr.contains("Did not have permissions for all directories"));
}
#[test]
@@ -137,6 +101,22 @@ pub fn test_number_of_files() {
assert!(output.contains("2 ┌─┴ test_dir"));
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_apparent_size() {
// Check that the '-s' flag gives us byte sizes and doesn't round up to a block
let command_args = vec!["-c", "-s", "/tmp/test_dir"];
let output = build_command(command_args);
let apparent_size1 = "6B ├── hello_file│";
let apparent_size2 = "0B ┌── a_file";
assert!(output.contains(apparent_size1));
assert!(output.contains(apparent_size2));
let incorrect_apparent_size = "4.0K ├── hello_file";
assert!(!output.contains(incorrect_apparent_size));
}
#[test]
pub fn test_show_files_by_type() {
// Check we can list files by type
@@ -149,75 +129,18 @@ pub fn test_show_files_by_type() {
}
#[test]
#[cfg(target_family = "unix")]
pub fn test_show_files_only() {
let output = build_command(vec!["-c", "-F", "tests/test_dir"]);
assert!(output.contains("a_file"));
assert!(output.contains("hello_file"));
assert!(!output.contains("many"));
}
#[test]
pub fn test_output_skip_total() {
let output = build_command(vec![
"--skip-total",
"tests/test_dir/many/hello_file",
"tests/test_dir/many/a_file",
]);
assert!(output.contains("hello_file"));
assert!(!output.contains("(total)"));
}
#[test]
pub fn test_output_screen_reader() {
let output = build_command(vec!["--screen-reader", "-c", "tests/test_dir/"]);
println!("{}", output);
assert!(output.contains("test_dir 0"));
assert!(output.contains("many 1"));
assert!(output.contains("hello_file 2"));
assert!(output.contains("a_file 2"));
// Verify no 'symbols' reported by screen reader
assert!(!output.contains('│'));
for block in ['█', '▓', '▒', '░'] {
assert!(!output.contains(block));
}
}
#[test]
pub fn test_show_files_by_regex_match_lots() {
pub fn test_show_files_by_regex() {
// Check we can see '.rs' files in the tests directory
let output = build_command(vec!["-c", "-e", "\\.rs$", "tests"]);
assert!(output.contains(" ┌─┴ tests"));
assert!(!output.contains("0B ┌── tests"));
assert!(!output.contains("0B ┌─┴ tests"));
}
#[test]
pub fn test_show_files_by_regex_match_nothing() {
// Check there are no files named: '.match_nothing' in the tests directory
let output = build_command(vec!["-c", "-e", "match_nothing$", "tests"]);
assert!(output.contains("0B ┌── tests"));
}
#[test]
pub fn test_show_files_by_regex_match_multiple() {
let output = build_command(vec![
"-c",
"-e",
"test_dir_hidden",
"-e",
"test_dir2",
"-n",
"100",
"tests",
]);
assert!(output.contains("test_dir2"));
assert!(output.contains("test_dir_hidden"));
assert!(!output.contains("many")); // We do not find the 'many' folder in the 'test_dir' folder
}
#[test]
pub fn test_show_files_by_invert_regex() {
let output = build_command(vec!["-c", "-f", "-v", "e", "tests/test_dir2"]);
@@ -232,61 +155,3 @@ pub fn test_show_files_by_invert_regex() {
let output = build_command(vec!["-c", "-f", "-v", "match_nothing$", "tests/test_dir2"]);
assert!(output.contains("4 ┌─┴ test_dir2"));
}
#[test]
pub fn test_show_files_by_invert_regex_match_multiple() {
// We ignore test_dir2 & test_dir_unicode, leaving the test_dir folder
// which has the 'many' folder inside
let output = build_command(vec![
"-c",
"-v",
"test_dir2",
"-v",
"test_dir_unicode",
"-n",
"100",
"tests",
]);
assert!(!output.contains("test_dir2"));
assert!(!output.contains("test_dir_unicode"));
assert!(output.contains("many"));
}
#[test]
pub fn test_no_color() {
let output = build_command(vec!["-c"]);
// Red is 31
assert!(!output.contains("\x1B[31m"));
assert!(!output.contains("\x1B[0m"));
}
#[test]
pub fn test_force_color() {
let output = build_command(vec!["-C"]);
// Red is 31
assert!(output.contains("\x1B[31m"));
assert!(output.contains("\x1B[0m"));
}
#[test]
pub fn test_collapse() {
let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
assert!(output.contains("many"));
assert!(!output.contains("hello_file"));
}
#[test]
pub fn test_handle_duplicate_names() {
// Check that even if we run on multiple directories with the same name
// we still show the distinct parent dir in the output
let output = build_command(vec![
"tests/test_dir_matching/dave/dup_name",
"tests/test_dir_matching/andy/dup_name",
"ci",
]);
assert!(output.contains("andy"));
assert!(output.contains("dave"));
assert!(output.contains("ci"));
assert!(output.contains("dup_name"));
assert!(!output.contains("test_dir_matching"));
}

View File

@@ -1,15 +1,41 @@
use assert_cmd::Command;
use std::cmp::max;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use std::str;
use terminal_size::{terminal_size, Height, Width};
use unicode_width::UnicodeWidthStr;
use tempfile::Builder;
use tempfile::TempDir;
// File sizes differ by both platform and the format of the disk.
// Windows: `ln` is not usually an available command; creation of symbolic links requires special enhanced permissions
fn get_width_of_terminal() -> u16 {
if let Some((Width(w), Height(_h))) = terminal_size() {
max(w, 80)
} else {
80
}
}
// Mac test runners create tmp files with very long names, so names may be shortened in the output
fn get_file_name(name: String) -> String {
let terminal_plus_buffer = (get_width_of_terminal() - 14) as usize;
if UnicodeWidthStr::width(&*name) > terminal_plus_buffer {
let trimmed_name = name
.chars()
.take(terminal_plus_buffer - 2)
.collect::<String>();
trimmed_name + ".."
} else {
name
}
}
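// Illustrative check (hypothetical values): names wider than the terminal
// budget are cut and suffixed with "..", which is why the expected lines in
// the tests below are built with get_file_name() rather than the raw tmp path.
#[allow(dead_code)]
fn get_file_name_example() {
    let budget = (get_width_of_terminal() - 14) as usize;
    let shown = get_file_name("x".repeat(budget + 10));
    assert!(shown.ends_with(".."));
    assert!(UnicodeWidthStr::width(shown.as_str()) <= budget);
}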
fn build_temp_file(dir: &TempDir) -> PathBuf {
let file_path = dir.path().join("notes.txt");
let mut file = File::create(&file_path).unwrap();
@@ -17,18 +43,6 @@ fn build_temp_file(dir: &TempDir) -> PathBuf {
file_path
}
fn link_it(link_path: PathBuf, file_path_s: &str, is_soft: bool) -> String {
let link_name_s = link_path.to_str().unwrap();
let mut c = Command::new("ln");
if is_soft {
c.arg("-s");
}
c.arg(file_path_s);
c.arg(link_name_s);
assert!(c.output().is_ok());
link_name_s.into()
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_soft_sym_link() {
@@ -38,18 +52,20 @@ pub fn test_soft_sym_link() {
let file_path_s = file.to_str().unwrap();
let link_name = dir.path().join("the_link");
let link_name_s = link_it(link_name, file_path_s, true);
let link_name_s = link_name.to_str().unwrap();
let c = Command::new("ln")
.arg("-s")
.arg(file_path_s)
.arg(link_name_s)
.output();
assert!(c.is_ok());
let c = format!(" ├── {}", link_name_s);
let b = format!(" ┌── {}", file_path_s);
let c = format!(" ├── {}", get_file_name(link_name_s.into()));
let b = format!(" ┌── {}", get_file_name(file_path_s.into()));
let a = format!("─┴ {}", dir_s);
let mut cmd = Command::cargo_bin("dust").unwrap();
// Mac test runners create long filenames in tmp directories
let output = cmd
.args(["-p", "-c", "-s", "-w", "999", dir_s])
.unwrap()
.stdout;
let output = cmd.arg("-p").arg("-c").arg("-s").arg(dir_s).unwrap().stdout;
let output = str::from_utf8(&output).unwrap();
@@ -67,48 +83,25 @@ pub fn test_hard_sym_link() {
let file_path_s = file.to_str().unwrap();
let link_name = dir.path().join("the_link");
link_it(link_name, file_path_s, false);
let link_name_s = link_name.to_str().unwrap();
let c = Command::new("ln")
.arg(file_path_s)
.arg(link_name_s)
.output();
assert!(c.is_ok());
let file_output = format!(" ┌── {}", file_path_s);
let link_output = format!(" ┌── {}", get_file_name(link_name_s.into()));
let file_output = format!(" ┌── {}", get_file_name(file_path_s.into()));
let dirs_output = format!("─┴ {}", dir_s);
let mut cmd = Command::cargo_bin("dust").unwrap();
// Mac test runners create long filenames in tmp directories
let output = cmd.args(["-p", "-c", "-w", "999", dir_s]).unwrap().stdout;
let output = cmd.arg("-p").arg("-c").arg(dir_s).unwrap().stdout;
// The link should not appear in the output because multiple inodes are now ordered
// then filtered.
// Because this is a hard link the file and hard link look identical. Therefore
// we cannot guarantee which version will appear first.
let output = str::from_utf8(&output).unwrap();
assert!(output.contains(dirs_output.as_str()));
assert!(output.contains(file_output.as_str()));
}
#[cfg_attr(target_os = "windows", ignore)]
#[test]
pub fn test_hard_sym_link_no_dup_multi_arg() {
let dir = Builder::new().tempdir().unwrap();
let dir_link = Builder::new().tempdir().unwrap();
let file = build_temp_file(&dir);
let dir_s = dir.path().to_str().unwrap();
let dir_link_s = dir_link.path().to_str().unwrap();
let file_path_s = file.to_str().unwrap();
let link_name = dir_link.path().join("the_link");
let link_name_s = link_it(link_name, file_path_s, false);
let mut cmd = Command::cargo_bin("dust").unwrap();
// Mac test runners create long filenames in tmp directories
let output = cmd
.args(["-p", "-c", "-w", "999", "-b", dir_link_s, dir_s])
.unwrap()
.stdout;
// The link or the file should appear but not both
let output = str::from_utf8(&output).unwrap();
let has_file_only = output.contains(file_path_s) && !output.contains(&link_name_s);
let has_link_only = !output.contains(file_path_s) && output.contains(&link_name_s);
assert!(has_file_only || has_link_only);
assert!(output.contains(link_output.as_str()) || output.contains(file_output.as_str()));
}
#[cfg_attr(target_os = "windows", ignore)]
@@ -118,10 +111,17 @@ pub fn test_recursive_sym_link() {
let dir_s = dir.path().to_str().unwrap();
let link_name = dir.path().join("the_link");
let link_name_s = link_it(link_name, dir_s, true);
let link_name_s = link_name.to_str().unwrap();
let c = Command::new("ln")
.arg("-s")
.arg(dir_s)
.arg(link_name_s)
.output();
assert!(c.is_ok());
let a = format!("─┬ {}", dir_s);
let b = format!(" └── {}", link_name_s);
let b = format!(" └── {}", get_file_name(link_name_s.into()));
let mut cmd = Command::cargo_bin("dust").unwrap();
let output = cmd
@@ -129,8 +129,6 @@ pub fn test_recursive_sym_link() {
.arg("-c")
.arg("-r")
.arg("-s")
.arg("-w")
.arg("999")
.arg(dir_s)
.unwrap()
.stdout;