Mirror of https://github.com/bootandy/dust.git (synced 2026-01-28 07:44:31 -08:00)

Compare commits: skip_total...same_dir_n (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | f92a97edc2 | |
| | 6b83281183 | |
| | 01c0aaeade | |
| | 6cbd736e11 | |
| | 8e087e09da | |
@@ -103,6 +103,7 @@ Usage: dust --skip-total (No total row will be displayed)
Usage: dust -z 40000/30MB/20kib (Exclude output files/directories below size 40000 bytes / 30MB / 20KiB)
Usage: dust -j (Prints JSON representation of directories, try: dust -j | jq)
Usage: dust --files0-from=FILE (Reads null-terminated file paths from FILE); If FILE is - then read from stdin
Usage: dust --collapse=node-modules will keep the node-modules folder collapsed in display instead of recursively opening it
```

## Config file
@@ -45,6 +45,7 @@ _dust() {
    '-y+[just like -mtime, but based on file change time]: : ' \
    '--ctime=[just like -mtime, but based on file change time]: : ' \
    '--files0-from=[run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input]: :_files' \
    '*--collapse=[Keep these directories collapsed]: :_files' \
    '-m+[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
    '--filetime=[Directory '\''size'\'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time]: :(a c m)' \
    '-p[Subdirectories will not have their path shortened]' \
@@ -51,6 +51,7 @@ Register-ArgumentCompleter -Native -CommandName 'dust' -ScriptBlock {
            [CompletionResult]::new('-y', 'y', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
            [CompletionResult]::new('--ctime', 'ctime', [CompletionResultType]::ParameterName, 'just like -mtime, but based on file change time')
            [CompletionResult]::new('--files0-from', 'files0-from', [CompletionResultType]::ParameterName, 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input')
            [CompletionResult]::new('--collapse', 'collapse', [CompletionResultType]::ParameterName, 'Keep these directories collapsed')
            [CompletionResult]::new('-m', 'm', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
            [CompletionResult]::new('--filetime', 'filetime', [CompletionResultType]::ParameterName, 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time')
            [CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Subdirectories will not have their path shortened')
@@ -19,7 +19,7 @@ _dust() {

    case "${cmd}" in
        dust)
            opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --filetime --help --version [PATH]..."
            opts="-d -T -n -p -X -I -L -x -s -r -c -C -b -B -z -R -f -i -v -e -t -w -P -D -F -o -S -j -M -A -y -m -h -V --depth --threads --config --number-of-lines --full-paths --ignore-directory --ignore-all-in-file --dereference-links --limit-filesystem --apparent-size --reverse --no-colors --force-colors --no-percent-bars --bars-on-right --min-size --screen-reader --skip-total --filecount --ignore_hidden --invert-filter --filter --file_types --terminal_width --no-progress --print-errors --only-dir --only-file --output-format --stack-size --output-json --mtime --atime --ctime --files0-from --collapse --filetime --help --version [PATH]..."
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0
@@ -178,6 +178,10 @@ _dust() {
                COMPREPLY=($(compgen -f "${cur}"))
                return 0
                ;;
            --collapse)
                COMPREPLY=($(compgen -f "${cur}"))
                return 0
                ;;
            --filetime)
                COMPREPLY=($(compgen -W "a c m" -- "${cur}"))
                return 0
@@ -48,6 +48,7 @@ set edit:completion:arg-completer[dust] = {|@words|
        cand -y 'just like -mtime, but based on file change time'
        cand --ctime 'just like -mtime, but based on file change time'
        cand --files0-from 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input'
        cand --collapse 'Keep these directories collapsed'
        cand -m 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
        cand --filetime 'Directory ''size'' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time'
        cand -p 'Subdirectories will not have their path shortened'
@@ -14,6 +14,7 @@ complete -c dust -s M -l mtime -d '+/-n matches files modified more/less than n
complete -c dust -s A -l atime -d 'just like -mtime, but based on file access time' -r
complete -c dust -s y -l ctime -d 'just like -mtime, but based on file change time' -r
complete -c dust -l files0-from -d 'run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input' -r -F
complete -c dust -l collapse -d 'Keep these directories collapsed' -r -F
complete -c dust -s m -l filetime -d 'Directory \'size\' is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time' -r -f -a "{a\t'',c\t'',m\t''}"
complete -c dust -s p -l full-paths -d 'Subdirectories will not have their path shortened'
complete -c dust -s L -l dereference-links -d 'dereference sym links - Treat sym links as directories and go into them'
@@ -4,7 +4,7 @@
.SH NAME
Dust \- Like du but more intuitive
.SH SYNOPSIS
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
\fBdust\fR [\fB\-d\fR|\fB\-\-depth\fR] [\fB\-T\fR|\fB\-\-threads\fR] [\fB\-\-config\fR] [\fB\-n\fR|\fB\-\-number\-of\-lines\fR] [\fB\-p\fR|\fB\-\-full\-paths\fR] [\fB\-X\fR|\fB\-\-ignore\-directory\fR] [\fB\-I\fR|\fB\-\-ignore\-all\-in\-file\fR] [\fB\-L\fR|\fB\-\-dereference\-links\fR] [\fB\-x\fR|\fB\-\-limit\-filesystem\fR] [\fB\-s\fR|\fB\-\-apparent\-size\fR] [\fB\-r\fR|\fB\-\-reverse\fR] [\fB\-c\fR|\fB\-\-no\-colors\fR] [\fB\-C\fR|\fB\-\-force\-colors\fR] [\fB\-b\fR|\fB\-\-no\-percent\-bars\fR] [\fB\-B\fR|\fB\-\-bars\-on\-right\fR] [\fB\-z\fR|\fB\-\-min\-size\fR] [\fB\-R\fR|\fB\-\-screen\-reader\fR] [\fB\-\-skip\-total\fR] [\fB\-f\fR|\fB\-\-filecount\fR] [\fB\-i\fR|\fB\-\-ignore_hidden\fR] [\fB\-v\fR|\fB\-\-invert\-filter\fR] [\fB\-e\fR|\fB\-\-filter\fR] [\fB\-t\fR|\fB\-\-file_types\fR] [\fB\-w\fR|\fB\-\-terminal_width\fR] [\fB\-P\fR|\fB\-\-no\-progress\fR] [\fB\-\-print\-errors\fR] [\fB\-D\fR|\fB\-\-only\-dir\fR] [\fB\-F\fR|\fB\-\-only\-file\fR] [\fB\-o\fR|\fB\-\-output\-format\fR] [\fB\-S\fR|\fB\-\-stack\-size\fR] [\fB\-j\fR|\fB\-\-output\-json\fR] [\fB\-M\fR|\fB\-\-mtime\fR] [\fB\-A\fR|\fB\-\-atime\fR] [\fB\-y\fR|\fB\-\-ctime\fR] [\fB\-\-files0\-from\fR] [\fB\-\-collapse\fR] [\fB\-m\fR|\fB\-\-filetime\fR] [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fIPATH\fR]
.SH DESCRIPTION
Like du but more intuitive
.SH OPTIONS
@@ -118,6 +118,9 @@ just like \-mtime, but based on file change time
\fB\-\-files0\-from\fR
run dust on NUL\-terminated file names specified in file; if argument is \-, then read names from standard input
.TP
\fB\-\-collapse\fR
Keep these directories collapsed
.TP
\fB\-m\fR, \fB\-\-filetime\fR
Directory \*(Aqsize\*(Aq is max filetime of child files instead of disk size. while a/c/m for last accessed/changed/modified time
.br
@@ -303,6 +303,14 @@ pub fn build_cli() -> Command {
                .num_args(1)
                .help("run dust on NUL-terminated file names specified in file; if argument is -, then read names from standard input"),
        )
        .arg(
            Arg::new("collapse")
                .long("collapse")
                .value_hint(clap::ValueHint::AnyPath)
                .value_parser(value_parser!(String))
                .action(clap::ArgAction::Append)
                .help("Keep these directories collapsed"),
        )
        .arg(
            Arg::new("filetime")
                .short('m')
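Because the new `collapse` argument uses `clap::ArgAction::Append`, the flag can be passed more than once and every occurrence is kept. A minimal sketch of that behaviour with a stand-in command (the `demo` name and the argument values below are hypothetical, not dust's real CLI):

```rust
use clap::{Arg, ArgAction, Command};

fn main() {
    // Hypothetical stand-alone command, only to show Append semantics.
    let matches = Command::new("demo")
        .arg(
            Arg::new("collapse")
                .long("collapse")
                .action(ArgAction::Append), // each --collapse occurrence appends one value
        )
        .get_matches_from(["demo", "--collapse", "node_modules", "--collapse", "target"]);

    let collapsed: Vec<String> = matches
        .get_many::<String>("collapse")
        .map(|vals| vals.cloned().collect())
        .unwrap_or_default();

    assert_eq!(collapsed, ["node_modules", "target"]);
}
```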
@@ -1,5 +1,6 @@
use std::cmp::Ordering;
use std::fs;
use std::io::Error;
use std::sync::Arc;
use std::sync::Mutex;

@@ -14,6 +15,7 @@ use crate::utils::is_filtered_out_due_to_regex;
use rayon::iter::ParallelBridge;
use rayon::prelude::ParallelIterator;
use regex::Regex;
use std::path::Path;
use std::path::PathBuf;

use std::collections::HashSet;

@@ -229,8 +231,9 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
                        }
                    }
                    Err(ref failed) => {
                        let mut editable_error = errors.lock().unwrap();
                        editable_error.no_permissions.insert(failed.to_string());
                        if handle_error_and_retry(failed, &dir, walk_data) {
                            return walk(dir.clone(), walk_data, depth);
                        }
                    }
                }
                None
@@ -238,21 +241,11 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
            .collect()
        }
        Err(failed) => {
            let mut editable_error = errors.lock().unwrap();
            match failed.kind() {
                std::io::ErrorKind::PermissionDenied => {
                    editable_error
                        .no_permissions
                        .insert(dir.to_string_lossy().into());
                }
                std::io::ErrorKind::NotFound => {
                    editable_error.file_not_found.insert(failed.to_string());
                }
                _ => {
                    editable_error.unknown_error.insert(failed.to_string());
                }
            if handle_error_and_retry(&failed, &dir, walk_data) {
                return walk(dir, walk_data, depth);
            } else {
                vec![]
            }
            vec![]
        }
    }
} else {
@@ -274,6 +267,38 @@ fn walk(dir: PathBuf, walk_data: &WalkData, depth: usize) -> Option<Node> {
    build_node(dir, children, is_symlink, false, depth, walk_data)
}

fn handle_error_and_retry(failed: &Error, dir: &Path, walk_data: &WalkData) -> bool {
    let mut editable_error = walk_data.errors.lock().unwrap();
    match failed.kind() {
        std::io::ErrorKind::PermissionDenied => {
            editable_error
                .no_permissions
                .insert(dir.to_string_lossy().into());
        }
        std::io::ErrorKind::InvalidInput => {
            editable_error
                .no_permissions
                .insert(dir.to_string_lossy().into());
        }
        std::io::ErrorKind::NotFound => {
            editable_error.file_not_found.insert(failed.to_string());
        }
        std::io::ErrorKind::Interrupted => {
            let mut editable_error = walk_data.errors.lock().unwrap();
            editable_error.interrupted_error += 1;
            if editable_error.interrupted_error > 3 {
                panic!("Multiple Interrupted Errors occurred while scanning filesystem. Aborting");
            } else {
                return true;
            }
        }
        _ => {
            editable_error.unknown_error.insert(failed.to_string());
        }
    }
    false
}

mod tests {

#[allow(unused_imports)]
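The new `handle_error_and_retry` helper centralises the error bookkeeping that previously sat inline in `walk`, and it asks the caller to retry only for `ErrorKind::Interrupted`, giving up after three interruptions. A stand-alone sketch of that retry policy, using hypothetical names rather than dust's actual types:

```rust
use std::io::{Error, ErrorKind};

/// Retry a fallible operation on ErrorKind::Interrupted, at most `max_retries` times.
fn with_interrupt_retry<T>(
    mut op: impl FnMut() -> Result<T, Error>,
    max_retries: u32,
) -> Result<T, Error> {
    let mut interruptions = 0;
    loop {
        match op() {
            Ok(value) => return Ok(value),
            Err(e) if e.kind() == ErrorKind::Interrupted => {
                interruptions += 1;
                if interruptions > max_retries {
                    return Err(e); // give up instead of retrying forever
                }
            }
            Err(e) => return Err(e), // any other error is reported immediately
        }
    }
}

fn main() {
    // An operation that is always interrupted: after the retry budget is spent,
    // the error is returned to the caller.
    let result: Result<(), Error> =
        with_interrupt_retry(|| Err(Error::from(ErrorKind::Interrupted)), 3);
    assert!(result.is_err());
}
```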
src/display.rs (127 changed lines)
@@ -11,8 +11,13 @@ use stfu8::encode_u8;
use chrono::{DateTime, Local, TimeZone, Utc};
use std::cmp::max;
use std::cmp::min;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::fs;
use std::hash::Hash;
use std::iter::repeat;
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use thousands::Separable;

@@ -37,6 +42,7 @@ pub struct DisplayData {
    pub base_size: u64,
    pub longest_string_length: usize,
    pub ls_colors: LsColors,
    pub duplicate_names: HashMap<String, u32>,
}

impl DisplayData {
@@ -134,16 +140,11 @@ pub fn draw_it(
    root_node: &DisplayNode,
    skip_total: bool,
) {
    let biggest = match skip_total {
        false => root_node,
        true => root_node
            .get_children_from_node(false)
            .next()
            .unwrap_or(root_node),
    };

    let duplicate_names = check_for_dup_names(&root_node);

    let num_chars_needed_on_left_most = if idd.by_filecount {
        let max_size = biggest.size;
        let max_size = root_node.size;
        max_size.separate_with_commas().chars().count()
    } else if idd.by_filetime.is_some() {
        FILETIME_SHOW_LENGTH
@@ -156,10 +157,12 @@ pub fn draw_it(
        "Not enough terminal width"
    );

    // let duplicate_dir_names = find_duplicate_names(root_node, idd.short_paths);

    let allowed_width = terminal_width - num_chars_needed_on_left_most - 2;
    let num_indent_chars = 3;
    let longest_string_length =
        find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd);
        find_longest_dir_name(root_node, num_indent_chars, allowed_width, &idd, &duplicate_names);

    let max_bar_length = if no_percent_bars || longest_string_length + 7 >= allowed_width {
        0
@@ -172,9 +175,10 @@ pub fn draw_it(
    let display_data = DisplayData {
        initial: idd,
        num_chars_needed_on_left_most,
        base_size: biggest.size,
        base_size: root_node.size,
        longest_string_length,
        ls_colors: LsColors::from_env().unwrap_or_default(),
        duplicate_names
    };
    let draw_data = DrawData {
        indent: "".to_string(),
@@ -195,6 +199,82 @@ pub fn draw_it(
        }
    }
}
fn check_for_dup_names(result: &DisplayNode) -> HashMap<String, u32> {
    let mut names = HashMap::new();
    let mut dup_names = HashMap::new();
    // let empty = HashSet::new();

    let mut results = VecDeque::new();
    results.push_back((result, 0));

    while results.len() > 0 {
        let (current, level) = results.pop_front().unwrap();

        let mut folders = current.name.iter().rev();
        let mut s = String::new();

        // Look at parent folder names - if they differ and we are printing them
        // we dont need the helper
        for _ in 0..level {
            s.push_str(&encode_u8(folders.next().unwrap().as_bytes()));
        }

        if names.contains_key(&s) {
            // TODO: compare s with names[s]
            // and walk back until you find a difference.
            dup_names.insert(s, level);
        } else {
            names.insert(s, vec![&current.name]);
        }

        current.children.iter().for_each(|node| {results.push_back((&node, level+1));});
    }
    println!("{:?}", names);
    println!("{:?}", dup_names);
    dup_names
}

pub fn get_printable_name(node: &DisplayNode, short_paths: bool, dup_names: &HashMap<String, u32>) -> String {
    let dir_name = &node.name;
    let printable_name = {
        if short_paths {
            match dir_name.parent() {
                Some(prefix) => match dir_name.strip_prefix(prefix) {
                    Ok(base) => base,
                    Err(_) => dir_name,
                },
                None => dir_name,
            }
        } else {
            dir_name
        }
    };
    let core = encode_u8(printable_name.display().to_string().as_bytes());

    if dup_names.contains_key(&core) {
        let level = dup_names[&core];

        let mut folders = node.name.iter().rev();
        folders.next();
        let mut extra = VecDeque::new();
        for _ in (0..level) {
            extra.push_back(encode_u8(folders.next().unwrap().as_bytes()));
        }
        let h = extra.iter().fold(String::new(), |acc, entry| {
            acc + entry
        });
        // let helper = extra.make_contiguous().iter().collect::<Vec<&String>>();
        // let h = helper.join("/");

        // let mut folders = dir_name.iter().rev(); //.next().next().unwrap();
        // folders.next();
        // let par = encode_u8(folders.next().unwrap().as_bytes());
        format!("{core} ({h})")
    } else {
        core
    }
}

fn find_biggest_size_str(node: &DisplayNode, output_format: &str) -> usize {
    let mut mx = human_readable_number(node.size, output_format)
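The new `check_for_dup_names` / `get_printable_name` pair exists so that two entries sharing a final component, such as the added test fixtures `andy/dup_name/hello` and `dave/dup_name/hello`, can be told apart by appending a parent segment to the displayed name. A simplified sketch of the underlying idea (it only counts duplicate basenames; the real code walks `DisplayNode` levels and builds the disambiguating suffix):

```rust
use std::collections::HashMap;
use std::path::PathBuf;

// Count final path components and keep only the ones that occur more than once.
fn duplicate_basenames(paths: &[PathBuf]) -> HashMap<String, u32> {
    let mut counts: HashMap<String, u32> = HashMap::new();
    for p in paths {
        if let Some(name) = p.file_name().and_then(|n| n.to_str()) {
            *counts.entry(name.to_string()).or_insert(0) += 1;
        }
    }
    counts.retain(|_, c| *c > 1);
    counts
}

fn main() {
    let paths = vec![
        PathBuf::from("tests/test_dir_matching/andy/dup_name/hello"),
        PathBuf::from("tests/test_dir_matching/dave/dup_name/hello"),
    ];
    let dups = duplicate_basenames(&paths);
    assert_eq!(dups.get("hello"), Some(&2));
    // A display layer could then print "hello (andy)" next to "hello (dave)".
}
```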
@@ -211,8 +291,9 @@ fn find_longest_dir_name(
    indent: usize,
    terminal: usize,
    idd: &InitialDisplayData,
    dup_names: &HashMap<String, u32>,
) -> usize {
    let printable_name = get_printable_name(&node.name, idd.short_paths);
    let printable_name = get_printable_name(&node, idd.short_paths, dup_names);

    let longest = if idd.is_screen_reader {
        UnicodeWidthStr::width(&*printable_name) + 1
@@ -226,7 +307,7 @@ fn find_longest_dir_name(
    // each none root tree drawing is 2 more chars, hence we increment indent by 2
    node.children
        .iter()
        .map(|c| find_longest_dir_name(c, indent + 2, terminal, idd))
        .map(|c| find_longest_dir_name(c, indent + 2, terminal, idd, dup_names))
        .fold(longest, max)
}

@@ -281,26 +362,8 @@ fn clean_indentation_string(s: &str) -> String {
    is
}

fn get_printable_name<P: AsRef<Path>>(dir_name: &P, short_paths: bool) -> String {
    let dir_name = dir_name.as_ref();
    let printable_name = {
        if short_paths {
            match dir_name.parent() {
                Some(prefix) => match dir_name.strip_prefix(prefix) {
                    Ok(base) => base,
                    Err(_) => dir_name,
                },
                None => dir_name,
            }
        } else {
            dir_name
        }
    };
    encode_u8(printable_name.display().to_string().as_bytes())
}

fn pad_or_trim_filename(node: &DisplayNode, indent: &str, display_data: &DisplayData) -> String {
    let name = get_printable_name(&node.name, display_data.initial.short_paths);
    let name = get_printable_name(&node, display_data.initial.short_paths, &display_data.duplicate_names);
    let indent_and_name = format!("{indent} {name}");
    let width = UnicodeWidthStr::width(&*indent_and_name);

@@ -375,7 +438,7 @@ fn get_name_percent(
        let name_and_padding = pad_or_trim_filename(node, indent, display_data);
        (percents, name_and_padding)
    } else {
        let n = get_printable_name(&node.name, display_data.initial.short_paths);
        let n = get_printable_name(&node, display_data.initial.short_paths, &display_data.duplicate_names);
        let name = maybe_trim_filename(n, indent, display_data);
        ("".into(), name)
    }
@@ -3,6 +3,7 @@ use crate::node::FileTime;
use crate::node::Node;
use std::collections::BinaryHeap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::path::PathBuf;

@@ -16,9 +17,10 @@ pub struct AggregateData {
}

pub fn get_biggest(
    top_level_nodes: Vec<Node>,
    mut top_level_nodes: Vec<Node>,
    display_data: AggregateData,
    by_filetime: &Option<FileTime>,
    keep_collapsed: HashSet<PathBuf>,
) -> Option<DisplayNode> {
    if top_level_nodes.is_empty() {
        // perhaps change this, bring back Error object?
@@ -38,6 +40,7 @@ pub fn get_biggest(
        } else {
            top_level_nodes.iter().map(|node| node.size).sum()
        };

        root = Node {
            name: PathBuf::from("(total)"),
            size,
@@ -45,6 +48,7 @@ pub fn get_biggest(
            inode_device: None,
            depth: 0,
        };

        // Always include the base nodes if we add a 'parent' (total) node
        heap = always_add_children(&display_data, &root, heap);
    } else {
@@ -52,13 +56,20 @@ pub fn get_biggest(
        heap = add_children(&display_data, &root, heap);
    }

    Some(fill_remaining_lines(heap, &root, display_data))
    let result = fill_remaining_lines(
        heap,
        &root,
        display_data,
        keep_collapsed,
    );
    Some(result)
}

pub fn fill_remaining_lines<'a>(
    mut heap: BinaryHeap<&'a Node>,
    root: &'a Node,
    display_data: AggregateData,
    keep_collapsed: HashSet<PathBuf>,
) -> DisplayNode {
    let mut allowed_nodes = HashMap::new();

@@ -69,7 +80,9 @@ pub fn fill_remaining_lines<'a>(
                if !display_data.only_file || line.children.is_empty() {
                    allowed_nodes.insert(line.name.as_path(), line);
                }
                heap = add_children(&display_data, line, heap);
                if !keep_collapsed.contains(&line.name) {
                    heap = add_children(&display_data, line, heap);
                }
            }
            None => break,
        }
@@ -145,8 +158,9 @@ fn flat_rebuilder(allowed_nodes: HashMap<&Path, &Node>, current: &Node) -> Displ

fn build_display_node(mut new_children: Vec<DisplayNode>, current: &Node) -> DisplayNode {
    new_children.sort_by(|lhs, rhs| lhs.cmp(rhs).reverse());
    // println!("{:?}", current.name);
    DisplayNode {
        name: current.name.clone(),
        name: PathBuf::from(current.name.display().to_string()),
        size: current.size,
        children: new_children,
    }
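The only behavioural change in `fill_remaining_lines` is the `keep_collapsed.contains(&line.name)` guard: a node whose path is in the set still gets its own output line, but its children are never pushed onto the heap, so the subtree stays folded. A toy illustration of that pruning rule (the `Node` struct here is a stand-in, not dust's real type):

```rust
use std::collections::HashSet;
use std::path::PathBuf;

struct Node {
    name: PathBuf,
    children: Vec<Node>,
}

// Collect the lines that would be shown: a collapsed directory is printed,
// but its children are skipped, mirroring the keep_collapsed guard.
fn visible_lines(node: &Node, keep_collapsed: &HashSet<PathBuf>, out: &mut Vec<PathBuf>) {
    out.push(node.name.clone());
    if keep_collapsed.contains(&node.name) {
        return;
    }
    for child in &node.children {
        visible_lines(child, keep_collapsed, out);
    }
}

fn main() {
    let tree = Node {
        name: PathBuf::from("tests/test_dir"),
        children: vec![Node {
            name: PathBuf::from("tests/test_dir/many"),
            children: vec![Node {
                name: PathBuf::from("tests/test_dir/many/hello_file"),
                children: vec![],
            }],
        }],
    };
    let keep_collapsed: HashSet<PathBuf> = [PathBuf::from("tests/test_dir/many")].into();
    let mut out = Vec::new();
    visible_lines(&tree, &keep_collapsed, &mut out);
    // "many" is listed, but "hello_file" below it is not, which is what test_collapse asserts.
    assert!(out.iter().any(|p| p.ends_with("many")));
    assert!(!out.iter().any(|p| p.ends_with("hello_file")));
}
```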
src/main.rs (15 changed lines)
@@ -244,6 +244,19 @@ fn main() {
        indicator.spawn(output_format.clone())
    }

    let keep_collapsed: HashSet<PathBuf> = match options.get_many::<String>("collapse") {
        Some(collapse) => {
            let mut combined_dirs = HashSet::new();
            for collapse_dir in collapse {
                for target_dir in target_dirs.iter() {
                    combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
                }
            }
            combined_dirs
        }
        None => HashSet::new(),
    };

    let filter_modified_time = config.get_modified_time_operator(&options);
    let filter_accessed_time = config.get_accessed_time_operator(&options);
    let filter_changed_time = config.get_changed_time_operator(&options);
@@ -281,7 +294,7 @@ fn main() {
            depth,
            using_a_filter: !filter_regexs.is_empty() || !invert_filter_regexs.is_empty(),
        };
        get_biggest(top_level_nodes, agg_data, &by_filetime)
        get_biggest(top_level_nodes, agg_data, &by_filetime, keep_collapsed)
    }
};
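In `main`, each `--collapse` value is joined onto every target directory, so the set holds full paths rather than bare folder names. A small sketch of what that produces for a hypothetical invocation (`dust --collapse node_modules a b`):

```rust
use std::collections::HashSet;
use std::path::PathBuf;

fn main() {
    // Hypothetical invocation: dust --collapse node_modules a b
    let target_dirs = ["a", "b"];
    let collapse = ["node_modules"];

    let mut combined_dirs: HashSet<PathBuf> = HashSet::new();
    for collapse_dir in collapse {
        for target_dir in target_dirs {
            combined_dirs.insert(PathBuf::from(target_dir).join(collapse_dir));
        }
    }

    // Both a/node_modules and b/node_modules end up in the collapse set.
    assert!(combined_dirs.contains(&PathBuf::from("a").join("node_modules")));
    assert!(combined_dirs.contains(&PathBuf::from("b").join("node_modules")));
    assert_eq!(combined_dirs.len(), 2);
}
```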
@@ -78,6 +78,7 @@ pub struct RuntimeErrors {
    pub no_permissions: HashSet<String>,
    pub file_not_found: HashSet<String>,
    pub unknown_error: HashSet<String>,
    pub interrupted_error: i32,
    pub abort: bool,
}
tests/test_dir_matching/andy/dup_name/hello (new file, 0 lines)
tests/test_dir_matching/dave/dup_name/hello (new file, 0 lines)
@@ -254,3 +254,10 @@ pub fn test_force_color() {
    assert!(output.contains("\x1B[31m"));
    assert!(output.contains("\x1B[0m"));
}

#[test]
pub fn test_collapse() {
    let output = build_command(vec!["--collapse", "many", "tests/test_dir/"]);
    assert!(output.contains("many"));
    assert!(!output.contains("hello_file"));
}