Add ability to display total sizes when displaying listings or extracting archives
Nathan Fisher 2024-02-24 00:29:23 -05:00
parent 44e2741727
commit 3e9a34af7e
4 changed files with 79 additions and 6 deletions

View File

@@ -53,6 +53,11 @@ pub fn extract() -> Command {
             .long("gid")
             .value_parser(clap::value_parser!(u32))
             .num_args(1),
+        Arg::new("total")
+            .help("Display the total size of extracted files")
+            .short('t')
+            .long("total")
+            .action(ArgAction::SetTrue),
         Arg::new("archive")
             .num_args(1)
             .required_unless_present("stdin")
@@ -154,6 +159,11 @@ pub fn list() -> Command {
            .short('n')
            .long("no-sort")
            .action(ArgAction::SetTrue),
+        Arg::new("total")
+            .help("Display the total size if extracted")
+            .short('t')
+            .long("total")
+            .action(ArgAction::SetTrue),
         Arg::new("archive")
             .num_args(1)
             .required(true)

View File

@@ -1,14 +1,17 @@
 #![warn(clippy::all, clippy::pedantic)]
 use {
     clap::ArgMatches,
-    haggis::{Algorithm, Listing, ListingKind, ListingStream, NodeStream, Message, StreamMessage},
+    haggis::{
+        Algorithm, HumanSize, Listing, ListingKind, ListingStream, Message, NodeStream,
+        StreamMessage,
+    },
     indicatif::{ProgressBar, ProgressStyle},
     std::{
         fs::{self, File},
         io::{self, BufReader, BufWriter},
         os::fd::{AsRawFd, FromRawFd},
         process,
-        sync::mpsc,
+        sync::{Arc, atomic::{AtomicU64, Ordering}, mpsc},
         thread,
     },
     walkdir::WalkDir,
@@ -153,6 +156,7 @@ fn create(matches: &ArgMatches) -> Result<(), haggis::Error> {
 #[allow(clippy::similar_names)]
 fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
+    let total = Arc::new(AtomicU64::new(0));
     let file = matches.get_one::<String>("archive");
     let uid = matches.get_one::<u32>("uid").copied();
     let gid = matches.get_one::<u32>("gid").copied();
@@ -174,12 +178,14 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
     let dir = matches.get_one::<String>("change");
     let (sender, receiver) = mpsc::channel();
     let file = file.cloned().unwrap_or("stdin".to_string());
+    let total_ref = total.clone();
     let handle = if zst {
         let reader = Decoder::new(fd)?;
         let mut stream = NodeStream::new(reader)?;
         let handle = if matches.get_flag("quiet") {
             Some(thread::spawn(move || {
-                progress(&file, &receiver, u64::from(stream.length));
+                let t = progress(&file, &receiver, u64::from(stream.length));
+                total_ref.store(t, Ordering::Relaxed);
                 Ok::<(), haggis::Error>(())
             }))
         } else {
@@ -192,7 +198,8 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
         let mut stream = NodeStream::new(reader)?;
         let handle = if matches.get_flag("quiet") {
             Some(thread::spawn(move || {
-                progress(&file, &receiver, u64::from(stream.length));
+                let t = progress(&file, &receiver, u64::from(stream.length));
+                total_ref.store(t, Ordering::Relaxed);
                 Ok::<(), haggis::Error>(())
             }))
         } else {
@@ -204,7 +211,9 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
     if let Some(handle) = handle {
         match handle.join() {
             Ok(_) => {
-                if matches.get_flag("quiet") {
+                if matches.get_flag("total") {
+                    println!("{} extracted", HumanSize::from(total.load(Ordering::Relaxed)));
+                } else if matches.get_flag("quiet") {
                     println!("Archive extracted successfully");
                 }
                 Ok(())
@ -219,7 +228,8 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
} }
} }
fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) { fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) -> u64 {
let mut total: u64 = 0;
let pb = ProgressBar::new(len); let pb = ProgressBar::new(len);
pb.set_style(ProgressStyle::with_template(TEMPLATE).unwrap()); pb.set_style(ProgressStyle::with_template(TEMPLATE).unwrap());
pb.set_prefix("Extracting files"); pb.set_prefix("Extracting files");
@@ -230,6 +240,7 @@ fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) {
                 let name = name.split('/').last().unwrap();
                 pb.set_prefix(format!("{name} extracted, {size} bytes"));
                 pb.inc(1);
+                total += size;
             }
             StreamMessage::LinkCreated { name, target } => {
                 let name = name.split('/').last().unwrap();
@@ -256,6 +267,7 @@ fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) {
             }
         }
     }
+    return total;
 }

 fn print_listing(li: &Listing, matches: &ArgMatches) -> Result<(), haggis::Error> {
@@ -299,6 +311,7 @@ fn list_unsorted(matches: &ArgMatches) -> Result<(), haggis::Error> {
 }

 fn list(matches: &ArgMatches) -> Result<(), haggis::Error> {
+    let mut total: u64 = 0;
     let file = matches.get_one::<String>("archive").unwrap();
     let mut fd = File::open(file)?;
     let zst = matches.get_flag("zstd") || haggis::detect_zstd(&mut fd)?;
@@ -320,6 +333,14 @@ fn list(matches: &ArgMatches) -> Result<(), haggis::Error> {
     };
     for li in list {
         print_listing(&li, matches)?;
+        if matches.get_flag("total") {
+            if let ListingKind::Normal(s) = li.kind {
+                total += s;
+            }
+        }
+    }
+    if matches.get_flag("total") {
+        println!("Total: {}", HumanSize::from(total));
     }
     Ok(())
 }
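The extract path above moves the byte count out of the progress thread through a shared Arc<AtomicU64>: progress() now returns the running total, the spawned closure stores it with Ordering::Relaxed, and the main thread reads it back after join(). Below is a minimal standalone sketch of that hand-off pattern; the names and the channel payload are illustrative, not taken from the commit.

    use std::{
        sync::{
            atomic::{AtomicU64, Ordering},
            mpsc, Arc,
        },
        thread,
    };

    fn main() {
        // Shared counter: one clone moves into the worker, the original stays
        // with the caller, mirroring `total` / `total_ref` in the diff above.
        let total = Arc::new(AtomicU64::new(0));
        let total_ref = total.clone();
        let (sender, receiver) = mpsc::channel::<u64>();

        // The worker sums the sizes it receives, then publishes the result
        // through the atomic, like progress() storing its return value.
        let handle = thread::spawn(move || {
            let mut sum = 0_u64;
            while let Ok(size) = receiver.recv() {
                sum += size;
            }
            total_ref.store(sum, Ordering::Relaxed);
        });

        for size in [1024_u64, 2048, 4096] {
            sender.send(size).unwrap();
        }
        drop(sender); // close the channel so recv() errors and the worker exits

        handle.join().unwrap();
        println!("total bytes: {}", total.load(Ordering::Relaxed));
    }

A single atomic is enough here because the value is written once, before the join, and only read afterwards.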

src/humansize.rs (new file, 40 lines)
View File

@@ -0,0 +1,40 @@
+use std::fmt;
+
+const KILOS: f64 = 1024.0;
+const MEGAS: f64 = KILOS * 1024.0;
+const GIGAS: f64 = MEGAS * 1024.0;
+const TERAS: f64 = GIGAS * 1024.0;
+
+#[derive(Clone, Copy, Debug)]
+pub enum HumanSize {
+    Bytes(u64),
+    KiloBytes(f64),
+    MegaBytes(f64),
+    GigaBytes(f64),
+    TeraBytes(f64),
+}
+
+impl fmt::Display for HumanSize {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::Bytes(b) => write!(f, "{b}"),
+            Self::KiloBytes(k) => write!(f, "{k:.1}K"),
+            Self::MegaBytes(m) => write!(f, "{m:.1}M"),
+            Self::GigaBytes(g) => write!(f, "{g:.1}G"),
+            Self::TeraBytes(t) => write!(f, "{t:.1}T"),
+        }
+    }
+}
+
+#[allow(clippy::cast_precision_loss)]
+impl From<u64> for HumanSize {
+    fn from(value: u64) -> Self {
+        match value {
+            n if n as f64 > TERAS => Self::TeraBytes(n as f64 / TERAS),
+            n if n as f64 > GIGAS => Self::GigaBytes(n as f64 / GIGAS),
+            n if n as f64 > MEGAS => Self::MegaBytes(n as f64 / MEGAS),
+            n if n as f64 > KILOS => Self::KiloBytes(n as f64 / KILOS),
+            _ => Self::Bytes(value),
+        }
+    }
+}
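Since the crate-root hunk at the end of this commit re-exports humansize::HumanSize, the conversion can be used directly from the library. The following is a small usage sketch, assuming haggis is pulled in as a dependency and using the thresholds defined above.

    use haggis::HumanSize;

    fn main() {
        // 1536 bytes is just past the 1024-byte threshold, so it renders as "1.5K".
        println!("{}", HumanSize::from(1536_u64));
        // Three gibibytes renders as "3.0G".
        println!("{}", HumanSize::from(3_u64 * 1024 * 1024 * 1024));
        // Values at or below 1024 stay as plain byte counts, e.g. "512".
        println!("{}", HumanSize::from(512_u64));
    }

The conversion uses binary (1024-based) units throughout, so 1536 bytes reports as 1.5K rather than about 1.5 kB.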

View File

@@ -16,6 +16,7 @@ mod checksum;
 mod error;
 mod file;
 mod filetype;
+mod humansize;
 mod listing;
 mod listing_stream;
 pub(crate) mod nix;
@@ -28,6 +29,7 @@ pub use {
     error::Error,
     file::File,
     filetype::FileType,
+    humansize::HumanSize,
     listing::Kind as ListingKind,
     listing::Listing,
     listing_stream::ListingStream,