Add ability to display total sizes when displaying listings or extracting archives
Nathan Fisher 2024-02-24 00:29:23 -05:00
parent 44e2741727
commit 3e9a34af7e
4 changed files with 79 additions and 6 deletions

@ -53,6 +53,11 @@ pub fn extract() -> Command {
.long("gid")
.value_parser(clap::value_parser!(u32))
.num_args(1),
Arg::new("total")
.help("Display the total size of extracted files")
.short('t')
.long("total")
.action(ArgAction::SetTrue),
Arg::new("archive")
.num_args(1)
.required_unless_present("stdin")
@ -154,6 +159,11 @@ pub fn list() -> Command {
.short('n')
.long("no-sort")
.action(ArgAction::SetTrue),
Arg::new("total")
.help("Display the total size if extracted")
.short('t')
.long("total")
.action(ArgAction::SetTrue),
Arg::new("archive")
.num_args(1)
.required(true)
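
For reference, a minimal sketch (not part of this diff) of how a SetTrue flag declared like the ones above is read back with get_flag(); the Command and argument names here are illustrative:

use clap::{Arg, ArgAction, Command};

fn main() {
    // Hypothetical stand-in for the commands above; only the "total" flag is shown.
    let cmd = Command::new("list").arg(
        Arg::new("total")
            .help("Display the total size of the files if extracted")
            .short('t')
            .long("total")
            .action(ArgAction::SetTrue),
    );
    // SetTrue flags read back as bool; an absent flag reads as false.
    let matches = cmd.get_matches_from(["list", "--total"]);
    assert!(matches.get_flag("total"));
}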


@ -1,14 +1,17 @@
#![warn(clippy::all, clippy::pedantic)]
use {
clap::ArgMatches,
haggis::{Algorithm, Listing, ListingKind, ListingStream, NodeStream, Message, StreamMessage},
haggis::{
Algorithm, HumanSize, Listing, ListingKind, ListingStream, Message, NodeStream,
StreamMessage,
},
indicatif::{ProgressBar, ProgressStyle},
std::{
fs::{self, File},
io::{self, BufReader, BufWriter},
os::fd::{AsRawFd, FromRawFd},
process,
sync::mpsc,
sync::{Arc, atomic::{AtomicU64, Ordering}, mpsc},
thread,
},
walkdir::WalkDir,
@ -153,6 +156,7 @@ fn create(matches: &ArgMatches) -> Result<(), haggis::Error> {
#[allow(clippy::similar_names)]
fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
let total = Arc::new(AtomicU64::new(0));
let file = matches.get_one::<String>("archive");
let uid = matches.get_one::<u32>("uid").copied();
let gid = matches.get_one::<u32>("gid").copied();
@ -174,12 +178,14 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
let dir = matches.get_one::<String>("change");
let (sender, receiver) = mpsc::channel();
let file = file.cloned().unwrap_or("stdin".to_string());
let total_ref = total.clone();
let handle = if zst {
let reader = Decoder::new(fd)?;
let mut stream = NodeStream::new(reader)?;
let handle = if matches.get_flag("quiet") {
Some(thread::spawn(move || {
progress(&file, &receiver, u64::from(stream.length));
let t = progress(&file, &receiver, u64::from(stream.length));
total_ref.store(t, Ordering::Relaxed);
Ok::<(), haggis::Error>(())
}))
} else {
@ -192,7 +198,8 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
let mut stream = NodeStream::new(reader)?;
let handle = if matches.get_flag("quiet") {
Some(thread::spawn(move || {
progress(&file, &receiver, u64::from(stream.length));
let t = progress(&file, &receiver, u64::from(stream.length));
total_ref.store(t, Ordering::Relaxed);
Ok::<(), haggis::Error>(())
}))
} else {
@ -204,7 +211,9 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
if let Some(handle) = handle {
match handle.join() {
Ok(_) => {
if matches.get_flag("quiet") {
if matches.get_flag("total") {
println!("{} extracted", HumanSize::from(total.load(Ordering::Relaxed)));
} else if matches.get_flag("quiet") {
println!("Archive extracted successfully");
}
Ok(())
@ -219,7 +228,8 @@ fn extract(matches: &ArgMatches) -> Result<(), haggis::Error> {
}
}
fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) {
fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) -> u64 {
let mut total: u64 = 0;
let pb = ProgressBar::new(len);
pb.set_style(ProgressStyle::with_template(TEMPLATE).unwrap());
pb.set_prefix("Extracting files");
@ -230,6 +240,7 @@ fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) {
let name = name.split('/').last().unwrap();
pb.set_prefix(format!("{name} extracted, {size} bytes"));
pb.inc(1);
total += size;
}
StreamMessage::LinkCreated { name, target } => {
let name = name.split('/').last().unwrap();
@ -256,6 +267,7 @@ fn progress(file: &str, receiver: &mpsc::Receiver<StreamMessage>, len: u64) {
}
}
}
total
}
fn print_listing(li: &Listing, matches: &ArgMatches) -> Result<(), haggis::Error> {
@ -299,6 +311,7 @@ fn list_unsorted(matches: &ArgMatches) -> Result<(), haggis::Error> {
}
fn list(matches: &ArgMatches) -> Result<(), haggis::Error> {
let mut total: u64 = 0;
let file = matches.get_one::<String>("archive").unwrap();
let mut fd = File::open(file)?;
let zst = matches.get_flag("zstd") || haggis::detect_zstd(&mut fd)?;
@ -320,6 +333,14 @@ fn list(matches: &ArgMatches) -> Result<(), haggis::Error> {
};
for li in list {
print_listing(&li, matches)?;
if matches.get_flag("total") {
if let ListingKind::Normal(s) = li.kind {
total += s;
}
}
}
if matches.get_flag("total") {
println!("Total: {}", HumanSize::from(total));
}
Ok(())
}
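
The extracted total is carried out of the progress thread through a shared atomic counter rather than a return value the caller could only see after the join. A self-contained sketch of that pattern, with placeholder data standing in for the sizes reported over the channel:

use std::{
    sync::{
        atomic::{AtomicU64, Ordering},
        Arc,
    },
    thread,
};

fn main() {
    let total = Arc::new(AtomicU64::new(0));
    let total_ref = total.clone();

    // The worker tallies sizes and publishes the sum through the atomic,
    // mirroring how extract() above recovers the total from progress().
    let handle = thread::spawn(move || {
        let sizes = [512u64, 2048, 4096]; // illustrative placeholder data
        total_ref.store(sizes.iter().sum(), Ordering::Relaxed);
    });

    handle.join().unwrap();
    println!("Total: {} bytes", total.load(Ordering::Relaxed));
}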

src/humansize.rs Normal file

@ -0,0 +1,40 @@
use std::fmt;
const KILOS: f64 = 1024.0;
const MEGAS: f64 = KILOS * 1024.0;
const GIGAS: f64 = MEGAS * 1024.0;
const TERAS: f64 = GIGAS * 1024.0;
#[derive(Clone, Copy, Debug)]
pub enum HumanSize {
Bytes(u64),
KiloBytes(f64),
MegaBytes(f64),
GigaBytes(f64),
TeraBytes(f64),
}
impl fmt::Display for HumanSize {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Bytes(b) => write!(f, "{b}"),
Self::KiloBytes(k) => write!(f, "{k:.1}K"),
Self::MegaBytes(m) => write!(f, "{m:.1}M"),
Self::GigaBytes(g) => write!(f, "{g:.1}G"),
Self::TeraBytes(t) => write!(f, "{t:.1}T"),
}
}
}
#[allow(clippy::cast_precision_loss)]
impl From<u64> for HumanSize {
fn from(value: u64) -> Self {
match value {
n if n as f64 > TERAS => Self::TeraBytes(n as f64 / TERAS),
n if n as f64 > GIGAS => Self::GigaBytes(n as f64 / GIGAS),
n if n as f64 > MEGAS => Self::MegaBytes(n as f64 / MEGAS),
n if n as f64 > KILOS => Self::KiloBytes(n as f64 / KILOS),
_ => Self::Bytes(value),
}
}
}
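
A quick illustration of the Display output the new type produces, assuming it is used through the public re-export added to lib.rs below (haggis::HumanSize):

use haggis::HumanSize;

fn main() {
    // Values at or below 1 KiB print as plain byte counts; larger values are
    // scaled to the nearest unit and printed with one decimal place.
    assert_eq!(HumanSize::from(512).to_string(), "512");
    assert_eq!(HumanSize::from(2048).to_string(), "2.0K");
    assert_eq!(HumanSize::from(3 * 1024 * 1024).to_string(), "3.0M");
    println!("Total: {}", HumanSize::from(1536)); // prints "Total: 1.5K"
}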


@ -16,6 +16,7 @@ mod checksum;
mod error;
mod file;
mod filetype;
mod humansize;
mod listing;
mod listing_stream;
pub(crate) mod nix;
@ -28,6 +29,7 @@ pub use {
error::Error,
file::File,
filetype::FileType,
humansize::HumanSize,
listing::Kind as ListingKind,
listing::Listing,
listing_stream::ListingStream,