Fix parallel archive creation, add archive streaming functions
parent 797aa16a0d
commit 9ed4d503d1
src/lib.rs | 51
@@ -1,7 +1,10 @@
 #![doc = include_str!("../README.md")]
+use std::{collections::HashMap, fs, io::{BufWriter, Write}, sync::Mutex};
 #[cfg(feature = "parallel")]
-use rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
-use std::{collections::HashMap, fs, io::BufWriter, sync::Mutex};
+use {
+    rayon::prelude::{IntoParallelRefIterator, ParallelIterator},
+    std::ops::DerefMut,
+};
 
 mod checksum;
 mod error;
@@ -22,39 +25,55 @@ pub use {
     stream::Stream,
 };
 
 /// Creates a haggis archive from a list of files
 pub fn create_archive(path: &str, files: Vec<&str>, algorithm: Algorithm) -> Result<(), Error> {
     let fd = fs::OpenOptions::new()
         .create(true)
         .truncate(true)
         .open(path)?;
     let mut writer = BufWriter::new(fd);
+    stream_archive(&mut writer, files, algorithm)?;
+    Ok(())
+}
+
+/// Streams a haggis archive over something which implements `Write`
+pub fn stream_archive<W: Write>(
+    mut writer: W,
+    files: Vec<&str>,
+    algorithm: Algorithm,
+) -> Result<(), Error> {
     let links = Mutex::new(HashMap::new());
-    for f in files.iter() {
+    for f in &files {
         let node = Node::from_path(f, algorithm, &links)?;
         node.write(&mut writer)?;
     }
     Ok(())
 }
 
 /// Creates a Haggis archive from a list of files, processing each file in parallel
 #[cfg(feature = "parallel")]
-pub fn par_create_archive(
-    path: &str,
-    files: Vec<String>,
+pub fn par_create_archive(path: &str, files: Vec<&str>, algorithm: Algorithm) -> Result<(), Error> {
+    let fd = fs::File::create(path)?;
+    let writer = BufWriter::new(fd);
+    par_stream_archive(writer, files, algorithm)?;
+    Ok(())
+}
+
+/// Streams a Haggis archive from a list of files, processing each file in parallel
+#[cfg(feature = "parallel")]
+pub fn par_stream_archive<W: Write + Send>(
+    writer: W,
+    files: Vec<&str>,
     algorithm: Algorithm,
 ) -> Result<(), Error> {
     let links = Mutex::new(HashMap::<u64, String>::new());
-    {
-        let _fd = fs::File::create(path)?;
-    }
+    let writer = Mutex::new(writer);
     files.par_iter().try_for_each(|f| {
         let node = Node::from_path(f, algorithm, &links)?;
-        let fd = fs::OpenOptions::new()
-            .create(false)
-            .truncate(false)
-            .append(true)
-            .open(path)?;
-        let mut writer = BufWriter::new(fd);
-        node.write(&mut writer)?;
+        if let Ok(mut writer) = writer.lock() {
+            let mut writer = writer.deref_mut();
+            node.write(&mut writer)?;
+        }
         Ok::<(), Error>(())
     })?;
     Ok(())
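For context, a minimal sketch of how a caller might drive the entry points added in this commit. Only the function signatures are taken from the diff above; the crate name `haggis`, the root re-exports, the `Algorithm::Sha256` variant, the `send_backup` helper, and the address and file names are illustrative assumptions.

// Illustrative caller of the new API; names marked as assumptions above.
use std::{io::BufWriter, net::TcpStream};

use haggis::{par_create_archive, stream_archive, Algorithm, Error};

fn send_backup() -> Result<(), Error> {
    let files = vec!["Cargo.toml", "src/lib.rs"];

    // Stream an archive over anything that implements `Write`,
    // here a TCP connection wrapped in a `BufWriter`.
    let socket = TcpStream::connect("127.0.0.1:7878")?;
    stream_archive(BufWriter::new(socket), files.clone(), Algorithm::Sha256)?;

    // Create an archive on disk, hashing each file on a rayon worker
    // (requires the `parallel` feature).
    par_create_archive("backup.haggis", files, Algorithm::Sha256)?;

    Ok(())
}

With the single shared `Mutex<W>` writer shown in the diff, workers hash files in parallel and serialize only the write of each finished node, instead of reopening the archive in append mode from every task.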