Encoding - columns are specified per byte of input
parent 29172fb5f9
commit 53bbe41856
1 changed file with 12 additions and 8 deletions
@@ -52,7 +52,13 @@ impl<R: Read, W: Write> Encoder<R, W> {
                 break;
             }
             if n_bytes == 0 {
+                if let Some(w) = self.wrap {
+                    if total % w != 0 {
+                        writeln!(self.writer)?;
+                    }
+                } else {
+                    writeln!(self.writer)?;
+                }
                 break;
             }
             let mut idx = usize::from(ibuf[0] & 0b1111);
@@ -62,36 +68,34 @@ impl<R: Read, W: Write> Encoder<R, W> {
             match self.style {
                 Style::Plain => {
                     if let Some(w) = self.wrap {
-                        if total > 0 && total % w < 2 {
+                        if total > 0 && total % w == 0 {
                             writeln!(self.writer)?;
                         }
                     }
                     self.writer.write_all(&obuf)?;
-                    total += 2;
                 }
                 Style::Spaces => {
                     if let Some(w) = self.wrap {
-                        if total % w < 3 {
+                        if total > 0 && total % w == 0 {
                             writeln!(self.writer)?;
                         } else if total > 0 {
                             write!(self.writer, " ")?;
                         }
                     }
                     self.writer.write_all(&obuf)?;
-                    total += 3;
                 }
                 Style::SpacesWithHex => {
                     if let Some(w) = self.wrap {
-                        if total % w < 5 {
+                        if total > 0 && total % w == 0 {
                             writeln!(self.writer)?;
                         } else if total > 0 {
                             write!(self.writer, " ")?;
                         }
                     }
                     write!(self.writer, "0x{}{}", obuf[0], obuf[1])?;
-                    total += 5;
                 }
             }
+            total += 1;
         }
         Ok(())
     }
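Since wrap now counts input bytes rather than output characters, the rendered line width depends on the style: each input byte becomes 2 output characters in Plain, 2 plus a separating space in Spaces, and "0x" plus 2 plus a space in SpacesWithHex. A minimal standalone sketch of that relationship (the Style enum and line_width helper below are illustrative only, not part of the crate):

    // Rendered width of one full line of `wrap` input bytes, per style.
    // Separator spaces are written before every byte except the first on a
    // line, so the spaced styles have no trailing space.
    #[derive(Clone, Copy)]
    enum Style {
        Plain,
        Spaces,
        SpacesWithHex,
    }

    fn line_width(style: Style, wrap: usize) -> usize {
        match style {
            Style::Plain => 2 * wrap,             // "ab" per byte
            Style::Spaces => 3 * wrap - 1,        // "ab" plus a space between bytes
            Style::SpacesWithHex => 5 * wrap - 1, // "0xab" plus a space between bytes
        }
    }

    fn main() {
        // Some(38) under the new semantics matches the 76-column plain
        // output that Some(76) requested under the old semantics.
        assert_eq!(line_width(Style::Plain, 38), 76);
        assert_eq!(line_width(Style::Spaces, 38), 113);
        assert_eq!(line_width(Style::SpacesWithHex, 38), 189);
    }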
@@ -124,7 +128,7 @@ mod tests {
         let infile = File::open("testdata/lorem.txt").unwrap();
         let reader = BufReader::new(infile);
         let outfile = fs::read_to_string("testdata/lorem_b16.txt").unwrap();
-        let mut encoder = Encoder::new(reader, vec![], None, Some(76));
+        let mut encoder = Encoder::new(reader, vec![], None, Some(38));
         encoder.encode().unwrap();
         assert_eq!(encoder.writer, outfile.as_bytes());
     }
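The expected file testdata/lorem_b16.txt is unchanged, which only works for plain output: 38 input bytes times 2 hex characters give the same 76 columns that the old per-character wrap of 76 produced. A hedged companion test sketch along those lines, reusing the constructor shape and writer field from the test above and assuming that call yields plain output:

    // Sketch only: every full output line should span 38 * 2 = 76 columns;
    // the final line may be shorter.
    #[test]
    fn wrap_is_counted_in_input_bytes() {
        use std::fs::File;
        use std::io::BufReader;

        let infile = File::open("testdata/lorem.txt").unwrap();
        let reader = BufReader::new(infile);
        let mut encoder = Encoder::new(reader, vec![], None, Some(38));
        encoder.encode().unwrap();

        let text = String::from_utf8(encoder.writer).unwrap();
        let lines: Vec<&str> = text.lines().collect();
        for line in &lines[..lines.len().saturating_sub(1)] {
            assert_eq!(line.len(), 76);
        }
    }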