Merge #31
31: Gate logging behind an opt-in feature. r=gendx a=gendx

### Pull Request Overview

This pull request fixes #18. It adds a new optional feature, `enable_logging`, which turns logging on. When the feature is off (now the default), `lzma_trace!` and the other logging macros are no-ops.

This also removes the need for the `log` dependency (and all of its transitive dependencies) when the feature is off.
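
For illustration, here is a minimal sketch of the gating pattern, assuming the usual `macro_rules!`/`#[cfg]` approach. The macro name `lzma_debug!` matches the crate, but the bodies and the `main` usage below are only an approximation, not the crate's actual macro module:

```rust
// Hypothetical sketch: with `enable_logging` on, forward to the `log` crate;
// with it off, expand to nothing so `log` is never referenced.
#[cfg(feature = "enable_logging")]
macro_rules! lzma_debug {
    ($($arg:tt)*) => { log::debug!($($arg)*) };
}

#[cfg(not(feature = "enable_logging"))]
macro_rules! lzma_debug {
    ($($arg:tt)*) => {};
}

fn main() {
    // With `enable_logging` off, this call compiles away entirely and its
    // arguments are never evaluated.
    lzma_debug!("LZ {{ len: {}, dist: {} }}", 5, 42);
}
```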


### Benchmarks

```
$ cargo bench --features enable_logging
running 8 tests
test compress_65536                  ... bench:   4,330,374 ns/iter (+/- 79,850)
test compress_empty                  ... bench:       2,221 ns/iter (+/- 370)
test compress_hello                  ... bench:       3,217 ns/iter (+/- 225)
test decompress_after_compress_65536 ... bench:   5,835,689 ns/iter (+/- 96,176)
test decompress_after_compress_empty ... bench:      10,845 ns/iter (+/- 553)
test decompress_after_compress_hello ... bench:      12,425 ns/iter (+/- 689)
test decompress_big_file             ... bench:   9,750,036 ns/iter (+/- 460,925)
test decompress_huge_dict            ... bench:      12,619 ns/iter (+/- 424)

$ cargo bench
running 8 tests
test compress_65536                  ... bench:   2,377,692 ns/iter (+/- 15,781)
test compress_empty                  ... bench:       2,067 ns/iter (+/- 64)
test compress_hello                  ... bench:       2,733 ns/iter (+/- 358)
test decompress_after_compress_65536 ... bench:   3,815,474 ns/iter (+/- 72,097)
test decompress_after_compress_empty ... bench:      10,760 ns/iter (+/- 5,275)
test decompress_after_compress_hello ... bench:      11,865 ns/iter (+/- 75)
test decompress_big_file             ... bench:   7,416,332 ns/iter (+/- 40,416)
test decompress_huge_dict            ... bench:      12,082 ns/iter (+/- 238)
```

Co-authored-by: G. Endignoux <ggendx@gmail.com>
bors[bot] and gendx authored Mar 5, 2020
2 parents 28034ef + 800c377 commit 3fcf0bb
Showing 16 changed files with 155 additions and 61 deletions.
2 changes: 2 additions & 0 deletions .travis.yml
@@ -35,4 +35,6 @@ script:
- cargo clippy -- -D warnings -W clippy::match-same-arms
- cargo clippy --tests -- -D warnings -W clippy::match-same-arms
- cargo build --verbose
- cargo build --verbose --features enable_logging
- cargo test --verbose
- cargo test --verbose --features enable_logging
7 changes: 4 additions & 3 deletions Cargo.toml
@@ -14,10 +14,11 @@ edition = "2018"
[dependencies]
byteorder = "^1.0.0"
crc = "^1.0.0"
log = "^0.4.0"
log = { version = "^0.4.0", optional = true }
env_logger = { version = "^0.6.0", optional = true }

[dev-dependencies]
env_logger = "^0.6.0"
[features]
enable_logging = ["env_logger", "log"]

[badges]
travis-ci = { repository = "gendx/lzma-rs", branch = "master" }
9 changes: 9 additions & 0 deletions benches/lzma.rs
@@ -1,5 +1,6 @@
#![feature(test)]

#[cfg(feature = "enable_logging")]
extern crate env_logger;
extern crate lzma_rs;
extern crate test;
@@ -42,48 +43,56 @@ fn decompress_bench_file(compfile: &str, b: &mut Bencher) {

#[bench]
fn compress_empty(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
compress_bench(b"", b);
}

#[bench]
fn decompress_after_compress_empty(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
decompress_after_compress_bench(b"", b);
}

#[bench]
fn compress_hello(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
compress_bench(b"Hello world", b);
}

#[bench]
fn decompress_after_compress_hello(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
decompress_after_compress_bench(b"Hello world", b);
}

#[bench]
fn compress_65536(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
compress_bench(&[0; 0x10000], b);
}

#[bench]
fn decompress_after_compress_65536(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
decompress_after_compress_bench(&[0; 0x10000], b);
}

#[bench]
fn decompress_big_file(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
decompress_bench_file("tests/files/foo.txt.lzma", b);
}

#[bench]
fn decompress_huge_dict(b: &mut Bencher) {
#[cfg(feature = "enable_logging")]
let _ = env_logger::try_init();
let compressed: &[u8] = b"\x5d\x7f\x7f\x7f\x7f\xff\xff\xff\
\xff\xff\xff\xff\xff\x00\x24\x19\
6 changes: 3 additions & 3 deletions src/decode/lzbuffer.rs
@@ -92,7 +92,7 @@ where

// Fetch an LZ sequence (length, distance) from inside the buffer
fn append_lz(&mut self, len: usize, dist: usize) -> error::Result<()> {
debug!("LZ {{ len: {}, dist: {} }}", len, dist);
lzma_debug!("LZ {{ len: {}, dist: {} }}", len, dist);
let buf_len = self.buf.len();
if dist > buf_len {
return Err(error::Error::LZMAError(format!(
@@ -136,7 +136,7 @@ where
W: io::Write,
{
pub fn from_stream(stream: &'a mut W, dict_size: usize) -> Self {
info!("Dict size in LZ buffer: {}", dict_size);
lzma_info!("Dict size in LZ buffer: {}", dict_size);
Self {
stream,
buf: Vec::new(),
@@ -211,7 +211,7 @@ where

// Fetch an LZ sequence (length, distance) from inside the buffer
fn append_lz(&mut self, len: usize, dist: usize) -> error::Result<()> {
debug!("LZ {{ len: {}, dist: {} }}", len, dist);
lzma_debug!("LZ {{ len: {}, dist: {} }}", len, dist);
if dist > self.dict_size {
return Err(error::Error::LZMAError(format!(
"LZ distance {} is beyond dictionary size {}",
8 changes: 4 additions & 4 deletions src/decode/lzma.rs
@@ -43,7 +43,7 @@ impl LZMAParams {
let lp: u32 = pb % 5;
pb /= 5;

info!("Properties {{ lc: {}, lp: {}, pb: {} }}", lc, lp, pb);
lzma_info!("Properties {{ lc: {}, lp: {}, pb: {} }}", lc, lp, pb);

// Dictionary
let dict_size_provided = input.read_u32::<LittleEndian>().or_else(|e| {
@@ -58,7 +58,7 @@
dict_size_provided
};

info!("Dict size: {}", dict_size);
lzma_info!("Dict size: {}", dict_size);

// Unpacked size
let unpacked_size: Option<u64> = match options.unpacked_size {
@@ -83,7 +83,7 @@
UnpackedSize::UseProvided(x) => x,
};

info!("Unpacked size: {:?}", unpacked_size);
lzma_info!("Unpacked size: {:?}", unpacked_size);

let params = LZMAParams {
lc,
@@ -241,7 +241,7 @@ where
&mut self.is_match[(self.state << 4) + pos_state],
)? {
let byte: u8 = self.decode_literal(rangecoder)?;
debug!("Literal: {}", byte);
lzma_debug!("Literal: {}", byte);
self.output.append_literal(byte)?;

self.state = if self.state < 4 {
13 changes: 7 additions & 6 deletions src/decode/lzma2.rs
@@ -23,10 +23,10 @@ where
)))
})?;

info!("LZMA2 status: {}", status);
lzma_info!("LZMA2 status: {}", status);

if status == 0 {
info!("LZMA2 end of input");
lzma_info!("LZMA2 end of input");
break;
} else if status == 1 {
// uncompressed reset dict
@@ -97,7 +97,7 @@ where
})?;
let packed_size = (packed_size as u64) + 1;

info!(
lzma_info!(
"LZMA2 compressed block {{ unpacked_size: {}, packed_size: {}, reset_dict: {}, reset_state: {}, reset_props: {} }}",
unpacked_size,
packed_size,
@@ -143,7 +143,7 @@ where
)));
}

info!("Properties {{ lc: {}, lp: {}, pb: {} }}", lc, lp, pb);
lzma_info!("Properties {{ lc: {}, lp: {}, pb: {} }}", lc, lp, pb);
} else {
lc = decoder.lc;
lp = decoder.lp;
@@ -182,9 +182,10 @@ where
})?;
let unpacked_size = (unpacked_size as usize) + 1;

info!(
lzma_info!(
"LZMA2 uncompressed block {{ unpacked_size: {}, reset_dict: {} }}",
unpacked_size, reset_dict
unpacked_size,
reset_dict
);

if reset_dict {
8 changes: 4 additions & 4 deletions src/decode/rangecoder.rs
@@ -24,7 +24,7 @@ where
};
let _ = dec.stream.read_u8()?;
dec.code = dec.stream.read_u32::<BigEndian>()?;
debug!("0 {{ range: {:08x}, code: {:08x} }}", dec.range, dec.code);
lzma_debug!("0 {{ range: {:08x}, code: {:08x} }}", dec.range, dec.code);
Ok(dec)
}

@@ -35,12 +35,12 @@

#[inline]
fn normalize(&mut self) -> io::Result<()> {
trace!(" {{ range: {:08x}, code: {:08x} }}", self.range, self.code);
lzma_trace!(" {{ range: {:08x}, code: {:08x} }}", self.range, self.code);
if self.range < 0x0100_0000 {
self.range <<= 8;
self.code = (self.code << 8) ^ (self.stream.read_u8()? as u32);

debug!("+ {{ range: {:08x}, code: {:08x} }}", self.range, self.code);
lzma_debug!("+ {{ range: {:08x}, code: {:08x} }}", self.range, self.code);
}
Ok(())
}
@@ -70,7 +70,7 @@ where
pub fn decode_bit(&mut self, prob: &mut u16) -> io::Result<bool> {
let bound: u32 = (self.range >> 11) * (*prob as u32);

trace!(
lzma_trace!(
" bound: {:08x}, prob: {:04x}, bit: {}",
bound,
prob,
36 changes: 20 additions & 16 deletions src/decode/xz.rs
@@ -55,7 +55,7 @@ where
digested.read_u16::<BigEndian>()?
};
let check_method = get_check_method(flags)?;
info!("XZ check method: {:?}", check_method);
lzma_info!("XZ check method: {:?}", check_method);

let digest_crc32 = digest.sum32();

@@ -71,10 +71,10 @@
let index_size = loop {
let mut count_input = util::CountBufRead::new(input);
let header_size = count_input.read_u8()?;
info!("XZ block header_size byte: 0x{:02x}", header_size);
lzma_info!("XZ block header_size byte: 0x{:02x}", header_size);

if header_size == 0 {
info!("XZ records: {:?}", records);
lzma_info!("XZ records: {:?}", records);
check_index(&mut count_input, &records)?;
let index_size = count_input.count();
break index_size;
@@ -160,7 +160,7 @@ where
}

for (i, record) in records.iter().enumerate() {
info!("XZ index checking record {}: {:?}", i, record);
lzma_info!("XZ index checking record {}: {:?}", i, record);

let unpadded_size = get_multibyte(&mut digested)?;
if unpadded_size != record.unpadded_size as u64 {
@@ -183,9 +183,10 @@
// TODO: create padding parser function
let count = count_input.count();
let padding_size = ((count ^ 0x03) + 1) & 0x03;
info!(
lzma_info!(
"XZ index: {} byte(s) read, {} byte(s) of padding",
count, padding_size
count,
padding_size
);

{
Expand All @@ -201,7 +202,7 @@ where
}

let digest_crc32 = digest.sum32();
info!("XZ index checking digest 0x{:08x}", digest_crc32);
lzma_info!("XZ index checking digest 0x{:08x}", digest_crc32);

let crc32 = count_input.read_u32::<LittleEndian>()?;
if crc32 != digest_crc32 {
@@ -294,7 +295,7 @@ where
}

let unpacked_size = tmpbuf.len();
info!("XZ block decompressed to {} byte(s)", tmpbuf.len());
lzma_info!("XZ block decompressed to {} byte(s)", tmpbuf.len());

if let Some(expected_unpacked_size) = block_header.unpacked_size {
if (unpacked_size as u64) != expected_unpacked_size {
@@ -307,9 +308,10 @@

let count = count_input.count();
let padding_size = ((count ^ 0x03) + 1) & 0x03;
info!(
lzma_info!(
"XZ block: {} byte(s) read, {} byte(s) of padding",
count, padding_size
count,
padding_size
);
for _ in 0..padding_size {
let byte = count_input.read_u8()?;
@@ -395,7 +397,7 @@ where
let has_packed_size = flags & 0x40 != 0;
let has_unpacked_size = flags & 0x80 != 0;

info!(
lzma_info!(
"XZ block header: {{ header_size: {}, flags: {}, num_filters: {}, has_packed_size: {}, has_unpacked_size: {} }}",
header_size,
flags,
@@ -423,19 +425,21 @@ where
None
};

info!(
lzma_info!(
"XZ block header: {{ packed_size: {:?}, unpacked_size: {:?} }}",
packed_size, unpacked_size
packed_size,
unpacked_size
);

let mut filters: Vec<Filter> = vec![];
for _ in 0..num_filters {
let filter_id = get_filter_id(get_multibyte(input)?)?;
let size_of_properties = get_multibyte(input)?;

info!(
lzma_info!(
"XZ filter: {{ filter_id: {:?}, size_of_properties: {} }}",
filter_id, size_of_properties
filter_id,
size_of_properties
);

// Early abort to avoid allocating a large vector
@@ -454,7 +458,7 @@ where
)))
})?;

info!("XZ filter properties: {:?}", buf);
lzma_info!("XZ filter properties: {:?}", buf);

filters.push(Filter {
filter_id,
8 changes: 4 additions & 4 deletions src/encode/dumbencoder.rs
@@ -26,23 +26,23 @@ where

// Properties
let props = (LC + 9 * (LP + 5 * PB)) as u8;
info!("Properties {{ lc: {}, lp: {}, pb: {} }}", LC, LP, PB);
lzma_info!("Properties {{ lc: {}, lp: {}, pb: {} }}", LC, LP, PB);
stream.write_u8(props)?;

// Dictionary
info!("Dict size: {}", dict_size);
lzma_info!("Dict size: {}", dict_size);
stream.write_u32::<LittleEndian>(dict_size)?;

// Unpacked size
match &options.unpacked_size {
UnpackedSize::WriteToHeader(unpacked_size) => {
let value: u64 = match unpacked_size {
None => {
info!("Unpacked size: unknown");
lzma_info!("Unpacked size: unknown");
0xFFFF_FFFF_FFFF_FFFF
}
Some(x) => {
info!("Unpacked size: {}", x);
lzma_info!("Unpacked size: {}", x);
*x
}
};