Skip to content

Commit

Permalink
Just get rid of this limit; we can handle it without one
Browse files Browse the repository at this point in the history
  • Loading branch information
datdenkikniet committed Aug 4, 2023
1 parent 4154863 commit 74bbeac
Showing 1 changed file with 38 additions and 37 deletions.
75 changes: 38 additions & 37 deletions rust/src/pcube/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,6 @@ where
}
}

const MAX_PREFILL: u64 = 0x7FFF_FFFF_FFFF_FFFF;

impl<T> PCubeFile<T>
where
T: Read,
Expand Down Expand Up @@ -170,14 +168,28 @@ impl PCubeFile {

let [next_byte] = next_byte;

cube_count |= ((next_byte & 0x7F) as u64) << shift;
let value = (next_byte & 0x7F) as u64;

shift += 7;
if shift > 64 {
panic!("Cannot load possibly more than u64 cubes...");
let is_last_byte = next_byte & 0x80 == 0;
let is_single_bit = next_byte & 0x1 == 1 || next_byte == 0;
let overshift = shift > 56;

if shift > 63
|| overshift && !is_last_byte && !is_single_bit
|| overshift && !is_single_bit
{
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Cannot load more than u64 cubes",
));
}

if next_byte & 0x80 == 0 {
cube_count |= value.overflowing_shl(shift).0;
shift += 7;

println!("{:08X}", cube_count);

if is_last_byte {
break;
}
}
Expand All @@ -188,27 +200,16 @@ impl PCubeFile {
/// Write a leb128 value
///
/// If `prefill` is `true`, this function will always
/// write 9 bytes of data describing `number`.
///
/// This way, one can rewrite the data after it is known how many cubes
/// are present.
///
/// # Panics
/// This function panics if `prefill && number > MAX_PREFILL`
// TODO: this should probably take a `u128`?
/// write 10 bytes of data describing `number`.
fn write_leb128(mut number: u64, mut writer: impl Write, prefill: bool) -> std::io::Result<()> {
if prefill && number > MAX_PREFILL {
panic!("Cannot prefill LEB128 value longer than 9 bytes");
}

let mut ran_once = false;
let mut bytes_written = 0;
while number > 0 || !ran_once || (prefill && bytes_written < 9) {
while number > 0 || !ran_once || (prefill && bytes_written < 10) {
ran_once = true;
let mut next_byte = (number as u8) & 0x7F;
number >>= 7;

if number > 0 || (prefill && bytes_written != 8) {
if number > 0 || (prefill && bytes_written != 9) {
next_byte |= 0x80;
}

Expand All @@ -222,11 +223,7 @@ impl PCubeFile {
/// Write the header
///
/// If `prefill_len` is `true`, the length is _always_ written
/// as 9 bytes. This way, rewriting the header in-place is possible.
///
/// # Panics
/// This function panics if `prefill && number > MAX_PREFILL`
// TODO: this should probably take a `u128`?
/// as 10 bytes. This way, rewriting the header in-place is possible.
fn write_header(
mut write: impl Write,
magic: [u8; 4],
Expand Down Expand Up @@ -304,11 +301,7 @@ impl PCubeFile {
Self::write_header(&mut seekable, magic, is_canonical, compression, len, true)?;

let len = Self::write_impl(cubes, compression, &mut seekable)?;
let len = if len as u64 <= MAX_PREFILL {
Some(len as u64)
} else {
None
};
let len = Some(len as u64);

// Write magic and cube length at the end
seekable.rewind()?;
Expand Down Expand Up @@ -475,19 +468,27 @@ where
impl<T> AllUniquePolycubeIterator for AllUnique<T> where T: Read {}

#[test]
pub fn len() {
let values = [0, 1, 24, 150283, 0x7FFFF_FFFF, MAX_PREFILL];
pub fn leb128_len() {
let values = [0, 1, 24, 150283, 0x7FFFF_FFFF, u64::MAX - 1, u64::MAX];

for value in values {
let mut data = Vec::new();
PCubeFile::write_leb128(value, &mut data, true).unwrap();

assert_eq!(value, PCubeFile::read_leb128(&data[..]).unwrap());
}
}

#[should_panic]
#[test]
pub fn len_panics() {
let mut data = Vec::new();
PCubeFile::write_leb128(MAX_PREFILL + 1, &mut data, true).ok();
pub fn leb128_unparseable() {
let unparseable_values = [
&[0x81, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x02][..],
&[
0x81, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x01,
][..],
];

for unparseable in unparseable_values {
assert!(PCubeFile::read_leb128(unparseable).is_err());
}
}

0 comments on commit 74bbeac

Please sign in to comment.