
Commit

removed Serializable and Deserializable impls from slices and vectors
irakliyk committed Feb 5, 2024
1 parent a450b81 commit 175d279
Showing 15 changed files with 108 additions and 107 deletions.
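Every file below follows the same migration pattern: call sites that relied on the blanket `Serializable`/`Deserializable` impls for slices and vectors now go through the new `write_many` and `read_many` trait methods. A minimal before/after sketch of that pattern (hypothetical names `elements`, `reader`, and `num_elements`, with element type `E: Serializable + Deserializable`; not copied verbatim from any one hunk):

    // before (0.7.x): Vec<E> / &[E] implemented Serializable, and elements were
    // parsed in bulk via E::read_batch_from
    let mut bytes = Vec::new();
    bytes.write(elements);
    let parsed = E::read_batch_from(&mut reader, num_elements)?;

    // after (0.8.0): the writer/reader traits handle sequences explicitly; note
    // that write_many does not record the element count, so the caller must
    // know num_elements when reading back
    let mut bytes = Vec::new();
    bytes.write_many(&elements);
    let parsed: Vec<E> = reader.read_many(num_elements)?;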
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -1,7 +1,8 @@
# Changelog

## 0.7.5 (2023-12-21) - `utils/core` crate only
## 0.8.0 (TBD)
* Added variable-length serialization and deserialization for `usize` type (#238).
* [BREAKING] Removed `Serializable` and `Deserializable` implementations from slices and vectors (#239).

## 0.7.4 (2023-12-18) - `air` crate only
* Fixed a bug in `StarkProof` deserialization (#236).
10 changes: 5 additions & 5 deletions air/src/proof/commitments.rs
@@ -34,9 +34,9 @@ impl Commitments {
fri_roots: Vec<H::Digest>,
) -> Self {
let mut bytes = Vec::new();
bytes.write(trace_roots);
bytes.write_many(&trace_roots);
bytes.write(constraint_root);
bytes.write(fri_roots);
bytes.write_many(&fri_roots);
Commitments(bytes)
}

@@ -70,13 +70,13 @@ impl Commitments {
let mut reader = SliceReader::new(&self.0);

// parse trace commitments
let trace_commitments = H::Digest::read_batch_from(&mut reader, num_trace_segments)?;
let trace_commitments = reader.read_many(num_trace_segments)?;

// parse constraint evaluation commitment:
let constraint_commitment = H::Digest::read_from(&mut reader)?;
let constraint_commitment = reader.read()?;

// read FRI commitments (+ 1 for remainder polynomial commitment)
let fri_commitments = H::Digest::read_batch_from(&mut reader, num_fri_layers + 1)?;
let fri_commitments = reader.read_many(num_fri_layers + 1)?;

// make sure we consumed all available commitment bytes
if reader.has_more_bytes() {
2 changes: 1 addition & 1 deletion air/src/proof/mod.rs
@@ -182,7 +182,7 @@ impl Serializable for StarkProof {
self.context.write_into(target);
target.write_u8(self.num_unique_queries);
self.commitments.write_into(target);
self.trace_queries.write_into(target);
target.write_many(&self.trace_queries);
self.constraint_queries.write_into(target);
self.ood_frame.write_into(target);
self.fri_proof.write_into(target);
11 changes: 5 additions & 6 deletions air/src/proof/ood_frame.rs
@@ -37,7 +37,7 @@ impl OodFrame {
// UPDATERS
// --------------------------------------------------------------------------------------------

/// Updates the trace state portion of this out-of-domain frame. This also returns a compactified
/// Updates the trace state portion of this out-of-domain frame. This also returns a compacted
/// version of the out-of-domain frame with the rows interleaved. This is done so that reseeding
/// of the random coin needs to be done only once as opposed to once per each row.
///
@@ -58,7 +58,7 @@ impl OodFrame {
}
debug_assert!(frame_size <= u8::MAX as usize);
self.trace_states.write_u8(frame_size as u8);
result.write_into(&mut self.trace_states);
self.trace_states.write_many(&result);

result
}
@@ -72,7 +72,7 @@ impl OodFrame {
pub fn set_constraint_evaluations<E: FieldElement>(&mut self, evaluations: &[E]) {
assert!(self.evaluations.is_empty(), "constraint evaluations have already been set");
assert!(!evaluations.is_empty(), "cannot set to empty constraint evaluations");
evaluations.write_into(&mut self.evaluations)
self.evaluations.write_many(evaluations);
}

// PARSER
@@ -102,15 +102,14 @@ impl OodFrame {
// parse main and auxiliary trace evaluation frames
let mut reader = SliceReader::new(&self.trace_states);
let frame_size = reader.read_u8()? as usize;
let trace =
E::read_batch_from(&mut reader, (main_trace_width + aux_trace_width) * frame_size)?;
let trace = reader.read_many((main_trace_width + aux_trace_width) * frame_size)?;
if reader.has_more_bytes() {
return Err(DeserializationError::UnconsumedBytes);
}

// parse the constraint evaluations
let mut reader = SliceReader::new(&self.evaluations);
let evaluations = E::read_batch_from(&mut reader, num_evaluations)?;
let evaluations = reader.read_many(num_evaluations)?;
if reader.has_more_bytes() {
return Err(DeserializationError::UnconsumedBytes);
}
2 changes: 1 addition & 1 deletion air/src/proof/queries.rs
@@ -66,7 +66,7 @@ impl Queries {
elements_per_query,
"all queries must contain the same number of evaluations"
);
values.write(elements);
values.write_many(elements);
}

// serialize internal nodes of the batch Merkle proof; we care about internal nodes only
3 changes: 2 additions & 1 deletion air/src/proof/table.rs
@@ -6,6 +6,7 @@
use super::{DeserializationError, SliceReader, Vec};
use core::iter::FusedIterator;
use math::FieldElement;
use utils::ByteReader;

// CONSTANTS
// ================================================================================================
@@ -56,7 +57,7 @@ impl<E: FieldElement> Table<E> {
let mut reader = SliceReader::new(bytes);
let num_elements = num_rows * num_cols;
Ok(Self {
data: E::read_batch_from(&mut reader, num_elements)?,
data: reader.read_many(num_elements)?,
row_width: num_cols,
})
}
4 changes: 2 additions & 2 deletions crypto/src/hash/blake/mod.rs
@@ -53,7 +53,7 @@ impl<B: StarkField> ElementHasher for Blake3_256<B> {
// when elements' internal and canonical representations differ, we need to serialize
// them before hashing
let mut hasher = BlakeHasher::new();
hasher.write(elements);
hasher.write_many(elements);
ByteDigest(hasher.finalize())
}
}
@@ -106,7 +106,7 @@ impl<B: StarkField> ElementHasher for Blake3_192<B> {
// when elements' internal and canonical representations differ, we need to serialize
// them before hashing
let mut hasher = BlakeHasher::new();
hasher.write(elements);
hasher.write_many(elements);
let result = hasher.finalize();
ByteDigest(result[..24].try_into().unwrap())
}
2 changes: 1 addition & 1 deletion crypto/src/hash/sha/mod.rs
@@ -50,7 +50,7 @@ impl<B: StarkField> ElementHasher for Sha3_256<B> {
// when elements' internal and canonical representations differ, we need to serialize
// them before hashing
let mut hasher = ShaHasher::new();
hasher.write(elements);
hasher.write_many(elements);
ByteDigest(hasher.finalize())
}
}
4 changes: 2 additions & 2 deletions crypto/src/merkle/proofs.rs
@@ -7,7 +7,7 @@ use crate::{errors::MerkleTreeError, Hasher};
use utils::{
collections::{BTreeMap, Vec},
string::ToString,
ByteReader, Deserializable, DeserializationError, Serializable,
ByteReader, DeserializationError, Serializable,
};

// CONSTANTS
@@ -463,7 +463,7 @@ impl<H: Hasher> BatchMerkleProof<H> {
let num_digests = node_bytes.read_u8()? as usize;

// read the digests and add them to the node vector
let digests = H::Digest::read_batch_from(node_bytes, num_digests)?;
let digests = node_bytes.read_many(num_digests)?;
nodes.push(digests);
}

17 changes: 12 additions & 5 deletions fri/src/proof.rs
@@ -60,9 +60,13 @@ impl FriProof {
num_partitions.is_power_of_two(),
"number of partitions must be a power of two, but was {num_partitions}"
);

let mut remainder_bytes = Vec::with_capacity(E::ELEMENT_BYTES * remainder.len());
remainder_bytes.write_many(&remainder);

FriProof {
layers,
remainder: remainder.to_bytes(),
remainder: remainder_bytes,
num_partitions: num_partitions.trailing_zeros() as u8,
}
}
@@ -166,7 +170,7 @@ impl FriProof {
)));
}
let mut reader = SliceReader::new(&self.remainder);
let remainder = E::read_batch_from(&mut reader, num_elements).map_err(|err| {
let remainder = reader.read_many(num_elements).map_err(|err| {
DeserializationError::InvalidValue(format!("failed to parse FRI remainder: {err}"))
})?;
if reader.has_more_bytes() {
@@ -205,7 +209,7 @@ impl Deserializable for FriProof {
fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
// read layers
let num_layers = source.read_u8()? as usize;
let layers = FriProofLayer::read_batch_from(source, num_layers)?;
let layers = source.read_many(num_layers)?;

// read remainder
let num_remainder_bytes = source.read_u16()? as usize;
@@ -247,11 +251,14 @@ impl FriProofLayer {

// TODO: add debug check that values actually hash into the leaf nodes of the batch proof

let mut value_bytes = Vec::with_capacity(E::ELEMENT_BYTES * N * query_values.len());
value_bytes.write_many(&query_values);

// concatenate all query values and all internal Merkle proof nodes into vectors of bytes;
// we care about internal nodes only because leaf nodes can be reconstructed from hashes
// of query values
FriProofLayer {
values: query_values.to_bytes(),
values: value_bytes,
paths: merkle_proof.serialize_nodes(),
}
}
@@ -306,7 +313,7 @@ impl FriProofLayer {
// and also hash them to build leaf nodes of the batch Merkle proof
let mut reader = SliceReader::new(&self.values);
for query_hash in hashed_queries.iter_mut() {
let mut qe = E::read_batch_from(&mut reader, folding_factor)?;
let mut qe = reader.read_many(folding_factor)?;
*query_hash = H::hash_elements(&qe);
query_values.append(&mut qe);
}
9 changes: 4 additions & 5 deletions math/src/field/f128/tests.rs
@@ -4,8 +4,7 @@
// LICENSE file in the root directory of this source tree.

use super::{
AsBytes, BaseElement, ByteReader, Deserializable, DeserializationError, FieldElement,
StarkField, Vec, M,
AsBytes, BaseElement, ByteReader, DeserializationError, FieldElement, StarkField, Vec, M,
};
use crate::field::{ExtensionOf, QuadExtension};
use core::convert::TryFrom;
@@ -207,21 +206,21 @@ fn read_elements_from() {

// fill whole target
let mut reader = SliceReader::new(&bytes[..64]);
let result = BaseElement::read_batch_from(&mut reader, 4);
let result = reader.read_many(4);
assert!(result.is_ok());
assert_eq!(expected, result.unwrap());
assert!(!reader.has_more_bytes());

// partial number of elements
let mut reader = SliceReader::new(&bytes[..65]);
let result = BaseElement::read_batch_from(&mut reader, 4);
let result = reader.read_many(4);
assert!(result.is_ok());
assert_eq!(expected, result.unwrap());
assert!(reader.has_more_bytes());

// invalid element
let mut reader = SliceReader::new(&bytes[16..]);
let result = BaseElement::read_batch_from(&mut reader, 4);
let result = reader.read_many::<BaseElement>(4);
assert!(result.is_err());
if let Err(err) = result {
assert!(matches!(err, DeserializationError::InvalidValue(_)));
45 changes: 34 additions & 11 deletions utils/core/src/serde/byte_reader.rs
@@ -9,6 +9,10 @@ use super::{Deserializable, DeserializationError, String, Vec};
// ================================================================================================

/// Defines how primitive values are to be read from `Self`.
///
/// Whenever data is read from the reader using any of the `read_*` functions, the reader advances
/// to the next unread byte. If an error occurs, the reader is not rolled back to the state it was
/// in prior to the failed call.
pub trait ByteReader {
// REQUIRED METHODS
// --------------------------------------------------------------------------------------------
@@ -92,6 +96,15 @@ pub trait ByteReader {
Ok(u64::from_le_bytes(bytes))
}

/// Returns a u128 value read from `self` in little-endian byte order.
///
/// # Errors
/// Returns a [DeserializationError] if a u128 value could not be read from `self`.
fn read_u128(&mut self) -> Result<u128, DeserializationError> {
let bytes = self.read_array::<16>()?;
Ok(u128::from_le_bytes(bytes))
}

/// Returns a usize value read from `self` in [vint64](https://docs.rs/vint64/latest/vint64/)
/// format.
///
@@ -127,15 +140,6 @@ pub trait ByteReader {
Ok(result as usize)
}

/// Returns a u128 value read from `self` in little-endian byte order.
///
/// # Errors
/// Returns a [DeserializationError] if a u128 value could not be read from `self`.
fn read_u128(&mut self) -> Result<u128, DeserializationError> {
let bytes = self.read_array::<16>()?;
Ok(u128::from_le_bytes(bytes))
}

/// Returns a byte vector of the specified length read from `self`.
///
/// # Errors
@@ -158,15 +162,34 @@ pub trait ByteReader {

/// Reads a deserializable value from `self`.
///
/// # Panics
/// Panics if the value could not be read from `self`.
/// # Errors
/// Returns a [DeserializationError] if the specified value could not be read from `self`.
fn read<D>(&mut self) -> Result<D, DeserializationError>
where
Self: Sized,
D: Deserializable,
{
D::read_from(self)
}

/// Reads a sequence of bytes from `self`, attempts to deserialize these bytes into a vector
/// with the specified number of `D` elements, and returns the result.
///
/// # Errors
/// Returns a [DeserializationError] if the specified number of elements could not be read from
/// `self`.
fn read_many<D>(&mut self, num_elements: usize) -> Result<Vec<D>, DeserializationError>
where
Self: Sized,
D: Deserializable,
{
let mut result = Vec::with_capacity(num_elements);
for _ in 0..num_elements {
let element = D::read_from(self)?;
result.push(element)
}
Ok(result)
}
}

// SLICE READER
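For context, a self-contained round-trip sketch of the new `write_many`/`read_many` methods (assumes `ByteReader`, `ByteWriter`, `SliceReader`, and `DeserializationError` are exported from the `winter-utils` crate root, and that primitive integers such as `u64` implement `Serializable`/`Deserializable`; illustrative only, not part of this commit):

    use winter_utils::{ByteReader, ByteWriter, DeserializationError, SliceReader};

    fn roundtrip() -> Result<(), DeserializationError> {
        let values: Vec<u64> = vec![1, 2, 3];

        // Vec<u8> implements ByteWriter; write_many serializes each element in
        // order without writing the element count
        let mut bytes = Vec::new();
        bytes.write_many(&values);

        // the caller supplies the element count when reading back
        let mut reader = SliceReader::new(&bytes);
        let parsed: Vec<u64> = reader.read_many(values.len())?;
        assert_eq!(values, parsed);
        assert!(!reader.has_more_bytes());
        Ok(())
    }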
21 changes: 19 additions & 2 deletions utils/core/src/serde/byte_writer.rs
@@ -62,6 +62,14 @@ pub trait ByteWriter: Sized {
self.write_bytes(&value.to_le_bytes());
}

/// Writes a u128 value in little-endian byte order into `self`.
///
/// # Panics
/// Panics if the value could not be written into `self`.
fn write_u128(&mut self, value: u128) {
self.write_bytes(&value.to_le_bytes());
}

/// Writes a usize value in [vint64](https://docs.rs/vint64/latest/vint64/) format into `self`.
///
/// # Panics
@@ -87,6 +95,15 @@
fn write<S: Serializable>(&mut self, value: S) {
value.write_into(self)
}

/// Serializes all `elements` and writes the resulting bytes into `self`.
///
/// This method does not write any metadata (e.g. number of serialized elements) into `self`.
fn write_many<S: Serializable>(&mut self, elements: &[S]) {
for element in elements {
element.write_into(self);
}
}
}

// BYTE WRITER IMPLEMENTATIONS
@@ -105,8 +122,8 @@ impl ByteWriter for Vec<u8> {
// HELPER FUNCTIONS
// ================================================================================================

/// Returns the length of the value in vint64 enсoding.
pub fn encoded_len(value: usize) -> usize {
/// Returns the length of the value in vint64 encoding.
fn encoded_len(value: usize) -> usize {
let zeros = value.leading_zeros() as usize;
let len = zeros.saturating_sub(1) / 7;
9 - core::cmp::min(len, 8)
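As a sanity check on the `encoded_len` formula above, a small sketch exercising the vint64 path through `write_usize` and `read_usize` (assumes a 64-bit `usize` and that the expected byte counts follow that formula; illustrative only, not part of this commit):

    use winter_utils::{ByteReader, ByteWriter, SliceReader};

    fn vint64_lengths() {
        // per encoded_len: values below 2^7 take 1 byte, below 2^14 take 2 bytes, and so on
        for (value, expected_len) in [(0usize, 1usize), (127, 1), (128, 2), (16_383, 2), (16_384, 3)] {
            let mut bytes = Vec::new();
            bytes.write_usize(value);
            assert_eq!(bytes.len(), expected_len);

            // reading the value back consumes exactly those bytes
            let mut reader = SliceReader::new(&bytes);
            assert_eq!(reader.read_usize().unwrap(), value);
            assert!(!reader.has_more_bytes());
        }
    }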