diff --git a/air/src/proof/mod.rs b/air/src/proof/mod.rs index 35910819f..7307ba1d3 100644 --- a/air/src/proof/mod.rs +++ b/air/src/proof/mod.rs @@ -8,7 +8,7 @@ use alloc::vec::Vec; use core::cmp; -use crypto::Hasher; +use crypto::{Hasher, MerkleTree}; use fri::FriProof; use math::FieldElement; use utils::{ByteReader, Deserializable, DeserializationError, Serializable, SliceReader}; @@ -154,12 +154,8 @@ impl Proof { num_unique_queries: 0, commitments: Commitments::default(), trace_queries: Vec::new(), - constraint_queries: Queries::new::<_, DummyField>( - BatchMerkleProof::> { - leaves: Vec::new(), - nodes: Vec::new(), - depth: 0, - }, + constraint_queries: Queries::new::, DummyField, MerkleTree<_>>( + BatchMerkleProof::> { nodes: Vec::new(), depth: 0 }, vec![vec![DummyField::ONE]], ), ood_frame: OodFrame::default(), diff --git a/air/src/proof/queries.rs b/air/src/proof/queries.rs index 405545254..3c5250fc0 100644 --- a/air/src/proof/queries.rs +++ b/air/src/proof/queries.rs @@ -5,7 +5,7 @@ use alloc::vec::Vec; -use crypto::{BatchMerkleProof, ElementHasher, Hasher}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; use math::FieldElement; use utils::{ ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, SliceReader, @@ -17,21 +17,21 @@ use super::Table; // ================================================================================================ /// Decommitments to evaluations of a set of functions at multiple points. /// -/// Given a set of functions evaluated over a domain *D*, a commitment is assumed to be a Merkle -/// tree where a leaf at position *i* contains evaluations of all functions at *xi*. +/// Given a set of functions evaluated over a domain *D*, a commitment is assumed to be a vector +/// commitment where the *i*-th vector entry contains evaluations of all functions at *xi*. /// Thus, a query (i.e. 
a single decommitment) for position *i* includes evaluations of all -/// functions at *xi*, accompanied by a Merkle authentication path from the leaf *i* to -/// the tree root. +/// functions at *xi*, accompanied by an opening proof of leaf *i* against the vector +/// commitment string. /// /// This struct can contain one or more queries. In cases when more than one query is stored, -/// Merkle authentication paths are compressed to remove redundant nodes. +/// a batch opening proof is used in order to compress the individual opening proofs. /// -/// Internally, all Merkle paths and query values are stored as a sequence of bytes. Thus, to -/// retrieve query values and the corresponding Merkle authentication paths, -/// [parse()](Queries::parse) function should be used. +/// Internally, all opening proofs and query values are stored as a sequence of bytes. Thus, to +/// retrieve query values and their corresponding opening proofs, [parse()](Queries::parse) +/// function should be used. #[derive(Debug, Clone, Eq, PartialEq)] pub struct Queries { - paths: Vec, + opening_proof: Vec, values: Vec, } @@ -39,26 +39,23 @@ impl Queries { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Returns queries constructed from evaluations of a set of functions at some number of points - /// in a domain and their corresponding Merkle authentication paths. + /// in a domain and their corresponding batch opening proof. /// - /// For each evaluation point, the same number of values must be provided, and a hash of - /// these values must be equal to a leaf node in the corresponding Merkle authentication path. + /// For each evaluation point, the same number of values must be provided. /// /// # Panics /// Panics if: /// * No queries were provided (`query_values` is an empty vector). /// * Any of the queries does not contain any evaluations. /// * Not all queries contain the same number of evaluations. 
- pub fn new( - merkle_proof: BatchMerkleProof, + pub fn new>( + opening_proof: V::MultiProof, query_values: Vec>, ) -> Self { assert!(!query_values.is_empty(), "query values cannot be empty"); let elements_per_query = query_values[0].len(); assert_ne!(elements_per_query, 0, "a query must contain at least one evaluation"); - // TODO: add debug check that values actually hash into the leaf nodes of the batch proof - // concatenate all elements together into a single vector of bytes let num_queries = query_values.len(); let mut values = Vec::with_capacity(num_queries * elements_per_query * E::ELEMENT_BYTES); @@ -70,33 +67,31 @@ impl Queries { ); values.write_many(elements); } + let opening_proof = opening_proof.to_bytes(); - // serialize internal nodes of the batch Merkle proof; we care about internal nodes only - // because leaf nodes can be reconstructed from hashes of query values - let paths = merkle_proof.serialize_nodes(); - - Queries { paths, values } + Queries { opening_proof, values } } // PARSER // -------------------------------------------------------------------------------------------- - /// Convert internally stored bytes into a set of query values and the corresponding Merkle - /// authentication paths. + /// Convert internally stored bytes into a set of query values and the corresponding batch + /// opening proof. /// /// # Panics /// Panics if: /// * `domain_size` is not a power of two. /// * `num_queries` is zero. /// * `values_per_query` is zero. 
- pub fn parse( + pub fn parse( self, domain_size: usize, num_queries: usize, values_per_query: usize, - ) -> Result<(BatchMerkleProof, Table), DeserializationError> + ) -> Result<(V::MultiProof, Table), DeserializationError> where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { assert!(domain_size.is_power_of_two(), "domain size must be a power of two"); assert!(num_queries > 0, "there must be at least one query"); @@ -113,20 +108,27 @@ impl Queries { ))); } - // read bytes corresponding to each query, convert them into field elements, - // and also hash them to build leaf nodes of the batch Merkle proof + // read bytes corresponding to each query and convert them into field elements. let query_values = Table::::from_bytes(&self.values, num_queries, values_per_query)?; - let hashed_queries = query_values.rows().map(|row| H::hash_elements(row)).collect(); - // build batch Merkle proof - let mut reader = SliceReader::new(&self.paths); - let tree_depth = domain_size.ilog2() as u8; - let merkle_proof = BatchMerkleProof::deserialize(&mut reader, hashed_queries, tree_depth)?; + // build batch opening proof + let mut reader = SliceReader::new(&self.opening_proof); + let opening_proof = ::read_from(&mut reader)?; + + // check that the opening proof matches the domain length + if >::get_multiproof_domain_len(&opening_proof) != domain_size { + return Err(DeserializationError::InvalidValue(format!( + "expected a domain of size {} but was {}", + domain_size, + >::get_multiproof_domain_len(&opening_proof), + ))); + } + if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); } - Ok((merkle_proof, query_values)) + Ok((opening_proof, query_values)) } } @@ -137,17 +139,15 @@ impl Serializable for Queries { /// Serializes `self` and writes the resulting bytes into the `target`. 
fn write_into(&self, target: &mut W) { // write value bytes - target.write_u32(self.values.len() as u32); - target.write_bytes(&self.values); + self.values.write_into(target); // write path bytes - target.write_u32(self.paths.len() as u32); - target.write_bytes(&self.paths); + self.opening_proof.write_into(target); } /// Returns an estimate of how many bytes are needed to represent self. fn get_size_hint(&self) -> usize { - self.paths.len() + self.values.len() + 8 + self.opening_proof.len() + self.values.len() + 8 } } @@ -158,13 +158,11 @@ impl Deserializable for Queries { /// Returns an error of a valid query struct could not be read from the specified source. fn read_from(source: &mut R) -> Result { // read values - let num_value_bytes = source.read_u32()?; - let values = source.read_vec(num_value_bytes as usize)?; + let values = Vec::<_>::read_from(source)?; // read paths - let num_paths_bytes = source.read_u32()?; - let paths = source.read_vec(num_paths_bytes as usize)?; + let paths = Vec::<_>::read_from(source)?; - Ok(Queries { paths, values }) + Ok(Queries { opening_proof: paths, values }) } } diff --git a/crypto/src/commitment.rs b/crypto/src/commitment.rs new file mode 100644 index 000000000..1d2667f7a --- /dev/null +++ b/crypto/src/commitment.rs @@ -0,0 +1,86 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +// +// This source code is licensed under the MIT license found in the +// LICENSE file in the root directory of this source tree. + +use alloc::vec::Vec; +use core::fmt::Debug; + +use utils::{Deserializable, Serializable}; + +use crate::Hasher; + +/// A vector commitment (VC) scheme. +/// +/// This is a cryptographic primitive allowing one to commit, using a commitment string `com`, to +/// a vector of values (v_0, ..., v_{n-1}) such that one can later reveal the value at the i-th +/// position. 
+/// +/// This is achieved by providing the value `v_i` together with a proof `proof_i` such that anyone +/// posessing `com` can be convinced, with high confidence, that the claim is true. +/// +/// Vector commitment schemes usually have some batching properties in the sense that opening +/// proofs for a number of `(i, v_i)` can be batched together into one batch opening proof in order +/// to optimize both the proof size as well as the verification time. +/// +/// The current implementation restricts both of the commitment string as well as the leaf values +/// to be `H::Digest` where `H` is a type parameter such that `H: Hasher`. +pub trait VectorCommitment: Sized { + /// Options defining the VC i.e., public parameters. + type Options: Default; + /// Opening proof of some value at some position index. + type Proof: Clone + Serializable + Deserializable; + /// Batch opening proof of a number of {(i, v_i)}_{i ∈ S} for an index set. + type MultiProof: Serializable + Deserializable; + /// Error returned by the scheme. + type Error: Debug; + + /// Creates a commitment to a vector of values (v_0, ..., v_{n-1}) using the default + /// options. + fn new(items: Vec) -> Result { + Self::with_options(items, Self::Options::default()) + } + + /// Creates a commitment to a vector of values (v_0, ..., v_{n-1}) given a set of + /// options. + fn with_options(items: Vec, options: Self::Options) -> Result; + + /// Returns the commitment string to the committed values. + fn commitment(&self) -> H::Digest; + + /// Returns the length of the vector committed to for `Self`. + fn domain_len(&self) -> usize; + + /// Returns the length of the vector committed to for `Self::Proof`. + fn get_proof_domain_len(proof: &Self::Proof) -> usize; + + /// Returns the length of the vector committed to for `Self::MultiProof`. + fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize; + + /// Opens the value at a given index and provides a proof for the correctness of claimed value. 
+ fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error>; + + /// Opens the values at a given index set and provides a proof for the correctness of claimed + /// values. + #[allow(clippy::type_complexity)] + fn open_many( + &self, + indexes: &[usize], + ) -> Result<(Vec, Self::MultiProof), Self::Error>; + + /// Verifies that the claimed value is at the given index using a proof. + fn verify( + commitment: H::Digest, + index: usize, + item: H::Digest, + proof: &Self::Proof, + ) -> Result<(), Self::Error>; + + /// Verifies that the claimed values are at the given set of indices using a batch proof. + fn verify_many( + commitment: H::Digest, + indexes: &[usize], + items: &[H::Digest], + proof: &Self::MultiProof, + ) -> Result<(), Self::Error>; +} diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs index 122b5de24..ff29176bb 100644 --- a/crypto/src/lib.rs +++ b/crypto/src/lib.rs @@ -39,3 +39,6 @@ pub use random::{DefaultRandomCoin, RandomCoin}; mod errors; pub use errors::{MerkleTreeError, RandomCoinError}; + +mod commitment; +pub use commitment::VectorCommitment; diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index 5929c79d4..51b4a76dc 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -9,11 +9,11 @@ use alloc::{ }; use core::slice; -use crate::{errors::MerkleTreeError, hash::Hasher}; - mod proofs; pub use proofs::BatchMerkleProof; +use crate::{Hasher, MerkleTreeError, VectorCommitment}; + #[cfg(feature = "concurrent")] pub mod concurrent; @@ -79,13 +79,13 @@ mod tests; /// assert_eq!(leaves, tree.leaves()); /// /// // generate a proof -/// let proof = tree.prove(2).unwrap(); -/// assert_eq!(3, proof.len()); -/// assert_eq!(leaves[2], proof[0]); +/// let (leaf, proof) = tree.prove(2).unwrap(); +/// assert_eq!(2, proof.len()); +/// assert_eq!(leaves[2], leaf); /// /// // verify proof -/// assert!(MerkleTree::::verify(*tree.root(), 2, &proof).is_ok()); -/// assert!(MerkleTree::::verify(*tree.root(), 1, 
&proof).is_err()); +/// assert!(MerkleTree::::verify(*tree.root(), 2, leaf, &proof).is_ok()); +/// assert!(MerkleTree::::verify(*tree.root(), 1, leaf, &proof).is_err()); /// ``` #[derive(Debug)] pub struct MerkleTree { @@ -93,6 +93,10 @@ pub struct MerkleTree { leaves: Vec, } +/// Merkle tree opening consisting of a leaf value and a Merkle path leading from this leaf +/// up to the root (excluding the root itself). +pub type MerkleTreeOpening = (::Digest, Vec<::Digest>); + // MERKLE TREE IMPLEMENTATION // ================================================================================================ @@ -179,19 +183,19 @@ impl MerkleTree { // PROVING METHODS // -------------------------------------------------------------------------------------------- - /// Returns a Merkle path to a leaf at the specified `index`. + /// Returns a Merkle proof to a leaf at the specified `index`. /// - /// The leaf itself will be the first element in the path. + /// The leaf itself will be the first element of the returned tuple. /// /// # Errors /// Returns an error if the specified index is greater than or equal to the number of leaves /// in the tree. - pub fn prove(&self, index: usize) -> Result, MerkleTreeError> { + pub fn prove(&self, index: usize) -> Result, MerkleTreeError> { if index >= self.leaves.len() { return Err(MerkleTreeError::LeafIndexOutOfBounds(self.leaves.len(), index)); } - - let mut proof = vec![self.leaves[index], self.leaves[index ^ 1]]; + let leaf = self.leaves[index]; + let mut proof = vec![self.leaves[index ^ 1]]; let mut index = (index + self.nodes.len()) >> 1; while index > 1 { @@ -199,25 +203,25 @@ impl MerkleTree { index >>= 1; } - Ok(proof) + Ok((leaf, proof)) } - /// Computes Merkle paths for the provided indexes and compresses the paths into a single proof. + /// Computes Merkle proofs for the provided indexes, compresses the proofs into a single batch + /// and returns the batch proof alongside the leaves at the provided indexes. 
/// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Any of the provided indexes are greater than or equal to the number of leaves in the /// tree. /// * List of indexes contains duplicates. - pub fn prove_batch(&self, indexes: &[usize]) -> Result, MerkleTreeError> { + pub fn prove_batch( + &self, + indexes: &[usize], + ) -> Result<(Vec, BatchMerkleProof), MerkleTreeError> { if indexes.is_empty() { return Err(MerkleTreeError::TooFewLeafIndexes); } - if indexes.len() > proofs::MAX_PATHS { - return Err(MerkleTreeError::TooManyLeafIndexes(proofs::MAX_PATHS, indexes.len())); - } let index_map = map_indexes(indexes, self.depth())?; let indexes = normalize_indexes(indexes); @@ -265,13 +269,13 @@ impl MerkleTree { } } - Ok(BatchMerkleProof { leaves, nodes, depth: self.depth() as u8 }) + Ok((leaves, BatchMerkleProof { depth: self.depth() as u8, nodes })) } // VERIFICATION METHODS // -------------------------------------------------------------------------------------------- - /// Checks whether the `proof` for the specified `index` is valid. + /// Checks whether the `proof` for the given `leaf` at the specified `index` is valid. 
/// /// # Errors /// Returns an error if the specified `proof` (which is a Merkle path) does not resolve to the @@ -279,13 +283,18 @@ impl MerkleTree { pub fn verify( root: H::Digest, index: usize, + leaf: H::Digest, proof: &[H::Digest], ) -> Result<(), MerkleTreeError> { let r = index & 1; - let mut v = H::merge(&[proof[r], proof[1 - r]]); + let mut v = if r == 0 { + H::merge(&[leaf, proof[0]]) + } else { + H::merge(&[proof[0], leaf]) + }; - let mut index = (index + 2usize.pow((proof.len() - 1) as u32)) >> 1; - for &p in proof.iter().skip(2) { + let mut index = (index + 2usize.pow((proof.len()) as u32)) >> 1; + for &p in proof.iter().skip(1) { v = if index & 1 == 0 { H::merge(&[v, p]) } else { @@ -300,22 +309,23 @@ impl MerkleTree { Ok(()) } - /// Checks whether the batch proof contains Merkle paths for the of the specified `indexes`. + /// Checks whether the batch `proof` contains Merkle proofs resolving to `root` for + /// the provided `leaves` at the specified `indexes`. /// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Any of the specified `indexes` is greater than or equal to the number of leaves in the /// tree from which the batch proof was generated. /// * List of indexes contains duplicates. - /// * Any of the paths in the batch proof does not resolve to the specified `root`. + /// * Any of the proofs in the batch proof does not resolve to the specified `root`. pub fn verify_batch( root: &H::Digest, indexes: &[usize], + leaves: &[H::Digest], proof: &BatchMerkleProof, ) -> Result<(), MerkleTreeError> { - if *root != proof.get_root(indexes)? { + if *root != proof.get_root(indexes, leaves)? 
{ return Err(MerkleTreeError::InvalidProof); } Ok(()) @@ -385,3 +395,65 @@ fn normalize_indexes(indexes: &[usize]) -> Vec { } set.into_iter().collect() } + +// VECTOR COMMITMENT IMPLEMENTATION +// ================================================================================================ + +impl VectorCommitment for MerkleTree { + type Options = (); + + type Proof = Vec; + + type MultiProof = BatchMerkleProof; + + type Error = MerkleTreeError; + + fn with_options(items: Vec, _options: Self::Options) -> Result { + MerkleTree::new(items) + } + + fn commitment(&self) -> H::Digest { + *self.root() + } + + fn domain_len(&self) -> usize { + 1 << self.depth() + } + + fn get_proof_domain_len(proof: &Self::Proof) -> usize { + 1 << proof.len() + } + + fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize { + 1 << proof.depth + } + + fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error> { + self.prove(index) + } + + fn open_many( + &self, + indexes: &[usize], + ) -> Result<(Vec, Self::MultiProof), Self::Error> { + self.prove_batch(indexes) + } + + fn verify( + commitment: H::Digest, + index: usize, + item: H::Digest, + proof: &Self::Proof, + ) -> Result<(), Self::Error> { + MerkleTree::::verify(commitment, index, item, proof) + } + + fn verify_many( + commitment: H::Digest, + indexes: &[usize], + items: &[H::Digest], + proof: &Self::MultiProof, + ) -> Result<(), Self::Error> { + MerkleTree::::verify_batch(&commitment, indexes, items, proof) + } +} diff --git a/crypto/src/merkle/proofs.rs b/crypto/src/merkle/proofs.rs index 95dd82d96..71b70d858 100644 --- a/crypto/src/merkle/proofs.rs +++ b/crypto/src/merkle/proofs.rs @@ -3,33 +3,24 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. 
-use alloc::{collections::BTreeMap, string::ToString, vec::Vec}; +use alloc::{collections::BTreeMap, vec::Vec}; -use utils::{ByteReader, DeserializationError, Serializable}; +use utils::{ByteReader, Deserializable, DeserializationError, Serializable}; +use super::MerkleTreeOpening; use crate::{errors::MerkleTreeError, Hasher}; -// CONSTANTS -// ================================================================================================ - -pub(super) const MAX_PATHS: usize = 255; - // BATCH MERKLE PROOF // ================================================================================================ -/// Multiple Merkle paths aggregated into a single proof. +/// Multiple Merkle proofs aggregated into a single proof. /// /// The aggregation is done in a way which removes all duplicate internal nodes, and thus, /// it is possible to achieve non-negligible compression as compared to naively concatenating -/// individual Merkle paths. The algorithm is for aggregation is a variation of +/// individual Merkle proofs. The algorithm is for aggregation is a variation of /// [Octopus](https://eprint.iacr.org/2017/933). -/// -/// Currently, at most 255 paths can be aggregated into a single proof. This limitation is -/// imposed primarily for serialization purposes. #[derive(Debug, Clone, PartialEq, Eq)] pub struct BatchMerkleProof { - /// The leaves being proven - pub leaves: Vec, /// Hashes of Merkle Tree proof values above the leaf layer pub nodes: Vec>, /// Depth of the leaves @@ -37,31 +28,32 @@ pub struct BatchMerkleProof { } impl BatchMerkleProof { - /// Constructs a batch Merkle proof from individual Merkle authentication paths. + /// Constructs a batch Merkle proof from collection of single Merkle proofs. /// /// # Panics /// Panics if: - /// * No paths have been provided (i.e., `paths` is an empty slice). - /// * More than 255 paths have been provided. - /// * Number of paths is not equal to the number of indexes. - /// * Not all paths have the same length. 
- pub fn from_paths(paths: &[Vec], indexes: &[usize]) -> BatchMerkleProof { + /// * No proofs have been provided (i.e., `proofs` is an empty slice). + /// * Number of proofs is not equal to the number of indexes. + /// * Not all proofs have the same length. + pub fn from_single_proofs( + proofs: &[MerkleTreeOpening], + indexes: &[usize], + ) -> BatchMerkleProof { // TODO: optimize this to reduce amount of vector cloning. - assert!(!paths.is_empty(), "at least one path must be provided"); - assert!(paths.len() <= MAX_PATHS, "number of paths cannot exceed {MAX_PATHS}"); - assert_eq!(paths.len(), indexes.len(), "number of paths must equal number of indexes"); + assert!(!proofs.is_empty(), "at least one proof must be provided"); + assert_eq!(proofs.len(), indexes.len(), "number of proofs must equal number of indexes"); - let depth = paths[0].len(); + let depth = proofs[0].1.len(); - // sort indexes in ascending order, and also re-arrange paths accordingly - let mut path_map = BTreeMap::new(); - for (&index, path) in indexes.iter().zip(paths.iter().cloned()) { - assert_eq!(depth, path.len(), "not all paths have the same length"); - path_map.insert(index, path); + // sort indexes in ascending order, and also re-arrange proofs accordingly + let mut proof_map = BTreeMap::new(); + for (&index, proof) in indexes.iter().zip(proofs.iter().cloned()) { + assert_eq!(depth, proof.1.len(), "not all proofs have the same length"); + proof_map.insert(index, proof); } - let indexes = path_map.keys().cloned().collect::>(); - let paths = path_map.values().cloned().collect::>(); - path_map.clear(); + let indexes = proof_map.keys().cloned().collect::>(); + let proofs = proof_map.values().cloned().collect::>(); + proof_map.clear(); let mut leaves = vec![H::Digest::default(); indexes.len()]; let mut nodes: Vec> = Vec::with_capacity(indexes.len()); @@ -69,59 +61,60 @@ impl BatchMerkleProof { // populate values and the first layer of proof nodes let mut i = 0; while i < indexes.len() { - 
leaves[i] = paths[i][0]; + leaves[i] = proofs[i].0; + if indexes.len() > i + 1 && are_siblings(indexes[i], indexes[i + 1]) { - leaves[i + 1] = paths[i][1]; + leaves[i + 1] = proofs[i].1[0]; nodes.push(vec![]); i += 1; } else { - nodes.push(vec![paths[i][1]]); + nodes.push(vec![proofs[i].1[0]]); } - path_map.insert(indexes[i] >> 1, paths[i].clone()); + proof_map.insert(indexes[i] >> 1, proofs[i].clone()); i += 1; } // populate all remaining layers of proof nodes - for d in 2..depth { - let indexes = path_map.keys().cloned().collect::>(); - let mut next_path_map = BTreeMap::new(); + for d in 1..depth { + let indexes = proof_map.keys().cloned().collect::>(); + let mut next_proof_map = BTreeMap::new(); let mut i = 0; while i < indexes.len() { let index = indexes[i]; - let path = path_map.get(&index).unwrap(); + let proof = proof_map.get(&index).unwrap(); if indexes.len() > i + 1 && are_siblings(index, indexes[i + 1]) { i += 1; } else { - nodes[i].push(path[d]); + nodes[i].push(proof.1[d]); } - next_path_map.insert(index >> 1, path.clone()); + next_proof_map.insert(index >> 1, proof.clone()); i += 1; } - core::mem::swap(&mut path_map, &mut next_path_map); + core::mem::swap(&mut proof_map, &mut next_proof_map); } - BatchMerkleProof { leaves, nodes, depth: (depth - 1) as u8 } + BatchMerkleProof { nodes, depth: (depth) as u8 } } - /// Computes a node to which all Merkle paths aggregated in this proof resolve. + /// Computes a node to which all Merkle proofs aggregated in this proof resolve. /// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Any of the specified `indexes` is greater than or equal to the number of leaves in the /// tree for which this batch proof was generated. /// * List of indexes contains duplicates. /// * The proof does not resolve to a single root. 
- pub fn get_root(&self, indexes: &[usize]) -> Result { + pub fn get_root( + &self, + indexes: &[usize], + leaves: &[H::Digest], + ) -> Result { if indexes.is_empty() { return Err(MerkleTreeError::TooFewLeafIndexes); } - if indexes.len() > MAX_PATHS { - return Err(MerkleTreeError::TooManyLeafIndexes(MAX_PATHS, indexes.len())); - } let mut buf = [H::Digest::default(); 2]; let mut v = BTreeMap::new(); @@ -141,16 +134,16 @@ impl BatchMerkleProof { // copy values of leaf sibling leaf nodes into the buffer match index_map.get(&index) { Some(&index1) => { - if self.leaves.len() <= index1 { + if leaves.len() <= index1 { return Err(MerkleTreeError::InvalidProof); } - buf[0] = self.leaves[index1]; + buf[0] = leaves[index1]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; proof_pointers.push(0); }, None => { @@ -169,10 +162,10 @@ impl BatchMerkleProof { buf[0] = self.nodes[i][0]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; }, None => return Err(MerkleTreeError::InvalidProof), } @@ -242,27 +235,27 @@ impl BatchMerkleProof { v.remove(&1).ok_or(MerkleTreeError::InvalidProof) } - /// Computes the uncompressed Merkle paths which aggregate to this proof. + /// Computes the uncompressed individual Merkle proofs which aggregate to this batch proof. /// /// # Errors /// Returns an error if: /// * No indexes were provided (i.e., `indexes` is an empty slice). - /// * Number of provided indexes is greater than 255. /// * Number of provided indexes does not match the number of leaf nodes in the proof. 
- pub fn into_paths(self, indexes: &[usize]) -> Result>, MerkleTreeError> { + pub fn into_openings( + self, + leaves: &[H::Digest], + indexes: &[usize], + ) -> Result>, MerkleTreeError> { if indexes.is_empty() { return Err(MerkleTreeError::TooFewLeafIndexes); } - if indexes.len() > MAX_PATHS { - return Err(MerkleTreeError::TooManyLeafIndexes(MAX_PATHS, indexes.len())); - } - if indexes.len() != self.leaves.len() { + if indexes.len() != leaves.len() { return Err(MerkleTreeError::InvalidProof); } let mut partial_tree_map = BTreeMap::new(); - for (&i, leaf) in indexes.iter().zip(self.leaves.iter()) { + for (&i, leaf) in indexes.iter().zip(leaves.iter()) { partial_tree_map.insert(i + (1 << (self.depth)), *leaf); } @@ -285,16 +278,16 @@ impl BatchMerkleProof { // copy values of leaf sibling leaf nodes into the buffer match index_map.get(&index) { Some(&index1) => { - if self.leaves.len() <= index1 { + if leaves.len() <= index1 { return Err(MerkleTreeError::InvalidProof); } - buf[0] = self.leaves[index1]; + buf[0] = leaves[index1]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; proof_pointers.push(0); }, None => { @@ -313,10 +306,10 @@ impl BatchMerkleProof { buf[0] = self.nodes[i][0]; match index_map.get(&(index + 1)) { Some(&index2) => { - if self.leaves.len() <= index2 { + if leaves.len() <= index2 { return Err(MerkleTreeError::InvalidProof); } - buf[1] = self.leaves[index2]; + buf[1] = leaves[index2]; }, None => return Err(MerkleTreeError::InvalidProof), } @@ -386,83 +379,46 @@ impl BatchMerkleProof { original_indexes .iter() - .map(|&i| get_path::(i, &partial_tree_map, self.depth as usize)) + .map(|&i| get_proof::(i, &partial_tree_map, self.depth as usize)) .collect() } +} - // SERIALIZATION / DESERIALIZATION - // 
-------------------------------------------------------------------------------------------- +// SERIALIZATION / DESERIALIZATION +// -------------------------------------------------------------------------------------------- - /// Converts all internal proof nodes into a vector of bytes. - /// - /// # Panics - /// Panics if: - /// * The proof contains more than 255 Merkle paths. - /// * The Merkle paths consist of more than 255 nodes. - pub fn serialize_nodes(&self) -> Vec { - let mut result = Vec::new(); - - // record total number of node vectors - assert!(self.nodes.len() <= u8::MAX as usize, "too many paths"); - result.push(self.nodes.len() as u8); +impl Serializable for BatchMerkleProof { + /// Writes all internal proof nodes into the provided target. + fn write_into(&self, target: &mut W) { + target.write_u8(self.depth); + target.write_usize(self.nodes.len()); - // record each node vector as individual bytes for nodes in self.nodes.iter() { - assert!(nodes.len() <= u8::MAX as usize, "too many nodes"); - // record the number of nodes, and append all nodes to the paths buffer - result.push(nodes.len() as u8); - for node in nodes.iter() { - result.append(&mut node.to_bytes()); - } + // record the number of nodes, and append all nodes to the proof buffer + nodes.write_into(target); } - - result } +} - /// Parses internal nodes from the provided `node_bytes`, and constructs a batch Merkle proof - /// from these nodes, provided `leaves`, and provided tree `depth`. +impl Deserializable for BatchMerkleProof { + /// Parses internal nodes from the provided `source`, and constructs a batch Merkle proof + /// from these nodes. /// /// # Errors /// Returns an error if: - /// * No leaves were provided (i.e., `leaves` is an empty slice). - /// * Number of provided leaves is greater than 255. - /// * Tree `depth` was set to zero. - /// * `node_bytes` could not be deserialized into a valid set of internal nodes. 
- pub fn deserialize( - node_bytes: &mut R, - leaves: Vec, - depth: u8, - ) -> Result { - if depth == 0 { - return Err(DeserializationError::InvalidValue( - "tree depth must be greater than zero".to_string(), - )); - } - if leaves.is_empty() { - return Err(DeserializationError::InvalidValue( - "at lease one leaf must be provided".to_string(), - )); - } - if leaves.len() > MAX_PATHS { - return Err(DeserializationError::InvalidValue(format!( - "number of leaves cannot exceed {}, but {} were provided", - MAX_PATHS, - leaves.len() - ))); - } + /// * `source` could not be deserialized into a valid set of internal nodes. + fn read_from(source: &mut R) -> Result { + let depth = source.read_u8()?; + let num_node_vectors = source.read_usize()?; - let num_node_vectors = node_bytes.read_u8()? as usize; let mut nodes = Vec::with_capacity(num_node_vectors); for _ in 0..num_node_vectors { - // read the number of digests in the vector - let num_digests = node_bytes.read_u8()? as usize; - // read the digests and add them to the node vector - let digests = node_bytes.read_many(num_digests)?; + let digests = Vec::<_>::read_from(source)?; nodes.push(digests); } - Ok(BatchMerkleProof { leaves, nodes, depth }) + Ok(BatchMerkleProof { nodes, depth }) } } @@ -475,12 +431,12 @@ fn are_siblings(left: usize, right: usize) -> bool { left & 1 == 0 && right - 1 == left } -/// Computes the Merkle path from the computed (partial) tree. -pub fn get_path( +/// Computes the Merkle proof from the computed (partial) tree. 
+pub fn get_proof( index: usize, tree: &BTreeMap::Digest>, depth: usize, -) -> Result, MerkleTreeError> { +) -> Result, MerkleTreeError> { let mut index = index + (1 << depth); let leaf = if let Some(leaf) = tree.get(&index) { *leaf @@ -488,7 +444,7 @@ pub fn get_path( return Err(MerkleTreeError::InvalidProof); }; - let mut proof = vec![leaf]; + let mut proof = vec![]; while index > 1 { let leaf = if let Some(leaf) = tree.get(&(index ^ 1)) { *leaf @@ -500,5 +456,5 @@ pub fn get_path( index >>= 1; } - Ok(proof) + Ok((leaf, proof)) } diff --git a/crypto/src/merkle/tests.rs b/crypto/src/merkle/tests.rs index 6610eb908..f66c638a2 100644 --- a/crypto/src/merkle/tests.rs +++ b/crypto/src/merkle/tests.rs @@ -89,31 +89,29 @@ fn prove() { let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); let tree = MerkleTree::::new(leaves.clone()).unwrap(); - let proof = vec![leaves[1], leaves[0], hash_2x1(leaves[2], leaves[3])]; - assert_eq!(proof, tree.prove(1).unwrap()); + let proof = vec![leaves[0], hash_2x1(leaves[2], leaves[3])]; + assert_eq!((leaves[1], proof), tree.prove(1).unwrap()); - let proof = vec![leaves[2], leaves[3], hash_2x1(leaves[0], leaves[1])]; - assert_eq!(proof, tree.prove(2).unwrap()); + let proof = vec![leaves[3], hash_2x1(leaves[0], leaves[1])]; + assert_eq!((leaves[2], proof), tree.prove(2).unwrap()); // depth 5 let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); let tree = MerkleTree::::new(leaves.clone()).unwrap(); let proof = vec![ - leaves[1], leaves[0], hash_2x1(leaves[2], leaves[3]), hash_2x1(hash_2x1(leaves[4], leaves[5]), hash_2x1(leaves[6], leaves[7])), ]; - assert_eq!(proof, tree.prove(1).unwrap()); + assert_eq!((leaves[1], proof), tree.prove(1).unwrap()); let proof = vec![ - leaves[6], leaves[7], hash_2x1(leaves[4], leaves[5]), hash_2x1(hash_2x1(leaves[0], leaves[1]), hash_2x1(leaves[2], leaves[3])), ]; - assert_eq!(proof, tree.prove(6).unwrap()); + assert_eq!((leaves[6], proof), tree.prove(6).unwrap()); } #[test] @@ -121,20 +119,20 
@@ fn verify() { // depth 4 let leaves = Digest256::bytes_as_digests(&LEAVES4).to_vec(); let tree = MerkleTree::::new(leaves).unwrap(); - let proof = tree.prove(1).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 1, &proof).is_ok()); + let (leaf, proof) = tree.prove(1).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 1, leaf, &proof).is_ok()); - let proof = tree.prove(2).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 2, &proof).is_ok()); + let (leaf, proof) = tree.prove(2).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 2, leaf, &proof).is_ok()); // depth 5 - let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); - let tree = MerkleTree::::new(leaves).unwrap(); - let proof = tree.prove(1).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 1, &proof).is_ok()); + let leaf = Digest256::bytes_as_digests(&LEAVES8).to_vec(); + let tree = MerkleTree::::new(leaf).unwrap(); + let (leaf, proof) = tree.prove(1).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 1, leaf, &proof).is_ok()); - let proof = tree.prove(6).unwrap(); - assert!(MerkleTree::::verify(*tree.root(), 6, &proof).is_ok()); + let (leaf, proof) = tree.prove(6).unwrap(); + assert!(MerkleTree::::verify(*tree.root(), 6, leaf, &proof).is_ok()); } #[test] @@ -150,9 +148,9 @@ fn prove_batch() { hash_2x1(leaves[2], leaves[3]), hash_2x1(hash_2x1(leaves[4], leaves[5]), hash_2x1(leaves[6], leaves[7])), ]]; - assert_eq!(expected_values, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + assert_eq!(expected_values, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, proof.1.depth); // 2 indexes let proof = tree.prove_batch(&[1, 2]).unwrap(); @@ -164,9 +162,9 @@ fn prove_batch() { ], vec![leaves[3]], ]; - assert_eq!(expected_values, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + assert_eq!(expected_values, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, 
proof.1.depth); // 2 indexes on opposite sides let proof = tree.prove_batch(&[1, 6]).unwrap(); @@ -175,16 +173,16 @@ fn prove_batch() { vec![leaves[0], hash_2x1(leaves[2], leaves[3])], vec![leaves[7], hash_2x1(leaves[4], leaves[5])], ]; - assert_eq!(expected_values, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + assert_eq!(expected_values, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, proof.1.depth); // all indexes let proof = tree.prove_batch(&[0, 1, 2, 3, 4, 5, 6, 7]).unwrap(); let expected_nodes: Vec> = vec![vec![], vec![], vec![], vec![]]; - assert_eq!(leaves, proof.leaves); - assert_eq!(expected_nodes, proof.nodes); - assert_eq!(3, proof.depth); + assert_eq!(leaves, proof.0); + assert_eq!(expected_nodes, proof.1.nodes); + assert_eq!(3, proof.1.depth); } #[test] @@ -192,48 +190,68 @@ fn verify_batch() { let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); let tree = MerkleTree::::new(leaves).unwrap(); - let proof = tree.prove_batch(&[1]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1], &proof).is_ok()); - assert!(MerkleTree::verify_batch(tree.root(), &[2], &proof).is_err()); + let (leaves, proof) = tree.prove_batch(&[1]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1], &leaves, &proof).is_ok()); + assert!(MerkleTree::verify_batch(tree.root(), &[2], &leaves, &proof).is_err()); - let proof = tree.prove_batch(&[1, 2]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 2], &proof).is_ok()); - assert!(MerkleTree::verify_batch(tree.root(), &[1], &proof).is_err()); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 3], &proof).is_err()); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 2, 3], &proof).is_err()); + let (leaves, proof) = tree.prove_batch(&[1, 2]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 2], &leaves, &proof).is_ok()); + assert!(MerkleTree::verify_batch(tree.root(), &[1], &leaves, &proof).is_err()); + 
assert!(MerkleTree::verify_batch(tree.root(), &[1, 3], &leaves, &proof).is_err()); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 2, 3], &leaves, &proof).is_err()); - let proof = tree.prove_batch(&[1, 6]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 6], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&[1, 6]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 6], &leaves, &proof).is_ok()); - let proof = tree.prove_batch(&[1, 3, 6]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[1, 3, 6], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&[1, 3, 6]).unwrap(); + assert!(MerkleTree::verify_batch(tree.root(), &[1, 3, 6], &leaves, &proof).is_ok()); - let proof = tree.prove_batch(&[0, 1, 2, 3, 4, 5, 6, 7]).unwrap(); - assert!(MerkleTree::verify_batch(tree.root(), &[0, 1, 2, 3, 4, 5, 6, 7], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&[0, 1, 2, 3, 4, 5, 6, 7]).unwrap(); + assert!( + MerkleTree::verify_batch(tree.root(), &[0, 1, 2, 3, 4, 5, 6, 7], &leaves, &proof).is_ok() + ); } #[test] -fn verify_into_paths() { +fn verify_into_openings() { let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); let tree = MerkleTree::::new(leaves).unwrap(); - let proof1 = tree.prove(1).unwrap(); - let proof2 = tree.prove(2).unwrap(); - let proof1_2 = tree.prove_batch(&[1, 2]).unwrap(); - let result = proof1_2.into_paths(&[1, 2]).unwrap(); + let (_, proof1) = tree.prove(1).unwrap(); + let (_, proof2) = tree.prove(2).unwrap(); + let (leaves1_2, proof1_2) = tree.prove_batch(&[1, 2]).unwrap(); + let result = proof1_2.into_openings(&leaves1_2, &[1, 2]).unwrap(); + + assert_eq!(proof1, result[0].1); + assert_eq!(proof2, result[1].1); - assert_eq!(proof1, result[0]); - assert_eq!(proof2, result[1]); + let (_, proof3) = tree.prove(3).unwrap(); + let (_, proof4) = tree.prove(4).unwrap(); + let (_, proof6) = tree.prove(5).unwrap(); + let (leaves, proof3_4_6) = tree.prove_batch(&[3, 4, 5]).unwrap(); + let result 
= proof3_4_6.into_openings(&leaves, &[3, 4, 5]).unwrap(); - let proof3 = tree.prove(3).unwrap(); - let proof4 = tree.prove(4).unwrap(); - let proof6 = tree.prove(5).unwrap(); - let proof3_4_6 = tree.prove_batch(&[3, 4, 5]).unwrap(); - let result = proof3_4_6.into_paths(&[3, 4, 5]).unwrap(); + assert_eq!(proof3, result[0].1); + assert_eq!(proof4, result[1].1); + assert_eq!(proof6, result[2].1); +} + +#[test] +fn from_proofs() { + let leaves = Digest256::bytes_as_digests(&LEAVES8).to_vec(); + let tree = MerkleTree::::new(leaves).unwrap(); + let indices: Vec = vec![1, 2]; + let (_, proof1) = tree.prove_batch(&indices[..]).unwrap(); + + let mut proofs = Vec::new(); + for &idx in indices.iter() { + proofs.push(tree.prove(idx).unwrap()); + } + let proof2: BatchMerkleProof = + BatchMerkleProof::from_single_proofs(&proofs, &indices); - assert_eq!(proof3, result[0]); - assert_eq!(proof4, result[1]); - assert_eq!(proof6, result[2]); + assert!(proof1.nodes == proof2.nodes); + assert_eq!(proof1.depth, proof2.depth); } proptest! { @@ -242,8 +260,8 @@ proptest! { proof_indices in prop::collection::vec(any::(), 10..20) ) { for proof_index in proof_indices{ - let proof = tree.prove(proof_index.index(128)).unwrap(); - prop_assert!(MerkleTree::::verify(*tree.root(), proof_index.index(128), &proof).is_ok()) + let (leaves, proof) = tree.prove(proof_index.index(128)).unwrap(); + prop_assert!(MerkleTree::::verify(*tree.root(), proof_index.index(128), leaves, &proof).is_ok()) } } @@ -253,43 +271,43 @@ proptest! 
{ ) { let mut indices: Vec = proof_indices.iter().map(|idx| idx.index(128)).collect(); indices.sort_unstable(); indices.dedup(); - let proof = tree.prove_batch(&indices[..]).unwrap(); - prop_assert!(MerkleTree::verify_batch(tree.root(), &indices[..], &proof).is_ok()); + let (leaves, proof) = tree.prove_batch(&indices[..]).unwrap(); + prop_assert!(MerkleTree::verify_batch(tree.root(), &indices[..], &leaves, &proof).is_ok()); } #[test] - fn batch_proof_from_paths(tree in random_blake3_merkle_tree(128), + fn batch_proof_from_proofs(tree in random_blake3_merkle_tree(128), proof_indices in prop::collection::vec(any::(), 10..20) ) { let mut indices: Vec = proof_indices.iter().map(|idx| idx.index(128)).collect(); indices.sort_unstable(); indices.dedup(); - let proof1 = tree.prove_batch(&indices[..]).unwrap(); + let (_, proof1) = tree.prove_batch(&indices[..]).unwrap(); - let mut paths = Vec::new(); + let mut proofs = Vec::new(); for &idx in indices.iter() { - paths.push(tree.prove(idx).unwrap()); + proofs.push(tree.prove(idx).unwrap()); } - let proof2 = BatchMerkleProof::from_paths(&paths, &indices); + let proof2 = BatchMerkleProof::from_single_proofs(&proofs, &indices); prop_assert!(proof1 == proof2); } #[test] - fn into_paths(tree in random_blake3_merkle_tree(32), + fn into_openings(tree in random_blake3_merkle_tree(32), proof_indices in prop::collection::vec(any::(), 1..30) ) { let mut indices: Vec = proof_indices.iter().map(|idx| idx.index(32)).collect(); indices.sort_unstable(); indices.dedup(); - let proof1 = tree.prove_batch(&indices[..]).unwrap(); + let (values1, proof1) = tree.prove_batch(&indices[..]).unwrap(); - let mut paths_expected = Vec::new(); + let mut proofs_expected = Vec::new(); for &idx in indices.iter() { - paths_expected.push(tree.prove(idx).unwrap()); + proofs_expected.push(tree.prove(idx).unwrap().1); } - let paths = proof1.into_paths(&indices); + let proofs: Vec<_> = proof1.into_openings(&values1, &indices).unwrap().into_iter().map(|(_, proofs)| 
proofs).collect(); - prop_assert!(paths_expected == paths.unwrap()); + prop_assert!(proofs_expected == proofs); } } diff --git a/examples/src/fibonacci/fib2/mod.rs b/examples/src/fibonacci/fib2/mod.rs index 49fd8f00d..ddc6cf77e 100644 --- a/examples/src/fibonacci/fib2/mod.rs +++ b/examples/src/fibonacci/fib2/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,7 +87,7 @@ impl FibExample { impl Example for FibExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!( @@ -115,7 +115,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -125,7 +125,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/fib2/prover.rs b/examples/src/fibonacci/fib2/prover.rs index 696bcf93d..9fb3dd500 100644 --- a/examples/src/fibonacci/fib2/prover.rs +++ b/examples/src/fibonacci/fib2/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -50,14 +50,16 @@ impl FibProver { impl Prover for FibProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = FibAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/fib8/mod.rs b/examples/src/fibonacci/fib8/mod.rs index 28962df26..322079c21 100644 --- a/examples/src/fibonacci/fib8/mod.rs +++ b/examples/src/fibonacci/fib8/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,7 +87,7 @@ impl Fib8Example { impl Example for Fib8Example where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!( @@ -114,7 +114,7 @@ where fn verify(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -124,7 +124,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, 
&acceptable_options, diff --git a/examples/src/fibonacci/fib8/prover.rs b/examples/src/fibonacci/fib8/prover.rs index cc995a62d..425bfbd42 100644 --- a/examples/src/fibonacci/fib8/prover.rs +++ b/examples/src/fibonacci/fib8/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -65,14 +65,16 @@ impl Fib8Prover { impl Prover for Fib8Prover where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = Fib8Air; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/fib_small/mod.rs b/examples/src/fibonacci/fib_small/mod.rs index ce3fc229a..672605ac4 100644 --- a/examples/src/fibonacci/fib_small/mod.rs +++ b/examples/src/fibonacci/fib_small/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f64::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -98,7 +98,7 @@ impl FibExample { impl Example for FibExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!( @@ -126,7 +126,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -136,7 +136,7 @@ where fn 
verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/fib_small/prover.rs b/examples/src/fibonacci/fib_small/prover.rs index 40285a386..53ba615da 100644 --- a/examples/src/fibonacci/fib_small/prover.rs +++ b/examples/src/fibonacci/fib_small/prover.rs @@ -3,7 +3,7 @@ // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -16,12 +16,18 @@ use super::{ // FIBONACCI PROVER // ================================================================================================ -pub struct FibSmallProver { +pub struct FibSmallProver +where + H: Sync, +{ options: ProofOptions, _hasher: PhantomData, } -impl FibSmallProver { +impl FibSmallProver +where + H: Sync, +{ pub fn new(options: ProofOptions) -> Self { Self { options, _hasher: PhantomData } } @@ -47,7 +53,7 @@ impl FibSmallProver { } } -impl Prover for FibSmallProver +impl Prover for FibSmallProver where H: ElementHasher, { @@ -55,8 +61,10 @@ where type Air = FibSmall; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/mulfib2/mod.rs b/examples/src/fibonacci/mulfib2/mod.rs index e8da735e3..d7b3e11d8 100644 --- 
a/examples/src/fibonacci/mulfib2/mod.rs +++ b/examples/src/fibonacci/mulfib2/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -86,7 +86,7 @@ impl MulFib2Example { impl Example for MulFib2Example where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; @@ -114,7 +114,7 @@ where fn verify(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -124,7 +124,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/mulfib2/prover.rs b/examples/src/fibonacci/mulfib2/prover.rs index 6636b5f79..b1daba2fb 100644 --- a/examples/src/fibonacci/mulfib2/prover.rs +++ b/examples/src/fibonacci/mulfib2/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -46,14 +46,16 @@ impl MulFib2Prover { impl Prover for MulFib2Prover where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = MulFib2Air; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/fibonacci/mulfib8/mod.rs b/examples/src/fibonacci/mulfib8/mod.rs index 8289831a4..43bd27be0 100644 --- a/examples/src/fibonacci/mulfib8/mod.rs +++ b/examples/src/fibonacci/mulfib8/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -87,7 +87,7 @@ impl MulFib8Example { impl Example for MulFib8Example where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { let sequence_length = self.sequence_length; @@ -115,7 +115,7 @@ where fn verify(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, self.result, &acceptable_options, @@ -125,7 +125,7 @@ where fn verify_with_wrong_inputs(&self, proof: Proof) -> Result<(), VerifierError> { let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, 
MerkleTree>( proof, self.result + BaseElement::ONE, &acceptable_options, diff --git a/examples/src/fibonacci/mulfib8/prover.rs b/examples/src/fibonacci/mulfib8/prover.rs index f1c693e98..20297d0e5 100644 --- a/examples/src/fibonacci/mulfib8/prover.rs +++ b/examples/src/fibonacci/mulfib8/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -58,14 +58,16 @@ impl MulFib8Prover { impl Prover for MulFib8Prover where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = MulFib8Air; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/lamport/aggregate/mod.rs b/examples/src/lamport/aggregate/mod.rs index be91bd1dd..6dd2a8d02 100644 --- a/examples/src/lamport/aggregate/mod.rs +++ b/examples/src/lamport/aggregate/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -114,7 +114,7 @@ impl LamportAggregateExample { impl Example for LamportAggregateExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -144,7 +144,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - 
winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -160,7 +160,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/lamport/aggregate/prover.rs b/examples/src/lamport/aggregate/prover.rs index df27166d3..51d8e9c30 100644 --- a/examples/src/lamport/aggregate/prover.rs +++ b/examples/src/lamport/aggregate/prover.rs @@ -6,7 +6,7 @@ #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, TraceInfo, TracePolyTable, TraceTable, }; @@ -95,14 +95,16 @@ impl LamportAggregateProver { impl Prover for LamportAggregateProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = LamportAggregateAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/lamport/signature.rs b/examples/src/lamport/signature.rs index d4350b3dd..6818adfde 100644 --- a/examples/src/lamport/signature.rs +++ b/examples/src/lamport/signature.rs @@ -5,11 +5,9 @@ use std::cmp::Ordering; +use core_utils::Serializable; use rand_utils::prng_vector; -use winterfell::{ - math::{fields::f128::BaseElement, FieldElement, StarkField}, - Serializable, -}; +use winterfell::math::{fields::f128::BaseElement, FieldElement, StarkField}; use super::rescue::Rescue128; diff --git a/examples/src/lamport/threshold/mod.rs b/examples/src/lamport/threshold/mod.rs index 
33eaf8cbd..c64fa7755 100644 --- a/examples/src/lamport/threshold/mod.rs +++ b/examples/src/lamport/threshold/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, get_power_series, FieldElement, StarkField}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -112,7 +112,7 @@ impl LamportThresholdExample { impl Example for LamportThresholdExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -152,7 +152,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -168,7 +168,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/lamport/threshold/prover.rs b/examples/src/lamport/threshold/prover.rs index 5b7e76217..f5c9c748b 100644 --- a/examples/src/lamport/threshold/prover.rs +++ b/examples/src/lamport/threshold/prover.rs @@ -8,7 +8,7 @@ use std::collections::HashMap; #[cfg(feature = "concurrent")] use winterfell::iterators::*; use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, TraceInfo, TracePolyTable, TraceTable, }; @@ -137,14 +137,16 @@ impl LamportThresholdProver { impl Prover for LamportThresholdProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = LamportThresholdAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; 
type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/lamport/threshold/signature.rs b/examples/src/lamport/threshold/signature.rs index 6fc7c0894..ec579d420 100644 --- a/examples/src/lamport/threshold/signature.rs +++ b/examples/src/lamport/threshold/signature.rs @@ -78,6 +78,9 @@ impl AggPublicKey { /// Returns a Merkle path to the specified leaf. pub fn get_leaf_path(&self, index: usize) -> Vec { - self.tree.prove(index).unwrap() + let (leaf, path) = self.tree.prove(index).unwrap(); + let mut result = vec![leaf]; + result.extend_from_slice(&path); + result } } diff --git a/examples/src/merkle/mod.rs b/examples/src/merkle/mod.rs index 0538716f8..6b8771218 100644 --- a/examples/src/merkle/mod.rs +++ b/examples/src/merkle/mod.rs @@ -82,7 +82,10 @@ impl MerkleExample { // compute Merkle path form the leaf specified by the index let now = Instant::now(); - let path = tree.prove(index).unwrap(); + let (leaf, path) = tree.prove(index).unwrap(); + let mut result = vec![leaf]; + result.extend_from_slice(&path); + println!( "Computed Merkle path from leaf {} to root {} in {} ms", index, @@ -95,7 +98,7 @@ impl MerkleExample { tree_root: *tree.root(), value, index, - path, + path: result, _hasher: PhantomData, } } @@ -106,7 +109,7 @@ impl MerkleExample { impl Example for MerkleExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -134,7 +137,7 @@ where let pub_inputs = PublicInputs { tree_root: self.tree_root.to_elements() }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -146,7 +149,7 @@ where let pub_inputs = PublicInputs { tree_root: [tree_root[1], tree_root[0]] }; let 
acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/merkle/prover.rs b/examples/src/merkle/prover.rs index 10070279e..db6d7f407 100644 --- a/examples/src/merkle/prover.rs +++ b/examples/src/merkle/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -99,14 +99,16 @@ impl MerkleProver { impl Prover for MerkleProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = MerkleAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/rescue/mod.rs b/examples/src/rescue/mod.rs index 7f4e3e20b..5534625d5 100644 --- a/examples/src/rescue/mod.rs +++ b/examples/src/rescue/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -94,7 +94,7 @@ impl RescueExample { impl Example for RescueExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -120,7 +120,7 @@ where let pub_inputs = PublicInputs { seed: self.seed, result: self.result }; let acceptable_options = 
winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -134,7 +134,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/rescue/prover.rs b/examples/src/rescue/prover.rs index 5fc2224b5..050838af6 100644 --- a/examples/src/rescue/prover.rs +++ b/examples/src/rescue/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -65,14 +65,16 @@ impl RescueProver { impl Prover for RescueProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = RescueAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/rescue_raps/mod.rs b/examples/src/rescue_raps/mod.rs index 4ee52b480..533298097 100644 --- a/examples/src/rescue_raps/mod.rs +++ b/examples/src/rescue_raps/mod.rs @@ -9,7 +9,7 @@ use std::time::Instant; use rand_utils::rand_array; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, ExtensionOf, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -107,7 +107,7 @@ impl RescueRapsExample { impl Example for 
RescueRapsExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { // generate the execution trace @@ -134,7 +134,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -146,7 +146,7 @@ where let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/rescue_raps/prover.rs b/examples/src/rescue_raps/prover.rs index 2be9afafa..7adee9bbb 100644 --- a/examples/src/rescue_raps/prover.rs +++ b/examples/src/rescue_raps/prover.rs @@ -5,7 +5,7 @@ use core_utils::uninit_vector; use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, }; @@ -94,14 +94,16 @@ impl RescueRapsProver { impl Prover for RescueRapsProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = RescueRapsAir; type Trace = RapTraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/utils/rescue.rs b/examples/src/utils/rescue.rs index 323d44bf0..e09cb094e 100644 --- a/examples/src/utils/rescue.rs +++ b/examples/src/utils/rescue.rs @@ -5,10 +5,10 @@ use core::slice; +use core_utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; use winterfell::{ crypto::{Digest, Hasher}, math::{fields::f128::BaseElement, FieldElement}, - ByteReader, ByteWriter, 
Deserializable, DeserializationError, Serializable, }; use crate::utils::{are_equal, EvaluationResult}; diff --git a/examples/src/vdf/exempt/mod.rs b/examples/src/vdf/exempt/mod.rs index 766adb5e9..cc1dd53e9 100644 --- a/examples/src/vdf/exempt/mod.rs +++ b/examples/src/vdf/exempt/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -86,7 +86,7 @@ impl VdfExample { impl Example for VdfExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); @@ -111,7 +111,7 @@ where let pub_inputs = VdfInputs { seed: self.seed, result: self.result }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -125,7 +125,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git a/examples/src/vdf/exempt/prover.rs b/examples/src/vdf/exempt/prover.rs index af50e49a3..cc5d3e8e8 100644 --- a/examples/src/vdf/exempt/prover.rs +++ b/examples/src/vdf/exempt/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. 
use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -46,14 +46,16 @@ impl VdfProver { impl Prover for VdfProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = VdfAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/examples/src/vdf/regular/mod.rs b/examples/src/vdf/regular/mod.rs index 7d69bc24b..3cdcaba3d 100644 --- a/examples/src/vdf/regular/mod.rs +++ b/examples/src/vdf/regular/mod.rs @@ -8,7 +8,7 @@ use std::time::Instant; use tracing::{field, info_span}; use winterfell::{ - crypto::{DefaultRandomCoin, ElementHasher}, + crypto::{DefaultRandomCoin, ElementHasher, MerkleTree}, math::{fields::f128::BaseElement, FieldElement}, Proof, ProofOptions, Prover, Trace, VerifierError, }; @@ -83,7 +83,7 @@ impl VdfExample { impl Example for VdfExample where - H: ElementHasher, + H: ElementHasher + Sync, { fn prove(&self) -> Proof { println!("Generating proof for executing a VDF function for {} steps", self.num_steps); @@ -108,7 +108,7 @@ where let pub_inputs = VdfInputs { seed: self.seed, result: self.result }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, @@ -122,7 +122,7 @@ where }; let acceptable_options = winterfell::AcceptableOptions::OptionSet(vec![proof.options().clone()]); - winterfell::verify::>( + winterfell::verify::, MerkleTree>( proof, pub_inputs, &acceptable_options, diff --git 
a/examples/src/vdf/regular/prover.rs b/examples/src/vdf/regular/prover.rs index 12f272bb2..c880611ff 100644 --- a/examples/src/vdf/regular/prover.rs +++ b/examples/src/vdf/regular/prover.rs @@ -4,7 +4,7 @@ // LICENSE file in the root directory of this source tree. use winterfell::{ - matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, + crypto::MerkleTree, matrix::ColMatrix, AuxRandElements, ConstraintCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, StarkDomain, Trace, TraceInfo, TracePolyTable, TraceTable, }; @@ -43,14 +43,16 @@ impl VdfProver { impl Prover for VdfProver where - H: ElementHasher, + H: ElementHasher + Sync, { type BaseField = BaseElement; type Air = VdfAir; type Trace = TraceTable; type HashFn = H; + type VC = MerkleTree; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, Self::Air, E>; diff --git a/fri/benches/prover.rs b/fri/benches/prover.rs index b7b7c417f..bfc096fc3 100644 --- a/fri/benches/prover.rs +++ b/fri/benches/prover.rs @@ -6,7 +6,7 @@ use std::time::Duration; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; -use crypto::{hashers::Blake3_256, DefaultRandomCoin}; +use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}; use math::{fft, fields::f128::BaseElement, FieldElement}; use rand_utils::rand_vector; use winter_fri::{DefaultProverChannel, FriOptions, FriProver}; @@ -28,7 +28,8 @@ pub fn build_layers(c: &mut Criterion) { BenchmarkId::new("build_layers", domain_size), &evaluations, |b, e| { - let mut prover = FriProver::new(options.clone()); + let mut prover = + FriProver::<_, _, _, MerkleTree>>::new(options.clone()); b.iter_batched( || e.clone(), |evaluations| { diff --git a/fri/src/lib.rs b/fri/src/lib.rs index 6f680c428..3d49ded34 100644 --- a/fri/src/lib.rs +++ b/fri/src/lib.rs @@ -51,7 +51,7 @@ //! 
* Base STARK field, //! * Extension field, //! * Domain blowup factor, -//! * Hash function (used for Merkle tree commitments), +//! * Hash function (used for building vector commitments), //! * Folding factor (used for degree reduction for each FRI layer), //! * Maximum size of the last FRI layer. //! diff --git a/fri/src/proof.rs b/fri/src/proof.rs index 73b05249a..65dd2af92 100644 --- a/fri/src/proof.rs +++ b/fri/src/proof.rs @@ -5,7 +5,7 @@ use alloc::{string::ToString, vec::Vec}; -use crypto::{BatchMerkleProof, ElementHasher, Hasher}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; use math::FieldElement; use utils::{ ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, SliceReader, @@ -17,14 +17,14 @@ use utils::{ /// A proof generated by a FRI prover. /// /// A FRI proof contains information proving that a function *f* is a polynomial of some bounded -/// degree *d*. FRI proofs cannot be instantiated directly - they must be generated by a instance -/// of a [FriProver](crate::FriProver), and can be verified by a instance of a +/// degree *d*. FRI proofs cannot be instantiated directly - they must be generated by an instance +/// of a [FriProver](crate::FriProver), and can be verified by an instance of a /// [FriVerifier](crate::FriVerifier) via [VerifierChannel](crate::VerifierChannel) interface. /// /// A proof consists of zero or more layers and a remainder polynomial. Each layer contains a set of -/// polynomial evaluations at positions queried by the verifier as well as Merkle authentication -/// paths for these evaluations (the Merkle paths are compressed into a batch Merkle proof). The -/// remainder polynomial is given by its list of coefficients i.e. field elements. +/// polynomial evaluations at positions queried by the verifier, a vector commitment to LDE of +/// each polynomial, as well as opening proofs for the evaluations against the vector commitments. 
+/// The remainder polynomial is given by its list of coefficients i.e. field elements. /// /// All values in a proof are stored as vectors of bytes. Thus, the values must be parsed before /// they can be returned to the user. To do this, [parse_layers()](FriProof::parse_layers()) @@ -113,8 +113,8 @@ impl FriProof { // PARSING // -------------------------------------------------------------------------------------------- - /// Decomposes this proof into vectors of query values for each layer and corresponding Merkle - /// authentication paths for each query (grouped into batch Merkle proofs). + /// Decomposes this proof into vectors of query values for each layer and corresponding batch + /// opening proofs. /// /// # Panics /// Panics if: @@ -126,14 +126,15 @@ impl FriProof { /// * This proof is not consistent with the specified `domain_size` and `folding_factor`. /// * Any of the layers could not be parsed successfully. #[allow(clippy::type_complexity)] - pub fn parse_layers( + pub fn parse_layers( self, mut domain_size: usize, folding_factor: usize, - ) -> Result<(Vec>, Vec>), DeserializationError> + ) -> Result<(Vec>, Vec<>::MultiProof>), DeserializationError> where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { assert!(domain_size.is_power_of_two(), "domain size must be a power of two"); assert!(folding_factor.is_power_of_two(), "folding factor must be a power of two"); @@ -145,10 +146,20 @@ impl FriProof { // parse all layers for (i, layer) in self.layers.into_iter().enumerate() { domain_size /= folding_factor; - let (qv, mp) = layer.parse(domain_size, folding_factor).map_err(|err| { + let (qv, op) = layer.parse::<_, H, V>(folding_factor).map_err(|err| { DeserializationError::InvalidValue(format!("failed to parse FRI layer {i}: {err}")) })?; - layer_proofs.push(mp); + + // check that the opening proof matches the domain length + if >::get_multiproof_domain_len(&op) != domain_size { + return Err(DeserializationError::InvalidValue(format!( + 
"expected a domain of size {} but was {}", + domain_size, + >::get_multiproof_domain_len(&op), + ))); + } + + layer_proofs.push(op); layer_queries.push(qv); } @@ -235,14 +246,14 @@ pub struct FriProofLayer { impl FriProofLayer { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- - /// Creates a new proof layer from the specified query values and the corresponding Merkle - /// paths aggregated into a single batch Merkle proof. + /// Creates a new proof layer from the specified query values and the corresponding batch + /// opening proof. /// /// # Panics /// Panics if `query_values` is an empty slice. - pub(crate) fn new( + pub(crate) fn new, const N: usize>( query_values: Vec<[E; N]>, - merkle_proof: BatchMerkleProof, + proof: >::MultiProof, ) -> Self { assert!(!query_values.is_empty(), "query values cannot be empty"); @@ -251,13 +262,10 @@ impl FriProofLayer { let mut value_bytes = Vec::with_capacity(E::ELEMENT_BYTES * N * query_values.len()); value_bytes.write_many(&query_values); - // concatenate all query values and all internal Merkle proof nodes into vectors of bytes; - // we care about internal nodes only because leaf nodes can be reconstructed from hashes - // of query values - FriProofLayer { - values: value_bytes, - paths: merkle_proof.serialize_nodes(), - } + let mut proof_bytes = Vec::new(); + proof.write_into(&mut proof_bytes); + + FriProofLayer { values: value_bytes, paths: proof_bytes } } // PUBLIC ACCESSORS @@ -271,22 +279,22 @@ impl FriProofLayer { // PARSING // -------------------------------------------------------------------------------------------- - /// Decomposes this layer into a combination of query values and corresponding Merkle - /// authentication paths (grouped together into a single batch Merkle proof). + /// Decomposes this layer into a combination of query values and corresponding batch opening + /// proof. 
/// /// # Errors /// Returns an error if: /// * This layer does not contain at least one query. - /// * Parsing of any of the query values or the corresponding Merkle paths fails. + /// * Parsing of any of the query values or the corresponding batch opening proof fails. /// * Not all bytes have been consumed while parsing this layer. - pub fn parse( + pub fn parse( self, - domain_size: usize, folding_factor: usize, - ) -> Result<(Vec, BatchMerkleProof), DeserializationError> + ) -> Result<(Vec, >::MultiProof), DeserializationError> where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { // make sure the number of value bytes can be parsed into a whole number of queries let num_query_bytes = E::ELEMENT_BYTES * folding_factor; @@ -307,7 +315,7 @@ impl FriProofLayer { let mut query_values = Vec::with_capacity(num_queries * folding_factor); // read bytes corresponding to each query, convert them into field elements, - // and also hash them to build leaf nodes of the batch Merkle proof + // and also hash them to build leaf nodes of the batch opening proof let mut reader = SliceReader::new(&self.values); for query_hash in hashed_queries.iter_mut() { let mut qe = reader.read_many(folding_factor)?; @@ -318,15 +326,14 @@ impl FriProofLayer { return Err(DeserializationError::UnconsumedBytes); } - // build batch Merkle proof + // build batch opening proof let mut reader = SliceReader::new(&self.paths); - let tree_depth = domain_size.ilog2() as u8; - let merkle_proof = BatchMerkleProof::deserialize(&mut reader, hashed_queries, tree_depth)?; + let multi_proof = ::read_from(&mut reader)?; if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); } - Ok((query_values, merkle_proof)) + Ok((query_values, multi_proof)) } } diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 7fa81e3ac..7231e757c 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -23,20 +23,18 @@ use math::FieldElement; /// commitments the 
prover has written into the channel up to this point. pub trait ProverChannel { /// Hash function used by the prover to commit to polynomial evaluations. - type Hasher: Hasher; + type Hasher: ElementHasher; /// Sends a layer commitment to the verifier. /// - /// A layer commitment is a root of a Merkle tree built from evaluations of a polynomial - /// at a given layer. The Merkle tree is built by first transposing evaluations into a - /// two-dimensional matrix where each row contains values needed to compute a single - /// value of the next FRI layer, and then putting each row of the matrix into a single - /// leaf of the Merkle tree. Thus, the number of elements grouped into a single leaf is - /// equal to the `folding_factor` used for FRI layer construction. - fn commit_fri_layer( - &mut self, - layer_root: <>::Hasher as Hasher>::Digest, - ); + /// A layer commitment is the commitment string of a vector commitment to the vector of + /// evaluations of a polynomial at a given layer. The vector commitment is built by + /// first transposing evaluations into a two-dimensional matrix where each row contains + /// values needed to compute a single value of the next FRI layer, and then computing + /// the hash of each row to get one entry of the vector being committed to. Thus, the number + /// of elements grouped into a single leaf is equal to the `folding_factor` used for FRI layer + /// construction. + fn commit_fri_layer(&mut self, layer_root: ::Digest); /// Returns a random α drawn uniformly at random from the entire field. 
/// diff --git a/fri/src/prover/mod.rs b/fri/src/prover/mod.rs index 5bef65aae..17092ad34 100644 --- a/fri/src/prover/mod.rs +++ b/fri/src/prover/mod.rs @@ -6,14 +6,17 @@ use alloc::vec::Vec; use core::marker::PhantomData; -use crypto::{ElementHasher, Hasher, MerkleTree}; -use math::{fft, FieldElement, StarkField}; -use utils::{flatten_vector_elements, group_slice_elements, transpose_slice}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; +use math::{fft, FieldElement}; +#[cfg(feature = "concurrent")] +use utils::iterators::*; +use utils::{ + flatten_vector_elements, group_slice_elements, iter_mut, transpose_slice, uninit_vector, +}; use crate::{ folding::{apply_drp, fold_positions}, proof::{FriProof, FriProofLayer}, - utils::hash_values, FriOptions, }; @@ -29,19 +32,19 @@ mod tests; /// Implements the prover component of the FRI protocol. /// /// Given evaluations of a function *f* over domain *D* (`evaluations`), a FRI prover generates -/// a proof that *f* is a polynomial of some bounded degree *d*, such that *d* < |*D*| / *blowup_factor*. -/// The proof is succinct: it exponentially smaller than `evaluations` and the verifier can verify it -/// exponentially faster than it would have taken them to read all `evaluations`. +/// a proof that *f* is a polynomial of some bounded degree *d*, such that +/// *d* < |*D*| / *blowup_factor*. +/// The proof is succinct: it exponentially smaller than `evaluations` and the verifier can verify +/// it exponentially faster than it would have taken them to read all `evaluations`. /// /// The prover is parametrized with the following types: /// -/// * `B` specifies the base field of the STARK protocol. -/// * `E` specifies the field in which the FRI protocol is executed. This can be the same as the -/// base field `B`, but it can also be an extension of the base field in cases when the base -/// field is too small to provide desired security level for the FRI protocol. 
+/// * `E` specifies the field in which the FRI protocol is executed. /// * `C` specifies the type used to simulate prover-verifier interaction. -/// * `H` specifies the hash function used to build layer Merkle trees. The same hash function -/// must be used in the prover channel to generate pseudo random values. +/// * `H` specifies the hash function used to build for each layer the vector of values committed to +/// using the specified vector commitment scheme. The same hash function must be used in +/// the prover channel to generate pseudo random values. +/// * `V` specifies the vector commitment scheme used in order to commit to each layer. /// /// Proof generation is performed in two phases: commit phase and query phase. /// @@ -54,12 +57,12 @@ mod tests; /// a number of coefficients less than or equal to `remainder_max_degree_plus_1`. /// /// At each layer of reduction, the prover commits to the current set of evaluations. This is done -/// by building a Merkle tree from the evaluations and sending the root of the tree to the verifier -/// (via [ProverChannel]). The Merkle tree is build in such a way that all evaluations needed to -/// compute a single value in the next FRI layer are grouped into the same leaf (the number of -/// evaluations needed to compute a single element in the next FRI layer is equal to the -/// `folding_factor`). This allows us to decommit all these values using a single Merkle -/// authentication path. +/// by building a vector commitment to hashed evaluations and sending the commitment string +/// to the verifier (via [ProverChannel]). The vector commitment is build in such a way that all +/// evaluations needed to compute a single value in the next FRI layer are grouped into the same +/// leaf (the number of evaluations needed to compute a single element in the next FRI layer is +/// equal to the `folding_factor`). This allows us to decommit all these values using a single +/// individual opening proof. 
/// /// After committing to the set of evaluations at the current layer, the prover draws a random /// field element α from the channel, and uses it to build the next FRI layer. In the interactive @@ -67,8 +70,8 @@ mod tests; /// sends it to the prover. In the non-interactive version, α is pseudo-randomly generated based /// on the values the prover has written into the channel up to that point. /// -/// The prover keeps all FRI layers (consisting of evaluations and corresponding Merkle trees) in -/// its internal state. +/// The prover keeps all FRI layers (consisting of evaluations and corresponding vector +/// commitments) in its internal state. /// /// # Query phase /// In the query phase, which is executed via [build_proof()](FriProver::build_proof()) function, @@ -89,23 +92,23 @@ mod tests; /// /// Calling [build_layers()](FriProver::build_layers()) when the internal state is dirty, or /// calling [build_proof()](FriProver::build_proof()) on a clean state will result in a panic. -pub struct FriProver +pub struct FriProver where - B: StarkField, - E: FieldElement, + E: FieldElement, C: ProverChannel, - H: ElementHasher, + H: ElementHasher, + V: VectorCommitment, { options: FriOptions, - layers: Vec>, + layers: Vec>, remainder_poly: FriRemainder, _channel: PhantomData, } -struct FriLayer, H: Hasher> { - tree: MerkleTree, +struct FriLayer> { + commitment: V, evaluations: Vec, - _base_field: PhantomData, + _h: PhantomData, } struct FriRemainder(Vec); @@ -113,12 +116,12 @@ struct FriRemainder(Vec); // PROVER IMPLEMENTATION // ================================================================================================ -impl FriProver +impl FriProver where - B: StarkField, - E: FieldElement, + E: FieldElement, C: ProverChannel, - H: ElementHasher, + H: ElementHasher, + V: VectorCommitment, { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- @@ -141,7 +144,7 @@ where } /// Returns offset of the domain 
over which FRI protocol is executed by this prover. - pub fn domain_offset(&self) -> B { + pub fn domain_offset(&self) -> E::BaseField { self.options.domain_offset() } @@ -166,9 +169,10 @@ where /// application of the DRP the degree of the function (and size of the domain) is reduced by /// `folding_factor` until the remaining evaluations can be represented by a remainder polynomial /// with at most `remainder_max_degree_plus_1` number of coefficients. - /// At each layer of reduction the current evaluations are committed to using a Merkle tree, - /// and the root of this tree is written into the channel. After this the prover draws a random - /// field element α from the channel, and uses it in the next application of the DRP. + /// At each layer of reduction the current evaluations are committed to using a vector commitment + /// scheme, and the commitment string of this vector commitment is written into the channel. + /// After this the prover draws a random field element α from the channel, and uses it in + /// the next application of the DRP. /// /// # Panics /// Panics if the prover state is dirty (the vector of layers is not empty). @@ -197,23 +201,23 @@ where /// alpha from the channel and use it to perform degree-respecting projection. fn build_layer(&mut self, channel: &mut C, evaluations: &mut Vec) { // commit to the evaluations at the current layer; we do this by first transposing the - // evaluations into a matrix of N columns, and then building a Merkle tree from the - // rows of this matrix; we do this so that we could de-commit to N values with a single - // Merkle authentication path. + // evaluations into a matrix of N columns, then hashing each row into a digest, and finally + // commiting to vector of these digests; we do this so that we could de-commit to N values + // with a single opening proof. 
let transposed_evaluations = transpose_slice(evaluations); - let hashed_evaluations = hash_values::(&transposed_evaluations); - let evaluation_tree = - MerkleTree::::new(hashed_evaluations).expect("failed to construct FRI layer tree"); - channel.commit_fri_layer(*evaluation_tree.root()); + let evaluation_vector_commitment = + build_layer_commitment::<_, _, V, N>(&transposed_evaluations) + .expect("failed to construct FRI layer commitment"); + channel.commit_fri_layer(evaluation_vector_commitment.commitment()); // draw a pseudo-random coefficient from the channel, and use it in degree-respecting // projection to reduce the degree of evaluations by N let alpha = channel.draw_fri_alpha(); *evaluations = apply_drp(&transposed_evaluations, self.domain_offset(), alpha); self.layers.push(FriLayer { - tree: evaluation_tree, + commitment: evaluation_vector_commitment, evaluations: flatten_vector_elements(transposed_evaluations), - _base_field: PhantomData, + _h: PhantomData, }); } @@ -233,9 +237,9 @@ where /// Executes query phase of FRI protocol. /// /// For each of the provided `positions`, corresponding evaluations from each of the layers - /// (excluding the remainder layer) are recorded into the proof together with Merkle - /// authentication paths from the root of layer commitment trees. For the remainder, we send - /// the whole remainder polynomial resulting from interpolating the remainder layer. + /// (excluding the remainder layer) are recorded into the proof together with a batch opening + /// proof against the sent vector commitment. For the remainder, we send the whole remainder + /// polynomial resulting from interpolating the remainder layer evaluations. /// /// # Panics /// Panics is the prover state is clean (no FRI layers have been build yet). 
@@ -256,10 +260,10 @@ where // sort of a static dispatch for folding_factor parameter let proof_layer = match folding_factor { - 2 => query_layer::(&self.layers[i], &positions), - 4 => query_layer::(&self.layers[i], &positions), - 8 => query_layer::(&self.layers[i], &positions), - 16 => query_layer::(&self.layers[i], &positions), + 2 => query_layer::(&self.layers[i], &positions), + 4 => query_layer::(&self.layers[i], &positions), + 8 => query_layer::(&self.layers[i], &positions), + 16 => query_layer::(&self.layers[i], &positions), _ => unimplemented!("folding factor {} is not supported", folding_factor), }; @@ -283,15 +287,15 @@ where /// Builds a single proof layer by querying the evaluations of the passed in FRI layer at the /// specified positions. -fn query_layer, H: Hasher, const N: usize>( - layer: &FriLayer, +fn query_layer, const N: usize>( + layer: &FriLayer, positions: &[usize], ) -> FriProofLayer { - // build Merkle authentication paths for all query positions + // build a batch opening proof for all query positions let proof = layer - .tree - .prove_batch(positions) - .expect("failed to generate a Merkle proof for FRI layer queries"); + .commitment + .open_many(positions) + .expect("failed to generate a batch opening proof for FRI layer queries"); // build a list of polynomial evaluations at each position; since evaluations in FRI layers // are stored in transposed form, a position refers to N evaluations which are committed @@ -301,6 +305,24 @@ fn query_layer, H: Hasher, const N for &position in positions.iter() { queried_values.push(evaluations[position]); } + FriProofLayer::new::<_, _, V, N>(queried_values, proof.1) +} + +/// Hashes each of the arrays in the provided slice and returns a vector commitment to resulting +/// hashes. 
+pub fn build_layer_commitment( + values: &[[E; N]], +) -> Result>::Error> +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ + let mut hashed_evaluations: Vec = unsafe { uninit_vector(values.len()) }; + iter_mut!(hashed_evaluations, 1024).zip(values).for_each(|(e, v)| { + let digest: H::Digest = H::hash_elements(v); + *e = digest + }); - FriProofLayer::new(queried_values, proof) + V::new(hashed_evaluations) } diff --git a/fri/src/prover/tests.rs b/fri/src/prover/tests.rs index 87cc7c798..e765092c5 100644 --- a/fri/src/prover/tests.rs +++ b/fri/src/prover/tests.rs @@ -5,7 +5,7 @@ use alloc::vec::Vec; -use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, RandomCoin}; +use crypto::{hashers::Blake3_256, DefaultRandomCoin, Hasher, MerkleTree, RandomCoin}; use math::{fft, fields::f128::BaseElement, FieldElement}; use utils::{Deserializable, Serializable, SliceReader}; @@ -76,14 +76,14 @@ pub fn verify_proof( let proof = FriProof::read_from(&mut reader).unwrap(); // verify the proof - let mut channel = DefaultVerifierChannel::::new( + let mut channel = DefaultVerifierChannel::>::new( proof, commitments, domain_size, options.folding_factor(), ) .unwrap(); - let mut coin = DefaultRandomCoin::::new(&[]); + let mut coin = crypto::DefaultRandomCoin::::new(&[]); let verifier = FriVerifier::new(&mut channel, &mut coin, options.clone(), max_degree)?; let queried_evaluations = positions.iter().map(|&p| evaluations[p]).collect::>(); verifier.verify(&mut channel, &queried_evaluations, positions) @@ -104,7 +104,7 @@ fn fri_prove_verify( let evaluations = build_evaluations(trace_length, lde_blowup); // instantiate the prover and generate the proof - let mut prover = FriProver::new(options.clone()); + let mut prover = FriProver::<_, _, _, MerkleTree>::new(options.clone()); prover.build_layers(&mut channel, evaluations.clone()); let positions = channel.draw_query_positions(0); let proof = prover.build_proof(&positions); diff --git a/fri/src/utils.rs 
b/fri/src/utils.rs index 1138b9608..725e5b4c9 100644 --- a/fri/src/utils.rs +++ b/fri/src/utils.rs @@ -5,20 +5,14 @@ use alloc::vec::Vec; -use crypto::ElementHasher; -use math::FieldElement; -#[cfg(feature = "concurrent")] -use utils::iterators::*; -use utils::{iter_mut, uninit_vector}; - -/// Maps positions in the evaluation domain to indexes of commitment Merkle tree. +/// Maps positions in the evaluation domain to indexes of of the vector commitment. pub fn map_positions_to_indexes( positions: &[usize], source_domain_size: usize, folding_factor: usize, num_partitions: usize, ) -> Vec { - // if there was only 1 partition, order of elements in the commitment tree + // if there was only 1 partition, order of elements in the vector commitment // is the same as the order of elements in the evaluation domain if num_partitions == 1 { return positions.to_vec(); @@ -37,16 +31,3 @@ pub fn map_positions_to_indexes( result } - -/// Hashes each of the arrays in the provided slice and returns a vector of resulting hashes. -pub fn hash_values(values: &[[E; N]]) -> Vec -where - E: FieldElement, - H: ElementHasher, -{ - let mut result: Vec = unsafe { uninit_vector(values.len()) }; - iter_mut!(result, 1024).zip(values).for_each(|(r, v)| { - *r = H::hash_elements(v); - }); - result -} diff --git a/fri/src/verifier/channel.rs b/fri/src/verifier/channel.rs index 0c34f73a1..6f8709858 100644 --- a/fri/src/verifier/channel.rs +++ b/fri/src/verifier/channel.rs @@ -4,8 +4,9 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; +use core::marker::PhantomData; -use crypto::{BatchMerkleProof, ElementHasher, Hasher, MerkleTree}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; use math::FieldElement; use utils::{group_slice_elements, DeserializationError}; @@ -25,6 +26,8 @@ use crate::{FriProof, VerifierError}; pub trait VerifierChannel { /// Hash function used by the prover to commit to polynomial evaluations. 
type Hasher: ElementHasher; + /// Vector commitment used to commit to polynomial evaluations. + type VectorCommitment: VectorCommitment; // REQUIRED METHODS // -------------------------------------------------------------------------------------------- @@ -39,9 +42,7 @@ pub trait VerifierChannel { /// from the entire field after each layer commitment is received. In the non-interactive /// version, the verifier can read all layer commitments at once, and then generate α values /// locally. - fn read_fri_layer_commitments( - &mut self, - ) -> Vec<<>::Hasher as Hasher>::Digest>; + fn read_fri_layer_commitments(&mut self) -> Vec<::Digest>; /// Reads and removes from the channel evaluations of the polynomial at the queried positions /// for the next FRI layer. @@ -50,20 +51,21 @@ pub trait VerifierChannel { /// the verifier during the query phase of the FRI protocol. /// /// It is expected that layer queries and layer proofs at the same FRI layer are consistent. - /// That is, query values hash into the leaf nodes of corresponding Merkle authentication - /// paths. + /// That is, query values hash into the leaf nodes of corresponding vector commitment. fn take_next_fri_layer_queries(&mut self) -> Vec; - /// Reads and removes from the channel Merkle authentication paths for queried evaluations for - /// the next FRI layer. + /// Reads and removes from the channel vector commitment opening proofs of queried evaluations + /// for the next FRI layer. /// /// In the interactive version of the protocol, these authentication paths are sent from the /// prover to the verifier during the query phase of the FRI protocol. /// /// It is expected that layer proofs and layer queries at the same FRI layer are consistent. - /// That is, query values hash into the leaf nodes of corresponding Merkle authentication - /// paths. 
- fn take_next_fri_layer_proof(&mut self) -> BatchMerkleProof; + /// That is, query values hash into the elements of the vector committed to using the specified + /// vector commitment scheme. + fn take_next_fri_layer_proof( + &mut self, + ) -> >::MultiProof; /// Reads and removes the remainder polynomial from the channel. fn take_fri_remainder(&mut self) -> Vec; @@ -81,16 +83,29 @@ pub trait VerifierChannel { fn read_layer_queries( &mut self, positions: &[usize], - commitment: &<>::Hasher as Hasher>::Digest, + commitment: &::Digest, ) -> Result, VerifierError> { let layer_proof = self.take_next_fri_layer_proof(); - MerkleTree::::verify_batch(commitment, positions, &layer_proof) - .map_err(|_| VerifierError::LayerCommitmentMismatch)?; - - // TODO: make sure layer queries hash into leaves of layer proof - let layer_queries = self.take_next_fri_layer_queries(); - Ok(group_slice_elements(&layer_queries).to_vec()) + // build the values (i.e., polynomial evaluations over a coset of a multiplicative subgroup + // of the current evaluation domain) corresponding to each leaf of the layer commitment + let leaf_values = group_slice_elements(&layer_queries); + // hash the aforementioned values to get the leaves to be verified against the previously + // received commitment + let hashed_values: Vec<::Digest> = leaf_values + .iter() + .map(|seg| ::hash_elements(seg)) + .collect(); + + <>::VectorCommitment as VectorCommitment>::verify_many( + *commitment, + positions, + &hashed_values, + &layer_proof, + ) + .map_err(|_| VerifierError::LayerCommitmentMismatch)?; + + Ok(leaf_values.to_vec()) } /// Returns FRI remainder polynomial read from this channel. @@ -110,18 +125,24 @@ pub trait VerifierChannel { /// /// Though this implementation is primarily intended for testing purposes, it can be used in /// production use cases as well. 
-pub struct DefaultVerifierChannel> { +pub struct DefaultVerifierChannel< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { layer_commitments: Vec, - layer_proofs: Vec>, + layer_proofs: Vec, layer_queries: Vec>, remainder: Vec, num_partitions: usize, + _h: PhantomData, } -impl DefaultVerifierChannel +impl DefaultVerifierChannel where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { /// Builds a new verifier channel from the specified [FriProof]. /// @@ -137,7 +158,7 @@ where let remainder = proof.parse_remainder()?; let (layer_queries, layer_proofs) = - proof.parse_layers::(domain_size, folding_factor)?; + proof.parse_layers::(domain_size, folding_factor)?; Ok(DefaultVerifierChannel { layer_commitments, @@ -145,16 +166,19 @@ where layer_queries, remainder, num_partitions, + _h: PhantomData, }) } } -impl VerifierChannel for DefaultVerifierChannel +impl VerifierChannel for DefaultVerifierChannel where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { type Hasher = H; + type VectorCommitment = V; fn read_fri_num_partitions(&self) -> usize { self.num_partitions @@ -164,7 +188,7 @@ where self.layer_commitments.drain(..).collect() } - fn take_next_fri_layer_proof(&mut self) -> BatchMerkleProof { + fn take_next_fri_layer_proof(&mut self) -> V::MultiProof { self.layer_proofs.remove(0) } diff --git a/fri/src/verifier/mod.rs b/fri/src/verifier/mod.rs index 9067fcb45..ff0582b2c 100644 --- a/fri/src/verifier/mod.rs +++ b/fri/src/verifier/mod.rs @@ -8,7 +8,7 @@ use alloc::vec::Vec; use core::{marker::PhantomData, mem}; -use crypto::{ElementHasher, RandomCoin}; +use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use math::{polynom, FieldElement, StarkField}; use crate::{folding::fold_positions, utils::map_positions_to_indexes, FriOptions, VerifierError}; @@ -48,21 +48,22 @@ pub use channel::{DefaultVerifierChannel, VerifierChannel}; /// # Query phase /// During the query phase, which is executed via [verify()](FriVerifier::verify()) 
function, /// the verifier sends a set of positions in the domain *D* to the prover, and the prover responds -/// with polynomial evaluations at these positions (together with corresponding Merkle paths) +/// with polynomial evaluations at these positions (together with corresponding opening proofs) /// across all FRI layers. The verifier then checks that: -/// * The Merkle paths are valid against the layer commitments the verifier received during +/// * The opening proofs are valid against the layer commitments the verifier received during /// the commit phase. /// * The evaluations are consistent across FRI layers (i.e., the degree-respecting projection /// was applied correctly). /// * The degree of the polynomial implied by evaluations at the last FRI layer (the remainder) /// is smaller than the degree resulting from reducing degree *d* by `folding_factor` at each /// FRI layer. -pub struct FriVerifier +pub struct FriVerifier where E: FieldElement, C: VerifierChannel, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { max_poly_degree: usize, domain_size: usize, @@ -73,14 +74,16 @@ where num_partitions: usize, _channel: PhantomData, _public_coin: PhantomData, + _vector_com: PhantomData, } -impl FriVerifier +impl FriVerifier where E: FieldElement, - C: VerifierChannel, + C: VerifierChannel, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { /// Returns a new instance of FRI verifier created from the specified parameters. 
/// @@ -146,6 +149,7 @@ where num_partitions, _channel: PhantomData, _public_coin: PhantomData, + _vector_com: PhantomData, }) } @@ -251,14 +255,14 @@ where // determine which evaluations were queried in the folded layer let mut folded_positions = fold_positions(&positions, domain_size, self.options.folding_factor()); - // determine where these evaluations are in the commitment Merkle tree + // determine where these evaluations are in the vector commitment let position_indexes = map_positions_to_indexes( &folded_positions, domain_size, self.options.folding_factor(), self.num_partitions, ); - // read query values from the specified indexes in the Merkle tree + // read query values from the specified indexes let layer_commitment = self.layer_commitments[depth]; // TODO: add layer depth to the potential error message let layer_values = channel.read_layer_queries(&position_indexes, &layer_commitment)?; diff --git a/prover/benches/lagrange_kernel.rs b/prover/benches/lagrange_kernel.rs index 559af93ea..7ee8ab3c3 100644 --- a/prover/benches/lagrange_kernel.rs +++ b/prover/benches/lagrange_kernel.rs @@ -11,7 +11,7 @@ use air::{ TraceInfo, TransitionConstraintDegree, }; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; -use crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}; +use crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree, RandomCoin}; use math::{fields::f64::BaseElement, ExtensionOf, FieldElement}; use winter_prover::{ matrix::ColMatrix, DefaultConstraintEvaluator, DefaultTraceLde, Prover, ProverGkrProof, @@ -183,8 +183,10 @@ impl Prover for LagrangeProver { type Air = LagrangeKernelAir; type Trace = LagrangeTrace; type HashFn = Blake3_256; + type VC = MerkleTree>; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelAir, E>; diff --git a/prover/src/channel.rs 
b/prover/src/channel.rs index c3d99675a..34a39d3fc 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -10,7 +10,7 @@ use air::{ proof::{Commitments, Context, OodFrame, Proof, Queries, TraceOodFrame}, Air, ConstraintCompositionCoefficients, DeepCompositionCoefficients, }; -use crypto::{ElementHasher, RandomCoin}; +use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use fri::FriProof; use math::{FieldElement, ToElements}; #[cfg(feature = "concurrent")] @@ -19,12 +19,13 @@ use utils::iterators::*; // TYPES AND INTERFACES // ================================================================================================ -pub struct ProverChannel<'a, A, E, H, R> +pub struct ProverChannel<'a, A, E, H, R, V> where A: Air, E: FieldElement, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { air: &'a A, public_coin: R, @@ -33,17 +34,19 @@ where ood_frame: OodFrame, pow_nonce: u64, _field_element: PhantomData, + _vector_commitment: PhantomData, } // PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R> ProverChannel<'a, A, E, H, R> +impl<'a, A, E, H, R, V> ProverChannel<'a, A, E, H, R, V> where A: Air, E: FieldElement, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- @@ -65,6 +68,7 @@ where ood_frame: OodFrame::default(), pow_nonce: 0, _field_element: PhantomData, + _vector_commitment: PhantomData, } } @@ -199,12 +203,13 @@ where // FRI PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R> fri::ProverChannel for ProverChannel<'a, A, E, H, R> +impl<'a, A, E, H, R, V> fri::ProverChannel for ProverChannel<'a, A, E, H, R, V> where A: Air, E: FieldElement, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { type Hasher = H; diff --git 
a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs index a28a2f873..ac71fdc94 100644 --- a/prover/src/constraints/commitment.rs +++ b/prover/src/constraints/commitment.rs @@ -4,9 +4,10 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; +use core::marker::PhantomData; use air::proof::Queries; -use crypto::{ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use math::FieldElement; use super::RowMatrix; @@ -18,44 +19,54 @@ use super::RowMatrix; /// /// The commitment consists of two components: /// * Evaluations of composition polynomial columns over the LDE domain. -/// * Merkle tree where each leaf in the tree corresponds to a row in the composition polynomial -/// evaluation matrix. -pub struct ConstraintCommitment> { +/// * Vector commitment where each vector element corresponds to the digest of a row in +/// the composition polynomial evaluation matrix. +pub struct ConstraintCommitment< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { evaluations: RowMatrix, - commitment: MerkleTree, + vector_commitment: V, + _h: PhantomData, } -impl> ConstraintCommitment { +impl ConstraintCommitment +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Creates a new constraint evaluation commitment from the provided composition polynomial - /// evaluations and the corresponding Merkle tree commitment. - pub fn new(evaluations: RowMatrix, commitment: MerkleTree) -> ConstraintCommitment { + /// evaluations and the corresponding vector commitment. 
+ pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { assert_eq!( evaluations.num_rows(), - commitment.leaves().len(), - "number of rows in constraint evaluation matrix must be the same as number of leaves in constraint commitment" + commitment.domain_len(), + "number of rows in constraint evaluation matrix must be the same as the size \ + of the vector commitment domain" ); - ConstraintCommitment { evaluations, commitment } - } - /// Returns the root of the commitment Merkle tree. - pub fn root(&self) -> H::Digest { - *self.commitment.root() + ConstraintCommitment { + evaluations, + vector_commitment: commitment, + _h: PhantomData, + } } - /// Returns the depth of the commitment Merkle tree. - #[allow(unused)] - pub fn tree_depth(&self) -> usize { - self.commitment.depth() + /// Returns the commitment. + pub fn commitment(&self) -> H::Digest { + self.vector_commitment.commitment() } - /// Returns constraint evaluations at the specified positions along with Merkle authentication - /// paths from the root of the commitment to these evaluations. + /// Returns constraint evaluations at the specified positions along with a batch opening proof + /// against the vector commitment. 
pub fn query(self, positions: &[usize]) -> Queries { - // build Merkle authentication paths to the leaves specified by positions - let merkle_proof = self - .commitment - .prove_batch(positions) - .expect("failed to generate a Merkle proof for constraint queries"); + // build batch opening proof to the leaves specified by positions + let opening_proof = self + .vector_commitment + .open_many(positions) + .expect("failed to generate a batch opening proof for constraint queries"); // determine a set of evaluations corresponding to each position let mut evaluations = Vec::new(); @@ -64,6 +75,6 @@ impl> ConstraintComm evaluations.push(row); } - Queries::new(merkle_proof, evaluations) + Queries::new::(opening_proof.1, evaluations) } } diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 4874973f2..ac0e82be2 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -50,7 +50,7 @@ pub use air::{ }; use air::{AuxRandElements, GkrRandElements}; pub use crypto; -use crypto::{ElementHasher, RandomCoin}; +use crypto::{ElementHasher, RandomCoin, VectorCommitment}; use fri::FriProver; pub use math; use math::{ @@ -58,7 +58,6 @@ use math::{ fields::{CubeExtension, QuadExtension}, ExtensibleField, FieldElement, StarkField, ToElements, }; -use maybe_async::{maybe_async, maybe_await}; use tracing::{event, info_span, instrument, Level}; pub use utils::{ iterators, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, @@ -81,6 +80,7 @@ mod composer; use composer::DeepCompositionPoly; mod trace; +use maybe_async::{maybe_async, maybe_await}; pub use trace::{ AuxTraceWithMetadata, DefaultTraceLde, Trace, TraceLde, TracePolyTable, TraceTable, TraceTableFragment, @@ -139,11 +139,14 @@ pub trait Prover { /// Hash function to be used. type HashFn: ElementHasher; + /// Vector commitment scheme to be used. + type VC: VectorCommitment; + /// PRNG to be used for generating random field elements. 
- type RandomCoin: RandomCoin + Send + Sync; + type RandomCoin: RandomCoin; /// Trace low-degree extension for building the LDEs of trace segments and their commitments. - type TraceLde: TraceLde + Send + Sync + type TraceLde: TraceLde where E: FieldElement; @@ -288,10 +291,11 @@ pub trait Prover { // create a channel which is used to simulate interaction between the prover and the // verifier; the channel will be used to commit to values and to draw randomness that // should come from the verifier. - let mut channel = ProverChannel::::new( - &air, - pub_inputs_elements, - ); + let mut channel = + ProverChannel::::new( + &air, + pub_inputs_elements, + ); // 1 ----- Commit to the execution trace -------------------------------------------------- @@ -334,15 +338,14 @@ pub trait Prover { // commit to the auxiliary trace segment let aux_segment_polys = { - // extend the auxiliary trace segment and build a Merkle tree from the extended - // trace + // extend the auxiliary trace segment and commit to the extended trace let span = info_span!("commit_to_aux_trace_segment").entered(); - let (aux_segment_polys, aux_segment_root) = + let (aux_segment_polys, aux_segment_commitment) = trace_lde.set_aux_trace(&aux_trace, &domain); - // commit to the LDE of the extended auxiliary trace segment by writing the root of - // its Merkle tree into the channel - channel.commit_trace(aux_segment_root); + // commit to the LDE of the extended auxiliary trace segment by writing its + // commitment into the channel + channel.commit_trace(aux_segment_commitment); drop(span); aux_segment_polys @@ -450,7 +453,7 @@ pub trait Prover { // 6 ----- compute FRI layers for the composition polynomial ------------------------------ let fri_options = air.options().to_fri_options(); let num_layers = fri_options.num_fri_layers(lde_domain_size); - let mut fri_prover = FriProver::new(fri_options); + let mut fri_prover = FriProver::<_, _, _, Self::VC>::new(fri_options); info_span!("compute_fri_layers", 
num_layers) .in_scope(|| fri_prover.build_layers(&mut channel, deep_evaluations)); @@ -479,13 +482,12 @@ pub trait Prover { let fri_proof = fri_prover.build_proof(&query_positions); // query the execution trace at the selected position; for each query, we need the - // state of the trace at that position + Merkle authentication path + // state of the trace at that position and a batch opening proof at specified queries let trace_queries = trace_lde.query(&query_positions); // query the constraint commitment at the selected positions; for each query, we need - // just a Merkle authentication path. this is because constraint evaluations for each - // step are merged into a single value and Merkle authentication paths contain these - // values already + // the state of the trace at that position and a batch opening proof at specified + // queries let constraint_queries = constraint_commitment.query(&query_positions); // build the proof object @@ -512,15 +514,15 @@ pub trait Prover { /// columns each of size equal to trace length, and finally evaluating each composition /// polynomial column over the LDE domain. /// - /// The commitment is computed by hashing each row in the evaluation matrix, and then building - /// a Merkle tree from the resulting hashes. + /// The commitment is computed by building a vector containing the hashes of each row in + /// the evaluation matrix, and then building vector commitment of the resulting vector. 
#[maybe_async] fn build_constraint_commitment( &self, composition_poly_trace: CompositionPolyTrace, num_constraint_composition_columns: usize, domain: &StarkDomain, - ) -> (ConstraintCommitment, CompositionPoly) + ) -> (ConstraintCommitment, CompositionPoly) where E: FieldElement, { @@ -549,13 +551,12 @@ pub trait Prover { // finally, build constraint evaluation commitment let constraint_commitment = info_span!( "compute_constraint_evaluation_commitment", - tree_depth = domain_size.ilog2() + log_domain_size = domain_size.ilog2() ) .in_scope(|| { - let commitment = composed_evaluations.commit_to_rows(); + let commitment = composed_evaluations.commit_to_rows::(); ConstraintCommitment::new(composed_evaluations, commitment) }); - assert_eq!(constraint_commitment.tree_depth(), domain_size.ilog2() as usize); (constraint_commitment, composition_poly) } @@ -567,21 +568,21 @@ pub trait Prover { &self, trace: &Self::Trace, domain: &StarkDomain, - channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin>, + channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, ) -> (Self::TraceLde, TracePolyTable) where E: FieldElement, { - // extend the main execution trace and build a Merkle tree from the extended trace + // extend the main execution trace and commit to the extended trace let (trace_lde, trace_polys) = maybe_await!(self.new_trace_lde(trace.info(), trace.main_segment(), domain)); // get the commitment to the main trace segment LDE - let main_trace_root = trace_lde.get_main_trace_commitment(); + let main_trace_commitment = trace_lde.get_main_trace_commitment(); - // commit to the LDE of the main trace by writing the root of its Merkle tree into + // commit to the LDE of the main trace by writing the the commitment string into // the channel - channel.commit_trace(main_trace_root); + channel.commit_trace(main_trace_commitment); (trace_lde, trace_polys) } @@ -594,8 +595,8 @@ pub trait Prover { air: &Self::Air, 
composition_poly_trace: CompositionPolyTrace, domain: &StarkDomain, - channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin>, - ) -> (ConstraintCommitment, CompositionPoly) + channel: &mut ProverChannel<'_, Self::Air, E, Self::HashFn, Self::RandomCoin, Self::VC>, + ) -> (ConstraintCommitment, CompositionPoly) where E: FieldElement, { @@ -608,9 +609,9 @@ pub trait Prover { domain, )); - // then, commit to the evaluations of constraints by writing the root of the constraint - // Merkle tree into the channel - channel.commit_constraints(constraint_commitment.root()); + // then, commit to the evaluations of constraints by writing the commitment string of + // the constraint commitment into the channel + channel.commit_constraints(constraint_commitment.commitment()); (constraint_commitment, composition_poly) } diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index 57a7f40ee..61f67aca1 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -6,7 +6,7 @@ use alloc::vec::Vec; use core::{iter::FusedIterator, slice}; -use crypto::{ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use math::{fft, polynom, FieldElement}; #[cfg(feature = "concurrent")] use utils::iterators::*; @@ -256,13 +256,13 @@ impl ColMatrix { /// /// The commitment is built as follows: /// * Each row of the matrix is hashed into a single digest of the specified hash function. - /// * The resulting values are used to built a binary Merkle tree such that each row digest - /// becomes a leaf in the tree. Thus, the number of leaves in the tree is equal to the - /// number of rows in the matrix. - /// * The resulting Merkle tree is return as the commitment to the entire matrix. - pub fn commit_to_rows(&self) -> MerkleTree + /// * The resulting vector of digests is committed to using the specified vector commitment + /// scheme. 
+ /// * The resulting commitment is returned as the commitment to the entire matrix. + pub fn commit_to_rows(&self) -> V where H: ElementHasher, + V: VectorCommitment, { // allocate vector to store row hashes let mut row_hashes = unsafe { uninit_vector::(self.num_rows()) }; @@ -282,8 +282,7 @@ impl ColMatrix { } ); - // build Merkle tree out of hashed rows - MerkleTree::new(row_hashes).expect("failed to construct trace Merkle tree") + V::new(row_hashes).expect("failed to construct trace vector commitment") } // CONVERSIONS diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index ded689bd6..f42ca0e7a 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -5,7 +5,7 @@ use alloc::vec::Vec; -use crypto::{ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use math::{fft, FieldElement, StarkField}; #[cfg(feature = "concurrent")] use utils::iterators::*; @@ -176,13 +176,14 @@ impl RowMatrix { /// /// The commitment is built as follows: /// * Each row of the matrix is hashed into a single digest of the specified hash function. - /// * The resulting values are used to build a binary Merkle tree such that each row digest - /// becomes a leaf in the tree. Thus, the number of leaves in the tree is equal to the - /// number of rows in the matrix. - /// * The resulting Merkle tree is returned as the commitment to the entire matrix. - pub fn commit_to_rows(&self) -> MerkleTree + /// The result is a vector of digests of length equal to the number of matrix rows. + /// * A vector commitment is computed for the resulting vector using the specified vector + /// commitment scheme. + /// * The resulting vector commitment is returned as the commitment to the entire matrix. 
+ pub fn commit_to_rows(&self) -> V where H: ElementHasher, + V: VectorCommitment, { // allocate vector to store row hashes let mut row_hashes = unsafe { uninit_vector::(self.num_rows()) }; @@ -198,8 +199,8 @@ impl RowMatrix { } ); - // build Merkle tree out of hashed rows - MerkleTree::new(row_hashes).expect("failed to construct trace Merkle tree") + // build the vector commitment to the hashed rows + V::new(row_hashes).expect("failed to construct trace vector commitment") } } diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index b5c7c1cce..e06839d53 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -4,14 +4,14 @@ // LICENSE file in the root directory of this source tree. use alloc::vec::Vec; +use core::marker::PhantomData; -use air::LagrangeKernelEvaluationFrame; -use crypto::MerkleTree; +use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo}; +use crypto::VectorCommitment; use tracing::info_span; use super::{ - ColMatrix, ElementHasher, EvaluationFrame, FieldElement, Hasher, Queries, StarkDomain, - TraceInfo, TraceLde, TracePolyTable, + ColMatrix, ElementHasher, EvaluationFrame, FieldElement, StarkDomain, TraceLde, TracePolyTable, }; use crate::{RowMatrix, DEFAULT_SEGMENT_WIDTH}; @@ -28,20 +28,30 @@ mod tests; /// will always be elements in the base field (even when an extension field is used). /// - Auxiliary segments: a list of 0 or more segments for traces generated after the prover /// commits to the first trace segment. Currently, at most 1 auxiliary segment is possible. 
-pub struct DefaultTraceLde> { +pub struct DefaultTraceLde< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { // low-degree extension of the main segment of the trace main_segment_lde: RowMatrix, // commitment to the main segment of the trace - main_segment_tree: MerkleTree, + main_segment_oracles: V, // low-degree extensions of the auxiliary segment of the trace aux_segment_lde: Option>, // commitment to the auxiliary segment of the trace - aux_segment_tree: Option>, + aux_segment_oracles: Option, blowup: usize, trace_info: TraceInfo, + _h: PhantomData, } -impl> DefaultTraceLde { +impl DefaultTraceLde +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Takes the main trace segment columns as input, interpolates them into polynomials in /// coefficient form, evaluates the polynomials over the LDE domain, commits to the /// polynomial evaluations, and creates a new [DefaultTraceLde] with the LDE of the main trace @@ -54,18 +64,19 @@ impl> DefaultTraceLd main_trace: &ColMatrix, domain: &StarkDomain, ) -> (Self, TracePolyTable) { - // extend the main execution trace and build a Merkle tree from the extended trace - let (main_segment_lde, main_segment_tree, main_segment_polys) = - build_trace_commitment::(main_trace, domain); + // extend the main execution trace and build a commitment to the extended trace + let (main_segment_lde, main_segment_vector_com, main_segment_polys) = + build_trace_commitment::(main_trace, domain); let trace_poly_table = TracePolyTable::new(main_segment_polys); let trace_lde = DefaultTraceLde { main_segment_lde, - main_segment_tree, + main_segment_oracles: main_segment_vector_com, aux_segment_lde: None, - aux_segment_tree: None, + aux_segment_oracles: None, blowup: domain.trace_to_lde_blowup(), trace_info: trace_info.clone(), + _h: PhantomData, }; (trace_lde, trace_poly_table) @@ -95,17 +106,18 @@ impl> DefaultTraceLd } } -impl TraceLde for DefaultTraceLde +impl TraceLde for DefaultTraceLde where E: 
FieldElement, - H: ElementHasher, + H: ElementHasher + core::marker::Sync, + V: VectorCommitment + core::marker::Sync, { type HashFn = H; + type VC = V; /// Returns the commitment to the low-degree extension of the main trace segment. - fn get_main_trace_commitment(&self) -> ::Digest { - let root_hash = self.main_segment_tree.root(); - *root_hash + fn get_main_trace_commitment(&self) -> H::Digest { + self.main_segment_oracles.commitment() } /// Takes auxiliary trace segment columns as input, interpolates them into polynomials in @@ -124,10 +136,10 @@ where &mut self, aux_trace: &ColMatrix, domain: &StarkDomain, - ) -> (ColMatrix, ::Digest) { - // extend the auxiliary trace segment and build a Merkle tree from the extended trace - let (aux_segment_lde, aux_segment_tree, aux_segment_polys) = - build_trace_commitment::(aux_trace, domain); + ) -> (ColMatrix, H::Digest) { + // extend the auxiliary trace segment and build a commitment to the extended trace + let (aux_segment_lde, aux_segment_oracles, aux_segment_polys) = + build_trace_commitment::(aux_trace, domain); // check errors assert!( @@ -142,10 +154,10 @@ where // save the lde and commitment self.aux_segment_lde = Some(aux_segment_lde); - let root_hash = *aux_segment_tree.root(); - self.aux_segment_tree = Some(aux_segment_tree); + let commitment_string = aux_segment_oracles.commitment(); + self.aux_segment_oracles = Some(aux_segment_oracles); - (aux_segment_polys, root_hash) + (aux_segment_polys, commitment_string) } /// Reads current and next rows from the main trace segment into the specified frame. @@ -200,21 +212,21 @@ where } } - /// Returns trace table rows at the specified positions along with Merkle authentication paths - /// from the commitment root to these rows. + /// Returns trace table rows at the specified positions along with an opening proof to these + /// rows againt the already computed commitment. 
fn query(&self, positions: &[usize]) -> Vec { // build queries for the main trace segment - let mut result = vec![build_segment_queries( + let mut result = vec![build_segment_queries::( &self.main_segment_lde, - &self.main_segment_tree, + &self.main_segment_oracles, positions, )]; // build queries for the auxiliary trace segment - if let Some(ref segment_tree) = self.aux_segment_tree { + if let Some(ref segment_oracles) = self.aux_segment_oracles { let segment_lde = self.aux_segment_lde.as_ref().expect("expected aux segment to be present"); - result.push(build_segment_queries(segment_lde, segment_tree, positions)); + result.push(build_segment_queries::(segment_lde, segment_oracles, positions)); } result @@ -246,16 +258,17 @@ where /// polynomial of degree = trace_length - 1, and then evaluating the polynomial over the LDE /// domain. /// -/// The trace commitment is computed by hashing each row of the extended execution trace, then -/// building a Merkle tree from the resulting hashes. -fn build_trace_commitment( +/// The trace commitment is computed by building a vector containing the hashes of each row of +/// the extended execution trace, then building a vector commitment to the resulting vector. 
+fn build_trace_commitment( trace: &ColMatrix, domain: &StarkDomain, -) -> (RowMatrix, MerkleTree, ColMatrix) +) -> (RowMatrix, V, ColMatrix) where E: FieldElement, F: FieldElement, H: ElementHasher, + V: VectorCommitment, { // extend the execution trace let (trace_lde, trace_polys) = { @@ -277,32 +290,33 @@ where assert_eq!(trace_lde.num_rows(), domain.lde_domain_size()); // build trace commitment - let tree_depth = trace_lde.num_rows().ilog2() as usize; - let trace_tree = info_span!("compute_execution_trace_commitment", tree_depth) - .in_scope(|| trace_lde.commit_to_rows()); - assert_eq!(trace_tree.depth(), tree_depth); + let commitment_domain_size = trace_lde.num_rows(); + let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size) + .in_scope(|| trace_lde.commit_to_rows::()); + assert_eq!(trace_vector_com.domain_len(), commitment_domain_size); - (trace_lde, trace_tree, trace_polys) + (trace_lde, trace_vector_com, trace_polys) } -fn build_segment_queries( +fn build_segment_queries( segment_lde: &RowMatrix, - segment_tree: &MerkleTree, + segment_vector_com: &V, positions: &[usize], ) -> Queries where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { // for each position, get the corresponding row from the trace segment LDE and put all these // rows into a single vector let trace_states = positions.iter().map(|&pos| segment_lde.row(pos).to_vec()).collect::>(); - // build Merkle authentication paths to the leaves specified by positions - let trace_proof = segment_tree - .prove_batch(positions) - .expect("failed to generate a Merkle proof for trace queries"); + // build a batch opening proof to the leaves specified by positions + let trace_proof = segment_vector_com + .open_many(positions) + .expect("failed to generate a batch opening proof for trace queries"); - Queries::new(trace_proof, trace_states) + Queries::new::(trace_proof.1, trace_states) } diff --git a/prover/src/trace/trace_lde/default/tests.rs 
b/prover/src/trace/trace_lde/default/tests.rs index 11100c03d..c06cc2e60 100644 --- a/prover/src/trace/trace_lde/default/tests.rs +++ b/prover/src/trace/trace_lde/default/tests.rs @@ -27,8 +27,11 @@ fn extend_trace_table() { let domain = StarkDomain::new(&air); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl - let (trace_lde, trace_polys) = - DefaultTraceLde::::new(trace.info(), trace.main_segment(), &domain); + let (trace_lde, trace_polys) = DefaultTraceLde::>::new( + trace.info(), + trace.main_segment(), + &domain, + ); // check the width and length of the extended trace assert_eq!(2, trace_lde.main_segment_width()); @@ -74,10 +77,13 @@ fn commit_trace_table() { let domain = StarkDomain::new(&air); // build the trace polynomials, extended trace, and commitment using the default TraceLde impl - let (trace_lde, _) = - DefaultTraceLde::::new(trace.info(), trace.main_segment(), &domain); + let (trace_lde, _) = DefaultTraceLde::>::new( + trace.info(), + trace.main_segment(), + &domain, + ); - // build Merkle tree from trace rows + // build commitment, using a Merkle tree, to the trace rows let mut hashed_states = Vec::new(); let mut trace_state = vec![BaseElement::ZERO; trace_lde.main_segment_width()]; #[allow(clippy::needless_range_loop)] diff --git a/prover/src/trace/trace_lde/mod.rs b/prover/src/trace/trace_lde/mod.rs index 5429e3f5b..dbce21491 100644 --- a/prover/src/trace/trace_lde/mod.rs +++ b/prover/src/trace/trace_lde/mod.rs @@ -6,7 +6,7 @@ use alloc::vec::Vec; use air::{proof::Queries, LagrangeKernelEvaluationFrame, TraceInfo}; -use crypto::{ElementHasher, Hasher}; +use crypto::{ElementHasher, Hasher, VectorCommitment}; use super::{ColMatrix, EvaluationFrame, FieldElement, TracePolyTable}; use crate::StarkDomain; @@ -24,9 +24,12 @@ pub use default::DefaultTraceLde; /// - Auxiliary segments: a list of 0 or more segments for traces generated after the prover /// commits to the first trace segment. 
Currently, at most 1 auxiliary segment is possible. pub trait TraceLde: Sync { - /// The hash function used for building the Merkle tree commitments to trace segment LDEs. + /// The hash function used for hashing the rows of trace segment LDEs. type HashFn: ElementHasher; + /// The vector commitment scheme used for committing to the trace. + type VC: VectorCommitment; + /// Returns the commitment to the low-degree extension of the main trace segment. fn get_main_trace_commitment(&self) -> ::Digest; @@ -70,8 +73,8 @@ pub trait TraceLde: Sync { frame: &mut LagrangeKernelEvaluationFrame, ); - /// Returns trace table rows at the specified positions along with Merkle authentication paths - /// from the commitment root to these rows. + /// Returns trace table rows at the specified positions along with an opening proof to these + /// rows. fn query(&self, positions: &[usize]) -> Vec; /// Returns the number of rows in the execution trace. diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index 6b008c700..c84f4ec2a 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -4,12 +4,13 @@ // LICENSE file in the root directory of this source tree. use alloc::{string::ToString, vec::Vec}; +use core::marker::PhantomData; use air::{ proof::{Proof, Queries, Table, TraceOodFrame}, Air, }; -use crypto::{BatchMerkleProof, ElementHasher, MerkleTree}; +use crypto::{ElementHasher, VectorCommitment}; use fri::VerifierChannel as FriVerifierChannel; use math::{FieldElement, StarkField}; @@ -23,16 +24,20 @@ use crate::VerifierError; /// A channel is instantiated for a specific proof, which is parsed into structs over the /// appropriate field (specified by type parameter `E`). This also validates that the proof is /// well-formed in the context of the computation for the specified [Air]. 
-pub struct VerifierChannel> { +pub struct VerifierChannel< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { // trace queries - trace_roots: Vec, - trace_queries: Option>, + trace_commitments: Vec, + trace_queries: Option>, // constraint queries - constraint_root: H::Digest, - constraint_queries: Option>, + constraint_commitment: H::Digest, + constraint_queries: Option>, // FRI proof - fri_roots: Option>, - fri_layer_proofs: Vec>, + fri_commitments: Option>, + fri_layer_proofs: Vec, fri_layer_queries: Vec>, fri_remainder: Option>, fri_num_partitions: usize, @@ -44,7 +49,12 @@ pub struct VerifierChannel>, } -impl> VerifierChannel { +impl VerifierChannel +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Creates and returns a new [VerifierChannel] initialized from the specified `proof`. @@ -77,14 +87,18 @@ impl> VerifierChanne let fri_options = air.options().to_fri_options(); // --- parse commitments ------------------------------------------------------------------ - let (trace_roots, constraint_root, fri_roots) = commitments + let (trace_commitments, constraint_commitment, fri_commitments) = commitments .parse::(num_trace_segments, fri_options.num_fri_layers(lde_domain_size)) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; // --- parse trace and constraint queries ------------------------------------------------- - let trace_queries = TraceQueries::new(trace_queries, air, num_unique_queries as usize)?; - let constraint_queries = - ConstraintQueries::new(constraint_queries, air, num_unique_queries as usize)?; + let trace_queries = + TraceQueries::::new(trace_queries, air, num_unique_queries as usize)?; + let constraint_queries = ConstraintQueries::::new( + constraint_queries, + air, + num_unique_queries as usize, + )?; // --- parse FRI proofs 
------------------------------------------------------------------- let fri_num_partitions = fri_proof.num_partitions(); @@ -92,7 +106,7 @@ impl> VerifierChanne .parse_remainder() .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; let (fri_layer_queries, fri_layer_proofs) = fri_proof - .parse_layers::(lde_domain_size, fri_options.folding_factor()) + .parse_layers::(lde_domain_size, fri_options.folding_factor()) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; // --- parse out-of-domain evaluation frame ----------------------------------------------- @@ -102,13 +116,13 @@ impl> VerifierChanne Ok(VerifierChannel { // trace queries - trace_roots, + trace_commitments, trace_queries: Some(trace_queries), // constraint queries - constraint_root, + constraint_commitment, constraint_queries: Some(constraint_queries), // FRI proof - fri_roots: Some(fri_roots), + fri_commitments: Some(fri_commitments), fri_layer_proofs, fri_layer_queries, fri_remainder: Some(fri_remainder), @@ -130,12 +144,12 @@ impl> VerifierChanne /// For computations requiring multiple trace segment, the returned slice will contain a /// commitment for each trace segment. pub fn read_trace_commitments(&self) -> &[H::Digest] { - &self.trace_roots + &self.trace_commitments } /// Returns constraint evaluation commitment sent by the prover. 
pub fn read_constraint_commitment(&self) -> H::Digest { - self.constraint_root + self.constraint_commitment } /// Returns trace polynomial evaluations at out-of-domain points z and z * g, where g is the @@ -177,9 +191,27 @@ impl> VerifierChanne let queries = self.trace_queries.take().expect("already read"); // make sure the states included in the proof correspond to the trace commitment - for (root, proof) in self.trace_roots.iter().zip(queries.query_proofs.iter()) { - MerkleTree::verify_batch(root, positions, proof) - .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?; + + let items: Vec = + queries.main_states.rows().map(|row| H::hash_elements(row)).collect(); + >::verify_many( + self.trace_commitments[0], + positions, + &items, + &queries.query_proofs[0], + ) + .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?; + + if let Some(ref aux_states) = queries.aux_states { + let items: Vec = + aux_states.rows().map(|row| H::hash_elements(row)).collect(); + >::verify_many( + self.trace_commitments[1], + positions, + &items, + &queries.query_proofs[1], + ) + .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?; } Ok((queries.main_states, queries.aux_states)) @@ -193,9 +225,15 @@ impl> VerifierChanne positions: &[usize], ) -> Result, VerifierError> { let queries = self.constraint_queries.take().expect("already read"); - - MerkleTree::verify_batch(&self.constraint_root, positions, &queries.query_proofs) - .map_err(|_| VerifierError::ConstraintQueryDoesNotMatchCommitment)?; + let items: Vec = + queries.evaluations.rows().map(|row| H::hash_elements(row)).collect(); + >::verify_many( + self.constraint_commitment, + positions, + &items, + &queries.query_proofs, + ) + .map_err(|_| VerifierError::ConstraintQueryDoesNotMatchCommitment)?; Ok(queries.evaluations) } @@ -204,22 +242,24 @@ impl> VerifierChanne // FRI VERIFIER CHANNEL IMPLEMENTATION // ================================================================================================ 
-impl FriVerifierChannel for VerifierChannel +impl FriVerifierChannel for VerifierChannel where E: FieldElement, H: ElementHasher, + V: VectorCommitment, { type Hasher = H; + type VectorCommitment = V; fn read_fri_num_partitions(&self) -> usize { self.fri_num_partitions } fn read_fri_layer_commitments(&mut self) -> Vec { - self.fri_roots.take().expect("already read") + self.fri_commitments.take().expect("already read") } - fn take_next_fri_layer_proof(&mut self) -> BatchMerkleProof { + fn take_next_fri_layer_proof(&mut self) -> V::MultiProof { self.fri_layer_proofs.remove(0) } @@ -237,18 +277,28 @@ where /// Container of trace query data, including: /// * Queried states for all trace segments. -/// * Merkle authentication paths for all queries. +/// * Batch opening proof for all queries. /// /// Trace states for all auxiliary segments are stored in a single table. -struct TraceQueries> { - query_proofs: Vec>, +struct TraceQueries< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { + query_proofs: Vec, main_states: Table, aux_states: Option>, + _h: PhantomData, } -impl> TraceQueries { +impl TraceQueries +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Parses the provided trace queries into trace states in the specified field and - /// corresponding Merkle authentication paths. + /// corresponding batch opening proof. 
pub fn new>( mut queries: Vec, air: &A, @@ -262,12 +312,11 @@ impl> TraceQueries(air.lde_domain_size(), num_queries, main_segment_width) + .parse::(air.lde_domain_size(), num_queries, main_segment_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "main trace segment query deserialization failed: {err}" @@ -278,14 +327,13 @@ impl> TraceQueries(air.lde_domain_size(), num_queries, segment_width) + .parse::(air.lde_domain_size(), num_queries, segment_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "auxiliary trace segment query deserialization failed: {err}" @@ -305,6 +353,7 @@ impl> TraceQueries> TraceQueries> { - query_proofs: BatchMerkleProof, +/// * Batch opening proof for all queries. +struct ConstraintQueries< + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +> { + query_proofs: V::MultiProof, evaluations: Table, + _h: PhantomData, } -impl> ConstraintQueries { +impl ConstraintQueries +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, +{ /// Parses the provided constraint queries into evaluations in the specified field and - /// corresponding Merkle authentication paths. + /// corresponding batch opening proof. 
pub fn new>( queries: Queries, air: &A, @@ -331,13 +390,17 @@ impl> ConstraintQuer let constraint_frame_width = air.context().num_constraint_composition_columns(); let (query_proofs, evaluations) = queries - .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) + .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "constraint evaluation query deserialization failed: {err}" )) })?; - Ok(Self { query_proofs, evaluations }) + Ok(Self { + query_proofs, + evaluations, + _h: PhantomData, + }) } } diff --git a/verifier/src/errors.rs b/verifier/src/errors.rs index fb2aaa36e..e1b072db5 100644 --- a/verifier/src/errors.rs +++ b/verifier/src/errors.rs @@ -29,11 +29,10 @@ pub enum VerifierError { /// This error occurs when constraints evaluated over out-of-domain trace rows do not match /// evaluations of the constraint composition polynomial at the out-of-domain point. InconsistentOodConstraintEvaluations, - /// This error occurs when Merkle authentication paths of trace queries do not resolve to the - /// execution trace commitment included in the proof. + /// This error occurs when the batch opening proof fails to verify for trace queries. TraceQueryDoesNotMatchCommitment, - /// This error occurs when Merkle authentication paths of constraint evaluation queries do not - /// resolve to the constraint evaluation commitment included in the proof. + /// This error occurs when the batch opening proof fails to verify for constraint evaluation + /// queries. 
ConstraintQueryDoesNotMatchCommitment, /// This error occurs when the proof-of-work nonce hashed with the current state of the public /// coin resolves to a value which does not meet the proof-of-work threshold specified by the @@ -79,10 +78,10 @@ impl fmt::Display for VerifierError { write!(f, "constraint evaluations over the out-of-domain frame are inconsistent") } Self::TraceQueryDoesNotMatchCommitment => { - write!(f, "trace query did not match the commitment") + write!(f, "failed to open trace query against the given commitment") } Self::ConstraintQueryDoesNotMatchCommitment => { - write!(f, "constraint query did not match the commitment") + write!(f, "failed to open constraint query against the given commitment") } Self::QuerySeedProofOfWorkVerificationFailed => { write!(f, "query seed proof-of-work verification failed") diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs index a9c5ab7f7..2c75ecd1d 100644 --- a/verifier/src/lib.rs +++ b/verifier/src/lib.rs @@ -40,7 +40,7 @@ pub use air::{ }; use air::{AuxRandElements, GkrVerifier}; pub use crypto; -use crypto::{ElementHasher, Hasher, RandomCoin}; +use crypto::{ElementHasher, Hasher, RandomCoin, VectorCommitment}; use fri::FriVerifier; pub use math; use math::{ @@ -78,7 +78,7 @@ pub use errors::VerifierError; /// - The specified proof was generated for a different computation. /// - The specified proof was generated for this computation but for different public inputs. /// - The specified proof was generated with parameters not providing an acceptable security level. 
-pub fn verify( +pub fn verify( proof: Proof, pub_inputs: AIR::PublicInputs, acceptable_options: &AcceptableOptions, @@ -87,6 +87,7 @@ where AIR: Air, HashFn: ElementHasher, RandCoin: RandomCoin, + VC: VectorCommitment, { // check that `proof` was generated with an acceptable set of parameters from the point of view // of the verifier @@ -107,7 +108,11 @@ where FieldExtension::None => { let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::(air, channel, public_coin) + perform_verification::( + air, + channel, + public_coin, + ) }, FieldExtension::Quadratic => { if !>::is_supported() { @@ -115,7 +120,7 @@ where } let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::, HashFn, RandCoin>( + perform_verification::, HashFn, RandCoin, VC>( air, channel, public_coin, @@ -127,7 +132,7 @@ where } let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::, HashFn, RandCoin>( + perform_verification::, HashFn, RandCoin, VC>( air, channel, public_coin, @@ -140,9 +145,9 @@ where // ================================================================================================ /// Performs the actual verification by reading the data from the `channel` and making sure it /// attests to a correct execution of the computation specified by the provided `air`. 
-fn perform_verification( +fn perform_verification( air: A, - mut channel: VerifierChannel, + mut channel: VerifierChannel, mut public_coin: R, ) -> Result<(), VerifierError> where @@ -150,6 +155,7 @@ where A: Air, H: ElementHasher, R: RandomCoin, + V: VectorCommitment, { // 1 ----- trace commitment ------------------------------------------------------------------- // Read the commitments to evaluations of the trace polynomials over the LDE domain sent by the diff --git a/winterfell/src/lib.rs b/winterfell/src/lib.rs index a4fe90125..86c5e0345 100644 --- a/winterfell/src/lib.rs +++ b/winterfell/src/lib.rs @@ -152,7 +152,7 @@ //! math::{fields::f128::BaseElement, FieldElement, ToElements}, //! Air, AirContext, Assertion, GkrVerifier, EvaluationFrame, //! ProofOptions, TraceInfo, TransitionConstraintDegree, -//! crypto::{hashers::Blake3_256, DefaultRandomCoin}, +//! crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! }; //! //! // Public inputs for our computation will consist of the starting value and the end result. @@ -258,7 +258,7 @@ //! //! ```no_run //! use winterfell::{ -//! crypto::{hashers::Blake3_256, DefaultRandomCoin}, +//! crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! math::{fields::f128::BaseElement, FieldElement, ToElements}, //! matrix::ColMatrix, //! DefaultTraceLde, ProofOptions, Prover, StarkDomain, Trace, TracePolyTable, TraceTable, @@ -347,8 +347,9 @@ //! type Air = WorkAir; //! type Trace = TraceTable; //! type HashFn = Blake3_256; +//! type VC = MerkleTree; //! type RandomCoin = DefaultRandomCoin; -//! type TraceLde> = DefaultTraceLde; +//! type TraceLde> = DefaultTraceLde; //! type ConstraintEvaluator<'a, E: FieldElement> = //! DefaultConstraintEvaluator<'a, Self::Air, E>; //! @@ -394,7 +395,7 @@ //! //! ``` //! # use winterfell::{ -//! # crypto::{hashers::Blake3_256, DefaultRandomCoin}, +//! # crypto::{hashers::Blake3_256, DefaultRandomCoin, MerkleTree}, //! 
# math::{fields::f128::BaseElement, FieldElement, ToElements}, //! # matrix::ColMatrix, //! # Air, AirContext, Assertion, AuxRandElements, ByteWriter, DefaultConstraintEvaluator, @@ -490,8 +491,9 @@ //! # type Air = WorkAir; //! # type Trace = TraceTable; //! # type HashFn = Blake3_256; +//! # type VC = MerkleTree; //! # type RandomCoin = DefaultRandomCoin; -//! # type TraceLde> = DefaultTraceLde; +//! # type TraceLde> = DefaultTraceLde; //! # type ConstraintEvaluator<'a, E: FieldElement> = //! # DefaultConstraintEvaluator<'a, Self::Air, E>; //! # @@ -559,7 +561,8 @@ //! let pub_inputs = PublicInputs { start, result }; //! assert!(winterfell::verify::, -//! DefaultRandomCoin> +//! DefaultRandomCoin>, +//! MerkleTree> //! >(proof, pub_inputs, &min_opts).is_ok()); //! ``` //! @@ -594,14 +597,14 @@ extern crate std; pub use air::{AuxRandElements, GkrVerifier}; pub use prover::{ crypto, iterators, math, matrix, Air, AirContext, Assertion, AuxTraceWithMetadata, - BoundaryConstraint, BoundaryConstraintGroup, ByteReader, ByteWriter, CompositionPolyTrace, + BoundaryConstraint, BoundaryConstraintGroup, CompositionPolyTrace, ConstraintCompositionCoefficients, ConstraintDivisor, ConstraintEvaluator, - DeepCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, Deserializable, - DeserializationError, EvaluationFrame, FieldExtension, Proof, ProofOptions, Prover, - ProverError, ProverGkrProof, Serializable, SliceReader, StarkDomain, Trace, TraceInfo, - TraceLde, TracePolyTable, TraceTable, TraceTableFragment, TransitionConstraintDegree, + DeepCompositionCoefficients, DefaultConstraintEvaluator, DefaultTraceLde, EvaluationFrame, + FieldExtension, Proof, ProofOptions, Prover, ProverError, ProverGkrProof, StarkDomain, Trace, + TraceInfo, TraceLde, TracePolyTable, TraceTable, TraceTableFragment, + TransitionConstraintDegree, }; -pub use verifier::{verify, AcceptableOptions, VerifierError}; +pub use verifier::{verify, AcceptableOptions, ByteWriter, VerifierError}; 
#[cfg(test)] mod tests; diff --git a/winterfell/src/tests.rs b/winterfell/src/tests.rs index bfc0aa264..3757e2010 100644 --- a/winterfell/src/tests.rs +++ b/winterfell/src/tests.rs @@ -6,6 +6,7 @@ use std::{vec, vec::Vec}; use air::{GkrRandElements, LagrangeKernelRandElements}; +use crypto::MerkleTree; use prover::{ crypto::{hashers::Blake3_256, DefaultRandomCoin, RandomCoin}, math::{fields::f64::BaseElement, ExtensionOf, FieldElement}, @@ -28,6 +29,7 @@ fn test_complex_lagrange_kernel_air() { LagrangeKernelComplexAir, Blake3_256, DefaultRandomCoin>, + MerkleTree>, >(proof, (), &AcceptableOptions::MinConjecturedSecurity(0)) .unwrap() } @@ -213,8 +215,10 @@ impl Prover for LagrangeComplexProver { type Air = LagrangeKernelComplexAir; type Trace = LagrangeComplexTrace; type HashFn = Blake3_256; + type VC = MerkleTree>; type RandomCoin = DefaultRandomCoin; - type TraceLde> = DefaultTraceLde; + type TraceLde> = + DefaultTraceLde; type ConstraintEvaluator<'a, E: FieldElement> = DefaultConstraintEvaluator<'a, LagrangeKernelComplexAir, E>;