From 4ed37ae4a0f8a83cb465a0d730a0b07f47312dd8 Mon Sep 17 00:00:00 2001 From: Al-Kindi-0 <82364884+Al-Kindi-0@users.noreply.github.com> Date: Tue, 2 Jul 2024 11:46:44 +0200 Subject: [PATCH] chore: address various comments --- air/src/proof/queries.rs | 19 ++++----- crypto/src/commitment.rs | 15 +++++-- crypto/src/merkle/mod.rs | 4 ++ fri/src/proof.rs | 18 +++++++-- fri/src/prover/channel.rs | 8 ++-- fri/src/prover/mod.rs | 15 +++---- fri/src/prover/tests.rs | 2 +- fri/src/utils.rs | 2 +- fri/src/verifier/channel.rs | 9 ++++- prover/src/channel.rs | 2 +- prover/src/constraints/commitment.rs | 14 ++++++- prover/src/lib.rs | 2 +- prover/src/matrix/col_matrix.rs | 4 +- prover/src/matrix/row_matrix.rs | 2 +- prover/src/trace/trace_lde/default/mod.rs | 48 +++++++++++++---------- verifier/src/channel.rs | 44 +++++++++++---------- verifier/src/lib.rs | 10 ++--- 17 files changed, 131 insertions(+), 87 deletions(-) diff --git a/air/src/proof/queries.rs b/air/src/proof/queries.rs index ebd62e532..3c5250fc0 100644 --- a/air/src/proof/queries.rs +++ b/air/src/proof/queries.rs @@ -82,7 +82,7 @@ impl Queries { /// * `domain_size` is not a power of two. /// * `num_queries` is zero. /// * `values_per_query` is zero. - pub fn parse( + pub fn parse( self, domain_size: usize, num_queries: usize, @@ -116,10 +116,13 @@ impl Queries { let opening_proof = ::read_from(&mut reader)?; // check that the opening proof matches the domain length - assert_eq!( - >::get_multiproof_domain_len(&opening_proof), - domain_size - ); + if >::get_multiproof_domain_len(&opening_proof) != domain_size { + return Err(DeserializationError::InvalidValue(format!( + "expected a domain of size {} but was {}", + domain_size, + >::get_multiproof_domain_len(&opening_proof), + ))); + } if reader.has_more_bytes() { return Err(DeserializationError::UnconsumedBytes); @@ -155,12 +158,10 @@ impl Deserializable for Queries { /// Returns an error of a valid query struct could not be read from the specified source. fn read_from(source: &mut R) -> Result { // read values - let num_value_bytes = source.read_u32()?; - let values = source.read_vec(num_value_bytes as usize)?; + let values = Vec::<_>::read_from(source)?; // read paths - let num_paths_bytes = source.read_u32()?; - let paths = source.read_vec(num_paths_bytes as usize)?; + let paths = Vec::<_>::read_from(source)?; Ok(Queries { opening_proof: paths, values }) } diff --git a/crypto/src/commitment.rs b/crypto/src/commitment.rs index 80c6daa60..72ec674e7 100644 --- a/crypto/src/commitment.rs +++ b/crypto/src/commitment.rs @@ -15,12 +15,16 @@ use crate::Hasher; /// This is a cryptographic primitive allowing one to commit, using a commitment string `com`, to /// a vector of values (v_0, ..., v_{n-1}) such that one can later reveal the value at the i-th /// position. +/// /// This is achieved by providing the value `v_i` together with a proof `proof_i` such that anyone /// posessing `com` can be convinced, with high confidence, that the claim is true. /// /// Vector commitment schemes usually have some batching properties in the sense that opening /// proofs for a number of `(i, v_i)` can be batched together into one batch opening proof in order /// to optimize both the proof size as well as the verification time. +/// +/// The current implementation restricts both of the commitment string as well as the leaf values +/// to be `H::Digest` where `H` is a type parameter such that `H: Hasher`. pub trait VectorCommitment: Sized { /// Options defining the VC i.e., public parameters. 
type Options: Default; @@ -41,21 +45,24 @@ pub trait VectorCommitment: Sized { /// options. fn with_options(items: Vec, options: Self::Options) -> Result; - /// Returns the commitment string to the commited values. + /// Returns the commitment string to the committed values. fn commitment(&self) -> H::Digest; - /// Returns the length of the vector commited to for `Self::Proof`. + /// Returns the length of the vector committed to for `Self`. + fn get_domain_len(&self) -> usize; + + /// Returns the length of the vector committed to for `Self::Proof`. fn get_proof_domain_len(proof: &Self::Proof) -> usize; - /// Returns the length of the vector commited to for `Self::MultiProof`. + /// Returns the length of the vector committed to for `Self::MultiProof`. fn get_multiproof_domain_len(proof: &Self::MultiProof) -> usize; /// Opens the value at a given index and provides a proof for the correctness of claimed value. fn open(&self, index: usize) -> Result<(H::Digest, Self::Proof), Self::Error>; - #[allow(clippy::type_complexity)] /// Opens the values at a given index set and provides a proof for the correctness of claimed /// values. + #[allow(clippy::type_complexity)] fn open_many( &self, indexes: &[usize], diff --git a/crypto/src/merkle/mod.rs b/crypto/src/merkle/mod.rs index 98da01ddc..6df84fea0 100644 --- a/crypto/src/merkle/mod.rs +++ b/crypto/src/merkle/mod.rs @@ -416,6 +416,10 @@ impl VectorCommitment for MerkleTree { *self.root() } + fn get_domain_len(&self) -> usize { + 1 << self.depth() + } + fn get_proof_domain_len(proof: &Self::Proof) -> usize { 1 << proof.len() } diff --git a/fri/src/proof.rs b/fri/src/proof.rs index 2ba5c127e..65dd2af92 100644 --- a/fri/src/proof.rs +++ b/fri/src/proof.rs @@ -126,7 +126,7 @@ impl FriProof { /// * This proof is not consistent with the specified `domain_size` and `folding_factor`. /// * Any of the layers could not be parsed successfully. #[allow(clippy::type_complexity)] - pub fn parse_layers( + pub fn parse_layers( self, mut domain_size: usize, folding_factor: usize, @@ -146,9 +146,19 @@ impl FriProof { // parse all layers for (i, layer) in self.layers.into_iter().enumerate() { domain_size /= folding_factor; - let (qv, op) = layer.parse::(folding_factor).map_err(|err| { + let (qv, op) = layer.parse::<_, H, V>(folding_factor).map_err(|err| { DeserializationError::InvalidValue(format!("failed to parse FRI layer {i}: {err}")) })?; + + // check that the opening proof matches the domain length + if >::get_multiproof_domain_len(&op) != domain_size { + return Err(DeserializationError::InvalidValue(format!( + "expected a domain of size {} but was {}", + domain_size, + >::get_multiproof_domain_len(&op), + ))); + } + layer_proofs.push(op); layer_queries.push(qv); } @@ -241,7 +251,7 @@ impl FriProofLayer { /// /// # Panics /// Panics if `query_values` is an empty slice. - pub(crate) fn new>( + pub(crate) fn new, const N: usize>( query_values: Vec<[E; N]>, proof: >::MultiProof, ) -> Self { @@ -277,7 +287,7 @@ impl FriProofLayer { /// * This layer does not contain at least one query. /// * Parsing of any of the query values or the corresponding batch opening proof fails. /// * Not all bytes have been consumed while parsing this layer. 
- pub fn parse( + pub fn parse( self, folding_factor: usize, ) -> Result<(Vec, >::MultiProof), DeserializationError> diff --git a/fri/src/prover/channel.rs b/fri/src/prover/channel.rs index 3773b11c6..7231e757c 100644 --- a/fri/src/prover/channel.rs +++ b/fri/src/prover/channel.rs @@ -21,7 +21,7 @@ use math::FieldElement; /// In the interactive version of the protocol, the verifier chooses α uniformly at random from /// the entire field. In the non-interactive version, the α is drawn pseudo-randomly based on the /// commitments the prover has written into the channel up to this point. -pub trait ProverChannel { +pub trait ProverChannel { /// Hash function used by the prover to commit to polynomial evaluations. type Hasher: ElementHasher; @@ -31,10 +31,10 @@ pub trait ProverChannel { /// evaluations of a polynomial at a given layer. The vector commitment is built by /// first transposing evaluations into a two-dimensional matrix where each row contains /// values needed to compute a single value of the next FRI layer, and then computing - /// the hash of each row to get one entry of the vector being commited to. Thus, the number + /// the hash of each row to get one entry of the vector being committed to. Thus, the number /// of elements grouped into a single leaf is equal to the `folding_factor` used for FRI layer /// construction. - fn commit_fri_layer(&mut self, layer_root: H::Digest); + fn commit_fri_layer(&mut self, layer_root: ::Digest); /// Returns a random α drawn uniformly at random from the entire field. /// @@ -116,7 +116,7 @@ where } } -impl ProverChannel for DefaultProverChannel +impl ProverChannel for DefaultProverChannel where E: FieldElement, H: ElementHasher, diff --git a/fri/src/prover/mod.rs b/fri/src/prover/mod.rs index ac44018ad..17092ad34 100644 --- a/fri/src/prover/mod.rs +++ b/fri/src/prover/mod.rs @@ -39,12 +39,9 @@ mod tests; /// /// The prover is parametrized with the following types: /// -/// * `B` specifies the base field of the STARK protocol. -/// * `E` specifies the field in which the FRI protocol is executed. This can be the same as the -/// base field `B`, but it can also be an extension of the base field in cases when the base -/// field is too small to provide desired security level for the FRI protocol. +/// * `E` specifies the field in which the FRI protocol is executed. /// * `C` specifies the type used to simulate prover-verifier interaction. -/// * `H` specifies the hash function used to build for each layer the vector of values commited to +/// * `H` specifies the hash function used to build for each layer the vector of values committed to /// using the specified vector commitment scheme. The same hash function must be used in /// the prover channel to generate pseudo random values. /// * `V` specifies the vector commitment scheme used in order to commit to each layer. @@ -98,7 +95,7 @@ mod tests; pub struct FriProver where E: FieldElement, - C: ProverChannel, + C: ProverChannel, H: ElementHasher, V: VectorCommitment, { @@ -122,7 +119,7 @@ struct FriRemainder(Vec); impl FriProver where E: FieldElement, - C: ProverChannel, + C: ProverChannel, H: ElementHasher, V: VectorCommitment, { @@ -308,12 +305,12 @@ fn query_layer, const N: usiz for &position in positions.iter() { queried_values.push(evaluations[position]); } - FriProofLayer::new::<_, _, N, V>(queried_values, proof.1) + FriProofLayer::new::<_, _, V, N>(queried_values, proof.1) } /// Hashes each of the arrays in the provided slice and returns a vector commitment to resulting /// hashes. 
-pub fn build_layer_commitment( +pub fn build_layer_commitment( values: &[[E; N]], ) -> Result>::Error> where diff --git a/fri/src/prover/tests.rs b/fri/src/prover/tests.rs index 5bf2805de..e765092c5 100644 --- a/fri/src/prover/tests.rs +++ b/fri/src/prover/tests.rs @@ -107,7 +107,7 @@ fn fri_prove_verify( let mut prover = FriProver::<_, _, _, MerkleTree>::new(options.clone()); prover.build_layers(&mut channel, evaluations.clone()); let positions = channel.draw_query_positions(0); - let proof = prover.build_proof(&positions); // assert_eq!(1, 0 ); + let proof = prover.build_proof(&positions); // make sure the proof can be verified let commitments = channel.layer_commitments().to_vec(); diff --git a/fri/src/utils.rs b/fri/src/utils.rs index 1480f5470..725e5b4c9 100644 --- a/fri/src/utils.rs +++ b/fri/src/utils.rs @@ -12,7 +12,7 @@ pub fn map_positions_to_indexes( folding_factor: usize, num_partitions: usize, ) -> Vec { - // if there was only 1 partition, order of elements in the commitment tree + // if there was only 1 partition, order of elements in the vector commitment // is the same as the order of elements in the evaluation domain if num_partitions == 1 { return positions.to_vec(); diff --git a/fri/src/verifier/channel.rs b/fri/src/verifier/channel.rs index eb9dd6604..6f8709858 100644 --- a/fri/src/verifier/channel.rs +++ b/fri/src/verifier/channel.rs @@ -26,6 +26,7 @@ use crate::{FriProof, VerifierError}; pub trait VerifierChannel { /// Hash function used by the prover to commit to polynomial evaluations. type Hasher: ElementHasher; + /// Vector commitment used to commit to polynomial evaluations. type VectorCommitment: VectorCommitment; // REQUIRED METHODS @@ -60,7 +61,7 @@ pub trait VerifierChannel { /// prover to the verifier during the query phase of the FRI protocol. /// /// It is expected that layer proofs and layer queries at the same FRI layer are consistent. - /// That is, query values hash into the elements of the vector commited to using the specified + /// That is, query values hash into the elements of the vector committed to using the specified /// vector commitment scheme. 
fn take_next_fri_layer_proof( &mut self, @@ -86,7 +87,11 @@ pub trait VerifierChannel { ) -> Result, VerifierError> { let layer_proof = self.take_next_fri_layer_proof(); let layer_queries = self.take_next_fri_layer_queries(); + // build the values (i.e., polynomial evaluations over a coset of a multiplicative subgroup + // of the current evaluation domain) corresponding to each leaf of the layer commitment let leaf_values = group_slice_elements(&layer_queries); + // hash the aforementioned values to get the leaves to be verified against the previously + // received commitment let hashed_values: Vec<::Digest> = leaf_values .iter() .map(|seg| ::hash_elements(seg)) @@ -153,7 +158,7 @@ where let remainder = proof.parse_remainder()?; let (layer_queries, layer_proofs) = - proof.parse_layers::(domain_size, folding_factor)?; + proof.parse_layers::(domain_size, folding_factor)?; Ok(DefaultVerifierChannel { layer_commitments, diff --git a/prover/src/channel.rs b/prover/src/channel.rs index e0f992a6a..34a39d3fc 100644 --- a/prover/src/channel.rs +++ b/prover/src/channel.rs @@ -203,7 +203,7 @@ where // FRI PROVER CHANNEL IMPLEMENTATION // ================================================================================================ -impl<'a, A, E, H, R, V> fri::ProverChannel for ProverChannel<'a, A, E, H, R, V> +impl<'a, A, E, H, R, V> fri::ProverChannel for ProverChannel<'a, A, E, H, R, V> where A: Air, E: FieldElement, diff --git a/prover/src/constraints/commitment.rs b/prover/src/constraints/commitment.rs index 1877e5063..724b7454b 100644 --- a/prover/src/constraints/commitment.rs +++ b/prover/src/constraints/commitment.rs @@ -31,12 +31,22 @@ pub struct ConstraintCommitment< _h: PhantomData, } -impl, V: VectorCommitment> - ConstraintCommitment +impl ConstraintCommitment +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, { /// Creates a new constraint evaluation commitment from the provided composition polynomial /// evaluations and the corresponding vector commitment. pub fn new(evaluations: RowMatrix, commitment: V) -> ConstraintCommitment { + assert_eq!( + evaluations.num_rows(), + commitment.get_domain_len(), + "number of rows in constraint evaluation matrix must be the same as the size + of the vector commitment domain" + ); + ConstraintCommitment { evaluations, vector_commitment: commitment, diff --git a/prover/src/lib.rs b/prover/src/lib.rs index 55f381af2..25fbf52c1 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -549,7 +549,7 @@ pub trait Prover { // finally, build constraint evaluation commitment let constraint_commitment = info_span!( "compute_constraint_evaluation_commitment", - tree_depth = domain_size.ilog2() + log_domain_size = domain_size.ilog2() ) .in_scope(|| { let commitment = composed_evaluations.commit_to_rows::(); diff --git a/prover/src/matrix/col_matrix.rs b/prover/src/matrix/col_matrix.rs index c9db5a13c..61f67aca1 100644 --- a/prover/src/matrix/col_matrix.rs +++ b/prover/src/matrix/col_matrix.rs @@ -256,7 +256,7 @@ impl ColMatrix { /// /// The commitment is built as follows: /// * Each row of the matrix is hashed into a single digest of the specified hash function. - /// * The resulting vector of digests is commited to using the specified vector commitment + /// * The resulting vector of digests is committed to using the specified vector commitment /// scheme. /// * The resulting commitment is returned as the commitment to the entire matrix. 
pub fn commit_to_rows(&self) -> V @@ -282,7 +282,7 @@ impl ColMatrix { } ); - V::new(row_hashes).unwrap() + V::new(row_hashes).expect("failed to construct trace vector commitment") } // CONVERSIONS diff --git a/prover/src/matrix/row_matrix.rs b/prover/src/matrix/row_matrix.rs index 83460d64a..f42ca0e7a 100644 --- a/prover/src/matrix/row_matrix.rs +++ b/prover/src/matrix/row_matrix.rs @@ -200,7 +200,7 @@ impl RowMatrix { ); // build the vector commitment to the hashed rows - V::new(row_hashes).unwrap() + V::new(row_hashes).expect("failed to construct trace vector commitment") } } diff --git a/prover/src/trace/trace_lde/default/mod.rs b/prover/src/trace/trace_lde/default/mod.rs index de894b69e..094fc5dd5 100644 --- a/prover/src/trace/trace_lde/default/mod.rs +++ b/prover/src/trace/trace_lde/default/mod.rs @@ -36,18 +36,21 @@ pub struct DefaultTraceLde< // low-degree extension of the main segment of the trace main_segment_lde: RowMatrix, // commitment to the main segment of the trace - main_segment_tree: V, + main_segment_vector_com: V, // low-degree extensions of the auxiliary segment of the trace aux_segment_lde: Option>, // commitment to the auxiliary segment of the trace - aux_segment_tree: Option, + aux_segment_vector_com: Option, blowup: usize, trace_info: TraceInfo, _h: PhantomData, } -impl, V: VectorCommitment> - DefaultTraceLde +impl DefaultTraceLde +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, { /// Takes the main trace segment columns as input, interpolates them into polynomials in /// coefficient form, evaluates the polynomials over the LDE domain, commits to the @@ -62,15 +65,15 @@ impl, V: VectorCommi domain: &StarkDomain, ) -> (Self, TracePolyTable) { // extend the main execution trace and build a commitment to the extended trace - let (main_segment_lde, main_segment_tree, main_segment_polys) = + let (main_segment_lde, main_segment_vector_com, main_segment_polys) = build_trace_commitment::(main_trace, domain); let trace_poly_table = TracePolyTable::new(main_segment_polys); let trace_lde = DefaultTraceLde { main_segment_lde, - main_segment_tree, + main_segment_vector_com, aux_segment_lde: None, - aux_segment_tree: None, + aux_segment_vector_com: None, blowup: domain.trace_to_lde_blowup(), trace_info: trace_info.clone(), _h: PhantomData, @@ -114,7 +117,7 @@ where /// Returns the commitment to the low-degree extension of the main trace segment. fn get_main_trace_commitment(&self) -> H::Digest { - self.main_segment_tree.commitment() + self.main_segment_vector_com.commitment() } /// Takes auxiliary trace segment columns as input, interpolates them into polynomials in @@ -135,7 +138,7 @@ where domain: &StarkDomain, ) -> (ColMatrix, H::Digest) { // extend the auxiliary trace segment and build a commitment to the extended trace - let (aux_segment_lde, aux_segment_tree, aux_segment_polys) = + let (aux_segment_lde, aux_segment_vector_com, aux_segment_polys) = build_trace_commitment::(aux_trace, domain); // check errors @@ -151,10 +154,10 @@ where // save the lde and commitment self.aux_segment_lde = Some(aux_segment_lde); - let root_hash = aux_segment_tree.commitment(); - self.aux_segment_tree = Some(aux_segment_tree); + let commitment_string = aux_segment_vector_com.commitment(); + self.aux_segment_vector_com = Some(aux_segment_vector_com); - (aux_segment_polys, root_hash) + (aux_segment_polys, commitment_string) } /// Reads current and next rows from the main trace segment into the specified frame. 
@@ -215,15 +218,19 @@ where // build queries for the main trace segment let mut result = vec![build_segment_queries::( &self.main_segment_lde, - &self.main_segment_tree, + &self.main_segment_vector_com, positions, )]; // build queries for the auxiliary trace segment - if let Some(ref segment_tree) = self.aux_segment_tree { + if let Some(ref segment_vector_com) = self.aux_segment_vector_com { let segment_lde = self.aux_segment_lde.as_ref().expect("expected aux segment to be present"); - result.push(build_segment_queries::(segment_lde, segment_tree, positions)); + result.push(build_segment_queries::( + segment_lde, + segment_vector_com, + positions, + )); } result @@ -287,16 +294,17 @@ where assert_eq!(trace_lde.num_rows(), domain.lde_domain_size()); // build trace commitment - let tree_depth = trace_lde.num_rows().ilog2() as usize; - let trace_tree = info_span!("compute_execution_trace_commitment", tree_depth) + let commitment_domain_size = trace_lde.num_rows(); + let trace_vector_com = info_span!("compute_execution_trace_commitment", commitment_domain_size) .in_scope(|| trace_lde.commit_to_rows::()); + assert_eq!(trace_vector_com.get_domain_len(), commitment_domain_size); - (trace_lde, trace_tree, trace_polys) + (trace_lde, trace_vector_com, trace_polys) } fn build_segment_queries( segment_lde: &RowMatrix, - segment_tree: &V, + segment_vector_com: &V, positions: &[usize], ) -> Queries where @@ -310,7 +318,7 @@ where positions.iter().map(|&pos| segment_lde.row(pos).to_vec()).collect::>(); // build a batch opening proof to the leaves specified by positions - let trace_proof = segment_tree + let trace_proof = segment_vector_com .open_many(positions) .expect("failed to generate a batch opening proof for trace queries"); diff --git a/verifier/src/channel.rs b/verifier/src/channel.rs index d1b7cff74..c84f4ec2a 100644 --- a/verifier/src/channel.rs +++ b/verifier/src/channel.rs @@ -49,8 +49,11 @@ pub struct VerifierChannel< gkr_proof: Option>, } -impl, V: VectorCommitment> - VerifierChannel +impl VerifierChannel +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- @@ -103,7 +106,7 @@ impl, V: VectorCommi .parse_remainder() .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; let (fri_layer_queries, fri_layer_proofs) = fri_proof - .parse_layers::(lde_domain_size, fri_options.folding_factor()) + .parse_layers::(lde_domain_size, fri_options.folding_factor()) .map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?; // --- parse out-of-domain evaluation frame ----------------------------------------------- @@ -190,7 +193,7 @@ impl, V: VectorCommi // make sure the states included in the proof correspond to the trace commitment let items: Vec = - { queries.main_states.rows().map(|row| H::hash_elements(row)).collect() }; + queries.main_states.rows().map(|row| H::hash_elements(row)).collect(); >::verify_many( self.trace_commitments[0], positions, @@ -199,16 +202,9 @@ impl, V: VectorCommi ) .map_err(|_| VerifierError::TraceQueryDoesNotMatchCommitment)?; - if queries.aux_states.is_some() { - let items: Vec = { - queries - .aux_states - .clone() - .unwrap() - .rows() - .map(|row| H::hash_elements(row)) - .collect() - }; + if let Some(ref aux_states) = queries.aux_states { + let items: Vec = + aux_states.rows().map(|row| H::hash_elements(row)).collect(); >::verify_many( self.trace_commitments[1], positions, @@ -295,8 +291,11 @@ struct 
TraceQueries< _h: PhantomData, } -impl, V: VectorCommitment> - TraceQueries +impl TraceQueries +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, { /// Parses the provided trace queries into trace states in the specified field and /// corresponding batch opening proof. @@ -317,7 +316,7 @@ impl, V: VectorCommi let main_segment_width = air.trace_info().main_trace_width(); let main_segment_queries = queries.remove(0); let (main_segment_query_proofs, main_segment_states) = main_segment_queries - .parse::(air.lde_domain_size(), num_queries, main_segment_width) + .parse::(air.lde_domain_size(), num_queries, main_segment_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "main trace segment query deserialization failed: {err}" @@ -334,7 +333,7 @@ impl, V: VectorCommi let segment_queries = queries.remove(0); let segment_width = air.trace_info().get_aux_segment_width(); let (segment_query_proof, segment_trace_states) = segment_queries - .parse::(air.lde_domain_size(), num_queries, segment_width) + .parse::(air.lde_domain_size(), num_queries, segment_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "auxiliary trace segment query deserialization failed: {err}" @@ -375,8 +374,11 @@ struct ConstraintQueries< _h: PhantomData, } -impl, V: VectorCommitment> - ConstraintQueries +impl ConstraintQueries +where + E: FieldElement, + H: ElementHasher, + V: VectorCommitment, { /// Parses the provided constraint queries into evaluations in the specified field and /// corresponding batch opening proof. @@ -388,7 +390,7 @@ impl, V: VectorCommi let constraint_frame_width = air.context().num_constraint_composition_columns(); let (query_proofs, evaluations) = queries - .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) + .parse::(air.lde_domain_size(), num_queries, constraint_frame_width) .map_err(|err| { VerifierError::ProofDeserializationError(format!( "constraint evaluation query deserialization failed: {err}" diff --git a/verifier/src/lib.rs b/verifier/src/lib.rs index a55b15914..e6773ff9a 100644 --- a/verifier/src/lib.rs +++ b/verifier/src/lib.rs @@ -78,7 +78,7 @@ pub use errors::VerifierError; /// - The specified proof was generated for a different computation. /// - The specified proof was generated for this computation but for different public inputs. /// - The specified proof was generated with parameters not providing an acceptable security level. -pub fn verify( +pub fn verify( proof: Proof, pub_inputs: AIR::PublicInputs, acceptable_options: &AcceptableOptions, @@ -87,7 +87,7 @@ where AIR: Air, HashFn: ElementHasher, RandCoin: RandomCoin, - V: VectorCommitment, + VC: VectorCommitment, { // check that `proof` was generated with an acceptable set of parameters from the point of view // of the verifier @@ -108,7 +108,7 @@ where FieldExtension::None => { let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::( + perform_verification::( air, channel, public_coin, @@ -120,7 +120,7 @@ where } let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::, HashFn, RandCoin, V>( + perform_verification::, HashFn, RandCoin, VC>( air, channel, public_coin, @@ -132,7 +132,7 @@ where } let public_coin = RandCoin::new(&public_coin_seed); let channel = VerifierChannel::new(&air, proof)?; - perform_verification::, HashFn, RandCoin, V>( + perform_verification::, HashFn, RandCoin, VC>( air, channel, public_coin,
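// --------------------------------------------------------------------------------------------
// Editor's sketch (not part of the patch above): the recurring change in this diff is to
// validate that an opening proof's domain length matches the expected evaluation domain and
// to return a `DeserializationError::InvalidValue` instead of panicking via `assert_eq!`.
// The types below are hypothetical, self-contained stand-ins for `VectorCommitment` /
// `MerkleTree` / `DeserializationError`; the real ones live in winter-crypto and winter-utils.

/// Toy stand-in for a Merkle-tree-backed batch opening proof of a given depth.
struct ToyMerkleProof {
    depth: usize,
}

impl ToyMerkleProof {
    /// Mirrors the idea behind `MerkleTree::get_proof_domain_len` in the patch:
    /// a tree of depth `d` commits to a vector of `2^d` leaves.
    fn get_domain_len(&self) -> usize {
        1usize << self.depth
    }
}

/// Toy stand-in for the `DeserializationError::InvalidValue` variant used in the patch.
#[derive(Debug)]
enum ToyDeserializationError {
    InvalidValue(String),
}

/// The validation pattern introduced by this patch: compare the proof's committed domain
/// length against the expected domain size and surface a mismatch as an error the caller
/// can handle, rather than aborting the process.
fn check_domain_len(
    proof: &ToyMerkleProof,
    expected_domain_size: usize,
) -> Result<(), ToyDeserializationError> {
    let actual = proof.get_domain_len();
    if actual != expected_domain_size {
        return Err(ToyDeserializationError::InvalidValue(format!(
            "expected a domain of size {expected_domain_size} but was {actual}"
        )));
    }
    Ok(())
}

// Example: a proof of depth 5 commits to 32 leaves, so this check succeeds:
//     assert!(check_domain_len(&ToyMerkleProof { depth: 5 }, 32).is_ok());
// while a mismatched domain size produces an `InvalidValue` error instead of a panic.
// --------------------------------------------------------------------------------------------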