From 25a58cc9e96cec29fb8c346717f9e9ef9224f4cf Mon Sep 17 00:00:00 2001 From: William Arnold Date: Tue, 21 Feb 2023 20:13:21 -0800 Subject: [PATCH 01/87] First pass at grid api --- Cargo.toml | 2 + kate/grid/Cargo.toml | 5 ++ kate/grid/src/dims.rs | 44 ++++++++++ kate/grid/src/grid.rs | 187 ++++++++++++++++++++++++++++++++++++++++++ kate/grid/src/lib.rs | 11 +++ 5 files changed, 249 insertions(+) create mode 100644 kate/grid/Cargo.toml create mode 100644 kate/grid/src/dims.rs create mode 100644 kate/grid/src/grid.rs create mode 100644 kate/grid/src/lib.rs diff --git a/Cargo.toml b/Cargo.toml index 60c7e4df..1e677318 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,8 @@ members = [ "primitives/avail", "kate", + "kate/recovery", + "kate/grid", "primitives/nomad/signature", "primitives/nomad/nomad-core", "primitives/nomad/nomad-base", diff --git a/kate/grid/Cargo.toml b/kate/grid/Cargo.toml new file mode 100644 index 00000000..33fba998 --- /dev/null +++ b/kate/grid/Cargo.toml @@ -0,0 +1,5 @@ +[package] +name = "kate-grid" +version = "0.6.1" +authors = ["William Arnold warnold@polygon.technology"] +edition = "2021" diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs new file mode 100644 index 00000000..c8321366 --- /dev/null +++ b/kate/grid/src/dims.rs @@ -0,0 +1,44 @@ +/// The dimensions of a grid +#[derive(Debug, Clone)] +pub struct Dimensions { + width: usize, + height: usize, +} + +/// The ways a set of dimensions can be extended +#[derive(Debug, Clone)] +pub struct Extension { + /// This means extending the height of the grid by some factor. + /// `2` would mean doubling the grid upwards, increasing the height by a factor of + /// 2 and multiplying the number of rows by 2 + height_factor: u16, + /// This means extending the width of the grid by some factor. 
+ /// `2` would mean doubling the grid sideways, increasing the width by a factor of + /// 2 and multiplying the number of columns by 2 + width_factor: u16, +} + +impl Dimensions { + pub const fn new(width: usize, height: usize) -> Self { + Dimensions { width, height } + } + + pub fn width(&self) -> usize { + self.width + } + + pub fn height(&self) -> usize { + self.width + } + + pub fn size(&self) -> u32 { + self.width as u32 * self.height as u32 + } + + pub fn extend(&self, e: Extension) -> Self { + Self { + width: e.width_factor as usize * self.width, + height: e.height_factor as usize * self.height, + } + } +} diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs new file mode 100644 index 00000000..37f33240 --- /dev/null +++ b/kate/grid/src/grid.rs @@ -0,0 +1,187 @@ +use core::marker::PhantomData; + +pub trait Grid { + fn width(&self) -> usize; + fn height(&self) -> usize; + // x indexes within a row, y indexes within a column + // 0 <= x < width, 0 <= y < height + fn get(&self, x: usize, y: usize) -> Option<&A>; +} + +pub struct RowMajor { + width: usize, + height: usize, + pub inner: T, + _phantom: PhantomData, +} + +pub struct ColumnMajor { + width: usize, + height: usize, + pub inner: T, + _phantom: PhantomData, +} + +impl> Grid for RowMajor { + fn width(&self) -> usize { + self.width + } + + fn height(&self) -> usize { + self.height + } + + fn get(&self, x: usize, y: usize) -> Option<&A> { + self.inner.as_ref().get(x + y * self.width) + } +} + +impl> Grid for ColumnMajor { + fn width(&self) -> usize { + self.width + } + + fn height(&self) -> usize { + self.height + } + + fn get(&self, x: usize, y: usize) -> Option<&A> { + self.inner.as_ref().get(y + x * self.height) + } +} + +impl> RowMajor { + pub fn row(&self, y: usize) -> Option<&[A]> { + if y >= self.height { + return None; + } + Some(&self.inner.as_ref()[(y * self.width)..((y + 1) * self.width)]) + } + + pub fn iter_col(&self, x: usize) -> Option + '_> { + if x >= self.width { + return None; + } 
+ Some((0..self.height).map(move |y| self.get(x, y).expect("Size checked at instantiation"))) + } + + pub fn iter_row_wise(&self) -> impl Iterator + '_ { + (0..self.height).flat_map(move |y| { + (0..self.width).map(move |x| self.get(x, y).expect("Bounds already checked")) + }) + } +} + +impl> ColumnMajor { + pub fn col(&self, x: usize) -> Option<&[A]> { + if x >= self.width { + return None; + } + Some(&self.inner.as_ref()[(x * self.height)..((x + 1) * self.height)]) + } + + pub fn iter_row(&self, y: usize) -> Option + '_> { + if y >= self.height { + return None; + } + Some((0..self.width).map(move |x| self.get(x, y).expect("Size checked at instantiation"))) + } + + pub fn iter_row_wise(&self) -> impl Iterator + '_ { + (0..self.height).flat_map(move |y| { + (0..self.width).map(move |x| self.get(x, y).expect("Bounds already checked")) + }) + } +} + +pub trait AsRowMajor { + type Output: Sized; + fn as_row_major(self, width: usize, height: usize) -> Option>; +} + +pub trait AsColumnMajor { + type Output: Sized; + fn as_column_major(self, width: usize, height: usize) -> Option>; +} + +impl> AsRowMajor for T { + type Output = Self; + fn as_row_major(self, width: usize, height: usize) -> Option> { + if self.as_ref().len() == width * height { + Some(RowMajor { + width, + height, + inner: self, + _phantom: PhantomData, + }) + } else { + None + } + } +} + +impl> AsColumnMajor for T { + type Output = Self; + fn as_column_major(self, width: usize, height: usize) -> Option> { + if self.as_ref().len() == width * height { + Some(ColumnMajor { + width, + height, + inner: self, + _phantom: PhantomData, + }) + } else { + None + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec::Vec; + + #[test] + fn test_row_major() { + let data = [1, 2, 3, 4, 5, 6]; + let rm = data.as_row_major(3, 2).unwrap(); + + assert_eq!(rm.get(0, 0), Some(&1)); + assert_eq!(rm.get(1, 0), Some(&2)); + assert_eq!(rm.get(2, 0), Some(&3)); + assert_eq!(rm.get(0, 1), Some(&4)); + 
assert_eq!(rm.get(1, 1), Some(&5)); + assert_eq!(rm.get(2, 1), Some(&6)); + + assert_eq!([1, 2, 3].as_slice(), rm.row(0).unwrap()); + assert_eq!([4, 5, 6].as_slice(), rm.row(1).unwrap()); + assert_eq!(vec![&1, &4], rm.iter_col(0).unwrap().collect::>()); + assert_eq!(vec![&2, &5], rm.iter_col(1).unwrap().collect::>()); + assert_eq!(vec![&3, &6], rm.iter_col(2).unwrap().collect::>()); + } + + #[test] + fn test_column_major() { + let data = [1, 4, 2, 5, 3, 6]; + let cm = data.as_column_major(3, 2).unwrap(); + + assert_eq!(cm.get(0, 0), Some(&1)); + assert_eq!(cm.get(1, 0), Some(&2)); + assert_eq!(cm.get(2, 0), Some(&3)); + assert_eq!(cm.get(0, 1), Some(&4)); + assert_eq!(cm.get(1, 1), Some(&5)); + assert_eq!(cm.get(2, 1), Some(&6)); + + assert_eq!([1, 4].as_slice(), cm.col(0).unwrap()); + assert_eq!([2, 5].as_slice(), cm.col(1).unwrap()); + assert_eq!([3, 6].as_slice(), cm.col(2).unwrap()); + assert_eq!( + vec![&1, &2, &3], + cm.iter_row(0).unwrap().collect::>() + ); + assert_eq!( + vec![&4, &5, &6], + cm.iter_row(1).unwrap().collect::>() + ); + } +} diff --git a/kate/grid/src/lib.rs b/kate/grid/src/lib.rs new file mode 100644 index 00000000..f507f19c --- /dev/null +++ b/kate/grid/src/lib.rs @@ -0,0 +1,11 @@ +#![no_std] +//! Nice grid API, dealing with grids of different sizes and different orders +//! 
(column-major/row-major) + +#[cfg_attr(test, macro_use)] +extern crate alloc; + +mod dims; +mod grid; +pub use dims::*; +pub use grid::*; From 8f21df7674733803c7349d85144ccfb6b4927008 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Fri, 24 Feb 2023 01:28:15 -0800 Subject: [PATCH 02/87] Refactor `par_extend_data_matrix` with new iters --- Cargo.lock | 5 +++++ kate/Cargo.toml | 1 + kate/grid/src/dims.rs | 22 +++++++++++++++++++--- kate/grid/src/grid.rs | 12 ++++++++++++ kate/src/com.rs | 42 ++++++++++++++++++++++-------------------- 5 files changed, 59 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 24a57645..437ac894 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1786,6 +1786,7 @@ dependencies = [ "hex", "hex-literal", "itertools 0.10.5", + "kate-grid", "kate-recovery", "log", "num_cpus", @@ -1804,6 +1805,10 @@ dependencies = [ "test-case", ] +[[package]] +name = "kate-grid" +version = "0.6.1" + [[package]] name = "kate-recovery" version = "0.8.0" diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 619fa96c..9fc0c15b 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -27,6 +27,7 @@ serde = { version = "1.0.121", optional = true, features = ["derive"] } sp-core = { version = "7.0.0", default-features = false } sp-std = { version = "4.0.0", default-features = false } static_assertions = "1.1.0" +kate-grid = { path = "grid" } [dev-dependencies] criterion = "0.3.5" diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index c8321366..98d929c7 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -11,11 +11,27 @@ pub struct Extension { /// This means extending the height of the grid by some factor. /// `2` would mean doubling the grid upwards, increasing the height by a factor of /// 2 and multiplying the number of rows by 2 - height_factor: u16, + pub height_factor: u16, /// This means extending the width of the grid by some factor. 
/// `2` would mean doubling the grid sideways, increasing the width by a factor of /// 2 and multiplying the number of columns by 2 - width_factor: u16, + pub width_factor: u16, +} + +impl Extension { + pub fn height(factor: u16) -> Self { + Self { + height_factor: factor, + width_factor: 1, + } + } + + pub fn width(factor: u16) -> Self { + Self { + height_factor: 1, + width_factor: factor, + } + } } impl Dimensions { @@ -28,7 +44,7 @@ impl Dimensions { } pub fn height(&self) -> usize { - self.width + self.height } pub fn size(&self) -> u32 { diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index 37f33240..76771994 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -70,6 +70,12 @@ impl> RowMajor { (0..self.width).map(move |x| self.get(x, y).expect("Bounds already checked")) }) } + + pub fn iter_column_wise(&self) -> impl Iterator + '_ { + (0..self.width).flat_map(move |x| { + (0..self.height).map(move |y| self.get(x, y).expect("Bounds already checked")) + }) + } } impl> ColumnMajor { @@ -92,6 +98,12 @@ impl> ColumnMajor { (0..self.width).map(move |x| self.get(x, y).expect("Bounds already checked")) }) } + + pub fn iter_column_wise(&self) -> impl Iterator + '_ { + (0..self.width).flat_map(move |x| { + (0..self.height).map(move |y| self.get(x, y).expect("Bounds already checked")) + }) + } } pub trait AsRowMajor { diff --git a/kate/src/com.rs b/kate/src/com.rs index a7d5a00d..7be454b5 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -247,12 +247,9 @@ fn pad_iec_9797_1(mut data: Vec) -> Vec { .expect("Const assertion ensures this transformation to `DataChunk`. 
qed") } -fn extend_column_with_zeros( - column: &[BlsScalar], - extended_rows: BlockLengthRows, -) -> Vec { +fn extend_column_with_zeros(column: &[BlsScalar], height: usize) -> Vec { let mut result = column.to_vec(); - result.resize(extended_rows.as_usize(), BlsScalar::zero()); + result.resize(height, BlsScalar::zero()); result } @@ -269,16 +266,23 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result { /// This means that extension factor has to be multiple of 2, /// and that original data will be interleaved with erasure codes, /// instead of being in first k chunks of a column. +/// +/// `block` should be the raw data of a matrix, stored in row-major orientation. #[cfg(feature = "std")] pub fn par_extend_data_matrix( block_dims: BlockDimensions, block: &[u8], ) -> Result, Error> { + use kate_grid::AsRowMajor; + use kate_grid::Extension; + let start = Instant::now(); - let dimensions: matrix::Dimensions = block_dims.try_into().map_err(|_| Error::BlockTooBig)?; - let rows_num: usize = dimensions.rows().into(); - let extended_rows_num = BlockLengthRows(dimensions.extended_rows()); + let dims = kate_grid::Dimensions::new(block_dims.cols.0 as usize, block_dims.rows.0 as usize); + let extended_dims = dims.extend(Extension::height(2)); + + // simple length with mod check would work... 
let chunks = block.par_chunks_exact(block_dims.chunk_size as usize); + // TODO: Shouldn't assert, should error assert!(chunks.remainder().is_empty()); let scalars = chunks @@ -286,25 +290,23 @@ pub fn par_extend_data_matrix( .map(to_bls_scalar) .collect::, Error>>()?; - let mut row_wise_scalars = Vec::with_capacity(dimensions.size() as usize); - dimensions - .iter_cells() - .for_each(|cell_i| row_wise_scalars.push(scalars[cell_i as usize])); + // The data is currently row-major, so we need to put it into column-major + let rm = scalars.as_row_major(dims.width(), dims.height()).unwrap(); + let col_wise_scalars = rm.iter_column_wise().map(Clone::clone).collect::>(); - let mut chunk_elements = row_wise_scalars - .par_chunks_exact(rows_num) - .flat_map(|column| extend_column_with_zeros(column, extended_rows_num)) + let mut chunk_elements = col_wise_scalars + .chunks_exact(dims.height()) + .flat_map(|column| extend_column_with_zeros(column, extended_dims.height())) .collect::>(); - // extend data matrix, column by column - let extended_column_eval_domain = EvaluationDomain::new(extended_rows_num.as_usize())?; - let column_eval_domain = EvaluationDomain::new(rows_num)?; // rows_num = column_length + let extended_column_eval_domain = EvaluationDomain::new(extended_dims.height())?; + let column_eval_domain = EvaluationDomain::new(dims.height())?; // rows_num = column_length chunk_elements - .par_chunks_exact_mut(extended_rows_num.as_usize()) + .par_chunks_exact_mut(extended_dims.height()) .for_each(|col| { // (i)fft functions input parameter slice size has to be a power of 2, otherwise it panics - column_eval_domain.ifft_slice(&mut col[0..rows_num]); + column_eval_domain.ifft_slice(&mut col[0..dims.height()]); extended_column_eval_domain.fft_slice(col); }); From 63604068f1d5f06ba94749e49d3c099c60523db4 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Fri, 24 Feb 2023 03:52:57 -0800 Subject: [PATCH 03/87] Refactor `build_proof` --- kate/grid/src/dims.rs | 8 ++++---- 
kate/src/com.rs | 45 ++++++++++++++++++++----------------------- 2 files changed, 25 insertions(+), 28 deletions(-) diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index 98d929c7..85ae2028 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -11,22 +11,22 @@ pub struct Extension { /// This means extending the height of the grid by some factor. /// `2` would mean doubling the grid upwards, increasing the height by a factor of /// 2 and multiplying the number of rows by 2 - pub height_factor: u16, + pub height_factor: usize, /// This means extending the width of the grid by some factor. /// `2` would mean doubling the grid sideways, increasing the width by a factor of /// 2 and multiplying the number of columns by 2 - pub width_factor: u16, + pub width_factor: usize, } impl Extension { - pub fn height(factor: u16) -> Self { + pub fn height(factor: usize) -> Self { Self { height_factor: factor, width_factor: 1, } } - pub fn width(factor: u16) -> Self { + pub fn width(factor: usize) -> Self { Self { height_factor: 1, width_factor: factor, diff --git a/kate/src/com.rs b/kate/src/com.rs index 7be454b5..d2fa59ae 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -18,6 +18,7 @@ use dusk_plonk::{ prelude::{BlsScalar, CommitKey}, }; use frame_support::{ensure, sp_runtime::SaturatedConversion}; +use kate_grid::{AsColumnMajor, AsRowMajor, Extension}; #[cfg(feature = "std")] use kate_recovery::{com::app_specific_rows, index, matrix}; use log::info; @@ -50,6 +51,7 @@ pub enum Error { BadHeaderHash, BlockTooBig, InvalidChunkLength, + DimensionsMismatch, } impl From for Error { @@ -273,9 +275,6 @@ pub fn par_extend_data_matrix( block_dims: BlockDimensions, block: &[u8], ) -> Result, Error> { - use kate_grid::AsRowMajor; - use kate_grid::Extension; - let start = Instant::now(); let dims = kate_grid::Dimensions::new(block_dims.cols.0 as usize, block_dims.rows.0 as usize); let extended_dims = dims.extend(Extension::height(2)); @@ -327,27 +326,18 @@ pub fn 
build_proof( ext_data_matrix: &[BlsScalar], cells: &[Cell], ) -> Result, Error> { - let cols_num = block_dims.cols.as_usize(); - let extended_rows_num = block_dims - .rows - .0 - .checked_mul(EXTENSION_FACTOR) - .ok_or(Error::BlockTooBig)?; + let dims = kate_grid::Dimensions::new(block_dims.cols.as_usize(), block_dims.rows.as_usize()); + let extended_dims = dims.extend(Extension::height(EXTENSION_FACTOR as usize)); const SPROOF_SIZE: usize = PROOF_SIZE + SCALAR_SIZE; - let (prover_key, _) = public_params.trim(cols_num).map_err(Error::from)?; + let (prover_key, _) = public_params.trim(dims.width()).map_err(Error::from)?; // Generate all the x-axis points of the domain on which all the row polynomials reside - let row_eval_domain = EvaluationDomain::new(cols_num).map_err(Error::from)?; - let mut row_dom_x_pts = Vec::with_capacity(row_eval_domain.size()); - row_dom_x_pts.extend(row_eval_domain.elements()); + let row_eval_domain = EvaluationDomain::new(dims.width()).map_err(Error::from)?; + let row_dom_x_pts = row_eval_domain.elements().collect::>(); - let mut result_bytes: Vec = Vec::new(); - result_bytes.reserve_exact(SPROOF_SIZE * cells.len()); - unsafe { - result_bytes.set_len(SPROOF_SIZE * cells.len()); - } + let mut result_bytes: Vec = vec![0u8; SPROOF_SIZE * cells.len()]; let prover_key = &prover_key; let row_dom_x_pts = &row_dom_x_pts; @@ -360,30 +350,37 @@ pub fn build_proof( // generate proof only for requested cells let total_start = Instant::now(); + // TODO: better error type + let ext_data_matrix_cm = ext_data_matrix + .as_column_major(extended_dims.width(), extended_dims.height()) + .ok_or(Error::DimensionsMismatch)?; + // attempt to parallelly compute proof for all requested cells cells .into_par_iter() .zip(result_bytes.par_chunks_exact_mut(SPROOF_SIZE)) .for_each(|(cell, res)| { let r_index = cell.row.as_usize(); - if (r_index >= extended_rows_num as usize) || (cell.col >= block_dims.cols) { + if r_index >= extended_dims.height() || cell.col >= 
block_dims.cols { res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! } else { let c_index = cell.col.as_usize(); // construct polynomial per extended matrix row - let row = (0..cols_num) - .into_par_iter() - .map(|j| ext_data_matrix[r_index + j * extended_rows_num as usize]) - .collect::>(); + let row = ext_data_matrix_cm + .iter_row(r_index) + .expect("Already checked row index") + .map(Clone::clone) + .collect::>(); // row has to be a power of 2, otherwise interpolate() function panics + // TODO: cache evaluations let poly = Evaluations::from_vec_and_domain(row, row_eval_domain).interpolate(); let witness = prover_key.compute_single_witness(&poly, &row_dom_x_pts[c_index]); match prover_key.commit(&witness) { Ok(commitment_to_witness) => { let evaluated_point = - ext_data_matrix[r_index + c_index * extended_rows_num as usize]; + ext_data_matrix[r_index + c_index * extended_dims.height()]; res[0..PROOF_SIZE].copy_from_slice(&commitment_to_witness.to_bytes()); res[PROOF_SIZE..].copy_from_slice(&evaluated_point.to_bytes()); From 0eb55e7727173b490a9b6dd6baa8522c619851d9 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 5 Mar 2023 14:32:10 -0800 Subject: [PATCH 04/87] First pass at new api --- Cargo.lock | 198 ++++++++++++++++++- kate/Cargo.toml | 7 +- kate/grid/src/dims.rs | 6 +- kate/grid/src/grid.rs | 107 +++++++--- kate/recovery/Cargo.toml | 2 +- kate/src/com.rs | 20 +- kate/src/gridgen.rs | 416 +++++++++++++++++++++++++++++++++++++++ kate/src/lib.rs | 4 + 8 files changed, 715 insertions(+), 45 deletions(-) create mode 100644 kate/src/gridgen.rs diff --git a/Cargo.lock b/Cargo.lock index 437ac894..a8e7bc7a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -64,6 +64,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "ahash" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +dependencies = [ + "cfg-if", + "once_cell", 
+ "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.20" @@ -97,6 +108,124 @@ version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +[[package]] +name = "ark-bls12-381" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c775f0d12169cba7aae4caeb547bb6a50781c7449a8aa53793827c9ec4abf488" +dependencies = [ + "ark-ec", + "ark-ff", + "ark-serialize", + "ark-std", +] + +[[package]] +name = "ark-ec" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c60370a92f8e1a5f053cad73a862e1b99bc642333cd676fa11c0c39f80f4ac2" +dependencies = [ + "ark-ff", + "ark-poly", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", + "itertools 0.10.5", + "num-traits", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c2d42532524bee1da5a4f6f733eb4907301baa480829557adcff5dfaeee1d9a" +dependencies = [ + "ark-ff-asm", + "ark-ff-macros", + "ark-serialize", + "ark-std", + "derivative", + "digest 0.10.6", + "itertools 0.10.5", + "num-bigint", + "num-traits", + "paste", + "rustc_version", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6873aaba7959593d89babed381d33e2329453368f1bf3c67e07686a1c1056f" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "ark-ff-macros" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c2e7d0f2d67cc7fc925355c74d36e7eda19073639be4a0a233d4611b8c959d" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "ark-poly" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8f6ec811462cabe265cfe1b102fcfe3df79d7d2929c2425673648ee9abfd0272" +dependencies = [ + "ark-ff", + "ark-serialize", + "ark-std", + "derivative", + "hashbrown 0.13.2", +] + +[[package]] +name = "ark-serialize" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7e735959bc173ea4baf13327b19c22d452b8e9e8e8f7b7fc34e6bf0e316c33e" +dependencies = [ + "ark-serialize-derive", + "ark-std", + "digest 0.10.6", + "num-bigint", +] + +[[package]] +name = "ark-serialize-derive" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd34f0920d995d2c932f38861c416f70de89a6de9875876b012557079603e6cc" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + [[package]] name = "array-bytes" version = "4.2.0" @@ -338,6 +467,17 @@ dependencies = [ "byte-tools", ] +[[package]] +name = "blst" +version = "0.3.10" +source = "git+https://github.com/aphoh/blst?rev=556e037926d9c526c2eb6cb1522bea39690416ea#556e037926d9c526c2eb6cb1522bea39690416ea" +dependencies = [ + "cc", + "glob", + "threadpool", + "zeroize", +] + [[package]] name = "bs58" version = "0.4.0" @@ -851,6 +991,17 @@ dependencies = [ "zeroize", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "derive-hex" version = "0.1.2" @@ -947,7 +1098,7 @@ dependencies = [ [[package]] name = "dusk-plonk" version = "0.12.0" -source = "git+https://github.com/maticnetwork/plonk.git?tag=v0.12.0-polygon-2#6ada57a82b6e3a92a677b9f6263f9f3c9b501b23" +source = 
"git+https://github.com/maticnetwork/plonk?branch=will/polynomial-visibility#96ddfec2208cb32b7b51aa7b64736d1b4f580cbd" dependencies = [ "cfg-if", "dusk-bls12_381", @@ -1488,6 +1639,12 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "221996f774192f0f718773def8201c4ae31f02616a54ccfc2d358bb0e5cefdec" +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + [[package]] name = "group" version = "0.12.1" @@ -1538,6 +1695,15 @@ dependencies = [ "ahash 0.7.6", ] +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash 0.8.3", +] + [[package]] name = "heck" version = "0.4.0" @@ -1776,6 +1942,9 @@ dependencies = [ name = "kate" version = "0.6.1" dependencies = [ + "ark-bls12-381", + "ark-ff", + "ark-serialize", "criterion", "da-primitives", "derive_more", @@ -1789,9 +1958,11 @@ dependencies = [ "kate-grid", "kate-recovery", "log", + "merlin 3.0.0", "num_cpus", "once_cell", "parity-scale-codec", + "poly-multiproof", "proptest", "rand 0.8.5", "rand_chacha 0.3.1", @@ -2470,6 +2641,22 @@ dependencies = [ "plotters-backend", ] +[[package]] +name = "poly-multiproof" +version = "0.0.1" +source = "git+https://github.com/aphoh/poly-multiproof#b0783e1b69b35c744abd522ee482de51a2181888" +dependencies = [ + "ark-bls12-381", + "ark-ec", + "ark-ff", + "ark-poly", + "ark-serialize", + "ark-std", + "blst", + "merlin 3.0.0", + "thiserror", +] + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -3772,6 +3959,15 @@ dependencies = [ "once_cell", ] +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" 
+dependencies = [ + "num_cpus", +] + [[package]] name = "tiny-bip39" version = "1.0.0" diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 9fc0c15b..98d21908 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -11,7 +11,7 @@ codec = { package = "parity-scale-codec", version = "3", default-features = fals da-primitives = { path = "../primitives/avail", default-features = false } derive_more = "0.99.17" dusk-bytes = { version = "0.1.6", default-features = false, optional = true } -dusk-plonk = { git = "https://github.com/maticnetwork/plonk.git", tag = "v0.12.0-polygon-2", optional = true } +dusk-plonk = { git = "https://github.com/maticnetwork/plonk", branch = "will/polynomial-visibility", optional = true } frame-support = { version = "4.0.0-dev", default-features = false } getrandom = { version = "0.2", features = ["js"], optional = true } hex = { version = "0.4", default-features = false, features = ["alloc"] } @@ -28,6 +28,11 @@ sp-core = { version = "7.0.0", default-features = false } sp-std = { version = "4.0.0", default-features = false } static_assertions = "1.1.0" kate-grid = { path = "grid" } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof" } +ark-bls12-381 = "0.4.0" +ark-ff = "0.4.1" +ark-serialize = "0.4" +merlin = "3" [dev-dependencies] criterion = "0.3.5" diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index 85ae2028..c802a5be 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -1,5 +1,5 @@ /// The dimensions of a grid -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct Dimensions { width: usize, height: usize, @@ -47,8 +47,8 @@ impl Dimensions { self.height } - pub fn size(&self) -> u32 { - self.width as u32 * self.height as u32 + pub fn n_cells(&self) -> usize { + self.width * self.height } pub fn extend(&self, e: Extension) -> Self { diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index 76771994..dee1859e 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs 
@@ -1,4 +1,4 @@ -use core::marker::PhantomData; +use alloc::vec::Vec; pub trait Grid { fn width(&self) -> usize; @@ -8,21 +8,19 @@ pub trait Grid { fn get(&self, x: usize, y: usize) -> Option<&A>; } -pub struct RowMajor { +pub struct RowMajor { width: usize, height: usize, - pub inner: T, - _phantom: PhantomData, + pub inner: Vec, } -pub struct ColumnMajor { +pub struct ColumnMajor { width: usize, height: usize, - pub inner: T, - _phantom: PhantomData, + pub inner: Vec, } -impl> Grid for RowMajor { +impl Grid for RowMajor { fn width(&self) -> usize { self.width } @@ -32,11 +30,11 @@ impl> Grid for RowMajor { } fn get(&self, x: usize, y: usize) -> Option<&A> { - self.inner.as_ref().get(x + y * self.width) + self.inner.get(x + y * self.width) } } -impl> Grid for ColumnMajor { +impl Grid for ColumnMajor { fn width(&self) -> usize { self.width } @@ -46,16 +44,16 @@ impl> Grid for ColumnMajor { } fn get(&self, x: usize, y: usize) -> Option<&A> { - self.inner.as_ref().get(y + x * self.height) + self.inner.get(y + x * self.height) } } -impl> RowMajor { +impl RowMajor { pub fn row(&self, y: usize) -> Option<&[A]> { if y >= self.height { return None; } - Some(&self.inner.as_ref()[(y * self.width)..((y + 1) * self.width)]) + Some(&self.inner[(y * self.width)..((y + 1) * self.width)]) } pub fn iter_col(&self, x: usize) -> Option + '_> { @@ -65,6 +63,15 @@ impl> RowMajor { Some((0..self.height).map(move |y| self.get(x, y).expect("Size checked at instantiation"))) } + pub fn rows(&self) -> impl Iterator + '_ { + (0..self.height).map(|y| (y, self.row(y).expect("Bounds already checked"))) + } + + // TODO: this return type is kinda gross, should it just iterate over vecs? 
+ pub fn columns(&self) -> impl Iterator)> + '_ { + (0..self.width).map(|x| (x, self.iter_col(x).expect("Bounds already checked"))) + } + pub fn iter_row_wise(&self) -> impl Iterator + '_ { (0..self.height).flat_map(move |y| { (0..self.width).map(move |x| self.get(x, y).expect("Bounds already checked")) @@ -76,14 +83,22 @@ impl> RowMajor { (0..self.height).map(move |y| self.get(x, y).expect("Bounds already checked")) }) } + + pub fn to_column_major(&self) -> ColumnMajor { + self.iter_column_wise() + .map(Clone::clone) + .collect::>() + .as_column_major(self.width, self.height) + .expect("Bounds already checked") + } } -impl> ColumnMajor { +impl ColumnMajor { pub fn col(&self, x: usize) -> Option<&[A]> { if x >= self.width { return None; } - Some(&self.inner.as_ref()[(x * self.height)..((x + 1) * self.height)]) + Some(&self.inner[(x * self.height)..((x + 1) * self.height)]) } pub fn iter_row(&self, y: usize) -> Option + '_> { @@ -104,27 +119,31 @@ impl> ColumnMajor { (0..self.height).map(move |y| self.get(x, y).expect("Bounds already checked")) }) } + + pub fn to_row_major(&self) -> RowMajor { + self.iter_row_wise() + .map(Clone::clone) + .collect::>() + .as_row_major(self.width, self.height) + .expect("Bounds already checked") + } } -pub trait AsRowMajor { - type Output: Sized; - fn as_row_major(self, width: usize, height: usize) -> Option>; +pub trait AsRowMajor { + fn as_row_major(self, width: usize, height: usize) -> Option>; } -pub trait AsColumnMajor { - type Output: Sized; - fn as_column_major(self, width: usize, height: usize) -> Option>; +pub trait AsColumnMajor { + fn as_column_major(self, width: usize, height: usize) -> Option>; } -impl> AsRowMajor for T { - type Output = Self; - fn as_row_major(self, width: usize, height: usize) -> Option> { - if self.as_ref().len() == width * height { +impl AsRowMajor for Vec { + fn as_row_major(self, width: usize, height: usize) -> Option> { + if self.len() == width * height { Some(RowMajor { width, height, inner: 
self, - _phantom: PhantomData, }) } else { None @@ -132,15 +151,41 @@ impl> AsRowMajor for T { } } -impl> AsColumnMajor for T { - type Output = Self; - fn as_column_major(self, width: usize, height: usize) -> Option> { - if self.as_ref().len() == width * height { +impl AsColumnMajor for Vec { + fn as_column_major(self, width: usize, height: usize) -> Option> { + if self.len() == width * height { Some(ColumnMajor { width, height, inner: self, - _phantom: PhantomData, + }) + } else { + None + } + } +} + +impl AsColumnMajor for [A; LEN] { + fn as_column_major(self, width: usize, height: usize) -> Option> { + if self.len() == width * height { + Some(ColumnMajor { + width, + height, + inner: self.into(), + }) + } else { + None + } + } +} + +impl AsRowMajor for [A; LEN] { + fn as_row_major(self, width: usize, height: usize) -> Option> { + if self.len() == width * height { + Some(RowMajor { + width, + height, + inner: self.into(), }) } else { None diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 5b80b440..344fdf0c 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [dependencies] codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } dusk-bytes = "0.1.6" -dusk-plonk = { git = "https://github.com/maticnetwork/plonk.git", tag = "v0.12.0-polygon-2" } +dusk-plonk = { git = "https://github.com/maticnetwork/plonk", branch = "will/polynomial-visibility" } getrandom = { version = "0.2", features = ["js"] } num = "0.4.0" once_cell = { version = "1.9.0", default-features = false } diff --git a/kate/src/com.rs b/kate/src/com.rs index d2fa59ae..8c1cdd2e 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -351,9 +351,9 @@ pub fn build_proof( let total_start = Instant::now(); // TODO: better error type - let ext_data_matrix_cm = ext_data_matrix - .as_column_major(extended_dims.width(), extended_dims.height()) - .ok_or(Error::DimensionsMismatch)?; + //let 
ext_data_matrix_cm = ext_data_matrix + // .as_column_major(extended_dims.width(), extended_dims.height()) + // .ok_or(Error::DimensionsMismatch)?; // attempt to parallelly compute proof for all requested cells cells @@ -367,11 +367,15 @@ pub fn build_proof( let c_index = cell.col.as_usize(); // construct polynomial per extended matrix row - let row = ext_data_matrix_cm - .iter_row(r_index) - .expect("Already checked row index") - .map(Clone::clone) - .collect::>(); + let row = (0..extended_dims.width()) + .into_par_iter() + .map(|j| ext_data_matrix[r_index + j * extended_dims.height()]) + .collect::>(); + //let row = ext_data_matrix_cm + // .iter_row(r_index) + // .expect("Already checked row index") + // .map(Clone::clone) + // .collect::>(); // row has to be a power of 2, otherwise interpolate() function panics // TODO: cache evaluations diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs new file mode 100644 index 00000000..d7e17b3d --- /dev/null +++ b/kate/src/gridgen.rs @@ -0,0 +1,416 @@ +use core::marker::PhantomData; + +use codec::Encode; +use da_primitives::asdr::{AppExtrinsic, AppId}; +use dusk_bytes::Serializable; +use dusk_plonk::{ + commitment_scheme::kzg10::commitment::Commitment, + fft::{EvaluationDomain, Polynomial}, + prelude::{BlsScalar, CommitKey}, +}; +use kate_grid::{AsColumnMajor, AsRowMajor, Dimensions, Extension, RowMajor}; +use kate_recovery::config::PADDING_TAIL_VALUE; +use merlin::Transcript; +use poly_multiproof::m1_blst::M1NoPrecomp; +use rand::{Rng, SeedableRng}; +use rand_chacha::ChaChaRng; + +use crate::{ + com::{Cell, Error, XtsLayout}, + config::DATA_CHUNK_SIZE, + Seed, +}; + +pub struct EvaluationGrid { + pub layout: XtsLayout, + pub evals: RowMajor, + pub dims: Dimensions, +} + +impl EvaluationGrid { + /// From the app extrinsics, create a data grid of Scalars + pub fn from_extrinsics( + mut extrinsics: Vec, + min_width: usize, + max_width: usize, + max_height: usize, + rng_seed: Seed, + ) -> Result { + // Group extrinsics by 
app id, also sorted by app id. + extrinsics.sort_by(|a, b| a.app_id.cmp(&b.app_id)); + let grouped = + extrinsics + .iter() + .fold::>)>, _>(vec![], |mut acc, e| { + match acc.last_mut() { + Some((app_id, data)) if e.app_id == *app_id => data.push(e.data.clone()), + None | Some(_) => acc.push((e.app_id, vec![e.data.clone()])), + } + acc + }); + + // Convert each grup of extrinsics into scalars + let encoded = grouped + .into_iter() + .map(|(id, datas)| { + let mut enc = datas.encode(); + enc.push(PADDING_TAIL_VALUE); // TODO: remove 9797 padding stuff + enc.chunks(DATA_CHUNK_SIZE) + .map(|c| pad_to_bls_scalar(c)) + .collect::, _>>() + .map(|scalars| (id, scalars)) + }) + .collect::, _>>()?; + + // Get the layout of each app id's start + let layout = encoded + .iter() + .map(|(id, data)| (*id, data.len() as u32)) + .collect::>(); + + // Flatten the grid + let mut grid = encoded + .into_iter() + .flat_map(|(_, scalars)| scalars) + .collect::>(); + + // Fit the grid to the desired grid size + let dims = get_block_dims(grid.len(), min_width, max_width, max_height)?; + let mut rng = ChaChaRng::from_seed(rng_seed); + while grid.len() != dims.n_cells() { + let rnd_values: [u8; BlsScalar::SIZE - 1] = rng.gen(); + // TODO: can we just use zeros instead? 
+ grid.push(pad_to_bls_scalar(&rnd_values)?); + } + + Ok(EvaluationGrid { + layout, + evals: grid + .as_row_major(dims.width(), dims.height()) + .ok_or(Error::DimensionsMismatch)?, + dims, + }) + } + + pub fn extend_columns(&self, extension_factor: usize) -> Result { + let new_dims = self.dims.extend(Extension::height(extension_factor)); + + let domain = EvaluationDomain::new(self.dims.height())?; + let domain_new = EvaluationDomain::new(new_dims.height())?; + if domain_new.size() != new_dims.height() { + // TODO: throw a reasonable error + return Err(Error::CellLenghtExceeded); + } + + let new_evals = self + .evals + .columns() + .flat_map(|(_x, col)| { + // put elts into a new column + let mut ext_col = Vec::with_capacity(domain_new.size()); + col.for_each(|s| ext_col.push(s.clone())); + // ifft, resize, fft + domain.ifft_slice(&mut ext_col); + ext_col.resize(domain_new.size(), BlsScalar::zero()); + domain_new.fft_slice(&mut ext_col); + ext_col + }) + .collect::>() + .as_column_major(new_dims.width(), new_dims.height()) + .expect("Each column should be expanded to news dims") + .to_row_major(); + + Ok(Self { + layout: self.layout.clone(), + evals: new_evals, + dims: new_dims, + }) + } + + pub fn make_polynomial_grid(&self) -> Result { + let domain = EvaluationDomain::new(self.dims.width())?; + Ok(PolynomialGrid { + dims: self.dims.clone(), + points: domain.elements().collect(), + inner: self + .evals + .rows() + .map(|(_, row)| Polynomial { + coeffs: domain.ifft(row), + }) + .collect::>(), + }) + } +} + +pub struct PolynomialGrid { + inner: Vec, + points: Vec, + dims: Dimensions, +} + +impl PolynomialGrid { + pub fn commitments(&self, srs: &CommitKey) -> Result, Error> { + self.inner + .iter() + .map(|poly| srs.commit(&poly).map_err(|e| Error::PlonkError(e))) + .collect() + } + + pub fn proof(&self, srs: &CommitKey, cell: &Cell) -> Result { + let x = cell.col.0 as usize; + let y = cell.row.0 as usize; + // TODO: better error msg + let poly = 
self.inner.get(y).ok_or(Error::CellLenghtExceeded)?; + let witness = srs.compute_single_witness(poly, &self.points[x]); + Ok(srs.commit(&witness)?) + } + + pub fn multiproof( + &self, + srs: &M1NoPrecomp, + cell: &Cell, + eval_grid: &EvaluationGrid, + target_dims: &Dimensions, + ) -> Result { + use poly_multiproof::traits::PolyMultiProofNoPrecomp; + // TODO: useful error + let block = multiproof_block( + cell.col.0 as usize, + cell.row.0 as usize, + &self.dims, + target_dims, + ) + .ok_or(Error::CellLenghtExceeded)?; + let polys = self.inner[block.start_y..block.end_y] + .iter() + .map(|s| s.coeffs.iter().map(convert_bls).collect::>()) + .collect::>(); + let evals = (block.start_y..block.end_y) + .map(|y| { + eval_grid.evals.row(y).expect("Already bounds checked")[block.start_x..block.end_x] + .iter() + .map(convert_bls) + .collect::>() + }) + .collect::>(); + let points = &self.points[block.start_x..block.end_x] + .iter() + .map(convert_bls) + .collect::>(); + //let eval_slices = eval_grid.evals.rows().map(|(_, row)| &row[]).collect::>(); + + let mut ts = Transcript::new(b"avail-mp"); + let proof = srs + .open(&mut ts, &evals, &polys, &points) + .expect("TODO: real error msg"); + Ok(Multiproof { + proof, + evals, + block, + }) + } +} + +fn convert_bls(dusk: &dusk_plonk::bls12_381::BlsScalar) -> ark_bls12_381::Fr { + ark_bls12_381::Fr { + 0: ark_ff::BigInt(dusk.0.clone()), + 1: PhantomData, + } +} + +#[derive(Debug, Clone)] +pub struct Multiproof { + pub proof: poly_multiproof::m1_blst::Proof, + pub evals: Vec>, + pub block: CellBlock, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CellBlock { + start_x: usize, + start_y: usize, + end_x: usize, + end_y: usize, +} +fn multiproof_block( + x: usize, + y: usize, + grid_dims: &Dimensions, + target_dims: &Dimensions, +) -> Option { + let target_width = core::cmp::min(grid_dims.width(), target_dims.width()); + let target_height = core::cmp::min(grid_dims.height(), target_dims.height()); + dbg!(&target_width, 
target_height); + dbg!(&x, &y); + if x >= target_width || y >= target_height { + return None; + } + + if grid_dims.width() % target_width != 0 || grid_dims.height() % target_height != 0 { + return None; + } + + let block_width = grid_dims.width() / target_width; + let block_height = grid_dims.height() / target_height; + Some(CellBlock { + start_x: x * block_width, + start_y: y * block_height, + end_x: (x + 1) * block_width, + end_y: (y + 1) * block_height, + }) +} + +fn get_block_dims( + n_scalars: usize, + min_width: usize, + max_width: usize, + max_height: usize, +) -> Result { + // Less than max_width wide block + if n_scalars < max_width { + let current_width = n_scalars; + // Don't let the width get lower than the minimum provided + let width = core::cmp::max(round_up_power_of_2(current_width), min_width); + Ok(Dimensions::new(width, 1)) + } else { + let width = max_width; + let current_height = round_up_to_multiple(n_scalars, width) / width; + // Round the height up to a power of 2 for ffts + let height = round_up_power_of_2(current_height); + // Error if height too big + if height > max_height { + return Err(Error::BlockTooBig); + } + Ok(Dimensions::new(width, height)) + } +} + +fn round_up_to_multiple(input: usize, multiple: usize) -> usize { + let n_multiples = (input + multiple - 1) / multiple; + n_multiples * multiple +} + +fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { + if a.as_ref().len() > DATA_CHUNK_SIZE { + todo!() + } + let mut buf = [0u8; BlsScalar::SIZE]; + buf[0..a.as_ref().len()].copy_from_slice(a.as_ref()); + //TODO: better error type + BlsScalar::from_bytes(&buf).map_err(|_| Error::CellLenghtExceeded) +} + +// Round up. 
only valid for positive integers +fn round_up_power_of_2(mut v: usize) -> usize { + if v == 0 { + return 1; + } + v -= 1; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v += 1; + return v; +} + +#[cfg(test)] +mod tests { + use super::*; + use proptest::{prop_assert_eq, proptest}; + use test_case::test_case; + + // parameters that will split a 256x256 grid into pieces of size 4x16 + const TARGET: Dimensions = Dimensions::new(64, 16); + const GRID: Dimensions = Dimensions::new(256, 256); + fn cb(start_x: usize, start_y: usize, end_x: usize, end_y: usize) -> CellBlock { + CellBlock { + start_x, + start_y, + end_x, + end_y, + } + } + #[test_case(0, 0 => Some(cb(0, 0, 4, 16)))] + #[test_case(1, 0 => Some(cb(4, 0, 8, 16)))] + #[test_case(0, 1 => Some(cb(0, 16, 4, 32)))] + #[test_case(1, 1 => Some(cb(4, 16, 8, 32)))] + #[test_case(64, 0 => None)] + #[test_case(0, 16 => None)] + fn multiproof_max_grid_size(x: usize, y: usize) -> Option { + multiproof_block(x, y, &GRID, &TARGET) + } + + //#[test] + //// Test build_commitments() function with a predefined input + //fn test_build_commitments_simple_commitment_check() { + // let original_data = br#"test"#; + // let hash: Seed = [ + // 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, + // 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, + // ]; + + // let (_, commitments, dimensions, _) = par_build_commitments( + // block_rows, + // block_cols, + // chunk_size, + // &[AppExtrinsic::from(original_data.to_vec())], + // hash, + // ) + // .unwrap(); + + // assert_eq!(dimensions, Dimensions::new(4, 1)); + // let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + // assert_eq!(commitments, expected_commitments); + //} + + use proptest::prelude::*; + proptest! 
{ + #![proptest_config(ProptestConfig { + cases: 200, .. ProptestConfig::default() + })] + #[test] + fn test_round_up_to_multiple(i in 1..1000usize, m in 1..32usize) { + for k in 0..m { + let a = i * m - k; + prop_assert_eq!(round_up_to_multiple(a, m), i * m) + } + } + + #[test] + fn test_convert_bls_scalar(input: [u8; 31]) { + use ark_serialize::CanonicalSerialize; + let dusk = pad_to_bls_scalar(&input).unwrap(); + let ark = convert_bls(&dusk); + let dusk_out = dusk.to_bytes(); + let mut ark_out = [0u8; 32]; + ark.serialize_compressed(&mut ark_out[..]).unwrap(); + assert_eq!(dusk_out, ark_out); + } + + } + #[test_case(0 => 1)] + #[test_case(1 => 1)] + #[test_case(2 => 2)] + #[test_case(3 => 4)] + #[test_case(6 => 8)] + #[test_case(972 => 1024)] + fn test_round_up_to_2(i: usize) -> usize { + round_up_power_of_2(i) + } + + #[test_case(0 => Dimensions::new(4, 1) ; "block size zero")] + #[test_case(1 => Dimensions::new(4, 1) ; "below minimum block size")] + #[test_case(10 => Dimensions::new(16, 1) ; "regular case")] + #[test_case(17 => Dimensions::new(32, 1) ; "minimum overhead after 512")] + #[test_case(256 => Dimensions::new(256, 1) ; "maximum cols")] + #[test_case(257 => Dimensions::new(256, 2) ; "two rows")] + #[test_case(256 * 256 => Dimensions::new(256, 256) ; "max block size")] + #[test_case(256 * 256 + 1 => panics "BlockTooBig" ; "too much data")] + fn test_get_block_dims(size: usize) -> Dimensions +where { + get_block_dims(size, 4, 256, 256).unwrap() + } +} diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 4518712b..b3c50e85 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -16,7 +16,9 @@ pub type Seed = [u8; 32]; pub mod config { use super::{BlockLengthColumns, BlockLengthRows}; + // TODO: Delete this? 
not used anywhere pub const SCALAR_SIZE_WIDE: usize = 64; + pub const SCALAR_SIZE: usize = 32; pub const DATA_CHUNK_SIZE: usize = 31; // Actual chunk size is 32 after 0 padding is done pub const EXTENSION_FACTOR: u32 = 2; @@ -64,6 +66,8 @@ pub mod testnet { #[cfg(feature = "std")] pub mod com; + +pub mod gridgen; /// Precalculate the length of padding IEC 9797 1. /// /// # NOTE From d0cac5f817fb0bcd54b002e387a1199447e8b9b5 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 5 Mar 2023 14:44:24 -0800 Subject: [PATCH 05/87] Passing simple test --- kate/src/gridgen.rs | 63 ++++++++++++++++++++++++++++----------------- 1 file changed, 39 insertions(+), 24 deletions(-) diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index d7e17b3d..fb18d393 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -241,8 +241,8 @@ fn multiproof_block( ) -> Option { let target_width = core::cmp::min(grid_dims.width(), target_dims.width()); let target_height = core::cmp::min(grid_dims.height(), target_dims.height()); - dbg!(&target_width, target_height); - dbg!(&x, &y); + dbg!(&target_width, target_height); + dbg!(&x, &y); if x >= target_width || y >= target_height { return None; } @@ -318,7 +318,10 @@ fn round_up_power_of_2(mut v: usize) -> usize { #[cfg(test)] mod tests { + use crate::testnet; + use super::*; + use hex_literal::hex; use proptest::{prop_assert_eq, proptest}; use test_case::test_case; @@ -343,28 +346,40 @@ mod tests { multiproof_block(x, y, &GRID, &TARGET) } - //#[test] - //// Test build_commitments() function with a predefined input - //fn test_build_commitments_simple_commitment_check() { - // let original_data = br#"test"#; - // let hash: Seed = [ - // 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, - // 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, - // ]; - - // let (_, commitments, dimensions, _) = par_build_commitments( - // block_rows, - // block_cols, - // chunk_size, - // 
&[AppExtrinsic::from(original_data.to_vec())], - // hash, - // ) - // .unwrap(); - - // assert_eq!(dimensions, Dimensions::new(4, 1)); - // let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); - // assert_eq!(commitments, expected_commitments); - //} + #[test] + // Test build_commitments() function with a predefined input + fn newapi_test_build_commitments_simple_commitment_check() { + let original_data = br#"test"#; + let block_height = 256usize; + let block_width = 256usize; + let hash: Seed = [ + 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, + 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, + ]; + + let evals = EvaluationGrid::from_extrinsics( + vec![AppExtrinsic::from(original_data.to_vec())], + 4, + block_width, + block_height, + hash, + ) + .unwrap(); + let evals = evals.extend_columns(2).unwrap(); + let polys = evals.make_polynomial_grid().unwrap(); + let public_params = + testnet::public_params(da_primitives::BlockLengthColumns(block_width as u32)); + let commits = polys + .commitments(public_params.commit_key()) + .unwrap() + .into_iter() + .flat_map(|p| p.to_bytes()) + .collect::>(); + + assert_eq!(evals.dims, Dimensions::new(4, 2)); + let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + assert_eq!(commits, expected_commitments); + } use proptest::prelude::*; proptest! 
{ From f76a49f471631a6adaeada57604c2cfd2e1eca88 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 5 Mar 2023 15:39:03 -0800 Subject: [PATCH 06/87] More consistency tests --- kate/src/gridgen.rs | 141 ++++++++++++++++++++++++++++++++------------ 1 file changed, 103 insertions(+), 38 deletions(-) diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index fb18d393..b1587bb4 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -318,10 +318,7 @@ fn round_up_power_of_2(mut v: usize) -> usize { #[cfg(test)] mod tests { - use crate::testnet; - use super::*; - use hex_literal::hex; use proptest::{prop_assert_eq, proptest}; use test_case::test_case; @@ -346,41 +343,6 @@ mod tests { multiproof_block(x, y, &GRID, &TARGET) } - #[test] - // Test build_commitments() function with a predefined input - fn newapi_test_build_commitments_simple_commitment_check() { - let original_data = br#"test"#; - let block_height = 256usize; - let block_width = 256usize; - let hash: Seed = [ - 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, - 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, - ]; - - let evals = EvaluationGrid::from_extrinsics( - vec![AppExtrinsic::from(original_data.to_vec())], - 4, - block_width, - block_height, - hash, - ) - .unwrap(); - let evals = evals.extend_columns(2).unwrap(); - let polys = evals.make_polynomial_grid().unwrap(); - let public_params = - testnet::public_params(da_primitives::BlockLengthColumns(block_width as u32)); - let commits = polys - .commitments(public_params.commit_key()) - .unwrap() - .into_iter() - .flat_map(|p| p.to_bytes()) - .collect::>(); - - assert_eq!(evals.dims, Dimensions::new(4, 2)); - let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); - assert_eq!(commits, expected_commitments); - } - use 
proptest::prelude::*; proptest! { #![proptest_config(ProptestConfig { @@ -429,3 +391,106 @@ where { get_block_dims(size, 4, 256, 256).unwrap() } } + +#[cfg(test)] +mod consistency_tests { + + use super::*; + use crate::testnet; + use dusk_plonk::prelude::PublicParameters; + use hex_literal::hex; + + fn pp() -> PublicParameters { + testnet::public_params(da_primitives::BlockLengthColumns(256)) + } + + #[test] + fn newapi_test_build_commitments_simple_commitment_check() { + let original_data = br#"test"#; + let block_height = 256usize; + let block_width = 256usize; + let hash: Seed = [ + 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, + 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, + ]; + + let evals = EvaluationGrid::from_extrinsics( + vec![AppExtrinsic::from(original_data.to_vec())], + 4, + block_width, + block_height, + hash, + ) + .unwrap(); + let evals = evals.extend_columns(2).unwrap(); + let polys = evals.make_polynomial_grid().unwrap(); + let commits = polys + .commitments(pp().commit_key()) + .unwrap() + .into_iter() + .flat_map(|p| p.to_bytes()) + .collect::>(); + + assert_eq!(evals.dims, Dimensions::new(4, 2)); + let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + assert_eq!(commits, expected_commitments); + } + + #[test] + fn newapi_par_build_commitments_row_wise_constant_row() { + // Due to scale encoding, first line is not constant. + // We will use second line to ensure constant row. 
+ let hash = Seed::default(); + let xts = vec![AppExtrinsic { + app_id: AppId(0), + data: vec![0; 31 * 8], + }]; + + let evals = EvaluationGrid::from_extrinsics(xts, 4, 4, 4, hash).unwrap(); + let evals = evals.extend_columns(2).unwrap(); + let polys = evals.make_polynomial_grid().unwrap(); + polys.commitments(pp().commit_key()).unwrap(); + } + #[test] + fn newapi_test_flatten_block() { + let extrinsics: Vec = vec![ + AppExtrinsic { + app_id: 0.into(), + data: (1..=29).collect(), + }, + AppExtrinsic { + app_id: 1.into(), + data: (1..=30).collect(), + }, + AppExtrinsic { + app_id: 2.into(), + data: (1..=31).collect(), + }, + AppExtrinsic { + app_id: 3.into(), + data: (1..=60).collect(), + }, + ]; + + let expected_dims = Dimensions::new(16, 1); + let evals = + EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); + + let expected_layout = vec![(0.into(), 2), (1.into(), 2), (2.into(), 2), (3.into(), 3)]; + assert_eq!(evals.layout, expected_layout, "The layouts don't match"); + assert_eq!( + evals.dims, expected_dims, + "Dimensions don't match the expected" + ); + + let expected_data = 
hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae281300"); + + let data = evals + .evals + .inner + .into_iter() + .flat_map(|s| s.to_bytes()) + .collect::>(); + assert_eq!(data, expected_data, "Data doesn't match the expected data"); + } +} From 881a44e9564f1a39ba6ed805ffe9a99723265ecd Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 5 Mar 2023 17:36:45 -0800 Subject: [PATCH 07/87] Test grid extension --- kate/src/gridgen.rs | 59 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index b1587bb4..54a524bd 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -493,4 +493,63 @@ mod consistency_tests { .collect::>(); assert_eq!(data, expected_data, "Data doesn't match the expected data"); } + + #[test] + fn newapi_test_extend_data_matrix() { + // This test expects this result in column major + let expected_result = vec![ + hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), + 
hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), + hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), + hex!("c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016"), + hex!("1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00"), + hex!("db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e"), + hex!("9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900"), + hex!("e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016"), + hex!("3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00"), + hex!("fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e"), + hex!("babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800"), + hex!("ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16"), + hex!("5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00"), + hex!("197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e"), + hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), + hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), + ] + .into_iter() + .map(|e| BlsScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) + .collect::>() + .as_column_major(4, 4) + .unwrap() + .to_row_major() + .inner; + + let block_dims = Dimensions::new(4, 2); + let scalars = (0..=247) + .collect::>() + .chunks_exact(DATA_CHUNK_SIZE) + .flat_map(|chunk| pad_to_bls_scalar(chunk)) + .collect::>(); + dbg!(scalars.len()); + + let grid = EvaluationGrid { + layout: vec![], + evals: scalars + .as_row_major(block_dims.width(), block_dims.height()) + .unwrap(), + dims: block_dims, + }; + let extend = grid.extend_columns(2).unwrap(); + + for i in 0..expected_result.len() { + let e = expected_result[i]; + for j in 0..expected_result.len() { + let r = extend.evals.inner[j]; + if e == r { + eprintln!("Eq: {} {}", i, j); + } + } + } + + assert_eq!(extend.evals.inner, expected_result); + } } From 
653ca3c3296904a3da1fa069db6f4014f2e8a02b Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 6 Mar 2023 15:17:59 -0800 Subject: [PATCH 08/87] Working reconstruction test --- kate/src/gridgen.rs | 128 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 126 insertions(+), 2 deletions(-) diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index 54a524bd..7abd1e69 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -157,10 +157,18 @@ impl PolynomialGrid { .collect() } + pub fn commitment(&self, srs: &CommitKey, row: usize) -> Result { + self.inner + .get(row) + .ok_or(Error::CellLenghtExceeded) + .and_then(|poly| srs.commit(&poly).map_err(|e| Error::PlonkError(e))) + } + pub fn proof(&self, srs: &CommitKey, cell: &Cell) -> Result { let x = cell.col.0 as usize; let y = cell.row.0 as usize; // TODO: better error msg + dbg!(y, self.inner.len()); let poly = self.inner.get(y).ok_or(Error::CellLenghtExceeded)?; let witness = srs.compute_single_witness(poly, &self.points[x]); Ok(srs.commit(&witness)?) 
@@ -394,11 +402,19 @@ where { #[cfg(test)] mod consistency_tests { - use super::*; use crate::testnet; use dusk_plonk::prelude::PublicParameters; use hex_literal::hex; + use kate_grid::Grid; + use kate_recovery::com::reconstruct_extrinsics; + use kate_recovery::data::Cell as DCell; + use kate_recovery::index::AppDataIndex; + use kate_recovery::matrix::Position as DPosition; + use proptest::prelude::*; + use proptest::{collection, sample::size_range, strategy::Strategy}; + use rand::distributions::Uniform; + use rand::prelude::Distribution; fn pp() -> PublicParameters { testnet::public_params(da_primitives::BlockLengthColumns(256)) @@ -496,7 +512,7 @@ mod consistency_tests { #[test] fn newapi_test_extend_data_matrix() { - // This test expects this result in column major + // This test expects this result in column major let expected_result = vec![ hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), @@ -552,4 +568,112 @@ mod consistency_tests { assert_eq!(extend.evals.inner, expected_result); } + + fn app_extrinsic_strategy() -> impl Strategy { + ( + any::(), + any_with::>(size_range(1..2048).lift()), + ) + .prop_map(|(app_id, data)| AppExtrinsic { + app_id: app_id.into(), + data, + }) + } + + fn app_extrinsics_strategy() -> impl Strategy> { + collection::vec(app_extrinsic_strategy(), size_range(1..16)).prop_map(|xts| { + let mut new_xts = xts; + new_xts.sort_by(|a1, a2| a1.app_id.cmp(&a2.app_id)); + new_xts + }) + } + + fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { + let mut sampled = vec![]; + let u = Uniform::from(0..n); + while sampled.len() < n_samples || sampled.len() < n { + let t = u.sample(rng); + if !sampled.contains(&t) { + sampled.push(t) + } + } + sampled + } + + // This copied method is still confusing to me... it just accumulates the size but skips over + // the app_id 0 size? not sure what's going on... 
+ fn app_data_index_try_from_layout(layout: Vec<(AppId, u32)>) -> AppDataIndex { + let mut index = Vec::new(); + // transactions are ordered by application id + // skip transactions with 0 application id - it's not a data txs + let mut size = 0u32; + let mut prev_app_id = AppId(0u32); + + for (app_id, data_len) in layout { + if app_id.0 != 0 && prev_app_id != app_id { + index.push((app_id.0, size)); + } + + size += data_len; + if prev_app_id > app_id { + panic!("App ID out of order") + } + prev_app_id = app_id; + } + + AppDataIndex { size, index } + } + + proptest! { + #![proptest_config(ProptestConfig::with_cases(5))] + #[test] + fn newapi_test_build_and_reconstruct(exts in app_extrinsics_strategy()) { + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(2).unwrap(); + let gref = &grid; + let dims = &grid.dims; + //let (layout, commitments, dims, matrix) = par_build_commitments( + // BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); + const RNG_SEED: Seed = [42u8; 32]; + let mut rng = ChaChaRng::from_seed(RNG_SEED); + let cells = (0..dims.width()) + .flat_map(move |x| { + sample_unique(&mut rng, dims.height()/2, dims.height()) + .into_iter() + .map(move |y| { + kate_recovery::data::DataCell { + position: kate_recovery::matrix::Position { row: y as u32, col: x as u16 }, + data: gref.evals.get(x, y).unwrap().to_bytes() + } + }).collect::>() + }).collect::>(); + let index = app_data_index_try_from_layout(grid.layout.clone()); + let bdims = kate_recovery::matrix::Dimensions::new(dims.height() as u16, dims.width() as u16).unwrap(); + let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); + for (result, xt) in reconstructed.iter().zip(exts) { + prop_assert_eq!(result.0, *xt.app_id); + prop_assert_eq!(result.1[0].as_slice(), &xt.data); + } + + let pp = pp(); + let polys = grid.make_polynomial_grid().unwrap(); + let commitments = 
polys.commitments(&pp.commit_key()).unwrap(); + let indices = (0..dims.width()).flat_map(|x| (0..dims.height()).map(move |y| (x, y))).collect::>(); + + // Sample some number 10 of the indices, all is too slow for tests... + let mut rng = ChaChaRng::from_seed(RNG_SEED); + let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i].clone()); + for (x, y) in sampled { + let cell = Cell { row: (y as u32).into(), col: (x as u32).into() }; + let proof = polys.proof(&pp.commit_key(), &cell).unwrap(); + let mut content = [0u8; 80]; + content[..48].copy_from_slice(&proof.to_bytes()[..]); + content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes()[..]); + + let dcell = DCell{position: DPosition { row: y as u32, col: x as u16 }, content }; + let verification = kate_recovery::proof::verify(&pp, &bdims, &commitments[y].to_bytes(), &dcell); + prop_assert!(verification.is_ok()); + prop_assert!(verification.unwrap()); + } + } + } } From 57cb78fc442cd3a266a03b65fcc8c8d55d11600e Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 6 Mar 2023 15:24:06 -0800 Subject: [PATCH 09/87] Add simpler api for ark serialization --- kate/src/lib.rs | 1 + kate/src/utils.rs | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 kate/src/utils.rs diff --git a/kate/src/lib.rs b/kate/src/lib.rs index b3c50e85..f50493e6 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -68,6 +68,7 @@ pub mod testnet { pub mod com; pub mod gridgen; +pub mod utils; /// Precalculate the length of padding IEC 9797 1. /// /// # NOTE diff --git a/kate/src/utils.rs b/kate/src/utils.rs new file mode 100644 index 00000000..24d5e9da --- /dev/null +++ b/kate/src/utils.rs @@ -0,0 +1,47 @@ +macro_rules! 
ser_impl { + ($t:ty, $len:expr) => { + impl ArkSimpleSerialize<$len> for $t { + fn to_bytes(&self) -> [u8; $len] { + use ark_serialize::CanonicalSerialize; + let mut out = [0u8; $len]; + self.serialize_compressed(&mut out[..]).unwrap(); + out + } + } + }; +} +trait ArkSimpleSerialize { + const LEN: usize = N; + fn to_bytes(&self) -> [u8; N]; +} + +ser_impl!(ark_bls12_381::G1Affine, 48); +ser_impl!(ark_bls12_381::G1Projective, 48); +ser_impl!(ark_bls12_381::G2Affine, 96); +ser_impl!(ark_bls12_381::G2Projective, 96); +ser_impl!(ark_bls12_381::Fr, 32); + +#[cfg(test)] +mod tests { + use ark_bls12_381::*; + use ark_ff::UniformRand; + use rand::{Rng, SeedableRng}; + use rand_chacha::ChaChaRng; + + use crate::Seed; + use super::ArkSimpleSerialize; + + fn test_nopanic + UniformRand>(rng: &mut impl Rng) { + let p = T::rand(rng); + p.to_bytes(); + } + #[test] + fn basic_nopanic() { + let mut rng = ChaChaRng::from_seed(Seed::default()); + test_nopanic::<32, Fr>(&mut rng); + test_nopanic::<48, G1Affine>(&mut rng); + test_nopanic::<48, G1Projective>(&mut rng); + test_nopanic::<96, G2Affine>(&mut rng); + test_nopanic::<96, G2Projective>(&mut rng); + } +} From f5f030c1be42b4ac62fcd227ae2e28a8eabc6ed8 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 6 Mar 2023 15:49:33 -0800 Subject: [PATCH 10/87] Add mp parameters --- Cargo.lock | 1 + kate/Cargo.toml | 1 + kate/src/lib.rs | 98 ++++++++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 98 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a8e7bc7a..2b29601a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1944,6 +1944,7 @@ version = "0.6.1" dependencies = [ "ark-bls12-381", "ark-ff", + "ark-poly", "ark-serialize", "criterion", "da-primitives", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 98d21908..4e95bf19 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -31,6 +31,7 @@ kate-grid = { path = "grid" } poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof" } ark-bls12-381 = 
"0.4.0" ark-ff = "0.4.1" +ark-poly = "0.4.1" ark-serialize = "0.4" merlin = "3" diff --git a/kate/src/lib.rs b/kate/src/lib.rs index f50493e6..41286d11 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -44,8 +44,12 @@ pub mod config { #[cfg(feature = "std")] pub mod testnet { use super::{BlockLengthColumns, PublicParameters}; - use once_cell::sync::Lazy; - use rand::SeedableRng; + use ark_bls12_381::{G1Projective, G2Projective, Fr}; +use ark_ff::{Fp, BigInt}; +use ark_serialize::CanonicalDeserialize; +use once_cell::sync::Lazy; + use poly_multiproof::m1_blst; +use rand::SeedableRng; use rand_chacha::ChaChaRng; use std::{collections::HashMap, sync::Mutex}; @@ -62,6 +66,96 @@ pub mod testnet { }) .clone() } + + const SEC_LIMBS: [u64; 4] = [ + 16526363067508752668, + 17870878028964021343, + 15693365399533249662, + 1020900941429372507, + ]; + const G1_BYTES: [u8; 48] = [ + 164, 95, 117, 74, 158, 148, 204, 203, 178, 203, 233, 215, 196, 65, 184, 181, 39, 2, 110, + 240, 94, 42, 58, 255, 74, 164, 187, 28, 87, 223, 55, 103, 251, 102, 156, 196, 199, 99, 155, + 211, 126, 104, 54, 83, 189, 197, 11, 90, + ]; + const G2_BYTES: [u8; 96] = [ + 184, 69, 172, 94, 123, 78, 200, 84, 29, 1, 38, 96, 39, 103, 114, 224, 1, 193, 224, 71, 94, + 96, 151, 24, 132, 72, 29, 67, 252, 189, 68, 222, 42, 2, 233, 134, 45, 191, 159, 83, 108, + 33, 24, 20, 246, 204, 84, 72, 16, 11, 205, 165, 220, 112, 120, 84, 175, 142, 56, 41, 117, + 13, 31, 177, 139, 18, 114, 134, 170, 164, 252, 149, 158, 115, 46, 33, 40, 168, 163, 21, + 242, 248, 244, 25, 191, 87, 116, 254, 4, 58, 244, 111, 187, 235, 75, 39, + ]; + + pub fn multiproof_params(max_degree: usize, max_pts: usize) -> m1_blst::M1NoPrecomp { + let x: Fr = Fp(BigInt(SEC_LIMBS), core::marker::PhantomData); + + let g1 = G1Projective::deserialize_compressed(&G1_BYTES[..]).unwrap(); + let g2 = G2Projective::deserialize_compressed(&G2_BYTES[..]).unwrap(); + + m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, max_degree + 1, max_pts) + } + + #[cfg(test)] + 
mod tests { + use core::marker::PhantomData; + + use super::*; + use ark_bls12_381::Fr; + use ark_ff::{BigInt, Fp}; + use ark_poly::EvaluationDomain; + use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; + use dusk_bytes::Serializable; + use dusk_plonk::{ + fft::{EvaluationDomain as PlonkED, Evaluations as PlonkEV}, + prelude::BlsScalar, + }; + use poly_multiproof::traits::Committer; + use rand::thread_rng; + + use crate::testnet; + #[test] + fn test_consistent_testnet_params() { + let x: Fr = Fp(BigInt(SEC_LIMBS), core::marker::PhantomData); + let mut out = [0u8; 32]; + x.serialize_compressed(&mut out[..]).unwrap(); + const SEC_BYTES: [u8; 32] = [ + 120, 72, 181, 215, 17, 188, 152, 131, 153, 99, 23, 163, 249, 201, 2, 105, 213, 103, + 113, 0, 93, 84, 10, 25, 24, 73, 57, 201, 232, 208, 219, 42, + ]; + assert_eq!(SEC_BYTES, out); + + let g1 = ark_bls12_381::G1Projective::deserialize_compressed(&G1_BYTES[..]).unwrap(); + let g2 = ark_bls12_381::G2Projective::deserialize_compressed(&G2_BYTES[..]).unwrap(); + + let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, 1024, 256); + + let dp_evals = (0..30) + .map(|_| BlsScalar::random(&mut thread_rng())) + .collect::>(); + + let pmp_evals = dp_evals + .iter() + .map(|i| Fp(BigInt(i.0), PhantomData)) + .collect::>(); + + let dp_poly = + PlonkEV::from_vec_and_domain(dp_evals, PlonkED::new(1024).unwrap()).interpolate(); + let pmp_ev = ark_poly::GeneralEvaluationDomain::::new(1024).unwrap(); + let pmp_poly = pmp_ev.ifft(&pmp_evals); + + let pubs = testnet::public_params(da_primitives::BlockLengthColumns(1024)); + + let dp_commit = pubs.commit_key().commit(&dp_poly).unwrap().0.to_bytes(); + let mut pmp_commit = [0u8; 48]; + pmp.commit(&pmp_poly) + .unwrap() + .0 + .serialize_compressed(&mut pmp_commit[..]) + .unwrap(); + + assert_eq!(dp_commit, pmp_commit); + } + } } #[cfg(feature = "std")] From a19b5763ab4b993d625a06799cb3b23d2f37a2e8 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 8 
Mar 2023 03:04:43 -0800 Subject: [PATCH 11/87] New api for getting app rows --- kate/grid/src/dims.rs | 32 +++++---- kate/grid/src/grid.rs | 109 ++++++++++++++++++----------- kate/src/gridgen.rs | 157 ++++++++++++++++++++++++++++++++---------- 3 files changed, 207 insertions(+), 91 deletions(-) diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index c802a5be..f23d565f 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -19,19 +19,19 @@ pub struct Extension { } impl Extension { - pub fn height(factor: usize) -> Self { - Self { - height_factor: factor, - width_factor: 1, - } - } - - pub fn width(factor: usize) -> Self { - Self { - height_factor: 1, - width_factor: factor, - } - } + pub fn height(factor: usize) -> Self { + Self { + height_factor: factor, + width_factor: 1, + } + } + + pub fn width(factor: usize) -> Self { + Self { + height_factor: 1, + width_factor: factor, + } + } } impl Dimensions { @@ -48,7 +48,11 @@ impl Dimensions { } pub fn n_cells(&self) -> usize { - self.width * self.height + self.width * self.height + } + + pub fn divides(&self, other: &Self) -> bool { + other.width() % self.width() == 0 && other.height() % self.height() == 0 } pub fn extend(&self, e: Extension) -> Self { diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index dee1859e..826292ad 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -1,86 +1,117 @@ use alloc::vec::Vec; +use crate::Dimensions; + pub trait Grid { fn width(&self) -> usize; fn height(&self) -> usize; + fn dims(&self) -> &Dimensions; // x indexes within a row, y indexes within a column // 0 <= x < width, 0 <= y < height - fn get(&self, x: usize, y: usize) -> Option<&A>; + fn get(&self, x: usize, y: usize) -> Option<&A> { + let i = Self::coord_to_ind(self.dims(), x, y); + self.get_ind(i) + } + fn get_ind(&self, i: usize) -> Option<&A>; + fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize); + fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize; } pub 
struct RowMajor { - width: usize, - height: usize, + dims: Dimensions, pub inner: Vec, } pub struct ColumnMajor { - width: usize, - height: usize, + dims: Dimensions, pub inner: Vec, } impl Grid for RowMajor { fn width(&self) -> usize { - self.width + self.dims.width() } fn height(&self) -> usize { - self.height + self.dims.height() } - fn get(&self, x: usize, y: usize) -> Option<&A> { - self.inner.get(x + y * self.width) + fn dims(&self) -> &Dimensions { + &self.dims + } + + fn get_ind(&self, i: usize) -> Option<&A> { + self.inner.get(i) + } + + fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { + (i % dims.width(), i / dims.width()) + } + + fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { + x + y * dims.width() } } impl Grid for ColumnMajor { fn width(&self) -> usize { - self.width + self.dims.width() } fn height(&self) -> usize { - self.height + self.dims.height() } - fn get(&self, x: usize, y: usize) -> Option<&A> { - self.inner.get(y + x * self.height) + fn dims(&self) -> &Dimensions { + &self.dims + } + + fn get_ind(&self, i: usize) -> Option<&A> { + self.inner.get(i) + } + + fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { + (i / dims.height(), i % dims.height()) + } + + fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { + y + x * dims.height() } } impl RowMajor { pub fn row(&self, y: usize) -> Option<&[A]> { - if y >= self.height { + if y >= self.height() { return None; } - Some(&self.inner[(y * self.width)..((y + 1) * self.width)]) + Some(&self.inner[(y * self.width())..((y + 1) * self.width())]) } pub fn iter_col(&self, x: usize) -> Option + '_> { - if x >= self.width { + if x >= self.width() { return None; } - Some((0..self.height).map(move |y| self.get(x, y).expect("Size checked at instantiation"))) + Some((0..self.height()).map(move |y| self.get(x, y).expect("Size checked at instantiation"))) } pub fn rows(&self) -> impl Iterator + '_ { - (0..self.height).map(|y| (y, 
self.row(y).expect("Bounds already checked"))) + (0..self.height()).map(|y| (y, self.row(y).expect("Bounds already checked"))) } // TODO: this return type is kinda gross, should it just iterate over vecs? pub fn columns(&self) -> impl Iterator)> + '_ { - (0..self.width).map(|x| (x, self.iter_col(x).expect("Bounds already checked"))) + (0..self.width()).map(|x| (x, self.iter_col(x).expect("Bounds already checked"))) } pub fn iter_row_wise(&self) -> impl Iterator + '_ { - (0..self.height).flat_map(move |y| { - (0..self.width).map(move |x| self.get(x, y).expect("Bounds already checked")) + (0..self.height()).flat_map(move |y| { + (0..self.width()).map(move |x| self.get(x, y).expect("Bounds already checked")) }) } pub fn iter_column_wise(&self) -> impl Iterator + '_ { - (0..self.width).flat_map(move |x| { - (0..self.height).map(move |y| self.get(x, y).expect("Bounds already checked")) + (0..self.width()).flat_map(move |x| { + (0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) }) } @@ -88,35 +119,35 @@ impl RowMajor { self.iter_column_wise() .map(Clone::clone) .collect::>() - .as_column_major(self.width, self.height) + .as_column_major(self.width(), self.height()) .expect("Bounds already checked") } } impl ColumnMajor { pub fn col(&self, x: usize) -> Option<&[A]> { - if x >= self.width { + if x >= self.width() { return None; } - Some(&self.inner[(x * self.height)..((x + 1) * self.height)]) + Some(&self.inner[(x * self.height())..((x + 1) * self.height())]) } pub fn iter_row(&self, y: usize) -> Option + '_> { - if y >= self.height { + if y >= self.height() { return None; } - Some((0..self.width).map(move |x| self.get(x, y).expect("Size checked at instantiation"))) + Some((0..self.width()).map(move |x| self.get(x, y).expect("Size checked at instantiation"))) } pub fn iter_row_wise(&self) -> impl Iterator + '_ { - (0..self.height).flat_map(move |y| { - (0..self.width).map(move |x| self.get(x, y).expect("Bounds already checked")) + 
(0..self.height()).flat_map(move |y| { + (0..self.width()).map(move |x| self.get(x, y).expect("Bounds already checked")) }) } pub fn iter_column_wise(&self) -> impl Iterator + '_ { - (0..self.width).flat_map(move |x| { - (0..self.height).map(move |y| self.get(x, y).expect("Bounds already checked")) + (0..self.width()).flat_map(move |x| { + (0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) }) } @@ -124,7 +155,7 @@ impl ColumnMajor { self.iter_row_wise() .map(Clone::clone) .collect::>() - .as_row_major(self.width, self.height) + .as_row_major(self.width(), self.height()) .expect("Bounds already checked") } } @@ -141,8 +172,7 @@ impl AsRowMajor for Vec { fn as_row_major(self, width: usize, height: usize) -> Option> { if self.len() == width * height { Some(RowMajor { - width, - height, + dims: Dimensions::new(width, height), inner: self, }) } else { @@ -155,8 +185,7 @@ impl AsColumnMajor for Vec { fn as_column_major(self, width: usize, height: usize) -> Option> { if self.len() == width * height { Some(ColumnMajor { - width, - height, + dims: Dimensions::new(width, height), inner: self, }) } else { @@ -169,8 +198,7 @@ impl AsColumnMajor for [A; LEN] { fn as_column_major(self, width: usize, height: usize) -> Option> { if self.len() == width * height { Some(ColumnMajor { - width, - height, + dims: Dimensions::new(width, height), inner: self.into(), }) } else { @@ -183,8 +211,7 @@ impl AsRowMajor for [A; LEN] { fn as_row_major(self, width: usize, height: usize) -> Option> { if self.len() == width * height { Some(RowMajor { - width, - height, + dims: Dimensions::new(width, height), inner: self.into(), }) } else { diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index 7abd1e69..fdd80bb5 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -1,28 +1,28 @@ use core::marker::PhantomData; use codec::Encode; -use da_primitives::asdr::{AppExtrinsic, AppId}; +use da_primitives::asdr::{AppExtrinsic, AppId, DataLookup, 
DataLookupIndexItem}; use dusk_bytes::Serializable; use dusk_plonk::{ commitment_scheme::kzg10::commitment::Commitment, fft::{EvaluationDomain, Polynomial}, prelude::{BlsScalar, CommitKey}, }; -use kate_grid::{AsColumnMajor, AsRowMajor, Dimensions, Extension, RowMajor}; -use kate_recovery::config::PADDING_TAIL_VALUE; +use kate_grid::{AsColumnMajor, AsRowMajor, Dimensions, Extension, Grid, RowMajor}; +use kate_recovery::config::PADDING_TAIL_VALUE, index::AppDataIndex}; use merlin::Transcript; use poly_multiproof::m1_blst::M1NoPrecomp; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; use crate::{ - com::{Cell, Error, XtsLayout}, + com::{Cell, Error}, config::DATA_CHUNK_SIZE, Seed, }; pub struct EvaluationGrid { - pub layout: XtsLayout, + pub lookup: DataLookup, pub evals: RowMajor, pub dims: Dimensions, } @@ -62,11 +62,16 @@ impl EvaluationGrid { }) .collect::, _>>()?; - // Get the layout of each app id's start - let layout = encoded - .iter() - .map(|(id, data)| (*id, data.len() as u32)) - .collect::>(); + // make the index of app info + let mut start = 0u32; + let mut index = vec![]; + for (app_id, scalars) in &encoded { + index.push(DataLookupIndexItem { + app_id: *app_id, + start, + }); + start += scalars.len() as u32; // next item should start after current one + } // Flatten the grid let mut grid = encoded @@ -74,6 +79,11 @@ impl EvaluationGrid { .flat_map(|(_, scalars)| scalars) .collect::>(); + let lookup = DataLookup { + size: grid.len() as u32, + index, + }; + // Fit the grid to the desired grid size let dims = get_block_dims(grid.len(), min_width, max_width, max_height)?; let mut rng = ChaChaRng::from_seed(rng_seed); @@ -84,7 +94,7 @@ impl EvaluationGrid { } Ok(EvaluationGrid { - layout, + lookup, evals: grid .as_row_major(dims.width(), dims.height()) .ok_or(Error::DimensionsMismatch)?, @@ -92,6 +102,55 @@ impl EvaluationGrid { }) } + /// Returns the start/end indices of the given app id *for the non-extended grid* + fn app_data_indices(&self, 
app_id: &AppId) -> Option<(usize, usize)> { + if self.lookup.size == 0 { + // Empty block, short circuit. + return None; + } + let (i, start_index) = self + .lookup + .index + .iter() + .enumerate() + .find(|(_i, item)| &item.app_id == app_id) + .map(|(i, item)| (i, item.start as usize))?; + let end_index = self + .lookup + .index + .get(i + 1) + .map(|elem| elem.start) + .unwrap_or(self.lookup.size) as usize; + Some((start_index, end_index)) + } + + /// Returns a list of `(index, row)` pairs for the underlying rows of an application. + /// Returns `None` if the `app_id` cannot be found, or if the provided `orig_dims` are invalid. + pub fn app_rows( + &self, + app_id: &AppId, + orig_dims: Option<&Dimensions>, + ) -> Option)>> { + let orig_dims = orig_dims.unwrap_or(&self.dims); + if !orig_dims.divides(&self.dims) { + dbg!(&orig_dims, &self.dims); + dbg!("hello"); + return None; + } + let h_mul = self.dims.height() / orig_dims.height(); + + dbg!(&app_id, &self.lookup.index); + let (start_ind, end_ind) = self.app_data_indices(app_id)?; + let (_, start_y) = RowMajor::<()>::ind_to_coord(&orig_dims, start_ind); + let (_, end_y) = RowMajor::<()>::ind_to_coord(&orig_dims, end_ind - 1); // Find y of last cell elt + let (new_start_y, new_end_y) = (start_y * h_mul, end_y * h_mul); + + (new_start_y..=new_end_y) + .step_by(h_mul) + .map(|y| self.evals.row(y).map(|a| (y, a.to_vec()))) + .collect() + } + pub fn extend_columns(&self, extension_factor: usize) -> Result { let new_dims = self.dims.extend(Extension::height(extension_factor)); @@ -121,7 +180,7 @@ impl EvaluationGrid { .to_row_major(); Ok(Self { - layout: self.layout.clone(), + lookup: self.lookup.clone(), evals: new_evals, dims: new_dims, }) @@ -241,7 +300,7 @@ pub struct CellBlock { end_x: usize, end_y: usize, } -fn multiproof_block( +pub fn multiproof_block( x: usize, y: usize, grid_dims: &Dimensions, @@ -492,8 +551,17 @@ mod consistency_tests { let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, 
Seed::default()).unwrap(); - let expected_layout = vec![(0.into(), 2), (1.into(), 2), (2.into(), 2), (3.into(), 3)]; - assert_eq!(evals.layout, expected_layout, "The layouts don't match"); + let expected_index = [(0.into(), 0), (1.into(), 2), (2.into(), 4), (3.into(), 6)] + .into_iter() + .map(|(app_id, start)| DataLookupIndexItem { app_id, start }) + .collect::>(); + + let expected_lookup = DataLookup { + size: 9, + index: expected_index, + }; + + assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); assert_eq!( evals.dims, expected_dims, "Dimensions don't match the expected" @@ -548,7 +616,7 @@ mod consistency_tests { dbg!(scalars.len()); let grid = EvaluationGrid { - layout: vec![], + lookup: DataLookup::default(), evals: scalars .as_row_major(block_dims.width(), block_dims.height()) .unwrap(), @@ -602,26 +670,11 @@ mod consistency_tests { // This copied method is still confusing to me... it just accumulates the size but skips over // the app_id 0 size? not sure what's going on... - fn app_data_index_try_from_layout(layout: Vec<(AppId, u32)>) -> AppDataIndex { - let mut index = Vec::new(); - // transactions are ordered by application id - // skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = AppId(0u32); - - for (app_id, data_len) in layout { - if app_id.0 != 0 && prev_app_id != app_id { - index.push((app_id.0, size)); - } - - size += data_len; - if prev_app_id > app_id { - panic!("App ID out of order") - } - prev_app_id = app_id; + fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { + AppDataIndex { + size: lookup.size, + index: lookup.index.iter().map(|e| (e.app_id.0, e.start)).collect(), } - - AppDataIndex { size, index } } proptest! 
{ @@ -646,7 +699,7 @@ mod consistency_tests { } }).collect::>() }).collect::>(); - let index = app_data_index_try_from_layout(grid.layout.clone()); + let index = app_data_index_from_lookup(&grid.lookup); let bdims = kate_recovery::matrix::Dimensions::new(dims.height() as u16, dims.width() as u16).unwrap(); let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); for (result, xt) in reconstructed.iter().zip(exts) { @@ -676,4 +729,36 @@ mod consistency_tests { } } } + + proptest! { + #![proptest_config(ProptestConfig::with_cases(1))] + #[test] + fn newapi_commitments_verify(ref exts in app_extrinsics_strategy()) { + //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); + let orig_dims = Dimensions::new(grid.dims.width(), grid.dims.height() / 2); + let polys = grid.make_polynomial_grid().unwrap(); + let commits = polys.commitments(&pp().commit_key()) + .unwrap() + .iter() + .map(|c| c.to_bytes()) + .collect::>(); + + let index = app_data_index_from_lookup(&grid.lookup); + let public_params = testnet::public_params((grid.dims.width() as u32).into()); + + for xt in exts { + let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); + // Have to put the rows we find in this funky data structure + let mut app_rows = vec![None; grid.dims.height()]; + for (row_i, row) in rows { + app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes()).collect()); + } + // Need to provide the original dimensions here too + let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); + let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); + prop_assert!(missing.is_empty()); + } + } + } } From 
af12c23c96cfc2ef4f61139df9c4b93da4e81c15 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Tue, 14 Mar 2023 00:42:48 -0700 Subject: [PATCH 12/87] Small refactor --- kate/src/gridgen.rs | 6 +++++- kate/src/lib.rs | 12 ++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index fdd80bb5..a2d0ff9b 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -9,7 +9,7 @@ use dusk_plonk::{ prelude::{BlsScalar, CommitKey}, }; use kate_grid::{AsColumnMajor, AsRowMajor, Dimensions, Extension, Grid, RowMajor}; -use kate_recovery::config::PADDING_TAIL_VALUE, index::AppDataIndex}; +use kate_recovery::config::PADDING_TAIL_VALUE; use merlin::Transcript; use poly_multiproof::m1_blst::M1NoPrecomp; use rand::{Rng, SeedableRng}; @@ -102,6 +102,10 @@ impl EvaluationGrid { }) } + pub fn row(&self, y: usize) -> Option<&[BlsScalar]> { + self.evals.row(y) + } + /// Returns the start/end indices of the given app id *for the non-extended grid* fn app_data_indices(&self, app_id: &AppId) -> Option<(usize, usize)> { if self.lookup.size == 0 { diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 41286d11..ab480847 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -16,7 +16,7 @@ pub type Seed = [u8; 32]; pub mod config { use super::{BlockLengthColumns, BlockLengthRows}; - // TODO: Delete this? not used anywhere + // TODO: Delete this? 
not used anywhere pub const SCALAR_SIZE_WIDE: usize = 64; pub const SCALAR_SIZE: usize = 32; @@ -44,12 +44,12 @@ pub mod config { #[cfg(feature = "std")] pub mod testnet { use super::{BlockLengthColumns, PublicParameters}; - use ark_bls12_381::{G1Projective, G2Projective, Fr}; -use ark_ff::{Fp, BigInt}; -use ark_serialize::CanonicalDeserialize; -use once_cell::sync::Lazy; + use ark_bls12_381::{Fr, G1Projective, G2Projective}; + use ark_ff::{BigInt, Fp}; + use ark_serialize::CanonicalDeserialize; + use once_cell::sync::Lazy; use poly_multiproof::m1_blst; -use rand::SeedableRng; + use rand::SeedableRng; use rand_chacha::ChaChaRng; use std::{collections::HashMap, sync::Mutex}; From cab10176702b76c5853a28dd11bca6fbe58f9706 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 16 Mar 2023 17:18:58 -0700 Subject: [PATCH 13/87] Fix wasm build --- Cargo.lock | 1 - kate/Cargo.toml | 18 ++++++++++++------ kate/src/lib.rs | 2 ++ 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8f3edf01..a14bf581 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2655,7 +2655,6 @@ dependencies = [ "ark-std", "blst", "merlin 3.0.0", - "thiserror", ] [[package]] diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 7dadf5ad..8ec2bac7 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -28,12 +28,12 @@ sp-core = { version = "7.0.0", default-features = false } sp-std = { version = "4.0.0", default-features = false } static_assertions = "1.1.0" kate-grid = { path = "grid" } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof" } -ark-bls12-381 = "0.4.0" -ark-ff = "0.4.1" -ark-poly = "0.4.1" -ark-serialize = "0.4" -merlin = "3" +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false } +ark-bls12-381 = { version = "0.4.0" } +ark-ff = { version = "0.4.1", default-features = false } +ark-poly = { version = "0.4.1", default-features = false } +ark-serialize = { version = "0.4", default-features = false } 
+merlin = { version = "3", default-features = false } [dev-dependencies] criterion = "0.3.5" @@ -68,6 +68,12 @@ std = [ "rand_core/std", "frame-support/std", "da-primitives/std", + "ark-bls12-381/std", + "ark-ff/std", + "ark-poly/std", + "ark-serialize/std", + "merlin/std", + "poly-multiproof/blst", ] extended-columns = [] maximum-block-size = [] diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 11a143be..e36e24ac 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -163,7 +163,9 @@ pub mod metrics; #[cfg(feature = "std")] pub mod com; +#[cfg(feature = "std")] pub mod gridgen; + pub mod utils; /// Precalculate the length of padding IEC 9797 1. /// From 57602e7dda1906d207c22ca0afe3ec3419af36cc Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 16 Mar 2023 20:13:43 -0700 Subject: [PATCH 14/87] Simplify imports through PMP --- Cargo.lock | 7 +------ kate/Cargo.toml | 12 +----------- kate/src/com.rs | 2 +- kate/src/gridgen.rs | 25 ++++++++++++------------- kate/src/lib.rs | 28 +++++++++++++++------------- kate/src/utils.rs | 26 ++++++++++++++------------ 6 files changed, 44 insertions(+), 56 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a14bf581..e3681291 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1942,10 +1942,6 @@ dependencies = [ name = "kate" version = "0.7.0" dependencies = [ - "ark-bls12-381", - "ark-ff", - "ark-poly", - "ark-serialize", "criterion", "da-primitives", "derive_more", @@ -1959,7 +1955,6 @@ dependencies = [ "kate-grid", "kate-recovery", "log", - "merlin 3.0.0", "num_cpus", "once_cell", "parity-scale-codec", @@ -2645,7 +2640,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof#b0783e1b69b35c744abd522ee482de51a2181888" +source = "git+https://github.com/aphoh/poly-multiproof?rev=3345c76b84a14accb90020974a8cab837fe598a0#3345c76b84a14accb90020974a8cab837fe598a0" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml 
index 8ec2bac7..49ac70af 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -28,12 +28,7 @@ sp-core = { version = "7.0.0", default-features = false } sp-std = { version = "4.0.0", default-features = false } static_assertions = "1.1.0" kate-grid = { path = "grid" } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false } -ark-bls12-381 = { version = "0.4.0" } -ark-ff = { version = "0.4.1", default-features = false } -ark-poly = { version = "0.4.1", default-features = false } -ark-serialize = { version = "0.4", default-features = false } -merlin = { version = "3", default-features = false } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "3345c76b84a14accb90020974a8cab837fe598a0" } [dev-dependencies] criterion = "0.3.5" @@ -68,11 +63,6 @@ std = [ "rand_core/std", "frame-support/std", "da-primitives/std", - "ark-bls12-381/std", - "ark-ff/std", - "ark-poly/std", - "ark-serialize/std", - "merlin/std", "poly-multiproof/blst", ] extended-columns = [] diff --git a/kate/src/com.rs b/kate/src/com.rs index 396dee6b..18f76412 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -18,7 +18,7 @@ use dusk_plonk::{ prelude::{BlsScalar, CommitKey}, }; use frame_support::{ensure, sp_runtime::SaturatedConversion}; -use kate_grid::{AsColumnMajor, AsRowMajor, Extension}; +use kate_grid::{AsRowMajor, Extension}; #[cfg(feature = "std")] use kate_recovery::{com::app_specific_rows, index, matrix}; use rand::{Rng, SeedableRng}; diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index a2d0ff9b..a4ec22dc 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -10,8 +10,7 @@ use dusk_plonk::{ }; use kate_grid::{AsColumnMajor, AsRowMajor, Dimensions, Extension, Grid, RowMajor}; use kate_recovery::config::PADDING_TAIL_VALUE; -use merlin::Transcript; -use poly_multiproof::m1_blst::M1NoPrecomp; +use poly_multiproof::{m1_blst::M1NoPrecomp, merlin::Transcript}; use rand::{Rng, SeedableRng}; 
use rand_chacha::ChaChaRng; @@ -102,9 +101,9 @@ impl EvaluationGrid { }) } - pub fn row(&self, y: usize) -> Option<&[BlsScalar]> { - self.evals.row(y) - } + pub fn row(&self, y: usize) -> Option<&[BlsScalar]> { + self.evals.row(y) + } /// Returns the start/end indices of the given app id *for the non-extended grid* fn app_data_indices(&self, app_id: &AppId) -> Option<(usize, usize)> { @@ -283,9 +282,9 @@ impl PolynomialGrid { } } -fn convert_bls(dusk: &dusk_plonk::bls12_381::BlsScalar) -> ark_bls12_381::Fr { - ark_bls12_381::Fr { - 0: ark_ff::BigInt(dusk.0.clone()), +fn convert_bls(dusk: &dusk_plonk::bls12_381::BlsScalar) -> poly_multiproof::m1_blst::Fr { + poly_multiproof::m1_blst::Fr { + 0: poly_multiproof::ark_ff::BigInt(dusk.0.clone()), 1: PhantomData, } } @@ -293,7 +292,7 @@ fn convert_bls(dusk: &dusk_plonk::bls12_381::BlsScalar) -> ark_bls12_381::Fr { #[derive(Debug, Clone)] pub struct Multiproof { pub proof: poly_multiproof::m1_blst::Proof, - pub evals: Vec>, + pub evals: Vec>, pub block: CellBlock, } @@ -429,7 +428,7 @@ mod tests { #[test] fn test_convert_bls_scalar(input: [u8; 31]) { - use ark_serialize::CanonicalSerialize; + use poly_multiproof::ark_serialize::CanonicalSerialize; let dusk = pad_to_bls_scalar(&input).unwrap(); let ark = convert_bls(&dusk); let dusk_out = dusk.to_bytes(); @@ -753,13 +752,13 @@ mod consistency_tests { for xt in exts { let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); - // Have to put the rows we find in this funky data structure + // Have to put the rows we find in this funky data structure let mut app_rows = vec![None; grid.dims.height()]; for (row_i, row) in rows { app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes()).collect()); } - // Need to provide the original dimensions here too - let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); + // Need to provide the original dimensions here too + let extended_dims = 
kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); prop_assert!(missing.is_empty()); } diff --git a/kate/src/lib.rs b/kate/src/lib.rs index e36e24ac..04fc1d44 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -44,11 +44,11 @@ pub mod config { #[cfg(feature = "std")] pub mod testnet { use super::{BlockLengthColumns, PublicParameters}; - use ark_bls12_381::{Fr, G1Projective, G2Projective}; - use ark_ff::{BigInt, Fp}; - use ark_serialize::CanonicalDeserialize; use once_cell::sync::Lazy; + use poly_multiproof::ark_ff::{BigInt, Fp}; + use poly_multiproof::ark_serialize::CanonicalDeserialize; use poly_multiproof::m1_blst; + use poly_multiproof::m1_blst::{Fr, G1, G2}; use rand::SeedableRng; use rand_chacha::ChaChaRng; use std::{collections::HashMap, sync::Mutex}; @@ -89,8 +89,8 @@ pub mod testnet { pub fn multiproof_params(max_degree: usize, max_pts: usize) -> m1_blst::M1NoPrecomp { let x: Fr = Fp(BigInt(SEC_LIMBS), core::marker::PhantomData); - let g1 = G1Projective::deserialize_compressed(&G1_BYTES[..]).unwrap(); - let g2 = G2Projective::deserialize_compressed(&G2_BYTES[..]).unwrap(); + let g1 = G1::deserialize_compressed(&G1_BYTES[..]).unwrap(); + let g2 = G2::deserialize_compressed(&G2_BYTES[..]).unwrap(); m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, max_degree + 1, max_pts) } @@ -100,16 +100,18 @@ pub mod testnet { use core::marker::PhantomData; use super::*; - use ark_bls12_381::Fr; - use ark_ff::{BigInt, Fp}; - use ark_poly::EvaluationDomain; - use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use dusk_bytes::Serializable; use dusk_plonk::{ fft::{EvaluationDomain as PlonkED, Evaluations as PlonkEV}, prelude::BlsScalar, }; - use poly_multiproof::traits::Committer; + use poly_multiproof::{ + ark_ff::{BigInt, Fp}, + ark_poly::{EvaluationDomain, 
GeneralEvaluationDomain}, + ark_serialize::{CanonicalDeserialize, CanonicalSerialize}, + m1_blst::Fr, + traits::Committer, + }; use rand::thread_rng; use crate::testnet; @@ -124,8 +126,8 @@ pub mod testnet { ]; assert_eq!(SEC_BYTES, out); - let g1 = ark_bls12_381::G1Projective::deserialize_compressed(&G1_BYTES[..]).unwrap(); - let g2 = ark_bls12_381::G2Projective::deserialize_compressed(&G2_BYTES[..]).unwrap(); + let g1 = G1::deserialize_compressed(&G1_BYTES[..]).unwrap(); + let g2 = G2::deserialize_compressed(&G2_BYTES[..]).unwrap(); let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, 1024, 256); @@ -140,7 +142,7 @@ pub mod testnet { let dp_poly = PlonkEV::from_vec_and_domain(dp_evals, PlonkED::new(1024).unwrap()).interpolate(); - let pmp_ev = ark_poly::GeneralEvaluationDomain::::new(1024).unwrap(); + let pmp_ev = GeneralEvaluationDomain::::new(1024).unwrap(); let pmp_poly = pmp_ev.ifft(&pmp_evals); let pubs = testnet::public_params(da_primitives::BlockLengthColumns(1024)); diff --git a/kate/src/utils.rs b/kate/src/utils.rs index 24d5e9da..45727cde 100644 --- a/kate/src/utils.rs +++ b/kate/src/utils.rs @@ -2,7 +2,7 @@ macro_rules! 
ser_impl { ($t:ty, $len:expr) => { impl ArkSimpleSerialize<$len> for $t { fn to_bytes(&self) -> [u8; $len] { - use ark_serialize::CanonicalSerialize; + use poly_multiproof::ark_serialize::CanonicalSerialize; let mut out = [0u8; $len]; self.serialize_compressed(&mut out[..]).unwrap(); out @@ -15,21 +15,23 @@ trait ArkSimpleSerialize { fn to_bytes(&self) -> [u8; N]; } -ser_impl!(ark_bls12_381::G1Affine, 48); -ser_impl!(ark_bls12_381::G1Projective, 48); -ser_impl!(ark_bls12_381::G2Affine, 96); -ser_impl!(ark_bls12_381::G2Projective, 96); -ser_impl!(ark_bls12_381::Fr, 32); +ser_impl!(poly_multiproof::m1_blst::G1Affine, 48); +ser_impl!(poly_multiproof::m1_blst::G1, 48); +ser_impl!(poly_multiproof::m1_blst::G2Affine, 96); +ser_impl!(poly_multiproof::m1_blst::G2, 96); +ser_impl!(poly_multiproof::m1_blst::Fr, 32); #[cfg(test)] mod tests { - use ark_bls12_381::*; - use ark_ff::UniformRand; + use poly_multiproof::{ + ark_ff::UniformRand, + m1_blst::{Fr, G1Affine, G2Affine, G1, G2}, + }; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; - use crate::Seed; use super::ArkSimpleSerialize; + use crate::Seed; fn test_nopanic + UniformRand>(rng: &mut impl Rng) { let p = T::rand(rng); @@ -38,10 +40,10 @@ mod tests { #[test] fn basic_nopanic() { let mut rng = ChaChaRng::from_seed(Seed::default()); - test_nopanic::<32, Fr>(&mut rng); + test_nopanic::<32, Fr>(&mut rng); test_nopanic::<48, G1Affine>(&mut rng); - test_nopanic::<48, G1Projective>(&mut rng); + test_nopanic::<48, G1>(&mut rng); test_nopanic::<96, G2Affine>(&mut rng); - test_nopanic::<96, G2Projective>(&mut rng); + test_nopanic::<96, G2>(&mut rng); } } From 46efdcb1c1ba23425ce134861509500c54626d94 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 16 Mar 2023 21:10:35 -0700 Subject: [PATCH 15/87] Open multiproof dim methods, re-export grid --- kate/src/gridgen.rs | 45 +++++++++++++++++++++++++++------------------ kate/src/lib.rs | 2 ++ 2 files changed, 29 insertions(+), 18 deletions(-) diff --git 
a/kate/src/gridgen.rs b/kate/src/gridgen.rs index a4ec22dc..c8388ddf 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -136,13 +136,10 @@ impl EvaluationGrid { ) -> Option)>> { let orig_dims = orig_dims.unwrap_or(&self.dims); if !orig_dims.divides(&self.dims) { - dbg!(&orig_dims, &self.dims); - dbg!("hello"); return None; } let h_mul = self.dims.height() / orig_dims.height(); - dbg!(&app_id, &self.lookup.index); let (start_ind, end_ind) = self.app_data_indices(app_id)?; let (_, start_y) = RowMajor::<()>::ind_to_coord(&orig_dims, start_ind); let (_, end_y) = RowMajor::<()>::ind_to_coord(&orig_dims, end_ind - 1); // Find y of last cell elt @@ -230,7 +227,6 @@ impl PolynomialGrid { let x = cell.col.0 as usize; let y = cell.row.0 as usize; // TODO: better error msg - dbg!(y, self.inner.len()); let poly = self.inner.get(y).ok_or(Error::CellLenghtExceeded)?; let witness = srs.compute_single_witness(poly, &self.points[x]); Ok(srs.commit(&witness)?) @@ -303,26 +299,29 @@ pub struct CellBlock { end_x: usize, end_y: usize, } + +/// Computes the `x, y`-th multiproof block of a grid of size `grid_dims`. +/// `mp_grid_dims` is the size of the multiproof grid, which `x,y` lies in. +/// For example, a 256x256 grid could be converted to a 4x4 multiproof grid, by making 16 multiproofs +/// of size 64x64. So the `mp_grid_dims` would be 4x4, and the `grid_dims` size would be 256x256. +/// +/// In order to get `mp_grid_dims`, it's recommended to use the `multiproof_dims` function. 
pub fn multiproof_block( x: usize, y: usize, grid_dims: &Dimensions, - target_dims: &Dimensions, + mp_grid_dims: &Dimensions, ) -> Option { - let target_width = core::cmp::min(grid_dims.width(), target_dims.width()); - let target_height = core::cmp::min(grid_dims.height(), target_dims.height()); - dbg!(&target_width, target_height); - dbg!(&x, &y); - if x >= target_width || y >= target_height { - return None; - } - - if grid_dims.width() % target_width != 0 || grid_dims.height() % target_height != 0 { + if x >= mp_grid_dims.width() + || y >= mp_grid_dims.height() + || grid_dims.width() % mp_grid_dims.width() != 0 + || grid_dims.height() % mp_grid_dims.height() != 0 + { return None; } - let block_width = grid_dims.width() / target_width; - let block_height = grid_dims.height() / target_height; + let block_width = grid_dims.width() / mp_grid_dims.width(); + let block_height = grid_dims.height() / mp_grid_dims.height(); Some(CellBlock { start_x: x * block_width, start_y: y * block_height, @@ -331,7 +330,18 @@ pub fn multiproof_block( }) } -fn get_block_dims( +/// Dimensions of the multiproof grid. These are guarenteed to cleanly divide `grid_dims`. +/// `target_dims` must cleanly divide `grid_dims`. 
+pub fn multiproof_dims(grid_dims: &Dimensions, target_dims: &Dimensions) -> Option { + let target_width = core::cmp::min(grid_dims.width(), target_dims.width()); + let target_height = core::cmp::min(grid_dims.height(), target_dims.height()); + if grid_dims.width() % target_width != 0 || grid_dims.height() % target_height != 0 { + return None; + } + Some(Dimensions::new(target_width, target_height)) +} + +pub fn get_block_dims( n_scalars: usize, min_width: usize, max_width: usize, @@ -616,7 +626,6 @@ mod consistency_tests { .chunks_exact(DATA_CHUNK_SIZE) .flat_map(|chunk| pad_to_bls_scalar(chunk)) .collect::>(); - dbg!(scalars.len()); let grid = EvaluationGrid { lookup: DataLookup::default(), diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 04fc1d44..25a9d2e6 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -13,6 +13,8 @@ use crate::config::DATA_CHUNK_SIZE; pub const LOG_TARGET: &str = "kate"; pub type Seed = [u8; 32]; +pub use kate_grid as grid; + pub mod config { use super::{BlockLengthColumns, BlockLengthRows}; From a48356ad2a3b9d27fa7468759edc3ffefcab142a Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 16 Mar 2023 21:22:39 -0700 Subject: [PATCH 16/87] Expose pmp --- kate/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 25a9d2e6..26675ad0 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -14,6 +14,7 @@ pub const LOG_TARGET: &str = "kate"; pub type Seed = [u8; 32]; pub use kate_grid as grid; +pub use poly_multiproof as pmp; pub mod config { use super::{BlockLengthColumns, BlockLengthRows}; From 80439beaa5ee9a09471d63968e212b0dda9128c1 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 16 Mar 2023 21:33:24 -0700 Subject: [PATCH 17/87] Remove unneeded ark ser trait --- kate/src/lib.rs | 1 - kate/src/utils.rs | 49 ----------------------------------------------- 2 files changed, 50 deletions(-) delete mode 100644 kate/src/utils.rs diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 
26675ad0..552fb64c 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -171,7 +171,6 @@ pub mod com; #[cfg(feature = "std")] pub mod gridgen; -pub mod utils; /// Precalculate the length of padding IEC 9797 1. /// /// # NOTE diff --git a/kate/src/utils.rs b/kate/src/utils.rs deleted file mode 100644 index 45727cde..00000000 --- a/kate/src/utils.rs +++ /dev/null @@ -1,49 +0,0 @@ -macro_rules! ser_impl { - ($t:ty, $len:expr) => { - impl ArkSimpleSerialize<$len> for $t { - fn to_bytes(&self) -> [u8; $len] { - use poly_multiproof::ark_serialize::CanonicalSerialize; - let mut out = [0u8; $len]; - self.serialize_compressed(&mut out[..]).unwrap(); - out - } - } - }; -} -trait ArkSimpleSerialize { - const LEN: usize = N; - fn to_bytes(&self) -> [u8; N]; -} - -ser_impl!(poly_multiproof::m1_blst::G1Affine, 48); -ser_impl!(poly_multiproof::m1_blst::G1, 48); -ser_impl!(poly_multiproof::m1_blst::G2Affine, 96); -ser_impl!(poly_multiproof::m1_blst::G2, 96); -ser_impl!(poly_multiproof::m1_blst::Fr, 32); - -#[cfg(test)] -mod tests { - use poly_multiproof::{ - ark_ff::UniformRand, - m1_blst::{Fr, G1Affine, G2Affine, G1, G2}, - }; - use rand::{Rng, SeedableRng}; - use rand_chacha::ChaChaRng; - - use super::ArkSimpleSerialize; - use crate::Seed; - - fn test_nopanic + UniformRand>(rng: &mut impl Rng) { - let p = T::rand(rng); - p.to_bytes(); - } - #[test] - fn basic_nopanic() { - let mut rng = ChaChaRng::from_seed(Seed::default()); - test_nopanic::<32, Fr>(&mut rng); - test_nopanic::<48, G1Affine>(&mut rng); - test_nopanic::<48, G1>(&mut rng); - test_nopanic::<96, G2Affine>(&mut rng); - test_nopanic::<96, G2>(&mut rng); - } -} From 6560e2eaf6143f16918fa9653d78094a7e4dafa7 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 16 Mar 2023 22:01:27 -0700 Subject: [PATCH 18/87] Better multiproof dim api, testing --- kate/src/gridgen.rs | 36 +++++++++++++++++++++++++----------- 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index 
c8388ddf..b9c1da73 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -302,21 +302,16 @@ pub struct CellBlock { /// Computes the `x, y`-th multiproof block of a grid of size `grid_dims`. /// `mp_grid_dims` is the size of the multiproof grid, which `x,y` lies in. -/// For example, a 256x256 grid could be converted to a 4x4 multiproof grid, by making 16 multiproofs -/// of size 64x64. So the `mp_grid_dims` would be 4x4, and the `grid_dims` size would be 256x256. -/// -/// In order to get `mp_grid_dims`, it's recommended to use the `multiproof_dims` function. +/// For example, a 256x256 grid could be converted to a 4x4 target size multiproof grid, by making 16 multiproofs +/// of size 64x64. pub fn multiproof_block( x: usize, y: usize, grid_dims: &Dimensions, - mp_grid_dims: &Dimensions, + target_dims: &Dimensions, ) -> Option { - if x >= mp_grid_dims.width() - || y >= mp_grid_dims.height() - || grid_dims.width() % mp_grid_dims.width() != 0 - || grid_dims.height() % mp_grid_dims.height() != 0 - { + let mp_grid_dims = multiproof_dims(grid_dims, target_dims)?; + if x >= mp_grid_dims.width() || y >= mp_grid_dims.height() { return None; } @@ -330,7 +325,7 @@ pub fn multiproof_block( }) } -/// Dimensions of the multiproof grid. These are guarenteed to cleanly divide `grid_dims`. +/// Dimensions of the multiproof grid. These are guarenteed to cleanly divide `grid_dims`. /// `target_dims` must cleanly divide `grid_dims`. 
pub fn multiproof_dims(grid_dims: &Dimensions, target_dims: &Dimensions) -> Option { let target_width = core::cmp::min(grid_dims.width(), target_dims.width()); @@ -423,6 +418,25 @@ mod tests { multiproof_block(x, y, &GRID, &TARGET) } + #[test_case(256, 256, 64, 16 => Some((64, 16)))] + #[test_case(256, 256, 32, 32 => Some((32, 32)))] + #[test_case(256, 256, 7, 32 => None)] + #[test_case(32 , 32, 32, 32 => Some((32, 32)))] + #[test_case(256, 8, 32, 32 => Some((32, 8)))] + #[test_case(4 , 1, 32, 32 => Some((4, 1)))] + fn test_multiproof_dims( + grid_w: usize, + grid_h: usize, + target_w: usize, + target_h: usize, + ) -> Option<(usize, usize)> { + multiproof_dims( + &Dimensions::new(grid_w, grid_h), + &Dimensions::new(target_w, target_h), + ) + .map(|i| (i.width(), i.height())) + } + use proptest::prelude::*; proptest! { #![proptest_config(ProptestConfig { From 7473727a9b4e6f4776e87ad4aada10e6add4c63c Mon Sep 17 00:00:00 2001 From: William Arnold Date: Fri, 17 Mar 2023 01:01:29 -0700 Subject: [PATCH 19/87] Attempt to clear out substrate deps from kate --- Cargo.lock | 71 +++++++++-- Cargo.toml | 5 + kate/Cargo.toml | 27 ++-- kate/src/com.rs | 29 +++-- kate/src/gridgen.rs | 2 +- kate/src/lib.rs | 4 +- kate/src/metrics.rs | 2 +- primitives/avail/Cargo.toml | 1 + primitives/avail/src/asdr.rs | 79 +----------- primitives/avail/src/lib.rs | 68 +--------- primitives/types/Cargo.toml | 23 ++++ .../src/asdr => types/src}/data_lookup.rs | 40 +++--- primitives/types/src/lib.rs | 120 ++++++++++++++++++ 13 files changed, 260 insertions(+), 211 deletions(-) create mode 100644 primitives/types/Cargo.toml rename primitives/{avail/src/asdr => types/src}/data_lookup.rs (72%) create mode 100644 primitives/types/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index e3681291..98f24e4c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -961,6 +961,7 @@ name = "da-primitives" version = "0.4.4" dependencies = [ "beefy-merkle-tree", + "da-types", "derive_more", "frame-support", 
"hash256-std-hasher", @@ -981,6 +982,19 @@ dependencies = [ "thiserror-no-std", ] +[[package]] +name = "da-types" +version = "0.4.4" +dependencies = [ + "derive_more", + "num-traits", + "parity-scale-codec", + "parity-util-mem", + "scale-info", + "serde", + "sp-debug-derive 6.0.0", +] + [[package]] name = "der" version = "0.6.1" @@ -1434,7 +1448,7 @@ dependencies = [ "serde", "smallvec", "sp-api", - "sp-arithmetic", + "sp-arithmetic 6.0.0", "sp-core", "sp-core-hashing-proc-macro", "sp-inherents", @@ -1943,30 +1957,26 @@ name = "kate" version = "0.7.0" dependencies = [ "criterion", - "da-primitives", - "derive_more", + "da-types", "dusk-bytes", "dusk-plonk", - "frame-support", - "getrandom 0.2.8", "hex", "hex-literal", "itertools 0.10.5", "kate-grid", "kate-recovery", "log", - "num_cpus", "once_cell", "parity-scale-codec", "poly-multiproof", "proptest", "rand 0.8.5", "rand_chacha 0.3.1", - "rand_core 0.6.4", "rayon", "serde", "serde_json", - "sp-core", + "sp-arithmetic 7.0.0", + "sp-core-hashing", "sp-std 4.0.0", "static_assertions", "test-case", @@ -3360,6 +3370,22 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "sp-arithmetic" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6413ad82d166d40d995aa95ca6e0cbb473f973d3a2f0b433ae19813048c6c1" +dependencies = [ + "integer-sqrt", + "num-traits", + "parity-scale-codec", + "scale-info", + "serde", + "sp-debug-derive 6.0.0", + "sp-std 6.0.0", + "static_assertions", +] + [[package]] name = "sp-beefy" version = "4.0.0-dev" @@ -3407,7 +3433,7 @@ dependencies = [ "secrecy", "serde", "sp-core-hashing", - "sp-debug-derive", + "sp-debug-derive 5.0.0", "sp-externalities", "sp-runtime-interface", "sp-std 5.0.0", @@ -3454,6 +3480,17 @@ dependencies = [ "syn", ] +[[package]] +name = "sp-debug-derive" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fb9dc63d54de7d7bed62a505b6e0bd66c122525ea1abb348f6564717c3df2d" 
+dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "sp-externalities" version = "0.13.0" @@ -3532,7 +3569,7 @@ dependencies = [ "serde", "sp-api", "sp-core", - "sp-debug-derive", + "sp-debug-derive 5.0.0", "sp-runtime", "sp-std 5.0.0", "thiserror", @@ -3563,7 +3600,7 @@ dependencies = [ "scale-info", "serde", "sp-application-crypto", - "sp-arithmetic", + "sp-arithmetic 6.0.0", "sp-core", "sp-io", "sp-std 5.0.0", @@ -3643,6 +3680,12 @@ name = "sp-std" version = "5.0.0" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#946507ba9ef13e263534176b7b74e26fc56efbd4" +[[package]] +name = "sp-std" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af0ee286f98455272f64ac5bb1384ff21ac029fbb669afbaf48477faff12760e" + [[package]] name = "sp-storage" version = "7.0.0" @@ -3652,7 +3695,7 @@ dependencies = [ "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive", + "sp-debug-derive 5.0.0", "sp-std 5.0.0", ] @@ -3741,9 +3784,9 @@ dependencies = [ "scale-info", "serde", "smallvec", - "sp-arithmetic", + "sp-arithmetic 6.0.0", "sp-core", - "sp-debug-derive", + "sp-debug-derive 5.0.0", "sp-std 5.0.0", ] diff --git a/Cargo.toml b/Cargo.toml index 1e677318..b833ca61 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,7 @@ [workspace] members = [ "primitives/avail", + "primitives/types", "kate", "kate/recovery", "kate/grid", @@ -12,8 +13,12 @@ members = [ [patch.crates-io] # Substrate (polkadot-v0.9.37). 
sp-core = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-core-hashing = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-io = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-std = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-storage = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-debug-derive = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-arithmetic = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-runtime = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-trie = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-runtime-interface = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 49ac70af..0d11533e 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -8,26 +8,22 @@ edition = "2021" [dependencies] codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -da-primitives = { path = "../primitives/avail", default-features = false } -derive_more = "0.99.17" dusk-bytes = { version = "0.1.6", default-features = false, optional = true } dusk-plonk = { git = "https://github.com/maticnetwork/plonk", branch = "will/polynomial-visibility", optional = true } -frame-support = { version = "4.0.0-dev", default-features = false } -getrandom = { version = "0.2", features = ["js"], optional = true } -hex = { version = "0.4", default-features = false, features = ["alloc"] } +hex = { version = "0.4", default-features = false, features = ["alloc"], optional = true } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true 
} -num_cpus = { version = "1.13.0", optional = true } once_cell = { version = "1.8.0", optional = true } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } -rand_core = { version = "0.6", default-features = false } rayon = { version = "1.5.2", optional = true } serde = { version = "1.0.121", optional = true, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } +sp-arithmetic = { version = "7.0.0", default-features = false } sp-std = { version = "4.0.0", default-features = false } +sp-core-hashing = { version = "5.0.0", default-features = false, optional = true } static_assertions = "1.1.0" kate-grid = { path = "grid" } +da-types = { path = "../primitives/types" } poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "3345c76b84a14accb90020974a8cab837fe598a0" } [dev-dependencies] @@ -44,27 +40,28 @@ alloc = ["dusk-plonk/alloc"] std = [ "kate-recovery/std", + "hex", "hex/std", "once_cell", "codec/std", "alloc", "serde", - "num_cpus", "rayon", - "getrandom", "rand", "rand_chacha/std", "log", "dusk-plonk/std", "dusk-bytes", "sp-std/std", - "sp-core/std", - "getrandom/std", - "rand_core/std", - "frame-support/std", - "da-primitives/std", + "da-types/std", + "sp-arithmetic/std", + "sp-core-hashing", + "sp-core-hashing/std", "poly-multiproof/blst", ] +substrate = [ + "da-types/substrate", +] extended-columns = [] maximum-block-size = [] diff --git a/kate/src/com.rs b/kate/src/com.rs index 18f76412..c6a5113d 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -5,11 +5,7 @@ use std::{ }; use codec::Encode; -use da_primitives::{ - asdr::{AppExtrinsic, AppId}, - BlockLengthColumns, BlockLengthRows, -}; -use derive_more::Constructor; +use da_types::{AppId, AppExtrinsic, BlockLengthRows, BlockLengthColumns}; use dusk_bytes::Serializable; use dusk_plonk::{ commitment_scheme::kzg10, @@ -17,7 +13,6 @@ use 
dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, prelude::{BlsScalar, CommitKey}, }; -use frame_support::{ensure, sp_runtime::SaturatedConversion}; use kate_grid::{AsRowMajor, Extension}; #[cfg(feature = "std")] use kate_recovery::{com::app_specific_rows, index, matrix}; @@ -25,6 +20,7 @@ use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; use rayon::prelude::*; use serde::{Deserialize, Serialize}; +use sp_arithmetic::traits::SaturatedConversion; use static_assertions::const_assert_eq; use crate::{ @@ -38,12 +34,18 @@ use crate::{ #[cfg(feature = "std")] use kate_recovery::testnet; -#[derive(Serialize, Deserialize, Constructor, Clone, Copy, PartialEq, Eq, Debug)] +#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] pub struct Cell { pub row: BlockLengthRows, pub col: BlockLengthColumns, } +impl Cell { + pub fn new(row: BlockLengthRows, col: BlockLengthColumns) -> Self { + Cell { row, col } + } +} + #[derive(Debug)] pub enum Error { PlonkError(PlonkError), @@ -163,7 +165,9 @@ pub fn flatten_and_pad_block( // Determine the block size after padding let block_dims = get_block_dimensions(padded_block_len, max_rows, max_cols, chunk_size)?; - ensure!(padded_block.len() <= block_dims.size(), Error::BlockTooBig); + if !(padded_block.len() <= block_dims.size()) { + return Err(Error::BlockTooBig); + } let mut rng = ChaChaRng::from_seed(rng_seed); @@ -184,10 +188,9 @@ pub fn get_block_dimensions( chunk_size: u32, ) -> Result { let max_block_dimensions = BlockDimensions::new(max_rows, max_cols, chunk_size); - ensure!( - block_size as usize <= max_block_dimensions.size(), - Error::BlockTooBig - ); + if !(block_size as usize <= max_block_dimensions.size()) { + return Err(Error::BlockTooBig); + } if block_size as usize == max_block_dimensions.size() || MAXIMUM_BLOCK_SIZE { return Ok(max_block_dimensions); @@ -424,7 +427,7 @@ pub fn par_build_commitments( if log::log_enabled!(target: LOG_TARGET, log::Level::Debug) { let raw_pp = 
public_params.to_raw_var_bytes(); - let hash_pp = hex::encode(sp_core::blake2_128(&raw_pp)); + let hash_pp = hex::encode(sp_core_hashing::blake2_128(&raw_pp)); let hex_pp = hex::encode(raw_pp); log::debug!( target: LOG_TARGET, diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index b9c1da73..923db305 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -1,7 +1,7 @@ use core::marker::PhantomData; use codec::Encode; -use da_primitives::asdr::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; +use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; use dusk_bytes::Serializable; use dusk_plonk::{ commitment_scheme::kzg10::commitment::Commitment, diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 552fb64c..9d3b6192 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -1,9 +1,9 @@ #![cfg_attr(not(feature = "std"), no_std)] -use da_primitives::{BlockLengthColumns, BlockLengthRows}; +use da_types::{BlockLengthColumns, BlockLengthRows}; #[cfg(feature = "std")] pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsScalar}; -use frame_support::sp_runtime::SaturatedConversion; +use sp_arithmetic::traits::SaturatedConversion; #[cfg(feature = "std")] use kate_recovery::matrix::Dimensions; use static_assertions::const_assert_ne; diff --git a/kate/src/metrics.rs b/kate/src/metrics.rs index b823e2be..ea3e74a9 100644 --- a/kate/src/metrics.rs +++ b/kate/src/metrics.rs @@ -1,5 +1,5 @@ use crate::BlockDimensions; -use sp_std::time::Duration; +use core::time::Duration; /// Trait for measurements during the header built process. 
pub trait Metrics { diff --git a/primitives/avail/Cargo.toml b/primitives/avail/Cargo.toml index f2db1d41..d3a148fc 100644 --- a/primitives/avail/Cargo.toml +++ b/primitives/avail/Cargo.toml @@ -11,6 +11,7 @@ log = { version = "0.4.8", default-features = false } serde = { version = "1.0.121", optional = true, features = ["derive"] } serde_json = { version = "1.0", optional = true } thiserror-no-std = "2.0.2" +da-types = { path = "../types", features = ["substrate"] } # Substrate beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false } diff --git a/primitives/avail/src/asdr.rs b/primitives/avail/src/asdr.rs index c058f661..dab43d13 100644 --- a/primitives/avail/src/asdr.rs +++ b/primitives/avail/src/asdr.rs @@ -1,16 +1,4 @@ -use codec::{Decode, Encode, MaxEncodedLen}; -use derive_more::{Add, Deref, Display, From, Into}; -use frame_support::RuntimeDebug; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_runtime::traits::Zero; -use sp_std::vec::Vec; - -mod data_lookup; -pub use data_lookup::*; +pub use da_types::{AppId, AppExtrinsic, DataLookup, DataLookupIndexItem}; mod get_app_id; pub use get_app_id::*; @@ -18,68 +6,3 @@ pub use get_app_id::*; mod app_unchecked_extrinsic; pub use app_unchecked_extrinsic::*; -#[derive( - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - Add, - From, - Deref, - TypeInfo, - RuntimeDebug, - Encode, - Decode, - Display, - Into, - Default, - MaxEncodedLen, -)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct AppId(#[codec(compact)] pub u32); - -#[cfg(feature = "std")] -impl MallocSizeOf for AppId { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - self.0.size_of(ops) - } -} - -impl Zero for AppId { - fn zero() -> Self { - 0u32.into() - } - - fn is_zero(&self) -> bool { - self.0 == 0u32 - } - - fn 
set_zero(&mut self) { - self.0 = 0u32; - } -} - -/// Raw Extrinsic with application id. -#[derive(Clone, TypeInfo, RuntimeDebug, Default, Encode, Decode)] -pub struct AppExtrinsic { - pub app_id: AppId, - pub data: Vec, -} - -impl From> for AppExtrinsic { - #[inline] - fn from(data: Vec) -> Self { - Self { - data, - app_id: <_>::default(), - } - } -} - -impl GetAppId for AppExtrinsic { - fn app_id(&self) -> AppId { - self.app_id - } -} diff --git a/primitives/avail/src/lib.rs b/primitives/avail/src/lib.rs index 13e3d888..562c047e 100644 --- a/primitives/avail/src/lib.rs +++ b/primitives/avail/src/lib.rs @@ -1,11 +1,7 @@ #![cfg_attr(not(feature = "std"), no_std)] -use codec::{Decode, Encode, MaxEncodedLen}; -use derive_more::{Add, Constructor, Display, From, Into, Mul}; -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; use sp_runtime::Perbill; +pub use da_types::{BlockLengthColumns, BlockLengthRows}; /// Customized headers. pub mod header; @@ -80,65 +76,3 @@ where ) } } - -/// Strong type for `BlockLength::cols` -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive( - Clone, - Copy, - Debug, - From, - Into, - Add, - Mul, - Display, - PartialEq, - Eq, - Encode, - Decode, - TypeInfo, - PartialOrd, - Ord, - Constructor, - MaxEncodedLen, -)] -#[mul(forward)] -pub struct BlockLengthColumns(#[codec(compact)] pub u32); - -impl BlockLengthColumns { - #[inline] - pub fn as_usize(&self) -> usize { - self.0 as usize - } -} - -/// Strong type for `BlockLength::rows` -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive( - Clone, - Copy, - Debug, - From, - Into, - Add, - Mul, - Display, - PartialEq, - Eq, - Encode, - Decode, - TypeInfo, - PartialOrd, - Ord, - Constructor, - MaxEncodedLen, -)] -#[mul(forward)] -pub struct BlockLengthRows(#[codec(compact)] pub u32); - -impl BlockLengthRows { - #[inline] - pub fn as_usize(&self) -> usize { - self.0 as usize - } -} diff --git a/primitives/types/Cargo.toml 
b/primitives/types/Cargo.toml new file mode 100644 index 00000000..418c6967 --- /dev/null +++ b/primitives/types/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "da-types" +version = "0.4.4" +authors = [] +edition = "2018" + +[dependencies] +parity-scale-codec = { version = "3", default-features = false } +scale-info = { version = "2.3.1"} +derive_more = "0.99.17" +num-traits = "0.2" + +serde = { version = "1.0", features = ["derive"], optional = true } + +sp-debug-derive = { version = "*", default-features = false, optional = true } +parity-util-mem = { version = "*", default-features = false, optional = true } + +[dev-dependencies] + +[features] +default = ["std"] +std = [ "serde" ] +substrate = ["sp-debug-derive", "parity-util-mem"] diff --git a/primitives/avail/src/asdr/data_lookup.rs b/primitives/types/src/data_lookup.rs similarity index 72% rename from primitives/avail/src/asdr/data_lookup.rs rename to primitives/types/src/data_lookup.rs index 64f4181b..9aa9977e 100644 --- a/primitives/avail/src/asdr/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -1,17 +1,12 @@ -use codec::{Decode, Encode}; -use frame_support::ensure; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; +use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::RuntimeDebug; -use sp_runtime::traits::Zero; -use sp_std::{convert::TryFrom, vec::Vec}; -use crate::asdr::AppId; +use crate::AppId; -#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, Default, TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, Default, TypeInfo)] +#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookup { /// size of the look up @@ -21,7 +16,8 @@ pub struct DataLookup { pub index: Vec, } -#[derive(PartialEq, Eq, Copy, Clone, RuntimeDebug, Encode, Decode, Default, TypeInfo)] 
+#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo)] +#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookupIndexItem { pub app_id: AppId, @@ -42,14 +38,15 @@ where } } -#[cfg(feature = "std")] -impl MallocSizeOf for DataLookupIndexItem { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { +#[cfg(all(feature = "std", feature = "substrate"))] +impl parity_util_mem::MallocSizeOf for DataLookupIndexItem { + fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { self.app_id.size_of(ops) + self.start.size_of(ops) } } -#[derive(PartialEq, Eq, RuntimeDebug)] +#[derive(PartialEq, Eq)] +#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] /// Errors during the creation from `extrinsics`. pub enum TryFromError { /// Size overflows @@ -58,6 +55,7 @@ pub enum TryFromError { UnsortedExtrinsics, } +use core::convert::TryFrom; impl TryFrom<&[(AppId, u32)]> for DataLookup { type Error = TryFromError; @@ -66,10 +64,10 @@ impl TryFrom<&[(AppId, u32)]> for DataLookup { // transactions are order by application id // skip transactions with 0 application id - it's not a data txs let mut size = 0u32; - let mut prev_app_id = Zero::zero(); + let mut prev_app_id = AppId(0); for (app_id, data_len) in extrinsics { - if !app_id.is_zero() && prev_app_id != *app_id { + if !app_id.0 == 0 && prev_app_id != *app_id { index.push(DataLookupIndexItem { app_id: *app_id, start: size, @@ -79,7 +77,9 @@ impl TryFrom<&[(AppId, u32)]> for DataLookup { size = size .checked_add(*data_len) .ok_or(Self::Error::SizeOverflow)?; - ensure!(prev_app_id <= *app_id, Self::Error::UnsortedExtrinsics); + if !(prev_app_id <= *app_id) { + return Err(Self::Error::UnsortedExtrinsics); + } prev_app_id = *app_id; } @@ -87,9 +87,9 @@ impl TryFrom<&[(AppId, u32)]> for DataLookup { } } -#[cfg(feature = "std")] -impl MallocSizeOf for DataLookup { - fn size_of(&self, 
ops: &mut MallocSizeOfOps) -> usize { +#[cfg(all(feature = "std", feature = "substrate"))] +impl parity_util_mem::MallocSizeOf for DataLookup { + fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { self.size.size_of(ops) + self.index.size_of(ops) } } diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs new file mode 100644 index 00000000..b7604577 --- /dev/null +++ b/primitives/types/src/lib.rs @@ -0,0 +1,120 @@ +use derive_more::{Add, Constructor, Deref, Display, From, Into, Mul}; +use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +mod data_lookup; +pub use data_lookup::*; + +/// Raw Extrinsic with application id. +#[derive(Clone, TypeInfo, Default, Encode, Decode)] +#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] +pub struct AppExtrinsic { + pub app_id: AppId, + pub data: Vec, +} + +#[derive( + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Add, + From, + Deref, + TypeInfo, + Encode, + Decode, + Display, + Into, + Default, + MaxEncodedLen, +)] +#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct AppId(#[codec(compact)] pub u32); + +impl num_traits::Zero for AppId { + fn zero() -> Self { + AppId(num_traits::Zero::zero()) + } + + fn is_zero(&self) -> bool { + self.0.is_zero() + } +} + +impl From> for AppExtrinsic { + #[inline] + fn from(data: Vec) -> Self { + Self { + data, + app_id: <_>::default(), + } + } +} + +/// Strong type for `BlockLength::cols` +#[derive( + Clone, + Copy, + Debug, + From, + Into, + Add, + Mul, + Display, + PartialEq, + Eq, + Encode, + Decode, + TypeInfo, + PartialOrd, + Ord, + Constructor, + MaxEncodedLen, +)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[mul(forward)] +pub struct BlockLengthColumns(#[codec(compact)] pub u32); + +impl BlockLengthColumns { + 
#[inline] + pub fn as_usize(&self) -> usize { + self.0 as usize + } +} + +/// Strong type for `BlockLength::rows` +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive( + Clone, + Copy, + Debug, + From, + Into, + Add, + Mul, + Display, + PartialEq, + Eq, + Encode, + Decode, + TypeInfo, + PartialOrd, + Ord, + Constructor, + MaxEncodedLen, +)] +#[mul(forward)] +pub struct BlockLengthRows(#[codec(compact)] pub u32); + +impl BlockLengthRows { + #[inline] + pub fn as_usize(&self) -> usize { + self.0 as usize + } +} From 1540600b36896eebd64070839e5558fb7e72ada9 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 19 Mar 2023 20:10:01 -0700 Subject: [PATCH 20/87] Fix toml formatting, some feature defs --- Cargo.lock | 1 + kate/Cargo.toml | 21 +++++++++++---------- primitives/avail/Cargo.toml | 4 ++-- primitives/types/Cargo.toml | 13 ++++++++----- primitives/types/src/data_lookup.rs | 2 +- 5 files changed, 23 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 98f24e4c..9ff94b1a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1960,6 +1960,7 @@ dependencies = [ "da-types", "dusk-bytes", "dusk-plonk", + "getrandom 0.2.8", "hex", "hex-literal", "itertools 0.10.5", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 0d11533e..ec0354ec 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -8,23 +8,24 @@ edition = "2021" [dependencies] codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +da-types = { path = "../primitives/types", default-features = false } dusk-bytes = { version = "0.1.6", default-features = false, optional = true } dusk-plonk = { git = "https://github.com/maticnetwork/plonk", branch = "will/polynomial-visibility", optional = true } +getrandom = { version = "0.2.8", default-features = false, features = ["js"] } hex = { version = "0.4", default-features = false, features = ["alloc"], optional = true } +kate-grid = { path = "grid" } kate-recovery = { path = "recovery", 
default-features = false, optional = true } log = { version = "0.4.8", optional = true } once_cell = { version = "1.8.0", optional = true } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "3345c76b84a14accb90020974a8cab837fe598a0" } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } serde = { version = "1.0.121", optional = true, features = ["derive"] } sp-arithmetic = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0", default-features = false } sp-core-hashing = { version = "5.0.0", default-features = false, optional = true } +sp-std = { version = "4.0.0", default-features = false } static_assertions = "1.1.0" -kate-grid = { path = "grid" } -da-types = { path = "../primitives/types" } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "3345c76b84a14accb90020974a8cab837fe598a0" } [dev-dependencies] criterion = "0.3.5" @@ -40,7 +41,7 @@ alloc = ["dusk-plonk/alloc"] std = [ "kate-recovery/std", - "hex", + "hex", "hex/std", "once_cell", "codec/std", @@ -54,13 +55,13 @@ std = [ "dusk-bytes", "sp-std/std", "da-types/std", - "sp-arithmetic/std", - "sp-core-hashing", - "sp-core-hashing/std", - "poly-multiproof/blst", + "sp-arithmetic/std", + "sp-core-hashing", + "sp-core-hashing/std", + "poly-multiproof/blst", ] substrate = [ - "da-types/substrate", + "da-types/substrate", ] extended-columns = [] maximum-block-size = [] diff --git a/primitives/avail/Cargo.toml b/primitives/avail/Cargo.toml index d3a148fc..7987ba18 100644 --- a/primitives/avail/Cargo.toml +++ b/primitives/avail/Cargo.toml @@ -6,12 +6,12 @@ edition = "2018" [dependencies] # Others +da-types = { path = "../types", default-features = false, features = ["substrate"] } derive_more = "0.99.17" log = { version = "0.4.8", default-features 
= false } serde = { version = "1.0.121", optional = true, features = ["derive"] } serde_json = { version = "1.0", optional = true } thiserror-no-std = "2.0.2" -da-types = { path = "../types", features = ["substrate"] } # Substrate beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false } @@ -23,7 +23,7 @@ scale-info = { version = "2.1.1", default-features = false, features = ["derive" sp-core = { version = "7.0.0", default-features = false } sp-io = { version = "7.0.0", default-features = false } sp-runtime = { version = "7.0.0", default-features = false } -sp-runtime-interface = { version = "7.0.0", default-features = false } +sp-runtime-interface = { version = "7.0.0", default-features = false, features = ["disable_target_static_assertions"] } sp-std = { version = "4.0.0", default-features = false } sp-trie = { version = "7.0.0", default-features = false } diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml index 418c6967..f5eef933 100644 --- a/primitives/types/Cargo.toml +++ b/primitives/types/Cargo.toml @@ -5,19 +5,22 @@ authors = [] edition = "2018" [dependencies] -parity-scale-codec = { version = "3", default-features = false } -scale-info = { version = "2.3.1"} derive_more = "0.99.17" -num-traits = "0.2" +num-traits = { version = "0.2", default-features = false } +parity-scale-codec = { version = "3", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "2.3.1", default-features = false, features = ["derive"] } serde = { version = "1.0", features = ["derive"], optional = true } -sp-debug-derive = { version = "*", default-features = false, optional = true } parity-util-mem = { version = "*", default-features = false, optional = true } +sp-debug-derive = { version = "*", default-features = false, optional = true } [dev-dependencies] [features] default = ["std"] -std = [ "serde" ] +std = ["serde", "parity-scale-codec/std", 
"scale-info/std", "num-traits/std"] substrate = ["sp-debug-derive", "parity-util-mem"] + +[package.metadata.cargo-all-features] +skip_optional_dependencies = true diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index 9aa9977e..f8dccb09 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -48,7 +48,7 @@ impl parity_util_mem::MallocSizeOf for DataLookupIndexItem { #[derive(PartialEq, Eq)] #[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] /// Errors during the creation from `extrinsics`. -pub enum TryFromError { +pub enum TryFromError{ /// Size overflows SizeOverflow, /// Extrinsics are not sorted. From 30bbd77c4ec4a6f552b1cd890df7e33f573cbd77 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Tue, 21 Mar 2023 18:38:21 -0700 Subject: [PATCH 21/87] Formatting --- kate/grid/src/grid.rs | 12 +++++++----- kate/src/com.rs | 8 ++++---- kate/src/lib.rs | 2 +- primitives/avail/src/asdr.rs | 3 +-- primitives/avail/src/lib.rs | 2 +- primitives/types/Cargo.toml | 3 --- primitives/types/src/data_lookup.rs | 2 +- 7 files changed, 15 insertions(+), 17 deletions(-) diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index 826292ad..9e8d14a6 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -91,7 +91,9 @@ impl RowMajor { if x >= self.width() { return None; } - Some((0..self.height()).map(move |y| self.get(x, y).expect("Size checked at instantiation"))) + Some( + (0..self.height()).map(move |y| self.get(x, y).expect("Size checked at instantiation")), + ) } pub fn rows(&self) -> impl Iterator + '_ { @@ -172,7 +174,7 @@ impl AsRowMajor for Vec { fn as_row_major(self, width: usize, height: usize) -> Option> { if self.len() == width * height { Some(RowMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width, height), inner: self, }) } else { @@ -185,7 +187,7 @@ impl AsColumnMajor for Vec { fn as_column_major(self, width: usize, height: usize) 
-> Option> { if self.len() == width * height { Some(ColumnMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width, height), inner: self, }) } else { @@ -198,7 +200,7 @@ impl AsColumnMajor for [A; LEN] { fn as_column_major(self, width: usize, height: usize) -> Option> { if self.len() == width * height { Some(ColumnMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width, height), inner: self.into(), }) } else { @@ -211,7 +213,7 @@ impl AsRowMajor for [A; LEN] { fn as_row_major(self, width: usize, height: usize) -> Option> { if self.len() == width * height { Some(RowMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width, height), inner: self.into(), }) } else { diff --git a/kate/src/com.rs b/kate/src/com.rs index c6a5113d..57cdfe79 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -5,7 +5,7 @@ use std::{ }; use codec::Encode; -use da_types::{AppId, AppExtrinsic, BlockLengthRows, BlockLengthColumns}; +use da_types::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; use dusk_bytes::Serializable; use dusk_plonk::{ commitment_scheme::kzg10, @@ -41,9 +41,9 @@ pub struct Cell { } impl Cell { - pub fn new(row: BlockLengthRows, col: BlockLengthColumns) -> Self { - Cell { row, col } - } + pub fn new(row: BlockLengthRows, col: BlockLengthColumns) -> Self { + Cell { row, col } + } } #[derive(Debug)] diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 9d3b6192..c82877ee 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -3,9 +3,9 @@ use da_types::{BlockLengthColumns, BlockLengthRows}; #[cfg(feature = "std")] pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsScalar}; -use sp_arithmetic::traits::SaturatedConversion; #[cfg(feature = "std")] use kate_recovery::matrix::Dimensions; +use sp_arithmetic::traits::SaturatedConversion; use static_assertions::const_assert_ne; use crate::config::DATA_CHUNK_SIZE; diff --git a/primitives/avail/src/asdr.rs b/primitives/avail/src/asdr.rs 
index dab43d13..4fd97718 100644 --- a/primitives/avail/src/asdr.rs +++ b/primitives/avail/src/asdr.rs @@ -1,8 +1,7 @@ -pub use da_types::{AppId, AppExtrinsic, DataLookup, DataLookupIndexItem}; +pub use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; mod get_app_id; pub use get_app_id::*; mod app_unchecked_extrinsic; pub use app_unchecked_extrinsic::*; - diff --git a/primitives/avail/src/lib.rs b/primitives/avail/src/lib.rs index 562c047e..8c9fee50 100644 --- a/primitives/avail/src/lib.rs +++ b/primitives/avail/src/lib.rs @@ -1,7 +1,7 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_runtime::Perbill; pub use da_types::{BlockLengthColumns, BlockLengthRows}; +use sp_runtime::Perbill; /// Customized headers. pub mod header; diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml index f5eef933..e52c5eea 100644 --- a/primitives/types/Cargo.toml +++ b/primitives/types/Cargo.toml @@ -21,6 +21,3 @@ sp-debug-derive = { version = "*", default-features = false, optional = true } default = ["std"] std = ["serde", "parity-scale-codec/std", "scale-info/std", "num-traits/std"] substrate = ["sp-debug-derive", "parity-util-mem"] - -[package.metadata.cargo-all-features] -skip_optional_dependencies = true diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index f8dccb09..9aa9977e 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -48,7 +48,7 @@ impl parity_util_mem::MallocSizeOf for DataLookupIndexItem { #[derive(PartialEq, Eq)] #[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] /// Errors during the creation from `extrinsics`. -pub enum TryFromError{ +pub enum TryFromError { /// Size overflows SizeOverflow, /// Extrinsics are not sorted. 
From f046d6706aaf2419e8ae7fcde29ea75219e33ebf Mon Sep 17 00:00:00 2001 From: William Arnold Date: Tue, 21 Mar 2023 19:05:06 -0700 Subject: [PATCH 22/87] Fix test compilation errors due to new `da-types` --- kate/src/com.rs | 2 +- kate/src/gridgen.rs | 2 +- kate/src/lib.rs | 2 +- primitives/types/src/data_lookup.rs | 6 +++++- primitives/types/src/lib.rs | 3 +++ 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/kate/src/com.rs b/kate/src/com.rs index 57cdfe79..c8f051a7 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -505,7 +505,7 @@ fn commit( mod tests { use std::{convert::TryInto, iter::repeat, str::from_utf8}; - use da_primitives::asdr::AppExtrinsic; + use da_types::AppExtrinsic; use dusk_bytes::Serializable; use dusk_plonk::bls12_381::BlsScalar; use hex_literal::hex; diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index 923db305..85dfd366 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -503,7 +503,7 @@ mod consistency_tests { use rand::prelude::Distribution; fn pp() -> PublicParameters { - testnet::public_params(da_primitives::BlockLengthColumns(256)) + testnet::public_params(da_types::BlockLengthColumns(256)) } #[test] diff --git a/kate/src/lib.rs b/kate/src/lib.rs index c82877ee..3cd10649 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -148,7 +148,7 @@ pub mod testnet { let pmp_ev = GeneralEvaluationDomain::::new(1024).unwrap(); let pmp_poly = pmp_ev.ifft(&pmp_evals); - let pubs = testnet::public_params(da_primitives::BlockLengthColumns(1024)); + let pubs = testnet::public_params(da_types::BlockLengthColumns(1024)); let dp_commit = pubs.commit_key().commit(&dp_poly).unwrap().0.to_bytes(); let mut pmp_commit = [0u8; 48]; diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index 9aa9977e..b689cb75 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -1,3 +1,4 @@ +use num_traits::Zero; use parity_scale_codec::{Decode, Encode}; use 
scale_info::TypeInfo; #[cfg(feature = "std")] @@ -7,6 +8,7 @@ use crate::AppId; #[derive(PartialEq, Eq, Clone, Encode, Decode, Default, TypeInfo)] #[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] +#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookup { /// size of the look up @@ -18,6 +20,7 @@ pub struct DataLookup { #[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo)] #[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] +#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookupIndexItem { pub app_id: AppId, @@ -47,6 +50,7 @@ impl parity_util_mem::MallocSizeOf for DataLookupIndexItem { #[derive(PartialEq, Eq)] #[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] +#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] /// Errors during the creation from `extrinsics`. pub enum TryFromError { /// Size overflows @@ -67,7 +71,7 @@ impl TryFrom<&[(AppId, u32)]> for DataLookup { let mut prev_app_id = AppId(0); for (app_id, data_len) in extrinsics { - if !app_id.0 == 0 && prev_app_id != *app_id { + if !app_id.is_zero() && prev_app_id != *app_id { index.push(DataLookupIndexItem { app_id: *app_id, start: size, diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs index b7604577..f850d4dc 100644 --- a/primitives/types/src/lib.rs +++ b/primitives/types/src/lib.rs @@ -10,6 +10,8 @@ pub use data_lookup::*; /// Raw Extrinsic with application id. 
#[derive(Clone, TypeInfo, Default, Encode, Decode)] #[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] pub struct AppExtrinsic { pub app_id: AppId, pub data: Vec, @@ -35,6 +37,7 @@ pub struct AppExtrinsic { )] #[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] pub struct AppId(#[codec(compact)] pub u32); impl num_traits::Zero for AppId { From 30d6443c4a61a0696dd52dd00d369ad8efa96e55 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Tue, 21 Mar 2023 21:47:06 -0700 Subject: [PATCH 23/87] Strong arithmetic checks --- kate/grid/src/dims.rs | 104 ++++++++++++++------ kate/grid/src/grid.rs | 81 ++++++++------- kate/grid/src/lib.rs | 1 + kate/src/com.rs | 69 +++++++++---- kate/src/gridgen.rs | 146 ++++++++++++++-------------- kate/src/lib.rs | 10 +- primitives/types/src/data_lookup.rs | 2 +- rust-toolchain.toml | 2 +- 8 files changed, 251 insertions(+), 164 deletions(-) diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index f23d565f..71c2c97a 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -1,8 +1,54 @@ +use core::num::NonZeroUsize; + /// The dimensions of a grid #[derive(Debug, Clone, PartialEq, Eq)] pub struct Dimensions { - width: usize, - height: usize, + width: NonZeroUsize, + height: NonZeroUsize, +} + +impl Dimensions { + pub const fn new(width: NonZeroUsize, height: NonZeroUsize) -> Self { + Dimensions { width, height } + } + + pub const fn new_unchecked(width: usize, height: usize) -> Self { + Self { + width: nonzero_unchecked(width), + height: nonzero_unchecked(height), + } + } + + pub fn width(&self) -> usize { + self.width.get() + } + + pub fn width_nz(&self) -> NonZeroUsize { + self.width + } + + pub fn 
height(&self) -> usize { + self.height.get() + } + + pub fn height_nz(&self) -> NonZeroUsize { + self.height + } + + pub fn n_cells(&self) -> usize { + self.width.saturating_mul(self.height).get() + } + + pub fn divides(&self, other: &Self) -> bool { + other.width.get() % self.width == 0 && other.height.get() % self.height == 0 + } + + pub fn extend(&self, e: Extension) -> Self { + Self { + width: e.width_factor.saturating_mul(self.width), + height: e.height_factor.saturating_mul(self.height), + } + } } /// The ways a set of dimensions can be extended @@ -11,54 +57,48 @@ pub struct Extension { /// This means extending the height of the grid by some factor. /// `2` would mean doubling the grid upwards, increasing the height by a factor of /// 2 and multiplying the number of rows by 2 - pub height_factor: usize, + pub height_factor: NonZeroUsize, /// This means extending the width of the grid by some factor. /// `2` would mean doubling the grid sideways, increasing the width by a factor of /// 2 and multiplying the number of columns by 2 - pub width_factor: usize, + pub width_factor: NonZeroUsize, } impl Extension { - pub fn height(factor: usize) -> Self { + pub const fn height(factor: NonZeroUsize) -> Self { Self { height_factor: factor, - width_factor: 1, + width_factor: nonzero_unchecked(1), } } - pub fn width(factor: usize) -> Self { + pub const fn height_unchecked(factor: usize) -> Self { Self { - height_factor: 1, - width_factor: factor, + height_factor: nonzero_unchecked(factor), + width_factor: nonzero_unchecked(1), } } -} - -impl Dimensions { - pub const fn new(width: usize, height: usize) -> Self { - Dimensions { width, height } - } - - pub fn width(&self) -> usize { - self.width - } - pub fn height(&self) -> usize { - self.height - } - - pub fn n_cells(&self) -> usize { - self.width * self.height - } - - pub fn divides(&self, other: &Self) -> bool { - other.width() % self.width() == 0 && other.height() % self.height() == 0 + pub const fn width(factor: 
NonZeroUsize) -> Self { + Self { + height_factor: nonzero_unchecked(1), + width_factor: factor, + } } - pub fn extend(&self, e: Extension) -> Self { + pub const fn width_unchecked(factor: usize) -> Self { Self { - width: e.width_factor as usize * self.width, - height: e.height_factor as usize * self.height, + height_factor: nonzero_unchecked(1), + width_factor: nonzero_unchecked(factor), } + } +} + +#[allow(unconditional_panic)] +const fn nonzero_unchecked(a: usize) -> NonZeroUsize { + // Hack to get around not being able to unwrap in a const context + match NonZeroUsize::new(a) { + Some(a) => a, + None => [][0], } } diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index 9e8d14a6..5802cc98 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -6,6 +6,7 @@ pub trait Grid { fn width(&self) -> usize; fn height(&self) -> usize; fn dims(&self) -> &Dimensions; + fn inner(&self) -> &Vec; // x indexes within a row, y indexes within a column // 0 <= x < width, 0 <= y < height fn get(&self, x: usize, y: usize) -> Option<&A> { @@ -19,12 +20,12 @@ pub trait Grid { pub struct RowMajor { dims: Dimensions, - pub inner: Vec, + inner: Vec, } pub struct ColumnMajor { dims: Dimensions, - pub inner: Vec, + inner: Vec, } impl Grid for RowMajor { @@ -45,11 +46,15 @@ impl Grid for RowMajor { } fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - (i % dims.width(), i / dims.width()) + (i % dims.width_nz(), i / dims.width_nz()) } fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - x + y * dims.width() + x.saturating_add(y.saturating_mul(dims.width())) + } + + fn inner(&self) -> &Vec { + &self.inner } } @@ -71,11 +76,15 @@ impl Grid for ColumnMajor { } fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - (i / dims.height(), i % dims.height()) + (i / dims.height_nz(), i % dims.height_nz()) } fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - y + x * dims.height() + 
y.saturating_add(x.saturating_mul(dims.height())) + } + + fn inner(&self) -> &Vec { + &self.inner } } @@ -84,16 +93,16 @@ impl RowMajor { if y >= self.height() { return None; } - Some(&self.inner[(y * self.width())..((y + 1) * self.width())]) + let start = y.checked_mul(self.width())?; + let end = y.checked_add(1)?.checked_mul(self.width())?; + Some(&self.inner[start..end]) } pub fn iter_col(&self, x: usize) -> Option + '_> { if x >= self.width() { return None; } - Some( - (0..self.height()).map(move |y| self.get(x, y).expect("Size checked at instantiation")), - ) + Some((0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked"))) } pub fn rows(&self) -> impl Iterator + '_ { @@ -121,7 +130,7 @@ impl RowMajor { self.iter_column_wise() .map(Clone::clone) .collect::>() - .as_column_major(self.width(), self.height()) + .into_column_major(self.width(), self.height()) .expect("Bounds already checked") } } @@ -131,7 +140,9 @@ impl ColumnMajor { if x >= self.width() { return None; } - Some(&self.inner[(x * self.height())..((x + 1) * self.height())]) + let start = x.checked_mul(self.height())?; + let end = x.checked_add(1)?.checked_mul(self.height())?; + Some(&self.inner[start..end]) } pub fn iter_row(&self, y: usize) -> Option + '_> { @@ -157,24 +168,24 @@ impl ColumnMajor { self.iter_row_wise() .map(Clone::clone) .collect::>() - .as_row_major(self.width(), self.height()) + .into_row_major(self.width(), self.height()) .expect("Bounds already checked") } } -pub trait AsRowMajor { - fn as_row_major(self, width: usize, height: usize) -> Option>; +pub trait IntoRowMajor { + fn into_row_major(self, width: usize, height: usize) -> Option>; } -pub trait AsColumnMajor { - fn as_column_major(self, width: usize, height: usize) -> Option>; +pub trait IntoColumnMajor { + fn into_column_major(self, width: usize, height: usize) -> Option>; } -impl AsRowMajor for Vec { - fn as_row_major(self, width: usize, height: usize) -> Option> { - if self.len() == width * height 
{ +impl IntoRowMajor for Vec { + fn into_row_major(self, width: usize, height: usize) -> Option> { + if self.len() == usize::checked_mul(width, height)? { Some(RowMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), inner: self, }) } else { @@ -183,11 +194,11 @@ impl AsRowMajor for Vec { } } -impl AsColumnMajor for Vec { - fn as_column_major(self, width: usize, height: usize) -> Option> { - if self.len() == width * height { +impl IntoColumnMajor for Vec { + fn into_column_major(self, width: usize, height: usize) -> Option> { + if self.len() == width.checked_mul(height)? { Some(ColumnMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), inner: self, }) } else { @@ -196,11 +207,11 @@ impl AsColumnMajor for Vec { } } -impl AsColumnMajor for [A; LEN] { - fn as_column_major(self, width: usize, height: usize) -> Option> { - if self.len() == width * height { +impl IntoColumnMajor for [A; LEN] { + fn into_column_major(self, width: usize, height: usize) -> Option> { + if self.len() == width.checked_mul(height)? { Some(ColumnMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), inner: self.into(), }) } else { @@ -209,11 +220,11 @@ impl AsColumnMajor for [A; LEN] { } } -impl AsRowMajor for [A; LEN] { - fn as_row_major(self, width: usize, height: usize) -> Option> { - if self.len() == width * height { +impl IntoRowMajor for [A; LEN] { + fn into_row_major(self, width: usize, height: usize) -> Option> { + if self.len() == width.checked_mul(height)? 
{ Some(RowMajor { - dims: Dimensions::new(width, height), + dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), inner: self.into(), }) } else { @@ -230,7 +241,7 @@ mod tests { #[test] fn test_row_major() { let data = [1, 2, 3, 4, 5, 6]; - let rm = data.as_row_major(3, 2).unwrap(); + let rm = data.into_row_major(3, 2).unwrap(); assert_eq!(rm.get(0, 0), Some(&1)); assert_eq!(rm.get(1, 0), Some(&2)); @@ -249,7 +260,7 @@ mod tests { #[test] fn test_column_major() { let data = [1, 4, 2, 5, 3, 6]; - let cm = data.as_column_major(3, 2).unwrap(); + let cm = data.into_column_major(3, 2).unwrap(); assert_eq!(cm.get(0, 0), Some(&1)); assert_eq!(cm.get(1, 0), Some(&2)); diff --git a/kate/grid/src/lib.rs b/kate/grid/src/lib.rs index f507f19c..0cf77e61 100644 --- a/kate/grid/src/lib.rs +++ b/kate/grid/src/lib.rs @@ -1,4 +1,5 @@ #![no_std] +#![deny(clippy::integer_arithmetic)] //! Nice grid API, dealing with grids of different sizes and different orders //! (column-major/row-major) diff --git a/kate/src/com.rs b/kate/src/com.rs index c8f051a7..6fca78fa 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -1,3 +1,4 @@ +use core::num::{NonZeroU32, NonZeroUsize}; use std::{ convert::{TryFrom, TryInto}, mem::size_of, @@ -13,7 +14,7 @@ use dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, prelude::{BlsScalar, CommitKey}, }; -use kate_grid::{AsRowMajor, Extension}; +use kate_grid::{Dimensions, IntoRowMajor}; #[cfg(feature = "std")] use kate_recovery::{com::app_specific_rows, index, matrix}; use rand::{Rng, SeedableRng}; @@ -25,8 +26,8 @@ use static_assertions::const_assert_eq; use crate::{ config::{ - DATA_CHUNK_SIZE, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, PROOF_SIZE, - PROVER_KEY_SIZE, SCALAR_SIZE, + DATA_CHUNK_SIZE, EXTENSION, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, + PROOF_SIZE, PROVER_KEY_SIZE, SCALAR_SIZE, }, metrics::Metrics, padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, LOG_TARGET, @@ -54,6 +55,7 @@ pub enum 
Error { BlockTooBig, InvalidChunkLength, DimensionsMismatch, + ZeroDimension, } impl From for Error { @@ -165,15 +167,23 @@ pub fn flatten_and_pad_block( // Determine the block size after padding let block_dims = get_block_dimensions(padded_block_len, max_rows, max_cols, chunk_size)?; - if !(padded_block.len() <= block_dims.size()) { + if padded_block.len() > block_dims.size() { return Err(Error::BlockTooBig); } let mut rng = ChaChaRng::from_seed(rng_seed); - assert!((block_dims.size() - padded_block.len()) % block_dims.chunk_size as usize == 0); + assert!( + (block_dims.size().saturating_sub(padded_block.len())) + .checked_rem(block_dims.chunk_size as usize) + == Some(0) + ); + let nz_chunk_size: NonZeroUsize = usize::try_from(block_dims.chunk_size) + .map_err(|_| Error::CellLenghtExceeded)? + .try_into() + .map_err(|_| Error::ZeroDimension)?; - for _ in 0..((block_dims.size() - padded_block.len()) / block_dims.chunk_size as usize) { + for _ in 0..(block_dims.size().saturating_sub(padded_block.len()) / nz_chunk_size) { let rnd_values: DataChunk = rng.gen(); padded_block.append(&mut pad_with_zeroes(rnd_values.to_vec(), chunk_size)); } @@ -188,7 +198,7 @@ pub fn get_block_dimensions( chunk_size: u32, ) -> Result { let max_block_dimensions = BlockDimensions::new(max_rows, max_cols, chunk_size); - if !(block_size as usize <= max_block_dimensions.size()) { + if block_size as usize > max_block_dimensions.size() { return Err(Error::BlockTooBig); } @@ -207,8 +217,9 @@ pub fn get_block_dimensions( // we must minimize number of rows, to minimize header size // (performance wise it doesn't matter) + let nz_max_cols = NonZeroU32::new(max_cols.0).ok_or(Error::ZeroDimension)?; let (cols, rows) = if total_cells > max_cols.0 { - (max_cols, BlockLengthRows(total_cells / max_cols.0)) + (max_cols, BlockLengthRows(total_cells / nz_max_cols)) } else { (total_cells.into(), 1.into()) }; @@ -265,6 +276,19 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result { 
BlsScalar::from_bytes(&scalar_size_chunk).map_err(|_| Error::CellLenghtExceeded) } +fn make_dims(bd: &BlockDimensions) -> Result { + Ok(Dimensions::new( + bd.cols + .as_usize() + .try_into() + .map_err(|_| Error::ZeroDimension)?, + bd.rows + .as_usize() + .try_into() + .map_err(|_| Error::ZeroDimension)?, + )) +} + /// Build extended data matrix, by columns. /// We are using dusk plonk for erasure coding, /// which is using roots of unity as evaluation domain for fft and ifft. @@ -280,8 +304,8 @@ pub fn par_extend_data_matrix( metrics: &M, ) -> Result, Error> { let start = Instant::now(); - let dims = kate_grid::Dimensions::new(block_dims.cols.0 as usize, block_dims.rows.0 as usize); - let extended_dims = dims.extend(Extension::height(2)); + let dims = make_dims(&block_dims)?; + let extended_dims = dims.extend(EXTENSION); // simple length with mod check would work... let chunks = block.par_chunks_exact(block_dims.chunk_size as usize); @@ -294,7 +318,7 @@ pub fn par_extend_data_matrix( .collect::, Error>>()?; // The data is currently row-major, so we need to put it into column-major - let rm = scalars.as_row_major(dims.width(), dims.height()).unwrap(); + let rm = scalars.into_row_major(dims.width(), dims.height()).unwrap(); let col_wise_scalars = rm.iter_column_wise().map(Clone::clone).collect::>(); let mut chunk_elements = col_wise_scalars @@ -327,8 +351,8 @@ pub fn build_proof( cells: &[Cell], metrics: &M, ) -> Result, Error> { - let dims = kate_grid::Dimensions::new(block_dims.cols.as_usize(), block_dims.rows.as_usize()); - let extended_dims = dims.extend(Extension::height(EXTENSION_FACTOR as usize)); + let dims = make_dims(&block_dims)?; + let extended_dims = dims.extend(EXTENSION); const SPROOF_SIZE: usize = PROOF_SIZE + SCALAR_SIZE; @@ -338,7 +362,7 @@ pub fn build_proof( let row_eval_domain = EvaluationDomain::new(dims.width()).map_err(Error::from)?; let row_dom_x_pts = row_eval_domain.elements().collect::>(); - let mut result_bytes: Vec = vec![0u8; 
SPROOF_SIZE * cells.len()]; + let mut result_bytes: Vec = vec![0u8; SPROOF_SIZE.saturating_mul(cells.len())]; let prover_key = &prover_key; let row_dom_x_pts = &row_dom_x_pts; @@ -365,7 +389,10 @@ pub fn build_proof( // construct polynomial per extended matrix row let row = (0..extended_dims.width()) .into_par_iter() - .map(|j| ext_data_matrix[r_index + j * extended_dims.height()]) + .map(|j| { + ext_data_matrix + [r_index.saturating_add(j.saturating_mul(extended_dims.height()))] + }) .collect::>(); //let row = ext_data_matrix_cm // .iter_row(r_index) @@ -379,8 +406,8 @@ pub fn build_proof( let witness = prover_key.compute_single_witness(&poly, &row_dom_x_pts[c_index]); match prover_key.commit(&witness) { Ok(commitment_to_witness) => { - let evaluated_point = - ext_data_matrix[r_index + c_index * extended_dims.height()]; + let evaluated_point = ext_data_matrix[r_index + .saturating_add(c_index.saturating_mul(extended_dims.height()))]; res[0..PROOF_SIZE].copy_from_slice(&commitment_to_witness.to_bytes()); res[PROOF_SIZE..].copy_from_slice(&evaluated_point.to_bytes()); @@ -480,9 +507,9 @@ fn row( extended_rows: BlockLengthRows, ) -> Vec { let mut row = Vec::with_capacity(cols.as_usize()); - (0..cols.as_usize() * extended_rows.as_usize()) + (0..cols.as_usize().saturating_mul(extended_rows.as_usize())) .step_by(extended_rows.as_usize()) - .for_each(|idx| row.push(matrix[i + idx])); + .for_each(|idx| row.push(matrix[i.saturating_add(idx)])); row } @@ -709,7 +736,7 @@ mod tests { const RNG_SEED: Seed = [42u8; 32]; matrix - .chunks_exact(dimensions.rows.as_usize() * 2) + .chunks_exact(dimensions.rows.as_usize().saturating_mul(2)) .enumerate() .map(|(col, e)| (col as u16, e)) .flat_map(|(col, e)| { @@ -1179,7 +1206,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat }) .collect::>(); - assert_eq!(row.len(), len as usize); + assert_eq!(row.len(), len); let mut result_bytes: Vec = vec![0u8; config::COMMITMENT_SIZE]; commit(&prover_key, 
row_eval_domain, row.clone(), &mut result_bytes).unwrap(); println!("Commitment: {result_bytes:?}"); diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index 85dfd366..e34a5851 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -1,4 +1,4 @@ -use core::marker::PhantomData; +use core::{marker::PhantomData, num::NonZeroUsize}; use codec::Encode; use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; @@ -8,7 +8,7 @@ use dusk_plonk::{ fft::{EvaluationDomain, Polynomial}, prelude::{BlsScalar, CommitKey}, }; -use kate_grid::{AsColumnMajor, AsRowMajor, Dimensions, Extension, Grid, RowMajor}; +use kate_grid::{Dimensions, Extension, Grid, IntoColumnMajor, IntoRowMajor, RowMajor}; use kate_recovery::config::PADDING_TAIL_VALUE; use poly_multiproof::{m1_blst::M1NoPrecomp, merlin::Transcript}; use rand::{Rng, SeedableRng}; @@ -55,7 +55,7 @@ impl EvaluationGrid { let mut enc = datas.encode(); enc.push(PADDING_TAIL_VALUE); // TODO: remove 9797 padding stuff enc.chunks(DATA_CHUNK_SIZE) - .map(|c| pad_to_bls_scalar(c)) + .map(pad_to_bls_scalar) .collect::, _>>() .map(|scalars| (id, scalars)) }) @@ -69,7 +69,7 @@ impl EvaluationGrid { app_id: *app_id, start, }); - start += scalars.len() as u32; // next item should start after current one + start = start.saturating_add(scalars.len() as u32); // next item should start after current one } // Flatten the grid @@ -89,13 +89,13 @@ impl EvaluationGrid { while grid.len() != dims.n_cells() { let rnd_values: [u8; BlsScalar::SIZE - 1] = rng.gen(); // TODO: can we just use zeros instead? 
- grid.push(pad_to_bls_scalar(&rnd_values)?); + grid.push(pad_to_bls_scalar(rnd_values)?); } Ok(EvaluationGrid { lookup, evals: grid - .as_row_major(dims.width(), dims.height()) + .into_row_major(dims.width(), dims.height()) .ok_or(Error::DimensionsMismatch)?, dims, }) @@ -121,7 +121,7 @@ impl EvaluationGrid { let end_index = self .lookup .index - .get(i + 1) + .get(i.saturating_add(1)) .map(|elem| elem.start) .unwrap_or(self.lookup.size) as usize; Some((start_index, end_index)) @@ -138,12 +138,12 @@ impl EvaluationGrid { if !orig_dims.divides(&self.dims) { return None; } - let h_mul = self.dims.height() / orig_dims.height(); + let h_mul = self.dims.height() / orig_dims.height_nz(); let (start_ind, end_ind) = self.app_data_indices(app_id)?; - let (_, start_y) = RowMajor::<()>::ind_to_coord(&orig_dims, start_ind); - let (_, end_y) = RowMajor::<()>::ind_to_coord(&orig_dims, end_ind - 1); // Find y of last cell elt - let (new_start_y, new_end_y) = (start_y * h_mul, end_y * h_mul); + let (_, start_y) = RowMajor::<()>::ind_to_coord(orig_dims, start_ind); + let (_, end_y) = RowMajor::<()>::ind_to_coord(orig_dims, end_ind.saturating_sub(1)); // Find y of last cell elt + let (new_start_y, new_end_y) = (start_y.saturating_mul(h_mul), end_y.saturating_mul(h_mul)); (new_start_y..=new_end_y) .step_by(h_mul) @@ -152,7 +152,11 @@ impl EvaluationGrid { } pub fn extend_columns(&self, extension_factor: usize) -> Result { - let new_dims = self.dims.extend(Extension::height(extension_factor)); + let new_dims = self.dims.extend(Extension::height( + extension_factor + .try_into() + .map_err(|_| Error::CellLenghtExceeded)?, + )); let domain = EvaluationDomain::new(self.dims.height())?; let domain_new = EvaluationDomain::new(new_dims.height())?; @@ -167,7 +171,7 @@ impl EvaluationGrid { .flat_map(|(_x, col)| { // put elts into a new column let mut ext_col = Vec::with_capacity(domain_new.size()); - col.for_each(|s| ext_col.push(s.clone())); + col.for_each(|s| ext_col.push(*s)); // ifft, 
resize, fft domain.ifft_slice(&mut ext_col); ext_col.resize(domain_new.size(), BlsScalar::zero()); @@ -175,7 +179,7 @@ impl EvaluationGrid { ext_col }) .collect::>() - .as_column_major(new_dims.width(), new_dims.height()) + .into_column_major(new_dims.width(), new_dims.height()) .expect("Each column should be expanded to news dims") .to_row_major(); @@ -212,7 +216,7 @@ impl PolynomialGrid { pub fn commitments(&self, srs: &CommitKey) -> Result, Error> { self.inner .iter() - .map(|poly| srs.commit(&poly).map_err(|e| Error::PlonkError(e))) + .map(|poly| srs.commit(poly).map_err(Error::PlonkError)) .collect() } @@ -220,7 +224,7 @@ impl PolynomialGrid { self.inner .get(row) .ok_or(Error::CellLenghtExceeded) - .and_then(|poly| srs.commit(&poly).map_err(|e| Error::PlonkError(e))) + .and_then(|poly| srs.commit(poly).map_err(Error::PlonkError)) } pub fn proof(&self, srs: &CommitKey, cell: &Cell) -> Result { @@ -268,7 +272,7 @@ impl PolynomialGrid { let mut ts = Transcript::new(b"avail-mp"); let proof = srs - .open(&mut ts, &evals, &polys, &points) + .open(&mut ts, &evals, &polys, points) .expect("TODO: real error msg"); Ok(Multiproof { proof, @@ -280,7 +284,7 @@ impl PolynomialGrid { fn convert_bls(dusk: &dusk_plonk::bls12_381::BlsScalar) -> poly_multiproof::m1_blst::Fr { poly_multiproof::m1_blst::Fr { - 0: poly_multiproof::ark_ff::BigInt(dusk.0.clone()), + 0: poly_multiproof::ark_ff::BigInt(dusk.0), 1: PhantomData, } } @@ -315,21 +319,21 @@ pub fn multiproof_block( return None; } - let block_width = grid_dims.width() / mp_grid_dims.width(); - let block_height = grid_dims.height() / mp_grid_dims.height(); + let block_width = grid_dims.width() / mp_grid_dims.width_nz(); + let block_height = grid_dims.height() / mp_grid_dims.height_nz(); Some(CellBlock { - start_x: x * block_width, - start_y: y * block_height, - end_x: (x + 1) * block_width, - end_y: (y + 1) * block_height, + start_x: x.checked_mul(block_width)?, + start_y: y.checked_mul(block_height)?, + end_x: 
x.checked_add(1)?.checked_mul(block_width)?, + end_y: y.checked_add(1)?.checked_mul(block_height)?, }) } /// Dimensions of the multiproof grid. These are guaranteed to cleanly divide `grid_dims`. /// `target_dims` must cleanly divide `grid_dims`. pub fn multiproof_dims(grid_dims: &Dimensions, target_dims: &Dimensions) -> Option { - let target_width = core::cmp::min(grid_dims.width(), target_dims.width()); - let target_height = core::cmp::min(grid_dims.height(), target_dims.height()); + let target_width = grid_dims.width_nz().min(target_dims.width_nz()); + let target_height = grid_dims.height_nz().min(target_dims._nz()); if grid_dims.width() % target_width != 0 || grid_dims.height() % target_height != 0 { return None; } @@ -347,9 +351,12 @@ pub fn get_block_dims( let current_width = n_scalars; // Don't let the width get lower than the minimum provided let width = core::cmp::max(round_up_power_of_2(current_width), min_width); - Ok(Dimensions::new(width, 1)) + Ok(Dimensions::new( + width.try_into().map_err(|_| Error::ZeroDimension)?, + 1.try_into().expect("1 is nonzero"), + )) } else { - let width = max_width; + let width = NonZeroUsize::new(max_width).ok_or(Error::ZeroDimension)?; let current_height = round_up_to_multiple(n_scalars, width) / width; // Round the height up to a power of 2 for ffts let height = round_up_power_of_2(current_height); @@ -357,13 +364,16 @@ if height > max_height { return Err(Error::BlockTooBig); } - Ok(Dimensions::new(width, height)) + Ok(Dimensions::new( + width, + height.try_into().map_err(|_| Error::ZeroDimension)?, + )) } } -fn round_up_to_multiple(input: usize, multiple: usize) -> usize { - let n_multiples = (input + multiple - 1) / multiple; - n_multiples * multiple +fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { + let n_multiples = input.saturating_add(multiple.get()).saturating_sub(1) / multiple; + n_multiples.saturating_mul(multiple.get()) } fn pad_to_bls_scalar(a: impl AsRef<[u8]>)
-> Result { @@ -377,6 +387,7 @@ fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { } // Round up. only valid for positive integers +#[allow(clippy::integer_arithmetic)] fn round_up_power_of_2(mut v: usize) -> usize { if v == 0 { return 1; @@ -388,18 +399,20 @@ fn round_up_power_of_2(mut v: usize) -> usize { v |= v >> 8; v |= v >> 16; v += 1; - return v; + v } #[cfg(test)] +#[allow(clippy::integer_arithmetic)] mod tests { use super::*; use proptest::{prop_assert_eq, proptest}; use test_case::test_case; // parameters that will split a 256x256 grid into pieces of size 4x16 - const TARGET: Dimensions = Dimensions::new(64, 16); - const GRID: Dimensions = Dimensions::new(256, 256); + const TARGET: Dimensions = Dimensions::new_unchecked(64, 16); + const GRID: Dimensions = Dimensions::new_unchecked(256, 256); + fn cb(start_x: usize, start_y: usize, end_x: usize, end_y: usize) -> CellBlock { CellBlock { start_x, @@ -431,8 +444,8 @@ mod tests { target_h: usize, ) -> Option<(usize, usize)> { multiproof_dims( - &Dimensions::new(grid_w, grid_h), - &Dimensions::new(target_w, target_h), + &Dimensions::new_unchecked(grid_w, grid_h), + &Dimensions::new_unchecked(target_w, target_h), ) .map(|i| (i.width(), i.height())) } @@ -446,14 +459,14 @@ mod tests { fn test_round_up_to_multiple(i in 1..1000usize, m in 1..32usize) { for k in 0..m { let a = i * m - k; - prop_assert_eq!(round_up_to_multiple(a, m), i * m) + prop_assert_eq!(round_up_to_multiple(a, m.try_into().unwrap()), i * m) } } #[test] fn test_convert_bls_scalar(input: [u8; 31]) { use poly_multiproof::ark_serialize::CanonicalSerialize; - let dusk = pad_to_bls_scalar(&input).unwrap(); + let dusk = pad_to_bls_scalar(input).unwrap(); let ark = convert_bls(&dusk); let dusk_out = dusk.to_bytes(); let mut ark_out = [0u8; 32]; @@ -472,13 +485,13 @@ mod tests { round_up_power_of_2(i) } - #[test_case(0 => Dimensions::new(4, 1) ; "block size zero")] - #[test_case(1 => Dimensions::new(4, 1) ; "below minimum block size")] - 
#[test_case(10 => Dimensions::new(16, 1) ; "regular case")] - #[test_case(17 => Dimensions::new(32, 1) ; "minimum overhead after 512")] - #[test_case(256 => Dimensions::new(256, 1) ; "maximum cols")] - #[test_case(257 => Dimensions::new(256, 2) ; "two rows")] - #[test_case(256 * 256 => Dimensions::new(256, 256) ; "max block size")] + #[test_case(0 => Dimensions::new_unchecked(4, 1) ; "block size zero")] + #[test_case(1 => Dimensions::new_unchecked(4, 1) ; "below minimum block size")] + #[test_case(10 => Dimensions::new_unchecked(16, 1) ; "regular case")] + #[test_case(17 => Dimensions::new_unchecked(32, 1) ; "minimum overhead after 512")] + #[test_case(256 => Dimensions::new_unchecked(256, 1) ; "maximum cols")] + #[test_case(257 => Dimensions::new_unchecked(256, 2) ; "two rows")] + #[test_case(256 * 256 => Dimensions::new_unchecked(256, 256) ; "max block size")] #[test_case(256 * 256 + 1 => panics "BlockTooBig" ; "too much data")] fn test_get_block_dims(size: usize) -> Dimensions where { @@ -533,7 +546,7 @@ mod consistency_tests { .flat_map(|p| p.to_bytes()) .collect::>(); - assert_eq!(evals.dims, Dimensions::new(4, 2)); + assert_eq!(evals.dims, Dimensions::new_unchecked(4, 2)); let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); assert_eq!(commits, expected_commitments); } @@ -574,7 +587,7 @@ mod consistency_tests { }, ]; - let expected_dims = Dimensions::new(16, 1); + let expected_dims = Dimensions::new_unchecked(16, 1); let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); @@ -598,8 +611,8 @@ mod consistency_tests { let data = evals .evals - .inner - .into_iter() + .inner() + .iter() .flat_map(|s| s.to_bytes()) .collect::>(); assert_eq!(data, expected_data, "Data doesn't match the expected data"); @@ -629,38 +642,27 @@ mod consistency_tests { 
.into_iter() .map(|e| BlsScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) .collect::>() - .as_column_major(4, 4) + .into_column_major(4, 4) .unwrap() - .to_row_major() - .inner; + .to_row_major(); - let block_dims = Dimensions::new(4, 2); + let block_dims = Dimensions::new_unchecked(4, 2); let scalars = (0..=247) .collect::>() .chunks_exact(DATA_CHUNK_SIZE) - .flat_map(|chunk| pad_to_bls_scalar(chunk)) + .flat_map(pad_to_bls_scalar) .collect::>(); let grid = EvaluationGrid { lookup: DataLookup::default(), evals: scalars - .as_row_major(block_dims.width(), block_dims.height()) + .into_row_major(block_dims.width(), block_dims.height()) .unwrap(), dims: block_dims, }; let extend = grid.extend_columns(2).unwrap(); - for i in 0..expected_result.len() { - let e = expected_result[i]; - for j in 0..expected_result.len() { - let r = extend.evals.inner[j]; - if e == r { - eprintln!("Eq: {} {}", i, j); - } - } - } - - assert_eq!(extend.evals.inner, expected_result); + assert_eq!(extend.evals.inner(), expected_result.inner()); } fn app_extrinsic_strategy() -> impl Strategy { @@ -735,15 +737,15 @@ mod consistency_tests { let pp = pp(); let polys = grid.make_polynomial_grid().unwrap(); - let commitments = polys.commitments(&pp.commit_key()).unwrap(); + let commitments = polys.commitments(pp.commit_key()).unwrap(); let indices = (0..dims.width()).flat_map(|x| (0..dims.height()).map(move |y| (x, y))).collect::>(); // Sample some number 10 of the indices, all is too slow for tests... 
let mut rng = ChaChaRng::from_seed(RNG_SEED); - let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i].clone()); + let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i]); for (x, y) in sampled { let cell = Cell { row: (y as u32).into(), col: (x as u32).into() }; - let proof = polys.proof(&pp.commit_key(), &cell).unwrap(); + let proof = polys.proof(pp.commit_key(), &cell).unwrap(); let mut content = [0u8; 80]; content[..48].copy_from_slice(&proof.to_bytes()[..]); content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes()[..]); @@ -762,9 +764,9 @@ mod consistency_tests { fn newapi_commitments_verify(ref exts in app_extrinsics_strategy()) { //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); - let orig_dims = Dimensions::new(grid.dims.width(), grid.dims.height() / 2); + let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); let polys = grid.make_polynomial_grid().unwrap(); - let commits = polys.commitments(&pp().commit_key()) + let commits = polys.commitments(pp().commit_key()) .unwrap() .iter() .map(|c| c.to_bytes()) diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 3cd10649..4b861e70 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -1,4 +1,5 @@ #![cfg_attr(not(feature = "std"), no_std)] +#![deny(clippy::integer_arithmetic)] use da_types::{BlockLengthColumns, BlockLengthRows}; #[cfg(feature = "std")] @@ -17,6 +18,8 @@ pub use kate_grid as grid; pub use poly_multiproof as pmp; pub mod config { + use kate_grid::Extension; + use super::{BlockLengthColumns, BlockLengthRows}; // TODO: Delete this? 
not used anywhere @@ -25,6 +28,7 @@ pub mod config { pub const SCALAR_SIZE: usize = 32; pub const DATA_CHUNK_SIZE: usize = 31; // Actual chunk size is 32 after 0 padding is done pub const EXTENSION_FACTOR: u32 = 2; + pub const EXTENSION: Extension = Extension::height_unchecked(2); pub const PROVER_KEY_SIZE: u32 = 48; pub const PROOF_SIZE: usize = 48; // MINIMUM_BLOCK_SIZE, MAX_BLOCK_ROWS and MAX_BLOCK_COLUMNS have to be a power of 2 because of the FFT functions requirements @@ -95,7 +99,7 @@ pub mod testnet { let g1 = G1::deserialize_compressed(&G1_BYTES[..]).unwrap(); let g2 = G2::deserialize_compressed(&G2_BYTES[..]).unwrap(); - m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, max_degree + 1, max_pts) + m1_blst::M1NoPrecomp::new_from_scalar(x, g1, g2, max_degree.saturating_add(1), max_pts) } #[cfg(test)] @@ -152,7 +156,7 @@ pub mod testnet { let dp_commit = pubs.commit_key().commit(&dp_poly).unwrap().0.to_bytes(); let mut pmp_commit = [0u8; 48]; - pmp.commit(&pmp_poly) + pmp.commit(pmp_poly) .unwrap() .0 .serialize_compressed(&mut pmp_commit[..]) @@ -177,6 +181,7 @@ pub mod gridgen; /// There is a unit test to ensure this formula match with the current /// IEC 9797 1 algorithm we implemented. See `fn pad_iec_9797_1` #[inline] +#[allow(clippy::integer_arithmetic)] fn padded_len_of_pad_iec_9797_1(len: u32) -> u32 { let len_plus_one = len.saturating_add(1); let offset = (DATA_CHUNK_SIZE - (len_plus_one as usize % DATA_CHUNK_SIZE)) % DATA_CHUNK_SIZE; @@ -186,6 +191,7 @@ fn padded_len_of_pad_iec_9797_1(len: u32) -> u32 { } /// Calculates the padded len based of initial `len`. 
+#[allow(clippy::integer_arithmetic)] pub fn padded_len(len: u32, chunk_size: u32) -> u32 { let iec_9797_1_len = padded_len_of_pad_iec_9797_1(len); diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index b689cb75..6fdce24c 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -81,7 +81,7 @@ impl TryFrom<&[(AppId, u32)]> for DataLookup { size = size .checked_add(*data_len) .ok_or(Self::Error::SizeOverflow)?; - if !(prev_app_id <= *app_id) { + if prev_app_id > *app_id { return Err(Self::Error::UnsortedExtrinsics); } prev_app_id = *app_id; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index ab25d60e..e9920bd0 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "nightly-2022-11-15" +channel = "nightly-2023-03-17" components = ["rustfmt", "clippy", "llvm-tools-preview"] profile = "minimal" targets = ["wasm32-unknown-unknown"] From b497abceda66413c6f0226341f0765d22a46433e Mon Sep 17 00:00:00 2001 From: William Arnold Date: Tue, 21 Mar 2023 21:49:29 -0700 Subject: [PATCH 24/87] Tabs...? In 2023...? 
--- kate/grid/src/dims.rs | 4 ++-- kate/grid/src/grid.rs | 4 ++-- kate/src/gridgen.rs | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index 71c2c97a..2e88acf5 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -86,12 +86,12 @@ impl Extension { } } - pub const fn width_unchecked(factor: usize) -> Self { + pub const fn width_unchecked(factor: usize) -> Self { Self { height_factor: nonzero_unchecked(1), width_factor: nonzero_unchecked(factor), } - } + } } #[allow(unconditional_panic)] diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index 5802cc98..6dc23316 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -140,8 +140,8 @@ impl ColumnMajor { if x >= self.width() { return None; } - let start = x.checked_mul(self.height())?; - let end = x.checked_add(1)?.checked_mul(self.height())?; + let start = x.checked_mul(self.height())?; + let end = x.checked_add(1)?.checked_mul(self.height())?; Some(&self.inner[start..end]) } diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index e34a5851..8efc91a0 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -333,7 +333,7 @@ pub fn multiproof_block( /// `target_dims` must cleanly divide `grid_dims`. 
pub fn multiproof_dims(grid_dims: &Dimensions, target_dims: &Dimensions) -> Option { let target_width = grid_dims.width_nz().min(target_dims.width_nz()); - let target_height = grid_dims.height_nz().min(target_dims._nz()); + let target_height = grid_dims.height_nz().min(target_dims.height_nz()); if grid_dims.width() % target_width != 0 || grid_dims.height() % target_height != 0 { return None; } From e28bb44cf736cf5c282e0a3c759854ea57d1dda2 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Tue, 21 Mar 2023 22:10:38 -0700 Subject: [PATCH 25/87] Small refactors --- kate/Cargo.toml | 2 +- kate/src/com.rs | 6 ++++-- kate/src/lib.rs | 21 +++++---------------- 3 files changed, 10 insertions(+), 19 deletions(-) diff --git a/kate/Cargo.toml b/kate/Cargo.toml index ec0354ec..faf31972 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -26,10 +26,10 @@ sp-arithmetic = { version = "7.0.0", default-features = false } sp-core-hashing = { version = "5.0.0", default-features = false, optional = true } sp-std = { version = "4.0.0", default-features = false } static_assertions = "1.1.0" +hex-literal = "0.3.4" [dev-dependencies] criterion = "0.3.5" -hex-literal = "0.3.4" itertools = "0.10" proptest = "1.0.0" serde_json = "1.0" diff --git a/kate/src/com.rs b/kate/src/com.rs index 6fca78fa..98c6563f 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -318,11 +318,13 @@ pub fn par_extend_data_matrix( .collect::, Error>>()?; // The data is currently row-major, so we need to put it into column-major - let rm = scalars.into_row_major(dims.width(), dims.height()).unwrap(); + let rm = scalars + .into_row_major(dims.width(), dims.height()) + .ok_or(Error::DimensionsMismatch)?; let col_wise_scalars = rm.iter_column_wise().map(Clone::clone).collect::>(); let mut chunk_elements = col_wise_scalars - .chunks_exact(dims.height()) + .chunks_exact(dims.height_nz().get()) .flat_map(|column| extend_column_with_zeros(column, extended_dims.height())) .collect::>(); diff --git a/kate/src/lib.rs 
b/kate/src/lib.rs index 4b861e70..7411be13 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -51,6 +51,7 @@ pub mod config { #[cfg(feature = "std")] pub mod testnet { use super::{BlockLengthColumns, PublicParameters}; + use hex_literal::hex; use once_cell::sync::Lazy; use poly_multiproof::ark_ff::{BigInt, Fp}; use poly_multiproof::ark_serialize::CanonicalDeserialize; @@ -80,18 +81,8 @@ pub mod testnet { 15693365399533249662, 1020900941429372507, ]; - const G1_BYTES: [u8; 48] = [ - 164, 95, 117, 74, 158, 148, 204, 203, 178, 203, 233, 215, 196, 65, 184, 181, 39, 2, 110, - 240, 94, 42, 58, 255, 74, 164, 187, 28, 87, 223, 55, 103, 251, 102, 156, 196, 199, 99, 155, - 211, 126, 104, 54, 83, 189, 197, 11, 90, - ]; - const G2_BYTES: [u8; 96] = [ - 184, 69, 172, 94, 123, 78, 200, 84, 29, 1, 38, 96, 39, 103, 114, 224, 1, 193, 224, 71, 94, - 96, 151, 24, 132, 72, 29, 67, 252, 189, 68, 222, 42, 2, 233, 134, 45, 191, 159, 83, 108, - 33, 24, 20, 246, 204, 84, 72, 16, 11, 205, 165, 220, 112, 120, 84, 175, 142, 56, 41, 117, - 13, 31, 177, 139, 18, 114, 134, 170, 164, 252, 149, 158, 115, 46, 33, 40, 168, 163, 21, - 242, 248, 244, 25, 191, 87, 116, 254, 4, 58, 244, 111, 187, 235, 75, 39, - ]; + const G1_BYTES: [u8; 48] = hex!("a45f754a9e94cccbb2cbe9d7c441b8b527026ef05e2a3aff4aa4bb1c57df3767fb669cc4c7639bd37e683653bdc50b5a"); + const G2_BYTES: [u8; 96] = hex!("b845ac5e7b4ec8541d012660276772e001c1e0475e60971884481d43fcbd44de2a02e9862dbf9f536c211814f6cc5448100bcda5dc707854af8e3829750d1fb18b127286aaa4fc959e732e2128a8a315f2f8f419bf5774fe043af46fbbeb4b27"); pub fn multiproof_params(max_degree: usize, max_pts: usize) -> m1_blst::M1NoPrecomp { let x: Fr = Fp(BigInt(SEC_LIMBS), core::marker::PhantomData); @@ -127,10 +118,8 @@ pub mod testnet { let x: Fr = Fp(BigInt(SEC_LIMBS), core::marker::PhantomData); let mut out = [0u8; 32]; x.serialize_compressed(&mut out[..]).unwrap(); - const SEC_BYTES: [u8; 32] = [ - 120, 72, 181, 215, 17, 188, 152, 131, 153, 99, 23, 163, 249, 201, 2, 105, 213, 
103, - 113, 0, 93, 84, 10, 25, 24, 73, 57, 201, 232, 208, 219, 42, - ]; + const SEC_BYTES: [u8; 32] = + hex!("7848b5d711bc9883996317a3f9c90269d56771005d540a19184939c9e8d0db2a"); assert_eq!(SEC_BYTES, out); let g1 = G1::deserialize_compressed(&G1_BYTES[..]).unwrap(); From ca512b1297dedfcd11559e66a087ddde9866593a Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 22 Mar 2023 00:24:50 -0700 Subject: [PATCH 26/87] Formatting, swap assert for returning an error --- kate/Cargo.toml | 2 +- kate/src/com.rs | 10 +++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/kate/Cargo.toml b/kate/Cargo.toml index faf31972..e5121064 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -13,6 +13,7 @@ dusk-bytes = { version = "0.1.6", default-features = false, optional = true } dusk-plonk = { git = "https://github.com/maticnetwork/plonk", branch = "will/polynomial-visibility", optional = true } getrandom = { version = "0.2.8", default-features = false, features = ["js"] } hex = { version = "0.4", default-features = false, features = ["alloc"], optional = true } +hex-literal = "0.3.4" kate-grid = { path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true } @@ -26,7 +27,6 @@ sp-arithmetic = { version = "7.0.0", default-features = false } sp-core-hashing = { version = "5.0.0", default-features = false, optional = true } sp-std = { version = "4.0.0", default-features = false } static_assertions = "1.1.0" -hex-literal = "0.3.4" [dev-dependencies] criterion = "0.3.5" diff --git a/kate/src/com.rs b/kate/src/com.rs index 98c6563f..936434d0 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -309,8 +309,9 @@ pub fn par_extend_data_matrix( // simple length with mod check would work... 
let chunks = block.par_chunks_exact(block_dims.chunk_size as usize); - // TODO: Shouldn't assert, should error - assert!(chunks.remainder().is_empty()); + if chunks.remainder().is_empty() { + return Err(Error::DimensionsMismatch); + } let scalars = chunks .into_par_iter() @@ -372,11 +373,6 @@ pub fn build_proof( // generate proof only for requested cells let total_start = Instant::now(); - // TODO: better error type - //let ext_data_matrix_cm = ext_data_matrix - // .as_column_major(extended_dims.width(), extended_dims.height()) - // .ok_or(Error::DimensionsMismatch)?; - // attempt to parallelly compute proof for all requested cells cells .into_par_iter() From 01cf7c7f6faa83bad0ed87e54594b8f2bd267f6e Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 22 Mar 2023 23:27:27 -0700 Subject: [PATCH 27/87] [skip ci] Fix some no-std errors --- primitives/nomad/signature/Cargo.toml | 2 +- primitives/nomad/signature/src/signature.rs | 2 ++ primitives/types/src/data_lookup.rs | 1 + primitives/types/src/lib.rs | 3 +++ 4 files changed, 7 insertions(+), 1 deletion(-) diff --git a/primitives/nomad/signature/Cargo.toml b/primitives/nomad/signature/Cargo.toml index edeb8d37..4bc26073 100644 --- a/primitives/nomad/signature/Cargo.toml +++ b/primitives/nomad/signature/Cargo.toml @@ -37,7 +37,7 @@ hex = { version = "0.4.3", default-features = false } k256 = { version = "0.11.5", default-features = false, features = ["keccak256", "ecdsa"] } serde = { version = "1.0", default-features = false, optional = true, features = ["derive"] } thiserror-no-std = "2.0.2" -tiny-keccak = { version = "2.0.2", default-features = false } +tiny-keccak = { version = "2.0.2", default-features = false, features = ["keccak"] } [dev-dependencies] byte-slice-cast = "1.2.1" diff --git a/primitives/nomad/signature/src/signature.rs b/primitives/nomad/signature/src/signature.rs index 9969fac1..70844cbe 100644 --- a/primitives/nomad/signature/src/signature.rs +++ b/primitives/nomad/signature/src/signature.rs 
@@ -68,6 +68,7 @@ pub struct Signature { pub v: u64, } +#[cfg(feature = "std")] impl fmt::Display for Signature { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let sig = <[u8; 65]>::from(self); @@ -189,6 +190,7 @@ impl<'a> TryFrom<&'a [u8]> for Signature { } } +#[cfg(feature = "std")] impl FromStr for Signature { type Err = SignatureError; diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index 6fdce24c..7a19a5a6 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -1,4 +1,5 @@ use num_traits::Zero; +use alloc::vec::Vec; use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs index f850d4dc..c162f052 100644 --- a/primitives/types/src/lib.rs +++ b/primitives/types/src/lib.rs @@ -1,3 +1,6 @@ +#![cfg_attr(not(feature = "std"), no_std)] +extern crate alloc; +use alloc::vec::Vec; use derive_more::{Add, Constructor, Deref, Display, From, Into, Mul}; use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; From c58c1bd50faca4eeb3cb0b40d1ff3b00578d27ba Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 26 Mar 2023 20:46:09 -0700 Subject: [PATCH 28/87] Start extracting tests into their own modules --- kate/src/com.rs | 2 +- kate/src/gridgen.rs | 84 +------------------------ kate/src/gridgen_tests/commitments.rs | 89 +++++++++++++++++++++++++++ kate/src/gridgen_tests/mod.rs | 38 ++++++++++++ kate/src/lib.rs | 3 + 5 files changed, 132 insertions(+), 84 deletions(-) create mode 100644 kate/src/gridgen_tests/commitments.rs create mode 100644 kate/src/gridgen_tests/mod.rs diff --git a/kate/src/com.rs b/kate/src/com.rs index 936434d0..e890675d 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -309,7 +309,7 @@ pub fn par_extend_data_matrix( // simple length with mod check would work... 
let chunks = block.par_chunks_exact(block_dims.chunk_size as usize); - if chunks.remainder().is_empty() { + if !chunks.remainder().is_empty() { return Err(Error::DimensionsMismatch); } diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index 8efc91a0..4ce54a04 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -515,57 +515,9 @@ mod consistency_tests { use rand::distributions::Uniform; use rand::prelude::Distribution; - fn pp() -> PublicParameters { + pub(crate) fn pp() -> PublicParameters { testnet::public_params(da_types::BlockLengthColumns(256)) } - - #[test] - fn newapi_test_build_commitments_simple_commitment_check() { - let original_data = br#"test"#; - let block_height = 256usize; - let block_width = 256usize; - let hash: Seed = [ - 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, - 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, - ]; - - let evals = EvaluationGrid::from_extrinsics( - vec![AppExtrinsic::from(original_data.to_vec())], - 4, - block_width, - block_height, - hash, - ) - .unwrap(); - let evals = evals.extend_columns(2).unwrap(); - let polys = evals.make_polynomial_grid().unwrap(); - let commits = polys - .commitments(pp().commit_key()) - .unwrap() - .into_iter() - .flat_map(|p| p.to_bytes()) - .collect::>(); - - assert_eq!(evals.dims, Dimensions::new_unchecked(4, 2)); - let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); - assert_eq!(commits, expected_commitments); - } - - #[test] - fn newapi_par_build_commitments_row_wise_constant_row() { - // Due to scale encoding, first line is not constant. - // We will use second line to ensure constant row. 
- let hash = Seed::default(); - let xts = vec![AppExtrinsic { - app_id: AppId(0), - data: vec![0; 31 * 8], - }]; - - let evals = EvaluationGrid::from_extrinsics(xts, 4, 4, 4, hash).unwrap(); - let evals = evals.extend_columns(2).unwrap(); - let polys = evals.make_polynomial_grid().unwrap(); - polys.commitments(pp().commit_key()).unwrap(); - } #[test] fn newapi_test_flatten_block() { let extrinsics: Vec = vec![ @@ -696,8 +648,6 @@ mod consistency_tests { sampled } - // This copied method is still confusing to me... it just accumulates the size but skips over - // the app_id 0 size? not sure what's going on... fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { AppDataIndex { size: lookup.size, @@ -757,36 +707,4 @@ mod consistency_tests { } } } - - proptest! { - #![proptest_config(ProptestConfig::with_cases(1))] - #[test] - fn newapi_commitments_verify(ref exts in app_extrinsics_strategy()) { - //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); - let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); - let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); - let polys = grid.make_polynomial_grid().unwrap(); - let commits = polys.commitments(pp().commit_key()) - .unwrap() - .iter() - .map(|c| c.to_bytes()) - .collect::>(); - - let index = app_data_index_from_lookup(&grid.lookup); - let public_params = testnet::public_params((grid.dims.width() as u32).into()); - - for xt in exts { - let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); - // Have to put the rows we find in this funky data structure - let mut app_rows = vec![None; grid.dims.height()]; - for (row_i, row) in rows { - app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes()).collect()); - } - // Need to provide the original dimensions here too - let extended_dims = 
kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); - let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); - prop_assert!(missing.is_empty()); - } - } - } } diff --git a/kate/src/gridgen_tests/commitments.rs b/kate/src/gridgen_tests/commitments.rs new file mode 100644 index 00000000..b17150ee --- /dev/null +++ b/kate/src/gridgen_tests/commitments.rs @@ -0,0 +1,89 @@ +use super::*; +use crate::Seed; +use crate::gridgen::*; +use crate::testnet; +use da_types::AppExtrinsic; +use da_types::AppId; +use dusk_bytes::Serializable; +use hex_literal::hex; +use kate_grid::Dimensions; + +#[test] +fn test_build_commitments_simple_commitment_check() { + let original_data = br#"test"#; + let block_height = 256usize; + let block_width = 256usize; + let hash: Seed = [ + 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, 41, + 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, + ]; + + let evals = EvaluationGrid::from_extrinsics( + vec![AppExtrinsic::from(original_data.to_vec())], + 4, + block_width, + block_height, + hash, + ) + .unwrap(); + let evals = evals.extend_columns(2).unwrap(); + let polys = evals.make_polynomial_grid().unwrap(); + let commits = polys + .commitments(pp().commit_key()) + .unwrap() + .into_iter() + .flat_map(|p| p.to_bytes()) + .collect::>(); + + assert_eq!(evals.dims, Dimensions::new_unchecked(4, 2)); + let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + assert_eq!(commits, expected_commitments); +} + +#[test] +fn par_build_commitments_row_wise_constant_row() { + // Due to scale encoding, first line is not constant. + // We will use second line to ensure constant row. 
+ let hash = Seed::default(); + let xts = vec![AppExtrinsic { + app_id: AppId(0), + data: vec![0; 31 * 8], + }]; + + let evals = EvaluationGrid::from_extrinsics(xts, 4, 4, 4, hash).unwrap(); + let evals = evals.extend_columns(2).unwrap(); + let polys = evals.make_polynomial_grid().unwrap(); + polys.commitments(pp().commit_key()).unwrap(); +} + +proptest! { + #![proptest_config(ProptestConfig::with_cases(20))] + #[test] + fn commitments_verify(ref exts in app_extrinsics_strategy()) { + //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); + let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); + let polys = grid.make_polynomial_grid().unwrap(); + let commits = polys.commitments(pp().commit_key()) + .unwrap() + .iter() + .map(|c| c.to_bytes()) + .collect::>(); + + let index = app_data_index_from_lookup(&grid.lookup); + let public_params = testnet::public_params((grid.dims.width() as u32).into()); + + for xt in exts { + let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); + // Have to put the rows we find in this funky data structure + let mut app_rows = vec![None; grid.dims.height()]; + for (row_i, row) in rows { + app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes()).collect()); + } + // Need to provide the original dimensions here too + let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); + let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); + prop_assert!(missing.is_empty()); + } + } +} diff --git a/kate/src/gridgen_tests/mod.rs b/kate/src/gridgen_tests/mod.rs new file mode 100644 index 00000000..bafa7c18 --- /dev/null +++ 
b/kate/src/gridgen_tests/mod.rs @@ -0,0 +1,38 @@ +use da_types::{AppExtrinsic, DataLookup}; +use dusk_plonk::prelude::PublicParameters; +use kate_recovery::index::AppDataIndex; +use proptest::{collection, prelude::*, sample::size_range}; + +use crate::testnet; + +mod commitments; + +pub(crate) fn pp() -> PublicParameters { + testnet::public_params(da_types::BlockLengthColumns(256)) +} + +fn app_extrinsic_strategy() -> impl Strategy { + ( + any::(), + any_with::>(size_range(1..2048).lift()), + ) + .prop_map(|(app_id, data)| AppExtrinsic { + app_id: app_id.into(), + data, + }) +} + +fn app_extrinsics_strategy() -> impl Strategy> { + collection::vec(app_extrinsic_strategy(), size_range(1..16)).prop_map(|xts| { + let mut new_xts = xts; + new_xts.sort_by(|a1, a2| a1.app_id.cmp(&a2.app_id)); + new_xts + }) +} + +fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { + AppDataIndex { + size: lookup.size, + index: lookup.index.iter().map(|e| (e.app_id.0, e.start)).collect(), + } +} diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 7411be13..d966f0cd 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -164,6 +164,9 @@ pub mod com; #[cfg(feature = "std")] pub mod gridgen; +#[cfg(test)] +mod gridgen_tests; + /// Precalculate the length of padding IEC 9797 1. 
/// /// # NOTE From a7d0289bff4a8df1dc1d2b8970141d21b2dd76d6 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 26 Mar 2023 21:14:11 -0700 Subject: [PATCH 29/87] Pull out more tests --- kate/src/gridgen.rs | 212 +---------------------- kate/src/gridgen_tests/formatting.rs | 107 ++++++++++++ kate/src/gridgen_tests/mod.rs | 2 + kate/src/gridgen_tests/reconstruction.rs | 79 +++++++++ 4 files changed, 189 insertions(+), 211 deletions(-) create mode 100644 kate/src/gridgen_tests/formatting.rs create mode 100644 kate/src/gridgen_tests/reconstruction.rs diff --git a/kate/src/gridgen.rs b/kate/src/gridgen.rs index 4ce54a04..22eaea99 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen.rs @@ -376,7 +376,7 @@ fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { n_multiples.saturating_mul(multiple.get()) } -fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { +pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { if a.as_ref().len() > DATA_CHUNK_SIZE { todo!() } @@ -498,213 +498,3 @@ where { get_block_dims(size, 4, 256, 256).unwrap() } } - -#[cfg(test)] -mod consistency_tests { - use super::*; - use crate::testnet; - use dusk_plonk::prelude::PublicParameters; - use hex_literal::hex; - use kate_grid::Grid; - use kate_recovery::com::reconstruct_extrinsics; - use kate_recovery::data::Cell as DCell; - use kate_recovery::index::AppDataIndex; - use kate_recovery::matrix::Position as DPosition; - use proptest::prelude::*; - use proptest::{collection, sample::size_range, strategy::Strategy}; - use rand::distributions::Uniform; - use rand::prelude::Distribution; - - pub(crate) fn pp() -> PublicParameters { - testnet::public_params(da_types::BlockLengthColumns(256)) - } - #[test] - fn newapi_test_flatten_block() { - let extrinsics: Vec = vec![ - AppExtrinsic { - app_id: 0.into(), - data: (1..=29).collect(), - }, - AppExtrinsic { - app_id: 1.into(), - data: (1..=30).collect(), - }, - AppExtrinsic { - app_id: 2.into(), - data: (1..=31).collect(), 
- }, - AppExtrinsic { - app_id: 3.into(), - data: (1..=60).collect(), - }, - ]; - - let expected_dims = Dimensions::new_unchecked(16, 1); - let evals = - EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); - - let expected_index = [(0.into(), 0), (1.into(), 2), (2.into(), 4), (3.into(), 6)] - .into_iter() - .map(|(app_id, start)| DataLookupIndexItem { app_id, start }) - .collect::>(); - - let expected_lookup = DataLookup { - size: 9, - index: expected_index, - }; - - assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); - assert_eq!( - evals.dims, expected_dims, - "Dimensions don't match the expected" - ); - - let expected_data = hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae281300"); - - let data = evals - .evals - .inner() - .iter() - .flat_map(|s| s.to_bytes()) - .collect::>(); - assert_eq!(data, expected_data, "Data doesn't match the expected data"); - } - - #[test] - fn newapi_test_extend_data_matrix() { - // This test expects this result in column major 
- let expected_result = vec![ - hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), - hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), - hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), - hex!("c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016"), - hex!("1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00"), - hex!("db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e"), - hex!("9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900"), - hex!("e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016"), - hex!("3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00"), - hex!("fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e"), - hex!("babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800"), - hex!("ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16"), - hex!("5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00"), - hex!("197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e"), - hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), - hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), - ] - .into_iter() - .map(|e| BlsScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) - .collect::>() - .into_column_major(4, 4) - .unwrap() - .to_row_major(); - - let block_dims = Dimensions::new_unchecked(4, 2); - let scalars = (0..=247) - .collect::>() - .chunks_exact(DATA_CHUNK_SIZE) - .flat_map(pad_to_bls_scalar) - .collect::>(); - - let grid = EvaluationGrid { - lookup: DataLookup::default(), - evals: scalars - .into_row_major(block_dims.width(), block_dims.height()) - .unwrap(), - dims: block_dims, - }; - let extend = grid.extend_columns(2).unwrap(); - - assert_eq!(extend.evals.inner(), expected_result.inner()); - } - - fn app_extrinsic_strategy() -> impl Strategy { - ( - any::(), - any_with::>(size_range(1..2048).lift()), - ) 
- .prop_map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), - data, - }) - } - - fn app_extrinsics_strategy() -> impl Strategy> { - collection::vec(app_extrinsic_strategy(), size_range(1..16)).prop_map(|xts| { - let mut new_xts = xts; - new_xts.sort_by(|a1, a2| a1.app_id.cmp(&a2.app_id)); - new_xts - }) - } - - fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { - let mut sampled = vec![]; - let u = Uniform::from(0..n); - while sampled.len() < n_samples || sampled.len() < n { - let t = u.sample(rng); - if !sampled.contains(&t) { - sampled.push(t) - } - } - sampled - } - - fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { - AppDataIndex { - size: lookup.size, - index: lookup.index.iter().map(|e| (e.app_id.0, e.start)).collect(), - } - } - - proptest! { - #![proptest_config(ProptestConfig::with_cases(5))] - #[test] - fn newapi_test_build_and_reconstruct(exts in app_extrinsics_strategy()) { - let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(2).unwrap(); - let gref = &grid; - let dims = &grid.dims; - //let (layout, commitments, dims, matrix) = par_build_commitments( - // BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); - const RNG_SEED: Seed = [42u8; 32]; - let mut rng = ChaChaRng::from_seed(RNG_SEED); - let cells = (0..dims.width()) - .flat_map(move |x| { - sample_unique(&mut rng, dims.height()/2, dims.height()) - .into_iter() - .map(move |y| { - kate_recovery::data::DataCell { - position: kate_recovery::matrix::Position { row: y as u32, col: x as u16 }, - data: gref.evals.get(x, y).unwrap().to_bytes() - } - }).collect::>() - }).collect::>(); - let index = app_data_index_from_lookup(&grid.lookup); - let bdims = kate_recovery::matrix::Dimensions::new(dims.height() as u16, dims.width() as u16).unwrap(); - let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); - for (result, xt) in 
reconstructed.iter().zip(exts) { - prop_assert_eq!(result.0, *xt.app_id); - prop_assert_eq!(result.1[0].as_slice(), &xt.data); - } - - let pp = pp(); - let polys = grid.make_polynomial_grid().unwrap(); - let commitments = polys.commitments(pp.commit_key()).unwrap(); - let indices = (0..dims.width()).flat_map(|x| (0..dims.height()).map(move |y| (x, y))).collect::>(); - - // Sample some number 10 of the indices, all is too slow for tests... - let mut rng = ChaChaRng::from_seed(RNG_SEED); - let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i]); - for (x, y) in sampled { - let cell = Cell { row: (y as u32).into(), col: (x as u32).into() }; - let proof = polys.proof(pp.commit_key(), &cell).unwrap(); - let mut content = [0u8; 80]; - content[..48].copy_from_slice(&proof.to_bytes()[..]); - content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes()[..]); - - let dcell = DCell{position: DPosition { row: y as u32, col: x as u16 }, content }; - let verification = kate_recovery::proof::verify(&pp, &bdims, &commitments[y].to_bytes(), &dcell); - prop_assert!(verification.is_ok()); - prop_assert!(verification.unwrap()); - } - } - } -} diff --git a/kate/src/gridgen_tests/formatting.rs b/kate/src/gridgen_tests/formatting.rs new file mode 100644 index 00000000..fd776b1b --- /dev/null +++ b/kate/src/gridgen_tests/formatting.rs @@ -0,0 +1,107 @@ +use da_types::{AppExtrinsic, DataLookupIndexItem, DataLookup}; +use dusk_bytes::Serializable; +use dusk_plonk::prelude::BlsScalar; +use kate_grid::{Dimensions, Grid, IntoColumnMajor, IntoRowMajor}; +use hex_literal::hex; + +use crate::{gridgen::EvaluationGrid, Seed, config::DATA_CHUNK_SIZE}; + + + +#[test] +fn newapi_test_flatten_block() { + let extrinsics: Vec = vec![ + AppExtrinsic { + app_id: 0.into(), + data: (1..=29).collect(), + }, + AppExtrinsic { + app_id: 1.into(), + data: (1..=30).collect(), + }, + AppExtrinsic { + app_id: 2.into(), + data: (1..=31).collect(), + }, + 
AppExtrinsic { + app_id: 3.into(), + data: (1..=60).collect(), + }, + ]; + + let expected_dims = Dimensions::new_unchecked(16, 1); + let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); + + let expected_index = [(0.into(), 0), (1.into(), 2), (2.into(), 4), (3.into(), 6)] + .into_iter() + .map(|(app_id, start)| DataLookupIndexItem { app_id, start }) + .collect::>(); + + let expected_lookup = DataLookup { + size: 9, + index: expected_index, + }; + + assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); + assert_eq!( + evals.dims, expected_dims, + "Dimensions don't match the expected" + ); + + let expected_data = hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae281300"); + + let data = evals + .evals + .inner() + .iter() + .flat_map(|s| s.to_bytes()) + .collect::>(); + assert_eq!(data, expected_data, "Data doesn't match the expected data"); +} + +#[test] +fn newapi_test_extend_data_matrix() { + // This test expects this result in column major + let 
expected_result = vec![ + hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), + hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), + hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), + hex!("c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016"), + hex!("1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00"), + hex!("db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e"), + hex!("9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900"), + hex!("e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016"), + hex!("3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00"), + hex!("fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e"), + hex!("babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800"), + hex!("ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16"), + hex!("5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00"), + hex!("197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e"), + hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), + hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), + ] + .into_iter() + .map(|e| BlsScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) + .collect::>() + .into_column_major(4, 4) + .unwrap() + .to_row_major(); + + let block_dims = Dimensions::new_unchecked(4, 2); + let scalars = (0..=247) + .collect::>() + .chunks_exact(DATA_CHUNK_SIZE) + .flat_map(crate::gridgen::pad_to_bls_scalar) + .collect::>(); + + let grid = EvaluationGrid { + lookup: DataLookup::default(), + evals: scalars + .into_row_major(block_dims.width(), block_dims.height()) + .unwrap(), + dims: block_dims, + }; + let extend = grid.extend_columns(2).unwrap(); + + assert_eq!(extend.evals.inner(), expected_result.inner()); +} diff --git a/kate/src/gridgen_tests/mod.rs b/kate/src/gridgen_tests/mod.rs index bafa7c18..32837a75 
100644 --- a/kate/src/gridgen_tests/mod.rs +++ b/kate/src/gridgen_tests/mod.rs @@ -6,6 +6,8 @@ use proptest::{collection, prelude::*, sample::size_range}; use crate::testnet; mod commitments; +mod formatting; +mod reconstruction; pub(crate) fn pp() -> PublicParameters { testnet::public_params(da_types::BlockLengthColumns(256)) diff --git a/kate/src/gridgen_tests/reconstruction.rs b/kate/src/gridgen_tests/reconstruction.rs new file mode 100644 index 00000000..5a83db62 --- /dev/null +++ b/kate/src/gridgen_tests/reconstruction.rs @@ -0,0 +1,79 @@ +use crate::com::Cell; +use crate::gridgen::EvaluationGrid; +use crate::gridgen_tests::{app_data_index_from_lookup, pp}; +use crate::Seed; +use dusk_bytes::Serializable; +use kate_grid::Grid; +use kate_recovery::com::reconstruct_extrinsics; +use kate_recovery::data::Cell as DCell; +use kate_recovery::matrix::Position as DPosition; +use proptest::prelude::*; +use rand::distributions::Uniform; +use rand::prelude::Distribution; +use rand::SeedableRng; +use rand_chacha::ChaChaRng; + +fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { + let mut sampled = vec![]; + let u = Uniform::from(0..n); + while sampled.len() < n_samples || sampled.len() < n { + let t = u.sample(rng); + if !sampled.contains(&t) { + sampled.push(t) + } + } + sampled +} + +proptest! 
{ +#![proptest_config(ProptestConfig::with_cases(5))] +#[test] +fn newapi_test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(2).unwrap(); + let gref = &grid; + let dims = &grid.dims; + //let (layout, commitments, dims, matrix) = par_build_commitments( + // BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); + const RNG_SEED: Seed = [42u8; 32]; + let mut rng = ChaChaRng::from_seed(RNG_SEED); + let cells = (0..dims.width()) + .flat_map(move |x| { + sample_unique(&mut rng, dims.height()/2, dims.height()) + .into_iter() + .map(move |y| { + kate_recovery::data::DataCell { + position: kate_recovery::matrix::Position { row: y as u32, col: x as u16 }, + data: gref.evals.get(x, y).unwrap().to_bytes() + } + }).collect::>() + }).collect::>(); + let index = app_data_index_from_lookup(&grid.lookup); + let bdims = kate_recovery::matrix::Dimensions::new(dims.height() as u16, dims.width() as u16).unwrap(); + let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); + for (result, xt) in reconstructed.iter().zip(exts) { + prop_assert_eq!(result.0, *xt.app_id); + prop_assert_eq!(result.1[0].as_slice(), &xt.data); + } + + let pp = pp(); + let polys = grid.make_polynomial_grid().unwrap(); + let commitments = polys.commitments(pp.commit_key()).unwrap(); + let indices = (0..dims.width()).flat_map(|x| (0..dims.height()).map(move |y| (x, y))).collect::>(); + + // Sample some number 10 of the indices, all is too slow for tests... 
+ let mut rng = ChaChaRng::from_seed(RNG_SEED); + let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i]); + for (x, y) in sampled { + let cell = Cell { row: (y as u32).into(), col: (x as u32).into() }; + let proof = polys.proof(pp.commit_key(), &cell).unwrap(); + let mut content = [0u8; 80]; + content[..48].copy_from_slice(&proof.to_bytes()[..]); + content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes()[..]); + + let dcell = DCell{position: DPosition { row: y as u32, col: x as u16 }, content }; + let verification = kate_recovery::proof::verify(&pp, &bdims, &commitments[y].to_bytes(), &dcell); + prop_assert!(verification.is_ok()); + prop_assert!(verification.unwrap()); + } +} +} From 95a7bf01fc327700db800aa71d2b8ce763e17dd4 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Sun, 26 Mar 2023 21:17:00 -0700 Subject: [PATCH 30/87] [skip ci] move into submodule --- kate/src/{gridgen.rs => gridgen/mod.rs} | 5 ++++- kate/src/{gridgen_tests => gridgen/tests}/commitments.rs | 0 kate/src/{gridgen_tests => gridgen/tests}/formatting.rs | 2 -- kate/src/{gridgen_tests => gridgen/tests}/mod.rs | 0 kate/src/{gridgen_tests => gridgen/tests}/reconstruction.rs | 2 +- kate/src/lib.rs | 3 --- 6 files changed, 5 insertions(+), 7 deletions(-) rename kate/src/{gridgen.rs => gridgen/mod.rs} (99%) rename kate/src/{gridgen_tests => gridgen/tests}/commitments.rs (100%) rename kate/src/{gridgen_tests => gridgen/tests}/formatting.rs (99%) rename kate/src/{gridgen_tests => gridgen/tests}/mod.rs (100%) rename kate/src/{gridgen_tests => gridgen/tests}/reconstruction.rs (98%) diff --git a/kate/src/gridgen.rs b/kate/src/gridgen/mod.rs similarity index 99% rename from kate/src/gridgen.rs rename to kate/src/gridgen/mod.rs index 22eaea99..403f4c4a 100644 --- a/kate/src/gridgen.rs +++ b/kate/src/gridgen/mod.rs @@ -20,6 +20,9 @@ use crate::{ Seed, }; +#[cfg(test)] +mod tests; + pub struct EvaluationGrid { pub lookup: DataLookup, pub evals: 
RowMajor, @@ -404,7 +407,7 @@ fn round_up_power_of_2(mut v: usize) -> usize { #[cfg(test)] #[allow(clippy::integer_arithmetic)] -mod tests { +mod unit_tests { use super::*; use proptest::{prop_assert_eq, proptest}; use test_case::test_case; diff --git a/kate/src/gridgen_tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs similarity index 100% rename from kate/src/gridgen_tests/commitments.rs rename to kate/src/gridgen/tests/commitments.rs diff --git a/kate/src/gridgen_tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs similarity index 99% rename from kate/src/gridgen_tests/formatting.rs rename to kate/src/gridgen/tests/formatting.rs index fd776b1b..e8a19d62 100644 --- a/kate/src/gridgen_tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -6,8 +6,6 @@ use hex_literal::hex; use crate::{gridgen::EvaluationGrid, Seed, config::DATA_CHUNK_SIZE}; - - #[test] fn newapi_test_flatten_block() { let extrinsics: Vec = vec![ diff --git a/kate/src/gridgen_tests/mod.rs b/kate/src/gridgen/tests/mod.rs similarity index 100% rename from kate/src/gridgen_tests/mod.rs rename to kate/src/gridgen/tests/mod.rs diff --git a/kate/src/gridgen_tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs similarity index 98% rename from kate/src/gridgen_tests/reconstruction.rs rename to kate/src/gridgen/tests/reconstruction.rs index 5a83db62..46ff7d7d 100644 --- a/kate/src/gridgen_tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -1,6 +1,6 @@ use crate::com::Cell; use crate::gridgen::EvaluationGrid; -use crate::gridgen_tests::{app_data_index_from_lookup, pp}; +use super::{app_data_index_from_lookup, pp}; use crate::Seed; use dusk_bytes::Serializable; use kate_grid::Grid; diff --git a/kate/src/lib.rs b/kate/src/lib.rs index d966f0cd..7411be13 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -164,9 +164,6 @@ pub mod com; #[cfg(feature = "std")] pub mod gridgen; -#[cfg(test)] -mod gridgen_tests; - /// Precalculate the length of 
padding IEC 9797 1. /// /// # NOTE From 5b49e543d7ccdabca8c1337eb4090e44bc8e0932 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 27 Mar 2023 01:41:26 -0700 Subject: [PATCH 31/87] [skip ci] port more tests, one failing --- kate/recovery/Cargo.toml | 2 +- kate/recovery/src/com.rs | 2 + kate/src/com.rs | 18 +++- kate/src/gridgen/mod.rs | 28 +++--- kate/src/gridgen/tests/commitments.rs | 93 ++++++++++++++++- kate/src/gridgen/tests/formatting.rs | 6 +- kate/src/gridgen/tests/reconstruction.rs | 121 ++++++++++++++++++++--- 7 files changed, 230 insertions(+), 40 deletions(-) diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 344fdf0c..16d3602f 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -15,9 +15,9 @@ rand = "0.8.4" rand_chacha = "0.3" serde = { version = "1.0", features = ["derive"] } thiserror = "1.0.37" +hex = "0.4" [dev-dependencies] -hex = "0.4" once_cell = "1.9.0" rand = "0.8.4" rand_chacha = "0.3" diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 1a42be9d..cb20ff3c 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -139,6 +139,7 @@ pub fn reconstruct_app_extrinsics( let data = reconstruct_available(dimensions, cells)?; let ranges = index.app_data_ranges(app_id); + dbg!(&hex::encode(&data), &ranges); Ok(unflatten_padded_data(ranges, data) .map_err(ReconstructionError::DataDecodingError)? 
.into_iter() @@ -305,6 +306,7 @@ pub fn unflatten_padded_data( data.pop(); } + dbg!(hex::encode(&data)); match data.pop() { None => Err("Cannot trim padding on empty data".to_string()), Some(config::PADDING_TAIL_VALUE) => Ok(data), diff --git a/kate/src/com.rs b/kate/src/com.rs index e890675d..925303af 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -50,12 +50,15 @@ impl Cell { #[derive(Debug)] pub enum Error { PlonkError(PlonkError), - CellLenghtExceeded, + DuskBytesError(dusk_bytes::Error), + MultiproofError(poly_multiproof::Error), + CellLengthExceeded, BadHeaderHash, BlockTooBig, InvalidChunkLength, DimensionsMismatch, ZeroDimension, + DomainSizeInalid, } impl From for Error { @@ -179,7 +182,7 @@ pub fn flatten_and_pad_block( == Some(0) ); let nz_chunk_size: NonZeroUsize = usize::try_from(block_dims.chunk_size) - .map_err(|_| Error::CellLenghtExceeded)? + .map_err(|_| Error::CellLengthExceeded)? .try_into() .map_err(|_| Error::ZeroDimension)?; @@ -273,7 +276,7 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result { // TODO: Better error type for BlsScalar case? 
let scalar_size_chunk = <[u8; SCALAR_SIZE]>::try_from(chunk).map_err(|_| Error::InvalidChunkLength)?; - BlsScalar::from_bytes(&scalar_size_chunk).map_err(|_| Error::CellLenghtExceeded) + BlsScalar::from_bytes(&scalar_size_chunk).map_err(|_| Error::CellLengthExceeded) } fn make_dims(bd: &BlockDimensions) -> Result { @@ -594,6 +597,7 @@ mod tests { #[test_case(8224, 256, 256 => BlockDimensions::new(2, 256, 32) ; "two rows")] #[test_case(2097152, 256, 256 => BlockDimensions::new(256, 256, 32) ; "max block size")] #[test_case(2097155, 256, 256 => panics "BlockTooBig" ; "too much data")] + // newapi done fn test_get_block_dimensions(size: u32, rows: R, cols: C) -> BlockDimensions where R: Into, @@ -602,6 +606,7 @@ mod tests { get_block_dimensions(size, rows.into(), cols.into(), 32).unwrap() } + // newapi done #[test] fn test_extend_data_matrix() { let expected_result = vec![ @@ -651,6 +656,7 @@ mod tests { #[test_case( 1..=32 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20800000000000000000000000000000000000000000000000000000000000" ; "Chunk same size")] #[test_case( 1..=33 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20218000000000000000000000000000000000000000000000000000000000" ; "Chunk 1 value longer")] #[test_case( 1..=34 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212280000000000000000000000000000000000000000000000000000000" ; "Chunk 2 value longer")] + // newapi ignore fn test_padding>(block: I) -> String { let padded = pad_iec_9797_1(block.collect()) .iter() @@ -660,6 +666,7 @@ mod tests { hex::encode(padded) } + // newapi done #[test] fn test_flatten_block() { let chunk_size = 32; @@ -797,6 +804,7 @@ mod tests { proptest! 
{ #![proptest_config(ProptestConfig::with_cases(20))] #[test] + // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( @@ -834,6 +842,7 @@ mod tests { proptest! { #![proptest_config(ProptestConfig::with_cases(20))] #[test] + // newapi done fn test_commitments_verify(ref xts in app_extrinsics_strategy()) { let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); @@ -852,6 +861,7 @@ mod tests { proptest! { #![proptest_config(ProptestConfig::with_cases(20))] #[test] + // newapi done fn verify_commitmnets_missing_row(ref xts in app_extrinsics_strategy()) { let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); @@ -871,6 +881,7 @@ mod tests { #[test] // Test build_commitments() function with a predefined input + // newapi done fn test_build_commitments_simple_commitment_check() { let block_rows = BlockLengthRows(256); let block_cols = BlockLengthColumns(256); @@ -1206,6 +1217,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat assert_eq!(row.len(), len); let mut result_bytes: Vec = vec![0u8; config::COMMITMENT_SIZE]; + println!("Row: {:?}", row); commit(&prover_key, row_eval_domain, row.clone(), &mut result_bytes).unwrap(); println!("Commitment: {result_bytes:?}"); diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 403f4c4a..04d385b6 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -5,7 +5,7 @@ use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; use dusk_bytes::Serializable; use dusk_plonk::{ commitment_scheme::kzg10::commitment::Commitment, - fft::{EvaluationDomain, Polynomial}, + fft::{EvaluationDomain, Evaluations, Polynomial}, 
prelude::{BlsScalar, CommitKey}, }; use kate_grid::{Dimensions, Extension, Grid, IntoColumnMajor, IntoRowMajor, RowMajor}; @@ -158,14 +158,13 @@ impl EvaluationGrid { let new_dims = self.dims.extend(Extension::height( extension_factor .try_into() - .map_err(|_| Error::CellLenghtExceeded)?, + .map_err(|_| Error::CellLengthExceeded)?, )); let domain = EvaluationDomain::new(self.dims.height())?; let domain_new = EvaluationDomain::new(new_dims.height())?; if domain_new.size() != new_dims.height() { - // TODO: throw a reasonable error - return Err(Error::CellLenghtExceeded); + return Err(Error::DomainSizeInalid); } let new_evals = self @@ -201,8 +200,8 @@ impl EvaluationGrid { inner: self .evals .rows() - .map(|(_, row)| Polynomial { - coeffs: domain.ifft(row), + .map(|(_, row)| { + Evaluations::from_vec_and_domain(row.to_vec(), domain).interpolate() }) .collect::>(), }) @@ -226,15 +225,14 @@ impl PolynomialGrid { pub fn commitment(&self, srs: &CommitKey, row: usize) -> Result { self.inner .get(row) - .ok_or(Error::CellLenghtExceeded) + .ok_or(Error::CellLengthExceeded) .and_then(|poly| srs.commit(poly).map_err(Error::PlonkError)) } pub fn proof(&self, srs: &CommitKey, cell: &Cell) -> Result { let x = cell.col.0 as usize; let y = cell.row.0 as usize; - // TODO: better error msg - let poly = self.inner.get(y).ok_or(Error::CellLenghtExceeded)?; + let poly = self.inner.get(y).ok_or(Error::CellLengthExceeded)?; let witness = srs.compute_single_witness(poly, &self.points[x]); Ok(srs.commit(&witness)?) 
} @@ -247,14 +245,13 @@ impl PolynomialGrid { target_dims: &Dimensions, ) -> Result { use poly_multiproof::traits::PolyMultiProofNoPrecomp; - // TODO: useful error let block = multiproof_block( cell.col.0 as usize, cell.row.0 as usize, &self.dims, target_dims, ) - .ok_or(Error::CellLenghtExceeded)?; + .ok_or(Error::CellLengthExceeded)?; let polys = self.inner[block.start_y..block.end_y] .iter() .map(|s| s.coeffs.iter().map(convert_bls).collect::>()) @@ -271,12 +268,12 @@ impl PolynomialGrid { .iter() .map(convert_bls) .collect::>(); - //let eval_slices = eval_grid.evals.rows().map(|(_, row)| &row[]).collect::>(); let mut ts = Transcript::new(b"avail-mp"); let proof = srs .open(&mut ts, &evals, &polys, points) - .expect("TODO: real error msg"); + .map_err(Error::MultiproofError)?; + Ok(Multiproof { proof, evals, @@ -381,12 +378,11 @@ fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { if a.as_ref().len() > DATA_CHUNK_SIZE { - todo!() + return Err(Error::InvalidChunkLength); } let mut buf = [0u8; BlsScalar::SIZE]; buf[0..a.as_ref().len()].copy_from_slice(a.as_ref()); - //TODO: better error type - BlsScalar::from_bytes(&buf).map_err(|_| Error::CellLenghtExceeded) + BlsScalar::from_bytes(&buf).map_err(Error::DuskBytesError) } // Round up. 
only valid for positive integers diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index b17150ee..515e8740 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -1,12 +1,16 @@ use super::*; -use crate::Seed; use crate::gridgen::*; use crate::testnet; +use crate::Seed; use da_types::AppExtrinsic; use da_types::AppId; +use da_types::BlockLengthColumns; +use da_types::BlockLengthRows; use dusk_bytes::Serializable; use hex_literal::hex; use kate_grid::Dimensions; +use kate_recovery::matrix::Position; +use test_case::test_case; #[test] fn test_build_commitments_simple_commitment_check() { @@ -86,4 +90,91 @@ proptest! { prop_assert!(missing.is_empty()); } } + + fn verify_commitments_missing_row(ref xts in app_extrinsics_strategy()) { + let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); + let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); + let polys = grid.make_polynomial_grid().unwrap(); + let commits = polys.commitments(pp().commit_key()) + .unwrap() + .iter() + .map(|c| c.to_bytes()) + .collect::>(); + + let index = app_data_index_from_lookup(&grid.lookup); + let public_params = testnet::public_params((grid.dims.width() as u32).into()); + + //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); + + //let index = app_data_index_try_from_layout(layout).unwrap(); + //let public_params = testnet::public_params(dims.cols.as_usize()); + //let extended_dims = dims.try_into().unwrap(); + //let commitments = commitments::from_slice(&commitments).unwrap(); + for xt in xts { + let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); + let mut row_elems = vec![None; grid.dims.height()]; + for (i, data) in &rows { + row_elems[*i] = Some(data.iter().flat_map(|s| 
s.to_bytes()).collect()); + } + let first_index = rows.iter().map(|(i, _)| *i).min().unwrap(); + row_elems.remove(first_index); + + let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); + let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &row_elems,&index,&extended_dims,xt.app_id.0).unwrap(); + prop_assert!(!missing.is_empty()); + } + } +} + +#[test_case( ([1,1,1,1]).to_vec(); "All values are non-zero but same")] +#[test_case( ([0,0,0,0]).to_vec(); "All values are zero")] +#[test_case( ([0,5,2,1]).to_vec(); "All values are different")] +fn test_zero_deg_poly_commit(row_values: Vec) { + // There are two main cases that generate a zero degree polynomial. One is for data that is non-zero, but the same. + // The other is for all-zero data. They differ, as the former yields a polynomial with one coefficient, and latter generates zero coefficients. + let len = row_values.len(); + let public_params = pp(); + + let row = row_values + .iter() + .map(|val| pad_to_bls_scalar(&[*val]).unwrap()) + .collect::>(); + + //let ae = AppExtrinsic { 0.into(), vec![} + let ev = EvaluationGrid { + lookup: Default::default(), // Shouldn't need to care about this + dims: Dimensions::new_unchecked(row_values.len(), 1), + evals: row.into_row_major(row_values.len(), 1).unwrap(), + }; + + println!("Row: {:?}", ev.evals.inner()); + + let pg = ev.make_polynomial_grid().unwrap(); + println!("Poly: {:?}", pg.inner[0]); + let commitment = pg.commitment(pp().commit_key(), 0).unwrap().to_bytes(); + + for x in 0..len { + // Randomly chosen cell to prove, probably should test all of them + let cell = Cell { + col: BlockLengthColumns(x.try_into().unwrap()), + row: BlockLengthRows(0), + }; + + let proof = pg.proof(pp().commit_key(), &cell).unwrap(); + + let proof_bytes = proof.to_bytes(); + let cell_bytes = ev.evals.get(x, 0).unwrap().to_bytes(); + let content = [&proof_bytes[..], 
&cell_bytes[..]].concat(); + let dims = kate_recovery::matrix::Dimensions::new(1, 4).unwrap(); + let cell = kate_recovery::data::Cell { + position: Position { + row: 0, + col: x as u16, + }, + content: content.try_into().unwrap(), + }; + let verification = kate_recovery::proof::verify(&public_params, &dims, &commitment, &cell); + assert!(verification.is_ok()); + assert!(verification.unwrap()) + } } diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index e8a19d62..120042a0 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -1,10 +1,10 @@ -use da_types::{AppExtrinsic, DataLookupIndexItem, DataLookup}; +use da_types::{AppExtrinsic, DataLookup, DataLookupIndexItem}; use dusk_bytes::Serializable; use dusk_plonk::prelude::BlsScalar; -use kate_grid::{Dimensions, Grid, IntoColumnMajor, IntoRowMajor}; use hex_literal::hex; +use kate_grid::{Dimensions, Grid, IntoColumnMajor, IntoRowMajor}; -use crate::{gridgen::EvaluationGrid, Seed, config::DATA_CHUNK_SIZE}; +use crate::{config::DATA_CHUNK_SIZE, gridgen::EvaluationGrid, Seed}; #[test] fn newapi_test_flatten_block() { diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 46ff7d7d..676fc5aa 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -1,11 +1,12 @@ +use super::{app_data_index_from_lookup, pp}; use crate::com::Cell; use crate::gridgen::EvaluationGrid; -use super::{app_data_index_from_lookup, pp}; use crate::Seed; +use da_types::AppExtrinsic; use dusk_bytes::Serializable; use kate_grid::Grid; use kate_recovery::com::reconstruct_extrinsics; -use kate_recovery::data::Cell as DCell; +use kate_recovery::data::{Cell as DCell, DataCell}; use kate_recovery::matrix::Position as DPosition; use proptest::prelude::*; use rand::distributions::Uniform; @@ -25,28 +26,70 @@ fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { sampled 
} +fn sample_cells(grid: &EvaluationGrid, columns: Option<&[usize]>) -> Vec { + let mut rng = ChaChaRng::from_seed([42u8; 32]); + let cols: Vec = match columns { + Some(cols) => cols.to_vec(), + None => (0..grid.dims.width()).into_iter().collect(), + }; + cols.iter() + .flat_map(|x| { + sample_unique(&mut rng, grid.dims.height() / 2, grid.dims.height()) + .into_iter() + .map(move |y| kate_recovery::data::DataCell { + position: kate_recovery::matrix::Position { + row: y as u32, + col: *x as u16, + }, + data: grid.evals.get(*x, y).unwrap().to_bytes(), + }) + }) + .collect::>() +} + +#[test] +fn test_multiple_extrinsics_for_same_app_id() { + let xt1 = vec![5, 5]; + let xt2 = vec![6, 6]; + let xts = [ + AppExtrinsic { + app_id: 1.into(), + data: xt1.clone(), + }, + AppExtrinsic { + app_id: 1.into(), + data: xt2.clone(), + }, + ]; + // The hash is used for seed for padding the block to next power of two value + let hash = Seed::default(); + let ev = EvaluationGrid::from_extrinsics(xts.into(), 4, 128, 2, hash) + .unwrap() + .extend_columns(2) + .unwrap(); + + let cells = sample_cells(&ev, None); + let index = app_data_index_from_lookup(&ev.lookup); + let bdims = + kate_recovery::matrix::Dimensions::new(ev.dims.height() as u16, ev.dims.width() as u16) + .unwrap(); + let res = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); + + assert_eq!(res[0].1[0], xt1); + assert_eq!(res[0].1[1], xt2); +} + proptest! 
{ #![proptest_config(ProptestConfig::with_cases(5))] #[test] -fn newapi_test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { +fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(2).unwrap(); - let gref = &grid; let dims = &grid.dims; //let (layout, commitments, dims, matrix) = par_build_commitments( // BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); const RNG_SEED: Seed = [42u8; 32]; - let mut rng = ChaChaRng::from_seed(RNG_SEED); - let cells = (0..dims.width()) - .flat_map(move |x| { - sample_unique(&mut rng, dims.height()/2, dims.height()) - .into_iter() - .map(move |y| { - kate_recovery::data::DataCell { - position: kate_recovery::matrix::Position { row: y as u32, col: x as u16 }, - data: gref.evals.get(x, y).unwrap().to_bytes() - } - }).collect::>() - }).collect::>(); + + let cells = sample_cells(&grid, None); let index = app_data_index_from_lookup(&grid.lookup); let bdims = kate_recovery::matrix::Dimensions::new(dims.height() as u16, dims.width() as u16).unwrap(); let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); @@ -77,3 +120,49 @@ fn newapi_test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) } } } + +#[test] +fn test_reconstruct_app_extrinsics_with_app_id() { + let app_id_1_data = br#""This is mocked test data. 
It will be formatted as a matrix of BLS scalar cells and then individual columns +get erasure coded to ensure redundancy."#; + + let app_id_2_data = + br#""Let's see how this gets encoded and then reconstructed by sampling only some data."#; + + let xts = vec![ + AppExtrinsic { + app_id: 0.into(), + data: vec![0], + }, + AppExtrinsic { + app_id: 1.into(), + data: app_id_1_data.to_vec(), + }, + AppExtrinsic { + app_id: 2.into(), + data: app_id_2_data.to_vec(), + }, + ]; + + let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 32, 4, Seed::default()) + .unwrap() + .extend_columns(2) + .unwrap(); + + dbg!(&grid.evals.inner()); + + let cols_1 = sample_cells(&grid, Some(&[0, 1, 2, 3])); + + let index = app_data_index_from_lookup(&grid.lookup); + + let bdims = + kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) + .unwrap(); + let res_1 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_1, 1).unwrap(); + assert_eq!(res_1[0], app_id_1_data); + + let cols_2 = sample_cells(&grid, Some(&[0, 2, 3])); + + let res_2 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_2, 2).unwrap(); + assert_eq!(res_2[0], app_id_2_data); +} From eaf7b2248ef49dff1eb9be8777fd70a57c003f94 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 27 Mar 2023 22:04:30 -0700 Subject: [PATCH 32/87] [skip ci] add all remaining tests, still one failing --- kate/src/com.rs | 9 +++ kate/src/gridgen/tests/formatting.rs | 92 +++++++++++++++++++++++- kate/src/gridgen/tests/mod.rs | 42 ++++++++++- kate/src/gridgen/tests/reconstruction.rs | 36 +--------- 4 files changed, 143 insertions(+), 36 deletions(-) diff --git a/kate/src/com.rs b/kate/src/com.rs index 925303af..329d4864 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -915,6 +915,7 @@ mod tests { } #[test] + // newapi wip fn test_reconstruct_app_extrinsics_with_app_id() { let app_id_1_data = br#""This is mocked test data. 
It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -965,6 +966,7 @@ get erasure coded to ensure redundancy."#; } #[test] + // newapi done fn test_decode_app_extrinsics() { let app_id_1_data = br#""This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -1020,6 +1022,7 @@ get erasure coded to ensure redundancy."#; } #[test] + // newapi done fn test_extend_mock_data() { let orig_data = br#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy. @@ -1053,6 +1056,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] + // newapi done fn test_multiple_extrinsics_for_same_app_id() { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; @@ -1092,6 +1096,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] + // newapi ignore fn test_extrinsics_grouping() { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; @@ -1152,6 +1157,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] + // newapi ignore fn par_build_commitments_column_wise_constant_row() { // This test will fail once we switch to row-wise orientation. // We should move `should_panic` to next test, until constant line issue is fixed. @@ -1176,6 +1182,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] + // newapi done fn par_build_commitments_row_wise_constant_row() { // Due to scale encoding, first line is not constant. // We will use second line to ensure constant row. 
@@ -1197,6 +1204,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat #[test_case( ([1,1,1,1]).to_vec(); "All values are non-zero but same")] #[test_case( ([0,0,0,0]).to_vec(); "All values are zero")] #[test_case( ([0,5,2,1]).to_vec(); "All values are different")] + // newapi done fn test_zero_deg_poly_commit(row_values: Vec) { // There are two main cases that generate a zero degree polynomial. One is for data that is non-zero, but the same. // The other is for all-zero data. They differ, as the former yields a polynomial with one coefficient, and latter generates zero coefficients. @@ -1263,6 +1271,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat #[test_case( r#"{ "row": 42, "col": 99 }"# => Cell::new(42.into(),99.into()) ; "Simple" )] #[test_case( r#"{ "row": 4294967295, "col": 99 }"# => Cell::new(4_294_967_295.into(),99.into()) ; "Max row" )] + // newapi ignore fn serde_block_length_types_untagged(data: &str) -> Cell { serde_json::from_str(data).unwrap() } diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index 120042a0..e85bd149 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -3,8 +3,19 @@ use dusk_bytes::Serializable; use dusk_plonk::prelude::BlsScalar; use hex_literal::hex; use kate_grid::{Dimensions, Grid, IntoColumnMajor, IntoRowMajor}; +use kate_recovery::{ + com::{app_specific_cells, decode_app_extrinsics, reconstruct_extrinsics}, + data::DataCell, +}; -use crate::{config::DATA_CHUNK_SIZE, gridgen::EvaluationGrid, Seed}; +use crate::{ + config::DATA_CHUNK_SIZE, + gridgen::{ + tests::{app_data_index_from_lookup, sample_cells}, + EvaluationGrid, + }, + Seed, +}; #[test] fn newapi_test_flatten_block() { @@ -103,3 +114,82 @@ fn newapi_test_extend_data_matrix() { assert_eq!(extend.evals.inner(), expected_result.inner()); } + +#[test] +fn test_decode_app_extrinsics() { + let app_id_1_data = br#""This is 
mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns +get erasure coded to ensure redundancy."#; + + let app_id_2_data = + br#""Let's see how this gets encoded and then reconstructed by sampling only some data."#; + + let data = [vec![0], app_id_1_data.to_vec(), app_id_2_data.to_vec()]; + + let hash = Seed::default(); + let xts = (0..=2) + .zip(data) + .map(|(app_id, data)| AppExtrinsic { + app_id: app_id.into(), + data, + }) + .collect::>(); + + let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 32, 4, hash) + .unwrap() + .extend_columns(2) + .unwrap(); + + let index = app_data_index_from_lookup(&grid.lookup); + let bdims = + kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) + .unwrap(); + for xt in &xts { + let positions = app_specific_cells(&index, &bdims, xt.app_id.0).unwrap(); + let cells = positions + .iter() + .map(|pos| DataCell { + position: pos.clone(), + data: grid + .evals + .get(pos.col as usize, pos.row as usize) + .unwrap() + .to_bytes(), + }) + .collect::>(); + let data = &decode_app_extrinsics(&index, &bdims, cells, xt.app_id.0).unwrap()[0]; + assert_eq!(data, &xt.data); + } + + assert!(matches!( + decode_app_extrinsics(&index, &bdims, vec![], 0), + Err(kate_recovery::com::ReconstructionError::MissingCell { .. }) + )); +} + +#[test] +fn test_extend_mock_data() { + let orig_data = br#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns +get erasure coded to ensure redundancy. 
+Let's see how this gets encoded and then reconstructed by sampling only some data."#; + let exts = vec![AppExtrinsic::from(orig_data.to_vec())]; + + // The hash is used for seed for padding the block to next power of two value + let hash = Seed::default(); + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 128, 2, hash) + .unwrap() + .extend_columns(2) + .unwrap(); + + let cols = sample_cells(&grid, None); + let bdims = + kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) + .unwrap(); + + let index = app_data_index_from_lookup(&grid.lookup); + let res = reconstruct_extrinsics(&index, &bdims, cols).unwrap(); + let s = String::from_utf8_lossy(res[0].1[0].as_slice()); + + assert_eq!(res[0].1[0], orig_data); + + eprintln!("Decoded: {}", s); +} diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index 32837a75..de912394 100644 --- a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -1,10 +1,16 @@ use da_types::{AppExtrinsic, DataLookup}; +use dusk_bytes::Serializable; use dusk_plonk::prelude::PublicParameters; -use kate_recovery::index::AppDataIndex; +use kate_grid::Grid; +use kate_recovery::{index::AppDataIndex, data::DataCell}; use proptest::{collection, prelude::*, sample::size_range}; +use rand::{distributions::Uniform, prelude::Distribution, SeedableRng}; +use rand_chacha::ChaChaRng; use crate::testnet; +use super::EvaluationGrid; + mod commitments; mod formatting; mod reconstruction; @@ -38,3 +44,37 @@ fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { index: lookup.index.iter().map(|e| (e.app_id.0, e.start)).collect(), } } + +fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { + let mut sampled = vec![]; + let u = Uniform::from(0..n); + while sampled.len() < n_samples || sampled.len() < n { + let t = u.sample(rng); + if !sampled.contains(&t) { + sampled.push(t) + } + } + sampled +} + +fn sample_cells(grid: &EvaluationGrid, 
columns: Option<&[usize]>) -> Vec { + let mut rng = ChaChaRng::from_seed([42u8; 32]); + let cols: Vec = match columns { + Some(cols) => cols.to_vec(), + None => (0..grid.dims.width()).into_iter().collect(), + }; + cols.iter() + .flat_map(|x| { + sample_unique(&mut rng, grid.dims.height() / 2, grid.dims.height()) + .into_iter() + .map(move |y| kate_recovery::data::DataCell { + position: kate_recovery::matrix::Position { + row: y as u32, + col: *x as u16, + }, + data: grid.evals.get(*x, y).unwrap().to_bytes(), + }) + }) + .collect::>() +} + diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 676fc5aa..2870c8ce 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -2,11 +2,12 @@ use super::{app_data_index_from_lookup, pp}; use crate::com::Cell; use crate::gridgen::EvaluationGrid; use crate::Seed; +use crate::gridgen::tests::sample_cells; use da_types::AppExtrinsic; use dusk_bytes::Serializable; use kate_grid::Grid; use kate_recovery::com::reconstruct_extrinsics; -use kate_recovery::data::{Cell as DCell, DataCell}; +use kate_recovery::data::{Cell as DCell}; use kate_recovery::matrix::Position as DPosition; use proptest::prelude::*; use rand::distributions::Uniform; @@ -14,39 +15,6 @@ use rand::prelude::Distribution; use rand::SeedableRng; use rand_chacha::ChaChaRng; -fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { - let mut sampled = vec![]; - let u = Uniform::from(0..n); - while sampled.len() < n_samples || sampled.len() < n { - let t = u.sample(rng); - if !sampled.contains(&t) { - sampled.push(t) - } - } - sampled -} - -fn sample_cells(grid: &EvaluationGrid, columns: Option<&[usize]>) -> Vec { - let mut rng = ChaChaRng::from_seed([42u8; 32]); - let cols: Vec = match columns { - Some(cols) => cols.to_vec(), - None => (0..grid.dims.width()).into_iter().collect(), - }; - cols.iter() - .flat_map(|x| { - sample_unique(&mut rng, grid.dims.height() 
/ 2, grid.dims.height()) - .into_iter() - .map(move |y| kate_recovery::data::DataCell { - position: kate_recovery::matrix::Position { - row: y as u32, - col: *x as u16, - }, - data: grid.evals.get(*x, y).unwrap().to_bytes(), - }) - }) - .collect::>() -} - #[test] fn test_multiple_extrinsics_for_same_app_id() { let xt1 = vec![5, 5]; From d37e6eadc08ec54fe36c27ac0a3d89ca6a7e1346 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 27 Mar 2023 22:30:44 -0700 Subject: [PATCH 33/87] Fix all tests, fully compatability with `com` --- kate/recovery/src/com.rs | 1 - kate/src/gridgen/tests/reconstruction.rs | 3 +-- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index cb20ff3c..f2091b7b 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -139,7 +139,6 @@ pub fn reconstruct_app_extrinsics( let data = reconstruct_available(dimensions, cells)?; let ranges = index.app_data_ranges(app_id); - dbg!(&hex::encode(&data), &ranges); Ok(unflatten_padded_data(ranges, data) .map_err(ReconstructionError::DataDecodingError)? 
.into_iter() diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 2870c8ce..f70d1d83 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -112,12 +112,11 @@ get erasure coded to ensure redundancy."#; }, ]; - let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 32, 4, Seed::default()) + let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 4, 32, Seed::default()) .unwrap() .extend_columns(2) .unwrap(); - dbg!(&grid.evals.inner()); let cols_1 = sample_cells(&grid, Some(&[0, 1, 2, 3])); From 56bf65b9846ab913c47e83c036880d7f1ed35c62 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 27 Mar 2023 22:53:06 -0700 Subject: [PATCH 34/87] Formatting --- kate/recovery/Cargo.toml | 2 +- kate/recovery/src/com.rs | 1 - kate/src/com.rs | 48 ++++++++++++------------ kate/src/gridgen/tests/mod.rs | 3 +- kate/src/gridgen/tests/reconstruction.rs | 5 +-- primitives/types/src/data_lookup.rs | 2 +- 6 files changed, 29 insertions(+), 32 deletions(-) diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 16d3602f..f6ed1914 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -9,13 +9,13 @@ codec = { package = "parity-scale-codec", version = "3", default-features = fals dusk-bytes = "0.1.6" dusk-plonk = { git = "https://github.com/maticnetwork/plonk", branch = "will/polynomial-visibility" } getrandom = { version = "0.2", features = ["js"] } +hex = "0.4" num = "0.4.0" once_cell = { version = "1.9.0", default-features = false } rand = "0.8.4" rand_chacha = "0.3" serde = { version = "1.0", features = ["derive"] } thiserror = "1.0.37" -hex = "0.4" [dev-dependencies] once_cell = "1.9.0" diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index f2091b7b..1a42be9d 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -305,7 +305,6 @@ pub fn unflatten_padded_data( data.pop(); } - dbg!(hex::encode(&data)); match 
data.pop() { None => Err("Cannot trim padding on empty data".to_string()), Some(config::PADDING_TAIL_VALUE) => Ok(data), diff --git a/kate/src/com.rs b/kate/src/com.rs index 329d4864..36e3c3e6 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -50,15 +50,15 @@ impl Cell { #[derive(Debug)] pub enum Error { PlonkError(PlonkError), - DuskBytesError(dusk_bytes::Error), - MultiproofError(poly_multiproof::Error), + DuskBytesError(dusk_bytes::Error), + MultiproofError(poly_multiproof::Error), CellLengthExceeded, BadHeaderHash, BlockTooBig, InvalidChunkLength, DimensionsMismatch, ZeroDimension, - DomainSizeInalid, + DomainSizeInalid, } impl From for Error { @@ -312,9 +312,9 @@ pub fn par_extend_data_matrix( // simple length with mod check would work... let chunks = block.par_chunks_exact(block_dims.chunk_size as usize); - if !chunks.remainder().is_empty() { - return Err(Error::DimensionsMismatch); - } + if !chunks.remainder().is_empty() { + return Err(Error::DimensionsMismatch); + } let scalars = chunks .into_par_iter() @@ -597,7 +597,7 @@ mod tests { #[test_case(8224, 256, 256 => BlockDimensions::new(2, 256, 32) ; "two rows")] #[test_case(2097152, 256, 256 => BlockDimensions::new(256, 256, 32) ; "max block size")] #[test_case(2097155, 256, 256 => panics "BlockTooBig" ; "too much data")] - // newapi done + // newapi done fn test_get_block_dimensions(size: u32, rows: R, cols: C) -> BlockDimensions where R: Into, @@ -606,7 +606,7 @@ mod tests { get_block_dimensions(size, rows.into(), cols.into(), 32).unwrap() } - // newapi done + // newapi done #[test] fn test_extend_data_matrix() { let expected_result = vec![ @@ -656,7 +656,7 @@ mod tests { #[test_case( 1..=32 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20800000000000000000000000000000000000000000000000000000000000" ; "Chunk same size")] #[test_case( 1..=33 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20218000000000000000000000000000000000000000000000000000000000" ; "Chunk 1 
value longer")] #[test_case( 1..=34 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20212280000000000000000000000000000000000000000000000000000000" ; "Chunk 2 value longer")] - // newapi ignore + // newapi ignore fn test_padding>(block: I) -> String { let padded = pad_iec_9797_1(block.collect()) .iter() @@ -666,7 +666,7 @@ mod tests { hex::encode(padded) } - // newapi done + // newapi done #[test] fn test_flatten_block() { let chunk_size = 32; @@ -804,7 +804,7 @@ mod tests { proptest! { #![proptest_config(ProptestConfig::with_cases(20))] #[test] - // newapi done + // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( @@ -842,7 +842,7 @@ mod tests { proptest! { #![proptest_config(ProptestConfig::with_cases(20))] #[test] - // newapi done + // newapi done fn test_commitments_verify(ref xts in app_extrinsics_strategy()) { let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); @@ -861,7 +861,7 @@ mod tests { proptest! { #![proptest_config(ProptestConfig::with_cases(20))] #[test] - // newapi done + // newapi done fn verify_commitmnets_missing_row(ref xts in app_extrinsics_strategy()) { let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); @@ -881,7 +881,7 @@ mod tests { #[test] // Test build_commitments() function with a predefined input - // newapi done + // newapi done fn test_build_commitments_simple_commitment_check() { let block_rows = BlockLengthRows(256); let block_cols = BlockLengthColumns(256); @@ -915,7 +915,7 @@ mod tests { } #[test] - // newapi wip + // newapi wip fn test_reconstruct_app_extrinsics_with_app_id() { let app_id_1_data = br#""This is mocked test data. 
It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -966,7 +966,7 @@ get erasure coded to ensure redundancy."#; } #[test] - // newapi done + // newapi done fn test_decode_app_extrinsics() { let app_id_1_data = br#""This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -1022,7 +1022,7 @@ get erasure coded to ensure redundancy."#; } #[test] - // newapi done + // newapi done fn test_extend_mock_data() { let orig_data = br#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy. @@ -1056,7 +1056,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] - // newapi done + // newapi done fn test_multiple_extrinsics_for_same_app_id() { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; @@ -1096,7 +1096,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] - // newapi ignore + // newapi ignore fn test_extrinsics_grouping() { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; @@ -1157,7 +1157,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] - // newapi ignore + // newapi ignore fn par_build_commitments_column_wise_constant_row() { // This test will fail once we switch to row-wise orientation. // We should move `should_panic` to next test, until constant line issue is fixed. @@ -1182,7 +1182,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } #[test] - // newapi done + // newapi done fn par_build_commitments_row_wise_constant_row() { // Due to scale encoding, first line is not constant. // We will use second line to ensure constant row. 
@@ -1204,7 +1204,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat #[test_case( ([1,1,1,1]).to_vec(); "All values are non-zero but same")] #[test_case( ([0,0,0,0]).to_vec(); "All values are zero")] #[test_case( ([0,5,2,1]).to_vec(); "All values are different")] - // newapi done + // newapi done fn test_zero_deg_poly_commit(row_values: Vec) { // There are two main cases that generate a zero degree polynomial. One is for data that is non-zero, but the same. // The other is for all-zero data. They differ, as the former yields a polynomial with one coefficient, and latter generates zero coefficients. @@ -1225,7 +1225,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat assert_eq!(row.len(), len); let mut result_bytes: Vec = vec![0u8; config::COMMITMENT_SIZE]; - println!("Row: {:?}", row); + println!("Row: {:?}", row); commit(&prover_key, row_eval_domain, row.clone(), &mut result_bytes).unwrap(); println!("Commitment: {result_bytes:?}"); @@ -1271,7 +1271,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat #[test_case( r#"{ "row": 42, "col": 99 }"# => Cell::new(42.into(),99.into()) ; "Simple" )] #[test_case( r#"{ "row": 4294967295, "col": 99 }"# => Cell::new(4_294_967_295.into(),99.into()) ; "Max row" )] - // newapi ignore + // newapi ignore fn serde_block_length_types_untagged(data: &str) -> Cell { serde_json::from_str(data).unwrap() } diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index de912394..7e3a616c 100644 --- a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -2,7 +2,7 @@ use da_types::{AppExtrinsic, DataLookup}; use dusk_bytes::Serializable; use dusk_plonk::prelude::PublicParameters; use kate_grid::Grid; -use kate_recovery::{index::AppDataIndex, data::DataCell}; +use kate_recovery::{data::DataCell, index::AppDataIndex}; use proptest::{collection, prelude::*, sample::size_range}; use rand::{distributions::Uniform, 
prelude::Distribution, SeedableRng}; use rand_chacha::ChaChaRng; @@ -77,4 +77,3 @@ fn sample_cells(grid: &EvaluationGrid, columns: Option<&[usize]>) -> Vec>() } - diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index f70d1d83..896463b2 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -1,13 +1,13 @@ use super::{app_data_index_from_lookup, pp}; use crate::com::Cell; +use crate::gridgen::tests::sample_cells; use crate::gridgen::EvaluationGrid; use crate::Seed; -use crate::gridgen::tests::sample_cells; use da_types::AppExtrinsic; use dusk_bytes::Serializable; use kate_grid::Grid; use kate_recovery::com::reconstruct_extrinsics; -use kate_recovery::data::{Cell as DCell}; +use kate_recovery::data::Cell as DCell; use kate_recovery::matrix::Position as DPosition; use proptest::prelude::*; use rand::distributions::Uniform; @@ -117,7 +117,6 @@ get erasure coded to ensure redundancy."#; .extend_columns(2) .unwrap(); - let cols_1 = sample_cells(&grid, Some(&[0, 1, 2, 3])); let index = app_data_index_from_lookup(&grid.lookup); diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index 7a19a5a6..6066194d 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -1,5 +1,5 @@ -use num_traits::Zero; use alloc::vec::Vec; +use num_traits::Zero; use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] From e2cee367cba3a9ddfb7b56819abda29a197d82fb Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 27 Mar 2023 23:09:49 -0700 Subject: [PATCH 35/87] [skip ci] undo toolchain version --- rust-toolchain.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-toolchain.toml b/rust-toolchain.toml index e9920bd0..ab25d60e 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "nightly-2023-03-17" +channel = 
"nightly-2022-11-15" components = ["rustfmt", "clippy", "llvm-tools-preview"] profile = "minimal" targets = ["wasm32-unknown-unknown"] From 91406002cadb290e004fad68766fadf51e099250 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 12 Apr 2023 14:54:35 -0700 Subject: [PATCH 36/87] Initial working example --- kate/examples/multiproof_verification.rs | 134 +++++++++++++++++++++++ kate/src/gridgen/mod.rs | 18 ++- kate/src/lib.rs | 1 + 3 files changed, 148 insertions(+), 5 deletions(-) create mode 100644 kate/examples/multiproof_verification.rs diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs new file mode 100644 index 00000000..cb6f4a00 --- /dev/null +++ b/kate/examples/multiproof_verification.rs @@ -0,0 +1,134 @@ +use da_types::{AppExtrinsic, AppId}; +use hex_literal::hex; +use kate::{Seed, Serializable}; +use poly_multiproof::{ + ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress}, + merlin::Transcript, + traits::PolyMultiProofNoPrecomp, +}; +use rand::thread_rng; + +fn main() { + let target_dims = kate::grid::Dimensions::new_unchecked(64, 16); + let pp = kate::testnet::public_params(256.into()); + let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng()); + let (proof, evals, commitments, dims) = { + let exts = vec![ + AppExtrinsic { + app_id: AppId(0), + data: hex!("CAFEBABE").to_vec(), + }, + AppExtrinsic { + app_id: AppId(1), + data: hex!("DEADBEEF").to_vec(), + }, + AppExtrinsic { + app_id: AppId(2), + data: hex!("12345678").to_vec(), + }, + ]; + let seed = Seed::default(); + let grid = kate::gridgen::EvaluationGrid::from_extrinsics(exts, 4, 256, 256, seed) + .unwrap() + .extend_columns(2) + .unwrap(); + + // Setup, serializing as bytes + let polys = grid.make_polynomial_grid().unwrap(); + + let commitments = polys + .commitments(&pp.commit_key()) + .unwrap() + .iter() + .flat_map(|c| c.0.to_bytes()) + .collect::>(); + + let multiproof = polys + .multiproof( + &pmp, + 
&kate::com::Cell { + row: 0.into(), + col: 0.into(), + }, + &grid, + &target_dims, + ) + .unwrap(); + + for r in &multiproof.evals { + for e in r { + assert!(e.serialized_size(Compress::Yes) == 32) + } + } + + let mut proof_bytes = [0u8; 48]; + // TODO: better proof ser + multiproof + .proof + .0 + .serialize_compressed(&mut proof_bytes[..]) + .unwrap(); + + // TODO: better evals ser + let evals_bytes = multiproof + .evals + .iter() + .flat_map(|row| { + row.iter().flat_map(|e| { + let mut out = [0u8; 32]; + e.serialize_uncompressed(&mut out[..]).unwrap(); + out + }) + }) + .collect::>(); + (proof_bytes, evals_bytes, commitments, grid.dims) + }; + + let mp_block = kate::gridgen::multiproof_block(0, 0, &dims, &target_dims).unwrap(); + let commits = commitments + .chunks_exact(48) + .skip(mp_block.start_y) + .take(mp_block.end_y - mp_block.start_y) + .map(|c| { + let mut out = [0u8; 48]; + out.copy_from_slice(c); + kate::pmp::Commitment( + kate::pmp::m1_blst::G1Affine::deserialize_compressed(&out[..]).unwrap(), + ) + }) + .collect::>(); + + type Fr = kate::pmp::m1_blst::Fr; + use kate::pmp::ark_poly::EvaluationDomain; + let points = kate::pmp::ark_poly::GeneralEvaluationDomain::::new(dims.width()) + .unwrap() + .elements() + .skip(mp_block.start_x) + .take(mp_block.end_x - mp_block.start_x) + .collect::>(); + + let block_commits = &commits[mp_block.start_x..mp_block.end_x]; + let evals_flat = evals + .chunks_exact(32) + .map(|e| { + let mut out = [0u8; 32]; + out.copy_from_slice(e); + kate::pmp::m1_blst::Fr::deserialize_compressed(&out[..]).unwrap() + }) + .collect::>(); + let evals_grid = evals_flat + .chunks_exact(mp_block.end_x - mp_block.start_x) + .collect::>(); + + let proof_point = kate::pmp::m1_blst::G1Affine::deserialize_compressed(&proof[..]).unwrap(); + let proof = kate::pmp::m1_blst::Proof(proof_point); + + pmp.verify( + &mut Transcript::new(b"avail-mp"), + block_commits, + &points, + &evals_grid, + &proof, + ) + .unwrap(); +} diff --git 
a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 04d385b6..256128a9 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -4,7 +4,6 @@ use codec::Encode; use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; use dusk_bytes::Serializable; use dusk_plonk::{ - commitment_scheme::kzg10::commitment::Commitment, fft::{EvaluationDomain, Evaluations, Polynomial}, prelude::{BlsScalar, CommitKey}, }; @@ -20,6 +19,8 @@ use crate::{ Seed, }; +pub use dusk_plonk::commitment_scheme::kzg10::commitment::Commitment; + #[cfg(test)] mod tests; @@ -274,6 +275,13 @@ impl PolynomialGrid { .open(&mut ts, &evals, &polys, points) .map_err(Error::MultiproofError)?; + for r in &evals { + for e in r { + use crate::pmp::ark_serialize::{CanonicalSerialize, Compress}; + assert!(e.serialized_size(Compress::Yes) == 32) + } + } + Ok(Multiproof { proof, evals, @@ -298,10 +306,10 @@ pub struct Multiproof { #[derive(Debug, Clone, PartialEq, Eq)] pub struct CellBlock { - start_x: usize, - start_y: usize, - end_x: usize, - end_y: usize, + pub start_x: usize, + pub start_y: usize, + pub end_x: usize, + pub end_y: usize, } /// Computes the `x, y`-th multiproof block of a grid of size `grid_dims`. 
diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 7411be13..10bf52ef 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -15,6 +15,7 @@ pub const LOG_TARGET: &str = "kate"; pub type Seed = [u8; 32]; pub use kate_grid as grid; +pub use dusk_bytes::Serializable; pub use poly_multiproof as pmp; pub mod config { From 774496aefe6790637c8c7d480ae6512047569b55 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 12 Apr 2023 16:04:05 -0700 Subject: [PATCH 37/87] Way cleaner pmp serialization --- Cargo.lock | 2 +- kate/Cargo.toml | 2 +- kate/examples/multiproof_verification.rs | 71 ++++++------------------ kate/src/gridgen/mod.rs | 28 +++++++--- 4 files changed, 40 insertions(+), 63 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0b87e61a..179767f6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2651,7 +2651,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof?rev=3345c76b84a14accb90020974a8cab837fe598a0#3345c76b84a14accb90020974a8cab837fe598a0" +source = "git+https://github.com/aphoh/poly-multiproof?rev=48aafccb0e10278b16894034a0240a7561f1efa0#48aafccb0e10278b16894034a0240a7561f1efa0" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index e5121064..4b9e8f94 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -18,7 +18,7 @@ kate-grid = { path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "3345c76b84a14accb90020974a8cab837fe598a0" } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "48aafccb0e10278b16894034a0240a7561f1efa0" } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = 
false, optional = true } rayon = { version = "1.5.2", optional = true } diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index cb6f4a00..a1db4202 100644 --- a/kate/examples/multiproof_verification.rs +++ b/kate/examples/multiproof_verification.rs @@ -1,30 +1,28 @@ use da_types::{AppExtrinsic, AppId}; use hex_literal::hex; +use kate::pmp::{merlin::Transcript, traits::PolyMultiProofNoPrecomp}; use kate::{Seed, Serializable}; -use poly_multiproof::{ - ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress}, - merlin::Transcript, - traits::PolyMultiProofNoPrecomp, -}; +use poly_multiproof::traits::AsBytes; use rand::thread_rng; fn main() { let target_dims = kate::grid::Dimensions::new_unchecked(64, 16); let pp = kate::testnet::public_params(256.into()); let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng()); + let points = kate::gridgen::domain_points(256).unwrap(); let (proof, evals, commitments, dims) = { let exts = vec![ AppExtrinsic { app_id: AppId(0), - data: hex!("CAFEBABE").to_vec(), + data: hex!("CAFEBABE0000000000000000").to_vec(), }, AppExtrinsic { app_id: AppId(1), - data: hex!("DEADBEEF").to_vec(), + data: hex!("DEADBEEF1111111111111111111111111111111111").to_vec(), }, AppExtrinsic { app_id: AppId(2), - data: hex!("12345678").to_vec(), + data: hex!("1234567899999999999999999999999999999999").to_vec(), }, ]; let seed = Seed::default(); @@ -55,31 +53,11 @@ fn main() { ) .unwrap(); - for r in &multiproof.evals { - for e in r { - assert!(e.serialized_size(Compress::Yes) == 32) - } - } - - let mut proof_bytes = [0u8; 48]; - // TODO: better proof ser - multiproof - .proof - .0 - .serialize_compressed(&mut proof_bytes[..]) - .unwrap(); - - // TODO: better evals ser + let proof_bytes = multiproof.proof.to_bytes().unwrap(); let evals_bytes = multiproof .evals .iter() - .flat_map(|row| { - row.iter().flat_map(|e| { - let mut out = [0u8; 32]; - e.serialize_uncompressed(&mut 
out[..]).unwrap(); - out - }) - }) + .flat_map(|row| row.iter().flat_map(|e| e.to_bytes().unwrap())) .collect::>(); (proof_bytes, evals_bytes, commitments, grid.dims) }; @@ -89,39 +67,26 @@ fn main() { .chunks_exact(48) .skip(mp_block.start_y) .take(mp_block.end_y - mp_block.start_y) - .map(|c| { - let mut out = [0u8; 48]; - out.copy_from_slice(c); - kate::pmp::Commitment( - kate::pmp::m1_blst::G1Affine::deserialize_compressed(&out[..]).unwrap(), - ) - }) - .collect::>(); + .map(|c| kate::pmp::Commitment::from_bytes(c.try_into().unwrap())) + .collect::, _>>() + .unwrap(); - type Fr = kate::pmp::m1_blst::Fr; - use kate::pmp::ark_poly::EvaluationDomain; - let points = kate::pmp::ark_poly::GeneralEvaluationDomain::::new(dims.width()) - .unwrap() - .elements() - .skip(mp_block.start_x) - .take(mp_block.end_x - mp_block.start_x) + let points = points[mp_block.start_x..mp_block.end_x] + .iter() + .map(kate::gridgen::to_ark_scalar) .collect::>(); let block_commits = &commits[mp_block.start_x..mp_block.end_x]; let evals_flat = evals .chunks_exact(32) - .map(|e| { - let mut out = [0u8; 32]; - out.copy_from_slice(e); - kate::pmp::m1_blst::Fr::deserialize_compressed(&out[..]).unwrap() - }) - .collect::>(); + .map(|e| kate::gridgen::ArkScalar::from_bytes(e.try_into().unwrap())) + .collect::, _>>() + .unwrap(); let evals_grid = evals_flat .chunks_exact(mp_block.end_x - mp_block.start_x) .collect::>(); - let proof_point = kate::pmp::m1_blst::G1Affine::deserialize_compressed(&proof[..]).unwrap(); - let proof = kate::pmp::m1_blst::Proof(proof_point); + let proof = kate::pmp::m1_blst::Proof::from_bytes(&proof).unwrap(); pmp.verify( &mut Transcript::new(b"avail-mp"), diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 256128a9..f9d42705 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -9,7 +9,7 @@ use dusk_plonk::{ }; use kate_grid::{Dimensions, Extension, Grid, IntoColumnMajor, IntoRowMajor, RowMajor}; use 
kate_recovery::config::PADDING_TAIL_VALUE; -use poly_multiproof::{m1_blst::M1NoPrecomp, merlin::Transcript}; +use poly_multiproof::{m1_blst::M1NoPrecomp, merlin::Transcript, traits::AsBytes}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; @@ -21,6 +21,9 @@ use crate::{ pub use dusk_plonk::commitment_scheme::kzg10::commitment::Commitment; +pub type ArkScalar = crate::pmp::m1_blst::Fr; +pub type MpCommitment = crate::pmp::Commitment; + #[cfg(test)] mod tests; @@ -275,13 +278,6 @@ impl PolynomialGrid { .open(&mut ts, &evals, &polys, points) .map_err(Error::MultiproofError)?; - for r in &evals { - for e in r { - use crate::pmp::ark_serialize::{CanonicalSerialize, Compress}; - assert!(e.serialized_size(Compress::Yes) == 32) - } - } - Ok(Multiproof { proof, evals, @@ -379,6 +375,22 @@ pub fn get_block_dims( } } +pub fn domain_points(n: usize) -> Result, Error> { + let domain = EvaluationDomain::new(n)?; + Ok(domain.elements().collect()) +} + +pub fn to_ark_scalar(s: &BlsScalar) -> ArkScalar { + ArkScalar { + 0: poly_multiproof::ark_ff::BigInt(s.0), + 1: PhantomData, + } +} + +pub fn to_mp_commitment(c: Commitment) -> MpCommitment { + MpCommitment::from_bytes(&c.to_bytes()).expect("commitment is valid") +} + fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { let n_multiples = input.saturating_add(multiple.get()).saturating_sub(1) / multiple; n_multiples.saturating_mul(multiple.get()) From 19c04e212caadaa553799c4ee896f6f0b67d646a Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 12 Apr 2023 16:16:39 -0700 Subject: [PATCH 38/87] Fix using std things in nostd --- kate/src/lib.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 10bf52ef..78168be0 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -14,8 +14,11 @@ use crate::config::DATA_CHUNK_SIZE; pub const LOG_TARGET: &str = "kate"; pub type Seed = [u8; 32]; +#[cfg(feature = "std")] pub use kate_grid as grid; +#[cfg(feature = "std")] pub 
use dusk_bytes::Serializable; +#[cfg(feature = "std")] pub use poly_multiproof as pmp; pub mod config { From 0cba11e45019550b4741ed259b1d37bd59d3312a Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 12 Apr 2023 21:53:56 -0700 Subject: [PATCH 39/87] Update multiproofs --- Cargo.lock | 2 +- kate/Cargo.toml | 2 +- kate/examples/multiproof_verification.rs | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 179767f6..4f0fb02f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2651,7 +2651,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof?rev=48aafccb0e10278b16894034a0240a7561f1efa0#48aafccb0e10278b16894034a0240a7561f1efa0" +source = "git+https://github.com/aphoh/poly-multiproof?rev=f09b4d502e2239ccfc5a7a1658713393feca8674#f09b4d502e2239ccfc5a7a1658713393feca8674" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 4b9e8f94..83e26455 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -18,7 +18,7 @@ kate-grid = { path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "48aafccb0e10278b16894034a0240a7561f1efa0" } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "f09b4d502e2239ccfc5a7a1658713393feca8674" } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index a1db4202..14b3fe9e 100644 --- a/kate/examples/multiproof_verification.rs +++ 
b/kate/examples/multiproof_verification.rs @@ -6,7 +6,7 @@ use poly_multiproof::traits::AsBytes; use rand::thread_rng; fn main() { - let target_dims = kate::grid::Dimensions::new_unchecked(64, 16); + let target_dims = kate::grid::Dimensions::new_unchecked(16, 64); let pp = kate::testnet::public_params(256.into()); let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng()); let points = kate::gridgen::domain_points(256).unwrap(); @@ -14,7 +14,7 @@ fn main() { let exts = vec![ AppExtrinsic { app_id: AppId(0), - data: hex!("CAFEBABE0000000000000000").to_vec(), + data: hex!("CAFEBABE00000000000000000000000000000000000000").to_vec(), }, AppExtrinsic { app_id: AppId(1), From e06c2e7d9c64b048edc7948131a4b4d8259bcc73 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 12 Apr 2023 21:55:05 -0700 Subject: [PATCH 40/87] Rustfmt --- kate/src/gridgen/mod.rs | 2 +- kate/src/lib.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index f9d42705..66c8f053 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -388,7 +388,7 @@ pub fn to_ark_scalar(s: &BlsScalar) -> ArkScalar { } pub fn to_mp_commitment(c: Commitment) -> MpCommitment { - MpCommitment::from_bytes(&c.to_bytes()).expect("commitment is valid") + MpCommitment::from_bytes(&c.to_bytes()).expect("commitment is valid") } fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 78168be0..34329b1e 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -14,11 +14,11 @@ use crate::config::DATA_CHUNK_SIZE; pub const LOG_TARGET: &str = "kate"; pub type Seed = [u8; 32]; -#[cfg(feature = "std")] -pub use kate_grid as grid; #[cfg(feature = "std")] pub use dusk_bytes::Serializable; #[cfg(feature = "std")] +pub use kate_grid as grid; +#[cfg(feature = "std")] pub use poly_multiproof as pmp; pub mod config { From ed0f2994506fd71550175722fe0e80f46d95c5e6 Mon 
Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 12 Apr 2023 22:44:48 -0700 Subject: [PATCH 41/87] [skip ci] Update pmp --- kate/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 83e26455..417262e9 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -18,7 +18,7 @@ kate-grid = { path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "f09b4d502e2239ccfc5a7a1658713393feca8674" } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "99335b664253ba4eca35f52b812af8e691f2497e" } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } From cb51fec5bb610ba8a5f0fe44426345072367acdd Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 26 Apr 2023 23:30:15 -0700 Subject: [PATCH 42/87] Add parallel method for poly grid construction --- Cargo.lock | 5 ++++- kate/Cargo.toml | 1 + kate/grid/Cargo.toml | 6 ++++++ kate/grid/src/grid.rs | 10 +++++++++- kate/src/gridgen/mod.rs | 21 ++++++++++++++++++--- 5 files changed, 38 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4f0fb02f..9373d3fc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1986,6 +1986,9 @@ dependencies = [ [[package]] name = "kate-grid" version = "0.6.1" +dependencies = [ + "rayon", +] [[package]] name = "kate-recovery" @@ -2651,7 +2654,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof?rev=f09b4d502e2239ccfc5a7a1658713393feca8674#f09b4d502e2239ccfc5a7a1658713393feca8674" +source = 
"git+https://github.com/aphoh/poly-multiproof?rev=99335b664253ba4eca35f52b812af8e691f2497e#99335b664253ba4eca35f52b812af8e691f2497e" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 417262e9..169158d5 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -38,6 +38,7 @@ test-case = "1.2.3" [features] default = ["std"] alloc = ["dusk-plonk/alloc"] +parallel = ["std", "rayon", "kate-grid/parallel"] std = [ "kate-recovery/std", diff --git a/kate/grid/Cargo.toml b/kate/grid/Cargo.toml index 33fba998..edf33729 100644 --- a/kate/grid/Cargo.toml +++ b/kate/grid/Cargo.toml @@ -3,3 +3,9 @@ name = "kate-grid" version = "0.6.1" authors = ["William Arnold warnold@polygon.technology"] edition = "2021" + +[dependencies] +rayon = {version = "1.5.2", optional = true} + +[features] +parallel = ["rayon"] diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index 6dc23316..30ce77d5 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -88,7 +88,10 @@ impl Grid for ColumnMajor { } } -impl RowMajor { +#[cfg(feature = "parallel")] +use rayon::prelude::*; + +impl RowMajor { pub fn row(&self, y: usize) -> Option<&[A]> { if y >= self.height() { return None; @@ -109,6 +112,11 @@ impl RowMajor { (0..self.height()).map(|y| (y, self.row(y).expect("Bounds already checked"))) } + #[cfg(feature = "parallel")] + pub fn rows_par_iter(&self) -> impl ParallelIterator + '_ { + (0..self.height()).into_par_iter().map(|y| (y, self.row(y).expect("Bounds already checked"))) + } + // TODO: this return type is kinda gross, should it just iterate over vecs? 
pub fn columns(&self) -> impl Iterator)> + '_ { (0..self.width()).map(|x| (x, self.iter_col(x).expect("Bounds already checked"))) diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 66c8f053..54884465 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -198,12 +198,27 @@ impl EvaluationGrid { pub fn make_polynomial_grid(&self) -> Result { let domain = EvaluationDomain::new(self.dims.width())?; + let rows = self.evals.rows(); Ok(PolynomialGrid { dims: self.dims.clone(), points: domain.elements().collect(), - inner: self - .evals - .rows() + inner: rows + .map(|(_, row)| { + Evaluations::from_vec_and_domain(row.to_vec(), domain).interpolate() + }) + .collect::>(), + }) + } + + #[cfg(feature = "parallel")] + pub fn make_polynomial_grid_par(&self) -> Result { + use rayon::prelude::*; + let domain = EvaluationDomain::new(self.dims.width())?; + let rows = self.evals.rows_par_iter(); + Ok(PolynomialGrid { + dims: self.dims.clone(), + points: domain.elements().collect(), + inner: rows .map(|(_, row)| { Evaluations::from_vec_and_domain(row.to_vec(), domain).interpolate() }) From 50f98be1458a2ecd6c89b6922224606fa0231a9e Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 27 Apr 2023 01:58:29 -0700 Subject: [PATCH 43/87] clean up parallel api for extension too --- kate/grid/Cargo.toml | 2 +- kate/grid/src/grid.rs | 10 ++++++---- kate/src/gridgen/mod.rs | 44 +++++++++++++++++++---------------------- 3 files changed, 27 insertions(+), 29 deletions(-) diff --git a/kate/grid/Cargo.toml b/kate/grid/Cargo.toml index edf33729..82cf3469 100644 --- a/kate/grid/Cargo.toml +++ b/kate/grid/Cargo.toml @@ -5,7 +5,7 @@ authors = ["William Arnold warnold@polygon.technology"] edition = "2021" [dependencies] -rayon = {version = "1.5.2", optional = true} +rayon = { version = "1.5.2", optional = true } [features] parallel = ["rayon"] diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index 30ce77d5..e483d1a0 100644 --- a/kate/grid/src/grid.rs +++ 
b/kate/grid/src/grid.rs @@ -112,10 +112,12 @@ impl RowMajor { (0..self.height()).map(|y| (y, self.row(y).expect("Bounds already checked"))) } - #[cfg(feature = "parallel")] - pub fn rows_par_iter(&self) -> impl ParallelIterator + '_ { - (0..self.height()).into_par_iter().map(|y| (y, self.row(y).expect("Bounds already checked"))) - } + #[cfg(feature = "parallel")] + pub fn rows_par_iter(&self) -> impl ParallelIterator + '_ { + (0..self.height()) + .into_par_iter() + .map(|y| (y, self.row(y).expect("Bounds already checked"))) + } // TODO: this return type is kinda gross, should it just iterate over vecs? pub fn columns(&self) -> impl Iterator)> + '_ { diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 54884465..75b1431d 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -19,6 +19,9 @@ use crate::{ Seed, }; +#[cfg(feature = "parallel")] +use rayon::prelude::*; + pub use dusk_plonk::commitment_scheme::kzg10::commitment::Commitment; pub type ArkScalar = crate::pmp::m1_blst::Fr; @@ -171,18 +174,24 @@ impl EvaluationGrid { return Err(Error::DomainSizeInalid); } - let new_evals = self + let cols = self .evals .columns() - .flat_map(|(_x, col)| { - // put elts into a new column - let mut ext_col = Vec::with_capacity(domain_new.size()); - col.for_each(|s| ext_col.push(*s)); + .map(|(_i, col)| col.map(|s| *s).collect::>()) + .collect::>(); + + #[cfg(not(feature = "parallel"))] + let col_iter = cols.into_iter(); + #[cfg(feature = "parallel")] + let col_iter = cols.into_par_iter(); + + let new_evals = col_iter + .flat_map(|mut col| { // ifft, resize, fft - domain.ifft_slice(&mut ext_col); - ext_col.resize(domain_new.size(), BlsScalar::zero()); - domain_new.fft_slice(&mut ext_col); - ext_col + domain.ifft_slice(col.as_mut_slice()); + col.resize(domain_new.size(), BlsScalar::zero()); + domain_new.fft_slice(&mut col); + col }) .collect::>() .into_column_major(new_dims.width(), new_dims.height()) @@ -198,22 +207,9 @@ impl EvaluationGrid { pub fn 
make_polynomial_grid(&self) -> Result { let domain = EvaluationDomain::new(self.dims.width())?; + #[cfg(not(feature = "parallel"))] let rows = self.evals.rows(); - Ok(PolynomialGrid { - dims: self.dims.clone(), - points: domain.elements().collect(), - inner: rows - .map(|(_, row)| { - Evaluations::from_vec_and_domain(row.to_vec(), domain).interpolate() - }) - .collect::>(), - }) - } - - #[cfg(feature = "parallel")] - pub fn make_polynomial_grid_par(&self) -> Result { - use rayon::prelude::*; - let domain = EvaluationDomain::new(self.dims.width())?; + #[cfg(feature = "parallel")] let rows = self.evals.rows_par_iter(); Ok(PolynomialGrid { dims: self.dims.clone(), From f23cd90ac248ca17c2cf2be520ff36b5367e9a3f Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 27 Apr 2023 02:44:15 -0700 Subject: [PATCH 44/87] Add faster commitment gen call --- kate/src/gridgen/mod.rs | 68 +++++++++++++++++++++++---- kate/src/gridgen/tests/commitments.rs | 16 +++++-- 2 files changed, 73 insertions(+), 11 deletions(-) diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 75b1431d..84bf17af 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -9,7 +9,11 @@ use dusk_plonk::{ }; use kate_grid::{Dimensions, Extension, Grid, IntoColumnMajor, IntoRowMajor, RowMajor}; use kate_recovery::config::PADDING_TAIL_VALUE; -use poly_multiproof::{m1_blst::M1NoPrecomp, merlin::Transcript, traits::AsBytes}; +use poly_multiproof::{ + m1_blst::{Bls12_381, M1NoPrecomp}, + merlin::Transcript, + traits::{AsBytes, Committer}, +}; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; @@ -229,14 +233,54 @@ pub struct PolynomialGrid { dims: Dimensions, } +macro_rules! 
cfg_iter { + ($e: expr) => {{ + #[cfg(feature = "parallel")] + let result = $e.par_iter(); + + #[cfg(not(feature = "parallel"))] + let result = $e.iter(); + + result + }}; +} + impl PolynomialGrid { pub fn commitments(&self, srs: &CommitKey) -> Result, Error> { - self.inner - .iter() + cfg_iter!(self.inner) .map(|poly| srs.commit(poly).map_err(Error::PlonkError)) .collect() } + /// Computes the commitments of the grid for the given extension by committing, then fft-ing + /// the commitments. + // TODO: fix this all up without the gross conversions after moving to arkworks + pub fn extended_commitments( + &self, + srs: &(impl Committer + Sync), + extension_factor: usize, + ) -> Result, Error> { + use poly_multiproof::ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; + use poly_multiproof::m1_blst::{Fr, G1}; + let mut res = cfg_iter!(self.inner) + .map(|poly| poly.coeffs.iter().map(convert_scalar).collect::>()) + .map(|coeffs| { + srs.commit(&coeffs) + .map_err(Error::MultiproofError) + .map(|a| a.0.into()) + }) + .collect::, _>>()?; + let domain_n = GeneralEvaluationDomain::::new(res.len()).unwrap(); + let domain_ext = + GeneralEvaluationDomain::::new(res.len().saturating_mul(extension_factor)).unwrap(); + domain_n.ifft_in_place(&mut res); + domain_ext.fft_in_place(&mut res); + Ok(res + .into_iter() + .map(|a| Commitment(convert_g1(a.into()))) + .collect()) + } + pub fn commitment(&self, srs: &CommitKey, row: usize) -> Result { self.inner .get(row) @@ -269,19 +313,19 @@ impl PolynomialGrid { .ok_or(Error::CellLengthExceeded)?; let polys = self.inner[block.start_y..block.end_y] .iter() - .map(|s| s.coeffs.iter().map(convert_bls).collect::>()) + .map(|s| s.coeffs.iter().map(convert_scalar).collect::>()) .collect::>(); let evals = (block.start_y..block.end_y) .map(|y| { eval_grid.evals.row(y).expect("Already bounds checked")[block.start_x..block.end_x] .iter() - .map(convert_bls) + .map(convert_scalar) .collect::>() }) .collect::>(); let points = 
&self.points[block.start_x..block.end_x] .iter() - .map(convert_bls) + .map(convert_scalar) .collect::>(); let mut ts = Transcript::new(b"avail-mp"); @@ -297,13 +341,21 @@ impl PolynomialGrid { } } -fn convert_bls(dusk: &dusk_plonk::bls12_381::BlsScalar) -> poly_multiproof::m1_blst::Fr { +fn convert_scalar(dusk: &dusk_plonk::bls12_381::BlsScalar) -> poly_multiproof::m1_blst::Fr { poly_multiproof::m1_blst::Fr { 0: poly_multiproof::ark_ff::BigInt(dusk.0), 1: PhantomData, } } +// TODO: stop using this when we switch everything over to arkworks +fn convert_g1(ark: poly_multiproof::m1_blst::G1Affine) -> dusk_plonk::bls12_381::G1Affine { + let comm = poly_multiproof::Commitment(ark) + .to_bytes() + .expect("TODO: stop using this"); + dusk_plonk::bls12_381::G1Affine::from_bytes(&comm).expect("TODO: stop using this") +} + #[derive(Debug, Clone)] pub struct Multiproof { pub proof: poly_multiproof::m1_blst::Proof, @@ -497,7 +549,7 @@ mod unit_tests { fn test_convert_bls_scalar(input: [u8; 31]) { use poly_multiproof::ark_serialize::CanonicalSerialize; let dusk = pad_to_bls_scalar(input).unwrap(); - let ark = convert_bls(&dusk); + let ark = convert_scalar(&dusk); let dusk_out = dusk.to_bytes(); let mut ark_out = [0u8; 32]; ark.serialize_compressed(&mut ark_out[..]).unwrap(); diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index 515e8740..e42d6576 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -21,6 +21,7 @@ fn test_build_commitments_simple_commitment_check() { 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, ]; + let pmp_pp = crate::testnet::multiproof_params(256, 256); let evals = EvaluationGrid::from_extrinsics( vec![AppExtrinsic::from(original_data.to_vec())], @@ -30,18 +31,27 @@ fn test_build_commitments_simple_commitment_check() { hash, ) .unwrap(); - let evals = 
evals.extend_columns(2).unwrap(); - let polys = evals.make_polynomial_grid().unwrap(); + let ext_evals = evals.extend_columns(2).unwrap(); + let polys = ext_evals.make_polynomial_grid().unwrap(); let commits = polys .commitments(pp().commit_key()) .unwrap() .into_iter() .flat_map(|p| p.to_bytes()) .collect::>(); + let commits_fft_extended = evals + .make_polynomial_grid() + .unwrap() + .extended_commitments(&pmp_pp, 2) + .unwrap() + .into_iter() + .flat_map(|p| p.to_bytes()) + .collect::>(); - assert_eq!(evals.dims, Dimensions::new_unchecked(4, 2)); + assert_eq!(ext_evals.dims, Dimensions::new_unchecked(4, 2)); let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); assert_eq!(commits, expected_commitments); + assert_eq!(commits_fft_extended, expected_commitments); } #[test] From 774e48f08c4601274b4d364b5b64a438cdb12f7f Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 27 Apr 2023 18:54:05 -0700 Subject: [PATCH 45/87] [skip ci] Clean up some extended commit stuff --- Cargo.lock | 2 +- kate/Cargo.toml | 2 +- kate/src/gridgen/mod.rs | 30 ++++++++++++------------------ 3 files changed, 14 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9373d3fc..abd44ad3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2654,7 +2654,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof?rev=99335b664253ba4eca35f52b812af8e691f2497e#99335b664253ba4eca35f52b812af8e691f2497e" +source = "git+https://github.com/aphoh/poly-multiproof?rev=359cf33b40d5e2b7e1dde8e92326d0685244960c#359cf33b40d5e2b7e1dde8e92326d0685244960c" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 169158d5..36bda361 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -18,7 +18,7 @@ kate-grid = { 
path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "99335b664253ba4eca35f52b812af8e691f2497e" } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "359cf33b40d5e2b7e1dde8e92326d0685244960c" } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 84bf17af..5371a42f 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -260,25 +260,19 @@ impl PolynomialGrid { srs: &(impl Committer + Sync), extension_factor: usize, ) -> Result, Error> { - use poly_multiproof::ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; - use poly_multiproof::m1_blst::{Fr, G1}; - let mut res = cfg_iter!(self.inner) + let res = cfg_iter!(self.inner) .map(|poly| poly.coeffs.iter().map(convert_scalar).collect::>()) - .map(|coeffs| { - srs.commit(&coeffs) - .map_err(Error::MultiproofError) - .map(|a| a.0.into()) - }) - .collect::, _>>()?; - let domain_n = GeneralEvaluationDomain::::new(res.len()).unwrap(); - let domain_ext = - GeneralEvaluationDomain::::new(res.len().saturating_mul(extension_factor)).unwrap(); - domain_n.ifft_in_place(&mut res); - domain_ext.fft_in_place(&mut res); - Ok(res - .into_iter() - .map(|a| Commitment(convert_g1(a.into()))) - .collect()) + .map(|coeffs| srs.commit(&coeffs).map_err(Error::MultiproofError)) + .collect::, _>>()?; + let commits = poly_multiproof::Commitment::::extend_commitments( + &res, + res.len().saturating_mul(extension_factor), + ) + .map_err(Error::MultiproofError)?; + Ok(commits + .iter() + .map(|c| Commitment(convert_g1(c.0))) + 
.collect::>()) } pub fn commitment(&self, srs: &CommitKey, row: usize) -> Result { From 40eb7db24e53e297c2997d3dbd69f1507107818a Mon Sep 17 00:00:00 2001 From: William Arnold Date: Fri, 28 Apr 2023 16:09:22 -0700 Subject: [PATCH 46/87] Remove dusk_plonk from `kate::gridgen` --- Cargo.lock | 2 +- kate/Cargo.toml | 2 +- kate/examples/multiproof_verification.rs | 16 +- kate/src/com.rs | 8 +- kate/src/gridgen/mod.rs | 192 +++++++++-------------- kate/src/gridgen/tests/commitments.rs | 44 +++--- kate/src/gridgen/tests/formatting.rs | 12 +- kate/src/gridgen/tests/mod.rs | 10 +- kate/src/gridgen/tests/reconstruction.rs | 16 +- 9 files changed, 123 insertions(+), 179 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index abd44ad3..118e39c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2654,7 +2654,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof?rev=359cf33b40d5e2b7e1dde8e92326d0685244960c#359cf33b40d5e2b7e1dde8e92326d0685244960c" +source = "git+https://github.com/aphoh/poly-multiproof?rev=1ec7c7eca0861ad89427c45534006f49d039820a#1ec7c7eca0861ad89427c45534006f49d039820a" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 36bda361..a798dd8b 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -18,7 +18,7 @@ kate-grid = { path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "359cf33b40d5e2b7e1dde8e92326d0685244960c" } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "1ec7c7eca0861ad89427c45534006f49d039820a" } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true 
} rayon = { version = "1.5.2", optional = true } diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index 14b3fe9e..53df31cc 100644 --- a/kate/examples/multiproof_verification.rs +++ b/kate/examples/multiproof_verification.rs @@ -1,13 +1,12 @@ use da_types::{AppExtrinsic, AppId}; use hex_literal::hex; -use kate::pmp::{merlin::Transcript, traits::PolyMultiProofNoPrecomp}; -use kate::{Seed, Serializable}; +use kate::{pmp::{merlin::Transcript, traits::PolyMultiProofNoPrecomp}, Seed}; use poly_multiproof::traits::AsBytes; use rand::thread_rng; fn main() { let target_dims = kate::grid::Dimensions::new_unchecked(16, 64); - let pp = kate::testnet::public_params(256.into()); + let pp = kate::testnet::multiproof_params(256, 256); let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng()); let points = kate::gridgen::domain_points(256).unwrap(); let (proof, evals, commitments, dims) = { @@ -35,10 +34,10 @@ fn main() { let polys = grid.make_polynomial_grid().unwrap(); let commitments = polys - .commitments(&pp.commit_key()) + .commitments(&pp) .unwrap() .iter() - .flat_map(|c| c.0.to_bytes()) + .flat_map(|c| c.to_bytes().unwrap()) .collect::>(); let multiproof = polys @@ -71,11 +70,6 @@ fn main() { .collect::, _>>() .unwrap(); - let points = points[mp_block.start_x..mp_block.end_x] - .iter() - .map(kate::gridgen::to_ark_scalar) - .collect::>(); - let block_commits = &commits[mp_block.start_x..mp_block.end_x]; let evals_flat = evals .chunks_exact(32) @@ -91,7 +85,7 @@ fn main() { pmp.verify( &mut Transcript::new(b"avail-mp"), block_commits, - &points, + &points[mp_block.start_x..mp_block.end_x], &evals_grid, &proof, ) diff --git a/kate/src/com.rs b/kate/src/com.rs index 36e3c3e6..681aa885 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -58,7 +58,7 @@ pub enum Error { InvalidChunkLength, DimensionsMismatch, ZeroDimension, - DomainSizeInalid, + DomainSizeInvalid, } impl From for Error { @@ -67,6 +67,12 @@ 
impl From for Error { } } +impl From for Error { + fn from(err: poly_multiproof::Error) -> Self { + Self::MultiproofError(err) + } +} + pub type XtsLayout = Vec<(AppId, u32)>; type FlatData = Vec; type DataChunk = [u8; DATA_CHUNK_SIZE]; diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 5371a42f..c83fa3d3 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -1,18 +1,17 @@ -use core::{marker::PhantomData, num::NonZeroUsize}; - +use crate::pmp::{ + ark_poly::{EvaluationDomain, GeneralEvaluationDomain}, + m1_blst::{Bls12_381, M1NoPrecomp}, + merlin::Transcript, + traits::Committer, +}; use codec::Encode; +use core::num::NonZeroUsize; use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; -use dusk_bytes::Serializable; -use dusk_plonk::{ - fft::{EvaluationDomain, Evaluations, Polynomial}, - prelude::{BlsScalar, CommitKey}, -}; use kate_grid::{Dimensions, Extension, Grid, IntoColumnMajor, IntoRowMajor, RowMajor}; use kate_recovery::config::PADDING_TAIL_VALUE; use poly_multiproof::{ - m1_blst::{Bls12_381, M1NoPrecomp}, - merlin::Transcript, - traits::{AsBytes, Committer}, + m1_blst::Proof, + traits::{KZGProof, PolyMultiProofNoPrecomp}, }; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; @@ -26,17 +25,37 @@ use crate::{ #[cfg(feature = "parallel")] use rayon::prelude::*; -pub use dusk_plonk::commitment_scheme::kzg10::commitment::Commitment; +macro_rules! cfg_iter { + ($e: expr) => {{ + #[cfg(feature = "parallel")] + let result = $e.par_iter(); + #[cfg(not(feature = "parallel"))] + let result = $e.iter(); + result + }}; +} + +macro_rules! 
cfg_into_iter { + ($e: expr) => {{ + #[cfg(feature = "parallel")] + let result = $e.into_par_iter(); + #[cfg(not(feature = "parallel"))] + let result = $e.into_iter(); + result + }}; +} +pub const SCALAR_SIZE: usize = 32; pub type ArkScalar = crate::pmp::m1_blst::Fr; -pub type MpCommitment = crate::pmp::Commitment; +pub type Commitment = crate::pmp::Commitment; +pub use poly_multiproof::traits::AsBytes; #[cfg(test)] mod tests; pub struct EvaluationGrid { pub lookup: DataLookup, - pub evals: RowMajor, + pub evals: RowMajor, pub dims: Dimensions, } @@ -101,7 +120,7 @@ impl EvaluationGrid { let dims = get_block_dims(grid.len(), min_width, max_width, max_height)?; let mut rng = ChaChaRng::from_seed(rng_seed); while grid.len() != dims.n_cells() { - let rnd_values: [u8; BlsScalar::SIZE - 1] = rng.gen(); + let rnd_values: [u8; SCALAR_SIZE - 1] = rng.gen(); // TODO: can we just use zeros instead? grid.push(pad_to_bls_scalar(rnd_values)?); } @@ -115,7 +134,7 @@ impl EvaluationGrid { }) } - pub fn row(&self, y: usize) -> Option<&[BlsScalar]> { + pub fn row(&self, y: usize) -> Option<&[ArkScalar]> { self.evals.row(y) } @@ -147,7 +166,7 @@ impl EvaluationGrid { &self, app_id: &AppId, orig_dims: Option<&Dimensions>, - ) -> Option)>> { + ) -> Option)>> { let orig_dims = orig_dims.unwrap_or(&self.dims); if !orig_dims.divides(&self.dims) { return None; @@ -172,29 +191,25 @@ impl EvaluationGrid { .map_err(|_| Error::CellLengthExceeded)?, )); - let domain = EvaluationDomain::new(self.dims.height())?; - let domain_new = EvaluationDomain::new(new_dims.height())?; + let domain = GeneralEvaluationDomain::::new(self.dims.height()) + .ok_or(Error::DomainSizeInvalid)?; + let domain_new = GeneralEvaluationDomain::::new(new_dims.height()) + .ok_or(Error::DomainSizeInvalid)?; if domain_new.size() != new_dims.height() { - return Err(Error::DomainSizeInalid); + return Err(Error::DomainSizeInvalid); } - let cols = self + let cols: Vec> = self .evals .columns() .map(|(_i, col)| col.map(|s| 
*s).collect::>()) .collect::>(); - #[cfg(not(feature = "parallel"))] - let col_iter = cols.into_iter(); - #[cfg(feature = "parallel")] - let col_iter = cols.into_par_iter(); - - let new_evals = col_iter + let new_evals = cfg_into_iter!(cols) .flat_map(|mut col| { // ifft, resize, fft - domain.ifft_slice(col.as_mut_slice()); - col.resize(domain_new.size(), BlsScalar::zero()); - domain_new.fft_slice(&mut col); + domain.ifft_in_place(&mut col); + domain_new.fft_in_place(&mut col); col }) .collect::>() @@ -210,7 +225,8 @@ impl EvaluationGrid { } pub fn make_polynomial_grid(&self) -> Result { - let domain = EvaluationDomain::new(self.dims.width())?; + let domain = GeneralEvaluationDomain::::new(self.dims.width()) + .ok_or(Error::DomainSizeInvalid)?; #[cfg(not(feature = "parallel"))] let rows = self.evals.rows(); #[cfg(feature = "parallel")] @@ -218,37 +234,21 @@ impl EvaluationGrid { Ok(PolynomialGrid { dims: self.dims.clone(), points: domain.elements().collect(), - inner: rows - .map(|(_, row)| { - Evaluations::from_vec_and_domain(row.to_vec(), domain).interpolate() - }) - .collect::>(), + inner: rows.map(|(_, row)| domain.ifft(row)).collect::>(), }) } } pub struct PolynomialGrid { - inner: Vec, - points: Vec, + inner: Vec>, + points: Vec, dims: Dimensions, } -macro_rules! 
cfg_iter { - ($e: expr) => {{ - #[cfg(feature = "parallel")] - let result = $e.par_iter(); - - #[cfg(not(feature = "parallel"))] - let result = $e.iter(); - - result - }}; -} - impl PolynomialGrid { - pub fn commitments(&self, srs: &CommitKey) -> Result, Error> { + pub fn commitments(&self, srs: &impl Committer) -> Result, Error> { cfg_iter!(self.inner) - .map(|poly| srs.commit(poly).map_err(Error::PlonkError)) + .map(|poly| srs.commit(poly).map_err(Error::MultiproofError)) .collect() } @@ -261,33 +261,32 @@ impl PolynomialGrid { extension_factor: usize, ) -> Result, Error> { let res = cfg_iter!(self.inner) - .map(|poly| poly.coeffs.iter().map(convert_scalar).collect::>()) .map(|coeffs| srs.commit(&coeffs).map_err(Error::MultiproofError)) .collect::, _>>()?; - let commits = poly_multiproof::Commitment::::extend_commitments( + poly_multiproof::Commitment::::extend_commitments( &res, res.len().saturating_mul(extension_factor), ) - .map_err(Error::MultiproofError)?; - Ok(commits - .iter() - .map(|c| Commitment(convert_g1(c.0))) - .collect::>()) + .map_err(Error::MultiproofError) } - pub fn commitment(&self, srs: &CommitKey, row: usize) -> Result { + pub fn commitment( + &self, + srs: &impl Committer, + row: usize, + ) -> Result { self.inner .get(row) .ok_or(Error::CellLengthExceeded) - .and_then(|poly| srs.commit(poly).map_err(Error::PlonkError)) + .and_then(|poly| srs.commit(poly).map_err(Error::MultiproofError)) } - pub fn proof(&self, srs: &CommitKey, cell: &Cell) -> Result { + pub fn proof(&self, srs: &M1NoPrecomp, cell: &Cell) -> Result { let x = cell.col.0 as usize; let y = cell.row.0 as usize; let poly = self.inner.get(y).ok_or(Error::CellLengthExceeded)?; - let witness = srs.compute_single_witness(poly, &self.points[x]); - Ok(srs.commit(&witness)?) + let witness = KZGProof::compute_witness_polynomial(srs, poly.clone(), self.points[x])?; + Ok(KZGProof::open(srs, witness)?) 
} pub fn multiproof( @@ -297,7 +296,6 @@ impl PolynomialGrid { eval_grid: &EvaluationGrid, target_dims: &Dimensions, ) -> Result { - use poly_multiproof::traits::PolyMultiProofNoPrecomp; let block = multiproof_block( cell.col.0 as usize, cell.row.0 as usize, @@ -305,26 +303,16 @@ impl PolynomialGrid { target_dims, ) .ok_or(Error::CellLengthExceeded)?; - let polys = self.inner[block.start_y..block.end_y] - .iter() - .map(|s| s.coeffs.iter().map(convert_scalar).collect::>()) - .collect::>(); + let polys = &self.inner[block.start_y..block.end_y]; let evals = (block.start_y..block.end_y) .map(|y| { eval_grid.evals.row(y).expect("Already bounds checked")[block.start_x..block.end_x] - .iter() - .map(convert_scalar) - .collect::>() + .to_vec() }) .collect::>(); - let points = &self.points[block.start_x..block.end_x] - .iter() - .map(convert_scalar) - .collect::>(); - + let points = &self.points[block.start_x..block.end_x]; let mut ts = Transcript::new(b"avail-mp"); - let proof = srs - .open(&mut ts, &evals, &polys, points) + let proof = PolyMultiProofNoPrecomp::open(srs, &mut ts, &evals, &polys, points) .map_err(Error::MultiproofError)?; Ok(Multiproof { @@ -335,21 +323,6 @@ impl PolynomialGrid { } } -fn convert_scalar(dusk: &dusk_plonk::bls12_381::BlsScalar) -> poly_multiproof::m1_blst::Fr { - poly_multiproof::m1_blst::Fr { - 0: poly_multiproof::ark_ff::BigInt(dusk.0), - 1: PhantomData, - } -} - -// TODO: stop using this when we switch everything over to arkworks -fn convert_g1(ark: poly_multiproof::m1_blst::G1Affine) -> dusk_plonk::bls12_381::G1Affine { - let comm = poly_multiproof::Commitment(ark) - .to_bytes() - .expect("TODO: stop using this"); - dusk_plonk::bls12_381::G1Affine::from_bytes(&comm).expect("TODO: stop using this") -} - #[derive(Debug, Clone)] pub struct Multiproof { pub proof: poly_multiproof::m1_blst::Proof, @@ -432,34 +405,23 @@ pub fn get_block_dims( } } -pub fn domain_points(n: usize) -> Result, Error> { - let domain = EvaluationDomain::new(n)?; +pub 
fn domain_points(n: usize) -> Result, Error> { + let domain = GeneralEvaluationDomain::::new(n).ok_or(Error::DomainSizeInvalid)?; Ok(domain.elements().collect()) } -pub fn to_ark_scalar(s: &BlsScalar) -> ArkScalar { - ArkScalar { - 0: poly_multiproof::ark_ff::BigInt(s.0), - 1: PhantomData, - } -} - -pub fn to_mp_commitment(c: Commitment) -> MpCommitment { - MpCommitment::from_bytes(&c.to_bytes()).expect("commitment is valid") -} - fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { let n_multiples = input.saturating_add(multiple.get()).saturating_sub(1) / multiple; n_multiples.saturating_mul(multiple.get()) } -pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { +pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { if a.as_ref().len() > DATA_CHUNK_SIZE { return Err(Error::InvalidChunkLength); } - let mut buf = [0u8; BlsScalar::SIZE]; + let mut buf = [0u8; SCALAR_SIZE]; buf[0..a.as_ref().len()].copy_from_slice(a.as_ref()); - BlsScalar::from_bytes(&buf).map_err(Error::DuskBytesError) + ArkScalar::from_bytes(&buf).map_err(Error::MultiproofError) } // Round up. 
only valid for positive integers @@ -538,18 +500,6 @@ mod unit_tests { prop_assert_eq!(round_up_to_multiple(a, m.try_into().unwrap()), i * m) } } - - #[test] - fn test_convert_bls_scalar(input: [u8; 31]) { - use poly_multiproof::ark_serialize::CanonicalSerialize; - let dusk = pad_to_bls_scalar(input).unwrap(); - let ark = convert_scalar(&dusk); - let dusk_out = dusk.to_bytes(); - let mut ark_out = [0u8; 32]; - ark.serialize_compressed(&mut ark_out[..]).unwrap(); - assert_eq!(dusk_out, ark_out); - } - } #[test_case(0 => 1)] #[test_case(1 => 1)] diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index e42d6576..202fa712 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -6,7 +6,6 @@ use da_types::AppExtrinsic; use da_types::AppId; use da_types::BlockLengthColumns; use da_types::BlockLengthRows; -use dusk_bytes::Serializable; use hex_literal::hex; use kate_grid::Dimensions; use kate_recovery::matrix::Position; @@ -34,10 +33,10 @@ fn test_build_commitments_simple_commitment_check() { let ext_evals = evals.extend_columns(2).unwrap(); let polys = ext_evals.make_polynomial_grid().unwrap(); let commits = polys - .commitments(pp().commit_key()) + .commitments(&*PMP) .unwrap() .into_iter() - .flat_map(|p| p.to_bytes()) + .flat_map(|p| p.to_bytes().unwrap()) .collect::>(); let commits_fft_extended = evals .make_polynomial_grid() @@ -45,7 +44,7 @@ fn test_build_commitments_simple_commitment_check() { .extended_commitments(&pmp_pp, 2) .unwrap() .into_iter() - .flat_map(|p| p.to_bytes()) + .flat_map(|p| p.to_bytes().unwrap()) .collect::>(); assert_eq!(ext_evals.dims, Dimensions::new_unchecked(4, 2)); @@ -67,7 +66,7 @@ fn par_build_commitments_row_wise_constant_row() { let evals = EvaluationGrid::from_extrinsics(xts, 4, 4, 4, hash).unwrap(); let evals = evals.extend_columns(2).unwrap(); let polys = evals.make_polynomial_grid().unwrap(); - polys.commitments(pp().commit_key()).unwrap(); + 
polys.commitments(&*PMP).unwrap(); } proptest! { @@ -78,10 +77,10 @@ proptest! { let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); let polys = grid.make_polynomial_grid().unwrap(); - let commits = polys.commitments(pp().commit_key()) + let commits = polys.commitments(&*PMP) .unwrap() .iter() - .map(|c| c.to_bytes()) + .map(|c| c.to_bytes().unwrap()) .collect::>(); let index = app_data_index_from_lookup(&grid.lookup); @@ -92,7 +91,7 @@ proptest! { // Have to put the rows we find in this funky data structure let mut app_rows = vec![None; grid.dims.height()]; for (row_i, row) in rows { - app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes()).collect()); + app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes().unwrap()).collect()); } // Need to provide the original dimensions here too let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); @@ -105,26 +104,20 @@ proptest! 
{ let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); let polys = grid.make_polynomial_grid().unwrap(); - let commits = polys.commitments(pp().commit_key()) + let commits = polys.commitments(&*PMP) .unwrap() .iter() - .map(|c| c.to_bytes()) + .map(|c| c.to_bytes().unwrap()) .collect::>(); let index = app_data_index_from_lookup(&grid.lookup); let public_params = testnet::public_params((grid.dims.width() as u32).into()); - //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); - - //let index = app_data_index_try_from_layout(layout).unwrap(); - //let public_params = testnet::public_params(dims.cols.as_usize()); - //let extended_dims = dims.try_into().unwrap(); - //let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); let mut row_elems = vec![None; grid.dims.height()]; for (i, data) in &rows { - row_elems[*i] = Some(data.iter().flat_map(|s| s.to_bytes()).collect()); + row_elems[*i] = Some(data.iter().flat_map(|s| s.to_bytes().unwrap()).collect()); } let first_index = rows.iter().map(|(i, _)| *i).min().unwrap(); row_elems.remove(first_index); @@ -143,8 +136,6 @@ fn test_zero_deg_poly_commit(row_values: Vec) { // There are two main cases that generate a zero degree polynomial. One is for data that is non-zero, but the same. // The other is for all-zero data. They differ, as the former yields a polynomial with one coefficient, and latter generates zero coefficients. 
let len = row_values.len(); - let public_params = pp(); - let row = row_values .iter() .map(|val| pad_to_bls_scalar(&[*val]).unwrap()) @@ -161,7 +152,7 @@ fn test_zero_deg_poly_commit(row_values: Vec) { let pg = ev.make_polynomial_grid().unwrap(); println!("Poly: {:?}", pg.inner[0]); - let commitment = pg.commitment(pp().commit_key(), 0).unwrap().to_bytes(); + let commitment = pg.commitment(&*PMP, 0).unwrap().to_bytes().unwrap(); for x in 0..len { // Randomly chosen cell to prove, probably should test all of them @@ -170,10 +161,10 @@ fn test_zero_deg_poly_commit(row_values: Vec) { row: BlockLengthRows(0), }; - let proof = pg.proof(pp().commit_key(), &cell).unwrap(); + let proof = pg.proof(&*PMP, &cell).unwrap(); - let proof_bytes = proof.to_bytes(); - let cell_bytes = ev.evals.get(x, 0).unwrap().to_bytes(); + let proof_bytes = proof.to_bytes().unwrap(); + let cell_bytes = ev.evals.get(x, 0).unwrap().to_bytes().unwrap(); let content = [&proof_bytes[..], &cell_bytes[..]].concat(); let dims = kate_recovery::matrix::Dimensions::new(1, 4).unwrap(); let cell = kate_recovery::data::Cell { @@ -183,7 +174,12 @@ fn test_zero_deg_poly_commit(row_values: Vec) { }, content: content.try_into().unwrap(), }; - let verification = kate_recovery::proof::verify(&public_params, &dims, &commitment, &cell); + let verification = kate_recovery::proof::verify( + &kate_recovery::testnet::public_params(256), + &dims, + &commitment, + &cell, + ); assert!(verification.is_ok()); assert!(verification.unwrap()) } diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index e85bd149..7ac4e3f3 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -1,18 +1,17 @@ use da_types::{AppExtrinsic, DataLookup, DataLookupIndexItem}; -use dusk_bytes::Serializable; -use dusk_plonk::prelude::BlsScalar; use hex_literal::hex; use kate_grid::{Dimensions, Grid, IntoColumnMajor, IntoRowMajor}; use kate_recovery::{ com::{app_specific_cells, 
decode_app_extrinsics, reconstruct_extrinsics}, data::DataCell, }; +use poly_multiproof::traits::AsBytes; use crate::{ config::DATA_CHUNK_SIZE, gridgen::{ tests::{app_data_index_from_lookup, sample_cells}, - EvaluationGrid, + ArkScalar, EvaluationGrid, }, Seed, }; @@ -63,7 +62,7 @@ fn newapi_test_flatten_block() { .evals .inner() .iter() - .flat_map(|s| s.to_bytes()) + .flat_map(|s| s.to_bytes().unwrap()) .collect::>(); assert_eq!(data, expected_data, "Data doesn't match the expected data"); } @@ -90,7 +89,7 @@ fn newapi_test_extend_data_matrix() { hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), ] .into_iter() - .map(|e| BlsScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) + .map(|e| ArkScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) .collect::>() .into_column_major(4, 4) .unwrap() @@ -153,7 +152,8 @@ get erasure coded to ensure redundancy."#; .evals .get(pos.col as usize, pos.row as usize) .unwrap() - .to_bytes(), + .to_bytes() + .unwrap(), }) .collect::>(); let data = &decode_app_extrinsics(&index, &bdims, cells, xt.app_id.0).unwrap()[0]; diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index 7e3a616c..e0828278 100644 --- a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -1,8 +1,8 @@ use da_types::{AppExtrinsic, DataLookup}; -use dusk_bytes::Serializable; -use dusk_plonk::prelude::PublicParameters; use kate_grid::Grid; use kate_recovery::{data::DataCell, index::AppDataIndex}; +use once_cell::sync::Lazy; +use poly_multiproof::{m1_blst::M1NoPrecomp, traits::AsBytes}; use proptest::{collection, prelude::*, sample::size_range}; use rand::{distributions::Uniform, prelude::Distribution, SeedableRng}; use rand_chacha::ChaChaRng; @@ -15,9 +15,7 @@ mod commitments; mod formatting; mod reconstruction; -pub(crate) fn pp() -> PublicParameters { - testnet::public_params(da_types::BlockLengthColumns(256)) -} +pub static PMP: Lazy = Lazy::new(|| testnet::multiproof_params(256, 
256)); fn app_extrinsic_strategy() -> impl Strategy { ( @@ -72,7 +70,7 @@ fn sample_cells(grid: &EvaluationGrid, columns: Option<&[usize]>) -> Vec>() diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 896463b2..7c19d21b 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -1,14 +1,14 @@ -use super::{app_data_index_from_lookup, pp}; +use super::{app_data_index_from_lookup, PMP}; use crate::com::Cell; use crate::gridgen::tests::sample_cells; use crate::gridgen::EvaluationGrid; use crate::Seed; use da_types::AppExtrinsic; -use dusk_bytes::Serializable; use kate_grid::Grid; use kate_recovery::com::reconstruct_extrinsics; use kate_recovery::data::Cell as DCell; use kate_recovery::matrix::Position as DPosition; +use poly_multiproof::traits::AsBytes; use proptest::prelude::*; use rand::distributions::Uniform; use rand::prelude::Distribution; @@ -66,9 +66,9 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { prop_assert_eq!(result.1[0].as_slice(), &xt.data); } - let pp = pp(); + let pp = &*PMP; let polys = grid.make_polynomial_grid().unwrap(); - let commitments = polys.commitments(pp.commit_key()).unwrap(); + let commitments = polys.commitments(pp).unwrap(); let indices = (0..dims.width()).flat_map(|x| (0..dims.height()).map(move |y| (x, y))).collect::>(); // Sample some number 10 of the indices, all is too slow for tests... 
@@ -76,13 +76,13 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i]); for (x, y) in sampled { let cell = Cell { row: (y as u32).into(), col: (x as u32).into() }; - let proof = polys.proof(pp.commit_key(), &cell).unwrap(); + let proof = polys.proof(pp, &cell).unwrap(); let mut content = [0u8; 80]; - content[..48].copy_from_slice(&proof.to_bytes()[..]); - content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes()[..]); + content[..48].copy_from_slice(&proof.to_bytes().unwrap()[..]); + content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes().unwrap()[..]); let dcell = DCell{position: DPosition { row: y as u32, col: x as u16 }, content }; - let verification = kate_recovery::proof::verify(&pp, &bdims, &commitments[y].to_bytes(), &dcell); + let verification = kate_recovery::proof::verify(&kate_recovery::testnet::public_params(256), &bdims, &commitments[y].to_bytes().unwrap(), &dcell); prop_assert!(verification.is_ok()); prop_assert!(verification.unwrap()); } From ff0af9d7fae3eca2489c87db50593cc53f9b0e5d Mon Sep 17 00:00:00 2001 From: William Arnold Date: Mon, 1 May 2023 00:39:58 -0700 Subject: [PATCH 47/87] Fix silly sync error --- kate/src/gridgen/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index c83fa3d3..a972638e 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -246,7 +246,7 @@ pub struct PolynomialGrid { } impl PolynomialGrid { - pub fn commitments(&self, srs: &impl Committer) -> Result, Error> { + pub fn commitments(&self, srs: &(impl Committer + Sync)) -> Result, Error> { cfg_iter!(self.inner) .map(|poly| srs.commit(poly).map_err(Error::MultiproofError)) .collect() From 5bd30863f15cca7b0158319979202650dc775d04 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Thu, 1 Jun 2023 13:11:10 +0900 Subject: [PATCH 48/87] Add 
missing into impl --- primitives/avail/src/asdr.rs | 15 +++++++++++++++ .../avail/src/asdr/app_unchecked_extrinsic.rs | 15 +++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/primitives/avail/src/asdr.rs b/primitives/avail/src/asdr.rs index 286143cd..5064fdba 100644 --- a/primitives/avail/src/asdr.rs +++ b/primitives/avail/src/asdr.rs @@ -19,3 +19,18 @@ where } } } + +impl From> for AppExtrinsic +where + A: Encode, + C: Encode, + S: Encode, + E: SignedExtension + GetAppId, +{ + fn from(app_ext: AppUncheckedExtrinsic) -> Self { + Self { + app_id: app_ext.app_id(), + data: app_ext.encode(), + } + } +} diff --git a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs b/primitives/avail/src/asdr/app_unchecked_extrinsic.rs index 441db16e..12661eb2 100644 --- a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs +++ b/primitives/avail/src/asdr/app_unchecked_extrinsic.rs @@ -486,6 +486,21 @@ where } } +impl TryFrom + for AppUncheckedExtrinsic +where + Address: Decode, + Signature: Decode, + Call: Decode, + Extra: SignedExtension, +{ + type Error = codec::Error; + + fn try_from(opaque: OpaqueExtrinsic) -> Result { + Self::try_from(opaque.0.as_slice()) + } +} + impl TryFrom<&[u8]> for AppUncheckedExtrinsic where From a85321dc6419053ec19b3d7ede20d38fc7c09495 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Wed, 7 Jun 2023 17:23:12 +0200 Subject: [PATCH 49/87] Simplify `Debug` and `RuntimeDebug` --- primitives/types/Cargo.toml | 14 +++++--------- primitives/types/src/data_lookup.rs | 13 ++++--------- primitives/types/src/lib.rs | 8 +++----- 3 files changed, 12 insertions(+), 23 deletions(-) diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml index e415b294..4cbbb3aa 100644 --- a/primitives/types/Cargo.toml +++ b/primitives/types/Cargo.toml @@ -5,20 +5,16 @@ authors = [] edition = "2018" [dependencies] -derive_more = "0.99.17" -num-traits = { version = "0.2", default-features = false } +# Substrate parity-scale-codec = { version = "3", 
default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "2.3.1", default-features = false, features = ["derive"] } +sp-debug-derive = { version = "5", default-features = false } +# 3rd-parties +derive_more = "0.99.17" +num-traits = { version = "0.2", default-features = false } serde = { version = "1.0", features = ["derive"], optional = true } -parity-util-mem = { version = "*", default-features = false, optional = true } -sp-debug-derive = { version = "*", default-features = false, optional = true } -sp-runtime = { version = "*", default-features = false, optional = true } - -[dev-dependencies] - [features] default = ["std"] std = ["serde", "parity-scale-codec/std", "scale-info/std", "num-traits/std"] -substrate = ["sp-debug-derive", "parity-util-mem", "sp-runtime"] diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index 6066194d..7da9fb7c 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -4,12 +4,11 @@ use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; +use sp_debug_derive::RuntimeDebug; use crate::AppId; -#[derive(PartialEq, Eq, Clone, Encode, Decode, Default, TypeInfo)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] +#[derive(PartialEq, Eq, Clone, Encode, Decode, Default, TypeInfo, RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookup { /// size of the look up @@ -19,9 +18,7 @@ pub struct DataLookup { pub index: Vec, } -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] +#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo, 
RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookupIndexItem { pub app_id: AppId, @@ -49,9 +46,7 @@ impl parity_util_mem::MallocSizeOf for DataLookupIndexItem { } } -#[derive(PartialEq, Eq)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] +#[derive(PartialEq, Eq, RuntimeDebug)] /// Errors during the creation from `extrinsics`. pub enum TryFromError { /// Size overflows diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs index 786215b4..a1ceb285 100644 --- a/primitives/types/src/lib.rs +++ b/primitives/types/src/lib.rs @@ -6,6 +6,7 @@ use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; +use sp_debug_derive::RuntimeDebug; mod data_lookup; mod get_app_id; @@ -13,10 +14,8 @@ pub use data_lookup::*; pub use get_app_id::*; /// Raw Extrinsic with application id. 
-#[derive(Clone, TypeInfo, Default, Encode, Decode)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] +#[derive(Clone, TypeInfo, Default, Encode, Decode, RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] pub struct AppExtrinsic { pub app_id: AppId, pub data: Vec, @@ -64,10 +63,9 @@ impl GetAppId for AppExtrinsic { Into, Default, MaxEncodedLen, + RuntimeDebug, )] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] pub struct AppId(#[codec(compact)] pub u32); impl num_traits::Zero for AppId { From a53b607b6394f40d9810bf23c6b74a88cb06945e Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Thu, 8 Jun 2023 11:17:22 +0200 Subject: [PATCH 50/87] Clean some deps --- primitives/avail/Cargo.toml | 49 +++++++++---------- .../avail/src/asdr/app_unchecked_extrinsic.rs | 2 +- primitives/avail/src/data_proof.rs | 3 +- primitives/avail/src/header/extension/mod.rs | 9 ---- primitives/avail/src/header/extension/v1.rs | 9 ---- primitives/avail/src/header/extension/v2.rs | 9 ---- .../avail/src/header/extension/v_test.rs | 9 ---- primitives/avail/src/kate_commitment.rs | 20 -------- primitives/avail/src/sha2.rs | 5 +- primitives/types/Cargo.toml | 10 +++- primitives/types/src/data_lookup.rs | 16 +----- primitives/types/src/lib.rs | 2 +- 12 files changed, 37 insertions(+), 106 deletions(-) diff --git a/primitives/avail/Cargo.toml b/primitives/avail/Cargo.toml index 4e720255..bf38d4f2 100644 --- a/primitives/avail/Cargo.toml +++ b/primitives/avail/Cargo.toml @@ -5,50 +5,47 @@ authors = [] edition = "2021" [dependencies] +# Internal +da-types = { path = "../types", default-features = false } + # Others -da-types = { path = "../types", default-features = false, features = ["substrate"] } -derive_more = 
"0.99.17" log = { version = "0.4.8", default-features = false } -serde = { version = "1.0.121", optional = true, features = ["derive"] } -serde_json = { version = "1.0", optional = true } +serde = { version = "1", optional = true, features = ["derive"] } thiserror-no-std = "2.0.2" - +hash256-std-hasher = { version = "0.15.2", default-features = false } + # Substrate beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false } -codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } frame-support = { version = "4.0.0-dev", default-features = false } -hash256-std-hasher = { version = "0.15.2", default-features = false } -parity-util-mem = { version = "0.12.0", default-features = false, features = ["primitive-types"] } scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-runtime = { version = "7.0.0", default-features = false } -sp-runtime-interface = { version = "7.0.0", default-features = false, features = ["disable_target_static_assertions"] } -sp-std = { version = "4.0.0", default-features = false } +sp-core = { version = "7", default-features = false } +sp-runtime = { version = "7", default-features = false } +sp-runtime-interface = { version = "7", default-features = false } +sp-std = { version = "5", default-features = false } sp-trie = { version = "7.0.0", default-features = false } [dev-dependencies] hex-literal = "0.3.4" test-case = "1.2.3" +serde_json = "1" [features] default = ["std"] std = [ - "serde", - "serde_json", - "codec/std", - "scale-info/std", - "log/std", - "sp-core/std", - "sp-std/std", - "sp-io/std", + "serde", + "codec/std", + "scale-info/std", + 
"log/std", + "sp-core/std", + "sp-std/std", "sp-runtime/std", - "sp-trie/std", - "sp-runtime-interface/std", - "hash256-std-hasher/std", - "frame-support/std", - "parity-util-mem/std", + "sp-trie/std", + "sp-runtime-interface/std", + "hash256-std-hasher/std", + "frame-support/std", "beefy-merkle-tree/std", + "da-types/std", ] header-backward-compatibility-test = [] diff --git a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs b/primitives/avail/src/asdr/app_unchecked_extrinsic.rs index 12661eb2..16efb48d 100644 --- a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs +++ b/primitives/avail/src/asdr/app_unchecked_extrinsic.rs @@ -23,7 +23,7 @@ use frame_support::{ traits::ExtrinsicCall, }; use scale_info::{build::Fields, meta_type, Path, StaticTypeInfo, Type, TypeInfo, TypeParameter}; -use sp_io::hashing::blake2_256; +use sp_core::blake2_256; use sp_runtime::{ generic::CheckedExtrinsic, traits::{ diff --git a/primitives/avail/src/data_proof.rs b/primitives/avail/src/data_proof.rs index f1bbca53..fea610b9 100644 --- a/primitives/avail/src/data_proof.rs +++ b/primitives/avail/src/data_proof.rs @@ -3,8 +3,7 @@ use codec::{Decode, Encode}; use frame_support::ensure; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::H256; -use sp_io::hashing::sha2_256; +use sp_core::{hashing::sha2_256, H256}; use sp_std::{convert::TryFrom, vec::Vec}; use thiserror_no_std::Error; diff --git a/primitives/avail/src/header/extension/mod.rs b/primitives/avail/src/header/extension/mod.rs index ad5fd923..e336c416 100644 --- a/primitives/avail/src/header/extension/mod.rs +++ b/primitives/avail/src/header/extension/mod.rs @@ -1,7 +1,5 @@ use crate::asdr::DataLookup; use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -70,13 +68,6 @@ impl Default for HeaderExtension { } } -#[cfg(feature = "std")] -impl MallocSizeOf for 
HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - forward_to_version!(self, size_of, ops) - } -} - impl From for HeaderExtension { #[inline] fn from(ext: v1::HeaderExtension) -> Self { diff --git a/primitives/avail/src/header/extension/v1.rs b/primitives/avail/src/header/extension/v1.rs index f0f9b1ed..53c0d7ed 100644 --- a/primitives/avail/src/header/extension/v1.rs +++ b/primitives/avail/src/header/extension/v1.rs @@ -1,6 +1,4 @@ use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -32,10 +30,3 @@ impl HeaderExtension { self.commitment.cols } } - -#[cfg(feature = "std")] -impl MallocSizeOf for HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) + self.app_lookup.size_of(ops) - } -} diff --git a/primitives/avail/src/header/extension/v2.rs b/primitives/avail/src/header/extension/v2.rs index f7a6855a..5fe3a2de 100644 --- a/primitives/avail/src/header/extension/v2.rs +++ b/primitives/avail/src/header/extension/v2.rs @@ -1,6 +1,4 @@ use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -32,10 +30,3 @@ impl HeaderExtension { self.commitment.cols } } - -#[cfg(feature = "std")] -impl MallocSizeOf for HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) + self.app_lookup.size_of(ops) - } -} diff --git a/primitives/avail/src/header/extension/v_test.rs b/primitives/avail/src/header/extension/v_test.rs index 23d989d6..a7e8b4bf 100644 --- a/primitives/avail/src/header/extension/v_test.rs +++ b/primitives/avail/src/header/extension/v_test.rs @@ -1,6 +1,4 @@ use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, 
MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -23,13 +21,6 @@ impl HeaderExtension { } } -#[cfg(feature = "std")] -impl MallocSizeOf for HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - self.new_field.size_of(ops) + self.commitment.size_of(ops) + self.app_lookup.size_of(ops) - } -} - impl From for HeaderExtension { fn from(ext: v1::HeaderExtension) -> Self { Self { diff --git a/primitives/avail/src/kate_commitment.rs b/primitives/avail/src/kate_commitment.rs index 14d11a5c..47347171 100644 --- a/primitives/avail/src/kate_commitment.rs +++ b/primitives/avail/src/kate_commitment.rs @@ -39,16 +39,6 @@ pub mod v1 { .finish() } } - - #[cfg(feature = "std")] - impl parity_util_mem::MallocSizeOf for KateCommitment { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) - + self.rows.size_of(ops) - + self.cols.size_of(ops) - + self.data_root.size_of(ops) - } - } } pub mod v2 { @@ -103,16 +93,6 @@ pub mod v2 { } } - #[cfg(feature = "std")] - impl parity_util_mem::MallocSizeOf for KateCommitment { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) - + self.rows.size_of(ops) - + self.cols.size_of(ops) - + self.data_root.size_of(ops) - } - } - #[cfg(test)] mod tests { use super::*; diff --git a/primitives/avail/src/sha2.rs b/primitives/avail/src/sha2.rs index af72d32c..39caa232 100644 --- a/primitives/avail/src/sha2.rs +++ b/primitives/avail/src/sha2.rs @@ -1,8 +1,7 @@ -use frame_support::RuntimeDebug; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{storage::StateVersion, Hasher}; +use sp_core::{hashing::sha2_256, storage::StateVersion, Hasher, RuntimeDebug}; use sp_runtime::traits::Hash; use sp_std::vec::Vec; use sp_trie::{LayoutV0, LayoutV1, TrieConfiguration as _}; @@ -18,7 +17,7 @@ impl Hasher for ShaTwo256 { const LENGTH: 
usize = 32; fn hash(s: &[u8]) -> Self::Out { - sp_io::hashing::sha2_256(s).into() + sha2_256(s).into() } } diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml index 4cbbb3aa..aec8f696 100644 --- a/primitives/types/Cargo.toml +++ b/primitives/types/Cargo.toml @@ -8,7 +8,7 @@ edition = "2018" # Substrate parity-scale-codec = { version = "3", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "2.3.1", default-features = false, features = ["derive"] } -sp-debug-derive = { version = "5", default-features = false } +sp-core = { version = "7", default-features = false } # 3rd-parties derive_more = "0.99.17" @@ -17,4 +17,10 @@ serde = { version = "1.0", features = ["derive"], optional = true } [features] default = ["std"] -std = ["serde", "parity-scale-codec/std", "scale-info/std", "num-traits/std"] +std = [ + "serde", + "sp-core/std", + "parity-scale-codec/std", + "scale-info/std", + "num-traits/std" +] diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index 7da9fb7c..fae41f3d 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -4,7 +4,7 @@ use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_debug_derive::RuntimeDebug; +use sp_core::RuntimeDebug; use crate::AppId; @@ -39,13 +39,6 @@ where } } -#[cfg(all(feature = "std", feature = "substrate"))] -impl parity_util_mem::MallocSizeOf for DataLookupIndexItem { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.app_id.size_of(ops) + self.start.size_of(ops) - } -} - #[derive(PartialEq, Eq, RuntimeDebug)] /// Errors during the creation from `extrinsics`. 
pub enum TryFromError { @@ -87,13 +80,6 @@ impl TryFrom<&[(AppId, u32)]> for DataLookup { } } -#[cfg(all(feature = "std", feature = "substrate"))] -impl parity_util_mem::MallocSizeOf for DataLookup { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.size.size_of(ops) + self.index.size_of(ops) - } -} - #[cfg(test)] mod test { use super::*; diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs index a1ceb285..68bfc4d2 100644 --- a/primitives/types/src/lib.rs +++ b/primitives/types/src/lib.rs @@ -6,7 +6,7 @@ use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_debug_derive::RuntimeDebug; +use sp_core::RuntimeDebug; mod data_lookup; mod get_app_id; From ecbd06b5fb34e56898a57c159b0c7520cd908cbe Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Thu, 8 Jun 2023 13:47:20 +0200 Subject: [PATCH 51/87] Remove asserts from production code --- kate/recovery/Cargo.toml | 35 +++++++++------- kate/recovery/src/com.rs | 68 +++++++++++++++++++------------- kate/recovery/src/commitments.rs | 4 +- kate/recovery/src/lib.rs | 25 ++++++++++++ kate/recovery/src/matrix.rs | 24 +++++------ kate/recovery/src/proof.rs | 4 +- primitives/avail/Cargo.toml | 26 ++++++------ primitives/types/Cargo.toml | 8 ++-- 8 files changed, 123 insertions(+), 71 deletions(-) diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 013f0b43..a7809af8 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -5,24 +5,31 @@ authors = ["Denis Ermolin "] edition = "2018" [dependencies] -codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -dusk-bytes = "0.1.6" +# Internals dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2" } -getrandom = { version = "0.2", features = ["js"] } -hex = "0.4" -num = "0.4.0" -once_cell = { version = "1.9.0", 
default-features = false } -rand = "0.8.4" -rand_chacha = "0.3" -serde = { version = "1.0", features = ["derive"] } -thiserror = "1.0.37" + +# Substrate +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "7", default-features = false } + +# 3rd-parties +dusk-bytes = { version = "0.1.6", default-features = false } +once_cell = { version = "1.9.0", optional = true } +rand = { version = "0.8.4", optional = true } +rand_chacha = { version = "0.3", optional = true } +serde = { version = "1", optional = true, features = ["derive"] } +thiserror-no-std = "2.0.2" [dev-dependencies] -once_cell = "1.9.0" -rand = "0.8.4" -rand_chacha = "0.3" +hex = "0.4" test-case = "1.2.3" [features] default = ["std"] -std = [] +std = [ + "once_cell", + "rand", + "rand_chacha", + "serde", + "sp-arithmetic/std", +] diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 1a42be9d..7fb91127 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,18 +1,18 @@ use codec::Decode; -use dusk_bytes::Serializable; +use dusk_bytes::Serializable as _; use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; -use num::ToPrimitive; use rand::seq::SliceRandom; +use sp_arithmetic::{traits::SaturatedConversion, Percent}; use std::{ collections::{HashMap, HashSet}, - convert::TryFrom, + convert::{TryFrom, TryInto}, iter::FromIterator, }; -use thiserror::Error; +use thiserror_no_std::Error; use crate::{ config::{self, CHUNK_SIZE}, - data, index, matrix, + data, ensure, index, matrix, }; #[derive(Debug, Error)] @@ -40,14 +40,11 @@ pub enum ReconstructionError { pub fn columns_positions( dimensions: &matrix::Dimensions, positions: &[matrix::Position], - factor: f64, + factor: Percent, ) -> Vec { - assert!(factor <= 1.0); - - let cells = (factor * dimensions.extended_rows() as f64) - .to_usize() - .expect("result is lesser than usize maximum"); - + let cells = factor + 
.mul_ceil(dimensions.extended_rows()) + .saturated_into::(); let rng = &mut rand::thread_rng(); let columns: HashSet = HashSet::from_iter(positions.iter().map(|position| position.col)); @@ -94,8 +91,8 @@ pub fn app_specific_rows( ) -> Vec { index .app_cells_range(app_id) - .map(|range| dimensions.extended_data_rows(range)) - .unwrap_or_else(std::vec::Vec::new) + .and_then(|range| dimensions.extended_data_rows(range)) + .unwrap_or_default() } /// Generates empty cell positions in extended data matrix, @@ -114,7 +111,7 @@ pub fn app_specific_cells( ) -> Option> { index .app_cells_range(app_id) - .map(|range| dimensions.extended_data_positions(range)) + .and_then(|range| dimensions.extended_data_positions(range)) } /// Application data, represents list of extrinsics encoded in a block. @@ -462,10 +459,12 @@ pub fn reconstruct_column( // just ensures all rows are from same column ! // it's required as that's how it's erasure coded during // construction in validator node - fn check_cells(cells: &[data::DataCell]) { - assert!(!cells.is_empty()); + fn check_cells(cells: &[data::DataCell]) -> bool { + if cells.is_empty() { + return false; + } let first_col = cells[0].position.col; - assert!(cells.iter().all(|c| c.position.col == first_col)); + cells.iter().all(|c| c.position.col == first_col) } // given row index in column of interest, finds it if present @@ -474,20 +473,35 @@ pub fn reconstruct_column( cells .iter() .find(|cell| cell.position.row == idx) - .map(|cell| { + .and_then(|cell| { <[u8; BlsScalar::SIZE]>::try_from(&cell.data[..]) - .expect("didn't find u8 array of length 32") + .map(|data| BlsScalar::from_bytes(&data).ok()) + .ok() + .flatten() }) - .and_then(|data| BlsScalar::from_bytes(&data).ok()) } // row count of data matrix must be power of two ! 
- assert!(row_count % 2 == 0); - assert!(cells.len() >= (row_count / 2) as usize && cells.len() <= row_count as usize); - check_cells(cells); - - let eval_domain = EvaluationDomain::new(row_count as usize).unwrap(); - let mut subset: Vec> = Vec::with_capacity(row_count as usize); + let row_count_sz: usize = row_count + .try_into() + .map_err(|_| "Row count overflows `usize`")?; + ensure!(row_count % 2 == 0, "`row_count` must be power of two"); + ensure!( + cells.len() >= row_count_sz / 2, + "Number of `cells` must be equal or greater than the half of `row_count`" + ); + ensure!( + cells.len() <= row_count_sz, + "Number of `cells` must be equal or less than `row_count`" + ); + ensure!( + check_cells(cells), + "At least one row is not from same column" + ); + + let eval_domain = EvaluationDomain::new(row_count_sz) + .map_err(|e| format!("Evaluation domain cannot be created: {e:?}"))?; + let mut subset: Vec> = Vec::with_capacity(row_count_sz); // fill up vector in ordered fashion // @note the way it's done should be improved diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 419ab885..8455e5ad 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -4,12 +4,13 @@ use std::{ num::TryFromIntError, }; +#[cfg(feature = "std")] use dusk_bytes::Serializable; use dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, prelude::{BlsScalar, PublicParameters}, }; -use thiserror::Error; +use thiserror_no_std::Error; use crate::{ com, @@ -55,6 +56,7 @@ impl From for Error { } } +#[cfg(feature = "std")] impl From for Error { fn from(e: dusk_bytes::Error) -> Self { match e { diff --git a/kate/recovery/src/lib.rs b/kate/recovery/src/lib.rs index 48f158ea..46aedd45 100644 --- a/kate/recovery/src/lib.rs +++ b/kate/recovery/src/lib.rs @@ -1,3 +1,5 @@ +#![cfg_attr(not(feature = "std"), no_std)] + pub mod com; pub mod commitments; pub mod config; @@ -5,5 +7,28 @@ pub mod data; pub mod index; pub mod matrix; pub mod proof; + 
#[cfg(feature = "std")] pub mod testnet; + +/// Return Err of the expression: `return Err($expression);`. +/// +/// Used as `fail!(expression)`. +#[macro_export] +macro_rules! fail { + ( $y:expr ) => {{ + return Err($y.into()); + }}; +} + +/// Evaluate `$x:expr` and if not true return `Err($y:expr)`. +/// +/// Used as `ensure!(expression_to_ensure, expression_to_return_on_false)`. +#[macro_export] +macro_rules! ensure { + ( $x:expr, $y:expr $(,)? ) => {{ + if !$x { + $crate::fail!($y); + } + }}; +} diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index 5c81697a..084dbc9e 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -105,18 +105,19 @@ impl Dimensions { } /// List of data row indexes in the extended matrix. - pub fn extended_data_rows(&self, cells: Range) -> Vec { - assert!(cells.end <= self.size()); - if cells.end == 0 { - return vec![]; + pub fn extended_data_rows(&self, cells: Range) -> Option> { + // Invalid range returns `None` + if cells.end > self.size() || cells.end == 0 { + return None; } let first_row = self.extended_data_row(cells.start); let last_row = self.extended_data_row(cells.end - 1); - (first_row..=last_row) + let data = (first_row..=last_row) .step_by(config::EXTENSION_FACTOR) - .collect::>() + .collect::>(); + Some(data) } /// Cell positions for given column in extended matrix. @@ -168,11 +169,12 @@ impl Dimensions { } /// Extended matrix data positions for given data matrix cells range. - pub fn extended_data_positions(&self, cells: Range) -> Vec { - assert!(cells.end <= self.size()); - cells - .map(|cell| self.extended_data_position(cell)) - .collect::>() + pub fn extended_data_positions(&self, cells: Range) -> Option> { + (cells.end <= self.size()).then(|| { + cells + .map(|cell| self.extended_data_position(cell)) + .collect::>() + }) } /// Checks if extended matrix contains given position. 
diff --git a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index c30c60fe..afd803d7 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ -1,3 +1,4 @@ +#[cfg(feature = "std")] use dusk_bytes::Serializable; use dusk_plonk::{ bls12_381::G1Affine, @@ -5,7 +6,7 @@ use dusk_plonk::{ fft::EvaluationDomain, prelude::BlsScalar, }; -use thiserror::Error; +use thiserror_no_std::Error; use crate::{config::COMMITMENT_SIZE, data::Cell, matrix::Dimensions}; @@ -19,6 +20,7 @@ pub enum Error { InvalidDegree(String), } +#[cfg(feature = "std")] impl From for Error { fn from(error: dusk_bytes::Error) -> Self { Error::InvalidData(format!("{error:?}")) diff --git a/primitives/avail/Cargo.toml b/primitives/avail/Cargo.toml index bf38d4f2..147ca9e4 100644 --- a/primitives/avail/Cargo.toml +++ b/primitives/avail/Cargo.toml @@ -9,11 +9,11 @@ edition = "2021" da-types = { path = "../types", default-features = false } # Others +hash256-std-hasher = { version = "0.15.2", default-features = false } log = { version = "0.4.8", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } thiserror-no-std = "2.0.2" -hash256-std-hasher = { version = "0.15.2", default-features = false } - + # Substrate beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false } codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } @@ -27,23 +27,23 @@ sp-trie = { version = "7.0.0", default-features = false } [dev-dependencies] hex-literal = "0.3.4" -test-case = "1.2.3" serde_json = "1" +test-case = "1.2.3" [features] default = ["std"] std = [ - "serde", - "codec/std", - "scale-info/std", - "log/std", - "sp-core/std", - "sp-std/std", + "serde", + "codec/std", + "scale-info/std", + "log/std", + "sp-core/std", + "sp-std/std", "sp-runtime/std", - "sp-trie/std", - "sp-runtime-interface/std", - 
"hash256-std-hasher/std", - "frame-support/std", + "sp-trie/std", + "sp-runtime-interface/std", + "hash256-std-hasher/std", + "frame-support/std", "beefy-merkle-tree/std", "da-types/std", ] diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml index aec8f696..2f0b55c5 100644 --- a/primitives/types/Cargo.toml +++ b/primitives/types/Cargo.toml @@ -18,9 +18,9 @@ serde = { version = "1.0", features = ["derive"], optional = true } [features] default = ["std"] std = [ - "serde", + "serde", "sp-core/std", - "parity-scale-codec/std", - "scale-info/std", - "num-traits/std" + "parity-scale-codec/std", + "scale-info/std", + "num-traits/std", ] From 48566c6d8bcfbdf38224176b08c4343cc8e9e055 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Tue, 13 Jun 2023 15:56:29 +0200 Subject: [PATCH 52/87] CodeReview: Keep warning for future --- kate/recovery/src/matrix.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index 084dbc9e..3b5c37c9 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -189,8 +189,8 @@ impl Dimensions { /// Creates iterator over data cells in data matrix (used to retrieve data from the matrix). 
pub fn iter_data(&self) -> impl Iterator { - let rows = self.rows as usize; - let cols = self.cols as usize; + let rows :usize = self.rows.get().into(); + let cols :usize = self.cols.get().into(); (0..rows).flat_map(move |row| (0..cols).map(move |col| (row, col))) } From a28fe6c0f73a950a8d005fb351de8e921679c844 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 14 Jun 2023 19:35:06 +0900 Subject: [PATCH 53/87] `kate-grid` PR feedback --- kate/grid/src/dims.rs | 8 ++- kate/grid/src/grid.rs | 115 +++++++++++++++++++----------------------- 2 files changed, 60 insertions(+), 63 deletions(-) diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index 2e88acf5..7a6394dd 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -1,7 +1,7 @@ use core::num::NonZeroUsize; /// The dimensions of a grid -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct Dimensions { width: NonZeroUsize, height: NonZeroUsize, @@ -12,6 +12,8 @@ impl Dimensions { Dimensions { width, height } } + /// Make a new `Dimensions` panicking if either width or height are zero. + /// Again, **this will panic if a zero width or zero height are given**. pub const fn new_unchecked(width: usize, height: usize) -> Self { Self { width: nonzero_unchecked(width), @@ -72,6 +74,8 @@ impl Extension { } } + /// Make a new height extension without checking if `factor` is nonzero. + /// Again, **this will panic if a zero `factor` is given**. pub const fn height_unchecked(factor: usize) -> Self { Self { height_factor: nonzero_unchecked(factor), @@ -86,6 +90,8 @@ impl Extension { } } + /// Make a new width extension without checking if `factor` is nonzero. + /// Again, **this will panic if a zero `factor` is given**. 
pub const fn width_unchecked(factor: usize) -> Self { Self { height_factor: nonzero_unchecked(1), diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index e483d1a0..f0f5b94a 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -10,12 +10,12 @@ pub trait Grid { // x indexes within a row, y indexes within a column // 0 <= x < width, 0 <= y < height fn get(&self, x: usize, y: usize) -> Option<&A> { - let i = Self::coord_to_ind(self.dims(), x, y); + let i = Self::coord_to_ind(self.dims(), x, y)?; self.get_ind(i) } fn get_ind(&self, i: usize) -> Option<&A>; fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize); - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize; + fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> Option; } pub struct RowMajor { @@ -49,8 +49,8 @@ impl Grid for RowMajor { (i % dims.width_nz(), i / dims.width_nz()) } - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - x.saturating_add(y.saturating_mul(dims.width())) + fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> Option { + x.checked_add(y.checked_mul(dims.width())?) } fn inner(&self) -> &Vec { @@ -79,8 +79,8 @@ impl Grid for ColumnMajor { (i / dims.height_nz(), i % dims.height_nz()) } - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - y.saturating_add(x.saturating_mul(dims.height())) + fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> Option { + y.checked_add(x.checked_mul(dims.height())?) } fn inner(&self) -> &Vec { @@ -91,13 +91,25 @@ impl Grid for ColumnMajor { #[cfg(feature = "parallel")] use rayon::prelude::*; -impl RowMajor { +impl RowMajor { + pub fn new(width: usize, height: usize, data: Vec) -> Option { + if data.len() == usize::checked_mul(width, height)? 
{ +
Some(Self { + dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), + inner: data, + }) + } else { + None + } + } pub fn row(&self, y: usize) -> Option<&[A]> { if y >= self.height() { return None; } - let start = y.checked_mul(self.width())?; - let end = y.checked_add(1)?.checked_mul(self.width())?; + // SAFETY: `y < height` (just one line up) and `height * width` **is already checked** at `new / into_column_major` fns + // as invariant of this type, then we can omit `checked_` operations. + let start = y * self.width(); + let end = (y + 1) * self.width(); Some(&self.inner[start..end]) } @@ -111,14 +123,6 @@ impl RowMajor { pub fn rows(&self) -> impl Iterator + '_ { (0..self.height()).map(|y| (y, self.row(y).expect("Bounds already checked"))) } - - #[cfg(feature = "parallel")] - pub fn rows_par_iter(&self) -> impl ParallelIterator + '_ { - (0..self.height()) - .into_par_iter() - .map(|y| (y, self.row(y).expect("Bounds already checked"))) - } - // TODO: this return type is kinda gross, should it just iterate over vecs? pub fn columns(&self) -> impl Iterator)> + '_ { (0..self.width()).map(|x| (x, self.iter_col(x).expect("Bounds already checked"))) @@ -135,7 +139,9 @@ impl RowMajor { (0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) }) } +} +impl RowMajor { pub fn to_column_major(&self) -> ColumnMajor { self.iter_column_wise() .map(Clone::clone) @@ -145,7 +151,26 @@ impl RowMajor { } } -impl ColumnMajor { +#[cfg(feature = "parallel")] +impl RowMajor { + pub fn rows_par_iter(&self) -> impl ParallelIterator + '_ { + (0..self.height()) + .into_par_iter() + .map(|y| (y, self.row(y).expect("Bounds already checked"))) + } +} + +impl ColumnMajor { + pub fn new(width: usize, height: usize, data: Vec) -> Option { + if data.len() == usize::checked_mul(width, height)? 
{ + Some(Self { + dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), + inner: data, + }) + } else { + None + } + } pub fn col(&self, x: usize) -> Option<&[A]> { if x >= self.width() { return None; @@ -173,7 +198,9 @@ impl ColumnMajor { (0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) }) } +} +impl ColumnMajor { pub fn to_row_major(&self) -> RowMajor { self.iter_row_wise() .map(Clone::clone) @@ -184,62 +211,26 @@ impl ColumnMajor { } pub trait IntoRowMajor { + /// Convert the underlying data structure to be row-major. This likely involves + /// re-allocating the array or re-arranging its elements. fn into_row_major(self, width: usize, height: usize) -> Option>; } pub trait IntoColumnMajor { + /// Convert the underlying data structure to be column-major. This likely involves + /// re-allocating the array or re-arranging its elements. fn into_column_major(self, width: usize, height: usize) -> Option>; } -impl IntoRowMajor for Vec { +impl>> IntoRowMajor for B { fn into_row_major(self, width: usize, height: usize) -> Option> { - if self.len() == usize::checked_mul(width, height)? { - Some(RowMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), - inner: self, - }) - } else { - None - } - } -} - -impl IntoColumnMajor for Vec { - fn into_column_major(self, width: usize, height: usize) -> Option> { - if self.len() == width.checked_mul(height)? { - Some(ColumnMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), - inner: self, - }) - } else { - None - } + RowMajor::new(width, height, self.into()) } } -impl IntoColumnMajor for [A; LEN] { +impl>> IntoColumnMajor for B { fn into_column_major(self, width: usize, height: usize) -> Option> { - if self.len() == width.checked_mul(height)? 
{ - Some(ColumnMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), - inner: self.into(), - }) - } else { - None - } - } -} - -impl IntoRowMajor for [A; LEN] { - fn into_row_major(self, width: usize, height: usize) -> Option> { - if self.len() == width.checked_mul(height)? { - Some(RowMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), - inner: self.into(), - }) - } else { - None - } + ColumnMajor::new(width, height, self.into()) } } From ae9d6f353cb4a06f637611a5ab87c5c70867e568 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 14 Jun 2023 20:09:34 +0900 Subject: [PATCH 54/87] PR Feedback --- Cargo.lock | 2 +- kate/Cargo.toml | 2 +- kate/src/com.rs | 12 ++++++++- kate/src/gridgen/mod.rs | 56 ++++++++++++++++++++--------------------- 4 files changed, 40 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 03731c90..055256e3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2599,7 +2599,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof?rev=1ec7c7eca0861ad89427c45534006f49d039820a#1ec7c7eca0861ad89427c45534006f49d039820a" +source = "git+https://github.com/aphoh/poly-multiproof?tag=v0.1.0#adca8b53aea6371981a289f9a74842b24fe0e197" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 17edd1bb..33edbe83 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -19,7 +19,7 @@ kate-grid = { path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } log = { version = "0.4.8", optional = true } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "1ec7c7eca0861ad89427c45534006f49d039820a" } +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, tag = "v0.1.0" } rand = { version = "0.8.4", 
default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } diff --git a/kate/src/com.rs b/kate/src/com.rs index 61703f2b..8fdba31a 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -1,4 +1,4 @@ -use core::num::{NonZeroU32, NonZeroUsize}; +use core::num::{NonZeroU32, NonZeroUsize, TryFromIntError}; use std::{ convert::{TryFrom, TryInto}, mem::size_of, @@ -59,6 +59,16 @@ pub enum Error { DimensionsMismatch, ZeroDimension, DomainSizeInvalid, + /// The base grid width, before extension, does not fit cleanly into a domain for FFTs + BaseGridDomainSizeInvalid(usize), + /// The extended grid width does not fit cleanly into a domain for FFTs + ExtendedGridDomianSizeInvalid(usize), +} + +impl From for Error { + fn from(_: TryFromIntError) -> Self { + Self::ZeroDimension + } } impl From for Error { diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 7f561847..3f84d751 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -15,6 +15,7 @@ use poly_multiproof::{ }; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; +use std::collections::BTreeMap; use crate::{ com::{Cell, Error}, @@ -62,24 +63,21 @@ pub struct EvaluationGrid { impl EvaluationGrid { /// From the app extrinsics, create a data grid of Scalars pub fn from_extrinsics( - mut extrinsics: Vec, + extrinsics: Vec, min_width: usize, max_width: usize, max_height: usize, rng_seed: Seed, ) -> Result { // Group extrinsics by app id, also sorted by app id. - extrinsics.sort_by(|a, b| a.app_id.cmp(&b.app_id)); - let grouped = - extrinsics - .iter() - .fold::>)>, _>(vec![], |mut acc, e| { - match acc.last_mut() { - Some((app_id, data)) if e.app_id == *app_id => data.push(e.data.clone()), - None | Some(_) => acc.push((e.app_id, vec![e.data.clone()])), - } - acc - }); + // Using a BTreeMap here will still iter in sorted order. Sweet! 
+ let grouped = extrinsics.into_iter().fold::>, _>( + BTreeMap::default(), + |mut acc, e| { + acc.entry(e.app_id).or_default().push(e.data); + acc + }, + ); // Convert each grup of extrinsics into scalars let encoded = grouped @@ -102,7 +100,9 @@ impl EvaluationGrid { app_id: *app_id, start, }); - start = start.saturating_add(scalars.len() as u32); // next item should start after current one + start = start + .checked_add(scalars.len() as u32) + .ok_or(Error::CellLengthExceeded)?; // next item should start after current one } // Flatten the grid @@ -192,9 +192,9 @@ impl EvaluationGrid { )); let domain = GeneralEvaluationDomain::::new(self.dims.height()) - .ok_or(Error::DomainSizeInvalid)?; + .ok_or(Error::BaseGridDomainSizeInvalid(self.dims.width()))?; let domain_new = GeneralEvaluationDomain::::new(new_dims.height()) - .ok_or(Error::DomainSizeInvalid)?; + .ok_or(Error::ExtendedGridDomianSizeInvalid(new_dims.width()))?; if domain_new.size() != new_dims.height() { return Err(Error::DomainSizeInvalid); } @@ -358,11 +358,14 @@ pub fn multiproof_block( let block_width = grid_dims.width() / mp_grid_dims.width_nz(); let block_height = grid_dims.height() / mp_grid_dims.height_nz(); + // SAFETY: values never overflow since x,y are always less than grid_dims.{width,height}(). + // This is because x,y < mp_grid_dims.{width, height} and block width is the quotient of + // grid_dims and mp_grid_dims. 
Some(CellBlock { - start_x: x.checked_mul(block_width)?, - start_y: y.checked_mul(block_height)?, - end_x: x.checked_add(1)?.checked_mul(block_width)?, - end_y: y.checked_add(1)?.checked_mul(block_height)?, + start_x: x * block_width, + start_y: y * block_height, + end_x: (x + 1) * block_width, + end_y: (y + 1) * block_height, }) } @@ -387,13 +390,11 @@ pub fn get_block_dims( if n_scalars < max_width { let current_width = n_scalars; // Don't let the width get lower than the minimum provided - let width = core::cmp::max(round_up_power_of_2(current_width), min_width); - Ok(Dimensions::new( - width.try_into().map_err(|_| Error::ZeroDimension)?, - 1.try_into().expect("1 is nonzero"), - )) + let width = core::cmp::max(round_up_power_of_2(current_width), min_width).try_into()?; + let height = 1.try_into()?; + Ok(Dimensions::new(width, height)) } else { - let width = NonZeroUsize::new(max_width).ok_or(Error::ZeroDimension)?; + let width = NonZeroUsize::try_from(max_width)?; let current_height = round_up_to_multiple(n_scalars, width) / width; // Round the height up to a power of 2 for ffts let height = round_up_power_of_2(current_height); @@ -401,10 +402,7 @@ pub fn get_block_dims( if height > max_height { return Err(Error::BlockTooBig); } - Ok(Dimensions::new( - width, - height.try_into().map_err(|_| Error::ZeroDimension)?, - )) + Ok(Dimensions::new(width, height.try_into()?)) } } From 00595215eb9d4cae78ce8504545fba5dc15d6364 Mon Sep 17 00:00:00 2001 From: William Arnold Date: Wed, 14 Jun 2023 20:10:34 +0900 Subject: [PATCH 55/87] Formatting --- kate/grid/src/dims.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs index 7a6394dd..c7e89593 100644 --- a/kate/grid/src/dims.rs +++ b/kate/grid/src/dims.rs @@ -12,8 +12,8 @@ impl Dimensions { Dimensions { width, height } } - /// Make a new `Dimensions` panicking if either width or height are zero. 
- /// Again, **this will panic if a zero width or zero height are given**. + /// Make a new `Dimensions` panicking if either width or height are zero. + /// Again, **this will panic if a zero width or zero height are given**. pub const fn new_unchecked(width: usize, height: usize) -> Self { Self { width: nonzero_unchecked(width), @@ -74,8 +74,8 @@ impl Extension { } } - /// Make a new height extension without checking if `factor` is nonzero. - /// Again, **this will panic if a zero `factor` is given**. + /// Make a new height extension without checking if `factor` is nonzero. + /// Again, **this will panic if a zero `factor` is given**. pub const fn height_unchecked(factor: usize) -> Self { Self { height_factor: nonzero_unchecked(factor), @@ -90,8 +90,8 @@ impl Extension { } } - /// Make a new width extension without checking if `factor` is nonzero. - /// Again, **this will panic if a zero `factor` is given**. + /// Make a new width extension without checking if `factor` is nonzero. + /// Again, **this will panic if a zero `factor` is given**. 
pub const fn width_unchecked(factor: usize) -> Self { Self { height_factor: nonzero_unchecked(1), From 5c226099a38c5ead9dbff84cebd0b13358920dff Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Tue, 20 Jun 2023 14:43:51 +0200 Subject: [PATCH 56/87] Replace `Grid` by `NAlgebra` --- Cargo.lock | 436 +++++------------------ Cargo.toml | 1 - kate/Cargo.toml | 56 +-- kate/examples/multiproof_verification.rs | 10 +- kate/grid/Cargo.toml | 6 + kate/grid/src/grid.rs | 72 +++- kate/grid/src/lib.rs | 2 - kate/recovery/Cargo.toml | 3 +- kate/recovery/src/com.rs | 113 +++--- kate/recovery/src/commitments.rs | 85 ++--- kate/recovery/src/data.rs | 4 +- kate/recovery/src/matrix.rs | 166 +++++++-- kate/recovery/src/proof.rs | 5 +- kate/src/com.rs | 361 ++++++++++--------- kate/src/gridgen/mod.rs | 337 +++++++++--------- kate/src/gridgen/tests/commitments.rs | 63 ++-- kate/src/gridgen/tests/formatting.rs | 65 ++-- kate/src/gridgen/tests/mod.rs | 8 +- kate/src/gridgen/tests/reconstruction.rs | 48 ++- kate/src/lib.rs | 28 +- primitives/types/src/data_lookup.rs | 17 + 21 files changed, 926 insertions(+), 960 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 03731c90..1a8ddae0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -108,6 +108,15 @@ version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + [[package]] name = "ark-bls12-381" version = "0.4.0" @@ -261,17 +270,6 @@ dependencies = [ "syn 2.0.16", ] -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - 
"winapi", -] - [[package]] name = "auto_impl" version = "1.1.0" @@ -502,6 +500,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" +[[package]] +name = "bytemuck" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" + [[package]] name = "byteorder" version = "1.4.3" @@ -517,12 +521,6 @@ dependencies = [ "serde", ] -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - [[package]] name = "cc" version = "1.0.79" @@ -575,17 +573,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "clap" -version = "2.34.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" -dependencies = [ - "bitflags", - "textwrap", - "unicode-width", -] - [[package]] name = "coins-bip32" version = "0.7.0" @@ -706,42 +693,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "criterion" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" -dependencies = [ - "atty", - "cast", - "clap", - "criterion-plot", - "csv", - "itertools 0.10.5", - "lazy_static", - "num-traits", - "oorandom", - "plotters", - "rayon", - "regex", - "serde", - "serde_cbor", - "serde_derive", - "serde_json", - "tinytemplate", - "walkdir", -] - -[[package]] -name = "criterion-plot" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" -dependencies = [ - "cast", - "itertools 0.10.5", -] - [[package]] name = "crossbeam-channel" version = "0.5.8" @@ -833,27 
+784,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "csv" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad" -dependencies = [ - "csv-core", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "csv-core" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" -dependencies = [ - "memchr", -] - [[package]] name = "ctr" version = "0.9.2" @@ -895,21 +825,18 @@ version = "0.4.6" dependencies = [ "beefy-merkle-tree", "da-types", - "derive_more", "frame-support", "hash256-std-hasher", "hex-literal", "log", "parity-scale-codec", - "parity-util-mem", "scale-info", "serde", "serde_json", "sp-core", - "sp-io", "sp-runtime", "sp-runtime-interface", - "sp-std 4.0.0", + "sp-std 5.0.0", "sp-trie", "test-case", "thiserror-no-std", @@ -922,11 +849,9 @@ dependencies = [ "derive_more", "num-traits", "parity-scale-codec", - "parity-util-mem", "scale-info", "serde", - "sp-debug-derive 5.0.0", - "sp-runtime", + "sp-core", ] [[package]] @@ -1393,7 +1318,7 @@ dependencies = [ "serde", "smallvec", "sp-api", - "sp-arithmetic 6.0.0", + "sp-arithmetic", "sp-core", "sp-core-hashing-proc-macro", "sp-inherents", @@ -1615,12 +1540,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "half" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" - [[package]] name = "hash-db" version = "0.15.2" @@ -1669,15 +1588,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.2.6" @@ -1852,9 +1762,9 @@ checksum = "59ce5ef949d49ee85593fc4d3f3f95ad61657076395cbbce23e2121fc5542074" [[package]] name = "io-lifetimes" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ "hermit-abi 0.3.1", "libc", @@ -1911,18 +1821,15 @@ dependencies = [ name = "kate" version = "0.7.1" dependencies = [ - "criterion", "da-types", + "derive_more", "dusk-bytes", "dusk-plonk", - "frame-support", - "getrandom 0.2.9", "hex", "hex-literal", - "itertools 0.10.5", - "kate-grid", "kate-recovery", "log", + "nalgebra", "once_cell", "parity-scale-codec", "poly-multiproof", @@ -1932,18 +1839,11 @@ dependencies = [ "rayon", "serde", "serde_json", - "sp-arithmetic 7.0.0", - "sp-core-hashing", - "sp-std 4.0.0", + "sp-arithmetic", + "sp-core", "static_assertions", "test-case", -] - -[[package]] -name = "kate-grid" -version = "0.6.1" -dependencies = [ - "rayon", + "thiserror-no-std", ] [[package]] @@ -1952,16 +1852,16 @@ version = "0.8.1" dependencies = [ "dusk-bytes", "dusk-plonk", - "getrandom 0.2.9", "hex", - "num", "once_cell", "parity-scale-codec", "rand 0.8.5", "rand_chacha 0.3.1", "serde", + "sp-arithmetic", + "static_assertions", "test-case", - "thiserror", + "thiserror-no-std", ] [[package]] @@ -2097,6 +1997,16 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matrixmultiply" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090126dc04f95dc0d1c1c91f61bdd474b3930ca064c1edc8a849da2c6cbe1e77" +dependencies = [ + "autocfg", + "rawpointer", +] + [[package]] name = "memchr" version = "2.5.0" @@ -2170,6 +2080,21 @@ dependencies = [ "adler", ] 
+[[package]] +name = "nalgebra" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d68d47bba83f9e2006d117a9a33af1524e655516b8919caac694427a6fb1e511" +dependencies = [ + "approx", + "matrixmultiply", + "num-complex", + "num-rational", + "num-traits", + "simba", + "typenum", +] + [[package]] name = "nohash-hasher" version = "0.2.0" @@ -2260,20 +2185,6 @@ dependencies = [ "tiny-keccak", ] -[[package]] -name = "num" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - [[package]] name = "num-bigint" version = "0.4.3" @@ -2314,17 +2225,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-iter" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] name = "num-rational" version = "0.4.1" @@ -2384,12 +2284,6 @@ version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" -[[package]] -name = "oorandom" -version = "11.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" - [[package]] name = "opaque-debug" version = "0.2.3" @@ -2454,31 +2348,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "parity-util-mem" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d32c34f4f5ca7f9196001c0aba5a1f9a5a12382c8944b8b0f90233282d1e8f8" -dependencies = [ - "cfg-if", - "impl-trait-for-tuples", - "parity-util-mem-derive", - "parking_lot", - "primitive-types", - 
"winapi", -] - -[[package]] -name = "parity-util-mem-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f557c32c6d268a07c921471619c0295f5efad3a0e76d4f97a05c091a51d110b2" -dependencies = [ - "proc-macro2", - "syn 1.0.109", - "synstructure", -] - [[package]] name = "parity-wasm" version = "0.45.0" @@ -2568,34 +2437,6 @@ dependencies = [ "spki", ] -[[package]] -name = "plotters" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" -dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" - -[[package]] -name = "plotters-svg" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" -dependencies = [ - "plotters-backend", -] - [[package]] name = "poly-multiproof" version = "0.0.1" @@ -2810,6 +2651,12 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rawpointer" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" + [[package]] name = "rayon" version = "1.7.0" @@ -2986,13 +2833,13 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.19" +version = "0.37.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +checksum = "b96e891d04aa506a6d1f318d2771bcb1c7dfda84e126660ace067c9b474bb2c0" dependencies = [ "bitflags", "errno 0.3.1", - "io-lifetimes 1.0.10", + "io-lifetimes 1.0.11", "libc", "linux-raw-sys 0.3.8", "windows-sys 
0.48.0", @@ -3023,21 +2870,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" [[package]] -name = "salsa20" -version = "0.10.2" +name = "safe_arch" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" +checksum = "62a7484307bd40f8f7ccbacccac730108f2cae119a3b11c74485b48aa9ea650f" dependencies = [ - "cipher", + "bytemuck", ] [[package]] -name = "same-file" -version = "1.0.6" +name = "salsa20" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" dependencies = [ - "winapi-util", + "cipher", ] [[package]] @@ -3158,16 +3005,6 @@ dependencies = [ "serde_derive", ] -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half", - "serde", -] - [[package]] name = "serde_derive" version = "1.0.163" @@ -3255,6 +3092,19 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "simba" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "061507c94fc6ab4ba1c9a0305018408e312e17c041eb63bef8aa726fa33aceae" +dependencies = [ + "approx", + "num-complex", + "num-traits", + "paste", + "wide", +] + [[package]] name = "slab" version = "0.4.8" @@ -3327,22 +3177,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "sp-arithmetic" -version = "7.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6413ad82d166d40d995aa95ca6e0cbb473f973d3a2f0b433ae19813048c6c1" -dependencies = [ - "integer-sqrt", - "num-traits", - "parity-scale-codec", - 
"scale-info", - "serde", - "sp-debug-derive 6.0.0", - "sp-std 6.0.0", - "static_assertions", -] - [[package]] name = "sp-beefy" version = "4.0.0-dev" @@ -3390,7 +3224,7 @@ dependencies = [ "secrecy", "serde", "sp-core-hashing", - "sp-debug-derive 5.0.0", + "sp-debug-derive", "sp-externalities", "sp-runtime-interface", "sp-std 5.0.0", @@ -3437,17 +3271,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "sp-debug-derive" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66fb9dc63d54de7d7bed62a505b6e0bd66c122525ea1abb348f6564717c3df2d" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "sp-externalities" version = "0.13.0" @@ -3526,7 +3349,7 @@ dependencies = [ "serde", "sp-api", "sp-core", - "sp-debug-derive 5.0.0", + "sp-debug-derive", "sp-runtime", "sp-std 5.0.0", "thiserror", @@ -3557,7 +3380,7 @@ dependencies = [ "scale-info", "serde", "sp-application-crypto", - "sp-arithmetic 6.0.0", + "sp-arithmetic", "sp-core", "sp-io", "sp-std 5.0.0", @@ -3637,12 +3460,6 @@ name = "sp-std" version = "5.0.0" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" -[[package]] -name = "sp-std" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af0ee286f98455272f64ac5bb1384ff21ac029fbb669afbaf48477faff12760e" - [[package]] name = "sp-storage" version = "7.0.0" @@ -3652,7 +3469,7 @@ dependencies = [ "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 5.0.0", + "sp-debug-derive", "sp-std 5.0.0", ] @@ -3741,9 +3558,9 @@ dependencies = [ "scale-info", "serde", "smallvec", - "sp-arithmetic 6.0.0", + "sp-arithmetic", "sp-core", - "sp-debug-derive 5.0.0", + "sp-debug-derive", "sp-std 5.0.0", ] @@ -3847,18 +3664,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "unicode-xid", -] - [[package]] name = "tap" version = "1.0.1" @@ -3873,15 +3678,16 @@ checksum = "fd1ba337640d60c3e96bc6f0638a939b9c9a7f2c316a1598c279828b3d1dc8c5" [[package]] name = "tempfile" -version = "3.5.0" +version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" dependencies = [ + "autocfg", "cfg-if", "fastrand", "redox_syscall 0.3.5", - "rustix 0.37.19", - "windows-sys 0.45.0", + "rustix 0.37.20", + "windows-sys 0.48.0", ] [[package]] @@ -3897,15 +3703,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] - [[package]] name = "thiserror" version = "1.0.40" @@ -3993,16 +3790,6 @@ dependencies = [ "crunchy", ] -[[package]] -name = "tinytemplate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" -dependencies = [ - "serde", - "serde_json", -] - [[package]] name = "tinyvec" version = "1.6.0" @@ -4196,12 +3983,6 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" -[[package]] -name = "unicode-width" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" - [[package]] name = "unicode-xid" version = "0.2.4" @@ -4239,16 +4020,6 @@ dependencies = [ "libc", ] -[[package]] -name = "walkdir" 
-version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" -dependencies = [ - "same-file", - "winapi-util", -] - [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" @@ -4480,13 +4251,13 @@ dependencies = [ ] [[package]] -name = "web-sys" -version = "0.3.63" +name = "wide" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2" +checksum = "40018623e2dba2602a9790faba8d33f2ebdebf4b86561b83928db735f8784728" dependencies = [ - "js-sys", - "wasm-bindgen", + "bytemuck", + "safe_arch", ] [[package]] @@ -4505,15 +4276,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" diff --git a/Cargo.toml b/Cargo.toml index b833ca61..9f652415 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,7 +4,6 @@ members = [ "primitives/types", "kate", "kate/recovery", - "kate/grid", "primitives/nomad/signature", "primitives/nomad/nomad-core", "primitives/nomad/nomad-base", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 17edd1bb..7482b0f4 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -7,63 +7,63 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +# Pending to review +poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = 
"1ec7c7eca0861ad89427c45534006f49d039820a" } + +# Internal da-types = { path = "../primitives/types", default-features = false } -dusk-bytes = { version = "0.1.6", default-features = false, optional = true } dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2", optional = true } -frame-support = { version = "4.0.0-dev", default-features = false } -getrandom = { version = "0.2", features = ["js"], optional = true } -hex = { version = "0.4", default-features = false, features = ["alloc"] } -hex-literal = "0.3.4" -kate-grid = { path = "grid" } kate-recovery = { path = "recovery", default-features = false, optional = true } +# kate-grid = { path = "grid" } + +# Parity & Substrate +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "6", default-features = false } +sp-core = { version = "7.0.0", default-features = false, optional = true } + +# 3rd-party +derive_more = { version = "0.99.17", default-features = false, features = ["constructor"] } +dusk-bytes = { version = "0.1.6", default-features = false } +hex = { version = "0.4", optional = true, default-features = false, features = ["alloc"] } +hex-literal = { version = "0.3.4", optional = true } log = { version = "0.4.8", optional = true } +nalgebra = { version = "0.32.2", default-features = false } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "1ec7c7eca0861ad89427c45534006f49d039820a" } rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } -serde = { version = "1.0.121", optional = true, features = ["derive"] } -sp-arithmetic = { version = "7.0.0", default-features = false } -sp-core-hashing = { version = "5.0.0", default-features = false, optional 
= true } -sp-std = { version = "4.0.0", default-features = false } +serde = { version = "1", optional = true, features = ["derive"] } static_assertions = "1.1.0" +thiserror-no-std = "2.0.2" [dev-dependencies] -criterion = "0.3.5" -itertools = "0.10" -proptest = "1.0.0" -serde_json = "1.0" +proptest = "1" +serde_json = "1" test-case = "1.2.3" [features] default = ["std"] -alloc = ["dusk-plonk/alloc"] -parallel = ["std", "rayon", "kate-grid/parallel"] +alloc = ["dusk-plonk/alloc", "nalgebra/alloc"] +parallel = ["rayon"] std = [ "kate-recovery/std", - "hex/std", "once_cell", + "hex-literal", + "hex", "codec/std", - "alloc", "serde", - "rayon", "rand", "rand_chacha/std", "log", "dusk-plonk/std", - "dusk-bytes", - "sp-std/std", "da-types/std", "sp-arithmetic/std", - "sp-core-hashing", - "sp-core-hashing/std", + "sp-core/std", "poly-multiproof/blst", + "nalgebra/std", ] -substrate = [ - "da-types/substrate", -] + extended-columns = [] maximum-block-size = [] diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index 9ef785d7..0ffcf0ad 100644 --- a/kate/examples/multiproof_verification.rs +++ b/kate/examples/multiproof_verification.rs @@ -4,11 +4,13 @@ use kate::{ pmp::{merlin::Transcript, traits::PolyMultiProofNoPrecomp}, Seed, }; +use kate_recovery::matrix::Dimensions; use poly_multiproof::traits::AsBytes; use rand::thread_rng; +use std::num::NonZeroU16; fn main() { - let target_dims = kate::grid::Dimensions::new_unchecked(16, 64); + let target_dims = Dimensions::new_from(16, 64).unwrap(); let pp = kate::testnet::multiproof_params(256, 256); let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng()); let points = kate::gridgen::domain_points(256).unwrap(); @@ -30,7 +32,7 @@ fn main() { let seed = Seed::default(); let grid = kate::gridgen::EvaluationGrid::from_extrinsics(exts, 4, 256, 256, seed) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); // Setup, 
serializing as bytes @@ -61,10 +63,10 @@ fn main() { .iter() .flat_map(|row| row.iter().flat_map(|e| e.to_bytes().unwrap())) .collect::>(); - (proof_bytes, evals_bytes, commitments, grid.dims) + (proof_bytes, evals_bytes, commitments, grid.dims()) }; - let mp_block = kate::gridgen::multiproof_block(0, 0, &dims, &target_dims).unwrap(); + let mp_block = kate::gridgen::multiproof_block(0, 0, dims, &target_dims).unwrap(); let commits = commitments .chunks_exact(48) .skip(mp_block.start_y) diff --git a/kate/grid/Cargo.toml b/kate/grid/Cargo.toml index 82cf3469..4d646f10 100644 --- a/kate/grid/Cargo.toml +++ b/kate/grid/Cargo.toml @@ -5,7 +5,13 @@ authors = ["William Arnold warnold@polygon.technology"] edition = "2021" [dependencies] +kate-recovery = { path = "../recovery", default-features = false } rayon = { version = "1.5.2", optional = true } +[dev-dependencies] +nalgebra = "0.32.2" + [features] +default = ["std"] +std = ["kate-recovery/std"] parallel = ["rayon"] diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs index e483d1a0..4226f1b8 100644 --- a/kate/grid/src/grid.rs +++ b/kate/grid/src/grid.rs @@ -1,6 +1,6 @@ use alloc::vec::Vec; -use crate::Dimensions; +use kate_recovery::matrix::Dimensions; pub trait Grid { fn width(&self) -> usize; @@ -30,11 +30,11 @@ pub struct ColumnMajor { impl Grid for RowMajor { fn width(&self) -> usize { - self.dims.width() + self.dims.cols().get().into() } fn height(&self) -> usize { - self.dims.height() + self.dims.rows().get().into() } fn dims(&self) -> &Dimensions { @@ -46,11 +46,12 @@ impl Grid for RowMajor { } fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - (i % dims.width_nz(), i / dims.width_nz()) + let cols: usize = dims.cols().get().into(); + (i % cols, i / cols) } fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - x.saturating_add(y.saturating_mul(dims.width())) + x.saturating_add(y.saturating_mul(dims.cols().get().into())) } fn inner(&self) -> &Vec { @@ -60,11 +61,11 @@ impl Grid 
for RowMajor { impl Grid for ColumnMajor { fn width(&self) -> usize { - self.dims.width() + self.dims.cols().get().into() } fn height(&self) -> usize { - self.dims.height() + self.dims.rows().get().into() } fn dims(&self) -> &Dimensions { @@ -76,11 +77,12 @@ impl Grid for ColumnMajor { } fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - (i / dims.height_nz(), i % dims.height_nz()) + let rows: usize = dims.rows().get().into(); + (i / rows, i % rows) } fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - y.saturating_add(x.saturating_mul(dims.height())) + y.saturating_add(x.saturating_mul(dims.rows().get().into())) } fn inner(&self) -> &Vec { @@ -195,7 +197,7 @@ impl IntoRowMajor for Vec { fn into_row_major(self, width: usize, height: usize) -> Option> { if self.len() == usize::checked_mul(width, height)? { Some(RowMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), + dims: Dimensions::new_from(height, width)?, inner: self, }) } else { @@ -208,7 +210,7 @@ impl IntoColumnMajor for Vec { fn into_column_major(self, width: usize, height: usize) -> Option> { if self.len() == width.checked_mul(height)? { Some(ColumnMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), + dims: Dimensions::new_from(height, width)?, inner: self, }) } else { @@ -221,7 +223,7 @@ impl IntoColumnMajor for [A; LEN] { fn into_column_major(self, width: usize, height: usize) -> Option> { if self.len() == width.checked_mul(height)? { Some(ColumnMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), + dims: Dimensions::new_from(height, width)?, inner: self.into(), }) } else { @@ -234,7 +236,7 @@ impl IntoRowMajor for [A; LEN] { fn into_row_major(self, width: usize, height: usize) -> Option> { if self.len() == width.checked_mul(height)? 
{ Some(RowMajor { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), + dims: Dimensions::new_from(height, width)?, inner: self.into(), }) } else { @@ -247,12 +249,13 @@ impl IntoRowMajor for [A; LEN] { mod tests { use super::*; use alloc::vec::Vec; + use nalgebra::base::DMatrix; #[test] fn test_row_major() { let data = [1, 2, 3, 4, 5, 6]; - let rm = data.into_row_major(3, 2).unwrap(); + let rm = data.clone().into_row_major(3, 2).unwrap(); assert_eq!(rm.get(0, 0), Some(&1)); assert_eq!(rm.get(1, 0), Some(&2)); assert_eq!(rm.get(2, 0), Some(&3)); @@ -265,12 +268,33 @@ mod tests { assert_eq!(vec![&1, &4], rm.iter_col(0).unwrap().collect::>()); assert_eq!(vec![&2, &5], rm.iter_col(1).unwrap().collect::>()); assert_eq!(vec![&3, &6], rm.iter_col(2).unwrap().collect::>()); + + let rm_matrix = DMatrix::from_row_iterator(2, 3, data); + assert_eq!(rm_matrix.get((0, 0)), Some(&1)); + assert_eq!(rm_matrix.get((1, 0)), Some(&4)); + assert_eq!(rm_matrix.get((0, 1)), Some(&2)); + assert_eq!(rm_matrix.get((1, 1)), Some(&5)); + assert_eq!(rm_matrix.get((0, 2)), Some(&3)); + assert_eq!(rm_matrix.get((1, 2)), Some(&6)); + + for (row, expected) in rm_matrix.row_iter().zip([[1, 2, 3], [4, 5, 6]].into_iter()) { + assert_eq!(row.iter().cloned().collect::>(), expected.to_vec()); + } + for (cols, expected) in rm_matrix + .column_iter() + .zip([[1, 4], [2, 5], [3, 6]].into_iter()) + { + assert_eq!( + cols.iter().cloned().collect::>(), + expected.to_vec() + ); + } } #[test] fn test_column_major() { let data = [1, 4, 2, 5, 3, 6]; - let cm = data.into_column_major(3, 2).unwrap(); + let cm = data.clone().into_column_major(3, 2).unwrap(); assert_eq!(cm.get(0, 0), Some(&1)); assert_eq!(cm.get(1, 0), Some(&2)); @@ -290,5 +314,23 @@ mod tests { vec![&4, &5, &6], cm.iter_row(1).unwrap().collect::>() ); + + let cm_matrix = DMatrix::from_column_slice(2, 3, &data); + assert_eq!(cm_matrix.get((0, 0)), Some(&1)); + assert_eq!(cm_matrix.get((1, 0)), Some(&4)); + 
assert_eq!(cm_matrix.get((0, 1)), Some(&2)); + assert_eq!(cm_matrix.get((1, 1)), Some(&5)); + assert_eq!(cm_matrix.get((0, 2)), Some(&3)); + assert_eq!(cm_matrix.get((1, 2)), Some(&6)); + + for (col, expected) in cm_matrix + .column_iter() + .zip([[1, 4], [2, 5], [3, 6]].into_iter()) + { + assert_eq!(col.iter().cloned().collect::>(), expected.to_vec()); + } + for (row, expected) in cm_matrix.row_iter().zip([[1, 2, 3], [4, 5, 6]].into_iter()) { + assert_eq!(row.iter().cloned().collect::>(), expected.to_vec()); + } } } diff --git a/kate/grid/src/lib.rs b/kate/grid/src/lib.rs index 0cf77e61..b45a5c4f 100644 --- a/kate/grid/src/lib.rs +++ b/kate/grid/src/lib.rs @@ -6,7 +6,5 @@ #[cfg_attr(test, macro_use)] extern crate alloc; -mod dims; mod grid; -pub use dims::*; pub use grid::*; diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index a7809af8..3dc3acf3 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -10,7 +10,7 @@ dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0 # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -sp-arithmetic = { version = "7", default-features = false } +sp-arithmetic = { version = "6", default-features = false } # 3rd-parties dusk-bytes = { version = "0.1.6", default-features = false } @@ -18,6 +18,7 @@ once_cell = { version = "1.9.0", optional = true } rand = { version = "0.8.4", optional = true } rand_chacha = { version = "0.3", optional = true } serde = { version = "1", optional = true, features = ["derive"] } +static_assertions = "1.1.0" thiserror-no-std = "2.0.2" [dev-dependencies] diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 7fb91127..507c50e6 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,17 +1,20 @@ use codec::Decode; +use core::num::TryFromIntError; use dusk_bytes::Serializable as _; use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; use 
rand::seq::SliceRandom; use sp_arithmetic::{traits::SaturatedConversion, Percent}; +use static_assertions::const_assert_ne; use std::{ collections::{HashMap, HashSet}, convert::{TryFrom, TryInto}, iter::FromIterator, + ops::Range, }; use thiserror_no_std::Error; use crate::{ - config::{self, CHUNK_SIZE}, + config::{self, CHUNK_SIZE, DATA_CHUNK_SIZE, PADDING_TAIL_VALUE}, data, ensure, index, matrix, }; @@ -28,7 +31,7 @@ pub enum ReconstructionError { #[error("Cannot reconstruct column: {0}")] ColumnReconstructionError(String), #[error("Cannot decode data: {0}")] - DataDecodingError(String), + DataDecodingError(#[from] UnflattenError), #[error("Column reconstruction supports up to {}", u16::MAX)] RowCountExceeded, } @@ -64,7 +67,7 @@ fn map_cells( ) -> Result>, ReconstructionError> { let mut result: HashMap> = HashMap::new(); for cell in cells { - let position = cell.position.clone(); + let position = cell.position; if !dimensions.extended_contains(&position) { return Err(ReconstructionError::InvalidCell { position }); } @@ -136,8 +139,7 @@ pub fn reconstruct_app_extrinsics( let data = reconstruct_available(dimensions, cells)?; let ranges = index.app_data_ranges(app_id); - Ok(unflatten_padded_data(ranges, data) - .map_err(ReconstructionError::DataDecodingError)? + Ok(unflatten_padded_data(ranges, data)? 
.into_iter() .flat_map(|(_, xts)| xts) .collect::>()) @@ -176,9 +178,10 @@ pub fn reconstruct_columns( columns .iter() .map(|(&col, cells)| { - if cells.len() < dimensions.rows().into() { - return Err(ReconstructionError::InvalidColumn(col)); - } + ensure!( + cells.len() >= dimensions.rows().get().into(), + ReconstructionError::InvalidColumn(col) + ); let cells = cells.values().cloned().collect::>(); @@ -198,14 +201,16 @@ fn reconstruct_available( cells: Vec, ) -> Result, ReconstructionError> { let columns = map_cells(dimensions, cells)?; + let rows: usize = dimensions.rows().get().into(); - let scalars = (0..dimensions.cols()) + let scalars = (0..dimensions.cols().get()) .map(|col| match columns.get(&col) { - None => Ok(vec![None; dimensions.rows() as usize]), + None => Ok(vec![None; rows]), Some(column_cells) => { - if column_cells.len() < dimensions.rows() as usize { - return Err(ReconstructionError::InvalidColumn(col)); - } + ensure!( + column_cells.len() >= rows, + ReconstructionError::InvalidColumn(col) + ); let cells = column_cells.values().cloned().collect::>(); reconstruct_column(dimensions.extended_rows(), &cells) @@ -241,15 +246,15 @@ fn reconstruct_available( /// * `app_id` - Application ID pub fn decode_app_extrinsics( index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: Vec, app_id: u32, ) -> Result { - let positions = app_specific_cells(index, dimensions, app_id).unwrap_or_default(); + let positions = app_specific_cells(index, &dimensions, app_id).unwrap_or_default(); if positions.is_empty() { return Ok(vec![]); } - let cells_map = map_cells(dimensions, cells)?; + let cells_map = map_cells(&dimensions, cells)?; for position in positions { cells_map @@ -279,50 +284,60 @@ pub fn decode_app_extrinsics( .collect::>()) } +#[derive(Error, Clone, Debug)] +pub enum UnflattenError { + #[error("`AppDataRange` cannot be converted into `Range`")] + RangeConversion(#[from] TryFromIntError), + #[error("`AppData` 
cannot be decoded due to {0}")] + Codec(#[from] codec::Error), + #[error("Invalid data size, it needs to be a multiple of CHUNK_SIZE")] + InvalidLen, +} + // Removes both extrinsics and block padding (iec_9797 and seeded random data) pub fn unflatten_padded_data( ranges: Vec<(u32, AppDataRange)>, data: Vec, -) -> Result, String> { - if data.len() % config::CHUNK_SIZE > 0 { - return Err("Invalid data size".to_string()); - } - - fn trim_to_data_chunks(range_data: &[u8]) -> Result, String> { - range_data - .chunks_exact(config::CHUNK_SIZE) - .map(|chunk| chunk.get(0..config::DATA_CHUNK_SIZE)) - .collect::>>() - .map(|data_chunks| data_chunks.concat()) - .ok_or_else(|| format!("Chunk data size less than {}", config::DATA_CHUNK_SIZE)) - } - - fn trim_padding(mut data: Vec) -> Result, String> { - while data.last() == Some(&0) { - data.pop(); - } - - match data.pop() { - None => Err("Cannot trim padding on empty data".to_string()), - Some(config::PADDING_TAIL_VALUE) => Ok(data), - Some(_) => Err("Invalid padding tail value".to_string()), +) -> Result, UnflattenError> { + ensure!(data.len() % CHUNK_SIZE == 0, UnflattenError::InvalidLen); + + fn extract_encoded_extrinsic(range_data: &[u8]) -> Vec { + const_assert_ne!(CHUNK_SIZE, 0); + const_assert_ne!(DATA_CHUNK_SIZE, 0); + + // INTERNAL: Chunk into 32 bytes (CHUNK_SIZE), then remove padding (0..30 bytes). + let mut data = range_data + .chunks_exact(CHUNK_SIZE) + .flat_map(|chunk| chunk[0..DATA_CHUNK_SIZE].iter()) + .cloned() + .collect::>(); + + // INTERNAL: Remove zeros and `PADDING_TAIL_VALUE` at the end. 
+ let tail_value_pos = data + .iter() + .rev() + .enumerate() + .skip_while(|(_, byte)| **byte == 0) + .find(|(_, byte)| **byte == PADDING_TAIL_VALUE) + .map(|(rev_pos, _)| data.len() - rev_pos - 1); + if let Some(tail_value_pos) = tail_value_pos { + data.truncate(tail_value_pos); } - } - fn decode_extrinsics(data: Vec) -> Result { - ::decode(&mut data.as_slice()).map_err(|err| format!("Cannot decode data: {err}")) + data } ranges .into_iter() .map(|(app_id, range)| { - let range = range.start as usize..range.end as usize; - trim_to_data_chunks(&data[range]) - .and_then(trim_padding) - .and_then(decode_extrinsics) - .map(|data| (app_id, data)) + //let range = range.start as usize..range.end as usize; + let range: Range = range.start.try_into()?..range.end.try_into()?; + let encoded = extract_encoded_extrinsic(&data[range]); + let extrinsic = ::decode(&mut encoded.as_slice())?; + + Ok((app_id, extrinsic)) }) - .collect::, String>>() + .collect::, _>>() } // This module is taken from https://gist.github.com/itzmeanjan/4acf9338d9233e79cfbee5d311e7a0b4 @@ -443,7 +458,7 @@ fn unshift_poly(poly: &mut [BlsScalar]) { } } -pub type AppDataRange = std::ops::Range; +pub type AppDataRange = Range; // use this function for reconstructing back all cells of certain column // when at least 50% of them are available // diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 8455e5ad..9b792909 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -8,20 +8,20 @@ use std::{ use dusk_bytes::Serializable; use dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, - prelude::{BlsScalar, PublicParameters}, + prelude::{BlsScalar, CommitKey, PublicParameters}, }; use thiserror_no_std::Error; use crate::{ com, config::{self, COMMITMENT_SIZE}, - index, matrix, + ensure, index, matrix, }; #[derive(Error, Debug)] -pub enum DataError { +pub enum Error { #[error("Scalar slice error: {0}")] - SliceError(TryFromSliceError), + 
SliceError(#[from] TryFromSliceError), #[error("Scalar data is not valid")] ScalarDataError, #[error("Invalid scalar data length")] @@ -31,48 +31,26 @@ pub enum DataError { #[error("Bad data len")] BadLen, #[error("Plonk error: {0}")] - PlonkError(dusk_plonk::error::Error), + PlonkError(#[from] dusk_plonk::error::Error), #[error("Bad commitments data")] BadCommitmentsData, #[error("Bad rows data")] BadRowsData, -} - -#[derive(Error, Debug)] -pub enum Error { - #[error("Invalid data: {0}")] - InvalidData(DataError), -} - -impl From for Error { - fn from(e: TryFromSliceError) -> Self { - Self::InvalidData(DataError::SliceError(e)) - } -} - -impl From for Error { - fn from(_: TryFromIntError) -> Self { - Self::InvalidData(DataError::BadCommitmentsData) - } + #[error("Integer conversion error")] + IntError(#[from] TryFromIntError), } #[cfg(feature = "std")] impl From for Error { fn from(e: dusk_bytes::Error) -> Self { match e { - dusk_bytes::Error::InvalidData => Self::InvalidData(DataError::ScalarDataError), - dusk_bytes::Error::BadLength { .. } => Self::InvalidData(DataError::BadScalarDataLen), - dusk_bytes::Error::InvalidChar { .. } => Self::InvalidData(DataError::BadScalarData), + dusk_bytes::Error::InvalidData => Self::ScalarDataError, + dusk_bytes::Error::BadLength { .. } => Self::BadScalarDataLen, + dusk_bytes::Error::InvalidChar { .. 
} => Self::BadScalarData, } } } -impl From for Error { - fn from(e: dusk_plonk::error::Error) -> Self { - Self::InvalidData(DataError::PlonkError(e)) - } -} - fn try_into_scalar(chunk: &[u8]) -> Result { let sized_chunk = <[u8; config::CHUNK_SIZE]>::try_from(chunk)?; BlsScalar::from_bytes(&sized_chunk).map_err(From::from) @@ -80,9 +58,7 @@ fn try_into_scalar(chunk: &[u8]) -> Result { fn try_into_scalars(data: &[u8]) -> Result, Error> { let chunks = data.chunks_exact(config::CHUNK_SIZE); - if !chunks.remainder().is_empty() { - return Err(Error::InvalidData(DataError::BadLen)); - } + ensure!(chunks.remainder().is_empty(), Error::BadLen); chunks .map(try_into_scalar) .collect::, Error>>() @@ -109,18 +85,17 @@ pub fn verify_equality( dimensions: &matrix::Dimensions, app_id: u32, ) -> Result<(Vec, Vec), Error> { - if commitments.len() != dimensions.extended_rows().try_into()? { - return Err(Error::InvalidData(DataError::BadCommitmentsData)); - } - + let ext_rows: usize = dimensions.extended_rows().try_into()?; + ensure!(commitments.len() == ext_rows, Error::BadCommitmentsData); let mut app_rows = com::app_specific_rows(index, dimensions, app_id); - if rows.len() != dimensions.extended_rows().try_into()? { + if rows.len() != ext_rows { return Ok((vec![], app_rows)); } - let (prover_key, _) = public_params.trim(dimensions.cols() as usize)?; - let domain = EvaluationDomain::new(dimensions.cols() as usize)?; + let dim_cols = dimensions.cols().get().into(); + let (prover_key, _) = public_params.trim(dim_cols)?; + let domain = EvaluationDomain::new(dim_cols)?; // This is a single-threaded implementation. // At some point we should benchmark and decide @@ -130,11 +105,8 @@ pub fn verify_equality( .zip(rows.iter()) .zip(0u32..) .filter(|(.., index)| app_rows.contains(index)) - .filter_map(|((&commitment, row), index)| { - try_into_scalars(row.as_ref()?) 
- .map(|scalars| Evaluations::from_vec_and_domain(scalars, domain).interpolate()) - .and_then(|polynomial| prover_key.commit(&polynomial).map_err(From::from)) - .map(|result| (result.to_bytes() == commitment).then_some(index)) + .filter_map(|((commitment, maybe_row), index)| { + row_index_commitment_verification(&prover_key, domain, commitment, maybe_row, index) .transpose() }) .collect::, Error>>()?; @@ -144,6 +116,25 @@ pub fn verify_equality( Ok((verified, app_rows)) } +fn row_index_commitment_verification( + prover_key: &CommitKey, + domain: EvaluationDomain, + commitment: &[u8], + maybe_row: &Option>, + index: u32, +) -> Result, Error> { + if let Some(row) = maybe_row.as_ref() { + let scalars = try_into_scalars(row)?; + let polynomial = Evaluations::from_vec_and_domain(scalars, domain).interpolate(); + let result = prover_key.commit(&polynomial)?; + + if result.to_bytes() == commitment { + return Ok(Some(index)); + } + } + Ok(None) +} + /// Creates vector of exact size commitments, from commitments slice pub fn from_slice(source: &[u8]) -> Result, TryFromSliceError> { source diff --git a/kate/recovery/src/data.rs b/kate/recovery/src/data.rs index 7d47d049..63ef09eb 100644 --- a/kate/recovery/src/data.rs +++ b/kate/recovery/src/data.rs @@ -56,7 +56,7 @@ pub fn rows(dimensions: &Dimensions, cells: &[&Cell]) -> Vec<(RowIndex, Vec) impl From for DataCell { fn from(cell: Cell) -> Self { DataCell { - position: cell.position.clone(), + position: cell.position, data: cell.data(), } } @@ -123,7 +123,7 @@ mod tests { let mut rows = rows(&dimensions, &cells); rows.sort_by_key(|(key, _)| key.0); - assert!(rows.len() == 1); + assert_eq!(rows.len(), 1); let (row_index, row) = &rows[0]; assert_eq!(row_index.0, 0); assert_eq!(*row, [[0u8; 32], [1u8; 32]].concat()); diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index 3b5c37c9..fcc0e8c6 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -1,22 +1,57 @@ -use std::ops::Range; - 
+#[cfg(feature = "std")] use serde::{Deserialize, Serialize}; +use std::{ + convert::TryInto, + fmt::{Display, Formatter, Result}, + num::NonZeroU16, + ops::{Mul, Range}, +}; use crate::config::{self, CHUNK_SIZE}; const EXTENSION_FACTOR_U32: u32 = config::EXTENSION_FACTOR as u32; /// Position of a cell in the the matrix. -#[derive(Default, Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Default, Debug, Clone, Copy, Hash, Eq, PartialEq)] pub struct Position { pub row: u32, pub col: u16, } +impl From<(R, C)> for Position +where + R: Into, + C: Into, +{ + fn from(row_col: (R, C)) -> Self { + Self { + row: row_col.0.into(), + col: row_col.1.into(), + } + } +} + +impl From for (R, C) +where + R: From, + C: From, +{ + fn from(p: Position) -> (R, C) { + (p.row.into(), p.col.into()) + } +} + +impl Display for Position { + fn fmt(&self, f: &mut Formatter<'_>) -> Result { + f.write_fmt(format_args!("{}:{}", self.col, self.row)) + } +} + impl Position { /// Refrence in format `block_number:column_number:row_number` pub fn reference(&self, block_number: u32) -> String { - format!("{}:{}:{}", block_number, self.col, self.row) + format!("{}:{}", block_number, self) } /// Checks if position is from extended row @@ -57,51 +92,103 @@ impl RowIndex { /// Extended columns (EC is erasure code): [1,EC,5,EC], [2,EC,6,EC], [3,EC,7,EC], [4,EC,8,EC] /// Matrix representation: [1,5,2,6,3,7,4,8] /// Extended matrix representation: [1,EC,5,EC,2,EC,6,EC,3,EC,7,EC,4,EC,8,EC] -#[derive(Debug, Clone)] +#[derive(Copy, Debug, Clone, PartialEq, Eq)] pub struct Dimensions { - rows: u16, - cols: u16, + rows: NonZeroU16, + cols: NonZeroU16, +} + +impl From<(R, C)> for Dimensions +where + R: Into, + C: Into, +{ + fn from(rows_cols: (R, C)) -> Self { + let (rows, cols) = rows_cols; + Self { + rows: rows.into(), + cols: cols.into(), + } + } +} + +impl From for (R, C) +where + R: From, + C: From, +{ + fn from(d: Dimensions) 
-> Self { + (d.rows.get().into(), d.cols.get().into()) + } } impl Dimensions { - /// Creates new matrix dimensions. - /// Data layout is assumed to be row-wise. - /// Returns `None` if rows or cols is 0. - pub const fn new(rows: u16, cols: u16) -> Option { - if rows == 0 || cols == 0 { - return None; + pub fn new, C: TryInto>(rows: R, cols: C) -> Option { + let rows = rows.try_into().ok()?; + let cols = cols.try_into().ok()?; + + Some(Self { rows, cols }) + } + + pub fn new_from, C: TryInto>(rows: R, cols: C) -> Option { + let rows: u16 = rows.try_into().ok()?; + let cols: u16 = cols.try_into().ok()?; + + Self::new(rows, cols) + } + + /// Creates a `Dimension` without checking whether parameters are non-zero. This results in + /// undefined behaviour if any parameter is zero. + /// + /// # Safety + /// Parameters `rows` and `cols` must not be zero. + pub const unsafe fn new_unchecked(rows: u16, cols: u16) -> Self { + Self { + rows: NonZeroU16::new_unchecked(rows), + cols: NonZeroU16::new_unchecked(cols), } - Some(Dimensions { rows, cols }) } /// Returns number of rows - pub fn rows(&self) -> u16 { + pub fn rows(&self) -> NonZeroU16 { self.rows } /// Returns number of columns - pub fn cols(&self) -> u16 { + pub fn cols(&self) -> NonZeroU16 { self.cols } /// Matrix size. - pub fn size(&self) -> u32 { - self.rows as u32 * self.cols as u32 + pub fn size + Mul>(&self) -> T { + T::from(self.rows.get()) * T::from(self.cols.get()) + } + + pub fn divides(&self, other: &Self) -> bool { + other.cols.get() % self.cols == 0u16 && other.rows.get() % self.rows == 0u16 + } + + /// Extends rows by `row_factor` and cols by `col_factor`. + pub fn extend(&self, row_factor: NonZeroU16, col_factor: NonZeroU16) -> Option { + let rows = self.rows.checked_mul(row_factor)?; + let cols = self.cols.checked_mul(col_factor)?; + + Some(Self { rows, cols }) } /// Extended matrix size. 
- pub fn extended_size(&self) -> u64 { - self.extended_rows() as u64 * self.cols as u64 + pub fn extended_size(&self) -> u32 { + self.extended_rows() * u32::from(self.cols.get()) } /// Row size in bytes pub fn row_byte_size(&self) -> usize { - CHUNK_SIZE * self.cols as usize + CHUNK_SIZE * usize::from(self.cols.get()) } /// Extended matrix rows count. pub fn extended_rows(&self) -> u32 { - (self.rows as u32) * EXTENSION_FACTOR_U32 + u32::from(self.rows.get()) * EXTENSION_FACTOR_U32 } /// List of data row indexes in the extended matrix. @@ -123,7 +210,7 @@ impl Dimensions { /// Cell positions for given column in extended matrix. /// Empty if column index is not valid. pub fn col_positions(&self, col: u16) -> Vec { - if self.cols() <= col { + if self.cols().get() <= col { return vec![]; } (0..self.extended_rows()) @@ -137,7 +224,7 @@ impl Dimensions { if self.extended_rows() <= row { return vec![]; } - (0..self.cols()) + (0..self.cols().get()) .map(|col| Position { col, row }) .collect::>() } @@ -152,12 +239,12 @@ impl Dimensions { /// Column index of a cell in the matrix. fn col(&self, cell: u32) -> u16 { - (cell % self.cols as u32) as u16 + (cell % u32::from(self.cols.get())) as u16 } /// Extended matrix data row index of cell in the data matrix. fn extended_data_row(&self, cell: u32) -> u32 { - (cell / self.cols as u32) * EXTENSION_FACTOR_U32 + (cell / u32::from(self.cols.get())) * EXTENSION_FACTOR_U32 } /// Extended matrix data position of a cell in the data matrix. @@ -179,7 +266,7 @@ impl Dimensions { /// Checks if extended matrix contains given position. pub fn extended_contains(&self, position: &Position) -> bool { - position.row < self.extended_rows() && position.col < self.cols + position.row < self.extended_rows() && position.col < self.cols.get() } /// Creates iterator over rows in extended matrix. @@ -189,22 +276,22 @@ impl Dimensions { /// Creates iterator over data cells in data matrix (used to retrieve data from the matrix). 
pub fn iter_data(&self) -> impl Iterator { - let rows :usize = self.rows.get().into(); - let cols :usize = self.cols.get().into(); + let rows = self.rows.get().into(); + let cols = self.cols.get().into(); (0..rows).flat_map(move |row| (0..cols).map(move |col| (row, col))) } /// Creates iterator over cell indexes in data matrix (used to store data in the matrix). pub fn iter_cells(&self) -> impl Iterator { - let rows = self.rows as u32; - let cols = self.cols; - (0..cols).flat_map(move |col| (0..rows).map(move |row| row * cols as u32 + col as u32)) + let rows: u32 = self.rows.get().into(); + let cols: u32 = self.cols.get().into(); + (0..cols).flat_map(move |col| (0..rows).map(move |row| row * cols + col)) } /// Creates iterator over data positions by row in extended matrix. pub fn iter_extended_data_positions(&self) -> impl Iterator { - let rows = self.rows as u32; - let cols = self.cols; + let rows: u32 = self.rows.get().into(); + let cols = self.cols.get(); (0..rows).flat_map(move |row| (0..cols).map(move |col| (row * EXTENSION_FACTOR_U32, col))) } @@ -216,13 +303,20 @@ impl Dimensions { let size = (self.extended_size() as f64 / partition.fraction as f64).ceil() as u32; let start = size * (partition.number - 1) as u32; let end = size * (partition.number as u32); - let cols: u32 = self.cols().into(); + let cols: u32 = self.cols.get().into(); (start..end).map(move |cell| Position { row: cell / cols, col: (cell % cols) as u16, }) } + + pub fn transpose(self) -> Self { + Self { + rows: self.cols, + cols: self.rows, + } + } } #[cfg(test)] @@ -276,6 +370,6 @@ mod tests { .unwrap() .iter_extended_partition_positions(&Partition { number, fraction }) .zip(expected.iter().map(|&(row, col)| Position { row, col })) - .for_each(|(p1, p2)| assert!(p1 == p2)); + .for_each(|(p1, p2)| assert_eq!(p1, p2)); } } diff --git a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index afd803d7..1b6b91b9 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ 
-46,14 +46,15 @@ pub fn verify( commitment_to_polynomial, }; - let point = EvaluationDomain::new(dimensions.cols().into()) + let cols: usize = dimensions.cols().get().into(); + let point = EvaluationDomain::new(cols) .map_err(|error| Error::InvalidDomain(format!("{error:?}")))? .elements() .nth(cell.position.col.into()) .ok_or_else(|| Error::InvalidDomain("Position isn't in domain".to_string()))?; public_parameters - .trim(dimensions.cols().into()) + .trim(cols) .map(|(_, verifier_key)| verifier_key.check(point, proof)) .map_err(|error| Error::InvalidDegree(format!("{error:?}"))) } diff --git a/kate/src/com.rs b/kate/src/com.rs index 61703f2b..3d2e025f 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -2,11 +2,13 @@ use core::num::{NonZeroU32, NonZeroUsize}; use std::{ convert::{TryFrom, TryInto}, mem::size_of, + num::TryFromIntError, time::Instant, }; use codec::Encode; use da_types::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; +use derive_more::Constructor; use dusk_bytes::Serializable; use dusk_plonk::{ commitment_scheme::kzg10, @@ -14,20 +16,23 @@ use dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, prelude::{BlsScalar, CommitKey}, }; -use kate_grid::{Dimensions, IntoRowMajor}; #[cfg(feature = "std")] -use kate_recovery::{com::app_specific_rows, index, matrix}; +use kate_recovery::{com::app_specific_rows, ensure, index, matrix::Dimensions}; +use nalgebra::base::DMatrix; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; +#[cfg(feature = "parallel")] use rayon::prelude::*; +#[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_arithmetic::traits::SaturatedConversion; use static_assertions::const_assert_eq; +use thiserror_no_std::Error; use crate::{ config::{ - DATA_CHUNK_SIZE, EXTENSION, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, - PROOF_SIZE, PROVER_KEY_SIZE, SCALAR_SIZE, + COL_EXTENSION, DATA_CHUNK_SIZE, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, + PROOF_SIZE, PROVER_KEY_SIZE, 
ROW_EXTENSION, SCALAR_SIZE, }, metrics::Metrics, padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, LOG_TARGET, @@ -35,41 +40,41 @@ use crate::{ #[cfg(feature = "std")] use kate_recovery::testnet; -#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Constructor, Clone, Copy, PartialEq, Eq, Debug)] pub struct Cell { pub row: BlockLengthRows, pub col: BlockLengthColumns, } -impl Cell { - pub fn new(row: BlockLengthRows, col: BlockLengthColumns) -> Self { - Cell { row, col } - } -} - -#[derive(Debug)] +#[derive(Error, Debug)] pub enum Error { - PlonkError(PlonkError), - DuskBytesError(dusk_bytes::Error), - MultiproofError(poly_multiproof::Error), + PlonkError(#[from] PlonkError), + DuskBytesError(#[from] dusk_bytes::Error), + MultiproofError(#[from] poly_multiproof::Error), CellLengthExceeded, BadHeaderHash, BlockTooBig, InvalidChunkLength, DimensionsMismatch, ZeroDimension, + InvalidDimensionExtension, DomainSizeInvalid, } -impl From for Error { - fn from(error: PlonkError) -> Self { - Self::PlonkError(error) +impl From for Error { + fn from(_: TryFromIntError) -> Self { + Self::ZeroDimension } } -impl From for Error { - fn from(err: poly_multiproof::Error) -> Self { - Self::MultiproofError(err) +/// We cannot derive `PartialEq` becasue `PlonkError` does not support it in the current version. +/// and we only need to double check its discriminat for testing. +/// Only needed on tests by now. 
+#[cfg(test)] +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + std::mem::discriminant(self) == std::mem::discriminant(other) } } @@ -93,11 +98,11 @@ fn app_extrinsics_group_by_app_id(extrinsics: &[AppExtrinsic]) -> Vec<(AppId, Ve #[cfg(feature = "std")] pub fn scalars_to_rows( rows: &[u32], - dimensions: &matrix::Dimensions, + dimensions: &Dimensions, data: &[BlsScalar], ) -> Vec>> { let extended_rows = BlockLengthRows(dimensions.extended_rows()); - let cols = BlockLengthColumns(dimensions.cols() as u32); + let cols = BlockLengthColumns(dimensions.cols().get().into()); dimensions .iter_extended_rows() .map(|i| { @@ -115,11 +120,11 @@ pub fn scalars_to_rows( pub fn scalars_to_app_rows( app_id: u32, index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + dimensions: &Dimensions, data: &[BlsScalar], ) -> Vec>> { let extended_rows = BlockLengthRows(dimensions.extended_rows()); - let cols = BlockLengthColumns(dimensions.cols() as u32); + let cols = BlockLengthColumns(dimensions.cols().get().into()); let app_rows = app_specific_rows(index, dimensions, app_id); dimensions .iter_extended_rows() @@ -182,11 +187,20 @@ pub fn flatten_and_pad_block( let mut rng = ChaChaRng::from_seed(rng_seed); - assert!( - (block_dims.size().saturating_sub(padded_block.len())) - .checked_rem(block_dims.chunk_size as usize) - == Some(0) - ); + // SAFETY: `padded_block.len() <= block_dims.size()` checked some lines above. + if cfg!(debug_assertions) { + let chunk_size: usize = + usize::try_from(block_dims.chunk_size).expect("Cast to `usize` overflows"); + let dims_sub_pad = block_dims + .size() + .checked_sub(padded_block.len()) + .expect("`padded_block.len() <= block_dims.size() .qed"); + let rem = dims_sub_pad + .checked_rem(chunk_size) + .expect("`chunk_size != 0 .qed"); + assert_eq!(rem, 0); + } + let nz_chunk_size: NonZeroUsize = usize::try_from(block_dims.chunk_size) .map_err(|_| Error::CellLengthExceeded)? 
.try_into() @@ -207,11 +221,13 @@ pub fn get_block_dimensions( chunk_size: u32, ) -> Result { let max_block_dimensions = BlockDimensions::new(max_rows, max_cols, chunk_size); - if block_size as usize > max_block_dimensions.size() { - return Err(Error::BlockTooBig); - } + let block_size = usize::try_from(block_size)?; + ensure!( + block_size <= max_block_dimensions.size(), + Error::BlockTooBig + ); - if block_size as usize == max_block_dimensions.size() || MAXIMUM_BLOCK_SIZE { + if block_size == max_block_dimensions.size() || MAXIMUM_BLOCK_SIZE { return Ok(max_block_dimensions); } @@ -272,10 +288,13 @@ fn pad_iec_9797_1(mut data: Vec) -> Vec { .expect("Const assertion ensures this transformation to `DataChunk`. qed") } -fn extend_column_with_zeros(column: &[BlsScalar], height: usize) -> Vec { - let mut result = column.to_vec(); - result.resize(height, BlsScalar::zero()); - result +fn extend_column_with_zeros<'a, I>(column: I, height: usize) -> Vec +where + I: Iterator, +{ + let mut extended = column.take(height).cloned().collect::>(); + extended.resize(height, BlsScalar::zero()); + extended } pub fn to_bls_scalar(chunk: &[u8]) -> Result { @@ -286,16 +305,7 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result { } fn make_dims(bd: &BlockDimensions) -> Result { - Ok(Dimensions::new( - bd.cols - .as_usize() - .try_into() - .map_err(|_| Error::ZeroDimension)?, - bd.rows - .as_usize() - .try_into() - .map_err(|_| Error::ZeroDimension)?, - )) + Dimensions::new_from(bd.rows.0, bd.cols.0).ok_or(Error::ZeroDimension) } /// Build extended data matrix, by columns. @@ -314,40 +324,50 @@ pub fn par_extend_data_matrix( ) -> Result, Error> { let start = Instant::now(); let dims = make_dims(&block_dims)?; - let extended_dims = dims.extend(EXTENSION); + let (ext_rows, _): (usize, usize) = dims + .extend(ROW_EXTENSION, COL_EXTENSION) + .ok_or(Error::InvalidDimensionExtension)? + .into(); + let (rows, cols) = dims.into(); // simple length with mod check would work... 
- let chunks = block.par_chunks_exact(block_dims.chunk_size as usize); - if !chunks.remainder().is_empty() { - return Err(Error::DimensionsMismatch); - } + let chunk_size: usize = block_dims.chunk_size.try_into()?; + + #[cfg(feature = "parallel")] + let chunks = block.par_chunks_exact(chunk_size); + #[cfg(not(feature = "parallel"))] + let chunks = block.chunks_exact(chunk_size); + + ensure!(chunks.remainder().is_empty(), Error::DimensionsMismatch); + + #[cfg(feature = "parallel")] + let chunks = chunks.into_par_iter(); let scalars = chunks - .into_par_iter() .map(to_bls_scalar) .collect::, Error>>()?; // The data is currently row-major, so we need to put it into column-major - let rm = scalars - .into_row_major(dims.width(), dims.height()) - .ok_or(Error::DimensionsMismatch)?; - let col_wise_scalars = rm.iter_column_wise().map(Clone::clone).collect::>(); + let col_wise_scalars = DMatrix::from_row_iterator(rows, cols, scalars.into_iter()); let mut chunk_elements = col_wise_scalars - .chunks_exact(dims.height_nz().get()) - .flat_map(|column| extend_column_with_zeros(column, extended_dims.height())) + .column_iter() + .flat_map(|column| extend_column_with_zeros(column.iter(), ext_rows)) .collect::>(); - let extended_column_eval_domain = EvaluationDomain::new(extended_dims.height())?; - let column_eval_domain = EvaluationDomain::new(dims.height())?; // rows_num = column_length + let extended_column_eval_domain = EvaluationDomain::new(ext_rows)?; + let column_eval_domain = EvaluationDomain::new(rows)?; // rows_num = column_length - chunk_elements - .par_chunks_exact_mut(extended_dims.height()) - .for_each(|col| { - // (i)fft functions input parameter slice size has to be a power of 2, otherwise it panics - column_eval_domain.ifft_slice(&mut col[0..dims.height()]); - extended_column_eval_domain.fft_slice(col); - }); + #[cfg(feature = "parallel")] + let chunk_elements_iter = chunk_elements.par_chunks_exact_mut(ext_rows); + #[cfg(not(feature = "parallel"))] + let 
chunk_elements_iter = chunk_elements.chunks_exact_mut(ext_rows); + + chunk_elements_iter.for_each(|col| { + // (i)fft functions input parameter slice size has to be a power of 2, otherwise it panics + column_eval_domain.ifft_slice(&mut col[0..rows]); + extended_column_eval_domain.fft_slice(col); + }); metrics.extended_block_time(start.elapsed()); @@ -364,14 +384,18 @@ pub fn build_proof( metrics: &M, ) -> Result, Error> { let dims = make_dims(&block_dims)?; - let extended_dims = dims.extend(EXTENSION); + let (ext_rows, ext_cols): (usize, usize) = dims + .extend(ROW_EXTENSION, COL_EXTENSION) + .ok_or(Error::InvalidDimensionExtension)? + .into(); + let (_, cols): (usize, usize) = dims.into(); const SPROOF_SIZE: usize = PROOF_SIZE + SCALAR_SIZE; - let (prover_key, _) = public_params.trim(dims.width()).map_err(Error::from)?; + let (prover_key, _) = public_params.trim(cols).map_err(Error::from)?; // Generate all the x-axis points of the domain on which all the row polynomials reside - let row_eval_domain = EvaluationDomain::new(dims.width()).map_err(Error::from)?; + let row_eval_domain = EvaluationDomain::new(cols)?; let row_dom_x_pts = row_eval_domain.elements().collect::>(); let mut result_bytes: Vec = vec![0u8; SPROOF_SIZE.saturating_mul(cells.len())]; @@ -383,48 +407,52 @@ pub fn build_proof( let total_start = Instant::now(); // attempt to parallelly compute proof for all requested cells - cells + #[cfg(feature = "parallel")] + let cell_iter = cells .into_par_iter() - .zip(result_bytes.par_chunks_exact_mut(SPROOF_SIZE)) - .for_each(|(cell, res)| { - let r_index = cell.row.as_usize(); - if r_index >= extended_dims.height() || cell.col >= block_dims.cols { - res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! 
- } else { - let c_index = cell.col.as_usize(); - - // construct polynomial per extended matrix row - let row = (0..extended_dims.width()) - .into_par_iter() - .map(|j| { - ext_data_matrix - [r_index.saturating_add(j.saturating_mul(extended_dims.height()))] - }) - .collect::>(); - //let row = ext_data_matrix_cm - // .iter_row(r_index) - // .expect("Already checked row index") - // .map(Clone::clone) - // .collect::>(); - - // row has to be a power of 2, otherwise interpolate() function panics - // TODO: cache evaluations - let poly = Evaluations::from_vec_and_domain(row, row_eval_domain).interpolate(); - let witness = prover_key.compute_single_witness(&poly, &row_dom_x_pts[c_index]); - match prover_key.commit(&witness) { - Ok(commitment_to_witness) => { - let evaluated_point = ext_data_matrix[r_index - .saturating_add(c_index.saturating_mul(extended_dims.height()))]; - - res[0..PROOF_SIZE].copy_from_slice(&commitment_to_witness.to_bytes()); - res[PROOF_SIZE..].copy_from_slice(&evaluated_point.to_bytes()); - }, - Err(_) => { - res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! - }, - }; - } - }); + .zip(result_bytes.par_chunks_exact_mut(SPROOF_SIZE)); + #[cfg(not(feature = "parallel"))] + let cell_iter = cells.iter().zip(result_bytes.chunks_exact_mut(SPROOF_SIZE)); + + cell_iter.for_each(|(cell, res)| { + let r_index = cell.row.as_usize(); + if r_index >= ext_rows || cell.col >= block_dims.cols { + res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! 
+ } else { + let c_index = cell.col.as_usize(); + + // construct polynomial per extended matrix row + #[cfg(feature = "parallel")] + let ext_cols_iter = (0..ext_cols).into_par_iter(); + #[cfg(not(feature = "parallel"))] + let ext_cols_iter = 0..ext_cols; + + let row = ext_cols_iter + .map(|j| ext_data_matrix[r_index.saturating_add(j.saturating_mul(ext_rows))]) + .collect::>(); + + //let row = ext_data_matrix_cm + // .iter_row(r_index) + // .expect("Already checked row index") + // .map(Clone::clone) + // .collect::>(); + // row has to be a power of 2, otherwise interpolate() function panics TODO: cache evaluations + let poly = Evaluations::from_vec_and_domain(row, row_eval_domain).interpolate(); + let witness = prover_key.compute_single_witness(&poly, &row_dom_x_pts[c_index]); + match prover_key.commit(&witness) { + Ok(commitment_to_witness) => { + let evaluated_point = + ext_data_matrix[r_index.saturating_add(c_index.saturating_mul(ext_rows))]; + + res[0..PROOF_SIZE].copy_from_slice(&commitment_to_witness.to_bytes()); + res[PROOF_SIZE..].copy_from_slice(&evaluated_point.to_bytes()); + }, + Err(_) => { + res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! 
+ }, + }; + } + }); metrics.proof_build_time(total_start.elapsed(), cells.len().saturated_into()); @@ -461,7 +489,7 @@ pub fn par_build_commitments( if log::log_enabled!(target: LOG_TARGET, log::Level::Debug) { let raw_pp = public_params.to_raw_var_bytes(); - let hash_pp = hex::encode(sp_core_hashing::blake2_128(&raw_pp)); + let hash_pp = hex::encode(sp_core::hashing::blake2_128(&raw_pp)); let hex_pp = hex::encode(raw_pp); log::debug!( target: LOG_TARGET, @@ -487,19 +515,26 @@ pub fn par_build_commitments( let start = Instant::now(); - (0..extended_rows_num) - .into_par_iter() - .map(|i| { - row( - &ext_data_matrix, - i as usize, - block_dims.cols, - BlockLengthRows(extended_rows_num), - ) - }) - .zip(result_bytes.par_chunks_exact_mut(PROVER_KEY_SIZE as usize)) - .map(|(row, res)| commit(&prover_key, row_eval_domain, row, res)) - .collect::>()?; + #[cfg(feature = "parallel")] + let iter = (0..extended_rows_num).into_par_iter(); + #[cfg(not(feature = "parallel"))] + let iter = 0..extended_rows_num; + + let iter = iter.map(|i| { + row( + &ext_data_matrix, + i as usize, + block_dims.cols, + BlockLengthRows(extended_rows_num), + ) + }); + + #[cfg(feature = "parallel")] + let mut iter = iter.zip(result_bytes.par_chunks_exact_mut(PROVER_KEY_SIZE as usize)); + #[cfg(not(feature = "parallel"))] + let mut iter = iter.zip(result_bytes.chunks_exact_mut(PROVER_KEY_SIZE as usize)); + + iter.try_for_each(|(row, res)| commit(&prover_key, row_eval_domain, row, res))?; metrics.commitment_build_time(start.elapsed()); @@ -537,7 +572,7 @@ fn commit( #[cfg(test)] mod tests { - use std::{convert::TryInto, iter::repeat, str::from_utf8}; + use std::{convert::TryInto, iter::repeat}; use da_types::AppExtrinsic; use dusk_bytes::Serializable; @@ -559,6 +594,7 @@ mod tests { prelude::*, }; use rand::{prelude::IteratorRandom, Rng, SeedableRng}; + use sp_arithmetic::Percent; use test_case::test_case; use super::*; @@ -615,34 +651,32 @@ mod tests { // newapi done #[test] fn 
test_extend_data_matrix() { - let expected_result = vec![ - b"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00", - b"bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e", - b"7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00", - b"c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016", - b"1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00", - b"db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e", - b"9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900", - b"e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016", - b"3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00", - b"fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e", - b"babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800", - b"ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16", - b"5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00", - b"197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e", - b"d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700", - b"1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16", + let expected_result = [ + // Row 0 + hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), + hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), + hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), + hex!("c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016"), + // Row 1 + hex!("1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00"), + hex!("db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e"), + hex!("9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900"), + hex!("e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016"), + // Row 2 + hex!("3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00"), + 
hex!("fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e"), + hex!("babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800"), + hex!("ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16"), + // Row 3 + hex!("5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00"), + hex!("197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e"), + hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), + hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), ] - .into_iter() - .map(|e| { - e.chunks_exact(2) - .map(|h| u8::from_str_radix(from_utf8(h).unwrap(), 16).unwrap()) - .collect::>() - }) - .map(|e| { - BlsScalar::from_bytes(e.as_slice().try_into().expect("wrong number of elems")).unwrap() - }) - .collect::>(); + .iter() + .map(BlsScalar::from_bytes) + .collect::, _>>() + .expect("Invalid Expected result"); let block_dims = BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), 32); let block = (0..=247) @@ -651,8 +685,6 @@ mod tests { .flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), block_dims.chunk_size)) .collect::>(); let res = par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {}); - eprintln!("result={:?}", res); - eprintln!("expect={:?}", expected_result); assert_eq!(res.unwrap(), expected_result); } @@ -790,14 +822,15 @@ mod tests { fn random_cells( max_cols: BlockLengthColumns, max_rows: BlockLengthRows, - percents: usize, + percents: Percent, ) -> Vec { - assert!(percents > 0 && percents <= 100); let max_cols = max_cols.into(); let max_rows = max_rows.into(); let rng = &mut ChaChaRng::from_seed([0u8; 32]); - let amount = ((max_cols * max_rows) as f32 * (percents as f32 / 100.0)).ceil() as usize; + let amount: usize = percents + .mul_ceil::(max_cols * max_rows) + .saturated_into(); (0..max_cols) .flat_map(move |col| { @@ -826,7 +859,7 @@ mod tests { } let public_params = testnet::public_params(dims.cols.as_usize()); - for cell in random_cells(dims.cols, 
dims.rows, 1) { + for cell in random_cells(dims.cols, dims.rows, Percent::one() ) { let row = cell.row.as_usize(); let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap(); @@ -1017,12 +1050,12 @@ get erasure coded to ensure redundancy."#; data: extended_matrix[position.col as usize][position.row as usize].to_bytes(), }) .collect::>(); - let data = &decode_app_extrinsics(&index, &dimensions, cells, xt.app_id.0).unwrap()[0]; + let data = &decode_app_extrinsics(&index, dimensions, cells, xt.app_id.0).unwrap()[0]; assert_eq!(data, &xt.data); } assert!(matches!( - decode_app_extrinsics(&index, &dimensions, vec![], 0), + decode_app_extrinsics(&index, dimensions, vec![], 0), Err(ReconstructionError::MissingCell { .. }) )); } @@ -1261,7 +1294,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat .unwrap(); println!("Proof: {proof:?}"); - assert!(proof.len() == 80); + assert_eq!(proof.len(), 80); let commitment = result_bytes.clone().try_into().unwrap(); let dims = Dimensions::new(1, 4).unwrap(); diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 7f561847..d6f7080f 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -5,16 +5,17 @@ use crate::pmp::{ traits::Committer, }; use codec::Encode; -use core::num::NonZeroUsize; +use core::{cmp::max, num::NonZeroU16}; use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; -use kate_grid::{Dimensions, Extension, Grid, IntoColumnMajor, IntoRowMajor, RowMajor}; -use kate_recovery::config::PADDING_TAIL_VALUE; +use kate_recovery::{config::PADDING_TAIL_VALUE, ensure, matrix::Dimensions}; +use nalgebra::base::DMatrix; use poly_multiproof::{ m1_blst::Proof, traits::{KZGProof, PolyMultiProofNoPrecomp}, }; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; +use std::{cmp::min, collections::BTreeMap}; use crate::{ com::{Cell, Error}, @@ -55,31 +56,26 @@ mod tests; pub struct EvaluationGrid { pub lookup: DataLookup, - pub evals: 
RowMajor, - pub dims: Dimensions, + pub evals: DMatrix, } impl EvaluationGrid { /// From the app extrinsics, create a data grid of Scalars pub fn from_extrinsics( - mut extrinsics: Vec, + extrinsics: Vec, min_width: usize, max_width: usize, max_height: usize, rng_seed: Seed, ) -> Result { // Group extrinsics by app id, also sorted by app id. - extrinsics.sort_by(|a, b| a.app_id.cmp(&b.app_id)); - let grouped = - extrinsics - .iter() - .fold::>)>, _>(vec![], |mut acc, e| { - match acc.last_mut() { - Some((app_id, data)) if e.app_id == *app_id => data.push(e.data.clone()), - None | Some(_) => acc.push((e.app_id, vec![e.data.clone()])), - } - acc - }); + let grouped = extrinsics.into_iter().fold::>, _>( + BTreeMap::default(), + |mut acc, e| { + acc.entry(e.app_id).or_default().push(e.data); + acc + }, + ); // Convert each grup of extrinsics into scalars let encoded = grouped @@ -118,8 +114,10 @@ impl EvaluationGrid { // Fit the grid to the desired grid size let dims = get_block_dims(grid.len(), min_width, max_width, max_height)?; + let dim_size: usize = dims.size(); + let (rows, cols): (usize, usize) = dims.into(); let mut rng = ChaChaRng::from_seed(rng_seed); - while grid.len() != dims.n_cells() { + while grid.len() != dim_size { let rnd_values: [u8; SCALAR_SIZE - 1] = rng.gen(); // TODO: can we just use zeros instead? 
grid.push(pad_to_bls_scalar(rnd_values)?); @@ -127,83 +125,80 @@ impl EvaluationGrid { Ok(EvaluationGrid { lookup, - evals: grid - .into_row_major(dims.width(), dims.height()) - .ok_or(Error::DimensionsMismatch)?, - dims, + evals: DMatrix::from_row_iterator(rows, cols, grid.into_iter()), }) } - pub fn row(&self, y: usize) -> Option<&[ArkScalar]> { - self.evals.row(y) + pub fn row(&self, y: usize) -> Option> { + let (rows, _) = self.evals.shape(); + (y < rows).then(|| self.evals.row(y).into_iter().cloned().collect::>()) } - /// Returns the start/end indices of the given app id *for the non-extended grid* - fn app_data_indices(&self, app_id: &AppId) -> Option<(usize, usize)> { - if self.lookup.size == 0 { - // Empty block, short circuit. - return None; - } - let (i, start_index) = self - .lookup - .index - .iter() - .enumerate() - .find(|(_i, item)| &item.app_id == app_id) - .map(|(i, item)| (i, item.start as usize))?; - let end_index = self - .lookup - .index - .get(i.saturating_add(1)) - .map(|elem| elem.start) - .unwrap_or(self.lookup.size) as usize; - Some((start_index, end_index)) + pub fn dims(&self) -> Dimensions { + let (rows, cols) = self.evals.shape(); + // SAFETY: We cannot construct an `EvaluationGrid` with any dimension `< 1` or `> u16::MAX` + unsafe { Dimensions::new_unchecked(rows as u16, cols as u16) } } /// Returns a list of `(index, row)` pairs for the underlying rows of an application. /// Returns `None` if the `app_id` cannot be found, or if the provided `orig_dims` are invalid. 
pub fn app_rows( &self, - app_id: &AppId, - orig_dims: Option<&Dimensions>, + app_id: AppId, + orig_dims: Option, ) -> Option)>> { - let orig_dims = orig_dims.unwrap_or(&self.dims); - if !orig_dims.divides(&self.dims) { - return None; - } - let h_mul = self.dims.height() / orig_dims.height_nz(); + let (rows, _cols) = self.evals.shape(); + let dims = self.dims(); + let orig_dims = match orig_dims { + Some(d) => { + if !d.divides(&dims) { + return None; + } + d + }, + None => dims, + }; + + // SAFETY: `origin_dims.rows is NonZeroU16` + // Compiler checks that `Dimensions::rows()` returns a `NonZeroU16` using the expression + // `NonZeroU16::get(x)` instead of `x.get()`. + #[allow(clippy::integer_arithmetic)] + let h_mul: usize = rows / usize::from(NonZeroU16::get(orig_dims.rows())); + #[allow(clippy::integer_arithmetic)] + let index_to_y_coord = |dims: &Dimensions, index: u32| -> u32 { + index / u32::from(NonZeroU16::get(dims.rows())) + }; - let (start_ind, end_ind) = self.app_data_indices(app_id)?; - let (_, start_y) = RowMajor::<()>::ind_to_coord(orig_dims, start_ind); - let (_, end_y) = RowMajor::<()>::ind_to_coord(orig_dims, end_ind.saturating_sub(1)); // Find y of last cell elt - let (new_start_y, new_end_y) = (start_y.saturating_mul(h_mul), end_y.saturating_mul(h_mul)); + let (start_ind, end_ind) = self.lookup.range_of(app_id)?; + let start_y: usize = index_to_y_coord(&orig_dims, start_ind).try_into().ok()?; + let end_y: usize = index_to_y_coord(&orig_dims, end_ind.saturating_sub(1)) + .try_into() + .ok()?; // Find y of last cell elt + let (new_start_y, new_end_y) = (start_y.checked_mul(h_mul)?, end_y.checked_mul(h_mul)?); (new_start_y..=new_end_y) .step_by(h_mul) - .map(|y| self.evals.row(y).map(|a| (y, a.to_vec()))) + .map(|y| self.row(y).map(|a| (y, a))) .collect() } - pub fn extend_columns(&self, extension_factor: usize) -> Result { - let new_dims = self.dims.extend(Extension::height( - extension_factor - .try_into() - .map_err(|_| 
Error::CellLengthExceeded)?, - )); - - let domain = GeneralEvaluationDomain::::new(self.dims.height()) - .ok_or(Error::DomainSizeInvalid)?; - let domain_new = GeneralEvaluationDomain::::new(new_dims.height()) - .ok_or(Error::DomainSizeInvalid)?; - if domain_new.size() != new_dims.height() { - return Err(Error::DomainSizeInvalid); - } - - let cols: Vec> = self - .evals - .columns() - .map(|(_i, col)| col.map(|s| *s).collect::>()) - .collect::>(); + pub fn extend_columns(&self, row_factor: NonZeroU16) -> Result { + let dims = self.dims(); + let (new_rows, new_cols): (usize, usize) = dims + .extend(row_factor, unsafe { NonZeroU16::new_unchecked(1) }) + .ok_or(Error::CellLengthExceeded)? + .into(); + let (rows, cols): (usize, usize) = dims.into(); + + let domain = + GeneralEvaluationDomain::::new(rows).ok_or(Error::DomainSizeInvalid)?; + let domain_new = + GeneralEvaluationDomain::::new(new_rows).ok_or(Error::DomainSizeInvalid)?; + ensure!(domain_new.size() == new_rows, Error::DomainSizeInvalid); + + let cols = (0..cols) + .into_iter() + .map(|c| self.evals.column(c).iter().cloned().collect::>()); let new_evals = cfg_into_iter!(cols) .flat_map(|mut col| { @@ -212,29 +207,35 @@ impl EvaluationGrid { domain_new.fft_in_place(&mut col); col }) - .collect::>() - .into_column_major(new_dims.width(), new_dims.height()) - .expect("Each column should be expanded to news dims") - .to_row_major(); + .collect::>(); + + let new_evals = DMatrix::from_column_slice(new_rows, new_cols, &new_evals); + debug_assert!(new_evals.shape() == (new_rows, new_cols)); Ok(Self { lookup: self.lookup.clone(), evals: new_evals, - dims: new_dims, }) } pub fn make_polynomial_grid(&self) -> Result { - let domain = GeneralEvaluationDomain::::new(self.dims.width()) - .ok_or(Error::DomainSizeInvalid)?; - #[cfg(not(feature = "parallel"))] - let rows = self.evals.rows(); - #[cfg(feature = "parallel")] - let rows = self.evals.rows_par_iter(); + let (_rows, cols) = self.evals.shape(); + let domain = + 
GeneralEvaluationDomain::::new(cols).ok_or(Error::DomainSizeInvalid)?; + + let inner = self + .evals + .row_iter() + .map(|row_iter| { + let row = row_iter.iter().cloned().collect::>(); + domain.ifft(row.as_slice()) + }) + .collect::>(); + Ok(PolynomialGrid { - dims: self.dims.clone(), + dims: self.dims(), points: domain.elements().collect(), - inner: rows.map(|(_, row)| domain.ifft(row)).collect::>(), + inner, }) } } @@ -264,7 +265,7 @@ impl PolynomialGrid { extension_factor: usize, ) -> Result, Error> { let res = cfg_iter!(self.inner) - .map(|coeffs| srs.commit(&coeffs).map_err(Error::MultiproofError)) + .map(|coeffs| srs.commit(coeffs).map_err(Error::MultiproofError)) .collect::, _>>()?; poly_multiproof::Commitment::::extend_commitments( &res, @@ -302,20 +303,22 @@ impl PolynomialGrid { let block = multiproof_block( cell.col.0 as usize, cell.row.0 as usize, - &self.dims, + self.dims, target_dims, ) .ok_or(Error::CellLengthExceeded)?; let polys = &self.inner[block.start_y..block.end_y]; - let evals = (block.start_y..block.end_y) + let evals: Vec> = (block.start_y..block.end_y) .map(|y| { - eval_grid.evals.row(y).expect("Already bounds checked")[block.start_x..block.end_x] + eval_grid.row(y).expect("Already bounds checked .qed")[block.start_x..block.end_x] .to_vec() }) .collect::>(); + let evals_view = evals.iter().map(|row| row.as_slice()).collect::>(); + let points = &self.points[block.start_x..block.end_x]; let mut ts = Transcript::new(b"avail-mp"); - let proof = PolyMultiProofNoPrecomp::open(srs, &mut ts, &evals, &polys, points) + let proof = PolyMultiProofNoPrecomp::open(srs, &mut ts, &evals_view, polys, points) .map_err(Error::MultiproofError)?; Ok(Multiproof { @@ -348,16 +351,21 @@ pub struct CellBlock { pub fn multiproof_block( x: usize, y: usize, - grid_dims: &Dimensions, - target_dims: &Dimensions, + grid: Dimensions, + target: &Dimensions, ) -> Option { - let mp_grid_dims = multiproof_dims(grid_dims, target_dims)?; - if x >= mp_grid_dims.width() || y >= 
mp_grid_dims.height() { + let (mp_rows, mp_cols): (usize, usize) = multiproof_dims(grid, target)?.into(); + let (g_rows, g_cols): (usize, usize) = grid.into(); + if x >= mp_cols || y >= mp_rows { return None; } - let block_width = grid_dims.width() / mp_grid_dims.width_nz(); - let block_height = grid_dims.height() / mp_grid_dims.height_nz(); + let block_width = g_cols + .checked_div(mp_cols) + .expect("`mp_cols` created from a `NonZeroU16` .qed"); + let block_height = g_rows + .checked_div(mp_rows) + .expect("`mp_rows` created from a `NonZeroU16` .qed"); Some(CellBlock { start_x: x.checked_mul(block_width)?, start_y: y.checked_mul(block_height)?, @@ -368,13 +376,14 @@ pub fn multiproof_block( /// Dimensions of the multiproof grid. These are guarenteed to cleanly divide `grid_dims`. /// `target_dims` must cleanly divide `grid_dims`. -pub fn multiproof_dims(grid_dims: &Dimensions, target_dims: &Dimensions) -> Option { - let target_width = grid_dims.width_nz().min(target_dims.width_nz()); - let target_height = grid_dims.height_nz().min(target_dims.height_nz()); - if grid_dims.width() % target_width != 0 || grid_dims.height() % target_height != 0 { +pub fn multiproof_dims(grid: Dimensions, target: &Dimensions) -> Option { + let cols = min(grid.cols(), target.cols()); + let rows = min(grid.rows(), target.rows()); + if grid.cols().get() % cols != 0 || grid.rows().get() % rows != 0 { return None; } - Some(Dimensions::new(target_width, target_height)) + + Dimensions::new(rows, cols) } pub fn get_block_dims( @@ -387,24 +396,28 @@ pub fn get_block_dims( if n_scalars < max_width { let current_width = n_scalars; // Don't let the width get lower than the minimum provided - let width = core::cmp::max(round_up_power_of_2(current_width), min_width); - Ok(Dimensions::new( - width.try_into().map_err(|_| Error::ZeroDimension)?, - 1.try_into().expect("1 is nonzero"), - )) + let width = max( + current_width + .checked_next_power_of_two() + .ok_or(Error::BlockTooBig)?, + min_width, + ); 
+ let height = unsafe { NonZeroU16::new_unchecked(1) }; + + Dimensions::new_from(height, width).ok_or(Error::ZeroDimension) } else { - let width = NonZeroUsize::new(max_width).ok_or(Error::ZeroDimension)?; - let current_height = round_up_to_multiple(n_scalars, width) / width; + let width = NonZeroU16::try_from(u16::try_from(max_width)?)?; + let current_height = round_up_to_multiple(n_scalars, width) + .checked_div(max_width) + .expect("`max_width` is non zero, checked one line before"); // Round the height up to a power of 2 for ffts - let height = round_up_power_of_2(current_height); + let height = current_height + .checked_next_power_of_two() + .ok_or(Error::BlockTooBig)?; // Error if height too big - if height > max_height { - return Err(Error::BlockTooBig); - } - Ok(Dimensions::new( - width, - height.try_into().map_err(|_| Error::ZeroDimension)?, - )) + ensure!(height <= max_height, Error::BlockTooBig); + + Dimensions::new_from(height, width).ok_or(Error::ZeroDimension) } } @@ -413,9 +426,12 @@ pub fn domain_points(n: usize) -> Result, Error> { Ok(domain.elements().collect()) } -fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { - let n_multiples = input.saturating_add(multiple.get()).saturating_sub(1) / multiple; - n_multiples.saturating_mul(multiple.get()) +/// SAFETY: As `multiple` is a `NonZeroU16` we can safetly make the following ops. +#[allow(clippy::integer_arithmetic)] +fn round_up_to_multiple(input: usize, multiple: NonZeroU16) -> usize { + let multiple: usize = multiple.get().into(); + let n_multiples = input.saturating_add(multiple - 1) / multiple; + n_multiples.saturating_mul(multiple) } pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { @@ -427,22 +443,6 @@ pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result ArkScalar::from_bytes(&buf).map_err(Error::MultiproofError) } -// Round up. 
only valid for positive integers -#[allow(clippy::integer_arithmetic)] -fn round_up_power_of_2(mut v: usize) -> usize { - if v == 0 { - return 1; - } - v -= 1; - v |= v >> 1; - v |= v >> 2; - v |= v >> 4; - v |= v >> 8; - v |= v >> 16; - v += 1; - v -} - #[cfg(test)] #[allow(clippy::integer_arithmetic)] mod unit_tests { @@ -451,8 +451,8 @@ mod unit_tests { use test_case::test_case; // parameters that will split a 256x256 grid into pieces of size 4x16 - const TARGET: Dimensions = Dimensions::new_unchecked(64, 16); - const GRID: Dimensions = Dimensions::new_unchecked(256, 256); + const TARGET: Dimensions = unsafe { Dimensions::new_unchecked(16, 64) }; + const GRID: Dimensions = unsafe { Dimensions::new_unchecked(256, 256) }; fn cb(start_x: usize, start_y: usize, end_x: usize, end_y: usize) -> CellBlock { CellBlock { @@ -469,7 +469,7 @@ mod unit_tests { #[test_case(64, 0 => None)] #[test_case(0, 16 => None)] fn multiproof_max_grid_size(x: usize, y: usize) -> Option { - multiproof_block(x, y, &GRID, &TARGET) + multiproof_block(x, y, GRID.clone(), &TARGET) } #[test_case(256, 256, 64, 16 => Some((64, 16)))] @@ -479,16 +479,15 @@ mod unit_tests { #[test_case(256, 8, 32, 32 => Some((32, 8)))] #[test_case(4 , 1, 32, 32 => Some((4, 1)))] fn test_multiproof_dims( - grid_w: usize, - grid_h: usize, - target_w: usize, - target_h: usize, + grid_w: u16, + grid_h: u16, + target_w: u16, + target_h: u16, ) -> Option<(usize, usize)> { - multiproof_dims( - &Dimensions::new_unchecked(grid_w, grid_h), - &Dimensions::new_unchecked(target_w, target_h), - ) - .map(|i| (i.width(), i.height())) + let grid = unsafe { Dimensions::new_unchecked(grid_w, grid_h) }; + let target = unsafe { Dimensions::new_unchecked(target_w, target_h) }; + + multiproof_dims(grid, &target).map(Into::into) } use proptest::prelude::*; @@ -497,10 +496,12 @@ mod unit_tests { cases: 200, .. 
ProptestConfig::default() })] #[test] - fn test_round_up_to_multiple(i in 1..1000usize, m in 1..32usize) { - for k in 0..m { - let a = i * m - k; - prop_assert_eq!(round_up_to_multiple(a, m.try_into().unwrap()), i * m) + fn test_round_up_to_multiple(i in 1..1000usize, m in 1..32u16) { + for k in 0..usize::from(m) { + let a :usize = i * usize::from(m) - k; + let output = round_up_to_multiple(a, m.try_into().unwrap()); + let expected :usize = i * usize::from(m); + prop_assert_eq!( output, expected) } } } @@ -511,19 +512,23 @@ mod unit_tests { #[test_case(6 => 8)] #[test_case(972 => 1024)] fn test_round_up_to_2(i: usize) -> usize { - round_up_power_of_2(i) + i.next_power_of_two() + } + + fn new_dim(rows: u16, cols: u16) -> Result { + Dimensions::new(rows, cols).ok_or(Error::BlockTooBig) } - #[test_case(0 => Dimensions::new_unchecked(4, 1) ; "block size zero")] - #[test_case(1 => Dimensions::new_unchecked(4, 1) ; "below minimum block size")] - #[test_case(10 => Dimensions::new_unchecked(16, 1) ; "regular case")] - #[test_case(17 => Dimensions::new_unchecked(32, 1) ; "minimum overhead after 512")] - #[test_case(256 => Dimensions::new_unchecked(256, 1) ; "maximum cols")] - #[test_case(257 => Dimensions::new_unchecked(256, 2) ; "two rows")] - #[test_case(256 * 256 => Dimensions::new_unchecked(256, 256) ; "max block size")] - #[test_case(256 * 256 + 1 => panics "BlockTooBig" ; "too much data")] - fn test_get_block_dims(size: usize) -> Dimensions + #[test_case(0 => new_dim(1,4) ; "block size zero")] + #[test_case(1 => new_dim(1,4) ; "below minimum block size")] + #[test_case(10 => new_dim(1, 16) ; "regular case")] + #[test_case(17 => new_dim(1, 32) ; "minimum overhead after 512")] + #[test_case(256 => new_dim(1, 256) ; "maximum cols")] + #[test_case(257 => new_dim(2, 256) ; "two rows")] + #[test_case(256 * 256 => new_dim(256, 256) ; "max block size")] + #[test_case(256 * 256 + 1 => Err(Error::BlockTooBig) ; "too much data")] + fn test_get_block_dims(size: usize) -> Result 
where { - get_block_dims(size, 4, 256, 256).unwrap() + get_block_dims(size, 4, 256, 256) } } diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index 202fa712..5dbc10e6 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -7,8 +7,10 @@ use da_types::AppId; use da_types::BlockLengthColumns; use da_types::BlockLengthRows; use hex_literal::hex; -use kate_grid::Dimensions; -use kate_recovery::matrix::Position; +use kate_recovery::{ + commitments::verify_equality, + matrix::{Dimensions, Position}, +}; use test_case::test_case; #[test] @@ -30,7 +32,9 @@ fn test_build_commitments_simple_commitment_check() { hash, ) .unwrap(); - let ext_evals = evals.extend_columns(2).unwrap(); + let ext_evals = evals + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) + .unwrap(); let polys = ext_evals.make_polynomial_grid().unwrap(); let commits = polys .commitments(&*PMP) @@ -47,7 +51,7 @@ fn test_build_commitments_simple_commitment_check() { .flat_map(|p| p.to_bytes().unwrap()) .collect::>(); - assert_eq!(ext_evals.dims, Dimensions::new_unchecked(4, 2)); + assert_eq!(ext_evals.dims(), Dimensions::new_from(2, 4).unwrap()); let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); assert_eq!(commits, expected_commitments); assert_eq!(commits_fft_extended, expected_commitments); @@ -64,7 +68,9 @@ fn par_build_commitments_row_wise_constant_row() { }]; let evals = EvaluationGrid::from_extrinsics(xts, 4, 4, 4, hash).unwrap(); - let evals = evals.extend_columns(2).unwrap(); + let evals = evals + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) + .unwrap(); let polys = evals.make_polynomial_grid().unwrap(); polys.commitments(&*PMP).unwrap(); } @@ -74,8 +80,10 @@ proptest! 
{ #[test] fn commitments_verify(ref exts in app_extrinsics_strategy()) { //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); - let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); - let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap(); + let grid = grid.extend_columns( unsafe { NonZeroU16::new_unchecked(2)}).unwrap(); + let (g_rows, g_cols) :(u16,u16) = grid.dims().into(); + let orig_dims = Dimensions::new(g_rows / 2, g_cols).unwrap(); let polys = grid.make_polynomial_grid().unwrap(); let commits = polys.commitments(&*PMP) .unwrap() @@ -84,25 +92,31 @@ proptest! { .collect::>(); let index = app_data_index_from_lookup(&grid.lookup); - let public_params = testnet::public_params((grid.dims.width() as u32).into()); + let public_params = testnet::public_params(BlockLengthColumns(g_cols as u32)); - for xt in exts { - let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); + for (i, xt) in exts.iter().enumerate() { + let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap(); // Have to put the rows we find in this funky data structure - let mut app_rows = vec![None; grid.dims.height()]; + let mut app_rows = vec![None; g_rows.into()]; for (row_i, row) in rows { app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes().unwrap()).collect()); } // Need to provide the original dimensions here too - let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); - let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); + let extended_dims = orig_dims.clone(); + let (_, missing) = verify_equality(&public_params, 
&commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); + if !missing.is_empty() { + log::error!("Debug this spot at {i}"); + let d_rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap(); + let (_, _) = verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); + } prop_assert!(missing.is_empty()); } } fn verify_commitments_missing_row(ref xts in app_extrinsics_strategy()) { - let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); - let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); + let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns( unsafe { NonZeroU16::new_unchecked(2) }).unwrap(); + let (g_rows, g_cols):(u16,u16) = grid.dims().into(); + let orig_dims = Dimensions::new_from(g_rows / 2, g_cols).unwrap(); let polys = grid.make_polynomial_grid().unwrap(); let commits = polys.commitments(&*PMP) .unwrap() @@ -111,19 +125,19 @@ proptest! 
{ .collect::>(); let index = app_data_index_from_lookup(&grid.lookup); - let public_params = testnet::public_params((grid.dims.width() as u32).into()); + let public_params = testnet::public_params((g_cols as u32).into()); for xt in xts { - let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); - let mut row_elems = vec![None; grid.dims.height()]; + let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap(); + let mut row_elems = vec![None; g_rows.into()]; for (i, data) in &rows { row_elems[*i] = Some(data.iter().flat_map(|s| s.to_bytes().unwrap()).collect()); } let first_index = rows.iter().map(|(i, _)| *i).min().unwrap(); row_elems.remove(first_index); - let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); - let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &row_elems,&index,&extended_dims,xt.app_id.0).unwrap(); + let extended_dims = orig_dims.transpose(); + let (_, missing) = verify_equality(&public_params, &commits, &row_elems,&index,&extended_dims,xt.app_id.0).unwrap(); prop_assert!(!missing.is_empty()); } } @@ -144,11 +158,10 @@ fn test_zero_deg_poly_commit(row_values: Vec) { //let ae = AppExtrinsic { 0.into(), vec![} let ev = EvaluationGrid { lookup: Default::default(), // Shouldn't need to care about this - dims: Dimensions::new_unchecked(row_values.len(), 1), - evals: row.into_row_major(row_values.len(), 1).unwrap(), + evals: DMatrix::from_row_iterator(1, len, row.into_iter()), }; - println!("Row: {:?}", ev.evals.inner()); + println!("Row: {:?}", ev.evals); let pg = ev.make_polynomial_grid().unwrap(); println!("Poly: {:?}", pg.inner[0]); @@ -164,9 +177,9 @@ fn test_zero_deg_poly_commit(row_values: Vec) { let proof = pg.proof(&*PMP, &cell).unwrap(); let proof_bytes = proof.to_bytes().unwrap(); - let cell_bytes = ev.evals.get(x, 0).unwrap().to_bytes().unwrap(); + let cell_bytes = ev.evals.get((0, x)).unwrap().to_bytes().unwrap(); let 
content = [&proof_bytes[..], &cell_bytes[..]].concat(); - let dims = kate_recovery::matrix::Dimensions::new(1, 4).unwrap(); + let dims = Dimensions::new(1, 4).unwrap(); let cell = kate_recovery::data::Cell { position: Position { row: 0, diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index 7ac4e3f3..ca54ddc6 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -1,10 +1,11 @@ use da_types::{AppExtrinsic, DataLookup, DataLookupIndexItem}; use hex_literal::hex; -use kate_grid::{Dimensions, Grid, IntoColumnMajor, IntoRowMajor}; use kate_recovery::{ com::{app_specific_cells, decode_app_extrinsics, reconstruct_extrinsics}, data::DataCell, + matrix::Dimensions, }; +use nalgebra::base::DMatrix; use poly_multiproof::traits::AsBytes; use crate::{ @@ -15,6 +16,7 @@ use crate::{ }, Seed, }; +use core::num::NonZeroU16; #[test] fn newapi_test_flatten_block() { @@ -37,7 +39,7 @@ fn newapi_test_flatten_block() { }, ]; - let expected_dims = Dimensions::new_unchecked(16, 1); + let expected_dims = Dimensions::new_from(1, 16).unwrap(); let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); let expected_index = [(0.into(), 0), (1.into(), 2), (2.into(), 4), (3.into(), 6)] @@ -52,7 +54,8 @@ fn newapi_test_flatten_block() { assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); assert_eq!( - evals.dims, expected_dims, + evals.dims(), + expected_dims, "Dimensions don't match the expected" ); @@ -60,7 +63,8 @@ fn newapi_test_flatten_block() { let data = evals .evals - .inner() + .data + .as_slice() .iter() .flat_map(|s| s.to_bytes().unwrap()) .collect::>(); @@ -70,7 +74,7 @@ fn newapi_test_flatten_block() { #[test] fn newapi_test_extend_data_matrix() { // This test expects this result in column major - let expected_result = vec![ + let expected_data = vec![ hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), 
hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), @@ -88,14 +92,13 @@ fn newapi_test_extend_data_matrix() { hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), ] - .into_iter() - .map(|e| ArkScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) - .collect::>() - .into_column_major(4, 4) - .unwrap() - .to_row_major(); - - let block_dims = Dimensions::new_unchecked(4, 2); + .iter() + .map(ArkScalar::from_bytes) + .collect::, _>>() + .expect("Invalid Expected result"); + + let expected_result = DMatrix::from_column_slice(4, 4, &expected_data); + let scalars = (0..=247) .collect::>() .chunks_exact(DATA_CHUNK_SIZE) @@ -104,14 +107,13 @@ fn newapi_test_extend_data_matrix() { let grid = EvaluationGrid { lookup: DataLookup::default(), - evals: scalars - .into_row_major(block_dims.width(), block_dims.height()) - .unwrap(), - dims: block_dims, + evals: DMatrix::from_row_iterator(2, 4, scalars.into_iter()), }; - let extend = grid.extend_columns(2).unwrap(); + let extend = grid + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) + .unwrap(); - assert_eq!(extend.evals.inner(), expected_result.inner()); + assert_eq!(extend.evals, expected_result); } #[test] @@ -135,13 +137,11 @@ get erasure coded to ensure redundancy."#; let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 32, 4, hash) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); let index = app_data_index_from_lookup(&grid.lookup); - let bdims = - kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) - .unwrap(); + let bdims = grid.dims(); for xt in &xts { let positions = app_specific_cells(&index, &bdims, xt.app_id.0).unwrap(); let cells = positions @@ -150,46 +150,45 @@ get erasure coded to ensure redundancy."#; position: 
pos.clone(), data: grid .evals - .get(pos.col as usize, pos.row as usize) + .get((pos.row as usize, pos.col as usize)) .unwrap() .to_bytes() .unwrap(), }) .collect::>(); - let data = &decode_app_extrinsics(&index, &bdims, cells, xt.app_id.0).unwrap()[0]; + let data = &decode_app_extrinsics(&index, bdims, cells, xt.app_id.0).unwrap()[0]; assert_eq!(data, &xt.data); } assert!(matches!( - decode_app_extrinsics(&index, &bdims, vec![], 0), + decode_app_extrinsics(&index, bdims, vec![], 0), Err(kate_recovery::com::ReconstructionError::MissingCell { .. }) )); } #[test] fn test_extend_mock_data() { - let orig_data = br#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns + let orig_data = r#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy. Let's see how this gets encoded and then reconstructed by sampling only some data."#; - let exts = vec![AppExtrinsic::from(orig_data.to_vec())]; + let exts = vec![AppExtrinsic::from(orig_data.as_bytes().to_vec())]; // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 128, 2, hash) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); let cols = sample_cells(&grid, None); - let bdims = - kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) - .unwrap(); + let bdims = grid.dims(); let index = app_data_index_from_lookup(&grid.lookup); let res = reconstruct_extrinsics(&index, &bdims, cols).unwrap(); let s = String::from_utf8_lossy(res[0].1[0].as_slice()); - assert_eq!(res[0].1[0], orig_data); + assert_eq!(s, orig_data); + assert_eq!(res[0].1[0], orig_data.as_bytes()); eprintln!("Decoded: {}", s); } diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index e0828278..34158ad9 
100644 --- a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -1,5 +1,4 @@ use da_types::{AppExtrinsic, DataLookup}; -use kate_grid::Grid; use kate_recovery::{data::DataCell, index::AppDataIndex}; use once_cell::sync::Lazy; use poly_multiproof::{m1_blst::M1NoPrecomp, traits::AsBytes}; @@ -57,20 +56,21 @@ fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { fn sample_cells(grid: &EvaluationGrid, columns: Option<&[usize]>) -> Vec { let mut rng = ChaChaRng::from_seed([42u8; 32]); + let (g_rows, g_cols) = grid.evals.shape(); let cols: Vec = match columns { Some(cols) => cols.to_vec(), - None => (0..grid.dims.width()).into_iter().collect(), + None => (0..g_cols).into_iter().collect(), }; cols.iter() .flat_map(|x| { - sample_unique(&mut rng, grid.dims.height() / 2, grid.dims.height()) + sample_unique(&mut rng, g_rows / 2, g_rows) .into_iter() .map(move |y| kate_recovery::data::DataCell { position: kate_recovery::matrix::Position { row: y as u32, col: *x as u16, }, - data: grid.evals.get(*x, y).unwrap().to_bytes().unwrap(), + data: grid.evals.get((y, *x)).unwrap().to_bytes().unwrap(), }) }) .collect::>() diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 7c19d21b..5b89acb0 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -1,18 +1,19 @@ use super::{app_data_index_from_lookup, PMP}; -use crate::com::Cell; -use crate::gridgen::tests::sample_cells; -use crate::gridgen::EvaluationGrid; -use crate::Seed; +use crate::{ + com::Cell, + gridgen::{tests::sample_cells, EvaluationGrid}, + Seed, +}; +use core::num::NonZeroU16; use da_types::AppExtrinsic; -use kate_grid::Grid; -use kate_recovery::com::reconstruct_extrinsics; -use kate_recovery::data::Cell as DCell; -use kate_recovery::matrix::Position as DPosition; +use kate_recovery::{ + com::reconstruct_extrinsics, + data::Cell as DCell, + matrix::{Dimensions, Position}, +}; use 
poly_multiproof::traits::AsBytes; use proptest::prelude::*; -use rand::distributions::Uniform; -use rand::prelude::Distribution; -use rand::SeedableRng; +use rand::{distributions::Uniform, prelude::Distribution, SeedableRng}; use rand_chacha::ChaChaRng; #[test] @@ -33,14 +34,13 @@ fn test_multiple_extrinsics_for_same_app_id() { let hash = Seed::default(); let ev = EvaluationGrid::from_extrinsics(xts.into(), 4, 128, 2, hash) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); let cells = sample_cells(&ev, None); let index = app_data_index_from_lookup(&ev.lookup); - let bdims = - kate_recovery::matrix::Dimensions::new(ev.dims.height() as u16, ev.dims.width() as u16) - .unwrap(); + let (rows, cols) = ev.evals.shape(); + let bdims = Dimensions::new_from(rows, cols).unwrap(); let res = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); assert_eq!(res[0].1[0], xt1); @@ -51,15 +51,15 @@ proptest! { #![proptest_config(ProptestConfig::with_cases(5))] #[test] fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { - let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(2).unwrap(); - let dims = &grid.dims; + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(unsafe { NonZeroU16::new_unchecked(2)}).unwrap(); + let (rows, cols) = grid.evals.shape(); //let (layout, commitments, dims, matrix) = par_build_commitments( // BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); const RNG_SEED: Seed = [42u8; 32]; let cells = sample_cells(&grid, None); let index = app_data_index_from_lookup(&grid.lookup); - let bdims = kate_recovery::matrix::Dimensions::new(dims.height() as u16, dims.width() as u16).unwrap(); + let bdims = Dimensions::new_from(rows, cols).unwrap(); let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); for (result, xt) in 
reconstructed.iter().zip(exts) { prop_assert_eq!(result.0, *xt.app_id); @@ -69,7 +69,7 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let pp = &*PMP; let polys = grid.make_polynomial_grid().unwrap(); let commitments = polys.commitments(pp).unwrap(); - let indices = (0..dims.width()).flat_map(|x| (0..dims.height()).map(move |y| (x, y))).collect::>(); + let indices = (0..cols).flat_map(|x| (0..rows).map(move |y| (x, y))).collect::>(); // Sample some number 10 of the indices, all is too slow for tests... let mut rng = ChaChaRng::from_seed(RNG_SEED); @@ -79,9 +79,9 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let proof = polys.proof(pp, &cell).unwrap(); let mut content = [0u8; 80]; content[..48].copy_from_slice(&proof.to_bytes().unwrap()[..]); - content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes().unwrap()[..]); + content[48..].copy_from_slice(&grid.evals.get((y,x)).unwrap().to_bytes().unwrap()[..]); - let dcell = DCell{position: DPosition { row: y as u32, col: x as u16 }, content }; + let dcell = DCell{position: Position { row: y as u32, col: x as u16 }, content }; let verification = kate_recovery::proof::verify(&kate_recovery::testnet::public_params(256), &bdims, &commitments[y].to_bytes().unwrap(), &dcell); prop_assert!(verification.is_ok()); prop_assert!(verification.unwrap()); @@ -114,16 +114,14 @@ get erasure coded to ensure redundancy."#; let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 4, 32, Seed::default()) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); let cols_1 = sample_cells(&grid, Some(&[0, 1, 2, 3])); let index = app_data_index_from_lookup(&grid.lookup); - let bdims = - kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) - .unwrap(); + let bdims = grid.dims(); let res_1 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_1, 1).unwrap(); 
assert_eq!(res_1[0], app_id_1_data); diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 34329b1e..33ad26ff 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -1,6 +1,7 @@ #![cfg_attr(not(feature = "std"), no_std)] #![deny(clippy::integer_arithmetic)] +use core::{convert::TryInto, num::TryFromIntError}; use da_types::{BlockLengthColumns, BlockLengthRows}; #[cfg(feature = "std")] pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsScalar}; @@ -8,6 +9,7 @@ pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsSca use kate_recovery::matrix::Dimensions; use sp_arithmetic::traits::SaturatedConversion; use static_assertions::const_assert_ne; +use thiserror_no_std::Error; use crate::config::DATA_CHUNK_SIZE; @@ -17,14 +19,11 @@ pub type Seed = [u8; 32]; #[cfg(feature = "std")] pub use dusk_bytes::Serializable; #[cfg(feature = "std")] -pub use kate_grid as grid; -#[cfg(feature = "std")] pub use poly_multiproof as pmp; pub mod config { - use kate_grid::Extension; - use super::{BlockLengthColumns, BlockLengthRows}; + use core::num::NonZeroU16; // TODO: Delete this? 
not used anywhere pub const SCALAR_SIZE_WIDE: usize = 64; @@ -32,7 +31,8 @@ pub mod config { pub const SCALAR_SIZE: usize = 32; pub const DATA_CHUNK_SIZE: usize = 31; // Actual chunk size is 32 after 0 padding is done pub const EXTENSION_FACTOR: u32 = 2; - pub const EXTENSION: Extension = Extension::height_unchecked(2); + pub const ROW_EXTENSION: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(2) }; + pub const COL_EXTENSION: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(1) }; pub const PROVER_KEY_SIZE: u32 = 48; pub const PROOF_SIZE: usize = 48; // MINIMUM_BLOCK_SIZE, MAX_BLOCK_ROWS and MAX_BLOCK_COLUMNS have to be a power of 2 because of the FFT functions requirements @@ -232,27 +232,17 @@ impl BlockDimensions { } } -#[derive(PartialEq, Eq, Debug)] +#[derive(Error, Copy, Clone, PartialEq, Eq, Debug)] pub enum TryFromBlockDimensionsError { - InvalidRowsOrColumns(sp_std::num::TryFromIntError), + InvalidRowsOrColumns(#[from] TryFromIntError), InvalidDimensions, } -impl From for TryFromBlockDimensionsError { - fn from(error: sp_std::num::TryFromIntError) -> Self { - TryFromBlockDimensionsError::InvalidRowsOrColumns(error) - } -} - -#[cfg(feature = "std")] -impl sp_std::convert::TryInto for BlockDimensions { +impl TryInto for BlockDimensions { type Error = TryFromBlockDimensionsError; fn try_into(self) -> Result { - let rows = self.rows.0.try_into()?; - let cols = self.cols.0.try_into()?; - - Dimensions::new(rows, cols).ok_or(TryFromBlockDimensionsError::InvalidDimensions) + Dimensions::new_from(self.rows.0, self.cols.0).ok_or(Self::Error::InvalidDimensions) } } diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index fae41f3d..cd274d71 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -18,6 +18,23 @@ pub struct DataLookup { pub index: Vec, } +impl DataLookup { + pub fn range_of(&self, app_id: AppId) -> Option<(u32, u32)> { + self.index + .iter() + .position(|item| item.app_id == app_id) 
+ .map(|pos| { + let start_idx = unsafe { self.index.get_unchecked(pos).start }; + let end_idx = self + .index + .get(pos.saturating_add(1)) + .map(|item| item.start) + .unwrap_or(self.size); + (start_idx, end_idx) + }) + } +} + #[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo, RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookupIndexItem { From 2d7b441dda60a3a1ddeb65cc0042de9821c7347f Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Fri, 23 Jun 2023 09:18:14 +0200 Subject: [PATCH 57/87] Full replacement All UT pass --- Cargo.lock | 8 +- kate/Cargo.toml | 4 +- kate/recovery/Cargo.toml | 3 + kate/recovery/src/com.rs | 3 +- kate/recovery/src/commitments.rs | 3 +- kate/recovery/src/data.rs | 5 +- kate/recovery/src/index.rs | 3 + kate/recovery/src/lib.rs | 22 --- kate/src/com.rs | 6 +- kate/src/gridgen/mod.rs | 194 +++++++++++++---------- kate/src/gridgen/tests/commitments.rs | 21 +-- kate/src/gridgen/tests/formatting.rs | 23 ++- kate/src/gridgen/tests/mod.rs | 40 +++-- kate/src/gridgen/tests/reconstruction.rs | 10 +- primitives/avail/src/header/mod.rs | 20 +-- primitives/types/Cargo.toml | 4 + primitives/types/src/data_lookup.rs | 170 ++++++++++---------- primitives/types/src/lib.rs | 24 ++- 18 files changed, 304 insertions(+), 259 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1a8ddae0..ac7122d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ [[package]] name = "blst" version = "0.3.10" -source = "git+https://github.com/aphoh/blst?rev=556e037926d9c526c2eb6cb1522bea39690416ea#556e037926d9c526c2eb6cb1522bea39690416ea" +source = "git+https://github.com/availproject/blst?tag=v0.3.10#556e037926d9c526c2eb6cb1522bea39690416ea" dependencies = [ "cc", "glob", @@ -852,6 +852,8 @@ dependencies = [ "scale-info", "serde", "sp-core", + "test-case", + "thiserror-no-std", ] [[package]] @@ -1850,6 +1852,8 @@ dependencies = [ name = "kate-recovery" version = "0.8.1" dependencies 
= [ + "da-types", + "derive_more", "dusk-bytes", "dusk-plonk", "hex", @@ -2440,7 +2444,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" -source = "git+https://github.com/aphoh/poly-multiproof?rev=1ec7c7eca0861ad89427c45534006f49d039820a#1ec7c7eca0861ad89427c45534006f49d039820a" +source = "git+https://github.com/availproject/poly-multiproof?tag=v0.0.1#cd8d31b7eb568dea2fddfc9237e2e31ea7ae7ed3" dependencies = [ "ark-bls12-381", "ark-ec", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 7482b0f4..5151619d 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -8,7 +8,7 @@ edition = "2021" [dependencies] # Pending to review -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, rev = "1ec7c7eca0861ad89427c45534006f49d039820a" } +poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", default-features = false, tag = "v0.0.1" } # Internal da-types = { path = "../primitives/types", default-features = false } @@ -44,7 +44,7 @@ test-case = "1.2.3" [features] default = ["std"] alloc = ["dusk-plonk/alloc", "nalgebra/alloc"] -parallel = ["rayon"] +parallel = ["rayon", "std"] std = [ "kate-recovery/std", diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 3dc3acf3..d722aba1 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -6,6 +6,7 @@ edition = "2018" [dependencies] # Internals +da-types = { path = "../../primitives/types", default-features = false } dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2" } # Substrate @@ -13,6 +14,7 @@ codec = { package = "parity-scale-codec", version = "3", default-features = fals sp-arithmetic = { version = "6", default-features = false } # 3rd-parties +derive_more = "0.99.17" dusk-bytes = { version = "0.1.6", default-features = false } once_cell = { version = "1.9.0", optional = true } rand = { version = "0.8.4", optional = true } @@ -33,4 +35,5 @@ std = [ "rand_chacha", 
"serde", "sp-arithmetic/std", + "da-types/std", ] diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 507c50e6..6a42e38b 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,5 +1,6 @@ use codec::Decode; use core::num::TryFromIntError; +use da_types::ensure; use dusk_bytes::Serializable as _; use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; use rand::seq::SliceRandom; @@ -15,7 +16,7 @@ use thiserror_no_std::Error; use crate::{ config::{self, CHUNK_SIZE, DATA_CHUNK_SIZE, PADDING_TAIL_VALUE}, - data, ensure, index, matrix, + data, index, matrix, }; #[derive(Debug, Error)] diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 9b792909..4622cda7 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -4,6 +4,7 @@ use std::{ num::TryFromIntError, }; +use da_types::ensure; #[cfg(feature = "std")] use dusk_bytes::Serializable; use dusk_plonk::{ @@ -15,7 +16,7 @@ use thiserror_no_std::Error; use crate::{ com, config::{self, COMMITMENT_SIZE}, - ensure, index, matrix, + index, matrix, }; #[derive(Error, Debug)] diff --git a/kate/recovery/src/data.rs b/kate/recovery/src/data.rs index 63ef09eb..f3009910 100644 --- a/kate/recovery/src/data.rs +++ b/kate/recovery/src/data.rs @@ -1,9 +1,10 @@ +use derive_more::Constructor; use std::{collections::HashMap, convert::TryInto}; use crate::matrix::{Dimensions, Position, RowIndex}; /// Position and data of a cell in extended matrix -#[derive(Default, Debug, Clone)] +#[derive(Default, Debug, Clone, Constructor)] pub struct DataCell { /// Cell's position pub position: Position, @@ -12,7 +13,7 @@ pub struct DataCell { } /// Position and content of a cell in extended matrix -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Constructor)] pub struct Cell { /// Cell's position pub position: Position, diff --git a/kate/recovery/src/index.rs b/kate/recovery/src/index.rs index 61145ac5..67cb8abb 100644 --- 
a/kate/recovery/src/index.rs +++ b/kate/recovery/src/index.rs @@ -6,6 +6,9 @@ use crate::config; /// Index is list of pairs (app_id, start_index), /// where start index is index of first cell for that application. +/// +/// # TODO +/// - Replace it with `DataLookup`? #[derive(Serialize, Deserialize, Default, Debug, Clone)] pub struct AppDataIndex { /// Number of the data cells in the matrix diff --git a/kate/recovery/src/lib.rs b/kate/recovery/src/lib.rs index 46aedd45..33c4012c 100644 --- a/kate/recovery/src/lib.rs +++ b/kate/recovery/src/lib.rs @@ -10,25 +10,3 @@ pub mod proof; #[cfg(feature = "std")] pub mod testnet; - -/// Return Err of the expression: `return Err($expression);`. -/// -/// Used as `fail!(expression)`. -#[macro_export] -macro_rules! fail { - ( $y:expr ) => {{ - return Err($y.into()); - }}; -} - -/// Evaluate `$x:expr` and if not true return `Err($y:expr)`. -/// -/// Used as `ensure!(expression_to_ensure, expression_to_return_on_false)`. -#[macro_export] -macro_rules! ensure { - ( $x:expr, $y:expr $(,)? 
) => {{ - if !$x { - $crate::fail!($y); - } - }}; -} diff --git a/kate/src/com.rs b/kate/src/com.rs index 3d2e025f..fb69f619 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -7,7 +7,7 @@ use std::{ }; use codec::Encode; -use da_types::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; +use da_types::{ensure, AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows, DataLookupError}; use derive_more::Constructor; use dusk_bytes::Serializable; use dusk_plonk::{ @@ -17,7 +17,7 @@ use dusk_plonk::{ prelude::{BlsScalar, CommitKey}, }; #[cfg(feature = "std")] -use kate_recovery::{com::app_specific_rows, ensure, index, matrix::Dimensions}; +use kate_recovery::{com::app_specific_rows, index, matrix::Dimensions}; use nalgebra::base::DMatrix; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; @@ -60,6 +60,7 @@ pub enum Error { ZeroDimension, InvalidDimensionExtension, DomainSizeInvalid, + InvalidDataLookup(#[from] DataLookupError), } impl From for Error { @@ -843,6 +844,7 @@ mod tests { proptest! 
{ #![proptest_config(ProptestConfig::with_cases(20))] #[test] + #[ignore] // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { let metrics = IgnoreMetrics {}; diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index d6f7080f..0ea42272 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -5,17 +5,23 @@ use crate::pmp::{ traits::Committer, }; use codec::Encode; -use core::{cmp::max, num::NonZeroU16}; -use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; -use kate_recovery::{config::PADDING_TAIL_VALUE, ensure, matrix::Dimensions}; +use core::{ + cmp::{max, min}, + iter, + num::NonZeroU16, +}; +use da_types::{ensure, AppExtrinsic, AppId, DataLookup}; +use kate_recovery::{config::PADDING_TAIL_VALUE, matrix::Dimensions}; use nalgebra::base::DMatrix; use poly_multiproof::{ m1_blst::Proof, traits::{KZGProof, PolyMultiProofNoPrecomp}, }; -use rand::{Rng, SeedableRng}; +use rand::{CryptoRng, Rng, SeedableRng}; use rand_chacha::ChaChaRng; -use std::{cmp::min, collections::BTreeMap}; +use static_assertions::const_assert; +use std::collections::BTreeMap; +use thiserror_no_std::Error; use crate::{ com::{Cell, Error}, @@ -36,6 +42,7 @@ macro_rules! cfg_iter { }}; } +/* macro_rules! cfg_into_iter { ($e: expr) => {{ #[cfg(feature = "parallel")] @@ -44,7 +51,7 @@ macro_rules! 
cfg_into_iter { let result = $e.into_iter(); result }}; -} +}*/ pub const SCALAR_SIZE: usize = 32; pub type ArkScalar = crate::pmp::m1_blst::Fr; @@ -55,8 +62,18 @@ pub use poly_multiproof::traits::AsBytes; mod tests; pub struct EvaluationGrid { - pub lookup: DataLookup, - pub evals: DMatrix, + lookup: DataLookup, + evals: DMatrix, +} + +#[derive(Error, Debug, Clone, Copy)] +pub enum AppRowError { + #[error("Original dimensions are not divisible by current ones")] + OrigDimNotDivisible, + #[error("AppId({0}) not found")] + IdNotFound(AppId), + #[error("Lineal index overflows")] + LinealIndexOverflows, } impl EvaluationGrid { @@ -78,7 +95,7 @@ impl EvaluationGrid { ); // Convert each grup of extrinsics into scalars - let encoded = grouped + let scalars_by_app = grouped .into_iter() .map(|(id, datas)| { let mut enc = datas.encode(); @@ -90,70 +107,67 @@ impl EvaluationGrid { }) .collect::, _>>()?; + let len_by_app = scalars_by_app + .iter() + .map(|(app, scalars)| (*app, scalars.len())); + // make the index of app info - let mut start = 0u32; - let mut index = vec![]; - for (app_id, scalars) in &encoded { - index.push(DataLookupIndexItem { - app_id: *app_id, - start, - }); - start = start.saturating_add(scalars.len() as u32); // next item should start after current one - } + let lookup = DataLookup::new_from_id_lenght(len_by_app)?; + let grid_size = usize::try_from(lookup.len())?; + let (rows, cols): (usize, usize) = + get_block_dims(grid_size, min_width, max_width, max_height)?.into(); // Flatten the grid - let mut grid = encoded + let mut rng = ChaChaRng::from_seed(rng_seed); + let grid = scalars_by_app .into_iter() .flat_map(|(_, scalars)| scalars) - .collect::>(); + .chain(iter::repeat_with(|| random_scalar(&mut rng))); - let lookup = DataLookup { - size: grid.len() as u32, - index, - }; - - // Fit the grid to the desired grid size - let dims = get_block_dims(grid.len(), min_width, max_width, max_height)?; - let dim_size: usize = dims.size(); - let (rows, cols): 
(usize, usize) = dims.into(); - let mut rng = ChaChaRng::from_seed(rng_seed); - while grid.len() != dim_size { - let rnd_values: [u8; SCALAR_SIZE - 1] = rng.gen(); - // TODO: can we just use zeros instead? - grid.push(pad_to_bls_scalar(rnd_values)?); - } + let row_major_evals = DMatrix::from_row_iterator(rows, cols, grid); Ok(EvaluationGrid { lookup, - evals: DMatrix::from_row_iterator(rows, cols, grid.into_iter()), + evals: row_major_evals, }) } + /// Get the row `y` of the evaluation. pub fn row(&self, y: usize) -> Option> { - let (rows, _) = self.evals.shape(); - (y < rows).then(|| self.evals.row(y).into_iter().cloned().collect::>()) + let (rows, _cols) = self.evals.shape(); + (y < rows).then(|| self.evals.row(y).iter().cloned().collect()) } pub fn dims(&self) -> Dimensions { let (rows, cols) = self.evals.shape(); // SAFETY: We cannot construct an `EvaluationGrid` with any dimension `< 1` or `> u16::MAX` + debug_assert!(rows <= usize::from(u16::MAX) && cols <= usize::from(u16::MAX)); unsafe { Dimensions::new_unchecked(rows as u16, cols as u16) } } + #[inline] + pub fn get(&self, row: R, col: C) -> Option<&ArkScalar> + where + usize: From, + usize: From, + { + self.evals.get::<(usize, usize)>((row.into(), col.into())) + } + /// Returns a list of `(index, row)` pairs for the underlying rows of an application. /// Returns `None` if the `app_id` cannot be found, or if the provided `orig_dims` are invalid. pub fn app_rows( &self, app_id: AppId, - orig_dims: Option, - ) -> Option)>> { - let (rows, _cols) = self.evals.shape(); + maybe_orig_dims: Option, + ) -> Result)>>, AppRowError> { let dims = self.dims(); - let orig_dims = match orig_dims { + let (rows, _cols): (usize, usize) = dims.into(); + + // Ensure `origin_dims` is divisible by `dims` if some. 
+ let orig_dims = match maybe_orig_dims { Some(d) => { - if !d.divides(&dims) { - return None; - } + ensure!(d.divides(&dims), AppRowError::OrigDimNotDivisible); d }, None => dims, @@ -165,21 +179,33 @@ impl EvaluationGrid { #[allow(clippy::integer_arithmetic)] let h_mul: usize = rows / usize::from(NonZeroU16::get(orig_dims.rows())); #[allow(clippy::integer_arithmetic)] - let index_to_y_coord = |dims: &Dimensions, index: u32| -> u32 { - index / u32::from(NonZeroU16::get(dims.rows())) + let row_from_lineal_index = |cols, lineal_index| { + let lineal_index = + usize::try_from(lineal_index).map_err(|_| AppRowError::LinealIndexOverflows)?; + let cols = usize::from(NonZeroU16::get(cols)); + + Ok(lineal_index / cols) }; - let (start_ind, end_ind) = self.lookup.range_of(app_id)?; - let start_y: usize = index_to_y_coord(&orig_dims, start_ind).try_into().ok()?; - let end_y: usize = index_to_y_coord(&orig_dims, end_ind.saturating_sub(1)) - .try_into() - .ok()?; // Find y of last cell elt - let (new_start_y, new_end_y) = (start_y.checked_mul(h_mul)?, end_y.checked_mul(h_mul)?); + let (data_begin, data_end) = self + .lookup + .range_of(app_id) + .ok_or(AppRowError::IdNotFound(app_id))?; + let start_y: usize = row_from_lineal_index(orig_dims.cols(), data_begin)?; + let end_y: usize = row_from_lineal_index(orig_dims.cols(), data_end.saturating_sub(1))?; + + // SAFETY: This won't overflow because `h_mul = rows / orig_dim.rows()` and `*_y < rows) + debug_assert!(start_y < rows); + debug_assert!(end_y < rows); + #[allow(clippy::integer_arithmetic)] + let (new_start_y, new_end_y) = (start_y * h_mul, end_y * h_mul); - (new_start_y..=new_end_y) + let app_rows = (new_start_y..=new_end_y) .step_by(h_mul) .map(|y| self.row(y).map(|a| (y, a))) - .collect() + .collect(); + + Ok(app_rows) } pub fn extend_columns(&self, row_factor: NonZeroU16) -> Result { @@ -188,7 +214,7 @@ impl EvaluationGrid { .extend(row_factor, unsafe { NonZeroU16::new_unchecked(1) }) .ok_or(Error::CellLengthExceeded)? 
.into(); - let (rows, cols): (usize, usize) = dims.into(); + let (rows, _cols): (usize, usize) = dims.into(); let domain = GeneralEvaluationDomain::::new(rows).ok_or(Error::DomainSizeInvalid)?; @@ -196,39 +222,32 @@ impl EvaluationGrid { GeneralEvaluationDomain::::new(new_rows).ok_or(Error::DomainSizeInvalid)?; ensure!(domain_new.size() == new_rows, Error::DomainSizeInvalid); - let cols = (0..cols) - .into_iter() - .map(|c| self.evals.column(c).iter().cloned().collect::>()); - - let new_evals = cfg_into_iter!(cols) - .flat_map(|mut col| { - // ifft, resize, fft - domain.ifft_in_place(&mut col); - domain_new.fft_in_place(&mut col); - col - }) - .collect::>(); - - let new_evals = DMatrix::from_column_slice(new_rows, new_cols, &new_evals); - debug_assert!(new_evals.shape() == (new_rows, new_cols)); + let new_data = self.evals.column_iter().flat_map(|col| { + let mut col = col.iter().cloned().collect::>(); + domain.ifft_in_place(&mut col); + domain_new.fft_in_place(&mut col); + col + }); + let row_major_evals = DMatrix::from_iterator(new_rows, new_cols, new_data); + debug_assert!(row_major_evals.shape() == (new_rows, new_cols)); Ok(Self { lookup: self.lookup.clone(), - evals: new_evals, + evals: row_major_evals, }) } pub fn make_polynomial_grid(&self) -> Result { - let (_rows, cols) = self.evals.shape(); + let (_rows, cols): (usize, usize) = self.evals.shape(); let domain = GeneralEvaluationDomain::::new(cols).ok_or(Error::DomainSizeInvalid)?; let inner = self .evals .row_iter() - .map(|row_iter| { - let row = row_iter.iter().cloned().collect::>(); - domain.ifft(row.as_slice()) + .map(|view| { + let row = view.iter().cloned().collect::>(); + domain.ifft(&row) }) .collect::>(); @@ -435,14 +454,29 @@ fn round_up_to_multiple(input: usize, multiple: NonZeroU16) -> usize { } pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { - if a.as_ref().len() > DATA_CHUNK_SIZE { - return Err(Error::InvalidChunkLength); - } + let bytes = a.as_ref(); + ensure!(bytes.len() <= 
DATA_CHUNK_SIZE, Error::InvalidChunkLength); + const_assert!(DATA_CHUNK_SIZE <= SCALAR_SIZE); + let mut buf = [0u8; SCALAR_SIZE]; - buf[0..a.as_ref().len()].copy_from_slice(a.as_ref()); + buf[0..bytes.len()].copy_from_slice(bytes); + ArkScalar::from_bytes(&buf).map_err(Error::MultiproofError) } +pub(crate) fn random_scalar(rng: &mut R) -> ArkScalar { + /* + let mut random = [0u8; SCALAR_SIZE]; + rng.fill(&mut random[..SCALAR_SIZE - 1]); + debug_assert!(random[SCALAR_SIZE - 1] == 0u8); + + ArkScalar::from_bytes(&random) + .expect("ArkScalar can be generated from SCALAR_SIZE -1 bytes .qed") + */ + let rnd_values: [u8; SCALAR_SIZE - 1] = rng.gen(); + pad_to_bls_scalar(rnd_values).unwrap() +} + #[cfg(test)] #[allow(clippy::integer_arithmetic)] mod unit_tests { diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index 5dbc10e6..fe6ca34b 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -94,8 +94,8 @@ proptest! { let index = app_data_index_from_lookup(&grid.lookup); let public_params = testnet::public_params(BlockLengthColumns(g_cols as u32)); - for (i, xt) in exts.iter().enumerate() { - let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap(); + for xt in exts.iter() { + let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap().unwrap(); // Have to put the rows we find in this funky data structure let mut app_rows = vec![None; g_rows.into()]; for (row_i, row) in rows { @@ -104,11 +104,6 @@ proptest! 
{ // Need to provide the original dimensions here too let extended_dims = orig_dims.clone(); let (_, missing) = verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); - if !missing.is_empty() { - log::error!("Debug this spot at {i}"); - let d_rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap(); - let (_, _) = verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); - } prop_assert!(missing.is_empty()); } } @@ -128,7 +123,7 @@ proptest! { let public_params = testnet::public_params((g_cols as u32).into()); for xt in xts { - let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap(); + let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap().unwrap(); let mut row_elems = vec![None; g_rows.into()]; for (i, data) in &rows { row_elems[*i] = Some(data.iter().flat_map(|s| s.to_bytes().unwrap()).collect()); @@ -143,9 +138,9 @@ proptest! { } } -#[test_case( ([1,1,1,1]).to_vec(); "All values are non-zero but same")] -#[test_case( ([0,0,0,0]).to_vec(); "All values are zero")] -#[test_case( ([0,5,2,1]).to_vec(); "All values are different")] +#[test_case( vec![1;4]; "All values are non-zero but same")] +#[test_case( vec![0;4]; "All values are zero")] +#[test_case( vec![0,5,2,1]; "All values are different")] fn test_zero_deg_poly_commit(row_values: Vec) { // There are two main cases that generate a zero degree polynomial. One is for data that is non-zero, but the same. // The other is for all-zero data. They differ, as the former yields a polynomial with one coefficient, and latter generates zero coefficients. 
@@ -158,7 +153,7 @@ fn test_zero_deg_poly_commit(row_values: Vec) { //let ae = AppExtrinsic { 0.into(), vec![} let ev = EvaluationGrid { lookup: Default::default(), // Shouldn't need to care about this - evals: DMatrix::from_row_iterator(1, len, row.into_iter()), + evals: DMatrix::from_row_iterator(len, 1, row.into_iter()).transpose(), }; println!("Row: {:?}", ev.evals); @@ -177,7 +172,7 @@ fn test_zero_deg_poly_commit(row_values: Vec) { let proof = pg.proof(&*PMP, &cell).unwrap(); let proof_bytes = proof.to_bytes().unwrap(); - let cell_bytes = ev.evals.get((0, x)).unwrap().to_bytes().unwrap(); + let cell_bytes = ev.get(0usize, x).unwrap().to_bytes().unwrap(); let content = [&proof_bytes[..], &cell_bytes[..]].concat(); let dims = Dimensions::new(1, 4).unwrap(); let cell = kate_recovery::data::Cell { diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index ca54ddc6..1bade603 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -1,4 +1,4 @@ -use da_types::{AppExtrinsic, DataLookup, DataLookupIndexItem}; +use da_types::{AppExtrinsic, DataLookup}; use hex_literal::hex; use kate_recovery::{ com::{app_specific_cells, decode_app_extrinsics, reconstruct_extrinsics}, @@ -42,15 +42,8 @@ fn newapi_test_flatten_block() { let expected_dims = Dimensions::new_from(1, 16).unwrap(); let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); - let expected_index = [(0.into(), 0), (1.into(), 2), (2.into(), 4), (3.into(), 6)] - .into_iter() - .map(|(app_id, start)| DataLookupIndexItem { app_id, start }) - .collect::>(); - - let expected_lookup = DataLookup { - size: 9, - index: expected_index, - }; + let expected_lookup = + DataLookup::new_from_id_lenght([(0, 2), (1, 2), (2, 2), (3, 3)].into_iter()).unwrap(); assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); assert_eq!( @@ -63,10 +56,12 @@ fn newapi_test_flatten_block() { let data = evals .evals - 
.data - .as_slice() - .iter() - .flat_map(|s| s.to_bytes().unwrap()) + .row_iter() + .flat_map(|row| { + row.iter() + .flat_map(|s| s.to_bytes().unwrap()) + .collect::>() + }) .collect::>(); assert_eq!(data, expected_data, "Data doesn't match the expected data"); } diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index 34158ad9..bfed8059 100644 --- a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -1,12 +1,12 @@ use da_types::{AppExtrinsic, DataLookup}; -use kate_recovery::{data::DataCell, index::AppDataIndex}; +use kate_recovery::{data::DataCell, index::AppDataIndex, matrix::Position}; use once_cell::sync::Lazy; use poly_multiproof::{m1_blst::M1NoPrecomp, traits::AsBytes}; use proptest::{collection, prelude::*, sample::size_range}; use rand::{distributions::Uniform, prelude::Distribution, SeedableRng}; use rand_chacha::ChaChaRng; -use crate::testnet; +use crate::{gridgen::ArkScalar, testnet}; use super::EvaluationGrid; @@ -37,8 +37,12 @@ fn app_extrinsics_strategy() -> impl Strategy> { fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { AppDataIndex { - size: lookup.size, - index: lookup.index.iter().map(|e| (e.app_id.0, e.start)).collect(), + size: lookup.len(), + index: lookup + .index() + .iter() + .map(|e| (e.app_id.0, e.start)) + .collect(), } } @@ -54,23 +58,27 @@ fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { sampled } -fn sample_cells(grid: &EvaluationGrid, columns: Option<&[usize]>) -> Vec { +fn sample_cells(grid: &EvaluationGrid, columns: Option>) -> Vec { let mut rng = ChaChaRng::from_seed([42u8; 32]); - let (g_rows, g_cols) = grid.evals.shape(); - let cols: Vec = match columns { - Some(cols) => cols.to_vec(), - None => (0..g_cols).into_iter().collect(), - }; + let (g_rows, g_cols): (usize, usize) = grid.dims().into(); + let cols = columns.unwrap_or_else(|| (0..g_cols).into_iter().collect()); + cols.iter() .flat_map(|x| { + debug_assert!(*x < g_cols); 
sample_unique(&mut rng, g_rows / 2, g_rows) .into_iter() - .map(move |y| kate_recovery::data::DataCell { - position: kate_recovery::matrix::Position { - row: y as u32, - col: *x as u16, - }, - data: grid.evals.get((y, *x)).unwrap().to_bytes().unwrap(), + .map(move |y| { + let data = grid + .evals + .get((y, *x)) + .and_then(|s: &ArkScalar| s.to_bytes().ok()) + .unwrap(); + // SAFETY: `y` and `x` can be casted safetly becasue `x < g_cols (u16)` and `y + // < g_rows(u16)` + let position = Position::from((y as u32, *x as u16)); + + DataCell::new(position, data) }) }) .collect::>() diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 5b89acb0..553a8ee7 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -39,7 +39,7 @@ fn test_multiple_extrinsics_for_same_app_id() { let cells = sample_cells(&ev, None); let index = app_data_index_from_lookup(&ev.lookup); - let (rows, cols) = ev.evals.shape(); + let (rows, cols): (u16, u16) = ev.dims().into(); let bdims = Dimensions::new_from(rows, cols).unwrap(); let res = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); @@ -52,7 +52,7 @@ proptest! 
{ #[test] fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(unsafe { NonZeroU16::new_unchecked(2)}).unwrap(); - let (rows, cols) = grid.evals.shape(); + let (rows, cols) :(usize,usize)= grid.dims().into(); //let (layout, commitments, dims, matrix) = par_build_commitments( // BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); const RNG_SEED: Seed = [42u8; 32]; @@ -79,7 +79,7 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let proof = polys.proof(pp, &cell).unwrap(); let mut content = [0u8; 80]; content[..48].copy_from_slice(&proof.to_bytes().unwrap()[..]); - content[48..].copy_from_slice(&grid.evals.get((y,x)).unwrap().to_bytes().unwrap()[..]); + content[48..].copy_from_slice(&grid.get(y, x).unwrap().to_bytes().unwrap()[..]); let dcell = DCell{position: Position { row: y as u32, col: x as u16 }, content }; let verification = kate_recovery::proof::verify(&kate_recovery::testnet::public_params(256), &bdims, &commitments[y].to_bytes().unwrap(), &dcell); @@ -117,7 +117,7 @@ get erasure coded to ensure redundancy."#; .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); - let cols_1 = sample_cells(&grid, Some(&[0, 1, 2, 3])); + let cols_1 = sample_cells(&grid, Some(vec![0, 1, 2, 3])); let index = app_data_index_from_lookup(&grid.lookup); @@ -125,7 +125,7 @@ get erasure coded to ensure redundancy."#; let res_1 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_1, 1).unwrap(); assert_eq!(res_1[0], app_id_1_data); - let cols_2 = sample_cells(&grid, Some(&[0, 2, 3])); + let cols_2 = sample_cells(&grid, Some(vec![0, 2, 3])); let res_2 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_2, 2).unwrap(); assert_eq!(res_2[0], app_id_2_data); diff --git a/primitives/avail/src/header/mod.rs b/primitives/avail/src/header/mod.rs index 
9f4682b5..e33fd5a1 100644 --- a/primitives/avail/src/header/mod.rs +++ b/primitives/avail/src/header/mod.rs @@ -453,10 +453,8 @@ mod tests { }; let extension = extension::v1::HeaderExtension { commitment, - app_lookup: DataLookup { - size: 1, - index: vec![], - }, + app_lookup: DataLookup::lenghts_from_sorted_by_app_id(vec![(0, 1)].into_iter()) + .expect("Valid DataLookup .qed"), }; let digest = Digest { logs: vec![ @@ -564,19 +562,6 @@ mod tests { (header, hash) } - fn corrupted_app_lookup(header_and_hash: (THeader, H256)) -> (THeader, H256) { - let (mut header, hash) = header_and_hash; - - match header.extension { - extension::HeaderExtension::V1(ref mut ext) => ext.app_lookup.size += 1, - extension::HeaderExtension::V2(ref mut ext) => ext.app_lookup.size += 1, - #[cfg(feature = "header-backward-compatibility-test")] - _ => unreachable!(), - }; - - (header, hash) - } - fn corrupted_number(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { header_and_hash.0.number += 1; header_and_hash @@ -596,7 +581,6 @@ mod tests { #[test_case( corrupted_kate_data_root(header()) => false; "Corrupted data root in kate")] #[test_case( corrupted_kate_cols(header()) => false; "Corrupted cols in kate")] #[test_case( corrupted_kate_rows(header()) => false; "Corrupted rows in kate")] - #[test_case( corrupted_app_lookup(header()) => false )] #[test_case( corrupted_number(header()) => false )] #[test_case( corrupted_state_root(header()) => false )] #[test_case( corrupted_parent(header()) => false )] diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml index 2f0b55c5..55b4560a 100644 --- a/primitives/types/Cargo.toml +++ b/primitives/types/Cargo.toml @@ -14,6 +14,10 @@ sp-core = { version = "7", default-features = false } derive_more = "0.99.17" num-traits = { version = "0.2", default-features = false } serde = { version = "1.0", features = ["derive"], optional = true } +thiserror-no-std = "2.0.2" + +[dev-dependencies] +test-case = "1.2.3" [features] default = 
["std"] diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs index cd274d71..24d960a5 100644 --- a/primitives/types/src/data_lookup.rs +++ b/primitives/types/src/data_lookup.rs @@ -1,24 +1,95 @@ use alloc::vec::Vec; -use num_traits::Zero; +use core::convert::TryFrom; +use derive_more::Constructor; +use num_traits::{CheckedAdd, Zero}; use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::RuntimeDebug; +use thiserror_no_std::Error; -use crate::AppId; +use crate::{ensure, AppId}; #[derive(PartialEq, Eq, Clone, Encode, Decode, Default, TypeInfo, RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr(test, derive(Constructor))] pub struct DataLookup { /// size of the look up #[codec(compact)] - pub size: u32, + size: u32, /// sorted vector of tuples(key, start index) - pub index: Vec, + index: Vec, +} + +#[derive(Error, Debug, Clone, Copy, PartialEq, Eq)] +pub enum Error { + #[error("Input data is not sorted by AppId")] + DataNotSorted, + #[error("Data is empty on AppId {0}")] + DataEmptyOn(AppId), + #[error("Offset overflows")] + OffsetOverflows, } impl DataLookup { + /// Creates the `DataLookup` from an iterator sorted by `AppId` + pub fn new_from_id_lenght(data: I) -> Result + where + I: Iterator, + AppId: From, + L: Zero + CheckedAdd, + u32: TryFrom, + { + let mut offset = 0; + let mut maybe_prev_id = None; + + let index = data + // .skip_while(|(id, _)| id.is_zero()) + .map(|(id, len)| { + // Check sorted by AppId + let id = AppId::from(id); + if let Some(prev_id) = maybe_prev_id.replace(id) { + ensure!(prev_id < id, Error::DataNotSorted); + } + + // Check non-empty data per AppId + let len = u32::try_from(len).map_err(|_| Error::OffsetOverflows)?; + ensure!(len > 0, Error::DataEmptyOn(id)); + + let item = DataLookupIndexItem::new(id, offset); + offset = offset.checked_add(len).ok_or(Error::OffsetOverflows)?; + + 
Ok::(item) + }) + .filter(|res_item| { + // Filter valid items where AppId == 0 + if let Ok(item) = res_item.as_ref() { + !item.app_id.is_zero() + } else { + true + } + }) + .collect::>()?; + + Ok(Self { + size: offset, + index, + }) + } + + pub fn len(&self) -> u32 { + self.size + } + + pub fn is_empty(&self) -> bool { + self.size == 0 + } + + pub fn index(&self) -> &Vec { + &self.index + } + pub fn range_of(&self, app_id: AppId) -> Option<(u32, u32)> { self.index .iter() @@ -30,12 +101,15 @@ impl DataLookup { .get(pos.saturating_add(1)) .map(|item| item.start) .unwrap_or(self.size); + debug_assert!(start_idx < end_idx); (start_idx, end_idx) }) } } -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo, RuntimeDebug)] +#[derive( + PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo, RuntimeDebug, Constructor, +)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct DataLookupIndexItem { pub app_id: AppId, @@ -56,60 +130,11 @@ where } } -#[derive(PartialEq, Eq, RuntimeDebug)] -/// Errors during the creation from `extrinsics`. -pub enum TryFromError { - /// Size overflows - SizeOverflow, - /// Extrinsics are not sorted. 
- UnsortedExtrinsics, -} - -use core::convert::TryFrom; -impl TryFrom<&[(AppId, u32)]> for DataLookup { - type Error = TryFromError; - - fn try_from(extrinsics: &[(AppId, u32)]) -> Result { - let mut index = Vec::new(); - // transactions are order by application id - // skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = AppId(0); - - for (app_id, data_len) in extrinsics { - if !app_id.is_zero() && prev_app_id != *app_id { - index.push(DataLookupIndexItem { - app_id: *app_id, - start: size, - }); - } - - size = size - .checked_add(*data_len) - .ok_or(Self::Error::SizeOverflow)?; - if prev_app_id > *app_id { - return Err(Self::Error::UnsortedExtrinsics); - } - prev_app_id = *app_id; - } - - Ok(DataLookup { size, index }) - } -} - #[cfg(test)] mod test { use super::*; + use test_case::test_case; - fn into_app_ids(vals: I) -> Vec<(AppId, u32)> - where - I: IntoIterator, - T: Into, - { - vals.into_iter() - .map(|(id, idx)| (id.into(), idx)) - .collect::>() - } fn into_lookup_items(vals: I) -> Vec where I: IntoIterator, @@ -118,31 +143,16 @@ mod test { vals.into_iter().map(Into::into).collect::>() } - fn from_extrinsics_data() -> Vec<(Vec<(AppId, u32)>, Result)> { - vec![ - ( - into_app_ids([(0, 5), (0, 10), (1, 5), (1, 10), (2, 100), (2, 50)]), - Ok(DataLookup { - size: 180, - index: into_lookup_items([(1, 15), (2, 30)]), - }), - ), - ( - into_app_ids([(0, 5), (0, 10), (1, u32::MAX)]), - Err(TryFromError::SizeOverflow), - ), - ( - into_app_ids([(0, 5), (0, 10), (1, 5), (2, 100), (1, 10), (2, 50)]), - Err(TryFromError::UnsortedExtrinsics), - ), - ] - } + #[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(DataLookup::new(185, into_lookup_items([(1, 15), (2, 35)]))); "Valid case")] + #[test_case( vec![(0, usize::MAX)] => Err(Error::OffsetOverflows); "Offset overflows at zero")] + #[test_case( vec![(0, (u32::MAX -1) as usize), (1, 2)] => Err(Error::OffsetOverflows); "Offset overflows at non zero")] + #[test_case( 
vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsortend data")] + #[test_case( vec![] => Ok(DataLookup::new(0, vec![])); "Empty data")] + fn from_len(id_len_data: Vec<(u32, usize)>) -> Result { + let iter = id_len_data + .into_iter() + .map(|(id, len)| (AppId::from(id), len)); - #[test] - fn from_extrinsics() { - for (extrinsic, expected) in from_extrinsics_data() { - let data_lookup = DataLookup::try_from(extrinsic.as_slice()); - assert_eq!(data_lookup, expected); - } + DataLookup::new_from_id_lenght(iter) } } diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs index 68bfc4d2..d5b340bb 100644 --- a/primitives/types/src/lib.rs +++ b/primitives/types/src/lib.rs @@ -10,7 +10,7 @@ use sp_core::RuntimeDebug; mod data_lookup; mod get_app_id; -pub use data_lookup::*; +pub use data_lookup::{DataLookup, DataLookupIndexItem, Error as DataLookupError}; pub use get_app_id::*; /// Raw Extrinsic with application id. @@ -149,3 +149,25 @@ impl BlockLengthRows { self.0 as usize } } + +/// Return Err of the expression: `return Err($expression);`. +/// +/// Used as `fail!(expression)`. +#[macro_export] +macro_rules! fail { + ( $y:expr ) => {{ + return Err($y.into()); + }}; +} + +/// Evaluate `$x:expr` and if not true return `Err($y:expr)`. +/// +/// Used as `ensure!(expression_to_ensure, expression_to_return_on_false)`. +#[macro_export] +macro_rules! ensure { + ( $x:expr, $y:expr $(,)? 
) => {{ + if !$x { + $crate::fail!($y); + } + }}; +} From 2672485230b41be0d13d05fc9c0266c34ce43c31 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Fri, 23 Jun 2023 13:02:29 +0200 Subject: [PATCH 58/87] Fix example --- kate/examples/multiproof_verification.rs | 43 ++++++++++++++++-------- 1 file changed, 29 insertions(+), 14 deletions(-) diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index 0ffcf0ad..17123d5f 100644 --- a/kate/examples/multiproof_verification.rs +++ b/kate/examples/multiproof_verification.rs @@ -1,19 +1,35 @@ +use core::num::NonZeroU16; use da_types::{AppExtrinsic, AppId}; use hex_literal::hex; use kate::{ + gridgen::EvaluationGrid, pmp::{merlin::Transcript, traits::PolyMultiProofNoPrecomp}, + testnet::multiproof_params, Seed, }; use kate_recovery::matrix::Dimensions; use poly_multiproof::traits::AsBytes; use rand::thread_rng; -use std::num::NonZeroU16; +use thiserror_no_std::Error; -fn main() { +#[derive(Error, Debug)] +enum AppError { + Kate(#[from] kate::com::Error), + MultiProof(#[from] poly_multiproof::Error), +} + +fn main() -> Result<(), AppError> { + let verified = multiproof_verification()?; + println!("Multiproof verfication is {verified}"); + + Ok(()) +} + +fn multiproof_verification() -> Result { let target_dims = Dimensions::new_from(16, 64).unwrap(); - let pp = kate::testnet::multiproof_params(256, 256); + let pp = multiproof_params(256, 256); let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng()); - let points = kate::gridgen::domain_points(256).unwrap(); + let points = kate::gridgen::domain_points(256)?; let (proof, evals, commitments, dims) = { let exts = vec![ AppExtrinsic { @@ -30,13 +46,11 @@ fn main() { }, ]; let seed = Seed::default(); - let grid = kate::gridgen::EvaluationGrid::from_extrinsics(exts, 4, 256, 256, seed) - .unwrap() - .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) - .unwrap(); + let grid = 
EvaluationGrid::from_extrinsics(exts, 4, 256, 256, seed)? + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) })?; // Setup, serializing as bytes - let polys = grid.make_polynomial_grid().unwrap(); + let polys = grid.make_polynomial_grid()?; let commitments = polys .commitments(&pp) @@ -57,7 +71,7 @@ fn main() { ) .unwrap(); - let proof_bytes = multiproof.proof.to_bytes().unwrap(); + let proof_bytes = multiproof.proof.to_bytes()?; let evals_bytes = multiproof .evals .iter() @@ -85,14 +99,15 @@ fn main() { .chunks_exact(mp_block.end_x - mp_block.start_x) .collect::>(); - let proof = kate::pmp::m1_blst::Proof::from_bytes(&proof).unwrap(); + let proof = kate::pmp::m1_blst::Proof::from_bytes(&proof)?; - pmp.verify( + let verified = pmp.verify( &mut Transcript::new(b"avail-mp"), block_commits, &points[mp_block.start_x..mp_block.end_x], &evals_grid, &proof, - ) - .unwrap(); + )?; + + Ok(verified) } From 98402aac1304d91240663fc57e577cbeb2358ea9 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 26 Jun 2023 13:39:54 +0200 Subject: [PATCH 59/87] Add benchmark on reconstruction --- Cargo.lock | 233 +++++++++++++++++++++++++++-- kate/Cargo.toml | 9 +- kate/benches/kzg.rs | 210 -------------------------- kate/benches/reconstruct.rs | 190 +++++++++++++++++++++++ kate/src/com.rs | 36 ++--- primitives/avail/src/header/mod.rs | 2 +- 6 files changed, 432 insertions(+), 248 deletions(-) delete mode 100644 kate/benches/kzg.rs create mode 100644 kate/benches/reconstruct.rs diff --git a/Cargo.lock b/Cargo.lock index ac7122d3..e2cd3f5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -93,6 +93,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "ansi_term" version = "0.12.1" @@ -102,6 +108,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "anstyle" +version = "1.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" + [[package]] name = "anyhow" version = "1.0.71" @@ -521,6 +533,12 @@ dependencies = [ "serde", ] +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + [[package]] name = "cc" version = "1.0.79" @@ -554,6 +572,33 @@ dependencies = [ "winapi", ] +[[package]] +name = "ciborium" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" + +[[package]] +name = "ciborium-ll" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "cipher" version = "0.4.4" @@ -573,6 +618,32 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "clap" +version = "4.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9394150f5b4273a1763355bd1c2ec54cc5a2593f790587bcd6b2c947cfa9211" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a78fbdd3cc2914ddf37ba444114bc7765bbdcb55ec9cbe6fa054f0137400717" +dependencies = [ + "anstyle", + "bitflags", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" + [[package]] name = "coins-bip32" version = "0.7.0" @@ -693,6 +764,42 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + [[package]] name = "crossbeam-channel" version = "0.5.8" @@ -1542,6 +1649,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + [[package]] name = "hash-db" version = "0.15.2" @@ -1773,6 +1886,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "is-terminal" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +dependencies = [ + "hermit-abi 0.3.1", + "io-lifetimes 1.0.11", + "rustix 0.37.20", + "windows-sys 0.48.0", +] + [[package]] name = "itertools" version = "0.9.0" @@ -1799,9 +1924,9 @@ checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "js-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" 
+checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -1823,6 +1948,8 @@ dependencies = [ name = "kate" version = "0.7.1" dependencies = [ + "criterion", + "da-primitives", "da-types", "derive_more", "dusk-bytes", @@ -2288,6 +2415,12 @@ version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + [[package]] name = "opaque-debug" version = "0.2.3" @@ -2441,6 +2574,34 @@ dependencies = [ "spki", ] +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + [[package]] name = "poly-multiproof" version = "0.0.1" @@ -2891,6 +3052,15 @@ dependencies = [ "cipher", ] +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "scale-info" version = "2.7.0" @@ -3794,6 +3964,16 @@ dependencies = [ "crunchy", ] +[[package]] +name = 
"tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -4024,6 +4204,16 @@ dependencies = [ "libc", ] +[[package]] +name = "walkdir" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wasi" version = "0.9.0+wasi-snapshot-preview1" @@ -4038,9 +4228,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -4048,9 +4238,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", @@ -4063,9 +4253,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4073,9 +4263,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.86" +version = "0.2.87" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", @@ -4086,9 +4276,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasmi" @@ -4254,6 +4444,16 @@ dependencies = [ "wasmparser", ] +[[package]] +name = "web-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + [[package]] name = "wide" version = "0.7.10" @@ -4280,6 +4480,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 5151619d..97737b59 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -40,13 +40,16 @@ thiserror-no-std = "2.0.2" proptest = "1" serde_json = "1" test-case = "1.2.3" +criterion = "0.5.1" +da-primitives = { path = "../primitives/avail" } [features] -default = ["std"] +default = ["std",] alloc = ["dusk-plonk/alloc", "nalgebra/alloc"] -parallel = ["rayon", "std"] +parallel = ["rayon"] std = [ + "parallel", "kate-recovery/std", "once_cell", "hex-literal", @@ -68,5 +71,5 @@ 
extended-columns = [] maximum-block-size = [] [[bench]] -name = "kzg" +name = "reconstruct" harness = false diff --git a/kate/benches/kzg.rs b/kate/benches/kzg.rs deleted file mode 100644 index dacaddf3..00000000 --- a/kate/benches/kzg.rs +++ /dev/null @@ -1,210 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use da_primitives::{asdr::AppExtrinsic, BlockLengthColumns, BlockLengthRows}; -use itertools::Itertools; -use kate::{ - com::{build_proof, par_build_commitments, Cell}, - config::DATA_CHUNK_SIZE, -}; -use kate_recovery::{data, matrix::Position, proof, testnet}; -use rand::prelude::*; -use rand_chacha::ChaCha20Rng; - -fn variate_rc(rows: u32, cols: u32) -> Vec<(u32, u32)> { - assert_eq!(rows >= 64, true); - assert_eq!(cols >= 64, true); - - let mut dims = Vec::new(); - - let mut i = 64; - while i <= rows { - dims.push((i, cols * (rows / i))); - i <<= 1; - } - - let mut i = 64; - while i < cols { - dims.push((rows * (cols / i), i)); - i <<= 1; - } - - dims -} - -fn generate_matrix_dimensions() -> Vec<(u32, u32)> { - const MIN_ROWS: u32 = 256; - const MAX_ROWS: u32 = 2048; - - const MIN_COLS: u32 = 256; - const MAX_COLS: u32 = 2048; - - let mut dims = Vec::new(); - - let mut r = MIN_ROWS; - while r <= MAX_ROWS { - let mut c = MIN_COLS; - while c <= MAX_COLS { - dims.extend(&variate_rc(r, c)); - c <<= 1; - } - r <<= 1; - } - - dims.into_iter().unique().collect::>() -} - -// Commitment builder routine candidate -fn bench_par_build_commitments(c: &mut Criterion) { - let mut rng = ChaCha20Rng::from_entropy(); - - const CHUNK: usize = DATA_CHUNK_SIZE as usize + 1; - let dims = generate_matrix_dimensions(); - - for dim in dims { - let dlen = (dim.0 * dim.1) as usize * (CHUNK - 2); - - let mut seed = [0u8; 32]; - let mut data = vec![0u8; dlen]; - - rng.fill_bytes(&mut seed); - rng.fill_bytes(&mut data); - - let tx = AppExtrinsic::from(data.to_vec()); - let txs = [tx]; - - c.bench_function( - &format!( - "par_build_commitments/{}x{}/{} 
MB", - dim.0, - dim.1, - ((dim.0 * dim.1) as usize * CHUNK) >> 20 - ), - |b| { - b.iter(|| { - let (_, _, _, _) = par_build_commitments( - black_box(BlockLengthRows(dim.0)), - black_box(BlockLengthColumns(dim.1)), - black_box(CHUNK.try_into().unwrap()), - black_box(&txs), - black_box(seed), - ) - .unwrap(); - }); - }, - ); - } -} - -fn bench_build_proof(c: &mut Criterion) { - let mut rng = ChaCha20Rng::from_entropy(); - - const CHUNK: usize = DATA_CHUNK_SIZE as usize + 1; - let mdims = generate_matrix_dimensions(); - - for dim in mdims { - let dlen = (dim.0 * dim.1) as usize * (CHUNK - 2); - - let mut seed = [0u8; 32]; - let mut data = vec![0u8; dlen]; - - rng.fill_bytes(&mut seed); - rng.fill_bytes(&mut data); - - let tx = AppExtrinsic::from(data.to_vec()); - let txs = [tx]; - - let public_params = testnet::public_params(dim.1 as usize); - - let (_, _, dims, mat) = par_build_commitments( - BlockLengthRows(dim.0), - BlockLengthColumns(dim.1), - CHUNK.try_into().unwrap(), - &txs, - seed, - ) - .unwrap(); - - c.bench_function( - &format!( - "build_proof/{}x{}/ {} MB", - dim.0, - dim.1, - ((dim.0 * dim.1) as usize * CHUNK) >> 20 - ), - |b| { - b.iter(|| { - let cell = Cell::new( - BlockLengthRows(rng.next_u32() % dims.rows.0), - BlockLengthColumns(rng.next_u32() % dims.cols.0), - ); - - let proof = build_proof(&public_params, dims, &mat, &[cell]).unwrap(); - assert_eq!(proof.len(), 80); - }); - }, - ); - } -} - -fn bench_verify_proof(c: &mut Criterion) { - let mut rng = ChaCha20Rng::from_entropy(); - - const CHUNK: usize = DATA_CHUNK_SIZE as usize + 1; - let mdims = generate_matrix_dimensions(); - - for dim in mdims { - let dlen = (dim.0 * dim.1) as usize * (CHUNK - 2); - - let mut seed = [0u8; 32]; - let mut data = vec![0u8; dlen]; - - rng.fill_bytes(&mut seed); - rng.fill_bytes(&mut data); - - let tx = AppExtrinsic::from(data.to_vec()); - let txs = [tx]; - - let pp = testnet::public_params(dim.1 as usize); - - let (_, comms, dims, mat) = par_build_commitments( - 
BlockLengthRows(dim.0), - BlockLengthColumns(dim.1), - CHUNK.try_into().unwrap(), - &txs, - seed, - ) - .unwrap(); - - let row = BlockLengthRows(rng.next_u32() % dims.rows.0); - let col = BlockLengthColumns(rng.next_u32() % dims.cols.0); - - let proof = build_proof(&pp, dims, &mat, &[Cell { row, col }]).unwrap(); - assert_eq!(proof.len(), 80); - - c.bench_function( - &format!( - "verify_proof/{}x{}/ {} MB", - dim.0, - dim.1, - ((dim.0 * dim.1) as usize * CHUNK) >> 20 - ), - |b| { - b.iter(|| { - let comm: [u8; 48] = comms[row.as_usize() * 48..(row.as_usize() + 1) * 48] - .try_into() - .unwrap(); - let dims = dims.try_into().unwrap(); - let cell = data::Cell { - position: Position { row: 0, col: 0 }, - content: proof.clone().try_into().unwrap(), - }; - let flg = proof::verify(&pp, &dims, &comm, &cell); - - assert!(flg.unwrap()); - }); - }, - ); - } -} - -criterion_group! {name = kzg; config = Criterion::default().sample_size(10); targets = bench_par_build_commitments, bench_build_proof, bench_verify_proof} -criterion_main!(kzg); diff --git a/kate/benches/reconstruct.rs b/kate/benches/reconstruct.rs new file mode 100644 index 00000000..184dd30f --- /dev/null +++ b/kate/benches/reconstruct.rs @@ -0,0 +1,190 @@ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use da_primitives::{BlockLengthColumns, BlockLengthRows}; +use da_types::{AppExtrinsic, AppId}; +use dusk_plonk::prelude::BlsScalar; +use hex_literal::hex; +use kate::{ + com::{Cell, *}, + metrics::IgnoreMetrics, + BlockDimensions, Seed, Serializable, +}; +use kate_recovery::{ + com::reconstruct_extrinsics, + commitments, + data::{self, DataCell}, + index::*, + matrix::Position, + proof, testnet, +}; +use rand::{prelude::IteratorRandom, Rng, SeedableRng}; +use rand_chacha::ChaChaRng; +use sp_arithmetic::{traits::SaturatedConversion, Percent}; + +#[rustfmt::skip] +fn make_xts() -> Vec { + vec![ + AppExtrinsic { + app_id: AppId(0), + data: 
hex!("1470af08b0ab8ff9d79a3b6402c83fd92e4e26f6d9c60ca360144939362192441ae63d4c94a36f5e151d719366b9c214a9ff240ff6cc9385d8fb95acf51a8e6f07255e08cf3e283e14dca7aebe1e5afdb0502a780404702bd93711305375883738300450de7a8d8f98ff961bd887db94a83d06ccf622201f3c73a9c317c214ff9bd76eeff7ce6bb6fa4ce552800b57207a3738ed78540ff5a089f76b26ca4f4e9fc4968e75fc3eb9d81e58ec680e429741840abc372e226ad785795a5bdea186ada577ff610fee6233822513e98ce5a0e462deda92cbc2e576c3723c0e86c4a838fa55db0252c2c78730d3a4d7746e2fc54bde37a0181a161e09ec093391f3a79a5fd4f950e286aafb8168bb74d59742bdb74feab83d6d353d65a30e0c16793ee614ee1fcfc20d63137681315245508e20982133ebfa3bbc58679cd2e3cc6ac055474f826d8824d1d9e37fef906f3d145f4864f79112b5689341df0b410b6013974a5fa8b3ccac836c6ffe060915c5fa74ee0d4b15cd43581fb7d1ce2dcbdab4850fd4145598a031b8994bcceb12b6b7a8ca3f5c7aac39b8d42a590a5e82631872a4f637d4106a3ab293aa84ff076d4859eb164ed00f727d").to_vec() + }, + AppExtrinsic { + app_id: AppId(14183), + data: hex!("d48abcb0d4c6acef7119109d66187fba860fbd07283895235c5a60a987bdcf5125c61f5dc93d82daa5701c53e4a87461662d09e3919e6ae29eba8f907a223f5243111c4627f29bc8eaa0556f819d09aa8456a94b0d494d4bc3472e0b34339334d890c38746d398110b4bb2b830f533c544a96bfcacd40d9c02abccd2832e3159ec3dd105d6aa7cf0a15a43af1f1aea9a88ec9a2fc2f6f371811837486202ead04d77ad5c75c2b93f3cff29c990d90b151d0c8c506cf2d9ff9d0aae0e564f6c07b46a58b14d75b54365c03e4266f0c96c83b92969f109705d1000fc634c934648d90c9c73ca7fce164b4855817b242ca7448b50179f9d167be3ee27cdb6570cdefcb2b8cc6521d094ba74fa2838b1df6f48e044d013e88aec33de9a7f66bd5f2302ad298581f163ab843ec3a0252c1a6ed40ec5c15f28e63ee2c36821c61ab60e433ee7c8faa662a1a225f29dbe99af00d664dec4ad77a174b6f8ff7176f21f6953e0034ac66aa0a46bf87f4809a68c4d02fc63e9ade900119fee98dda988e6dd72a1fcac98e43bdfd1cbff48781918aa7ef7695e949d96c0fcaa65959ea3c24e3f66410c6f6e0d932aef9ed823d05bb048f93773e10e807e5e862dfb56fef4e39640e0b6b13e8ab0345b60ef0dbe57d289896fd002adc8a43021a629023116094020b2ce6aec1c1305b3124a42750a5be72577756e2a5306d0d8e8f0b1226216fc11a49cede06352bc
c58bf854970a2ea3da95fa334c6e34bd6ba181ff2aaa918263210e998a7101f6cd111c827d0ed220328594688a9b011bbf753885e2ceb0f6974e1371c4655b10f38d81bfa8d092c13f8c5e4387d7ec09ccebf10bd2ebb82de30ff67fb1c493fdec2878fe0cb3cb024dea02c3caa8a82a8be85a5ac904e96c22d0c32bf7a5ea3c703d168ed8049cb7d4cf8f612fa7576814f4e09db516e97e2c82e8eb6976bdde44dc11a351a72cb8e9f6a7f14ca56c5b192a6c889c5d02137ad5a1b83ef1e8ffb1917d98624253b3fcd38afeaa7cfb0904a2ced2dcb51af8fcc9e6733368e18e55bc7f264e1e915b1d10772013e508b72a9def320913a8e6787523c69db034bd99d70fbdf8bfc4d1c137d741e9e0e4109546586601681815c0ea679942c0dfdeff3ef4ee285313d826076b9e84b2e17a30f274a7cf97665ab56c7da309d01191d5fb52c45c036025fd75f56353f6337bea19888d8b63e47de8a09a71793af2d370dbff6010ac6d26fbbb1f92fdad47446b60eb4d1cc04ebd20fe545f4c61f3bb1f0ba73458392259ed4d2e1a18acdf158002a72ff91f6fc690f5f7d1b9ce6bf1c91065169d6486db016ec10a8cd916089a79e7fd44ae6530144df23ed367e2c599dff0ef14215bf7deaf63911d453fa0a3d84d3f7319eed77bfa9e2df0e2a658077569a66bbd71b7ff856bcd9bc089e2ccbee7d7b12ce48c496b18673125ca32465af19796ee0edf53dcff48911fcd09af6647a081cb3126118b6974b905067e8a4985dec6f289f64abd6feaa975449df12119675981e603f9897876449faa6fe81a6d96771f5c35664017816fc0c953ff1aa4087385284ccbe9e7ae068da5ac015af82687dcbdc96a10e929a2ab40dff231b7cbbde59b5004e0150cbb0c6c7fe1291835d7e876dba7a31b0109ba76f8a9df4fd4af339e762931f942a29f593f61e12d407545f6a404f6e6505a348616f97fb0baf033b450c5bea2321b22eca83a8750efb98d1336864dec21ae9ba2b1b8b5319e631f9dfbb455dae7aa4e0e25a2cba68e14db323033eb16ed94614e85fe8829b35cb6f5422a7374a4cf6e21ff1445f28f89179fa75947b106dba902cd744d326cd26b71fbfac0f46f4cf26425f67ab7fa5cff0d9dd19293f54ae9a7115b2bf955424a93ab0f8067e55df600ad9cb49eaa1b9f40447f9896b75eae0e7d44ce6631ead3a39c81b17c24782ad015ac2306a75c80691a0f881787eb95029f3f66fded61ecbaa7b13692e3e920d3331afbafc8097028feea068e7badda27e45cb8422ed045474a83e08d8a9869a1f87bc45fc202fa2fc8cd0343e5d02fc1880c7ac717240002b171a23589dde0ae710c721b61fd418a3ef349b6c0b9c9abb9d134bc7b621b227f3e2021f9c95d1ad0bf6ce4e442f98b55
115a73c934911373a4fc299979a014b61b5d88c8346829fff83cb3c1ec89e67d43be2e2aa80e0a85f7d434c47eb2c85af02084c2d2c90b972807322f45513fe7df4436494b161fe15a7e9bf8393ec9fa8cc0235a64bd29c429b3fc871cfc13415df3b9fe05748eb7e205c0d7e5").to_vec(), + }, + AppExtrinsic { + app_id: AppId(117134629), + data: hex!("9e6e075b63217f5b6cd1de4f824c49ce9123ea0a1307ab6a4a0c7296e9affd35073784c001ab15cb826c17ae606ade3b937beb3ef44187b1f73e1451c1a52d36523df6aedf40b835132ef770e54bf91511990da102f2fa54219b8633770865fe92d8c4b041354444f039541741445cb90509251f6fa377aa74bb82748da8ca2a6e1dc86164c9a01f0c3fb93d095dd999a0d66fe07dcd5eb9065ff0d227bcb66ff841d0bf0a9ce80647581c458e3bf15916aa1345c1ec99b140deaf1c6cd29f0c57a956dd230b8958e0d1c94e92568619167bbc06e0548298a24889421dada1765a1642bd84753e8da9155d311234e674bdad61cfd7a64b165374aa4687e7dae1a1f95a3cb2697dd0d8363436253b7acd55e853210519f81bb19ac55ef1a6d0a8a128").to_vec(), + }, + AppExtrinsic { + app_id: AppId(117134630), + data: hex!("488945bd0005807499f3f3fc0b395b607ed35d2d4bc6ac8b9ab0ff5cb36f58fd2237144c312dbc658e11fec7990febe8ffe4373e33bcfb5189a690b11e473aeb6d57787ef6ea0909e7988d993e583f589b31e8da63fe014db6d1fcadc4a6e99b15d21cfa5ff00cd93d89224b7bfccf7cb44f9b727bf7994b849300a8a4254feab27c9fc3918e4206febe64daa2b5f715fc6763d4fc1ece9be8424ab1db4bb843d097f66568101e586e47b220cf61a0ec635e0cb4490abaa4fefbdad6588eb3e670d1037845257f98971e014f9079ce507f660bf27d25704908dfa2520a92dd06feca0d8737a7c774ceaa1ba9887ff398da09b21bd78fa8dc835a5731d4a4914eddef16209d14e319a809306b62180fbf8d6fa5662e4f1ab09a1efe358a9a88a52393b825120648af932ba1dcd2d47a0ccadb0ba96e10d04d02afaeeee7c332560ebe54f7697ffb9a405398cc489dcf4812771731e9a39b375b37a35bdec4180fd0647f0daaad1327a7f1f6053125a8d64956123fa22d1cc2528f595465b924ed14142e97e0a92c34fbcf76a199c2fe84efb4cc7de2f0024ed5b29b0b81e2786652a8fceac787b23054466151600b5ecc47abd930b80cc78f6abf2811f6f93d33600fe3bf22bf8087d3d39df459170a7e7c26e3f143531208b2702002937eee2b5acdc2bda278c23455b14b060b01a8b9aa57e8ba499f0a38d429872e7701bd8b1f8161aaeec6f46d5f9c996f
d83053f9dd787a4586107204d5d0bcb8abfe043bdb5c01d3b8fd667e8d8fc6e8ea7a2e8eb2fa9879b9d2ddaad1bf8550c61f7ac853eb8e9b708eb8eff4cc7adfab147dc355d27ddb0ce3cf78106c871855e1f9cbb340ed0652e691ef657f5a19f2f3f710f668121dea55727497633773b5e0abd7acf97d313139be57ff556a728933b1fccf3203071ac494686343530ac8b5a31951a9ba86048870cbaf626417b8278e8382dabf680da5d8d9de5dfaf6ed54b321c794dca67c10bb0e7e4e9e4b5a2e2edb9f5b5ed5188e7694488b39da2e8a0266569dc08e6e06a68e698085326b6b456d89993e72bddcf522c1f70a1d986c54bbb8328893e56a7fb58ec162dc5b31fefb94c417ce6bda86125b6b0ef4d97fef83bfa38b901f8b7bdce5d0b27c841dba04a99b6b0d88a9d5ae387f193bf4a40e2b4f301f7e63195a1102ec9f5779c9cdac0bcdc0c04c318a848bb018903e225df771fe92bca9b592681f584b9cb484eb2bf6cfdcc616cd08e16ff306b67f09b18279f3ee8fb30bddff62251452482b25980a08c6fa1d8d3e0118204269323e61f43e513f14c6a46a638a1159abe7b1acacbfae6d057e7eebcb03562aba7460a66fa1c547e857b31faea87a6e028fec4d3f05550e5e7af60fbb6e793ecd9bcf85b36a6244995ec33a85d627d9fdf47f185d4ad6fc90af245c6ba5b74bd69e28d29cb311da691308e7a89888dd54b8f4e760c8b809ef1a821507ba26dbdc411af54fddd9d8dd36062fa7f39b4b8293188813d7d93f74a7eade8b8132ab6a393fe4a92ee3eeb1526a0dab793ba41e6e92d9").to_vec(), + } + ] +} + +fn sample_cells_from_matrix( + matrix: &[BlsScalar], + dimensions: &BlockDimensions, + columns: Option<&[u16]>, +) -> Vec { + fn random_indexes(length: usize, seed: Seed) -> Vec { + // choose random len/2 (unique) indexes + let mut idx = (0..length).collect::>(); + let mut chosen_idx = Vec::::new(); + let mut rng = ChaChaRng::from_seed(seed); + + for _ in 0..length / 2 { + let i = rng.gen_range(0..idx.len()); + let v = idx.remove(i); + chosen_idx.push(v as u16); + } + chosen_idx + } + + const RNG_SEED: Seed = [42u8; 32]; + matrix + .chunks_exact(dimensions.rows.as_usize().saturating_mul(2)) + .enumerate() + .map(|(col, e)| (col as u16, e)) + .flat_map(|(col, e)| { + random_indexes(e.len(), RNG_SEED) + .into_iter() + .map(|row| DataCell { + position: Position { + row: row as u32, + col, + }, + data: 
e[row as usize].to_bytes(), + }) + .filter(|cell| { + columns.is_none() || columns.unwrap_or(&[]).contains(&cell.position.col) + }) + .collect::>() + }) + .collect::>() +} + +fn app_data_index_try_from_layout( + layout: Vec<(AppId, u32)>, +) -> Result { + let mut index = Vec::new(); + // transactions are ordered by application id + // skip transactions with 0 application id - it's not a data txs + let mut size = 0u32; + let mut prev_app_id = AppId(0u32); + + for (app_id, data_len) in layout { + if app_id.0 != 0 && prev_app_id != app_id { + index.push((app_id.0, size)); + } + + size = size + .checked_add(data_len) + .ok_or(AppDataIndexError::SizeOverflow)?; + if prev_app_id > app_id { + return Err(AppDataIndexError::UnsortedLayout); + } + prev_app_id = app_id; + } + + Ok(AppDataIndex { size, index }) +} + +fn random_cells( + max_cols: BlockLengthColumns, + max_rows: BlockLengthRows, + percents: Percent, +) -> Vec { + let max_cols = max_cols.into(); + let max_rows = max_rows.into(); + + let rng = &mut ChaChaRng::from_seed([0u8; 32]); + let amount: usize = percents + .mul_ceil::(max_cols * max_rows) + .saturated_into(); + + (0..max_cols) + .flat_map(move |col| { + (0..max_rows).map(move |row| Cell::new(BlockLengthRows(row), BlockLengthColumns(col))) + }) + .choose_multiple(rng, amount) +} + +fn bench_reconstruct(c: &mut Criterion) { + c.bench_function("reconstruct", |b| b.iter(|| reconstruct())); +} + +fn reconstruct() { + let xts = make_xts(); + + let metrics = IgnoreMetrics {}; + let (layout, commitments, dims, matrix) = par_build_commitments( + BlockLengthRows(64), + BlockLengthColumns(16), + 32, + xts.as_slice(), + Seed::default(), + &metrics, + ) + .unwrap(); + + let columns = sample_cells_from_matrix(&matrix, &dims, None); + let extended_dims = dims.try_into().unwrap(); + let index = app_data_index_try_from_layout(layout).unwrap(); + let reconstructed = reconstruct_extrinsics(&index, &extended_dims, columns).unwrap(); + for (result, xt) in 
reconstructed.iter().zip(xts.into_iter()) { + assert_eq!(result.0, *xt.app_id); + assert_eq!(result.1[0].as_slice(), &xt.data); + } + + let public_params = testnet::public_params(dims.cols.as_usize()); + for cell in random_cells(dims.cols, dims.rows, Percent::one()) { + let row = cell.row.as_usize(); + + let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap(); + assert_eq!(proof.len(), 80); + + let col: u16 = cell + .col + .0 + .try_into() + .expect("`random_cells` function generates a valid `u16` for columns"); + let position = Position { + row: cell.row.0, + col, + }; + let cell = data::Cell { + position, + content: proof.try_into().unwrap(), + }; + + let extended_dims = dims.try_into().unwrap(); + let commitment = commitments::from_slice(&commitments).unwrap()[row]; + let verification = proof::verify(&public_params, &extended_dims, &commitment, &cell); + assert!(verification.is_ok()); + assert!(verification.unwrap()); + } +} + +criterion_group! { benches, bench_reconstruct } +criterion_main!(benches); diff --git a/kate/src/com.rs b/kate/src/com.rs index fb69f619..8b7bc839 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -21,7 +21,6 @@ use kate_recovery::{com::app_specific_rows, index, matrix::Dimensions}; use nalgebra::base::DMatrix; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; -#[cfg(feature = "parallel")] use rayon::prelude::*; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -516,26 +515,19 @@ pub fn par_build_commitments( let start = Instant::now(); - #[cfg(feature = "parallel")] - let iter = (0..extended_rows_num).into_par_iter(); - #[cfg(not(feature = "parallel"))] - let iter = 0..extended_rows_num; - - let iter = iter.map(|i| { - row( - &ext_data_matrix, - i as usize, - block_dims.cols, - BlockLengthRows(extended_rows_num), - ) - }); - - #[cfg(feature = "parallel")] - let mut iter = iter.zip(result_bytes.par_chunks_exact_mut(PROVER_KEY_SIZE as usize)); - #[cfg(not(feature = "parallel"))] - let 
mut iter = iter.zip(result_bytes.chunks_exact_mut(PROVER_KEY_SIZE as usize)); - - iter.try_for_each(|(row, res)| commit(&prover_key, row_eval_domain, row, res))?; + (0..extended_rows_num) + .into_par_iter() + .map(|i| { + row( + &ext_data_matrix, + i as usize, + block_dims.cols, + BlockLengthRows(extended_rows_num), + ) + }) + .zip(result_bytes.par_chunks_exact_mut(PROVER_KEY_SIZE as usize)) + .map(|(row, res)| commit(&prover_key, row_eval_domain, row, res)) + .collect::>()?; metrics.commitment_build_time(start.elapsed()); @@ -844,9 +836,9 @@ mod tests { proptest! { #![proptest_config(ProptestConfig::with_cases(20))] #[test] - #[ignore] // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { + let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &metrics).unwrap(); diff --git a/primitives/avail/src/header/mod.rs b/primitives/avail/src/header/mod.rs index e33fd5a1..fe54bc53 100644 --- a/primitives/avail/src/header/mod.rs +++ b/primitives/avail/src/header/mod.rs @@ -453,7 +453,7 @@ mod tests { }; let extension = extension::v1::HeaderExtension { commitment, - app_lookup: DataLookup::lenghts_from_sorted_by_app_id(vec![(0, 1)].into_iter()) + app_lookup: DataLookup::new_from_id_lenght(vec![(0, 1)].into_iter()) .expect("Valid DataLookup .qed"), }; let digest = Digest { From d83f039cf8b0a4539e08d8c0836aa47448611ee0 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 26 Jun 2023 19:15:19 +0200 Subject: [PATCH 60/87] Add benchmark --- Cargo.lock | 4 +++ Cargo.toml | 18 +++++++++++++ kate/Cargo.toml | 10 ++++--- kate/benches/reconstruct.rs | 52 ++++++++++++++++++------------------- kate/src/com.rs | 22 +++++++++------- primitives/types/Cargo.toml | 2 ++ primitives/types/src/lib.rs | 1 + 7 files changed, 69 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e2cd3f5e..a6ff2b80 100644 --- a/Cargo.lock +++ 
b/Cargo.lock @@ -954,6 +954,7 @@ name = "da-types" version = "0.4.4" dependencies = [ "derive_more", + "hex", "num-traits", "parity-scale-codec", "scale-info", @@ -1723,6 +1724,9 @@ name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] [[package]] name = "hex-literal" diff --git a/Cargo.toml b/Cargo.toml index 9f652415..4ec7cda4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,3 +23,21 @@ sp-trie = { git = "https://github.com/paritytech/substrate.git", branch = "polka sp-runtime-interface = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-weights = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } frame-support = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } + +[profile.dev.package] +nalgebra = { opt-level = 3 } +blst = { opt-level = 3 } +dusk-bls12_381 = { opt-level = 3 } +dusk-plonk = { opt-level = 3 } +dusk-jubjub = { opt-level = 3 } +dusk-bytes = { opt-level = 3 } +rayon = { opt-level = 3 } +rayon-core = { opt-level = 3 } +poly-multiproof = { opt-level = 3 } +ark-bls12-381 = { opt-level = 3 } +ark-ec = { opt-level = 3 } +ark-ff = { opt-level = 3 } +ark-poly = { opt-level = 3 } +ark-serialize = { opt-level = 3 } +ark-std = { opt-level = 3 } +merlin = { opt-level = 3 } diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 97737b59..8a0689ac 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -24,7 +24,7 @@ sp-core = { version = "7.0.0", default-features = false, optional = true } # 3rd-party derive_more = { version = "0.99.17", default-features = false, features = ["constructor"] } dusk-bytes = { version = "0.1.6", default-features = false } -hex = { version = "0.4", optional = true, default-features = false, features = ["alloc"] } +hex = { version = "0.4", optional = true, default-features = false, 
features = ["alloc", "serde"] } hex-literal = { version = "0.3.4", optional = true } log = { version = "0.4.8", optional = true } nalgebra = { version = "0.32.2", default-features = false } @@ -33,18 +33,19 @@ rand = { version = "0.8.4", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } serde = { version = "1", optional = true, features = ["derive"] } +serde_json = { version = "1", optional = true } static_assertions = "1.1.0" thiserror-no-std = "2.0.2" [dev-dependencies] +criterion = "0.5.1" +da-primitives = { path = "../primitives/avail" } proptest = "1" serde_json = "1" test-case = "1.2.3" -criterion = "0.5.1" -da-primitives = { path = "../primitives/avail" } [features] -default = ["std",] +default = ["std"] alloc = ["dusk-plonk/alloc", "nalgebra/alloc"] parallel = ["rayon"] @@ -56,6 +57,7 @@ std = [ "hex", "codec/std", "serde", + "serde_json", "rand", "rand_chacha/std", "log", diff --git a/kate/benches/reconstruct.rs b/kate/benches/reconstruct.rs index 184dd30f..fa84828a 100644 --- a/kate/benches/reconstruct.rs +++ b/kate/benches/reconstruct.rs @@ -1,8 +1,7 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use da_primitives::{BlockLengthColumns, BlockLengthRows}; use da_types::{AppExtrinsic, AppId}; use dusk_plonk::prelude::BlsScalar; -use hex_literal::hex; use kate::{ com::{Cell, *}, metrics::IgnoreMetrics, @@ -20,26 +19,11 @@ use rand::{prelude::IteratorRandom, Rng, SeedableRng}; use rand_chacha::ChaChaRng; use sp_arithmetic::{traits::SaturatedConversion, Percent}; +const XTS_JSON_SETS: &str = include_str!("reconstruct.data.json"); + #[rustfmt::skip] -fn make_xts() -> Vec { - vec![ - AppExtrinsic { - app_id: AppId(0), - data: 
hex!("1470af08b0ab8ff9d79a3b6402c83fd92e4e26f6d9c60ca360144939362192441ae63d4c94a36f5e151d719366b9c214a9ff240ff6cc9385d8fb95acf51a8e6f07255e08cf3e283e14dca7aebe1e5afdb0502a780404702bd93711305375883738300450de7a8d8f98ff961bd887db94a83d06ccf622201f3c73a9c317c214ff9bd76eeff7ce6bb6fa4ce552800b57207a3738ed78540ff5a089f76b26ca4f4e9fc4968e75fc3eb9d81e58ec680e429741840abc372e226ad785795a5bdea186ada577ff610fee6233822513e98ce5a0e462deda92cbc2e576c3723c0e86c4a838fa55db0252c2c78730d3a4d7746e2fc54bde37a0181a161e09ec093391f3a79a5fd4f950e286aafb8168bb74d59742bdb74feab83d6d353d65a30e0c16793ee614ee1fcfc20d63137681315245508e20982133ebfa3bbc58679cd2e3cc6ac055474f826d8824d1d9e37fef906f3d145f4864f79112b5689341df0b410b6013974a5fa8b3ccac836c6ffe060915c5fa74ee0d4b15cd43581fb7d1ce2dcbdab4850fd4145598a031b8994bcceb12b6b7a8ca3f5c7aac39b8d42a590a5e82631872a4f637d4106a3ab293aa84ff076d4859eb164ed00f727d").to_vec() - }, - AppExtrinsic { - app_id: AppId(14183), - data: hex!("d48abcb0d4c6acef7119109d66187fba860fbd07283895235c5a60a987bdcf5125c61f5dc93d82daa5701c53e4a87461662d09e3919e6ae29eba8f907a223f5243111c4627f29bc8eaa0556f819d09aa8456a94b0d494d4bc3472e0b34339334d890c38746d398110b4bb2b830f533c544a96bfcacd40d9c02abccd2832e3159ec3dd105d6aa7cf0a15a43af1f1aea9a88ec9a2fc2f6f371811837486202ead04d77ad5c75c2b93f3cff29c990d90b151d0c8c506cf2d9ff9d0aae0e564f6c07b46a58b14d75b54365c03e4266f0c96c83b92969f109705d1000fc634c934648d90c9c73ca7fce164b4855817b242ca7448b50179f9d167be3ee27cdb6570cdefcb2b8cc6521d094ba74fa2838b1df6f48e044d013e88aec33de9a7f66bd5f2302ad298581f163ab843ec3a0252c1a6ed40ec5c15f28e63ee2c36821c61ab60e433ee7c8faa662a1a225f29dbe99af00d664dec4ad77a174b6f8ff7176f21f6953e0034ac66aa0a46bf87f4809a68c4d02fc63e9ade900119fee98dda988e6dd72a1fcac98e43bdfd1cbff48781918aa7ef7695e949d96c0fcaa65959ea3c24e3f66410c6f6e0d932aef9ed823d05bb048f93773e10e807e5e862dfb56fef4e39640e0b6b13e8ab0345b60ef0dbe57d289896fd002adc8a43021a629023116094020b2ce6aec1c1305b3124a42750a5be72577756e2a5306d0d8e8f0b1226216fc11a49cede06352bc
c58bf854970a2ea3da95fa334c6e34bd6ba181ff2aaa918263210e998a7101f6cd111c827d0ed220328594688a9b011bbf753885e2ceb0f6974e1371c4655b10f38d81bfa8d092c13f8c5e4387d7ec09ccebf10bd2ebb82de30ff67fb1c493fdec2878fe0cb3cb024dea02c3caa8a82a8be85a5ac904e96c22d0c32bf7a5ea3c703d168ed8049cb7d4cf8f612fa7576814f4e09db516e97e2c82e8eb6976bdde44dc11a351a72cb8e9f6a7f14ca56c5b192a6c889c5d02137ad5a1b83ef1e8ffb1917d98624253b3fcd38afeaa7cfb0904a2ced2dcb51af8fcc9e6733368e18e55bc7f264e1e915b1d10772013e508b72a9def320913a8e6787523c69db034bd99d70fbdf8bfc4d1c137d741e9e0e4109546586601681815c0ea679942c0dfdeff3ef4ee285313d826076b9e84b2e17a30f274a7cf97665ab56c7da309d01191d5fb52c45c036025fd75f56353f6337bea19888d8b63e47de8a09a71793af2d370dbff6010ac6d26fbbb1f92fdad47446b60eb4d1cc04ebd20fe545f4c61f3bb1f0ba73458392259ed4d2e1a18acdf158002a72ff91f6fc690f5f7d1b9ce6bf1c91065169d6486db016ec10a8cd916089a79e7fd44ae6530144df23ed367e2c599dff0ef14215bf7deaf63911d453fa0a3d84d3f7319eed77bfa9e2df0e2a658077569a66bbd71b7ff856bcd9bc089e2ccbee7d7b12ce48c496b18673125ca32465af19796ee0edf53dcff48911fcd09af6647a081cb3126118b6974b905067e8a4985dec6f289f64abd6feaa975449df12119675981e603f9897876449faa6fe81a6d96771f5c35664017816fc0c953ff1aa4087385284ccbe9e7ae068da5ac015af82687dcbdc96a10e929a2ab40dff231b7cbbde59b5004e0150cbb0c6c7fe1291835d7e876dba7a31b0109ba76f8a9df4fd4af339e762931f942a29f593f61e12d407545f6a404f6e6505a348616f97fb0baf033b450c5bea2321b22eca83a8750efb98d1336864dec21ae9ba2b1b8b5319e631f9dfbb455dae7aa4e0e25a2cba68e14db323033eb16ed94614e85fe8829b35cb6f5422a7374a4cf6e21ff1445f28f89179fa75947b106dba902cd744d326cd26b71fbfac0f46f4cf26425f67ab7fa5cff0d9dd19293f54ae9a7115b2bf955424a93ab0f8067e55df600ad9cb49eaa1b9f40447f9896b75eae0e7d44ce6631ead3a39c81b17c24782ad015ac2306a75c80691a0f881787eb95029f3f66fded61ecbaa7b13692e3e920d3331afbafc8097028feea068e7badda27e45cb8422ed045474a83e08d8a9869a1f87bc45fc202fa2fc8cd0343e5d02fc1880c7ac717240002b171a23589dde0ae710c721b61fd418a3ef349b6c0b9c9abb9d134bc7b621b227f3e2021f9c95d1ad0bf6ce4e442f98b55
115a73c934911373a4fc299979a014b61b5d88c8346829fff83cb3c1ec89e67d43be2e2aa80e0a85f7d434c47eb2c85af02084c2d2c90b972807322f45513fe7df4436494b161fe15a7e9bf8393ec9fa8cc0235a64bd29c429b3fc871cfc13415df3b9fe05748eb7e205c0d7e5").to_vec(), - }, - AppExtrinsic { - app_id: AppId(117134629), - data: hex!("9e6e075b63217f5b6cd1de4f824c49ce9123ea0a1307ab6a4a0c7296e9affd35073784c001ab15cb826c17ae606ade3b937beb3ef44187b1f73e1451c1a52d36523df6aedf40b835132ef770e54bf91511990da102f2fa54219b8633770865fe92d8c4b041354444f039541741445cb90509251f6fa377aa74bb82748da8ca2a6e1dc86164c9a01f0c3fb93d095dd999a0d66fe07dcd5eb9065ff0d227bcb66ff841d0bf0a9ce80647581c458e3bf15916aa1345c1ec99b140deaf1c6cd29f0c57a956dd230b8958e0d1c94e92568619167bbc06e0548298a24889421dada1765a1642bd84753e8da9155d311234e674bdad61cfd7a64b165374aa4687e7dae1a1f95a3cb2697dd0d8363436253b7acd55e853210519f81bb19ac55ef1a6d0a8a128").to_vec(), - }, - AppExtrinsic { - app_id: AppId(117134630), - data: hex!("488945bd0005807499f3f3fc0b395b607ed35d2d4bc6ac8b9ab0ff5cb36f58fd2237144c312dbc658e11fec7990febe8ffe4373e33bcfb5189a690b11e473aeb6d57787ef6ea0909e7988d993e583f589b31e8da63fe014db6d1fcadc4a6e99b15d21cfa5ff00cd93d89224b7bfccf7cb44f9b727bf7994b849300a8a4254feab27c9fc3918e4206febe64daa2b5f715fc6763d4fc1ece9be8424ab1db4bb843d097f66568101e586e47b220cf61a0ec635e0cb4490abaa4fefbdad6588eb3e670d1037845257f98971e014f9079ce507f660bf27d25704908dfa2520a92dd06feca0d8737a7c774ceaa1ba9887ff398da09b21bd78fa8dc835a5731d4a4914eddef16209d14e319a809306b62180fbf8d6fa5662e4f1ab09a1efe358a9a88a52393b825120648af932ba1dcd2d47a0ccadb0ba96e10d04d02afaeeee7c332560ebe54f7697ffb9a405398cc489dcf4812771731e9a39b375b37a35bdec4180fd0647f0daaad1327a7f1f6053125a8d64956123fa22d1cc2528f595465b924ed14142e97e0a92c34fbcf76a199c2fe84efb4cc7de2f0024ed5b29b0b81e2786652a8fceac787b23054466151600b5ecc47abd930b80cc78f6abf2811f6f93d33600fe3bf22bf8087d3d39df459170a7e7c26e3f143531208b2702002937eee2b5acdc2bda278c23455b14b060b01a8b9aa57e8ba499f0a38d429872e7701bd8b1f8161aaeec6f46d5f9c996f
d83053f9dd787a4586107204d5d0bcb8abfe043bdb5c01d3b8fd667e8d8fc6e8ea7a2e8eb2fa9879b9d2ddaad1bf8550c61f7ac853eb8e9b708eb8eff4cc7adfab147dc355d27ddb0ce3cf78106c871855e1f9cbb340ed0652e691ef657f5a19f2f3f710f668121dea55727497633773b5e0abd7acf97d313139be57ff556a728933b1fccf3203071ac494686343530ac8b5a31951a9ba86048870cbaf626417b8278e8382dabf680da5d8d9de5dfaf6ed54b321c794dca67c10bb0e7e4e9e4b5a2e2edb9f5b5ed5188e7694488b39da2e8a0266569dc08e6e06a68e698085326b6b456d89993e72bddcf522c1f70a1d986c54bbb8328893e56a7fb58ec162dc5b31fefb94c417ce6bda86125b6b0ef4d97fef83bfa38b901f8b7bdce5d0b27c841dba04a99b6b0d88a9d5ae387f193bf4a40e2b4f301f7e63195a1102ec9f5779c9cdac0bcdc0c04c318a848bb018903e225df771fe92bca9b592681f584b9cb484eb2bf6cfdcc616cd08e16ff306b67f09b18279f3ee8fb30bddff62251452482b25980a08c6fa1d8d3e0118204269323e61f43e513f14c6a46a638a1159abe7b1acacbfae6d057e7eebcb03562aba7460a66fa1c547e857b31faea87a6e028fec4d3f05550e5e7af60fbb6e793ecd9bcf85b36a6244995ec33a85d627d9fdf47f185d4ad6fc90af245c6ba5b74bd69e28d29cb311da691308e7a89888dd54b8f4e760c8b809ef1a821507ba26dbdc411af54fddd9d8dd36062fa7f39b4b8293188813d7d93f74a7eade8b8132ab6a393fe4a92ee3eeb1526a0dab793ba41e6e92d9").to_vec(), - } - ] +fn load_xts() -> Vec> { + serde_json::from_str(XTS_JSON_SETS).expect("Autogenerated Json file .qed") } fn sample_cells_from_matrix( @@ -131,18 +115,32 @@ fn random_cells( } fn bench_reconstruct(c: &mut Criterion) { - c.bench_function("reconstruct", |b| b.iter(|| reconstruct())); + let xts_sets = load_xts(); + + let mut group = c.benchmark_group("reconstruct from xts"); + for xts in xts_sets.into_iter() { + let size = xts + .iter() + .map(|app| app.data.len()) + .sum::() + .try_into() + .unwrap(); + group.throughput(Throughput::Bytes(size)); + group.sample_size(10); + group.bench_with_input(BenchmarkId::from_parameter(size), &xts, |b, xts| { + b.iter(|| reconstruct(xts.as_slice())) + }); + } + group.finish(); } -fn reconstruct() { - let xts = make_xts(); - +fn reconstruct(xts: &[AppExtrinsic]) { let metrics = 
IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( BlockLengthRows(64), BlockLengthColumns(16), 32, - xts.as_slice(), + xts, Seed::default(), &metrics, ) @@ -152,7 +150,7 @@ fn reconstruct() { let extended_dims = dims.try_into().unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); let reconstructed = reconstruct_extrinsics(&index, &extended_dims, columns).unwrap(); - for (result, xt) in reconstructed.iter().zip(xts.into_iter()) { + for (result, xt) in reconstructed.iter().zip(xts) { assert_eq!(result.0, *xt.app_id); assert_eq!(result.1[0].as_slice(), &xt.data); } diff --git a/kate/src/com.rs b/kate/src/com.rs index 8b7bc839..72cd6a1a 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -150,19 +150,19 @@ pub fn flatten_and_pad_block( let mut extrinsics = extrinsics.to_vec(); extrinsics.sort_by(|a, b| a.app_id.cmp(&b.app_id)); - let extrinsics = app_extrinsics_group_by_app_id(&extrinsics) - .iter() - .map(|e| (e.0, e.1.encode())) - .collect::>(); - // Pad data before determining exact block size // Padding occurs both inside a single chunk and with additional chunk (if needed) - let (tx_layout, padded_chunks): (Vec<_>, Vec<_>) = extrinsics + let (tx_layout, padded_chunks): (Vec<_>, Vec<_>) = app_extrinsics_group_by_app_id(&extrinsics) .iter() - .map(|(app_id, data)| { - let chunks = pad_iec_9797_1(data.clone()); - ((*app_id, chunks.len() as u32), chunks) + .map(|e| { + let app_id = e.0; + let data = e.1.encode(); + let chunks = pad_iec_9797_1(data); + let chunks_len = u32::try_from(chunks.len()).map_err(|_| Error::BlockTooBig)?; + Ok(((app_id, chunks_len), chunks)) }) + .collect::, Error>>()? + .into_iter() .unzip(); let mut padded_block = padded_chunks @@ -836,9 +836,13 @@ mod tests { proptest! 
{ #![proptest_config(ProptestConfig::with_cases(20))] #[test] + #[ignore] // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { + // let test_file = std::fs::OpenOptions::new().create(true).append(true).open("/tmp/test.json").unwrap(); + // serde_json::to_writer_pretty(test_file, &xts); + let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &metrics).unwrap(); diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml index 55b4560a..f011034b 100644 --- a/primitives/types/Cargo.toml +++ b/primitives/types/Cargo.toml @@ -12,6 +12,7 @@ sp-core = { version = "7", default-features = false } # 3rd-parties derive_more = "0.99.17" +hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } num-traits = { version = "0.2", default-features = false } serde = { version = "1.0", features = ["derive"], optional = true } thiserror-no-std = "2.0.2" @@ -23,6 +24,7 @@ test-case = "1.2.3" default = ["std"] std = [ "serde", + "hex", "sp-core/std", "parity-scale-codec/std", "scale-info/std", diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs index d5b340bb..534da907 100644 --- a/primitives/types/src/lib.rs +++ b/primitives/types/src/lib.rs @@ -18,6 +18,7 @@ pub use get_app_id::*; #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct AppExtrinsic { pub app_id: AppId, + #[cfg_attr(feature = "std", serde(with = "hex"))] pub data: Vec, } #[cfg(feature = "substrate")] From cc358fc80406fac4f5cd3f2756561f90c02b521c Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 26 Jun 2023 19:17:57 +0200 Subject: [PATCH 61/87] Add benchmark --- kate/benches/reconstruct.data.json | 135 +++++++++++++++++++++++++++++ 1 file changed, 135 insertions(+) create mode 100644 kate/benches/reconstruct.data.json diff --git a/kate/benches/reconstruct.data.json 
b/kate/benches/reconstruct.data.json new file mode 100644 index 00000000..dfa6a80e --- /dev/null +++ b/kate/benches/reconstruct.data.json @@ -0,0 +1,135 @@ +[ + [ + { + "app_id": 2867178220, + "data": "94461b692904e3288a40893d750f842f0f1f72e2d1d305c0e53e20b04abcb9224aa4c91c1dc391f2702ef3a0aab4f3488945bd0005807499f3f3fc0b395b607ed35d2d4bc6ac8b9ab0ff5cb36f58fd2237144c312dbc658e11fec7990febe8ffe4373e33bcfb5189a690b11e473aeb6d57787ef6ea0909e7988d993e583f589b31e8da63fe014db6d1fcadc4a6e99b15d21cfa5ff00cd93d89224b7bfccf7cb44f9b727bf7994b849300a8a4254feab27c9fc3918e4206febe64daa2b5f715fc6763d4fc1ece9be8424ab1db4bb843d097f66568101e586e47b220cf61a0ec635e0cb4490abaa4fefbdad6588eb3e670d1037845257f98971e014f9079ce507f660bf27d25704908dfa2520a92dd06feca0d8737a7c774ceaa1ba9887ff398da09b21bd78fa8dc835a5731d4a4914eddef16209d14e319a809306b62180fbf8d6fa5662e4f1ab09a1efe358a9a88a52393b825120648af932ba1dcd2d47a0ccadb0ba96e10d04d02afaeeee7c332560ebe54f7697ffb9a405398cc489dcf4812771731e9a39b375b37a35bdec4180fd0647f0daaad1327a7f1f6053125a8d64956123fa22d1cc2528f595465b924ed14142e97e0a92c34fbcf76a199c2fe84efb4cc7de2f0024ed5b29b0b81e2786652a8fceac787b23054466151600b5ecc47abd930b80cc78f6abf2811f6f93d33600fe3bf22bf8087d3d39df459170a7e7c26e3f143531208b2702002937eee2b5acdc2bda278c23455b14b060b01a8b9aa57e8ba499f0a38d429872e7701bd8b1f8161aaeec6f46d5f9c996fd83053f9dd787a4586107204d5d0bcb8abfe043bdb5c01d3b8fd667e8d8fc6e8ea7a2e8eb2fa9879b9d2ddaad1bf8550c61f7ac853eb8e9b708eb8eff4cc7adfab147dc355d27ddb0ce3cf78106c871855e1f9cbb340ed0652e691ef657f5a19f2f3f710f668121dea55727497633773b5e0abd7acf97d313139be57ff556a728933b1fccf3203071ac494686343530ac8b5a31951a9ba86048870cbaf626417b8278e8382dabf680da5d8d9de5dfaf6ed54b321c794dca67c10bb0e7e4e9e4b5a2e2edb9f5b5ed5188e7694488b39da2e8a0266569dc08e6e06a68e698085326b6b456d89993e72bddcf522c1f70a1d986c54bbb8328893e56a7fb58ec162dc5b31fefb94c417ce6bda86125b6b0ef4d97fef83bfa38b901f8b7bdce5d0b27c841dba04a99b6b0d88a9d5ae387f193bf4a40e2b4f301f7e63195a1102ec9f5779c9cdac0bcdc0c0
4c318a848bb018903e225df771fe92bca9b592681f584b9cb484eb2bf6cfdcc616cd08e16ff306b67f09b18279f3ee8fb30bddff62251452482b25980a08c6fa1d8d3e0118204269323e61f43e513f14c6a46a638a1159abe7b1acacbfae6d057e7eebcb03562aba7460a66fa1c547e857b31faea87a6e028fec4d3f05550e5e7af60fbb6e793ecd9bcf85b36a6244995ec33a85d627d9fdf47f185d4ad6fc90af245c6ba5b74bd69e28d29cb311da691308e7a89888dd54b8f4e760c8b809ef1a821507ba26dbdc411af54fddd9d8dd36062fa7f39b4b8293188813d7d93f74a7eade8b8132ab6a393fe4a92ee3eeb1526a0dab793ba41e6e92d9" + }, + { + "app_id": 3120385832, + "data": "978c8270499be4097e6873e513e863f307dd58362e735f8c63c43636c8d301fbc007fb7ec140abc2f1db6d90410e2a3a8fc2bd58cf2aef3545144acb66231943f7b8647872aa2869010aa7ba6a018239b3357574df6f831d9f0f4e6f2859b6387ad5faf7a736d9a88d61495d55091fea38480e956e5df3704ce43a0502ca21584ee570c7cf350f51b613419af2da7ef852a04d3663cd44d1ef07d61f8179f42d999c82c30b87beff1859375401b432cb9627a3d66bfbf32394824a1eaefae7792d6332c711f2ef7cf00b5b0c4b95954f510b9fae330751974a01563c74e6917c2e24e74ee74e35aec557ceb8a69b1d4340efb86acaac7f16d5eeb76126b6af0a966ddfdfc81149ecea188f7d519b50c58a82c2bbdd7d88b9285374d7db22f310a8488f21b238f6aebc9eb1c868b3ab5ba7026cc0c4849e6eb2c64c0df6684ba3523243ca63e044691b0f3e7a02aa4e751fa7fd4e11b69b6b76f302f1a82f86a8bda291875a5f65cb13d60f31a177f68eebd7703fdd6debc299e4f03ca40d0fae82ebb62d644d63608229b831c1f484f8c3c104a141f67d895a95cb4270253021e502d096f9bec316ce7add2dc733ce8a936189e9ce58cf2b6f4165392e6fcf6e162f0813ba7d1f5cc68b033418881c804f2c1988946b141a85212093a01395baa62e611a04169d5c1e03817695edc2f66190da077ab5b6efe8f1a20a294ded7334a054b22f722a7ace2d7b2a111819eab01e06447f2e1567dcf4de32998b3743dd64307a967f9c4d510a4fdd858f0394d4d69d4b7438a7c429c9d1d88fd9bbdd11e88e2e3aa70daa971b933d05c2b7931e3ddcf602c89f55ae58e5a5fb8fc990b9a722a73bfd2b33bc215dd2de3920ce335193f51bdc69f6e1f2e627558a8bfc1acc3e4a4df7ecea489c43b6cbf747f975f9abfe82403ce9de4474baa06296fe71db533ca049d2fa0e29cd4dbe857520cc60209995e333a7ffed2cc8f5068a283d672394d4a5b1a06861a233727e8ca55abe385efa82
65e10edb95ceff8b92860d6b9af584e0bcdebb019e471250d39e8dc741ea6aa9b041046102fe5ce71d732f3d86cd00199311964bfe0364533c2ee34bda9fad96d3965ab021bd4720542aa6992faf94745b37c03376e3b284c98911511cc487cfe6bb53223a0cdb45f615f8ca3f5354f2dc7124f139cbd5a0987a5085c9df8face25a209f10ae522ad32e3f286edf400288cf1b5dfe345e7e5d3a8fa9930e79921b9993b7e507537357a9749e3bf41c9121a34cf335d835f6a792a005be878b5e03f56dda628f5bcc20a79e4c0cf5991125274163fb87fd7aa9ab80ff8a232fa7b00f347b0d8d35dd95919bcb9448a755de2469da82fbd1189ab297b23e7e610a42de808dfe3985622f2aeab646f2467af690d3801272995573625fd2f579efa632a1c0f008ea17075e61a9018e9daf0bcbf5338e10217160cbf0f628c2255948cbfa5667fb8fd1895d5d354b8c09ce97a0658153c20136dfb04f71c2dc3fbcc60192428962552e637e5a8b2ed68d8c35ba11749f28f67bc99d83ae04d65c0b0117da75d170fd7c10d06bc86159322a2ccf129da67418ce4ecbd4b635c95941e9f9840fb124a0be6a94a4c1b7a20aeda0b5c23f1c6fa180eb3dacad9df04848a38ef687b4948e8787b2006eb6f3ee9ad666f9d3fc8953e3a94d44ce35ec66a9fb5489e5fe01b0016ffa1078ebe888ca282152155a31affaaf76f2" + } + ], + [ + { + "app_id": 3570869364, + "data": 
"e9fd48e7a698af62c4c88692b9bcc576b31b048b393d9eb36be9ca767b58a05fb486d05f15f803b0967418ca49aada021e29c52a3550b182645222d5118fc5fc80b30d9b496143c79f5ff67ca959dbee3624a8c6ecb0cf4352e01aeca6b4a738d9ec22c732ec9c7e74c3d0270e6cff9c30bcbbbb715c7b6c73991c3f6b5894526b49bfc9ca7db2cefc6c369c28f6ee213040f1787e2c2c9751e5c40b3c386ebb7c6268b9d6adf2f34df24ca03e8021b894463b1c1e93a47574b5c001fc9f281521f5ab905c9cefe593d30c16071f2b4fd27d3f204854da10a37a378fdb999e9cd97f22079a2ceb00a206c370e5df3e78e0a1b4ef9c5c1933817a4c9280826e35f1f7ae824482c4cec068d19ec80e735ddb94a6ccce117fd5853bc061e71ce04789b9235cf6314d05ad0a17ed2dfa3ce3d7bf1466a6a6b1fef798dc1408cf2c51eee6e5de0e4f12f105cf9b99d0285e48fbfa9330a0b3faa4cbd32829a1cf904400c77b47508dc68f43d9b7066dd41d0697b2dd6a5e2dfaffe17a6884fa58fc383f5e070cf6fe33afaf1762d4f0e204558444f51f6d71faedf49bc51f64744cfdac6dc2fbbc3dbaf7bbe7638b27bbd5f559d3a4fddcfd04cb5dc5ca16cd5dee85097d61f7421b9e86155b4571fc0d91a1a634e0f3b01e5fbca9de94873d8905bde6c54c619ad15d5aad3238db60093f43e383a028091c4c0d27b916ae6509461b51cf276787c04c28bba9718b2393a7e7c6b93cd89ff5c0d547df92bcdf0155ddea23fae8e78f040082986fdcf45ed7f4c1d29ad19d746e139919f8a30423224444225dcddbac71c012b21f791d42f31ef287d1cb7f9b6c44f4154875c2146830ab48a495a836cb9968b76fdd11feaf5a3dd979e7ffb780c6b9e018550a7532cbefcb65ce1dd0be2afda3782346cd0a239969805d541c5d329d5bcbbb3d3e0576940ffe7124c56331749432ad719f721eea19438cd3d8c4c3f9ef59d3e495293d061964e10d26edb44e94ab74fe6fdb0de1b51f7304d76fd9f77c1d50dc51d14a94d2eb6784de3692f61073c4725a2f" + }, + { + "app_id": 3698898287, + "data": 
"1e7edb5645169db65b5a4333d7e24a7d5b6506e775b18765b3edfb523f9c3a2cba797479cff44fa3f6e2332b1a473cf7f2a859aa404d3a777580d9d11f0566366b6247e417b6c57a9c1cab0e64f74411aa7539002e3af3c64c8af860778c6ee51926a3d377e1d342aed4c2e01d061de793d96a1b01d78a106d44c4d57b3db5228474c5059ea7ecf735aca67bde00d6b3e69d9b3cdddfa918c4772287ab357abac966e789ddf139d69cdca3807cd38819d7c253868669070a34bbbeac4b1ef92eba5974110695e8eb19c5e82f4d0dd9da0a06c7865566ab9864cadfed43edda9a2021f882fcbd84f71049cdb9cc2e420972c83ede19e9459b8adf268a721ab1f1c8664df331f729ec69fb2b79054401c414b5dc525e4dd981483225ebc1e31886711fd504f11ee09fe9e33ad513f28923476f42b7974945623d" + }, + { + "app_id": 4086489077, + "data": "a2227840b7b420b07595ce1508a118714f1e68ae67441f09ca2dc106f8b07dd537993827d3d269663330112b01e315c77bf001dd16eff454c9d05cd647acfda96c9a3fc8ab2ae041cf94b74193defa170889f467d994999252c35235e2fe55da08b9288f0d58baf742343cec985a5f8858c276bf3c831b7a999203667be28406e4dd791322e9d833c5da477085be2f1e8432cc063ed018184acf6613efcff7b2ff501005a54abf6afb0d9325c25771e09229c1338ae88506271ba9b7e448c3da7fddceb6b0f694cb3b39df2243dcaa727acd3024c5ca8d3e6ec25ea78377a4f48ed78886da41b59495b959ff3f333c0a8e265a1a2fd79a0e28af2d41b59abc632ff7c9a39fd64d53298e27506eb9022217af934b1c7c90ca2fd9de1912071f80b748d3f13d9f8f2bfaebe468775af842212c44010d0fa85f1d3202cd922c1e12b6568def37b71e66f4eb20d36de2a27ba398911a6e49bf2540c9d6055f87630711283d9a1b6debce7a1e4658251faf8763b9b1ff6a8604ddee4395e3b73afc720617c69d4906a0202e6bc0aafdab1737b205614e0b3fc65b0c068908ff4c890f24dc5ded60d1a73e5380998b171fb66a8be4cb808f6aeb98548263b5804335c630d40335da18072d34206ac845db8736016a2ccd4d0a6b4846eed735c55c9b8458ca40cd3b72ce1110f2701f10bf4b6c0337a0df239762184e02519e217780c9866749d29bae993681694f53ba170c0f7911acc850d3eb145f119d978026878d598357efb0122fdcc381eb54508d1deaa7c983bb7b1aadce3eb0e6d5a9e1c0eea7e39190f52050288853f61b618445281af9c93c0912db987210ef2e156ad136fd3d5e54af173f76aeb1f65e8e69d4e923a06b1b6b40f89104cc06c4e9e27266ecc1df1964cde62c1e465a2103b556e2e977574a68a8a96
106b196d6b21d278f91ea56810ffe5759ad1f5e174756872fe96408d4b765dd9609c91f86916d07095d9ddb631c41846948a779ffeefb39cb290da757edbbec73b28f86086e25b7b46ad428138ef5021201c4cf0ec57e4fbc8031e79ca60fda331e077fb015be6ebb382a098f85fdf15867a8a85ca896cd23a90ca7a7650cafd46a3089e8edfb22ea890551fba73205de8d07646995e6342d0ff911edae53ec050523f6bd6e080b76fa483d57ed75e825b2fa669bcb68394a5a3ca21a426ab2ca241beaae7f1d816b2079f60062ef2141cc86ba23edb39e19df5e2292a97e4a291c2580e1d8ec714d1243beda5fee6c46daac4d64ff9addba07a3268848ae90283e9ec10b21776aa261ea6f562a5bc69aebc01ba869d7b6b20ff86e2bb76ea29902c8725f5e5bb56929766798622671d4e9a5306e948848b701b4bef2d5b9a7a7ab88a831fc3ded843149c7c802481b84388aa6f78ff9923f5caa3b0ca9d157b9a72abae3150d3083e4291b81671d2379bde07865244aeb0023de0bc2845ba6c090880720c1d272e63aebeb5db8695840cd26c979797d8fe6d859eb7a37bf7e362a79a31f44321cb2d6f3d9c44109791deb40fa4a480ea0a1813b53525f24ad26193b6fc632f51729470de100ba90d27bfc5e05997a86be4bdbc03416d2ee4c4aaf9acfe0095a8d165bb799f479d2ac0d22ba211f6c1f61f99d26d333843baa7975dba9247f682bb2e204a16acfea127f359050818df01ba7f7619440406c0a4c8645421de5eb0ea9ebe70f96bd2efcb1218aa300d2ed7fe1c53dcfbab1619959b8d6a5810de8c5f30ebe1260a3607c2e3b249a97fd464edce42e0719a6396efe7b591eacfd76b1425de327882556b7fba82dec0b2ef6b1181ec13e4acbd007bdb4f17f2b0249b7cd660b00352f464a3cfce1a34c56a0602a9761f30d550a135a5326ddf1effe19b1587201f2bf7e3a132ccd16280695c5683b55703b1f8a4ce89897d9f9549c5bf53509cf4d6a43aabce807be0469a8c07125190db84f7f740f26438519bba0e05fbd6c987a42cade13977632d9d831a3a0fd04c3b588310e4db8599019573a2d724927154dacb3179178a9417f1b04e53192f587421334b31341cd77bbb2d6ff50e1b65d4d57b869cf8abda3764462a52b705a14918737e93f69a17433dec67bea6d1ecb91645ded5e5a1750a4ab395f4430a432538f38c9e50941c8000a2951727d9a523a97310bafd77af695fca81a2b72fda797537a9d4cde474faccb009a19c3a4ba15be4a1b9d49029abb94c30b20cac72e00abc2e42982f432b18622ebd67b13d9e1d92d3ceb50305bc3c67b896306a433a9457921c85f09b91e38cd1c50d03670c39a8c8c924848505e10202e6d4070875bc31a6a19c07c056520425b6
d0fdc8f828a004e57dee437a21cca505970bb7468418309c8caa63554cfa7feca46878ae60b3495b8eb0453302295561de840dc0aa9b6fa9136494a0243c995fd571cf4ae1e30d37b8a360faee63eea920186b297078ebc58e65b6d62977787da91c2ef99bcbc3f50abd680f9d712b5472783d9be572c6b49317046eb6e6f8fad8dc9755004e17b4c6fc5c752e27f46a863508c0a2008462436e3fca4a144e04d5a32132c2e99bb49aada2b8fe74af15060724e5005c26d76faee8be01a05cc5f986888c1e21b3d70e146abad8d9478112250cfbd91f7675ab7c10f1ec7f1c3e625d75ba8047cb73e534e4a53b09ab08cbe3b1cdeaf6a714da5657117a9e666cea5065fcf0a24f046e12b53b2fe206decfde30be5219b6c6e38354f31b78070fe01044a70d052b7522bacb070d3e4262a7fa3f7321797792286870e0b0e806ee9d4eabd92e0ed9193b7247a5c0bbb025435207ed100e26091e76fcc809d1efc5a765b2154d1d70fdbb5c08805bb200e047a" + } + ], + + [ + { + "app_id": 912747417, + "data": "5cfa2b9e618e7ae0cfb2af0e6ee9f33677e3b4e513efab8a7bb4c9befac1953d72f41198959e47a1b199a5aa213baeaf070fa517a92611a9e7ead08295c243f025ceb58da6678af48eb5743d6c3964247f2028e4ab0485df1c12a897fb98c411dba7414010b2f14ebbb16882bee720df5e55f73b628d7855d008c21e7788e289bfd99c3427a849594038e02a2faea45bb31d902876bae22da1722262c1536f6d42aa839e29ebcba7c6835c0bb47d7651e1495bd62c3f01d93c23778146f33feb8f0660623141e2212b2d15a3195410654f803f0c765378968aa111e70d2febc426bb14290188a67d5733c1630da41d932f48917b5e3e3ce05370d2de8ca28e8b970e9bffef2f135f0195d9f51220c3a6ce5bfbaeedb9f5b8f14686bbc88249329e258ce2dfc4375d906496df835edb26e65be0fcfd93f0146116c5b555b5ffb6009407841e9880fb95303f08bef10b4c98a1d592979f00fd0ef451583a4a3c60a9f2e1b4c1084fec2ed38fa2a06ff65dc736d9037b99143ce264511c9d2b9e16190f5ec05c49c4ed2b31d8037564f91b876123a0a6fa4cf2f29769e790764c97027ddf51eeee803744e5a2597f317f4241e476ff642269f8e32d76ab610cf52a7b4b3970259d5f93a0f5c8c2a1c38a4bf74517d56efa2247256a4170b1dca0514e84ff7df496aeef0ca2823f7bfdad2346ab6cdd17121ad005de6a7e8cd19d6d02311f1bfe7d085ff468e92deaf5d8ac27b5458a6205e2e2d899334578be9aeb2d8597bde5c4d4111082996c3b95759a681d45143ac3ffd88cca866d60a2b0301dc4636733cd0ca21eb01c52b99792397f69bad37ec84da9f3cd00
cc6f9489c6b60cc53aeb20475ce1d9a58aeb1bd5471203e2810d386edcd04a40f1dac82ed5104bdeb4c30df5b8a459d5ba00ff90ac9e05b487f7eba36860d6060a0d86253feb044e830c94d16bb0f6d4c153bacb3d541ac17f4d9d8bd46d6e367158d700e5e76d5c1f08c969b4c4d1ddc8025169f02c2412fb46452d3d729df59a4843b397e0c6d8421f87dbc4a065c2bc4012662b8e24e4c60087545f2e08a6029c07f9f579cff6d127d3d08a7b9bbb92e2392a92cdcd353958b148734cb209d1470bcf6fc01f97fa076acab2f698477d058b8048a1ae15b0afb89d7be071a9d2f636a1c65337ca5f603e6ec4c115f59266d12ecc8a6e6df351cb74d93b2479cbb5e2f0a9e42bf8df7bce0d4a8986328abf7792a141d160a177e5d9c9850d041225565bab3874675e578483ea12be82281d93fb8a879edf9f4000c3d682ad7bc123d3b7579b" + }, + { + "app_id": 1380428063, + "data": "7b4dccd49bb5dec165febc1915df957be3392886b2f8bea481737a397c3fed7c3bc593d52caadc83396c80cb755d3c0aa302f1e433e92b394509c380114a58aa25e6e2d7b609ef13e6d31a208ef0f881ced3cb0541e423cf4c401d1736bc31d978b52d242ebc906d59c996a91fd6916e095f11bea2adeb6b5c6ecc59d6e82e524376d7a9132cad915ba56ae17c854aa83282e33e726a03012843c943f82ec55180a54b6de9bf2e43c0eb6f388e9faab5d18d81aab1c5ed87f30467be76098a70570148f8ac020379e67521d02fae3b2a06bc22a422c21cb67aa675885373cf4c7bd2024378ee7a7329694048bd55fda1271d41bbff666dda6696b2ec823b43227184438a31adc2cb118e2c9ed150d64accd2efdc762bc359cda610aa55d9409f2fe910a1bb688b17e6e59d6f3743c6e0cc9c866f5b6748f8fd5e75517fca70ee4019ed04e0c6c11f678c61bc17d0588ac6fd1b5e9bafbf29ce8ffa20b1f39426123ca6603cc5dafda0dd9c27453ac026256368e6cce227dc55b58417939319436a1d9073c1b7116883c671503112f3309bcfac9cfbb3ae6f274ca24bf209721d4124e343f02cc8770affc7af5deb6fcff415a8ffd4b183386461b332195a08e3756687ef8cdad75f96277129fbf0d15ec574e75323adaeb7fa0bd0deab80163d21e5587a6a81f59ada826497b6ae20861304c8e3b3b1088bf07923515b37d1423d57e72f3b777d0a91259587e529b8c2b791089b3b31922c721f096b85f086d01448f0550f0fe7395dd8bb0c634e8210ac61f245dafd118f98838746c1dc744a4d957342afdb338191f14cd560ea41ebc8edfeac5761a9470772328666d8b744cd15bd47d297859dd8568cf56e8e4ed89a1c7b3361f704ae97df0990573770bb6a2a2ab04f89a34a24f7aa
b7036c2a2925249f7ca4900a55a3bd023be33c58b4220179bc87ce17855c9d70c3e39da0a59c36fcae1c49bd8ccd31fdd544c20ddb036c8cb9a1beb69228040f692dab92d40d8ea615c30cd44ad9b81b3ca9c3464dcf1f691fc44de5a4481366120dfd35936a1617b2ed92a6494cf4ca18a81cc99268d98dec85e0516df4e3dc8203371f8df4f38e037fd6433dbe10bac3efa09af4c478d5a2046869427caa1b058d64486bcffa4cf6cdd160174f46d35a31cf123a887c2b9c5c630cfd7d04fe6fb176e66b1eea16ac61421d274f7b6299b761361129fdbebfa9de6ae542de0dde62608d7f9954af8a5de5c33c4af138829183e64c2f1841caf3ef1b14d100f61742d12c46f667344a88cd46ee303fcfb5352e974dc7b9649e6a1f3419124c9b319cd847dc14f21f850e47fc5d27f6b1a42fbf188c151a237833961f42d5f90a642dffe327a0ca28fe73aaaddd538a4ec2a20e096e65c7959f3e10b8d7ec44a0125642c7b206b4f65c803d793962fcccf6e507b77c064bf7bc248e63333d698427d060f9a58eeafb579ba0382faa00db1be78a4cc4d508d51df1324861294f0bb1114b5995965f36a776a291a1e418aa12250664e8e69e0eea73fe73bd0912b357aeca72274a257487b250948f74a5ec120cc758864aa8962ba983f53730149df35db56e03f69e28816241c35a37eb7260b278133bee303b36f1c07d1bdc91f81e040cfd4d68987e3a702836eb6987cc1e23c260b7bd39f0431490fca7af8f0ef8d9d65bec10d24ef8144fbe95d4efe200205f314e460edd99495cabccd70bc4e44e983f0c73a013b667346c77eab2f309e5c580445c3db0dbeb8e49dede41c0fe0179fe1af106ede3e67e020424e48debd72a1fb95c79339e7db75fe8bc6270be72c869c6fe2be4f0caebfd6b685833572256dd7c070a4f1f06b33285de122cce7b1ef008001ac91bc102a941163041c70ae936a4f6b30a42549a73b8a1a1c69735d4ad652ef059dd3db665370e98d9707c6850ae86608b1ebeb9df6a62db29b18244ae5d8601b935890b02c8dfcec2525990e3c47c06720dd95d05ca3544ea4c16e4fcf21010edb9c06e3f953bca4aded0145da40dc0381a6477efaca487eb761081cccf5a8d06ae0962b7184937b49674fc95b146ce53d63deb23ee5e895b2c6c95127dab30934e77d8bef399a9d7c925e97c71ff963e9441b7e28b683c31a7d39fa62790e7e9dcd4d9e4a7fe48ee46e7e4edfdaf91013299bed0585906f2089b3fb4b8d2de4d86c56a886a26a0db91d9bd2c3fc6bd34ef5e80ba77a41b3c1b094848bf97cf1a84fa8a3308840f8f353af16c67a0397ceae0c49d38d07e80130e94e33bad83739accc8bb94689a02b82fcfd4cd588d81877eb3168a7c6f15f0a72912a8bdfaaa
5ced46674cea37312f51b469dbff9f1de68b54eeb683fee40d49e293a7d3d209bec81cd5dabc8f0016f2199ef82817fd259c4381fbece2a95d4394b66803b275d14a0456de141825b8f41418c431dbcdd0d2814afbfd860a3398eaa2a5ecc389ddc4e15bdf437ffe25b9653d06179a887db739026bcd4c24892de19bf665a3352e405363ea70b1900fa5ccc38faed147ecec15ac0d49a0b289cb4ad0f64901f72bf75abe70fb59785095159f685981f5b935698c0260f977d17ebe8f30cbdc56d94f901b" + }, + { + "app_id": 1432371538, + "data": "42fcccf25c2d1dcefe12c2eb00b918ad31407c3e12d182bd133a254015a9fdd10c2cac3edfde06aacb7e5e4394e6eca9444679e615e048a887032618a9048a0c97b8b0c0fb0b6ba09e7f0d32fedeb15bc08e9f3e3001d83cb199d9214d3622011d0ae03bbd5ea66cf9559cc2b1210a0a5a92383cd97764f82a388eec8f3c481b50343c145bfe7bee56c91ec18aa712375bb68dcf074abde3c0d4d3d5db54ac28c1c46d16090ff3e45d304d0c9bdd2ad1011588fcfd5dbeb343e6f6e4ef9e4cd66ac94857d15f782dc215d5e54084ec11c2ea6205c9db736d46568a32dc77abc83a9677bf9b77eaefcc6dd5176208a20fd49c5c6bb080d3905be31ffeaddcfe2c6a69d18a938eb4639950ec19ba0c18507e8312f99ebb6715d0ff5cd81ab84bc7c602f51907cf8515af05ef3f058eee2983a2709e792b210e86311ced80d4f197f7799ce6179387ea240b915f9f3c83549d64227c929c272fc315c61c6ae42fb17103a8e83a42968d97735e425379204002a6756c42f2cf7b15ec4f47804cb159aac93fc442e6f3ef49df245bdf242e90b746b63b031618e26394df9598bc9be24a9207be2fcd92cea283660b68479563622245c5ca25413db246134885631be68a29ac83cdb85b133148eee299289518669c4f02a35240483294fe12faec9f89749d92fd70d203b485168403566dd356ebaad4943f7add5688138e6bec3da426d387ea8e31e0af41927649bed1933d8d6c1c9a0498f8690967481b9a91906721b29ae49a13119825d1871cc76380b7bbaaf88a786bfe7b86003598898e86d76bc02df9a8d8924f7315f816d8bd6c53934a30365182c5ddad438e8369abf12acb95f5d9195b958e464b9d895966f9cd134e7fc88a83fd871740fda0711975a2a5b508e4e68e0bda2d066e7817ab85791703be37e93dd1fd3d26418f64b314f49745706a85e967725dee51fe836ed76ed9324e5ef8c4e2c5b9cd47954cf0321ed056f644e79b9ac85d13443e925201d8b4c31b587372745b2516c58ff69f5b3ce6e9fbe480ba3903d5caa78932e108f8d76b6e579ab45244e46d5bb4c95a24a53e3b14bcaccf57a7163b94d6a0fa
570e0de957c08668899bf74c0dbdb85e09c1331fbabdb310017c3d4eeb51ec0bf6afbdc86f8bf05efe4f6e751549894618d434333756735b94b02823f6c2ae10" + }, + { + "app_id": 1892003797, + "data": "278917d5da02c127e0f2f2e85fc4f66284117cd946a837861bda5c3c92534cc64ce4055f54362f48c407cf31c2d9ab05cd77f4d91f45b74ac4b510b4025a19d3205a3d34aea51c0d59fdcd7c45d67ea5bbcc6cb4d0a7223eabfe7a64e4fcafcb6de7b03d0f5581355d54364f50e6d56cec51fb8c6219422392bcc69925eaf1dc5ab5a9be8d34892c0ba9ee648705bd33d7af403fc34a883f96df7157022a56c0fb49b207cec2e1d754c61f2fafb87697b6561d77439dd50a79b657f6f4ac4fb9f954ebd1284d692aea8ffa5021b3691bac24e905c1fd83e5e4bc8ef84a1a44c8090103f97cf44484056d76c5335ac95b73944e0aab83900771a4f162b11c1392993bdc7996dfa133fd194f443143802be3304cfff6c6b85452f39225e9556d5aa5d28b397110dec0e8be3720a395f35daa526e7cdcec1b1075a0946f011ca197c136ae84d768863852804a0ecb501030f229b38920079d22b945b8e4a03fb1155f5445b207ba4f5a1b1868367e1002a9733abf56b9bc7371ae03f0102defc16d5930df4f79236e04a21f8d9bc6d8a2b9b2ac4ead3b18fa6054638520a5a5b8464a39a890b4483c9fb0f7a90ec1680364297e436fca55442b123947d8849170b7a161922a053caf3d6fcd015d9d9447dd146e9f69f97ed5cfcf19d8be240f9c43356ac7933e32ea99bae100bbf11e2fa20bfffed312f41f5fa6f77f7c0c08b3ff3e7266e3ad4bfe3a61cf36e77aafb16b6f9a8aa0db2d0a390a01c119d05142ae50d24399ced2411514db0123ec6b49c612c82bd3c3936bf98fc0af121da9d1c4d463dba46d708960ed6bbb7670fd72689d41c982fe812a540f08c0c08522ed0cdd2116e3da3a67c48d35bfb6b0a5cbf8bcbbbf66d36cd21e7186e0ea154552fbe6b83743d5f4befeb4766d5a8eaea800085fe0d388ac835c6323a2928a33cb403b94e9ac70ef213307f8b9abf1e395a444b42a9ee6914e99fe8e41d9cb621c894ce6b334520403079731f7657c0d61c030b1f6b71b90398f36d56ce8783eef17d3b46dd9fdea0dba83ca4937c3e777cfd18109ab5708bbe6f8e53adce4b0837a19abab1abcc98f127e21cc9a72288bf4234196061c3be4406bd44554c7235184d5c61cb949b174604c1485fb306106e3399120b11d1841bbc9dcf30d5fd13fb27d8242d305bb1cb07dde87a42ac09b3d8e4c9326b02d388fa88600743cee6c5c8a977bb7d3c5bcfe325e481f138ae8465848c28b11d3c4b6e2a301d03c4fa94c67386cb1633f1db81f0d7113cc4d086af9
04d606cb35165968533944d63873ea90bebd045bdc00b81c1612c154b96da3183854fed595f1e0b2317e7a76ac20569aa0d5e15dc63ae652d0997536a39fc91d04a4cf3dffe15a5db008b32c7b4b1ccd8af252c681e9da330233ed5146b25c0c38bf5d76c63faa42c0dbb3a75c41b0f1c1c75c7d28bdcd5e2a53161ed48dc48da965e7efea9c923e9295c8a7ed2a62035310851ad87b2feb30e8a435341d60c7d253db83294f368167fa352333" + }, + { + "app_id": 2008533748, + "data": "b1faa5a0adcdddadb9b67466a2b20eb2176a22116b108e1428b03c63e9a6e2cf8954aca0ce0c2307f499974a9c688db73751a2ea43d27a480b79d0b8d6da977034d61655f9621336f7bdc0a2c2bb2937bc172176fbc7c04f28fa80645210e0a8e06d1a0cb6fbdd3169ebf5c28d252ef4633026de48d48aae7405986f4d63ea1bd50e250c6ff19a825101a1cdd2bf4e4910104e19aacce296741fb5c5c5525c42e18e12fc16d182d0e2ee47fbb847d2b28b15c8e1018213d080c209e67332291ad625bed0a226acbf8b43e91df995661d289284c5757cd85d36f99c4284a9338e1b09677a61d3c7ecc2e35354ebd834c52612f14accfa55159c5523c9ed336a45659c818c2babd6edc26b1f2c23ec7a6890847503b1b30b5774ec1cd654f10d6278dafb9004f1d2ad0cd6aa59d55542cdc41c3a333205c0e2bf0fec1e88c01c49febdd958b35aa9c489aebd36f9142f5eacb52da5567109f86ffae1259d5721228fccc7b2ce85db68a56cc500909ba3539bace80e62f1e8d8e623e8e4c0476e33b25982e7d738b6f4057fd7eb9f0244e66cdc69c969b8024bc9c2dd787b375e77abc63106a082412f54e3888d55d06a05df3b72b54c0740dd49a67b72ea8a71f65afeab0caf9aa12cf1e45cb6caba12ac23f9af2fc2e3dc650012d4acdf04f88d5a73a9b674a10a759b4b1221bc4bade26af66cef78edf53cba127db3ed91ee1e4b7bdccf64c4f786f09c6a967c4a1abea6ec8561af72248bd4b11f357d694c94bcfb0a8c032019cf6ee3e59f9a6713ffe772f06138df626f59843f91f6cdaa97e0ea05fe1b98b06248021d5e43c41223da46489efca630c9001d23c1cf0b6346a3c982914c6ef588920f6712817ee262f66037ec84601f03336cc98cdf0f42f82f82459e0969c9a1cc3648ad3f5cd79d5a6613e9669cf8a819a6248025674f0fae6d4cd0015bb2edb586885a22310f11d78767ea98320f50ea95edfc9ea52225362c98d1f55ca6f1a7462bf622bd2360882830c7a3789a191e3f5e75ad3c11ebb7fa0c48c5b3a99b399906b78d8ee3305280de2c749d86658cc5fc9ad75383da78f7993c41526f9954b69f84852b333fee6c1e565e2cfce9fc604be1b27f6587f791f786
fb56adf6a50eeab7de228e47c009f8b17308eed67dbc2174fe8b583c60fa420c364333e7d632403207e8adc53b1a0504011032ed7684f34fc330a6a05d249facd978035eb3fca09e138ca83f3f77fdb80a28583f9889879c0a81080386df075faa633ec5f4dd56a5f31c7ab1d159d1ef3f31ca9a6be3f11f793ab5e6e9cf9f2eaf393236115684faad9d1c78aee5b8c0007bc181a25e847cb5e2a6d2ea91a5d091c765eb7a6804fcb05fa7c96f5433a22b070c6f67080738f48d28eea7b705270e2c6b0dbc00826995c77dbffc2632169c1b4c41027b642cdd34d3699c5b2222405dbe276cec437be68eb097054029fc541d6d10803b53d5a5ca0bea0afd5f4e86dd4b82c5172954bca2d6e252c70e569084ad1bff86ce062f7e5cde981c6a95cc" + }, + { + "app_id": 2085427659, + "data": "b423cb3a0881e77e1bc7a4498a8a012b58437455e04f597b44fe4050eb42cc12d698fb1c8da1a88cd21597787da1140275a4604445f7cfa746885fdfea22d797b4a7d6260a707df7706f948f9da6ea4552d8bdc86d09ee2a4e2d8f2f5d6f524fde92b42451aa428a151cd69a035b84ec12113f2f9929b073b87898a0d9adc7a86a83cc46c35a154ce4171665430a55d660b79c550ff66766756a1c7bf368794814dd02d40e59771e1ff9c3d59f1c3c09e760a8ddf90300695ffabae00ab566fdeb538db919422251cd84bb919b8644d53e511e254e980cd7fa74b0838e9bb2d78d8769463be4e1753c8fb086bd14f4234baa56f6a416be3e088dc011a07827c81eb658df8c3daac3a505cae3f5c7e343e67c857c583faa8ef169246ccc1bf4d19eedeacb3a5f6396b07e4a986e77070771fb29ea73923891d7b067ce484ec88c02bd041525b028210f125f314d0472874fcd7ebff343ecfacbc1abb420385595ccc37968215010af35dce339391233d3a638ff8d592b6fc361899955691715535c3e8b82a2d4ab7320bf69e30841a0e7f5f41fcce7abe0f9b1ba151e2499e796170dcd524b3d81accc6f2b3e4bcfbb6d6e7e6e2c0013d56bbe1f688c9a4f05af430f201a28e949e98ff0e5a8f8c4a775b220860cac1f42517e3d706252ce4af1f86d54abbfd705cbc058683a4c3f3f162e4d879c578f3a0a5f9294cb75592a5a2697aa22cfab11c0775472e01a5c718d9e8f5e76d24980c041403dd66afb8ea589a9a4a18f9d8bea3b5ae07291b0c192318d78b6977afbb252e68ef1c23804ebb312c8876d1bfbf79ffb8afbce4da13791c2d0d2977f5bc415a381c6e41b06c033c2d61a43291aa41897ad2d27c1b7d7bc93bff7dc64df62ddcca892014511ee80979c5ad38de7a5607eafad746edeee2d9b22c005bf4b19be8cdb0ca9b1ef8d16a0c8b5f77cd9632f0f7d66a90051a8
7feb589b9cbbae18e9b15f3fed8d13d81860592f1bed54b4c5b25bfaf9a7a5c8466933ad30ae3b64aedb97b657afded76018d7b5a20509dbef416a3deae359d04a62912d1d6418da3e5a8515de5009bb9a2827d8219f8e09604a5e35fee7086a40bc34a01adbe48a05f1d8725669a0364e5acecd127a5871508bf8fa493364d14a5b5a1c0e20c34bb925adc99810d31a5c6fe5b82033502361dcae4675b59eb3da3e89c26db383bb383693994007464569a3aa7cb480cbe077129efc43f4934433aab11cd859d9dbcdcf5d5d33089194e7db69c6b0bf1a9699f60c0ef26757a474caaa95dd198dff9ad142bcb9df3c44fd924b4fc50e20b6aae359821a4e58f55601dc56ced35d272af769e4a20afb8e6f1720ffd2f8786ae72d013b2b3996cb4a72d9d17bb7097eb0ec173ade8c002c9d9adead8eaad18e80783049634b7ea7d6c7e7272846b7ce7baa206b2494cd9ea96cbf66ccfe980ea9ca5650aed5b5bcffb906f1a3a0fb32126d48563d6335bc13f2dc8d38926c5f12cc9a89d7870527d7382a8730075b564a4d195b33835578fb9e4601644912e88228365d6384012dfa3876b652685602d8c0e7f7fba4156a5c8132b902b2049bd53fcf9330c86bdcd2174c5ef3ce9c3363a1964510febd003b4a1c67b6dd21ef6915fe758ef8f59f663af837976a184230542935440a4b64703c7d9b28bc80f815dc9d79f2f8953cf1e82d6e5d31" + }, + { + "app_id": 2231607266, + "data": 
"cb36a6d8bfbf94e77e6bcb7b4244f36761b0b9314f598b2f68f32f7c1bd3372e09ea2bccba7edbe0901a7df5aa11373cd90cf282a5caf17521e144b17fd519287e86fbf8717fb40930fc1cf8da43d7433037483e884b691b1ad73085b751fe5c95f050fd0116493d6455c556ea0daee80d9ee23d51e023e6a2769df8b84c29cd36caf01631c65bce632f374b015e805b3c91f4c945cd7ed8ed3c848d4216b6022df447240c08ae5351bb8ef5ea557303c20be5159dc7e2ea96ea1a29d441c7b0efdf2f0dc6d701a2ea089652fb1501d857f99b88465f1f487820833ccbd48983e687e4c17e6a92aa90b7698ee1693ee5341f5763b2a588071f625d38f57bfbde989bde531404fe2ed8dd98a4573b4040a3d647dccbbacc2bbb34ed058072112fceb2e26995c7b805d969aada8d6d6527e97dd2b5be4b4c5ffa8fb8e0d88c82f2561985fd5141b8391266e87b39fe8ab0a4b1e762c5a28d23896128d2949c7c57ab849758089df909bb3a9dc43393d55ed074b4a34b5e05f87fe1c309722da96d8de725ac09af0f54b949c7ec8010d70bd9c8e6062d764b6519b9a5487947fbb29ecc8629eaf9616ce007c5739138e321edaff310ab36e69d3caac11d152ff18494ed02b455b9010b19bba14e9b811f8c8dc36de6bad6514cc33cfbb6aa85889c3d40d87a7a796f37d0eb4abf570db6379f46b1c8aa23aeb0504a7bafa06926c1e0d88c0d19703ac6a9baaa91556a291ca842082d8ad35be4008bf08c0e9240bea67f3f7a1f04854b9d09a9c33207fa8d2ad4c1d656a7679f9b3add654076791aec99cc96bf6ac1dd816a6d3f08f2f27bd993bf843ced0dfe4d9ca56c13b0b9d6e6c31c6a20a4c3012fbcaa6d016a22148f0b8a65d783e77f6963f91aff4d00803a420ac9263b121843a5a13ce30235addfcf62848d95279c150a6aa2e20280b4f213d536c8daa57b5b0b8336e82aea38265cf20254446397ebfd4889d2af263bd808370a1d6ca50e5112292e57147cd8f3efd0b6396ebbf537b497fddeb14cce474dff57c903bfd16882ac503d5ab31bc03081fdf58b5e0936699d0b2779c0781aadee4c50289364e3f75f8cf1c91888e05f62cc32b328ba95b0206c7ad2249f38a9fea7acacbf8c3ccdec2b069ac2ba96b624456f1f346f85c94b91a784355986bd327fa304b25839cc2f503d3909c86622f2bf75a0a01c52cec89359d339c82fc423cfb835a6a48338af0d9af723cd07c3ceb8ad13a5ebcb3d50206a4d6b63624a20bae6a641da1b0d526413e6ed900b50d2cb11a4cf69df0fefc1b9d3c1a90852e2d5c812cfa97e30185a6d14ef5704e985ef6046ded518125941d5b29c14e1854a4d6930aa59ce059d1dc6d91d47f5bb9bb9300d5d1807bf35fddce1943941b4508d397ab74
a90025a9d6d7a4ad834aa79f320f47c19016e92a87f6a6899458912d032b9103117501a1180a0140047b0a8012e3f63724d0ffeec9111a7cd86c4c34c604b4ada056b2a2aa1ec1afeadc7018b9fe3199379570b57bc74df9ea74f1605a4f5f1f03bad061e8c0f138d2a901da7e1c3066af53b0f6887180d8063d8b9e7d0f934fb5ff66da3f1db60d135aae045d161c7e415aadea944e6749f962fc01e6159b6c871ce6821ab857224ac19c494cc8e416d3b2eb69f765e05958fe0e465dabe0455a352bdc4f19ea76894a70943830285555de6584af15dfa4e456359856b65ccf363fa3662a06f4e464136cb9f7e03c6472e6da8d47dd0f66eed08427e1bb0ad93902643573c30669f4d2c2b1b5b01947144a4285936db4f3649ddb6369420147e1d6b0218356d4e21cb2372eaee4c72a014525ca8ce551a83c419db081bb02d9cd914d2729d0978212dd8f1ad3953a870dec7f9065b551443e5cce33fbeac106fb0203eb9de067f8bcc7185a655a00b3fba88e32f722e42ea47415cc49341346d279aee8d437d837cba2baba3d4e76ea3e10fd13c41f1ba34" + }, + { + "app_id": 3255057126, + "data": "41ddeb850e4f9faf47c0fd5e82f7c299964c1b155f6267263d2d0cdc90064ae55032039a78dd9a155294e4121522c57c9100a96c2d6d28950bd3bf9c0d42b0f843ae2ca9e6b23ffec85a03ccab7826ccc099faf5353b844aee914835c7d02bd2ad65e16e741030b724e5975cf566eac98954a85f2f0e063f93190bcb2f9f638a5c1146a68fe971bee1d07a63efabaed01d415b683a5d03c1019a546ccfad298058d4ed6e930c03242d543d30cfdc6b6c41228bf227246a63080bd43ee0cf6611dfd0d00576b4de146f3a1534c7549623c2b308880870954f577694fa83c0d2be54ff690a59e26971e33e796181108409fc93a7c748df8c715f2b235a83d77f29f1504327bf9ab531a60535f8d8def0d9e0ad80b730791f3c599c56f385e991282fce7ca0de5da6c1955e3dc835b22cb30b5f26b0cd80947b8027bad457d3d997f593f34ef3d0d1f741ce486a02b402d29e4dcba69629425ad2cbfc2fe9d49fbe33bead46aa4ac1abe8946bc0c3432f1eff098f83ead1e07a8d6e4b9540d93e44e85a9db2ec703f05b95ccadd35b2850fbb31879d54ec18b9" + }, + { + "app_id": 3506639155, + "data": 
"c892f9474e222d50d8333e51fb08c90ce69aa46a574ed5a49e1e4321b7db794cc1190bacd49770dac07b6d452e516e833797edf591c5241279ddef33fb7d7a7f43e418db9f1d53b3878f70835cdd83945b15308dd2522c896d51c88d8ea3a203068fe4e7ffacc30acb5e1c6cde84306f4a30d645375210faf3293559a51871e4ad56a6c00571a2f264b96531d4b3151b8b2976e0d5cb77669a9f1b69f08381c6277d3144f3af1ddf1b2100ffb00fb953a7981e1a6f853f9398eb4a3c0db0b313548e0de4be72f8f876dcd893aa567073f368fba508467081cd739b53f6b381ba0360d88576c0c72ef0ddf355b4112b4834f60932ebc41e083ece51cd11ec16fb127bd06f39a6335b19c9b1d98ce10ce619c7253229fbe34c9f24969beb3987a15811541fed99b5b862d38fc4976f5667cf1550f036c156938d2f6056008eb2defe3bb1fee5209b18cb3f66939bea56965cd86faec4dea9e95a1da01a6fcce6c084b0390c4b0df3ddc410f509dceab6da4264d2c86a024269bca8e4e620a3b907f7ccb2ef624afd5da3b0cc75152e47ad0e05dc206bd602d454323df624affcad72f8b50a67a11bf755c6f23e2af76b63e6b5d34665f481c14ef9f584d8abfefa49955b262984e0fb2d4da8d56a18f7ed57027f68c4abdc8d88a8c90af99281cfed40bb9381780c70626f1e89681401e1b66cdc4c23514f4cbcc2f43e7af5eb61ee537e58205ec20c480d0061fa19d34a8920b142ef7d8b83515fae7570f3e2a4aca0038a5c507fdd48ecd009df1477a7cdc74ebaca7e04ce62654dce38e23f2708e1b8f008578d9064933ec27c5fbc08a6d2baea8e09ac5bed5cb91c137a7186ece4fe0b94ec5bafbe8789b6d358fcd273131bd2f0d5738b133a5f409d515334e2782439f944428f2d729db223ae952ed8134dc4e1c3ad65151e1fffa7e0db78c24dbba3509404a2b58791ec47e5dfb48e21a21ff333d65b24f083c10317bca7a1ac57ad9a47606483140d5941e726028074b6068994ad2654022eb60793999a813eba274daab9bc8376c0e6fcd4fb625496b3462e86b00dc62dbd69ce10c4f15c1e812ce8bb083ff402fa173d3ed384becf2449379bd535d5ea43d616ca2a068af4352f4051b5997a5c839f09172a90955452ee316b3e3df717c6457b32824778bf35c725380014363d082f72dbd53b9bceae659e6588bd0deafdf8347fc73bd83d57328a89b258b29345d8c08c4e1392c33810562eaf328260ef78dc0c513506f226a3f6ab94f428d82230b862b5e571083db9c2400678087f36f88ac38563af3e77656435ca3f4b4eceddde0ed8f4b6d0097264895783ac25edcc93f84f21ce3dca27da11fda51636fe2f43eed3f44f725aae577c0c6906986ec58f6a3fd5c16777c574efc1f
f4b276cbbe7a55abbdb6b9994ae087e4ff4f51df5d6f7fb44acb13e6974f8508efc807b80bef8e59ed3a891994990a14688f3d1d037881d18ecf0c2279be68a12c7604763705123aba81ddd474e3169be6e67ab3f941fe16682509d38834405ce9ccc03a405ee8684285ebd51c09eb4d887e79aca7dcf1b8500bbbd0091d7762c995153b6577c33ab9c9f6947c664c57dfa568fb5b8aa496a3a2225c1da3477826cf8f862b09322b4aaf699b8b086b05e9dbe1ce8713cd88884254d42895d5e38cecb6ca1fa7a38b0ad5fa9bc4b3474cfcca26a366f14da79ffdefe412ff806ab9f16cb91db5e163107be002628aea220e6dc91cbc781a0bf74eb085aeef4254abef12c5beff2c6c743b86f17b959b7dd7509853dce39fbb6a7b5d820160afca76c" + }, + { + "app_id": 3828286333, + "data": "1058fd5c9bb07edcbd37a6eaaeffba93eefeba03a356ca" + }, + { + "app_id": 3885071354, + "data": "e615278c6ad2e2c95debb922a754cc4d0fb0a55f85de2554cb9c336c25ec9be2dd98c3bf3c4cf2321df996478e7d10c7acf12bb9331d3d0c7b953db104f0a33c6f80f3fbf5bf25570ad3fb6a7b9b02829d39e5749394947187f981914e8a194ec1b4e6dd053d25d8171eb3bbab2fa7b5aa3a3f65635108ae29255c004143d9f297a59316e2dd83ce16baf68769207ad0deada67c1af71d2eebc9612ce78ce8b5731e763ebe2a5a22282db99e11c519d30b745b749b43ee02b4f8d35b92650ea038870bdca34bf18898f9f2521351a247745cd377eba4d00de0a290ff86274b7f20e33c1b01839a87c96de6ca54cb96e3640f468aeaa927993176a7107f4124e33da379c706e4c4b38d1d1c1e112341b1424e689a10e7cc9626d90437fd07cd20fcfd3866a1c493b3ebe37e4471c2f9bc1957ddbd540ed2f83fea4f5f9f567d0fed776bf17481912b9548a23900b653388dbef4b13dbac490047d05a73d4759b148e490d842ebeddd784a4fa12d5d6e05d7664cd1cbd30a5a3e379a750b13c7fe4fa997a5ba038db884fc34a816949ba9cd6dcdb328a9aebc48bc78cedc89243f5300a06d218938f6dd2c6b7192abb22822a1a3554eb93e6631e43004aed719726741dbbd6349045ca08238dbaa47085573b7db35ee8acc33fffa860db2a7030d3fcbe59b2676f03560eb2844ff5d4e2bd03f4ba892c757824b917e6489d8690866c0abfed6457e0cdd10aca947be5ab989de3a86826b2b4a4efe06da83970196da991a189a1a1c3b779017bed6ad957baa979a3663a2d679116407078805b9235620d7fd8c81a889c6a184638c766c049a7b43d14532bc2c3201d1ed9215a1fb59e373f258499eff95f8eb7b649de3e07197db2e7965f44881fff435ff21370fce6
5e1b15c329bebbb53e4d161f7813b4a920bae532162ea69b06f15fc42a4bffc84e615b05ddb4f8d519a89531632e8ba37f3448ebce7ae3b725574e4e193e02894a511f73669594ffd7c2dc2189df40511d8a18b3603086b1ae6182a00a59a5175287967fe5abec83600103ab227734bdbb6dfa210b09b6abe200be870d0aef6a2d4482e73ba61a4c15f26f0dfd264067ca6feccc5ade7e4f9641d3f1a52fd746c021ca49962af73e0e679b46cd27237065dcef944c2842a7036752ecea10498d408b52e683e0f67a7f7ec89cc06132b54a7ad28678cbcc5f3bf7ce1cfffe3fd29aa5fb683eec929129dafe3d0170941124997090c9a2d473bc82dc5347caed90952a91520948c40ec4022b73e7ba47de045e02f7a138f602c9ae01f46c4274cfac6aba179ab8a2b3ea7a8e131407911b8eaae2822bf64206407e5852e7d27f8a4f109ed4602f0c0760568847dbc028f359970dbf3a4634628e58c562b4eda2f450b0cd0e6a3" + } + ], + [ + { + "app_id": 75043118, + "data": "ad4abd4afcde9b0885c9b62329a7e8a89c81c5782df83fb233e12832b6b90f612194b2e51a5d786b9bfc21410435db18e41e96eb2dfe67c969b86923476f86011de95c6dd18eda17b0e8011bc26843f07ae3ca7b481444d768869afda07a2cad07d45cd8c5d22654043ddea902253bab2fa2271fd13a5a04f396dd4d49135d8489e554517d1c352aa87fdd25bba6fa91d75c37a99560612caf1691d039c04d7e79fb6e2b741a6a80b5f4e1078ebcb1ed60149db232ba2eccc65a4c85c9b19a67e6d95416f1b5c0c3d002b99e8f9d321ea07e1e6ec818996d779206158f94d2a9935544b52128ad5ec7a6153f3e0c38f3fa48c4ae5c27f1c151642412813069cebf902b5015ea22da450cb87fd8a6ad9f40957741ac1e9587e40a3bb256110bcebfa6df404049b069932c53cf230293f0cece8abb3d199c5a3291deae4ac493b1d861bce6d5704ccf3ddc82ede8c60c187ca8c184bd94ae71f4f0daf5ee06055d6a36a37ebe447e915325c12bbb7a9b6850899846a539aa8e2c404e397d165a113e829cd0abfd5dbdcc8abc0ad92f9f95dee19099c680431dba6aca58688b3db5745084f6aab71716c49bc236f000c3c3282263d849ff262519151dcf52dccbeae964f01d49dfb659fa22e1f8d6ad3b85c3d1a81415470210db4524b1cdbe353cbaf28c0e9b7fac955a8adaa848c4c12605f26f2f489bd8aa5fc7ea0aa0788320e7c567aee8921c420b32d67826a8758826ce4821d22bf513c71b58af910607535ef9c42cadf128af49f1aff7fc30b9bd4ba5be66068e7e9f8af29d1debc37d457d99468297ede0c12a3143a5fed2f3ca6a42d3bfd42f549e5133ca881b46571ecdbc20af2e0c479ea
0a2bfc7e12761f8037671b5cf8c1ec5d452d6710a6160ace32b28051f246d4780d7f26f0e30bac2dfe174926685b2c8f523dd82e2a520d8a703d5ec28186cfcb29152f6a3507543d67caa3cf2b44781be11c8a6625e642c96c8177ed92e749a375f24882c894f2fa92b1ee15a90c0c4e24c9cf36453f7ad4d76239eea15a9d09c147bcf0e5c8e35443b445462ede3f6176cbf6149c01781a861f9bdde9bb292d794cdc97ff028c6e6ecde3aeb917761f758e2f5ffeeccec6b54ea8ed7c7d83c9ab6b352ff627d4ac271ca84b841c00cde81535e3c519262d0e2080b46eff8f2c806ca29d0d77c7af8e082cb19a8d86fa51cbd70b0bb687fb58ff8d2d2b4be4f842424c3468e6fdaf24794b1078c6228fa28118ac731056cf663d5e0f69c60c6b0891987d733ea5d7cf4da26d59b552f6202da64e34215df24b8f3b522f7771127a7fc1094476bb264f780e12f91592f4502d06dfbc9117e73bfa2b6a4ce14580ed281613b9d3ae4083e3d3b682807326367a94898d9991b65d428da8e30dda697b5d9d4eba36286f762f0cffe7c85ecfc608c8724530ce5f0efa2ea0aa9e04076453e5c48c3f9fe82b19be84efe2248c97019ecce62c6d264b59248cbf754230620573380c7dd6cf0a5f59288e4b3f4fa59a3ec4cbb5a7532fc7414fb685ca4ae2c3f694a9ce6eba023be021403f4b607e58543a63971c80bc433d63884fd4534c0c42ead9b46ee401bc64e96bdba077b353492c0ceccb44b848749452ebeb37e4d41129a957761809f2ac837722ab2cd1e66640ac2a7d9ca14a1be882005d1491aadac0ad4e8f22eadb6f3864b0a6d13d6ab4a7998b57cf061864f46b5f1f7488b3a09ebe58c7fccb658e1e54671039ccba41d62259185e9a7448f7e77b21fed4716dae5de6991d8a6f7a5f25c899dbe4287fcedac2b0826608aae1df7b7082d2e3beb685225ae29ac170275771e244cde347036058eee2c3b570e123b62098de94100ca9269decd9d0e94b7ce2c02aef3f2dd27c09d97083fff75944c80c7" + }, + { + "app_id": 237249942, + "data": 
"35b0da313cc09598c70cff589ac1c602da0d9666984b608fc85a5489ba00344280ef7d76d9c2eb81bc9b02e7bc4bf28c721990d8abc078f5ac586fffe376fca27088bac1c6517ab07620f6da071330d9bf4e099251552af717e4a62bde4e735365a0f8b2c08ff69c90da11e1a7cd28cfe5cb90c69d206fce9e93191d388bf57e7e1e76665b7b555b7cd83674fc1e240302b4a02a0f4a1b0fc3c0ab45677a29bbf699218d140e397e9ea9ebc81fdfaf458deae0e0b8a9299f8262852195e2ae385a1f4acf14536d9ad6c067cd64f2de0c97d2a198160b57d6f8f8fa5274c80490d3ccfa318fcf0312355e2d6e83a42c9525f970854d1eeff138b2e94efa5388e63b5edc5c3ba343a0708e2a422ee8db6906a2f78025539a1d3fb6e700c0d70c65f19d7214b8a57858655b653c7e62f0a77ab287f9489a6891991d575b4a136658d7233651a454fcdd7366062ac9cfed294a6abe5a6868d54487707e24be00dc5b4ca3ab3680055a62484ac419ecbbeace7848d34ce69a25d4c5706ef8409daa54ef22a9b374ea87a287bb9ccf69426d2617b2a6dfa21e8bd5b46ef55bb039662d1d198084d1f43f1a6f8ead73534ac063e3687458bfa130d2cab1bf8e271ad014ac41e23711d47f6e4fbc5fbd53940bb1d5add280f3b670a24f6b04c2dbe746bb275e1d577c65fb6a547333f31593f1148b4b51afaf60989f2001c30856ebbbfa5996b1dbacd67c9bb010d5fbf77098db3355ac631fbcdc62fe03e673486441d8f4311e727ae6c1e1019c2179eda8781a6018ff9bcc6038c01ef9e6e60b6ac47328a0201518ccff05907a6f8d296f1a4bf30b694847e912d2b08f026963e605afd0eb13010fb811ec564d3bef297d2a5ca3139df5217e0dfe9f9a9d4e3747db7b722155e6c06ff7d679fd17db6b450cb2313917331d3a538cc63d418f2c164d5d738ab9a6205045bb957e685917c0011db3c068ca51b869e057e1e93fe23535ed56b5eac99bdaf92bf938594c18a15768258fef654fc7ebfe52d4677d2a4418260c7410590ec05891c33203a4e9bbce9ad71d2b1f6b9d649ea1142a410b58bebed56f01bedd77665f2c62a74e488a044d68c6beed4e70a6dba62cad1bb4536277bd3627f832bd88230405615ae29620db210664ad3b1f18c6eac9ea94b34c371799ecf7b51a7dcbf2d2bf1702d7e5a5e65a32e2a8355d88521be6110dc97cfacaec30009081db04ae49aeb82c228e93ce24524d3bf1ccdfc67d466a0ffb742bcf53cc8cc8902290f9694a027f70f2fb1d74b21e65ce05ca8695d82c15835be722c6a8ee28da56110f89985d75766d406ee0a40fb5432131779e656ccd16d992decc2f417d633eb5601aed906ce63cb3461e97a03b2763ba818972fa4554fe0519fbb502795b44590
919ecae42e201c3cc3edf479f5e3e54031a81060d8687e5b88875a6c796c483ad585a04eab0de1c858133f5840479c44cca5679002f4585fbc7c429e7706a66f405721fd69d39241473f666df30d742115779712d63342ef197dd1a06dd7fbb96c53f46ac4e57a934a6b26ff66f7c73ed8e5c2d090d67648fa9f6f0fbcc428fac1b325f35d4b775206eb56cbcbb07c70635ae3d47fcacfe5864300f6d4e8d14a04d272a1fce" + }, + { + "app_id": 554175156, + "data": "b510a4a0d25905790b2641b6f08015195f6ee894520f62d6c403e3d406168a0e05c6eafaf69e05121f105226c9497525362deebc047c578c7afda2dfdf45c2784a743bc9f95146ec7d864ce83329ba12877389f95ca902392676ab1e71fedf3e6477d07a81d15e13ec0a05ffb5e1bdefdfa089cc01531a2159f78e445792591d6af4dab408f45fa8c79c5ec4aca39387ef7d78a80aa05b887407a204f4dfcbbfb335b75da9bcc29f738acec3d25bfdc133fc6b86696a891c1a0b4f7cf4c389e7e6270b75c0ed7461172d4479a5950f924aa32e5414ec164aa433c5dc853b73d243b794bbcb7849315b46c45fe1f73d93a682310dad5b587dc4634af5a06dbdc1c03d51837cfdd46b111ea396b6d3ecb417d1983ee1d126c7fa483d648264b6ec447b5eba1c2978112da2a6750b661bec69e95594bd0e19e9ff97942ffea3c5d74c224bc791592f20c6b8e05690f5305634aee99dc595f841043f8fa856938a70820b2188f8cb4ac7511148425fb3fdcd0dbb8923fec648aba3594c9d52d27bf58b0846370a69c00bdf1fa26085131dcf1358ec108529b07df3238dc37289609dbc4438ea2f9c70e392eb62176a479312f1aa43a7879b5f59e1c28cd3a840ccc0a2205045f236d598eb6c72e5ee9f5a14a44b52a60de58894ed5a21de056c8085391140be0a753a8915bbe0257893ef43960ddc3690732455c9c680e79645edb00eb0da29020207181a82410e31d566f22b09f064e8eed1c33b4d0a4d9c45e155b56a98fa7f76db4f8381e73d5cfaa5ba128fd045e251fb65fd1825f5cc709ee7755e794c21a961cbc751060ccbb26d036f4bee41b7726d394d660707d97aabc9bbc20f2a9ff274b3d230e8baa1c5fc897563c1c50b02decfcfe18046885e8cefd17a634fe6bb11f49a27e48ce0ecfca3b888810c01360eca6f2f10b6fcf9a3c7caf59ceb228b2e51e43d8e3abb7e50930ba5354074f3a060691b731eac5ab73f150e3c6d9458176871d15d9e884bd4daa16f9c873549e73e43cba6578b18974c7fadba7b09d17f703f05b11d94777c12269a57ac95aecb612496fd0490a4b7e5c368f3db9ba53e04066bc7b48288ab3586f1b0d3787fcd548e40ed72f5ea554fba9b20fb76c066c2bd4bacb2
2fd836ccfb497e49563801116f1aa2edc2d630020fa48a66c90b6e032fac03cf0d6c243e5b81225612109633f9c7678fad9f750913560fb1098edbde9cc325254a7e2f73e6b565a8a69f8542e7d69ddfa0f9bbbee515dd85f88662196810f39d8fb9d7ac1a452e7be0a3f80b90f7982db0b1f4e46d69b4a03fad52b16e8000b888a7834d760abd2706d641a05614fda3b615631f5de77a000f6a2f979053308ddc51e6c1a12162b7e7e92915d460e2357e527bcda314ccbc278bfaf01a1b6aa66b91b6e5bab7c84e9d6a1a83030fad2bfa37ae628576e6d9fb7a1ce87c765d18920654a6cabb2d0528204eebcc99fa60075320d1a48f9fa4dd082cc18688d76e16767e3b8c430a844d46b0ff3b039d9904fc94951424b55486f0cd0205b1047c80c229113fba5556b4ac78e2e1467c999c1829c1839cfebf3fbae4c962a8866cbe775c471930634462166bef749d1cbe8cfdce1fe1313870c4eb5c5d076d38b7aea561273104a4ff6703428b3b4669e6ddd5613eb4758577627cc43784ebe954f5d85a203e4da5596ac5238bcc60a721c1fdd55afc8e529b3da0d1e5b8130cb19fea58b6b9f68dc45716cb331cb1f26bcf6b8b7a9774dca19448a0a73eb85c4b0f617e267907e9750d4eeb2728d4a8f76dab2deddae0393825c005d5522b86cdc007562b542123bfac3551fae5daf55b78fcd9e5c617d72544254609bf34ea633beeb8bd6122123f1565bc65c1758abd62a947767b156e955cbc99eefb9fe1a4aa9a0513e9b88c7798b38f96810a0a9cd074eceb842c3440f2e001704979044cfec9154035ca1f82ac28cc0cc64d77fbd6732890ba73d86f73adb79f063e8f6c033f51c38e5d68bd8ae7b6f9d5212193249b24f6e7d74568826d59e5f87b3237a14326cbf5f24108a89fd6ba2922069c21b9e3fd6a60215bd725945bcaee6c51a22f9f30bfce0fa92a467f68b47cb253a6482dfa329ef8a6d3f6" + }, + { + "app_id": 652717895, + "data": 
"98362e720ac67151052183f17ba4eb81c7c0ba767aa1ac37ac2b399899ca50bc1566cb8a08a08ae1b26c7ec778013f8b43996a86f780ac9c782f3d1a434d28617b996d3c1a58f6af6c9a35eb66cd1bef82f4bca8d89b9a2c99635fdf230510a77aab0a9497d722e73822a226811ef9fc25827ec09beeb70b5ec320602359b9d534e124b595f6639bc156eb10bc219cdb92f82bb0efeba14047ee86c7e3f01044ef931d6a64af48f6bd461f9a20aa9bc5b2b19b5590cc54dd79d1b0482dfe787eee14a00bd448779403fe0ba42bc2f73b30effbf6bcddf1f715cefd5416e14ad5cbdbc7654f5d0ef37e1289da6d4c57b377dfc29e93afb45990b519d08d599915a927f05b0403d53c6ee925e01c8a9902cd2535916aba6bdb26a1ac1cb9d54e8bec3281cdc48b529e161bf3edc98c3f85ba9457db44e4060fea108c58158e0f2940e2ce9e996739438de52e59f1ff5fd3adc63cab4a475b532998e209d4720337b24b6a45e8152ff626e425ab3e7159d6604a1d342259838bbd50625c71bfd0d4f167ba2881d8ef2d4c996716b8fe6775f76b6d09130a3e3a175af3b35163a4279811c015a5c50b6331064d1afa5395cf4265cb3d8f7a921d37893fcaa06ebd4462a09acc6e2cdb747f4891b9c780e33ce2961d7b968a03bb64b2d832139515af074f45703e6159e90f27bf473eedb492a8691ec0ba25ddf529536eabb6ec5ca977d328c966cccf655d1341b2854df6bc6811f7b7763d254afb68bd2bbe06cd1988e6def8827fa0391a27c29bbd1e8443aab008c79f02ca155ba5a1ce8988bb5ad64e9679a5ca7010a7cfd3d960618726963a8d7df458a21a001f59f0f9ddc0ede830a3f5273d903717bb2789da219541c0c0c7d8091831376637a551ac28797ab11219f52a809478da5575109955bbf07f43132f150e3c17bdc2fe550390123c9631a81b449d393576e04a74718ab1f96edec2765ed640dfe4836375438bbd10d5903116244dd31a1117c6649df40c13bee45bbb6fa135e2e6fe733aae257b4ab9db874051fd9ac29dca7f8438c0ef2907d7d1353dbd0ef61acbc8f2794e083346e09fbd968a8a56a73756cf6b346872b781c6b1bb9b6801cff514a0e308895f73ce8a0468d7a64ee9e1e162002941a65bc8e46c07786cf66e51d9ab8777082d1e5d84117cb879f554d1fcf810a8ccffdb512e9ee177d6b871af0be3f9ec972c85ec4605ef06c6b2a760c679bcb5d269cb5bb63acee30bd732a153c817f1b2fc80188e762251215472480a7af52bc74004ab0cc5abf8170e32cd74dd37923ef9e3550a0ecb7c22b18eff4cd2e1876e688f2cbc497cc98fe02ab104c63eefc592e6e413917f10defb4d96f7cbe55ffdd91a2fea8c5eda3e834877bde98b6d81e521814401a21c6c5
7395de424abbad71a4fc21203cbf434dcf12d3f022f243b601b4aec2744d3ec9c758d02242075f9835c081221ac67ed8f201d19d87fe89e52d7df27323a9638048e343e03b08c5b9f3acfd29bd8aba4787ccdefb7f6e42ba2445d35a80861dcaccba6e24161122d2aa6610372b4b8080250e679eb8a1f0668a6f3e66ea21b80de687e3fcf6f0663c3815a1bac5bd044e90b0171962140e090961d2d8284f4ce7a52279ec098dd88fc2ef0df718ff5090ea685ffaa401f2134775b469e5a315bec4b11db09c40bc7c2592f0c057d8345d4b6bf9bde371750da6a04331f74c7f7ba57ec7c8df047570e65ce1923c3dbb3ab8034b89be882c75b20dc6102ef5bfa9c77ce0dbea3ffdc3ed32f294d31d1abc53aac14060123bcdde3f9e818d8617583afc518619e8b4291ae15cdcd85bf33781363f82e122eff90f80854f55d9f9bfc26b2e09982027c452bdf21ee8d4ff43681a2643efed43f79128f4607183fa31ff4926a7c2002e00a8bc3cc3c3e2fefbbb3bf0d9a263f3814bbdb2a7ff1162ebe656318358f93e732f440004e339751754dd470119500ea5c82a0245f1d2e21eb1a046134b017d7931d84d73b139e6e46f7d6380a9561f2c67a2e123c9a4597b617876fe1029ac12765ac8cb2efa1170c929af5eec39cb86849651ccb871d4097bd493f3b605f39daf54594b051b686492ddfc252cc3f79ac24246263" + }, + { + "app_id": 1104624959, + "data": "1113b808ef2675ef89058cf67411a9e8a9352eaf7ad5158e5e407efe02644403e78743feb61c1ac56d1ba824dd6c34c464abd7c6de0e5140e8bf9781c16a5314af77cfa2765d411cfa077ca3a3725d8efd2fba36521b18d926a553e3f51c76264d8ac65ee2d33ac48c29051abfc280" + }, + { + "app_id": 1460565589, + "data": 
"ccb41188cb33352b57c3edb78687167ee38c7111833c4b7faae71e905acd5437574268b1c425c194291249ff3f39cbd667fc65d74354a5ecb8792fb6a75e27cbed3862556b22332afb33ed7531a0cb9dd619f1b81cd6509f8e27ead2440f3294d601fc9f5969043d00b758bc4d98de01b4531a613bdfcf95683a2b3f1a67596ca14627c2cddf67bda02edfe92c0df03ebf2ecb42ac497f1c229f71eb7e22d9024d8c3e45abd83921151b1bc8ea4d8d6ecfc79de2539eb3db3eac67ced4294235a8c23b1ffa4f10a7baac071f0ffb20628ca85381dab76a99befa2e86b70b37e0215e942234403c17ad68ea32ffa41badb2692aafd1342f7d9b2fb7ef7019527eb073b92ded03e84b44248e0ba03cb86b4efef34223be43afb484522757ddc3eb1051ff245b1f39b543eb05ddbf7e91ba16b806294e0e242cb15cecb0f28f4df8f4fe535ba08dfaf03776bd53ee7f68970b52704c2434a5bffe7559f5519f10b31d5ca5eefbff534f6568cf8276d4d3e975d51a9189fff8f13eaecf5f65b3697520a1ff80d86a58f1e8a4ba73262534ab7f1b9d9b5b2238033bf1d5819d40c3f684b50f270a72f4ec5acbe94162b5eea346c6dbf6c1a679e502f3a4c8436a9febe31200f883d3c0d289d23f702bbf555fdb5cad3178497472417cba7fb70d94f5ad32b87b937c89f73731d6aebafcb5d54a61afbbf601d322dcc81b670d33cf9bb153c3ced53cd2ce11ad61aa267f703939562577517670808e1698e1f965b857b49a6de72e32d7dff3a8601f733b8e955575e5da6920ca1f3505c9d09bcc67429b4252c4ce13d798bf79083cfd7159d7a39e937f879e5016b67a218fb177232c910b6a3149f5259e7b58fd5ca85501e1f2e9d9079ce2791adfb2647d620c929782f45e49a3092c14dcf1fb9b0ea4fff5df65ab2a78f211f21dfb89fcd5ec1f181d821bbe12b347eaea677bf7058bdf8ca348100e18c09de851c27e6d589c8ab8976fc45aede667f6188fd19158f64b650a8a0894e9ce6f3a035cd333fa7d22b6eac56d380bff7e1614e4eb72ff27f5d579f56a85dffb1f98790e1fd769e741f99df54cd988823ac7e22f8ae9a9d1bb5dfcddfe557371be4b47b7ae6fdbc8a6dd4eb3b3be8f1bdbc90504bf9274ff72221102faf2a9e0fe3dd69f65d52b3ec7aad864e0971d563e06154c43" + }, + { + "app_id": 1611396816, + "data": 
"6c98ff14bf46d39786bc6f4dee9c80e346c67be1e8c295879faafeb9331bc6c137931fc937948941bd6fee58de2dd2cd4cfa2333bf1586fd477d9d02b3d7bde788d0a5a6d316cffc3dd5fe65612778bd7cbcc1dd9211130ad2a66005086e59b4f39717f7e43846da1f4a2d192f6d4d4ad431e3adcdf287222bdc693e913e02d62400dfaf4c6b3d5a635a77ff40b71ab06cdce7da303cfce648dd711e7362b2479c7af042be174ba1c60361daa3f1e01be588238190a3429a52655ef2840aa9159d98e27c155a4d41b2af8419b6c65da2f265ff9e653552703f59efb3ddb8b337b2ae62d3b361d3e94139b89e7d775dee9076aab9ae254868a2a06af8ff8291058e92e0bd68fd3f1f49aae82e0be37bbf0fa52e6101b606d41afe316b1ba77f62ba470292d09d514cdb115465d38249cc4102f9d50a9be8b26170d0d057c23d595a5d2e7d7890b6128b8b32da1697dca65169ca19c3d7a7976a9c0102460e69bf565a855e5f83d7e065b1ef48ab3dd61a881c5a574e4ee80eb9189645763e691e772517f36bfa9c42b0caec8b369102ffad3a45a541bbce1f19dfe3f570c690d3a4e91468b4077c47f0d15d412646218fe1b3a0a90c445e8b74c032844a032034c69c21a4a8cbc9fb6695ee8cbfa1afb33494d42fb39e3f62df00eed81c6b8806a7401c3294cdb732394bc5b4a3ac7c5a289abddd5050fa5b000f8f1dded1fbb477292188ff391944e76863992447061efba7f18bce2a194040fb147097f9820d9330e20a3e38f40a63ee36c5ae3b9cf2474530836fbb1317aadcc8e21d981989de0caa567213737db6255c6bc5a4551da1deee308e164ce138ba9dc57ac9be96ffc034533f07851c7ffc94ea54f530f1f1ebda9a5e126905d16ee69c1866df2a5d141795d3da86ddaf5d6b967a9fc2c78811cc74f30140864bd2a08c532ad22abc641da977b68d3c2d27c573756b443aab5df80c105d8ebaf20b22fe028862d5d8f92453e5c0ae810ab9e9177070d6b6e5f2e7df3a695206db0f8dc66f96cc1029489ccecebad45bbea56b45bc47052c46e60bcb112009456bfb09b11d21e117bd8cfe6c79648014b2c228226d99136dc5921ca5f29469e250fde6cb2bc3691a70ffc594aee440dbda4053b9fe9c99924c584ee639634562a213784ff70d3a9695a0fc0c8c1309f2a2bd81d869006e40425f94d68dac090453a7f6e93d5c5efe30048261e7fe7151455c44f47a94f8e6e9c89af288b84d397b2bdddd2ec304f2ba5c5964d48301e631f0153cc6562f25fd8a7d902d3a2038b84004e6647170546c63aa7db500097f79912bae110f520e5f8b3285bd4add7a13313ef914dd0221fd838ba9d0a04da705610462c79448f565276bc6e078" + }, + { + "app_id": 2209933717, 
+ "data": "579e1f4b7be2155fa1930b4e5ecdffcc8a9aa8f7e694ca4e55a841c6a3486b282364f52be970411710600293a55431d37f869b62542a50636874839ed86218fdf5696afc9fed40f220bc0c6063dc7ad58cc6c95bf8c627f9f15a2e8b0c11dec18ab6d80454d8e03480124250288bdaf5167fa5085198ac72d5b20a092a283bbb50aa44e28c7c627dcd22a88ca99c28b65cfd243280c3d9e43b67424a814cea9ccd3ebcc0ac08afcbdd0abe561f390cbaa92f0212db1f040ecaca850aa09053ba62da6927eba5cf7d7a85b22ce8bc29152680980a99900d255c44441a3f080d6a7a0da4912518279f55e97f4d8e4b1d5c07ba769f5f3c4d2dab8e34f180f07e7709e90059b855d876ecd86cd1551517f5eb45504a22de5261d6f83cf945ec2de39e00be5157238f2a3eeb4a333fcda94d9f4e6789122944254925b36134b69128dc5ec173d98ee9817f2c6a0713285af92c1ef6534a8c95515edd452b765bc61df32206b4b84babd73ca6e499cfeeb1af37f07761e34fccab94799d3c67290c78958b77a933f1d76cad17a4e71456f25819c6fe94759331bff7b29ad8919f935a9cb55dbce91bf7d0afccdfa55ddc2abcf89406a1bc0a0407ce4ead2c2c22b784de7240b0ce12d861a744d7a4e4c320d18071242e8161409f40f6294676f459004b1f0b726a0b0bb379d7a8234cfdc86ab0440a7405b04ef81e38886a59a4eaa62796bb023c6b59397ea41d4118d1985302fd1593765c871ee87c72927d7c5f0a8f992336dda1bf0440c13b2eaa13f56d63978578ebd746d40d3c3760c0effe0699ce6b8d9bc1e448008babea6fbe92cf414eb21a338fdda1fdde9dd91a5cff689ccfbb37dedee63427f507b7f5cb202826f6ff053f80282d4844b2dc7f9924f8dd29e0d8f38013ddf2541b211d2d50f0ec7769ea3745c133346473c3b7e279c9bed03ccb82cb752518a67acfb1ef6aef47c9507cdf84af79fd989fccff8bbe2324fedaee56c247ca9217fe113915888cb4c9def8616f622a5cd9593e110a891c5d4797609b62eabcbfe00295f90f50b043693fcf23ce7d03da3d70eee4c9d13e80311a49f529fb1cd8186f89c37a045e3e7191bb157530ede14a3a033eabc42b7d168adc9743481471fd2148924a0114439bbaab6371c9ff78a3dca0a959d4cc492c2f25f97acb33956ec269e75ac8df83cd3c0bca5557fec8b398bb7e9b0f0975bd6f429a20c470963017ca7a4af335cc3cbe407bd31db208e9aaa21ec485f7439b711ad2a7fb843b799ef88bfa9f26f30c8a42ff43ec1557162fab66b29cba56a47cdce44f7a4915cba463d0eb86e87d24e625f457d2038fa34c3e6eee9fb5bbcb9ce8fb12c82b57d0196ca34b36329a99868c572ca10b39098852b8d56df42e72c
9c026ce285248dd093fdd2cc21fc185d264309c988defc45a4fa0f88f99c299f1f18739b72a35385f07baa94fe1097f3276a1456123a13b41b43f315ea7c801a90fcbd75bd0bb25163fc22bc9c5500315c1726c45a6013ea6d0f746b7a8e67a3b0bbf2b473edfb6f98ea6f91bba3f034fe96b4aff5cd310a782530305dbf6da4e7704c63aae877c3ea8c8a1faf73a277f5d1998e78ad6f182c85936f3b1f3758165432d13e852423a878dd6778541c3713936b055319a715ad3187c531db3180bde740066cfd8b922ef500c82c41676d403225dbdf2bccd32955cbc55bab749f031d4efc3834c9d7599e3bf9461a880f2e0f040de7f171057d00400a20f08b9d470a028ea3fe971997262a4fd62b13264d02b491f975c8053831307a414644dda482372c91b008409c13279aea4eba3e20118d60a9147cb9369d4121c81b85309dcbf82f22fbd9c79001b91095b6924bc93b423de6d50acbf78ef0029437428027437b16b0dcc9f1ec1db630f95deed06174f0abc440f989e66c5d04e9f58f4ff6e7702ecda5b8c5a3eebb239ddac85d4d7beba210cfecbd1f3e3b9eac140302914d7cf722d8bd7c1da0ca9f70dd98e8ae13dd7f7063dea38472072dfdc57048c2b54fb5b0d0774d971d45e443d2e3259a0f30a7145edce7a9fac3148d64cd4ec0202beace20d2d381c02b9cb07fcfc4efaf71ddbb21aae01e13e5f9bc12d3371a296d9ac775afeca05dc4dc63a4f24ee9a58a95b2c05bd53990ee3af10525e7eba72765fe94871f638214b1f7012c058819548445e8847be24b31b25ea4be813448b0276c4d9ad066296f00d0f64a79ca3ebafc154f2ecbe81971bba7be9871e37a45a59ef37454d723684a3b0c8149167fc52c266673e91da9b95bf86259d3daab5b0c4c7b8e991e496faaa1c9a2ce2d1d668ec214cd37df05173e6d8c94f940187d50708cce086a66043ba2ec4aa38dc9cda26add5f923807c9c487abf3c994e062c99089b6ed16d6d01d7391727e435d4e40d1e23d0dd5905939abe36b71e2389c644dca8088a18f3303a851dcda938ffc09e5d6e0d6b9283a798e40f51a4587fad87e7514d5e2f510ff304f06125b1d45c170e7a66de9329927150c835d3f410ac834f61c2c0bd20b3d1c0f6e50303e87d49cbc9c2c6518bc2072eac5e6f526a7408bc0f7e4ca34da66add82150e9a2f5a5d7977d920f038a50dffeaeca928f87ed03ef13df866ba67e472c0724ca470d7bf02b8c48eac27305b9abe74ce8bc174f3299ac0941400a65936b8eaaa6a3f4339bbb114b4b56bb28231bb374daa19923e28e3ce552d38cf218ceccbf8ac795c0d8230ed5d47463a9f69e3b0e2e2e03f8489c365ffdba83a33426147d0d0abcfcd2037b96ddf283395f8679285063c39529d7086c
1a64f45b2aa21b2f879ac34b60e7a6a54768226d8441131a60abe5e5ebe8e394af1ecea60b97640bccf275801185c83e5c0" + }, + { + "app_id": 2355063471, + "data": "f783115a39bb709592eb2197f6ba940ad77af14dbd366287ad353005d88ee023f5c93ee9fe3e816fc00ced35633ec3f9b4cb058e671547886b41c63f0f5a9605cf6f66ec689f6dabff294b8fc66e503710ef0160a14ebfa2f7f1eb9ffee24bd685ccb3e175a384eea22e2567a12054c4d599cf53f734a71732c0dddbdf00489a9bf08417303f9318d5a67bdf1789ebe8b812f257eb17ef251bbdcd6b158975f5f5c54c72aff3b65eabeae98cc7f93acb0daf19ef032044ac7563b0e0a7d14cb6abb2bac1255f4b14c8e8fe9fb9bf2b514c55e867076ff2be169f98ba59ad6e2267468befa2c69d448e909ce229feff6e7b25fa817df441e43cfbeb7dd75f84a9c2684d383972b5e491a870c6a1a85a0000a075bef3a974f03a4767f20ccee96aa6e15ab34d7207984a20cadafe3d1d3b7b4bb2482690fc09cf1c09dbcd7b44fc7001f103e71544988a4a52d4914b0faef9c08c1deedc147f2c3cc122732f32189f440b04782ff3035b9fc042ef23db788b52e868117dafbb1c6dcc358f28ba0df29d5c6d1c48e77eabeb742c73fb7ed12a057601e1b767745b6627bf2d31afc32172d27c" + }, + { + "app_id": 2492217952, + "data": "70d67e1aaeefb4e48c299a0240e8c5b98198931342344b4f5a1f4f308ece1e10dfd7ed93aecfe64390fd847e0208cd2d266b5fde7e12ff6c312150de9d1a30abafe8654b07226e7848280a14f5aea982c40f2dba6f17bcfb471b9bf4ddea0eba72cb4dd2f4dfb9fa935f576bb93b3e4fd1eb1b4afd33ed1715e567becaf3ffe3516c3a099582b9738353a7d9a099bb88e46af232ee19504d4d25db3252d20935921ae7f60c903664b8fc88d84896436bbe48dd" + }, + { + "app_id": 2802187347, + "data": 
"5d59a3919c30cb537c26fdbbe815bfa829df645fe3bb2102165379f3f50b1f47c6c47731311729439c2ddb17568ea346ccc9f32d1a67f9b325de5a9a2a6c1179ea22dc6fcaad144819daf387fab46276c2bbdcc7d0862f1c0d6f57a8cc7a4cfeb77ae4a81ee76833342453e0761a675fa24d984fe5657aed7eca76055da5c8eb0804f002a744d0188e6752a3c4f7076766a0f005a6480b03c9aca79404fdb83fc9b9af8312a346c7ff940242a80c24c231a3aa556f3ce30f4a5e53421037af7fcb3b14899adbbe1565ffd2a95618918c41e4ba7f469e298d6ec929f211c80f401c368e82906cdf4f514ed2ebfede7dfd24aa84d613ac0fdc459d56f2bcbe0096a431d829d76abc21eafa721bb2619bb793c67b951a283eebdfd0e5dcb4570809ac65137b0a428335ee3465df582274bdab2d7d502f70827590c2b7ebc5cb802b05f6ad3fda8443497920d5f39d23c26b374d483134c3efdb8254cb4aa1d31ac8e7d581cbe364defe8bb8c9a38b82972c07460ca5d93d247de2b57335f428002f93882775fba1010fae8b7ab704f8c3281ac2c7a671c8de1f83eaf1b4d23f7c49030facb43e1c6f837df672eb0bb01f7f1be60198c31785b3f4f743e721be9458cc29b9dde90680c0f332c3dc49157aa0fec1317a546ed784d6b827d60662e8a083ec2a36614abeb8f9a0eaf9620f7cd4d3dd2d6282d8d4bcb3ccd05b215fa7c667f14cab10b0ce1422b79ec55d32084a5458334a802efc9eedc73682c85ce8249cd922a5735f603584f74d0be6d108b560c96861f5f138d4de147942a8f8891052b65d9d5085c6a734739243e058a5fa35f4ffb98aca8eb5768e5e382ae3c2be85b8d8f55f1c9ab3fcfe9f5ee485430afbbaf48cb70c5bc8dd85f7066c1b72cc61ea75254ec043ed52ea74ff161855b89f1d968ac9b4bf0fb451fb5ad5766f434782cae42bc715d420a60c3ee39a75c0ae25280c94392f08604fbb97bf1071099c7b98d0e3d2092b5220031b11fe9ec56b30303f120d7b29430c052ac6d1090f90e614eecd53b52914b94b4ca0ae97e5cc59d75c9d2b274b2ef29cbaa5f9f8351d384b993a2915f55036252af2b62b7ef042c82e241eda801a3c509eb3742bbd1154f56ce6e48b2958f7bfe78006c7a47a3aff74424165a73bbc9c9b0b9b44a4ccf4e08ef03248694d798d931878351166606d8cd5df6c3e7582153cab7b15e644ce6bb69be08dce94cfc327bbebe92c1433a3f922dac4a10763164939540cf03a382f25e9772d32a152d62bbe3765e952a85e14ea6a0dc5f51ae2ba02f979a566186c5251a9a74f68c29e1d2bc3785d91315bd2fb321635a3c47cc0d16ef4d32da544ff8c8cd927c276c49a2b15eeef494e494aa0a8358c6d4b67f4acf1f044d270839f52fcc72
977f73009008ebd92fb9b68f937a9d75c52053f25f6bb06a09e2cf4b6189f76567b4d14802430375f97ebd1e232a5c6ca4870200b00afafb45ab54789ab988bcebf2640e6b44c586735705e2e38a6cf7266fcee028941d540511751d050afcb7b6bc88a3a593d79bc30bd54bfc3367a21e86a17f45801b932d5862f0aedecfbd4aef578bffd9dc6932df7d06f0d744843c7b899704ca33eb2368e6dc4c67ea56780ac9b7362b94bb1c7824ef38cd00ac05dcf88f710048caccff13edd9637da487a35a4ed840fc0cd841a798116e0864a345666e43cd85c0323f5feb34851bbf66ab70f517179c9b568acbd4a79bbe1b9ee77533f5c8077141998be6e280c44ee5ac7ebefd460fdb44a9cee921c58d2d934e85c4bcafc197bb6f816b451a5a22334f0d299c7b3a4d5eff63055d51f7521dc0f70f7b0aabb60012f091e522475221ace85c9e636cc5d69197c25eb7a0848e3401bb1b71b68052e3e918532b5902df8cbe8ec40aba0b39d3a81620def982ed63b1a3d47fa09b4fedaeb3260ae13e200782d35ee1af652b396d4f304342af6268f72464380431cac1a43061e050b08f7255af030a4f3c9c67dec8dd15bb854bd2b4fcefbcb49363ee09ff57a857f889485e5a4330e11e89c221b45649c45e80a53a036b3114432ba09ce43b5ad531a461337960668e6bc87f3cbd323b13c669e1568232b6f8b940632a05e5731fa2109889afafa58e6c02ee70ca123cfca2e05f1a2147fdf34bb4380c188b3e28033e5485d48d1c0c248f26b003582f2c2ee5c3b3c4cbd6e0d15dd00a4c5ec3fce30916f158329953014887f853242a507fda46cb7ef16062cdd2b84cd6b6d7d1b40f279c713851d7f5428ff7b6d9ca638c0ec0a60a6c00a2fcdb7c882ccd27c68e34e6fe425b34584cb43b699a603cf8e2b019a12708f4133f55c8511ade05325f457" + }, + { + "app_id": 2996444749, + "data": 
"19477d140b32d4cacdefe2cad3cad1942b0539de6b3a2fd321e66f005c38a5a9542d4d3f2f08a4e88f6acd7e4518156b3b138abbb19f2f3be961ceb725a8b8de7247d6151de5ca5f5a5e65339ceff3da2f5b7127f2ab348cfc908571315abedcb8a08357c1c459210ebb317d97853fd5b6d1741438799a5a650bb98028ba1e4b9d2902d97a9368939bdb85153624c3eaa13278dadf6adc88dd3608e4ef19063ff20f40342cb9bb9f9c0dc691c34cdbeafe767478ebafe4bf2ef043fd306b6ebcd2837b5948029c534d4bb76609da457bd2930b76a3e4236bf3bca314321f21be45da586179b853d80c49a5b95bf67d863052a6ad4dd4b081860e05da4ff2e1e81e8c7861509c0481fab6587781889554cbd9981286ec437b36065f56e5a0eac5f5ce683931e421121769787b7d49154bab6fb22a46aad8ebfc3a84cb791ef4b4a0dfbd4cb34554f204c21e5b975bd345e0c670906d3bfcfd4c1a3d1b0a73f0629e7437d75f511e43c9eab4f308bc06a59786c69e4b412471dc63413144035b37610f5f5fa1cc6503bdb6140b1e75dfe3d3b18b36ecdd78f6dfeeacc12c585bd191d1b32ba1c8330d335ff61ba5e2898f3cf466068e8d7a1b4b9ec42652794e5a810f80e234091443481c864f13fb3d5d27f3ec90ea8e4e3ba736e714b5d12ae2fbe975efe0b59a540c76af7a1a7929fb5f86bdf4ce5c84788cd919abad1ed0f091c0b33ca9d86e5d1773545d41f99b0ab2bfd4a6aacdd28edd3bc798360b6e72eebf55bc1e6a62f017b39965f4df4b7c5f39a55d8f95b058946652372b559b49ee367faa7008863990b83aebf1ce789c955f9aaa3b6d189623f710ce84ef7eaa344ce678a39b05c0ea37229932e459af010fe6d20ca9b8cfbf9914d560782c0162eca5ac82a06ce5b6755711b20f40d3c63ad065d06b821743c0cd380720a3892d6d38cf17889ff06d544df4d92935f5f68109074af8cf270e47bdf164f38626f047db1535f303827e56c82e4f028a52a99e6088d197e453fa0197a49bbc54f728a8ddd3ee9cddfa7bc97a1c1bc733275497e4df1e410e6cf007c6fe76c6ec23962602c123a4537964ca60e282dde80559d55c9779055339edf9291f415ad373c66cf1911ee20f9d1633476a54e78783ee7aed4ae2f0256a841c0a3b1fdcf289179420555a1275c625fc1c08b22fd3890e33d831fa5e1deace2d7137cab6fa095935637e6b04f8672061b0c7eb73f4e6b5c5bbef3871b8fc61a42cbf6f83191bf129197190a46fcf7a7bb24c37bc8a4208e592e2a8609c209b6e7e6dcf759d8f690666284d9fd06e176f2f94c2d98effd1d6b18c9721ea8a85a68651904c3e51a72699d0a922a94805b76f702f5925cba30984e16d3e8cf608dde11ef34b91c07636c056c6748a0
66809aa43ab5c5127b2dab00e32a1b7b0f526bb6083402ff42e366a39670d5a468ca2b2cf7b102744779b05661c8e973665d40426e3eaacbebc3f18c6161e3007fac1b1281d948bbcde2b316582906398eb1e2235e1d9d385e300b04df8d34801d95f8db315b1f6ed5e62429ac40927bd33f6fff6a5d1c729508d95365a3e1f5fb3ac1ef49f1fb75257457e4bcf89a1cfdeb2cec1f9d61ec578973756b7b3117bdbb767a0cd7df487b98122122f9917d45e9b8f571f109632db09f1b688b552bfa472b272286dd5dadc1427757f3f24e9c3fdaebf709341a525b0aa989458557365b5957f1264b79056a883153e551291cc8dcc4aa0559e06d5b22b3e56e1aa7f2c0d6013c57611cb095a157c2b10b43e0a7b1c61d05fe2f3f0c63829fc2477e3d94b0b2b60c5e45bbdc1f8a39e7e77a4ffd7f5cdea80fdb13663b9b06da92412fd052a08f8cc59d6b3eee2a680550d33de7a59108f78c18e584b7b6db4d9e4b184038794294a9fa6738a293d71b760812e10b959088d5aa157cc18f10ed3e59d2ce577e244ee89bf11e15f7d430da8248929a62902a192ae1b2badf28494bc77c66e9db50bf598fa9165e752729d78e7de69ea57b6be609ccea8d83d2b827bb87640e6e888865858d29a8a2e132cf4c8c43472e18d6d9f22ed1e42ea48488db3aa54d08a9cc64d14377f06c4d2267c2e709b597cfbcfb284c6d27b2d701517102d3d612d9dcc8391c5b99af67ce85e822381037245e15ea533b6bfcbbedd4755b549b4e6f52931029475b2cd5bd5b9907e7a9c5f0a9178e7e1bee3895a860f92960894eab8bce437794356579ed0b1c21fb2ada821b28fb1e32d4141db90c6289118d76c442d85da08f78b459d323a3935174661098bcd6c567cb0d6683677303a4412648db184fa3d440461efc67e3deab668758d1c8976984294199b28134ce02de6b68b46d2297a5df5ed40705806f43836064e29912a6a43e4871c2a643848fcbc7553de6db91edc743de6c821b57049c937c6e652fb0a55ae94d5cfbed5d4b4a9b59cd3c0282becc86efb1f99328241d4c59e63dbafdf01f4b4c11e1e37fcd0b352f26c1d07a81683dc5d593fcdc96ae0e22ab66749a0c7e9dcd0c81ceaf40241093591a65f9c57dec137dd48707f4db223ac13b483c1e1cb941ee7d5a7093eac21906310d4d8c86ec7db30aa11cef179257ef050c239f3ec7110c404c29389b0ae3f2c07eecd4a690fc2f606a96278f0d2052591cf72ca2ade6ce644170140c27bb498d7972c7df34ce62d6a4f30e2ff9c35254743e9eca1894492e6c28fdbecf52282e2881dc7b85d5b34762f826856a2de5cad687dab30455ae721d687ff21603a848335d901835993c9365f7ffd0af803bb672993e69b7cfda5c76e32a6d2f09d13487
193c5" + }, + { + "app_id": 3315510934, + "data": "79249977e3d11b007f11355f602905ae2915f9ef77b5dbbf7f29f6c6ef38cdda86c24b9181e77fc617db2a7b14811c3270b174e44c8f0789d710297636e9bd2cddc780d94e049bef7d9003cdd26115137e351c6adbd29897cb6bc449d31c502af84351c76487c2ee4652dba1c688711eadd45aacca46ec8feb59200af90150ef25c01f2ac1aa43120f79c99f6cd698b7619e1c7a5c3e7726d90d42737c24364db4d33a87faf6d88e05831906d89e6f36bf066a8ce765782c9aeec2795be308a3e5656d1f41d62cc035ecf653c5c930ff9e6ca21deedbd02010bad3df9cfa9f41ebd8400cbaf7fec514f46c93be1912594c7decd09d78528345b676778d1b9017813c55f467e41fd8f1a5870b33696246760b489f1460d50e6d4e7b902daa3bd527d11eff647454ae5ea2171383962c7443588a86aa88864ae703ba5b7b9a6c440b98222a27f46a15c61db166b2fa000efb7423beacb97eec80bfd7294f06acae1d409f79b9536cc9e6b7d4f5ae5fd537fe19bf0a667aca789186fa1f449c7a638c396cba1c19881e16c5c2e1613a6382e04df54ebcb9282ffd6bc6df71cfecb3629811015c7d63525e2d1a0a2b7b32352055a03c1d8dabe27a4142b7648a5813645612c8d47dc8760cdffc1effe33849c104bad0c089f9b589e297bb3eb41d2e53d7372b8154802b2f158e34d6c6468a6286bc9b30d9dfeca62698f89331c46c2c1d9dc68360aedd48a8bb4ca636598366f03f99ff138bfcf47289d97ea9959b4b0f925d5b04e8c15a8e9c6b813157ac2174bdaf09da67a853d9fbff62cef069c6d1c515740c" + }, + { + "app_id": 3408444956, + "data": 
"bb89f92fb327746b2a2b9e86266d583b6e587755ebfcdb1cbfea1cf59bcfe7e222b8fd73e3156aa054b0ffea1495f47083597ef699955b23dd18ef533757a7a455fc386039ddcf396facd016a166f022b69882d0a428b03c7db597bf432e086e13212787bf208ff60a5b18fc62ed0bf6f4b9fa793e7dff4006a5d7afbc66c84ec9ac19eca8dfa5a3aed00aa011404a47d7567795f6d4e6181d1eb2e7687afcd70ddf3bcf91f707532737f16caf045fbfed9a25de66543be0d9258a176b4ca0d5d5a0c2f9df6ad4e51698569c1fb0920ec8d4a98b1ea63baac061dc6baad62691ad8940f75d2b65ab38d0fa839d6f491f97f20cebc618f177a1364e2893f3d815e8a3e0fe69141271a7d1740864de4a5c5275a6e04342212b0478ec4e9f510b6bce7febf7ebfdfa81de4fb88b6d749c22e61c531dcc7974a4cce574fa1c8ad181b0f6b5798b30c6cc118ed94c0056b3fa62e3b6ae774bfb8a3dc43b306e0f4f73ed32c04ee51b0350fd08016618acd12fd231a1a4324696796128948ff64618a6fc68788cbf0896741af2521a8e00e7d0b7aa835ce97934921bfd28ed18eb8391109d95692d2335f8add32fb505c769d0961dea5e3f40d1651ea41b66ba78f987294ef310a41a3a96f75f8d9e6c898f5655c79fef35ffc2b3905181f11f3ef438aab778dcce85bde52fb5f9b862338d9b1f473f734a16ed67f34e65df7bbec84f8b20e268963682102be15ace8ec18c9e0a879d1005e0fd7308b0803464b9f1fd82c4b3540bc503d3d2eb6a461f7dec768800db56ee22c496cab71444b3a75ee676af1d687c9f2fabdae0aac2f6d6e65facc4730281dda985eaaf56f02244f1b04a634b2523da86610017b60a60b3f588c1052edd46f8bd03464d8b488cfb2d39c0cc140adbaf16397cba217cafabd6f69a17fff5e97082d6618130ef896e227db7b90965f7e485c083d8caf43c591e51fc84f74d11ba727469c1979c48bfa07c3c4c999aedbb2fd64208b013d516e3626a1b3f03e6cd712bb003004ea3bf84a699dc04a6b8d270e3be5df14c99b5c7c5ea587d17fda0800288569ceb20caa4841c1f77b91c2c0a8a7cbf13caa7fc47ee309076027b930c01892066c1797e365d2f62401ad456b30b980a8afe52ab2524ee739b3efd03e9a2dc30b3a6c59dd7acfb7480fa349e80b119f77f98a29fdb095ac67716cf44d4c7727ba84301d0f28d490f158b4145be93db9969ca2502e23b33a09f7a170041797b989ffcef656ca2ced67b5d0106d53f189dcf4820abe3d97b5c74e4f2ec20bad690cf2b314889fb1fc10bc5b2cd92783330bba8460d8c757107f5331ae4e4fb335416d65b59450a281ab643c9f26fdac4738454feaf175a780041e83d90f90254eb2e409141448dbd3403fe9c06057
1a2d22851316eab5bb601720420d09d3d2b1d793fb4456fb798d97ee99c2a4d9c3df15820eef414801be4484f3ed8e340b556a5d6526f25e41dcb4c098c655895310721f2eecd6ed87e65f745795da3aff6d8dd92b1673783fbc1a57fe2b091f1eace76d33ae2229f9b8e9a16aac64c527c34ce423a9f4dec816117ad7c9928f5b747502688e643e73a01d48936594b0a8997b76a5ab8016b743324f2fe9ddf8fa3ae1ed39737863d732efaa1917ff80f9c6424205143652bebb9c82d27130ae4690bebb6169edf5a6493f67b5adda352ec7fb7fe9a0e8ae5c43744a0ba899bef675ab456ff39ec1bce4a95a56b84787f8f0c790c0caf2e5e78b6505d5af4d570239c98a5a5cafd1096a8f2f91fdf3350266dc30f1d188764cbe7bdfa90f581186aac25ca71a5ca1b90c9ccaca0b474f4c48a345e7dd0d22627ded5f4de932d63e6c3cfcf0240b916469cab20c8b097577bfa916587fbbaf38ee0f5bbf12f10ddb50b2b85a114e874b06154eb34e89f75c1a3edcef52a08ebc2c52935c8d29e0854bb2eff3f8524a9770e9877139cbc240187f5d1db651140353ed040282b43e2e887584ab85a1f0e9cecfb643054e5cf3c987bc28cc3d8903d3e37d9c5b01a55ab06a2b7f131ffd7fc9e8e8d10a8df00fd7e0e3229065287c1b26e857221ed459b63bc313886b75c53d51b58f221117ffbb19edf83d122fd514edb6f4d0499f880bbe1ab237e1a3c0fb4b6f9ec87b09f27ffd9a02f5c7993cd01c97ff3a4faaa63a362fc675a19c9ee314248d57caf3504337e331dc7590ebde2a469" + }, + { + "app_id": 3878605408, + "data": 
"f318643f745891fef0e0547a94b02430a2e0717db2b3b5f0140c1107c727469f607313471f90c9f6e3c201090a4b0a7b19ed7f1fe8e33e0e9f6460f215abab05a48c571e876cbe73de43f885410e36369e182ae1a320942d963706fbc805dbdfe964e43d89938d85d5352132928592b4bbc20cc5775f27ec5f5c27fa42f7c6231fb278ff17bae6ca92b980b279691439ccacb65c267c24572f6c4435318dae6ee134249457d29e0eeb54ac83d73c55139f9cc01585a622190c3ba17f1d49ab54a4e7b997183427689067767503573c9fa3bafa5b1e2ba38631b6499174d5f0d9077fde4e6b3e4e3e317f842307ca7aa5ecd6447800bca1eceed88ee58247ff04dec7604a5a82f90919a887c994fb4a95272cb0906ac9d47ff9a4e168024ad41033ec17bd98d9a39a49c51c2298d055e3cccb8e7279717506190a8e05f3627e91fabae803063da84147f6c0ebb889a479f37599d4e9feaf700dac1d664a29b086625e5309c1010f46f14f620535bb5ef904d7bd8bc4177ce15eca8f537c8af7a9504d52ef3c3959243c209e41383949d8184fa52fb0cb8a8df88f45268d82da3b93b2da77c868409d56586e6339b757771865fdc6b92047ea3267ff4a7090b4f4b6f86e2bb1bb07ddb2eea6b97c2dedfbd8736c1f8d790a60879bfc3b650cf3d6ef13da00415389db0030da5c026386cb5bb0884eb709e8b1bcb96bcdb090b1312165202a346495ccffe493c03b8f41d1f4ffc4e4c31f7fe9a1b89b85a17896af6e9f89b6c21b967157a1a77d1e840ec20940a0bc348caa10471946eb30f0f1798797fac9282bcd29adc2d1dbc3fa65435fcc6ac161aff3bf8b6ff574bc2884d7115e1c9df3358b24c7ad232d3a615eb01c732e28a496ebb7e8e8b06e84241528340270fae38b6a0033d960bb410acbb881de7d683fc8c8b65a69515e35911957f94de935832d08aafc7b98145d13e2418e085cf0148dff17733fa457285c6f2f665b7ce2da7d7e87883a67ded8c4211e15b9f41bf49c56c3038ac534adbf8e47530e194478446fa728717f3d1d06472b72999842825cffd8a5415089d5843839fe05e58dc9a0695f5c1cefb26bb70576a53353be42195498f62327703f87a281c2bbe99b61c85aec8f1c2258a5149716ab84ec5a77d45ea56323e717da0fe1d9c47dcbf4be9d02bf5953ed34187fc1c2e383ab4c492e025d0078665b9d834a538e2a2ed018e5bb5a0fd292164972a8983f2a7614f17fa6f751418193807bc3d568f910c15ad369ab10affbbf254758392d6a1cbdd6ce0563b2d014a7ab8c5a96b2e96679191b6959843037c2280f6735715c547855594fb63c50310cf9cabccf0b44d8ffd53f1add0cf35cab35eb8d06b1cc3eb46e526583d156a9d8dc648f015dae376af15709d
7e092707f520d9858bc97ee4317e1b8b4871be537223e9abc98e63e451c31b173066fe9fddb9a0b39707e3ca1c9b77455d9fe4d26f16d6791aabc1d8b1c75da3fcf09716f6cec19dcff4fc869ce7c73584569e51f666bd90d39a930ec4305cf1015cd1f09716c9bcb02fe7b23c0af55e84af1505156e1b58f453b4881bd51436581a559b84841c641da6112a7b8b0c86af0b97741f7b899ce68db0c174a39865c1afa0c1a48ac4590925f5f07becac1a3774b87e252b3ec7ca5378a175b44374138419058064836f63ccf967704dcf7270a20b8e6f6e2ee86d62c3b0c34d29e3ee54df5c4ed5f4e3551a3caa00ae0cd75a729a8fe7ebd78c1e5e4354a399399a9dbefbf42a39e45f726c128e23a3f8f73749d985e290a08a5cd05e6a98ffbe5c0072b8a04867019488b79a4e640cb15ffeb6231078e5e0ede5453dd6fc94f3fc475a9ff1fb2666d559fc170f4b80aa98a633b6d0ffbb605d09c6b7efe4808d2ee4d49dc28748c769f745df6009624ab334fdff90b0e029a5ac51083dec1a62c7c9b0dc18e0f281fd54fac7f607f9b22d09a4fd5d17e70ff28181068b37af137a61e8ae0fa8cc0482bf4398bad275353a9c539f3ca72e47803efb7d2fc4732fa898cc556f96023186196bb56a153bd57ba70977f775f51faf1995f0811e63967b9b7551a1286021eb6956f56e673" + } + ] +] From 35fcf5004030f40224116e20007be75e609887f9 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Wed, 28 Jun 2023 09:45:06 +0200 Subject: [PATCH 62/87] Reduce memory copies --- kate/recovery/src/com.rs | 81 ++++++++++++++++++++++++++++------------ 1 file changed, 57 insertions(+), 24 deletions(-) diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 6a42e38b..bc56f093 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,4 +1,4 @@ -use codec::Decode; +use codec::{Decode, IoReader}; use core::num::TryFromIntError; use da_types::ensure; use dusk_bytes::Serializable as _; @@ -15,7 +15,7 @@ use std::{ use thiserror_no_std::Error; use crate::{ - config::{self, CHUNK_SIZE, DATA_CHUNK_SIZE, PADDING_TAIL_VALUE}, + config::{self, CHUNK_SIZE, DATA_CHUNK_SIZE}, data, index, matrix, }; @@ -295,6 +295,53 @@ pub enum UnflattenError { InvalidLen, } +use std::{collections::VecDeque, io}; + +struct SparseSliceRead<'a> { + parts: VecDeque<&'a [u8]>, +} + +impl<'a> 
FromIterator<&'a [u8]> for SparseSliceRead<'a> { + fn from_iter>(iter: I) -> Self { + let parts = VecDeque::from_iter(iter); + Self { parts } + } +} + +impl<'a> io::Read for SparseSliceRead<'a> { + fn read(&mut self, mut buf: &mut [u8]) -> io::Result { + let mut bytes = 0usize; + + loop { + let buf_len = buf.len(); + if buf_len == 0 || self.parts.is_empty() { + break; + } + + if let Some(next_part) = self.parts.pop_front() { + // Define max copied bytes and pending for next iteration. + let copied_len = std::cmp::min(next_part.len(), buf_len); + bytes += copied_len; + + // Copy data into `buf`. + let (source, pending_next_part) = next_part.split_at(copied_len); + let (dest, pending_buf) = buf.split_at_mut(copied_len); + dest.copy_from_slice(source); + + // Advance output buffer. + buf = pending_buf; + + // Reinsert if it is still pending + if !pending_next_part.is_empty() { + self.parts.push_front(pending_next_part); + } + } + } + + Ok(bytes) + } +} + // Removes both extrinsics and block padding (iec_9797 and seeded random data) pub fn unflatten_padded_data( ranges: Vec<(u32, AppDataRange)>, @@ -302,30 +349,16 @@ pub fn unflatten_padded_data( ) -> Result, UnflattenError> { ensure!(data.len() % CHUNK_SIZE == 0, UnflattenError::InvalidLen); - fn extract_encoded_extrinsic(range_data: &[u8]) -> Vec { + fn extract_encoded_extrinsic<'a>(range_data: &'a [u8]) -> SparseSliceRead<'a> { const_assert_ne!(CHUNK_SIZE, 0); const_assert_ne!(DATA_CHUNK_SIZE, 0); // INTERNAL: Chunk into 32 bytes (CHUNK_SIZE), then remove padding (0..30 bytes). - let mut data = range_data - .chunks_exact(CHUNK_SIZE) - .flat_map(|chunk| chunk[0..DATA_CHUNK_SIZE].iter()) - .cloned() - .collect::>(); - - // INTERNAL: Remove zeros and `PADDING_TAIL_VALUE` at the end. 
- let tail_value_pos = data - .iter() - .rev() - .enumerate() - .skip_while(|(_, byte)| **byte == 0) - .find(|(_, byte)| **byte == PADDING_TAIL_VALUE) - .map(|(rev_pos, _)| data.len() - rev_pos - 1); - if let Some(tail_value_pos) = tail_value_pos { - data.truncate(tail_value_pos); - } - - data + SparseSliceRead::from_iter( + range_data + .chunks_exact(CHUNK_SIZE) + .map(|chunk| &chunk[0..DATA_CHUNK_SIZE]), + ) } ranges @@ -333,8 +366,8 @@ pub fn unflatten_padded_data( .map(|(app_id, range)| { //let range = range.start as usize..range.end as usize; let range: Range = range.start.try_into()?..range.end.try_into()?; - let encoded = extract_encoded_extrinsic(&data[range]); - let extrinsic = ::decode(&mut encoded.as_slice())?; + let reader = extract_encoded_extrinsic(&data[range]); + let extrinsic = ::decode(&mut IoReader(reader))?; Ok((app_id, extrinsic)) }) From 37d6c252ab70846532e73fd2bad1e8ac61d392b8 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Fri, 30 Jun 2023 13:12:22 +0200 Subject: [PATCH 63/87] Some improvements --- kate/benches/reconstruct.rs | 61 ++-- kate/examples/multiproof_verification.rs | 4 +- kate/grid/Cargo.toml | 17 - kate/grid/src/dims.rs | 104 ------ kate/grid/src/grid.rs | 336 ------------------ kate/grid/src/lib.rs | 10 - kate/recovery/src/com.rs | 27 +- kate/recovery/src/commitments.rs | 13 +- kate/recovery/src/data.rs | 6 +- kate/recovery/src/matrix.rs | 23 +- kate/recovery/src/proof.rs | 2 +- kate/src/com.rs | 429 ++++++++++------------- kate/src/gridgen/mod.rs | 10 +- kate/src/gridgen/tests/commitments.rs | 6 +- kate/src/gridgen/tests/formatting.rs | 4 +- kate/src/gridgen/tests/reconstruction.rs | 10 +- kate/src/lib.rs | 1 - kate/src/metrics.rs | 4 +- 18 files changed, 280 insertions(+), 787 deletions(-) delete mode 100644 kate/grid/Cargo.toml delete mode 100644 kate/grid/src/dims.rs delete mode 100644 kate/grid/src/grid.rs delete mode 100644 kate/grid/src/lib.rs diff --git a/kate/benches/reconstruct.rs b/kate/benches/reconstruct.rs index 
fa84828a..4460647f 100644 --- a/kate/benches/reconstruct.rs +++ b/kate/benches/reconstruct.rs @@ -5,7 +5,7 @@ use dusk_plonk::prelude::BlsScalar; use kate::{ com::{Cell, *}, metrics::IgnoreMetrics, - BlockDimensions, Seed, Serializable, + Seed, Serializable as _, }; use kate_recovery::{ com::reconstruct_extrinsics, @@ -15,6 +15,7 @@ use kate_recovery::{ matrix::Position, proof, testnet, }; +use nalgebra::DMatrix; use rand::{prelude::IteratorRandom, Rng, SeedableRng}; use rand_chacha::ChaChaRng; use sp_arithmetic::{traits::SaturatedConversion, Percent}; @@ -26,46 +27,46 @@ fn load_xts() -> Vec> { serde_json::from_str(XTS_JSON_SETS).expect("Autogenerated Json file .qed") } -fn sample_cells_from_matrix( - matrix: &[BlsScalar], - dimensions: &BlockDimensions, - columns: Option<&[u16]>, -) -> Vec { - fn random_indexes(length: usize, seed: Seed) -> Vec { +fn sample_cells_from_matrix(matrix: &DMatrix, columns: Option<&[u16]>) -> Vec { + fn random_indexes(length: usize, seed: Seed) -> Vec { // choose random len/2 (unique) indexes let mut idx = (0..length).collect::>(); - let mut chosen_idx = Vec::::new(); + let mut chosen_idx = Vec::::new(); let mut rng = ChaChaRng::from_seed(seed); for _ in 0..length / 2 { let i = rng.gen_range(0..idx.len()); let v = idx.remove(i); - chosen_idx.push(v as u16); + chosen_idx.push(v); } chosen_idx } - const RNG_SEED: Seed = [42u8; 32]; - matrix - .chunks_exact(dimensions.rows.as_usize().saturating_mul(2)) - .enumerate() - .map(|(col, e)| (col as u16, e)) - .flat_map(|(col, e)| { - random_indexes(e.len(), RNG_SEED) - .into_iter() - .map(|row| DataCell { - position: Position { - row: row as u32, - col, - }, - data: e[row as usize].to_bytes(), - }) - .filter(|cell| { - columns.is_none() || columns.unwrap_or(&[]).contains(&cell.position.col) + + let (rows, cols) = matrix.shape(); + let cols = u16::try_from(cols).unwrap(); + let indexes = random_indexes(rows, RNG_SEED); + + (0u16..cols) + .filter(|col_idx| match &columns { + None => true, + 
Some(allowed) => allowed.contains(&col_idx), + }) + .flat_map(|col_idx| { + let col_view = matrix.column(col_idx.into()).data.into_slice(); + + indexes + .iter() + .map(|row_idx| { + let row_pos = u32::try_from(*row_idx).unwrap(); + let position = Position::new(row_pos, col_idx); + debug_assert!(*row_idx < col_view.len()); + let data = col_view[*row_idx].to_bytes(); + DataCell::new(position, data) }) .collect::>() }) - .collect::>() + .collect() } fn app_data_index_try_from_layout( @@ -146,10 +147,10 @@ fn reconstruct(xts: &[AppExtrinsic]) { ) .unwrap(); - let columns = sample_cells_from_matrix(&matrix, &dims, None); + let columns = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); - let reconstructed = reconstruct_extrinsics(&index, &extended_dims, columns).unwrap(); + let reconstructed = reconstruct_extrinsics(&index, extended_dims, columns).unwrap(); for (result, xt) in reconstructed.iter().zip(xts) { assert_eq!(result.0, *xt.app_id); assert_eq!(result.1[0].as_slice(), &xt.data); @@ -178,7 +179,7 @@ fn reconstruct(xts: &[AppExtrinsic]) { let extended_dims = dims.try_into().unwrap(); let commitment = commitments::from_slice(&commitments).unwrap()[row]; - let verification = proof::verify(&public_params, &extended_dims, &commitment, &cell); + let verification = proof::verify(&public_params, extended_dims, &commitment, &cell); assert!(verification.is_ok()); assert!(verification.unwrap()); } diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index 17123d5f..07cfa1e7 100644 --- a/kate/examples/multiproof_verification.rs +++ b/kate/examples/multiproof_verification.rs @@ -67,7 +67,7 @@ fn multiproof_verification() -> Result { col: 0.into(), }, &grid, - &target_dims, + target_dims, ) .unwrap(); @@ -80,7 +80,7 @@ fn multiproof_verification() -> Result { (proof_bytes, evals_bytes, commitments, grid.dims()) }; - let mp_block = 
kate::gridgen::multiproof_block(0, 0, dims, &target_dims).unwrap(); + let mp_block = kate::gridgen::multiproof_block(0, 0, dims, target_dims).unwrap(); let commits = commitments .chunks_exact(48) .skip(mp_block.start_y) diff --git a/kate/grid/Cargo.toml b/kate/grid/Cargo.toml deleted file mode 100644 index 4d646f10..00000000 --- a/kate/grid/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "kate-grid" -version = "0.6.1" -authors = ["William Arnold warnold@polygon.technology"] -edition = "2021" - -[dependencies] -kate-recovery = { path = "../recovery", default-features = false } -rayon = { version = "1.5.2", optional = true } - -[dev-dependencies] -nalgebra = "0.32.2" - -[features] -default = ["std"] -std = ["kate-recovery/std"] -parallel = ["rayon"] diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs deleted file mode 100644 index 2e88acf5..00000000 --- a/kate/grid/src/dims.rs +++ /dev/null @@ -1,104 +0,0 @@ -use core::num::NonZeroUsize; - -/// The dimensions of a grid -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Dimensions { - width: NonZeroUsize, - height: NonZeroUsize, -} - -impl Dimensions { - pub const fn new(width: NonZeroUsize, height: NonZeroUsize) -> Self { - Dimensions { width, height } - } - - pub const fn new_unchecked(width: usize, height: usize) -> Self { - Self { - width: nonzero_unchecked(width), - height: nonzero_unchecked(height), - } - } - - pub fn width(&self) -> usize { - self.width.get() - } - - pub fn width_nz(&self) -> NonZeroUsize { - self.width - } - - pub fn height(&self) -> usize { - self.height.get() - } - - pub fn height_nz(&self) -> NonZeroUsize { - self.height - } - - pub fn n_cells(&self) -> usize { - self.width.saturating_mul(self.height).get() - } - - pub fn divides(&self, other: &Self) -> bool { - other.width.get() % self.width == 0 && other.height.get() % self.height == 0 - } - - pub fn extend(&self, e: Extension) -> Self { - Self { - width: e.width_factor.saturating_mul(self.width), - height: 
e.height_factor.saturating_mul(self.height), - } - } -} - -/// The ways a set of dimensions can be extended -#[derive(Debug, Clone)] -pub struct Extension { - /// This means extending the height of the grid by some factor. - /// `2` would mean doubling the grid upwards, increasing the height by a factor of - /// 2 and multiplying the number of rows by 2 - pub height_factor: NonZeroUsize, - /// This means extending the width of the grid by some factor. - /// `2` would mean doubling the grid sideways, increasing the width by a factor of - /// 2 and multiplying the number of columns by 2 - pub width_factor: NonZeroUsize, -} - -impl Extension { - pub const fn height(factor: NonZeroUsize) -> Self { - Self { - height_factor: factor, - width_factor: nonzero_unchecked(1), - } - } - - pub const fn height_unchecked(factor: usize) -> Self { - Self { - height_factor: nonzero_unchecked(factor), - width_factor: nonzero_unchecked(1), - } - } - - pub const fn width(factor: NonZeroUsize) -> Self { - Self { - height_factor: nonzero_unchecked(1), - width_factor: factor, - } - } - - pub const fn width_unchecked(factor: usize) -> Self { - Self { - height_factor: nonzero_unchecked(1), - width_factor: nonzero_unchecked(factor), - } - } -} - -#[allow(unconditional_panic)] -const fn nonzero_unchecked(a: usize) -> NonZeroUsize { - // Hack to get around not being able to unwrap in a const context - match NonZeroUsize::new(a) { - Some(a) => a, - None => [][0], - } -} diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs deleted file mode 100644 index 4226f1b8..00000000 --- a/kate/grid/src/grid.rs +++ /dev/null @@ -1,336 +0,0 @@ -use alloc::vec::Vec; - -use kate_recovery::matrix::Dimensions; - -pub trait Grid { - fn width(&self) -> usize; - fn height(&self) -> usize; - fn dims(&self) -> &Dimensions; - fn inner(&self) -> &Vec; - // x indexes within a row, y indexes within a column - // 0 <= x < width, 0 <= y < height - fn get(&self, x: usize, y: usize) -> Option<&A> { - let i = 
Self::coord_to_ind(self.dims(), x, y); - self.get_ind(i) - } - fn get_ind(&self, i: usize) -> Option<&A>; - fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize); - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize; -} - -pub struct RowMajor { - dims: Dimensions, - inner: Vec, -} - -pub struct ColumnMajor { - dims: Dimensions, - inner: Vec, -} - -impl Grid for RowMajor { - fn width(&self) -> usize { - self.dims.cols().get().into() - } - - fn height(&self) -> usize { - self.dims.rows().get().into() - } - - fn dims(&self) -> &Dimensions { - &self.dims - } - - fn get_ind(&self, i: usize) -> Option<&A> { - self.inner.get(i) - } - - fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - let cols: usize = dims.cols().get().into(); - (i % cols, i / cols) - } - - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - x.saturating_add(y.saturating_mul(dims.cols().get().into())) - } - - fn inner(&self) -> &Vec { - &self.inner - } -} - -impl Grid for ColumnMajor { - fn width(&self) -> usize { - self.dims.cols().get().into() - } - - fn height(&self) -> usize { - self.dims.rows().get().into() - } - - fn dims(&self) -> &Dimensions { - &self.dims - } - - fn get_ind(&self, i: usize) -> Option<&A> { - self.inner.get(i) - } - - fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - let rows: usize = dims.rows().get().into(); - (i / rows, i % rows) - } - - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> usize { - y.saturating_add(x.saturating_mul(dims.rows().get().into())) - } - - fn inner(&self) -> &Vec { - &self.inner - } -} - -#[cfg(feature = "parallel")] -use rayon::prelude::*; - -impl RowMajor { - pub fn row(&self, y: usize) -> Option<&[A]> { - if y >= self.height() { - return None; - } - let start = y.checked_mul(self.width())?; - let end = y.checked_add(1)?.checked_mul(self.width())?; - Some(&self.inner[start..end]) - } - - pub fn iter_col(&self, x: usize) -> Option + '_> { - if x >= self.width() { - return 
None; - } - Some((0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked"))) - } - - pub fn rows(&self) -> impl Iterator + '_ { - (0..self.height()).map(|y| (y, self.row(y).expect("Bounds already checked"))) - } - - #[cfg(feature = "parallel")] - pub fn rows_par_iter(&self) -> impl ParallelIterator + '_ { - (0..self.height()) - .into_par_iter() - .map(|y| (y, self.row(y).expect("Bounds already checked"))) - } - - // TODO: this return type is kinda gross, should it just iterate over vecs? - pub fn columns(&self) -> impl Iterator)> + '_ { - (0..self.width()).map(|x| (x, self.iter_col(x).expect("Bounds already checked"))) - } - - pub fn iter_row_wise(&self) -> impl Iterator + '_ { - (0..self.height()).flat_map(move |y| { - (0..self.width()).map(move |x| self.get(x, y).expect("Bounds already checked")) - }) - } - - pub fn iter_column_wise(&self) -> impl Iterator + '_ { - (0..self.width()).flat_map(move |x| { - (0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) - }) - } - - pub fn to_column_major(&self) -> ColumnMajor { - self.iter_column_wise() - .map(Clone::clone) - .collect::>() - .into_column_major(self.width(), self.height()) - .expect("Bounds already checked") - } -} - -impl ColumnMajor { - pub fn col(&self, x: usize) -> Option<&[A]> { - if x >= self.width() { - return None; - } - let start = x.checked_mul(self.height())?; - let end = x.checked_add(1)?.checked_mul(self.height())?; - Some(&self.inner[start..end]) - } - - pub fn iter_row(&self, y: usize) -> Option + '_> { - if y >= self.height() { - return None; - } - Some((0..self.width()).map(move |x| self.get(x, y).expect("Size checked at instantiation"))) - } - - pub fn iter_row_wise(&self) -> impl Iterator + '_ { - (0..self.height()).flat_map(move |y| { - (0..self.width()).map(move |x| self.get(x, y).expect("Bounds already checked")) - }) - } - - pub fn iter_column_wise(&self) -> impl Iterator + '_ { - (0..self.width()).flat_map(move |x| { - 
(0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) - }) - } - - pub fn to_row_major(&self) -> RowMajor { - self.iter_row_wise() - .map(Clone::clone) - .collect::>() - .into_row_major(self.width(), self.height()) - .expect("Bounds already checked") - } -} - -pub trait IntoRowMajor { - fn into_row_major(self, width: usize, height: usize) -> Option>; -} - -pub trait IntoColumnMajor { - fn into_column_major(self, width: usize, height: usize) -> Option>; -} - -impl IntoRowMajor for Vec { - fn into_row_major(self, width: usize, height: usize) -> Option> { - if self.len() == usize::checked_mul(width, height)? { - Some(RowMajor { - dims: Dimensions::new_from(height, width)?, - inner: self, - }) - } else { - None - } - } -} - -impl IntoColumnMajor for Vec { - fn into_column_major(self, width: usize, height: usize) -> Option> { - if self.len() == width.checked_mul(height)? { - Some(ColumnMajor { - dims: Dimensions::new_from(height, width)?, - inner: self, - }) - } else { - None - } - } -} - -impl IntoColumnMajor for [A; LEN] { - fn into_column_major(self, width: usize, height: usize) -> Option> { - if self.len() == width.checked_mul(height)? { - Some(ColumnMajor { - dims: Dimensions::new_from(height, width)?, - inner: self.into(), - }) - } else { - None - } - } -} - -impl IntoRowMajor for [A; LEN] { - fn into_row_major(self, width: usize, height: usize) -> Option> { - if self.len() == width.checked_mul(height)? 
{ - Some(RowMajor { - dims: Dimensions::new_from(height, width)?, - inner: self.into(), - }) - } else { - None - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec::Vec; - use nalgebra::base::DMatrix; - - #[test] - fn test_row_major() { - let data = [1, 2, 3, 4, 5, 6]; - - let rm = data.clone().into_row_major(3, 2).unwrap(); - assert_eq!(rm.get(0, 0), Some(&1)); - assert_eq!(rm.get(1, 0), Some(&2)); - assert_eq!(rm.get(2, 0), Some(&3)); - assert_eq!(rm.get(0, 1), Some(&4)); - assert_eq!(rm.get(1, 1), Some(&5)); - assert_eq!(rm.get(2, 1), Some(&6)); - - assert_eq!([1, 2, 3].as_slice(), rm.row(0).unwrap()); - assert_eq!([4, 5, 6].as_slice(), rm.row(1).unwrap()); - assert_eq!(vec![&1, &4], rm.iter_col(0).unwrap().collect::>()); - assert_eq!(vec![&2, &5], rm.iter_col(1).unwrap().collect::>()); - assert_eq!(vec![&3, &6], rm.iter_col(2).unwrap().collect::>()); - - let rm_matrix = DMatrix::from_row_iterator(2, 3, data); - assert_eq!(rm_matrix.get((0, 0)), Some(&1)); - assert_eq!(rm_matrix.get((1, 0)), Some(&4)); - assert_eq!(rm_matrix.get((0, 1)), Some(&2)); - assert_eq!(rm_matrix.get((1, 1)), Some(&5)); - assert_eq!(rm_matrix.get((0, 2)), Some(&3)); - assert_eq!(rm_matrix.get((1, 2)), Some(&6)); - - for (row, expected) in rm_matrix.row_iter().zip([[1, 2, 3], [4, 5, 6]].into_iter()) { - assert_eq!(row.iter().cloned().collect::>(), expected.to_vec()); - } - for (cols, expected) in rm_matrix - .column_iter() - .zip([[1, 4], [2, 5], [3, 6]].into_iter()) - { - assert_eq!( - cols.iter().cloned().collect::>(), - expected.to_vec() - ); - } - } - - #[test] - fn test_column_major() { - let data = [1, 4, 2, 5, 3, 6]; - let cm = data.clone().into_column_major(3, 2).unwrap(); - - assert_eq!(cm.get(0, 0), Some(&1)); - assert_eq!(cm.get(1, 0), Some(&2)); - assert_eq!(cm.get(2, 0), Some(&3)); - assert_eq!(cm.get(0, 1), Some(&4)); - assert_eq!(cm.get(1, 1), Some(&5)); - assert_eq!(cm.get(2, 1), Some(&6)); - - assert_eq!([1, 4].as_slice(), cm.col(0).unwrap()); - 
assert_eq!([2, 5].as_slice(), cm.col(1).unwrap()); - assert_eq!([3, 6].as_slice(), cm.col(2).unwrap()); - assert_eq!( - vec![&1, &2, &3], - cm.iter_row(0).unwrap().collect::>() - ); - assert_eq!( - vec![&4, &5, &6], - cm.iter_row(1).unwrap().collect::>() - ); - - let cm_matrix = DMatrix::from_column_slice(2, 3, &data); - assert_eq!(cm_matrix.get((0, 0)), Some(&1)); - assert_eq!(cm_matrix.get((1, 0)), Some(&4)); - assert_eq!(cm_matrix.get((0, 1)), Some(&2)); - assert_eq!(cm_matrix.get((1, 1)), Some(&5)); - assert_eq!(cm_matrix.get((0, 2)), Some(&3)); - assert_eq!(cm_matrix.get((1, 2)), Some(&6)); - - for (col, expected) in cm_matrix - .column_iter() - .zip([[1, 4], [2, 5], [3, 6]].into_iter()) - { - assert_eq!(col.iter().cloned().collect::>(), expected.to_vec()); - } - for (row, expected) in cm_matrix.row_iter().zip([[1, 2, 3], [4, 5, 6]].into_iter()) { - assert_eq!(row.iter().cloned().collect::>(), expected.to_vec()); - } - } -} diff --git a/kate/grid/src/lib.rs b/kate/grid/src/lib.rs deleted file mode 100644 index b45a5c4f..00000000 --- a/kate/grid/src/lib.rs +++ /dev/null @@ -1,10 +0,0 @@ -#![no_std] -#![deny(clippy::integer_arithmetic)] -//! Nice grid API, dealing with grids of different sizes and different orders -//! (column-major/row-major) - -#[cfg_attr(test, macro_use)] -extern crate alloc; - -mod grid; -pub use grid::*; diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index bc56f093..6c301f1b 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -42,7 +42,7 @@ pub enum ReconstructionError { /// Positions in columns are random. /// Function panics if factor is above 1.0. pub fn columns_positions( - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, positions: &[matrix::Position], factor: Percent, ) -> Vec { @@ -63,7 +63,7 @@ pub fn columns_positions( /// Creates hash map of columns, each being hash map of cells, from vector of cells. 
/// Intention is to be able to find duplicates and to group cells by column. fn map_cells( - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: Vec, ) -> Result>, ReconstructionError> { let mut result: HashMap> = HashMap::new(); @@ -90,7 +90,7 @@ fn map_cells( /// * `app_id` - Application ID pub fn app_specific_rows( index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, app_id: u32, ) -> Vec { index @@ -110,7 +110,7 @@ pub fn app_specific_rows( /// * `app_id` - Application ID pub fn app_specific_cells( index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, app_id: u32, ) -> Option> { index @@ -133,7 +133,7 @@ pub type AppData = Vec>; /// * `app_id` - Application ID pub fn reconstruct_app_extrinsics( index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: Vec, app_id: u32, ) -> Result { @@ -155,7 +155,7 @@ pub fn reconstruct_app_extrinsics( /// * `cells` - Cells from required columns, at least 50% cells per column pub fn reconstruct_extrinsics( index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: Vec, ) -> Result, ReconstructionError> { let data = reconstruct_available(dimensions, cells)?; @@ -170,7 +170,7 @@ pub fn reconstruct_extrinsics( /// * `dimensions` - Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column pub fn reconstruct_columns( - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: &[data::Cell], ) -> Result>, ReconstructionError> { let cells: Vec = cells.iter().cloned().map(Into::into).collect::>(); @@ -198,7 +198,7 @@ pub fn reconstruct_columns( } fn reconstruct_available( - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: Vec, ) -> Result, ReconstructionError> { let columns = map_cells(dimensions, cells)?; @@ -251,11 +251,11 @@ pub fn 
decode_app_extrinsics( cells: Vec, app_id: u32, ) -> Result { - let positions = app_specific_cells(index, &dimensions, app_id).unwrap_or_default(); + let positions = app_specific_cells(index, dimensions, app_id).unwrap_or_default(); if positions.is_empty() { return Ok(vec![]); } - let cells_map = map_cells(&dimensions, cells)?; + let cells_map = map_cells(dimensions, cells)?; for position in positions { cells_map @@ -297,6 +297,7 @@ pub enum UnflattenError { use std::{collections::VecDeque, io}; +/// It is a Codec Reader which allows decoding from non-sequential data. struct SparseSliceRead<'a> { parts: VecDeque<&'a [u8]>, } @@ -349,7 +350,7 @@ pub fn unflatten_padded_data( ) -> Result, UnflattenError> { ensure!(data.len() % CHUNK_SIZE == 0, UnflattenError::InvalidLen); - fn extract_encoded_extrinsic<'a>(range_data: &'a [u8]) -> SparseSliceRead<'a> { + fn extract_encoded_extrinsic(range_data: &[u8]) -> SparseSliceRead { const_assert_ne!(CHUNK_SIZE, 0); const_assert_ne!(DATA_CHUNK_SIZE, 0); @@ -650,7 +651,7 @@ mod tests { index: vec![(1, 2), (2, 5), (3, 8)], }; let dimensions = Dimensions::new(8, 4).unwrap(); - let result = app_specific_rows(&index, &dimensions, app_id); + let result = app_specific_rows(&index, dimensions, app_id); assert_eq!(expected.len(), result.len()); } @@ -663,7 +664,7 @@ mod tests { index: vec![(1, 5)], }; let dimensions = Dimensions::new(4, 4).unwrap(); - let result = app_specific_cells(&index, &dimensions, app_id).unwrap_or_default(); + let result = app_specific_cells(&index, dimensions, app_id).unwrap_or_default(); assert_eq!(expected.len(), result.len()); result.iter().zip(expected).for_each(|(a, &(row, col))| { assert_eq!(a.row, row); diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 4622cda7..039815df 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -83,7 +83,7 @@ pub fn verify_equality( commitments: &[[u8; COMMITMENT_SIZE]], rows: &[Option>], index: 
&index::AppDataIndex, - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, app_id: u32, ) -> Result<(Vec, Vec), Error> { let ext_rows: usize = dimensions.extended_rows().try_into()?; @@ -94,7 +94,8 @@ pub fn verify_equality( return Ok((vec![], app_rows)); } - let dim_cols = dimensions.cols().get().into(); + let dim_cols = dimensions.width(); + // @TODO Opening Key here??? let (prover_key, _) = public_params.trim(dim_cols)?; let domain = EvaluationDomain::new(dim_cols)?; @@ -169,7 +170,7 @@ mod tests { &[], &[], &index::AppDataIndex::default(), - &matrix::Dimensions::new(1, 1).unwrap(), + matrix::Dimensions::new(1, 1).unwrap(), 0, ) .is_err()); @@ -195,7 +196,7 @@ mod tests { &commitments, &[row_0.clone(), None, row_2, None, row_4, None, None, None], &AppDataIndex { size, index }, - &matrix::Dimensions::new(4, 32).unwrap(), + matrix::Dimensions::new(4, 32).unwrap(), 1, ); assert_eq!(result.unwrap(), (vec![0, 2, 4], vec![])); @@ -208,7 +209,7 @@ mod tests { &commitments, &[row_0, None, None, None, None, None, None, None], &AppDataIndex { size, index }, - &matrix::Dimensions::new(4, 32).unwrap(), + matrix::Dimensions::new(4, 32).unwrap(), 1, ); assert_eq!(result.unwrap(), (vec![0], vec![2, 4])); @@ -221,7 +222,7 @@ mod tests { &commitments, &[None, None, None, None, None, None, None, None], &AppDataIndex { size, index }, - &matrix::Dimensions::new(4, 32).unwrap(), + matrix::Dimensions::new(4, 32).unwrap(), 1, ); assert_eq!(result.unwrap(), (vec![], vec![0, 2, 4])); diff --git a/kate/recovery/src/data.rs b/kate/recovery/src/data.rs index f3009910..d06c5e9c 100644 --- a/kate/recovery/src/data.rs +++ b/kate/recovery/src/data.rs @@ -37,7 +37,7 @@ impl Cell { /// Merges cells data per row. /// Cells are sorted before merge. 
-pub fn rows(dimensions: &Dimensions, cells: &[&Cell]) -> Vec<(RowIndex, Vec)> { +pub fn rows(dimensions: Dimensions, cells: &[&Cell]) -> Vec<(RowIndex, Vec)> { let mut sorted_cells = cells.to_vec(); sorted_cells @@ -96,7 +96,7 @@ mod tests { &cell(position(0, 1), content([1; 32])), ]; - let mut rows = rows(&dimensions, &cells); + let mut rows = rows(dimensions, &cells); rows.sort_by_key(|(key, _)| key.0); let expected = [ @@ -121,7 +121,7 @@ mod tests { &cell(position(0, 1), content([1; 32])), ]; - let mut rows = rows(&dimensions, &cells); + let mut rows = rows(dimensions, &cells); rows.sort_by_key(|(key, _)| key.0); assert_eq!(rows.len(), 1); diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index fcc0e8c6..d34d4475 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -1,3 +1,4 @@ +use derive_more::Constructor; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use std::{ @@ -13,7 +14,7 @@ const EXTENSION_FACTOR_U32: u32 = config::EXTENSION_FACTOR as u32; /// Position of a cell in the the matrix. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Default, Debug, Clone, Copy, Hash, Eq, PartialEq)] +#[derive(Default, Debug, Clone, Copy, Hash, Eq, PartialEq, Constructor)] pub struct Position { pub row: u32, pub col: u16, @@ -150,15 +151,35 @@ impl Dimensions { } /// Returns number of rows + #[inline] pub fn rows(&self) -> NonZeroU16 { self.rows } + /// Returns number of rows, which is always greater than zero. + /// + /// # SAFETY + /// As internal member is `NonZeroU16`, this always returns greater than zero. + #[inline] + pub fn height(&self) -> usize { + NonZeroU16::get(self.rows).into() + } + /// Returns number of columns + #[inline] pub fn cols(&self) -> NonZeroU16 { self.cols } + /// Returns number of cols, which is always greater than zero. + /// + /// # SAFETY + /// As internal member is `NonZeroU16`, this always returns greater than zero. 
+ #[inline] + pub fn width(&self) -> usize { + NonZeroU16::get(self.cols).into() + } + /// Matrix size. pub fn size + Mul>(&self) -> T { T::from(self.rows.get()) * T::from(self.cols.get()) diff --git a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index 1b6b91b9..f09385f5 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ -30,7 +30,7 @@ impl From for Error { /// Verifies proof for given cell pub fn verify( public_parameters: &PublicParameters, - dimensions: &Dimensions, + dimensions: Dimensions, commitment: &[u8; COMMITMENT_SIZE], cell: &Cell, ) -> Result { diff --git a/kate/src/com.rs b/kate/src/com.rs index 72cd6a1a..41e75441 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -17,7 +17,7 @@ use dusk_plonk::{ prelude::{BlsScalar, CommitKey}, }; #[cfg(feature = "std")] -use kate_recovery::{com::app_specific_rows, index, matrix::Dimensions}; +use kate_recovery::matrix::Dimensions; use nalgebra::base::DMatrix; use rand::{Rng, SeedableRng}; use rand_chacha::ChaChaRng; @@ -29,12 +29,13 @@ use static_assertions::const_assert_eq; use thiserror_no_std::Error; use crate::{ + com::kzg10::commitment::Commitment, config::{ COL_EXTENSION, DATA_CHUNK_SIZE, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, - PROOF_SIZE, PROVER_KEY_SIZE, ROW_EXTENSION, SCALAR_SIZE, + PROOF_SIZE, ROW_EXTENSION, SCALAR_SIZE, }, metrics::Metrics, - padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, LOG_TARGET, + padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, TryFromBlockDimensionsError, LOG_TARGET, }; #[cfg(feature = "std")] use kate_recovery::testnet; @@ -68,6 +69,12 @@ impl From for Error { } } +impl From for Error { + fn from(_: TryFromBlockDimensionsError) -> Self { + Self::BlockTooBig + } +} + /// We cannot derive `PartialEq` becasue `PlonkError` does not support it in the current version. /// and we only need to double check its discriminat for testing. /// Only needed on tests by now. 
@@ -95,50 +102,6 @@ fn app_extrinsics_group_by_app_id(extrinsics: &[AppExtrinsic]) -> Vec<(AppId, Ve }) } -#[cfg(feature = "std")] -pub fn scalars_to_rows( - rows: &[u32], - dimensions: &Dimensions, - data: &[BlsScalar], -) -> Vec>> { - let extended_rows = BlockLengthRows(dimensions.extended_rows()); - let cols = BlockLengthColumns(dimensions.cols().get().into()); - dimensions - .iter_extended_rows() - .map(|i| { - rows.contains(&i).then(|| { - row(data, i as usize, cols, extended_rows) - .iter() - .flat_map(BlsScalar::to_bytes) - .collect::>() - }) - }) - .collect::>>>() -} - -#[cfg(feature = "std")] -pub fn scalars_to_app_rows( - app_id: u32, - index: &index::AppDataIndex, - dimensions: &Dimensions, - data: &[BlsScalar], -) -> Vec>> { - let extended_rows = BlockLengthRows(dimensions.extended_rows()); - let cols = BlockLengthColumns(dimensions.cols().get().into()); - let app_rows = app_specific_rows(index, dimensions, app_id); - dimensions - .iter_extended_rows() - .map(|i| { - app_rows.iter().find(|&&row| row == i).map(|_| { - row(data, i as usize, cols, extended_rows) - .iter() - .flat_map(BlsScalar::to_bytes) - .collect::>() - }) - }) - .collect::>>>() -} - pub fn flatten_and_pad_block( max_rows: BlockLengthRows, max_cols: BlockLengthColumns, @@ -288,12 +251,13 @@ fn pad_iec_9797_1(mut data: Vec) -> Vec { .expect("Const assertion ensures this transformation to `DataChunk`. 
qed") } -fn extend_column_with_zeros<'a, I>(column: I, height: usize) -> Vec -where - I: Iterator, -{ - let mut extended = column.take(height).cloned().collect::>(); +fn extend_column_with_zeros(column: &[BlsScalar], height: usize) -> Vec { + let mut extended = Vec::with_capacity(height); + let copied = core::cmp::min(height, column.len()); + + extended.extend_from_slice(&column[..copied]); extended.resize(height, BlsScalar::zero()); + extended } @@ -304,7 +268,7 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result { BlsScalar::from_bytes(&scalar_size_chunk).map_err(|_| Error::CellLengthExceeded) } -fn make_dims(bd: &BlockDimensions) -> Result { +fn make_dims(bd: BlockDimensions) -> Result { Dimensions::new_from(bd.rows.0, bd.cols.0).ok_or(Error::ZeroDimension) } @@ -316,14 +280,14 @@ fn make_dims(bd: &BlockDimensions) -> Result { /// instead of being in first k chunks of a column. /// /// `block` should be the raw data of a matrix, stored in row-major orientation. -#[cfg(feature = "std")] +#[cfg(feature = "parallel")] pub fn par_extend_data_matrix( block_dims: BlockDimensions, block: &[u8], metrics: &M, -) -> Result, Error> { +) -> Result, Error> { let start = Instant::now(); - let dims = make_dims(&block_dims)?; + let dims = make_dims(block_dims)?; let (ext_rows, _): (usize, usize) = dims .extend(ROW_EXTENSION, COL_EXTENSION) .ok_or(Error::InvalidDimensionExtension)? @@ -333,45 +297,40 @@ pub fn par_extend_data_matrix( // simple length with mod check would work... 
let chunk_size: usize = block_dims.chunk_size.try_into()?; - #[cfg(feature = "parallel")] let chunks = block.par_chunks_exact(chunk_size); - #[cfg(not(feature = "parallel"))] - let chunks = block.chunks_exact(chunk_size); - ensure!(chunks.remainder().is_empty(), Error::DimensionsMismatch); - #[cfg(feature = "parallel")] - let chunks = chunks.into_par_iter(); - let scalars = chunks + .into_par_iter() .map(to_bls_scalar) .collect::, Error>>()?; - // The data is currently row-major, so we need to put it into column-major - let col_wise_scalars = DMatrix::from_row_iterator(rows, cols, scalars.into_iter()); - - let mut chunk_elements = col_wise_scalars - .column_iter() - .flat_map(|column| extend_column_with_zeros(column.iter(), ext_rows)) - .collect::>(); - let extended_column_eval_domain = EvaluationDomain::new(ext_rows)?; let column_eval_domain = EvaluationDomain::new(rows)?; // rows_num = column_length - #[cfg(feature = "parallel")] - let chunk_elements_iter = chunk_elements.par_chunks_exact_mut(ext_rows); - #[cfg(not(feature = "parallel"))] - let chunk_elements_iter = chunk_elements.chunks_exact_mut(ext_rows); + // The data is currently row-major, so we need to put it into column-major + let col_wise_scalars = DMatrix::from_row_iterator(rows, cols, scalars.into_iter()); - chunk_elements_iter.for_each(|col| { - // (i)fft functions input parameter slice size has to be a power of 2, otherwise it panics - column_eval_domain.ifft_slice(&mut col[0..rows]); - extended_column_eval_domain.fft_slice(col); - }); + let ext_columns_wise = (0..cols) + .into_par_iter() + .flat_map(|col| { + let col_view = col_wise_scalars.column(col).data.into_slice(); + debug_assert_eq!(col_view.len(), rows); + let mut ext_col = extend_column_with_zeros(col_view, ext_rows); + // (i)fft functions input parameter slice size has to be a power of 2, otherwise it panics + column_eval_domain.ifft_slice(&mut ext_col[0..rows]); + extended_column_eval_domain.fft_slice(ext_col.as_mut_slice()); + 
debug_assert_eq!(ext_col.len(), ext_rows); + ext_col + }) + .collect::>(); + debug_assert_eq!(Some(ext_columns_wise.len()), cols.checked_mul(ext_rows)); + + let ext_matrix = DMatrix::from_iterator(ext_rows, cols, ext_columns_wise.into_iter()); metrics.extended_block_time(start.elapsed()); - Ok(chunk_elements) + Ok(ext_matrix) } //TODO cache extended data matrix @@ -379,11 +338,11 @@ pub fn par_extend_data_matrix( pub fn build_proof( public_params: &kzg10::PublicParameters, block_dims: BlockDimensions, - ext_data_matrix: &[BlsScalar], + ext_data_matrix: &DMatrix, cells: &[Cell], metrics: &M, ) -> Result, Error> { - let dims = make_dims(&block_dims)?; + let dims = make_dims(block_dims)?; let (ext_rows, ext_cols): (usize, usize) = dims .extend(ROW_EXTENSION, COL_EXTENSION) .ok_or(Error::InvalidDimensionExtension)? @@ -407,11 +366,11 @@ pub fn build_proof( let total_start = Instant::now(); // attempt to parallelly compute proof for all requested cells - #[cfg(feature = "parallel")] - let cell_iter = cells - .into_par_iter() - .zip(result_bytes.par_chunks_exact_mut(SPROOF_SIZE)); - #[cfg(not(feature = "parallel"))] + // #[cfg(feature = "parallel")] + // let cell_iter = cells + // .into_par_iter() + // .zip(result_bytes.par_chunks_exact_mut(SPROOF_SIZE)); + // #[cfg(not(feature = "parallel"))] let cell_iter = cells.iter().zip(result_bytes.chunks_exact_mut(SPROOF_SIZE)); cell_iter.for_each(|(cell, res)| { @@ -431,11 +390,6 @@ pub fn build_proof( .map(|j| ext_data_matrix[r_index.saturating_add(j.saturating_mul(ext_rows))]) .collect::>(); - //let row = ext_data_matrix_cm - // .iter_row(r_index) - // .expect("Already checked row index") - // .map(Clone::clone) - // .collect::>(); // row has to be a power of 2, otherwise interpolate() function panics TODO: cache evaluations let poly = Evaluations::from_vec_and_domain(row, row_eval_domain).interpolate(); let witness = prover_key.compute_single_witness(&poly, &row_dom_x_pts[c_index]); @@ -467,25 +421,26 @@ pub fn 
par_build_commitments( extrinsics_by_key: &[AppExtrinsic], rng_seed: Seed, metrics: &M, -) -> Result<(XtsLayout, Vec, BlockDimensions, Vec), Error> { +) -> Result<(XtsLayout, Vec, BlockDimensions, DMatrix), Error> { let start = Instant::now(); // generate data matrix first let (tx_layout, block, block_dims) = flatten_and_pad_block(rows, cols, chunk_size, extrinsics_by_key, rng_seed)?; - metrics.block_dims_and_size(&block_dims, block.len().saturated_into()); + metrics.block_dims_and_size(block_dims, block.len().saturated_into()); + + let ext_matrix = par_extend_data_matrix(block_dims, &block, metrics)?; - let ext_data_matrix = par_extend_data_matrix(block_dims, &block, metrics)?; - let extended_rows_num = block_dims - .rows - .0 - .checked_mul(EXTENSION_FACTOR) + let block_dims_cols = usize::try_from(block_dims.cols.0)?; + let block_dims_rows = usize::try_from(block_dims.rows.0)?; + let extended_rows = block_dims_rows + .checked_mul(EXTENSION_FACTOR as usize) .ok_or(Error::BlockTooBig)?; metrics.preparation_block_time(start.elapsed()); - let public_params = testnet::public_params(block_dims.cols.as_usize()); + let public_params = testnet::public_params(block_dims_cols); if log::log_enabled!(target: LOG_TARGET, log::Level::Debug) { let raw_pp = public_params.to_raw_var_bytes(); @@ -499,68 +454,42 @@ pub fn par_build_commitments( ); } - let (prover_key, _) = public_params - .trim(block_dims.cols.as_usize()) - .map_err(Error::from)?; - let row_eval_domain = EvaluationDomain::new(block_dims.cols.as_usize()).map_err(Error::from)?; - - let mut result_bytes: Vec = Vec::new(); - let result_bytes_len = extended_rows_num - .checked_mul(PROVER_KEY_SIZE) - .ok_or(Error::BlockTooBig)? 
as usize; - result_bytes.reserve_exact(result_bytes_len); - unsafe { - result_bytes.set_len(result_bytes_len); - } + let (prover_key, _) = public_params.trim(block_dims_cols)?; + let row_eval_domain = EvaluationDomain::new(block_dims_cols)?; let start = Instant::now(); - - (0..extended_rows_num) + let commitments = (0..extended_rows) .into_par_iter() - .map(|i| { - row( - &ext_data_matrix, - i as usize, - block_dims.cols, - BlockLengthRows(extended_rows_num), - ) + .map(|row_idx| { + let ext_row = get_row(&ext_matrix, row_idx); + commit(&prover_key, row_eval_domain, ext_row) }) - .zip(result_bytes.par_chunks_exact_mut(PROVER_KEY_SIZE as usize)) - .map(|(row, res)| commit(&prover_key, row_eval_domain, row, res)) - .collect::>()?; + .collect::>(); + let commitments = commitments.into_iter().collect::, _>>()?; + let commitments_bytes = commitments + .into_par_iter() + .flat_map(|c| c.to_bytes()) + .collect(); metrics.commitment_build_time(start.elapsed()); - Ok((tx_layout, result_bytes, block_dims, ext_data_matrix)) + Ok((tx_layout, commitments_bytes, block_dims, ext_matrix)) } #[cfg(feature = "std")] -fn row( - matrix: &[BlsScalar], - i: usize, - cols: BlockLengthColumns, - extended_rows: BlockLengthRows, -) -> Vec { - let mut row = Vec::with_capacity(cols.as_usize()); - (0..cols.as_usize().saturating_mul(extended_rows.as_usize())) - .step_by(extended_rows.as_usize()) - .for_each(|idx| row.push(matrix[i.saturating_add(idx)])); - - row +fn get_row(m: &DMatrix, row_idx: usize) -> Vec { + m.row(row_idx).iter().cloned().collect() } #[cfg(feature = "std")] -// Generate a commitment and store it into result +// Generate a commitment fn commit( prover_key: &CommitKey, domain: EvaluationDomain, row: Vec, - result: &mut [u8], -) -> Result<(), Error> { +) -> Result { let poly = Evaluations::from_vec_and_domain(row, domain).interpolate(); - let commitment = prover_key.commit(&poly).map_err(Error::from)?; - result.copy_from_slice(&commitment.to_bytes()); - Ok(()) + 
prover_key.commit(&poly).map_err(Error::from) } #[cfg(test)] @@ -573,8 +502,9 @@ mod tests { use hex_literal::hex; use kate_recovery::{ com::{ - app_specific_cells, decode_app_extrinsics, reconstruct_app_extrinsics, - reconstruct_extrinsics, unflatten_padded_data, ReconstructionError, + app_specific_cells, app_specific_rows, decode_app_extrinsics, + reconstruct_app_extrinsics, reconstruct_extrinsics, unflatten_padded_data, + ReconstructionError, }, commitments, config, data::{self, DataCell}, @@ -624,6 +554,26 @@ mod tests { Ok(AppDataIndex { size, index }) } + fn scalars_to_app_rows( + app_id: u32, + index: &AppDataIndex, + dimensions: Dimensions, + matrix: &DMatrix, + ) -> Vec>> { + let app_rows = app_specific_rows(index, dimensions, app_id); + dimensions + .iter_extended_rows() + .map(|i| { + app_rows.iter().find(|&&row| row == i).map(|_| { + let row = get_row(&matrix, i as usize); + row.iter() + .flat_map(BlsScalar::to_bytes) + .collect::>() + }) + }) + .collect() + } + #[test_case(0, 256, 256 => BlockDimensions::new(1, 4, 32) ; "block size zero")] #[test_case(11, 256, 256 => BlockDimensions::new(1, 4, 32) ; "below minimum block size")] #[test_case(300, 256, 256 => BlockDimensions::new(1, 16, 32) ; "regular case")] @@ -644,23 +594,23 @@ mod tests { // newapi done #[test] fn test_extend_data_matrix() { - let expected_result = [ - // Row 0 + let expected = [ + // Col 0 hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), hex!("c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016"), - // Row 1 + // Col 1 hex!("1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00"), hex!("db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e"), hex!("9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900"), 
hex!("e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016"), - // Row 2 + // Col 2 hex!("3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00"), hex!("fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e"), hex!("babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800"), hex!("ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16"), - // Row 3 + // Col 3 hex!("5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00"), hex!("197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e"), hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), @@ -670,6 +620,7 @@ mod tests { .map(BlsScalar::from_bytes) .collect::, _>>() .expect("Invalid Expected result"); + let expected = DMatrix::from_iterator(4, 4, expected.into_iter()); let block_dims = BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), 32); let block = (0..=247) @@ -677,8 +628,8 @@ mod tests { .chunks_exact(DATA_CHUNK_SIZE) .flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), block_dims.chunk_size)) .collect::>(); - let res = par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {}); - assert_eq!(res.unwrap(), expected_result); + let ext_matrix = par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {}).unwrap(); + assert_eq!(ext_matrix, expected); } #[test_case( 1..=29 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d8000" ; "chunk more than 3 values shorter")] @@ -752,45 +703,48 @@ mod tests { } fn sample_cells_from_matrix( - matrix: &[BlsScalar], - dimensions: &BlockDimensions, + matrix: &DMatrix, columns: Option<&[u16]>, ) -> Vec { - fn random_indexes(length: usize, seed: Seed) -> Vec { + fn random_indexes(length: usize, seed: Seed) -> Vec { // choose random len/2 (unique) indexes let mut idx = (0..length).collect::>(); - let mut chosen_idx = Vec::::new(); + let mut chosen_idx = Vec::::new(); let mut rng = ChaChaRng::from_seed(seed); for _ in 0..length / 2 { let i = 
rng.gen_range(0..idx.len()); let v = idx.remove(i); - chosen_idx.push(v as u16); + chosen_idx.push(v); } chosen_idx } - const RNG_SEED: Seed = [42u8; 32]; - matrix - .chunks_exact(dimensions.rows.as_usize().saturating_mul(2)) - .enumerate() - .map(|(col, e)| (col as u16, e)) - .flat_map(|(col, e)| { - random_indexes(e.len(), RNG_SEED) - .into_iter() - .map(|row| DataCell { - position: Position { - row: row as u32, - col, - }, - data: e[row as usize].to_bytes(), - }) - .filter(|cell| { - columns.is_none() || columns.unwrap_or(&[]).contains(&cell.position.col) + + let (rows, cols) = matrix.shape(); + let cols = u16::try_from(cols).unwrap(); + let indexes = random_indexes(rows, RNG_SEED); + + (0u16..cols) + .filter(|col_idx| match &columns { + None => true, + Some(allowed) => allowed.contains(&col_idx), + }) + .flat_map(|col_idx| { + let col_view = matrix.column(col_idx.into()).data.into_slice(); + + indexes + .iter() + .map(|row_idx| { + let row_pos = u32::try_from(*row_idx).unwrap(); + let position = Position::new(row_pos, col_idx); + debug_assert!(*row_idx < col_view.len()); + let data = col_view[*row_idx].to_bytes(); + DataCell::new(position, data) }) .collect::>() }) - .collect::>() + .collect() } fn app_extrinsic_strategy() -> impl Strategy { @@ -839,21 +793,17 @@ mod tests { #[ignore] // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { - - // let test_file = std::fs::OpenOptions::new().create(true).append(true).open("/tmp/test.json").unwrap(); - // serde_json::to_writer_pretty(test_file, &xts); - let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &metrics).unwrap(); - let columns = sample_cells_from_matrix(&matrix, &dims, None); + let columns = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); - let reconstructed = 
reconstruct_extrinsics(&index, &extended_dims, columns).unwrap(); + let reconstructed = reconstruct_extrinsics(&index, extended_dims, columns).unwrap(); for (result, xt) in reconstructed.iter().zip(xts) { - prop_assert_eq!(result.0, *xt.app_id); - prop_assert_eq!(result.1[0].as_slice(), &xt.data); + prop_assert_eq!(result.0, *xt.app_id); + prop_assert_eq!(result.1[0].as_slice(), &xt.data); } let public_params = testnet::public_params(dims.cols.as_usize()); @@ -869,7 +819,7 @@ mod tests { let extended_dims = dims.try_into().unwrap(); let commitment = commitments::from_slice(&commitments).unwrap()[row]; - let verification = proof::verify(&public_params, &extended_dims, &commitment, &cell); + let verification = proof::verify(&public_params, extended_dims, &commitment, &cell); prop_assert!(verification.is_ok()); prop_assert!(verification.unwrap()); } @@ -888,8 +838,8 @@ mod tests { let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { - let rows = &scalars_to_app_rows(xt.app_id.0, &index, &extended_dims, &matrix); - let (_, missing) = commitments::verify_equality(&public_params, &commitments, rows, &index, &extended_dims, xt.app_id.0).unwrap(); + let rows = scalars_to_app_rows(xt.app_id.0, &index, extended_dims, &matrix); + let (_, missing) = commitments::verify_equality(&public_params, &commitments, rows.as_slice(), &index, extended_dims, xt.app_id.0).unwrap(); prop_assert!(missing.is_empty()); } } @@ -907,10 +857,10 @@ mod tests { let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { - let mut rows = scalars_to_app_rows(xt.app_id.0, &index, &extended_dims, &matrix); + let mut rows = scalars_to_app_rows(xt.app_id.0, &index, extended_dims, &matrix); let app_row_index = rows.iter().position(Option::is_some).unwrap(); rows.remove(app_row_index); - let (_, missing) = commitments::verify_equality(&public_params, &commitments, 
&rows,&index,&extended_dims,xt.app_id.0).unwrap(); + let (_, missing) = commitments::verify_equality(&public_params, &commitments, &rows,&index, extended_dims,xt.app_id.0).unwrap(); prop_assert!(!missing.is_empty()); } } @@ -924,10 +874,7 @@ mod tests { let block_cols = BlockLengthColumns(256); let chunk_size = 32; let original_data = br#"test"#; - let hash: Seed = [ - 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, - 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, - ]; + let hash: Seed = hex!("4c29ae91bb0c61204b6f95d1f3c3a50aa6ac2f29da18d4423e05bbbf81056903"); let (_, commitments, dimensions, _) = par_build_commitments( block_rows, @@ -953,7 +900,7 @@ mod tests { #[test] // newapi wip - fn test_reconstruct_app_extrinsics_with_app_id() { + fn test_reconstruct_app_extrinsics_with_app_id() -> Result<(), Error> { let app_id_1_data = br#""This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -977,34 +924,28 @@ get erasure coded to ensure redundancy."#; let chunk_size = 32; - let (layout, data, dims) = flatten_and_pad_block( - BlockLengthRows(32), - BlockLengthColumns(4), - chunk_size, - &xts, - hash, - ) - .unwrap(); - let coded: Vec = - par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); + let (layout, data, dims) = + flatten_and_pad_block(32.into(), 4.into(), chunk_size, &xts, hash)?; + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; - let cols_1 = sample_cells_from_matrix(&coded, &dims, Some(&[0, 1, 2, 3])); + let cols_1 = sample_cells_from_matrix(&matrix, Some(&[0, 1, 2, 3])); - let extended_dims = dims.try_into().unwrap(); + let extended_dims = dims.try_into()?; let index = app_data_index_try_from_layout(layout).unwrap(); - let res_1 = reconstruct_app_extrinsics(&index, &extended_dims, cols_1, 1).unwrap(); + let res_1 = reconstruct_app_extrinsics(&index, extended_dims, cols_1, 
1).unwrap(); assert_eq!(res_1[0], app_id_1_data); - let cols_2 = sample_cells_from_matrix(&coded, &dims, Some(&[0, 2, 3])); + let cols_2 = sample_cells_from_matrix(&matrix, Some(&[0, 2, 3])); - let res_2 = reconstruct_app_extrinsics(&index, &extended_dims, cols_2, 2).unwrap(); + let res_2 = reconstruct_app_extrinsics(&index, extended_dims, cols_2, 2).unwrap(); assert_eq!(res_2[0], app_id_2_data); + Ok(()) } #[test] // newapi done - fn test_decode_app_extrinsics() { + fn test_decode_app_extrinsics() -> Result<(), Error> { let app_id_1_data = br#""This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -1029,23 +970,20 @@ get erasure coded to ensure redundancy."#; chunk_size, &xts, hash, - ) - .unwrap(); - let coded = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); - - let dimensions: Dimensions = dims.try_into().unwrap(); - let extended_matrix = coded - .chunks(dimensions.extended_rows() as usize) - .collect::>(); + )?; + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; + let dimensions: Dimensions = dims.try_into()?; let index = app_data_index_try_from_layout(layout).unwrap(); for xt in xts { - let positions = app_specific_cells(&index, &dimensions, xt.app_id.0).unwrap(); + let positions = app_specific_cells(&index, dimensions, xt.app_id.0).unwrap(); let cells = positions - .iter() - .map(|position| DataCell { - position: position.clone(), - data: extended_matrix[position.col as usize][position.row as usize].to_bytes(), + .into_iter() + .map(|position| { + let col: usize = position.col.into(); + let row = usize::try_from(position.row).unwrap(); + let data = matrix.get((row, col)).map(BlsScalar::to_bytes).unwrap(); + DataCell::new(position, data) }) .collect::>(); let data = &decode_app_extrinsics(&index, dimensions, cells, xt.app_id.0).unwrap()[0]; @@ -1056,11 +994,12 @@ get erasure coded to ensure redundancy."#; 
decode_app_extrinsics(&index, dimensions, vec![], 0), Err(ReconstructionError::MissingCell { .. }) )); + Ok(()) } #[test] // newapi done - fn test_extend_mock_data() { + fn test_extend_mock_data() -> Result<(), Error> { let orig_data = br#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy. Let's see how this gets encoded and then reconstructed by sampling only some data."#; @@ -1074,27 +1013,25 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat chunk_size, &[AppExtrinsic::from(orig_data.to_vec())], hash, - ) - .unwrap(); + )?; - let coded: Vec = - par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; - let cols = sample_cells_from_matrix(&coded, &dims, None); + let cols = sample_cells_from_matrix(&matrix, None); - let extended_dims = dims.try_into().unwrap(); + let extended_dims = dims.try_into()?; let index = app_data_index_try_from_layout(layout).unwrap(); - let res = reconstruct_extrinsics(&index, &extended_dims, cols).unwrap(); + let res = reconstruct_extrinsics(&index, extended_dims, cols).unwrap(); let s = String::from_utf8_lossy(res[0].1[0].as_slice()); assert_eq!(res[0].1[0], orig_data); - eprintln!("Decoded: {}", s); + Ok(()) } #[test] // newapi done - fn test_multiple_extrinsics_for_same_app_id() { + fn test_multiple_extrinsics_for_same_app_id() -> Result<(), Error> { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; let xts = [ @@ -1116,20 +1053,19 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat chunk_size, &xts, hash, - ) - .unwrap(); + )?; - let coded: Vec = - par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; - let cols = sample_cells_from_matrix(&coded, &dims, None); + let cols = sample_cells_from_matrix(&matrix, 
None); let extended_dims = dims.try_into().unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); - let res = reconstruct_extrinsics(&index, &extended_dims, cols).unwrap(); + let res = reconstruct_extrinsics(&index, extended_dims, cols).unwrap(); assert_eq!(res[0].1[0], xt1); assert_eq!(res[0].1[1], xt2); + Ok(()) } #[test] @@ -1261,13 +1197,15 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat .collect::>(); assert_eq!(row.len(), len); - let mut result_bytes: Vec = vec![0u8; config::COMMITMENT_SIZE]; println!("Row: {:?}", row); - commit(&prover_key, row_eval_domain, row.clone(), &mut result_bytes).unwrap(); - println!("Commitment: {result_bytes:?}"); + let commitment = commit(&prover_key, row_eval_domain, row.clone()) + .map(|com| <[u8; config::COMMITMENT_SIZE]>::try_from(com.to_bytes()).unwrap()) + .unwrap(); + println!("Commitment: {commitment:?}"); // We artificially extend the matrix by doubling values, this is not proper erasure coding. 
- let ext_m = row.into_iter().flat_map(|e| vec![e, e]).collect::>(); + let ext_m = + DMatrix::from_row_iterator(1, row.len() * 2, row.into_iter().flat_map(|e| vec![e, e])); let rows: u16 = len.try_into().expect("rows length should be valid `u16`"); let metrics = IgnoreMetrics {}; @@ -1294,13 +1232,12 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat assert_eq!(proof.len(), 80); - let commitment = result_bytes.clone().try_into().unwrap(); let dims = Dimensions::new(1, 4).unwrap(); let cell = data::Cell { position: Position { row: 0, col }, content: proof.try_into().unwrap(), }; - let verification = proof::verify(&public_params, &dims, &commitment, &cell); + let verification = proof::verify(&public_params, dims, &commitment, &cell); assert!(verification.is_ok()); assert!(verification.unwrap()) } diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 0ea42272..1c9b28a4 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -317,7 +317,7 @@ impl PolynomialGrid { srs: &M1NoPrecomp, cell: &Cell, eval_grid: &EvaluationGrid, - target_dims: &Dimensions, + target_dims: Dimensions, ) -> Result { let block = multiproof_block( cell.col.0 as usize, @@ -371,7 +371,7 @@ pub fn multiproof_block( x: usize, y: usize, grid: Dimensions, - target: &Dimensions, + target: Dimensions, ) -> Option { let (mp_rows, mp_cols): (usize, usize) = multiproof_dims(grid, target)?.into(); let (g_rows, g_cols): (usize, usize) = grid.into(); @@ -395,7 +395,7 @@ pub fn multiproof_block( /// Dimensions of the multiproof grid. These are guarenteed to cleanly divide `grid_dims`. /// `target_dims` must cleanly divide `grid_dims`. 
-pub fn multiproof_dims(grid: Dimensions, target: &Dimensions) -> Option { +pub fn multiproof_dims(grid: Dimensions, target: Dimensions) -> Option { let cols = min(grid.cols(), target.cols()); let rows = min(grid.rows(), target.rows()); if grid.cols().get() % cols != 0 || grid.rows().get() % rows != 0 { @@ -503,7 +503,7 @@ mod unit_tests { #[test_case(64, 0 => None)] #[test_case(0, 16 => None)] fn multiproof_max_grid_size(x: usize, y: usize) -> Option { - multiproof_block(x, y, GRID.clone(), &TARGET) + multiproof_block(x, y, GRID.clone(), TARGET) } #[test_case(256, 256, 64, 16 => Some((64, 16)))] @@ -521,7 +521,7 @@ mod unit_tests { let grid = unsafe { Dimensions::new_unchecked(grid_w, grid_h) }; let target = unsafe { Dimensions::new_unchecked(target_w, target_h) }; - multiproof_dims(grid, &target).map(Into::into) + multiproof_dims(grid, target).map(Into::into) } use proptest::prelude::*; diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index fe6ca34b..ee4e8e0e 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -103,7 +103,7 @@ proptest! { } // Need to provide the original dimensions here too let extended_dims = orig_dims.clone(); - let (_, missing) = verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); + let (_, missing) = verify_equality(&public_params, &commits, &app_rows, &index, extended_dims, xt.app_id.0).unwrap(); prop_assert!(missing.is_empty()); } } @@ -132,7 +132,7 @@ proptest! 
{ row_elems.remove(first_index); let extended_dims = orig_dims.transpose(); - let (_, missing) = verify_equality(&public_params, &commits, &row_elems,&index,&extended_dims,xt.app_id.0).unwrap(); + let (_, missing) = verify_equality(&public_params, &commits, &row_elems,&index,extended_dims,xt.app_id.0).unwrap(); prop_assert!(!missing.is_empty()); } } @@ -184,7 +184,7 @@ fn test_zero_deg_poly_commit(row_values: Vec) { }; let verification = kate_recovery::proof::verify( &kate_recovery::testnet::public_params(256), - &dims, + dims, &commitment, &cell, ); diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index 1bade603..bf16e19d 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -138,7 +138,7 @@ get erasure coded to ensure redundancy."#; let index = app_data_index_from_lookup(&grid.lookup); let bdims = grid.dims(); for xt in &xts { - let positions = app_specific_cells(&index, &bdims, xt.app_id.0).unwrap(); + let positions = app_specific_cells(&index, bdims, xt.app_id.0).unwrap(); let cells = positions .iter() .map(|pos| DataCell { @@ -179,7 +179,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let bdims = grid.dims(); let index = app_data_index_from_lookup(&grid.lookup); - let res = reconstruct_extrinsics(&index, &bdims, cols).unwrap(); + let res = reconstruct_extrinsics(&index, bdims, cols).unwrap(); let s = String::from_utf8_lossy(res[0].1[0].as_slice()); assert_eq!(s, orig_data); diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 553a8ee7..2cbbfa8a 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -41,7 +41,7 @@ fn test_multiple_extrinsics_for_same_app_id() { let index = app_data_index_from_lookup(&ev.lookup); let (rows, cols): (u16, u16) = ev.dims().into(); let bdims = Dimensions::new_from(rows, cols).unwrap(); - let res = 
reconstruct_extrinsics(&index, &bdims, cells).unwrap(); + let res = reconstruct_extrinsics(&index, bdims, cells).unwrap(); assert_eq!(res[0].1[0], xt1); assert_eq!(res[0].1[1], xt2); @@ -60,7 +60,7 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let cells = sample_cells(&grid, None); let index = app_data_index_from_lookup(&grid.lookup); let bdims = Dimensions::new_from(rows, cols).unwrap(); - let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); + let reconstructed = reconstruct_extrinsics(&index, bdims, cells).unwrap(); for (result, xt) in reconstructed.iter().zip(exts) { prop_assert_eq!(result.0, *xt.app_id); prop_assert_eq!(result.1[0].as_slice(), &xt.data); @@ -82,7 +82,7 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { content[48..].copy_from_slice(&grid.get(y, x).unwrap().to_bytes().unwrap()[..]); let dcell = DCell{position: Position { row: y as u32, col: x as u16 }, content }; - let verification = kate_recovery::proof::verify(&kate_recovery::testnet::public_params(256), &bdims, &commitments[y].to_bytes().unwrap(), &dcell); + let verification = kate_recovery::proof::verify(&kate_recovery::testnet::public_params(256), bdims, &commitments[y].to_bytes().unwrap(), &dcell); prop_assert!(verification.is_ok()); prop_assert!(verification.unwrap()); } @@ -122,11 +122,11 @@ get erasure coded to ensure redundancy."#; let index = app_data_index_from_lookup(&grid.lookup); let bdims = grid.dims(); - let res_1 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_1, 1).unwrap(); + let res_1 = kate_recovery::com::reconstruct_app_extrinsics(&index, bdims, cols_1, 1).unwrap(); assert_eq!(res_1[0], app_id_1_data); let cols_2 = sample_cells(&grid, Some(vec![0, 2, 3])); - let res_2 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_2, 2).unwrap(); + let res_2 = kate_recovery::com::reconstruct_app_extrinsics(&index, bdims, cols_2, 2).unwrap(); assert_eq!(res_2[0], 
app_id_2_data); } diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 33ad26ff..1e00fef7 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -5,7 +5,6 @@ use core::{convert::TryInto, num::TryFromIntError}; use da_types::{BlockLengthColumns, BlockLengthRows}; #[cfg(feature = "std")] pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsScalar}; -#[cfg(feature = "std")] use kate_recovery::matrix::Dimensions; use sp_arithmetic::traits::SaturatedConversion; use static_assertions::const_assert_ne; diff --git a/kate/src/metrics.rs b/kate/src/metrics.rs index ea3e74a9..eefd2a0f 100644 --- a/kate/src/metrics.rs +++ b/kate/src/metrics.rs @@ -7,7 +7,7 @@ pub trait Metrics { fn preparation_block_time(&self, elapsed: Duration); fn commitment_build_time(&self, elapsed: Duration); fn proof_build_time(&self, elapsed: Duration, cells: u32); - fn block_dims_and_size(&self, block_dims: &BlockDimensions, block_len: u32); + fn block_dims_and_size(&self, block_dims: BlockDimensions, block_len: u32); } /// Adapter to ignore any measurements. 
@@ -20,5 +20,5 @@ impl Metrics for IgnoreMetrics { fn preparation_block_time(&self, _: Duration) {} fn commitment_build_time(&self, _: Duration) {} fn proof_build_time(&self, _: Duration, _: u32) {} - fn block_dims_and_size(&self, _: &BlockDimensions, _: u32) {} + fn block_dims_and_size(&self, _: BlockDimensions, _: u32) {} } From d0074bf98505010868b1aa95b5fd01c615efc617 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Wed, 5 Jul 2023 14:07:49 +0200 Subject: [PATCH 64/87] Refactor `avail-core` add `runtime` feature --- Cargo.lock | 1064 ++++------------- Cargo.toml | 18 +- {primitives/avail => core}/Cargo.toml | 43 +- core/src/app_extrinsic.rs | 92 ++ .../src/asdr.rs | 17 +- core/src/bench_randomness.rs | 20 + core/src/constants.rs | 28 + {primitives/types => core}/src/data_lookup.rs | 18 +- {primitives/avail => core}/src/data_proof.rs | 4 +- .../src/header/extension/mod.rs | 7 +- .../avail => core}/src/header/extension/v1.rs | 2 +- .../avail => core}/src/header/extension/v2.rs | 2 +- .../src/header/extension/v_test.rs | 0 {primitives/avail => core}/src/header/mod.rs | 15 +- .../avail => core}/src/kate_commitment.rs | 0 core/src/lib.rs | 150 +++ .../avail => core}/src/opaque_extrinsic.rs | 4 +- core/src/sha2.rs | 48 + core/src/traits.rs | 31 + core/src/traits/extended_header.rs | 22 + .../src => core/src/traits}/get_app_id.rs | 4 +- deny.toml | 290 +++++ kate/Cargo.toml | 17 +- kate/examples/multiproof_verification.rs | 7 +- kate/recovery/Cargo.toml | 15 +- kate/recovery/src/com.rs | 31 +- kate/recovery/src/commitments.rs | 14 +- kate/recovery/src/proof.rs | 3 + kate/recovery/src/testnet.rs | 4 +- kate/src/com.rs | 277 ++--- kate/src/gridgen/mod.rs | 26 +- kate/src/gridgen/tests/commitments.rs | 13 +- kate/src/gridgen/tests/formatting.rs | 35 +- kate/src/gridgen/tests/mod.rs | 4 +- kate/src/gridgen/tests/reconstruction.rs | 31 +- kate/src/lib.rs | 48 +- primitives/avail/src/asdr.rs | 36 - primitives/avail/src/lib.rs | 81 -- primitives/avail/src/sha2.rs | 40 - 
primitives/avail/src/traits.rs | 76 -- primitives/nomad/signature/src/signature.rs | 4 +- primitives/types/Cargo.toml | 32 - primitives/types/src/lib.rs | 174 --- 43 files changed, 1248 insertions(+), 1599 deletions(-) rename {primitives/avail => core}/Cargo.toml (52%) create mode 100644 core/src/app_extrinsic.rs rename primitives/avail/src/asdr/app_unchecked_extrinsic.rs => core/src/asdr.rs (98%) create mode 100644 core/src/bench_randomness.rs create mode 100644 core/src/constants.rs rename {primitives/types => core}/src/data_lookup.rs (91%) rename {primitives/avail => core}/src/data_proof.rs (99%) rename {primitives/avail => core}/src/header/extension/mod.rs (95%) rename {primitives/avail => core}/src/header/extension/v1.rs (92%) rename {primitives/avail => core}/src/header/extension/v2.rs (93%) rename {primitives/avail => core}/src/header/extension/v_test.rs (100%) rename {primitives/avail => core}/src/header/mod.rs (98%) rename {primitives/avail => core}/src/kate_commitment.rs (100%) create mode 100644 core/src/lib.rs rename {primitives/avail => core}/src/opaque_extrinsic.rs (95%) create mode 100644 core/src/sha2.rs create mode 100644 core/src/traits.rs create mode 100644 core/src/traits/extended_header.rs rename {primitives/types/src => core/src/traits}/get_app_id.rs (93%) create mode 100644 deny.toml delete mode 100644 primitives/avail/src/asdr.rs delete mode 100644 primitives/avail/src/lib.rs delete mode 100644 primitives/avail/src/sha2.rs delete mode 100644 primitives/avail/src/traits.rs delete mode 100644 primitives/types/Cargo.toml delete mode 100644 primitives/types/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index a6ff2b80..92fb1764 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,11 +23,11 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = 
"f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" dependencies = [ - "gimli 0.27.2", + "gimli 0.27.3", ] [[package]] @@ -36,17 +36,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" -[[package]] -name = "aes" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433cfd6710c9986c576a25ca913c39d66a6474107b406f34f91d4a8923395241" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - [[package]] name = "ahash" version = "0.4.7" @@ -59,7 +48,7 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -77,13 +66,19 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" dependencies = [ "memchr", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + [[package]] name = "android_system_properties" version = "0.1.5" @@ -267,31 +262,19 @@ checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "arrayvec" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "async-trait" -version = "0.1.68" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.16", -] - -[[package]] -name = "auto_impl" -version = "1.1.0" +version = "0.1.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fee3da8ef1276b0bee5dd1c7258010d8fffd31801447323115a25560e1327b89" +checksum = "79fa67157abdfd688a259b6648808757db9347af834624f27ec646da976aee5d" dependencies = [ - "proc-macro-error", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.23", ] [[package]] @@ -300,18 +283,43 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "avail-core" +version = "0.5.0" +dependencies = [ + "beefy-merkle-tree", + "derive_more", + "frame-support", + "hash256-std-hasher", + "hex", + "hex-literal", + "log", + "parity-scale-codec", + "scale-info", + "serde", + "serde_json", + "sp-arithmetic", + "sp-core", + "sp-runtime", + "sp-runtime-interface", + "sp-std", + "sp-trie", + "test-case", + "thiserror-no-std", +] + [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ - "addr2line 0.19.0", + "addr2line 0.20.0", "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.30.3", + "object 0.31.1", "rustc-demangle", ] @@ -321,34 +329,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" -[[package]] -name = "base58" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" - [[package]] name = "base58" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581" -[[package]] -name = "base58check" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ee2fe4c9a0c84515f136aaae2466744a721af6d63339c18689d9e995d74d99b" -dependencies = [ - "base58 0.1.0", - "sha2 0.8.2", -] - -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - [[package]] name = "base64" version = "0.13.1" @@ -361,12 +347,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" -[[package]] -name = "bech32" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dabbe35f96fb9507f7330793dc490461b2962659ac5d427181e451a623751d1" - [[package]] name = "beefy-merkle-tree" version = "4.0.0-dev" @@ -408,14 +388,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] -name = "bitvec" -version = "0.17.4" +name = "bitflags" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" -dependencies = [ - "either", - "radium 0.3.0", -] +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" [[package]] name = "bitvec" @@ -424,7 +400,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ "funty", - "radium 0.7.0", + "radium", "tap", "wyz", ] @@ -488,12 
+464,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "bs58" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" - [[package]] name = "bumpalo" version = "3.13.0" @@ -529,9 +499,6 @@ name = "bytes" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" -dependencies = [ - "serde", -] [[package]] name = "cast" @@ -562,12 +529,12 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.24" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" dependencies = [ + "android-tzdata", "iana-time-zone", - "num-integer", "num-traits", "winapi", ] @@ -599,16 +566,6 @@ dependencies = [ "half", ] -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] - [[package]] name = "ckb-merkle-mountain-range" version = "0.5.2" @@ -620,21 +577,20 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.8" +version = "4.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9394150f5b4273a1763355bd1c2ec54cc5a2593f790587bcd6b2c947cfa9211" +checksum = "384e169cc618c613d5e3ca6404dda77a8685a63e08660dcc64abaf7da7cb0c7a" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.3.8" +version = "4.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a78fbdd3cc2914ddf37ba444114bc7765bbdcb55ec9cbe6fa054f0137400717" +checksum = 
"ef137bbe35aab78bdb468ccfba75a5f4d8321ae011d34063770780545176af2d" dependencies = [ "anstyle", - "bitflags", "clap_lex", ] @@ -644,68 +600,11 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" -[[package]] -name = "coins-bip32" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634c509653de24b439672164bbf56f5f582a2ab0e313d3b0f6af0b7345cf2560" -dependencies = [ - "bincode", - "bs58", - "coins-core", - "digest 0.10.7", - "getrandom 0.2.9", - "hmac 0.12.1", - "k256", - "lazy_static", - "serde", - "sha2 0.10.6", - "thiserror", -] - -[[package]] -name = "coins-bip39" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" -dependencies = [ - "bitvec 0.17.4", - "coins-bip32", - "getrandom 0.2.9", - "hex", - "hmac 0.12.1", - "pbkdf2 0.11.0", - "rand 0.8.5", - "sha2 0.10.6", - "thiserror", -] - -[[package]] -name = "coins-core" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c94090a6663f224feae66ab01e41a2555a8296ee07b5f20dab8888bdefc9f617" -dependencies = [ - "base58check", - "base64 0.12.3", - "bech32", - "blake2", - "digest 0.10.7", - "generic-array 0.14.7", - "hex", - "ripemd", - "serde", - "serde_derive", - "sha2 0.10.6", - "sha3", - "thiserror", -] - [[package]] name = "const-oid" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913" +checksum = "6340df57935414636969091153f35f68d9f00bbc8fb4a9c6054706c213e6c6bc" [[package]] name = "convert_case" @@ -713,15 +612,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" 
-[[package]] -name = "convert_case" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -739,9 +629,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +checksum = "03e69e28e9f7f77debdedbaafa2866e1de9ba56df55a8bd7cfc724c25a09987c" dependencies = [ "libc", ] @@ -823,22 +713,22 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.8.0", + "memoffset 0.9.0", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if", ] @@ -891,15 +781,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher", -] - [[package]] name = "curve25519-dalek" version = "2.1.3" @@ -926,44 +807,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "da-primitives" -version = "0.4.6" -dependencies = [ - "beefy-merkle-tree", - "da-types", - "frame-support", - "hash256-std-hasher", - 
"hex-literal", - "log", - "parity-scale-codec", - "scale-info", - "serde", - "serde_json", - "sp-core", - "sp-runtime", - "sp-runtime-interface", - "sp-std 5.0.0", - "sp-trie", - "test-case", - "thiserror-no-std", -] - -[[package]] -name = "da-types" -version = "0.4.4" -dependencies = [ - "derive_more", - "hex", - "num-traits", - "parity-scale-codec", - "scale-info", - "serde", - "sp-core", - "test-case", - "thiserror-no-std", -] - [[package]] name = "der" version = "0.6.1" @@ -1002,7 +845,7 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ - "convert_case 0.4.0", + "convert_case", "proc-macro2", "quote", "rustc_version", @@ -1187,7 +1030,6 @@ dependencies = [ "ff", "generic-array 0.14.7", "group", - "pkcs8", "rand_core 0.6.4", "sec1", "subtle", @@ -1200,6 +1042,12 @@ version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e48c92028aaa870e83d51c64e5d4e0b6981b360c522198c23959f219a4e1b15b" +[[package]] +name = "equivalent" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" + [[package]] name = "errno" version = "0.2.8" @@ -1232,123 +1080,6 @@ dependencies = [ "libc", ] -[[package]] -name = "eth-keystore" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" -dependencies = [ - "aes", - "ctr", - "digest 0.10.7", - "hex", - "hmac 0.12.1", - "pbkdf2 0.11.0", - "rand 0.8.5", - "scrypt", - "serde", - "serde_json", - "sha2 0.10.6", - "sha3", - "thiserror", - "uuid", -] - -[[package]] -name = "ethabi" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -dependencies = [ - 
"ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3", - "thiserror", - "uint", -] - -[[package]] -name = "ethbloom" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "primitive-types", - "scale-info", - "uint", -] - -[[package]] -name = "ethers-core" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" -dependencies = [ - "arrayvec 0.7.2", - "bytes", - "chrono", - "convert_case 0.6.0", - "elliptic-curve", - "ethabi", - "generic-array 0.14.7", - "hex", - "k256", - "open-fastrlp", - "proc-macro2", - "rand 0.8.5", - "rlp", - "rlp-derive", - "serde", - "serde_json", - "strum", - "syn 1.0.109", - "thiserror", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "ethers-signers" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f41ced186867f64773db2e55ffdd92959e094072a1d09a5e5e831d443204f98" -dependencies = [ - "async-trait", - "coins-bip32", - "coins-bip39", - "elliptic-curve", - "eth-keystore", - "ethers-core", - "hex", - "rand 0.8.5", - "sha2 0.10.6", - "thiserror", -] - [[package]] name = "fake-simd" version = "0.1.2" @@ -1415,7 +1146,7 @@ name = "frame-support" version = "4.0.0-dev" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" dependencies = [ - 
"bitflags", + "bitflags 1.3.2", "frame-metadata", "frame-support-procedural", "impl-trait-for-tuples", @@ -1436,7 +1167,7 @@ dependencies = [ "sp-runtime", "sp-staking", "sp-state-machine", - "sp-std 5.0.0", + "sp-std", "sp-tracing", "sp-weights", "tt-call", @@ -1541,7 +1272,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.23", ] [[package]] @@ -1606,15 +1337,13 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", - "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", - "wasm-bindgen", ] [[package]] @@ -1629,9 +1358,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "glob" @@ -1699,25 +1428,16 @@ dependencies = [ ] [[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "hermit-abi" -version = "0.2.6" +name = "hashbrown" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" [[package]] name = "hermit-abi" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" [[package]] name = "hex" @@ -1776,9 +1496,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.56" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -1806,15 +1526,6 @@ dependencies = [ "parity-scale-codec", ] -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - [[package]] name = "impl-serde" version = "0.4.0" @@ -1847,12 +1558,13 @@ dependencies = [ ] [[package]] -name = "inout" -version = "0.1.3" +name = "indexmap" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" dependencies = [ - "generic-array 0.14.7", + "equivalent", + "hashbrown 0.14.0", ] [[package]] @@ -1885,20 +1597,19 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi 0.3.1", + "hermit-abi", "libc", "windows-sys 0.48.0", ] [[package]] name = "is-terminal" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +checksum = "24fddda5af7e54bf7da53067d6e802dbcc381d0a8eef629df528e3ebf68755cb" dependencies = [ - "hermit-abi 0.3.1", - "io-lifetimes 1.0.11", - 
"rustix 0.37.20", + "hermit-abi", + "rustix 0.38.2", "windows-sys 0.48.0", ] @@ -1922,9 +1633,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" [[package]] name = "js-sys" @@ -1944,17 +1655,15 @@ dependencies = [ "cfg-if", "ecdsa", "elliptic-curve", - "sha2 0.10.6", - "sha3", + "sha2 0.10.7", ] [[package]] name = "kate" version = "0.7.1" dependencies = [ + "avail-core", "criterion", - "da-primitives", - "da-types", "derive_more", "dusk-bytes", "dusk-plonk", @@ -1983,7 +1692,7 @@ dependencies = [ name = "kate-recovery" version = "0.8.1" dependencies = [ - "da-types", + "avail-core", "derive_more", "dusk-bytes", "dusk-plonk", @@ -2016,9 +1725,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.144" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libm" @@ -2033,7 +1742,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95b09eff1b35ed3b33b877ced3a691fc7a481919c7e29c53c906226fcf55e2a1" dependencies = [ "arrayref", - "base64 0.13.1", + "base64", "digest 0.9.0", "hmac-drbg", "libsecp256k1-core", @@ -2086,11 +1795,17 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + [[package]] name = 
"lock_api" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg", "scopeguard", @@ -2098,12 +1813,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "lru" @@ -2159,9 +1871,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", ] @@ -2208,9 +1920,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.6.2" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" dependencies = [ "adler", ] @@ -2236,90 +1948,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" -[[package]] -name = "nomad-base" -version = "0.1.3" -dependencies = [ - "ethers-signers", - "frame-support", - "nomad-core", - "nomad-signature", - "once_cell", - "parity-scale-codec", - "primitive-types", - "scale-info", - "serde", - "sp-core", - "sp-io", - "sp-std 4.0.0", -] - -[[package]] -name = "nomad-core" -version = "0.1.3" -dependencies = [ - "async-trait", - "ethers-core", - 
"ethers-signers", - "frame-support", - "nomad-signature", - "parity-scale-codec", - "primitive-types", - "scale-info", - "serde", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std 4.0.0", - "tiny-keccak", -] - -[[package]] -name = "nomad-merkle" -version = "0.1.1" -dependencies = [ - "ethers-core", - "frame-support", - "hex-literal", - "nomad-core", - "parity-scale-codec", - "primitive-types", - "scale-info", - "serde", - "serde_json", - "sp-core", - "sp-io", - "sp-runtime", - "sp-std 4.0.0", - "static_assertions", - "thiserror-no-std", - "tiny-keccak", -] - -[[package]] -name = "nomad-signature" -version = "0.1.1" -dependencies = [ - "byte-slice-cast", - "elliptic-curve", - "ethers-core", - "frame-support", - "generic-array 0.14.7", - "hex", - "k256", - "parity-scale-codec", - "primitive-types", - "rlp", - "rlp-derive", - "scale-info", - "serde", - "sp-core", - "sp-io", - "sp-std 4.0.0", - "thiserror-no-std", - "tiny-keccak", -] - [[package]] name = "num-bigint" version = "0.4.3" @@ -2346,7 +1974,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" dependencies = [ - "arrayvec 0.7.2", + "arrayvec 0.7.4", "itoa", ] @@ -2384,11 +2012,11 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi", "libc", ] @@ -2400,24 +2028,24 @@ checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ "crc32fast", "hashbrown 0.12.3", - "indexmap", + "indexmap 1.9.3", "memchr", ] [[package]] name = "object" -version = "0.30.3" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "oorandom" @@ -2437,39 +2065,14 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" -[[package]] -name = "open-fastrlp" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" -dependencies = [ - "arrayvec 0.7.2", - "auto_impl", - "bytes", - "ethereum-types", - "open-fastrlp-derive", -] - -[[package]] -name = "open-fastrlp-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" -dependencies = [ - "bytes", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "parity-scale-codec" -version = "3.5.0" +version = "3.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ddb756ca205bd108aee3c62c6d3c994e1df84a59b9d6d4a5ea42ee1fd5a9a28" +checksum = "756d439303e94fae44f288ba881ad29670c65b0c4b0e05674ca81061bb65f2c5" dependencies = [ - "arrayvec 0.7.2", - "bitvec 1.0.1", + "arrayvec 0.7.4", + "bitvec", "byte-slice-cast", "bytes", "impl-trait-for-tuples", @@ -2479,9 +2082,9 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.1.4" +version = "3.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" 
+checksum = "9d884d78fcf214d70b1e239fcd1c6e5e95aa3be1881918da2e488cc946c7a476" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2507,33 +2110,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.7" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall", "smallvec", - "windows-sys 0.45.0", -] - -[[package]] -name = "password-hash" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" -dependencies = [ - "base64ct", - "rand_core 0.6.4", - "subtle", + "windows-targets", ] [[package]] name = "paste" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" +checksum = "b4b27ab7be369122c218afc2079489cdcb4b517c0a3fc386ff11e1fedfcc2b35" [[package]] name = "pbkdf2" @@ -2551,16 +2143,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ "digest 0.10.7", - "hmac 0.12.1", - "password-hash", - "sha2 0.10.6", ] [[package]] name = "pin-project-lite" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" +checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" [[package]] name = "pin-utils" @@ -2635,7 +2224,6 @@ checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" dependencies = [ "fixed-hash", "impl-codec", - "impl-rlp", "impl-serde", "scale-info", "uint", @@ -2651,35 
+2239,11 @@ dependencies = [ "toml_edit", ] -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - [[package]] name = "proc-macro2" -version = "1.0.58" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa1fb82fc0c281dd9671101b66b771ebbe1eaf967b96ac8740dcba4b70005ca8" +checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" dependencies = [ "unicode-ident", ] @@ -2691,7 +2255,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e35c06b98bf36aba164cc17cb25f7e232f5c4aeea73baa14b8a9f0d92dbfa65" dependencies = [ "bit-set", - "bitflags", + "bitflags 1.3.2", "byteorder", "lazy_static", "num-traits", @@ -2721,19 +2285,13 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.27" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f4f29d145265ec1c483c7c654450edde0bfe043d3938d6972630663356d9500" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ "proc-macro2", ] -[[package]] -name = "radium" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" - [[package]] name = "radium" version = "0.7.0" @@ -2799,7 +2357,7 @@ version = "0.6.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", ] [[package]] @@ -2848,49 +2406,40 @@ dependencies = [ "num_cpus", ] -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags", -] - [[package]] name = "redox_syscall" version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "ref-cast" -version = "1.0.16" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43faa91b1c8b36841ee70e97188a869d37ae21759da6846d4be66de5bf7b12c" +checksum = "1641819477c319ef452a075ac34a4be92eb9ba09f6841f62d594d50fdcf0bf6b" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.16" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d2275aab483050ab2a7364c1a46604865ee7d6906684e08db0f090acf74f9e7" +checksum = "68bf53dad9b6086826722cdc99140793afd9f62faa14a1ad07eb4f955e7a7216" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.23", ] [[package]] name = "regex" -version = "1.8.2" +version = "1.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1a59b5d8e97dee33696bf13c5ba8ab85341c002922fba050069326b9c498974" +checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" dependencies = [ "aho-corasick", "memchr", @@ -2929,36 +2478,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "rlp-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "rustc-demangle" version = "0.1.23" @@ -2992,7 +2511,7 @@ version = "0.35.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "727a1a6d65f786ec22df8a81ca3121107f235970dc1705ed681d3e6e8b9cd5f9" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno 0.2.8", "io-lifetimes 0.7.5", "libc", @@ -3002,11 +2521,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.20" +version = "0.37.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b96e891d04aa506a6d1f318d2771bcb1c7dfda84e126660ace067c9b474bb2c0" +checksum = "8818fa822adcc98b18fedbb3632a6a33213c070556b5aa7c4c8cc21cff565c4c" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno 0.3.1", "io-lifetimes 1.0.11", "libc", @@ -3015,10 +2534,17 @@ dependencies = [ ] [[package]] -name = "rustversion" -version = "1.0.12" +name = "rustix" +version = "0.38.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06" +checksum = "aabcb0461ebd01d6b79945797c27f8529082226cb630a9865a71870ff63532a4" +dependencies = [ + "bitflags 2.3.3", + "errno 0.3.1", + "libc", + "linux-raw-sys 0.4.3", + "windows-sys 0.48.0", +] [[package]] name = "rusty-fork" @@ -3034,9 +2560,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.13" +version = "1.0.14" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" [[package]] name = "safe_arch" @@ -3047,15 +2573,6 @@ dependencies = [ "bytemuck", ] -[[package]] -name = "salsa20" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" -dependencies = [ - "cipher", -] - [[package]] name = "same-file" version = "1.0.6" @@ -3067,11 +2584,11 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.7.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b569c32c806ec3abdf3b5869fb8bf1e0d275a7c1c9b0b05603d9464632649edf" +checksum = "35c0a159d0c45c12b20c5a844feb1fe4bea86e28f17b92a5f0c42193634d3782" dependencies = [ - "bitvec 1.0.1", + "bitvec", "cfg-if", "derive_more", "parity-scale-codec", @@ -3081,9 +2598,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "2.6.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53012eae69e5aa5c14671942a5dd47de59d4cdcff8532a6dd0e081faf1119482" +checksum = "912e55f6d20e0e80d63733872b40e1227c0bce1e1ab81ba67d696339bfd7fd29" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -3115,18 +2632,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" -[[package]] -name = "scrypt" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" -dependencies = [ - "hmac 0.12.1", - "pbkdf2 0.11.0", - "salsa20", - "sha2 0.10.6", -] - [[package]] name = "sec1" version = "0.3.0" @@ -3176,29 +2681,29 @@ checksum = 
"bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" [[package]] name = "serde" -version = "1.0.163" +version = "1.0.166" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.166" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.23", ] [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" dependencies = [ "itoa", "ryu", @@ -3232,9 +2737,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ "cfg-if", "cpufeatures", @@ -3310,7 +2815,7 @@ dependencies = [ "sp-core", "sp-runtime", "sp-state-machine", - "sp-std 5.0.0", + "sp-std", "sp-trie", "sp-version", "thiserror", @@ -3338,7 +2843,7 @@ dependencies = [ "serde", "sp-core", "sp-io", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ -3351,7 +2856,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 5.0.0", + "sp-std", "static_assertions", ] @@ -3369,7 +2874,7 @@ dependencies = [ "sp-io", "sp-mmr-primitives", "sp-runtime", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ 
-3378,8 +2883,8 @@ version = "7.0.0" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" dependencies = [ "array-bytes", - "base58 0.2.0", - "bitflags", + "base58", + "bitflags 1.3.2", "blake2", "dyn-clonable", "ed25519-zebra", @@ -3405,7 +2910,7 @@ dependencies = [ "sp-debug-derive", "sp-externalities", "sp-runtime-interface", - "sp-std 5.0.0", + "sp-std", "sp-storage", "ss58-registry", "substrate-bip39", @@ -3422,9 +2927,9 @@ dependencies = [ "blake2", "byteorder", "digest 0.10.7", - "sha2 0.10.6", + "sha2 0.10.7", "sha3", - "sp-std 5.0.0", + "sp-std", "twox-hash", ] @@ -3456,7 +2961,7 @@ source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.3 dependencies = [ "environmental", "parity-scale-codec", - "sp-std 5.0.0", + "sp-std", "sp-storage", ] @@ -3470,7 +2975,7 @@ dependencies = [ "parity-scale-codec", "sp-core", "sp-runtime", - "sp-std 5.0.0", + "sp-std", "thiserror", ] @@ -3492,7 +2997,7 @@ dependencies = [ "sp-keystore", "sp-runtime-interface", "sp-state-machine", - "sp-std 5.0.0", + "sp-std", "sp-tracing", "sp-trie", "tracing", @@ -3529,7 +3034,7 @@ dependencies = [ "sp-core", "sp-debug-derive", "sp-runtime", - "sp-std 5.0.0", + "sp-std", "thiserror", ] @@ -3561,7 +3066,7 @@ dependencies = [ "sp-arithmetic", "sp-core", "sp-io", - "sp-std 5.0.0", + "sp-std", "sp-weights", ] @@ -3576,7 +3081,7 @@ dependencies = [ "primitive-types", "sp-externalities", "sp-runtime-interface-proc-macro", - "sp-std 5.0.0", + "sp-std", "sp-storage", "sp-tracing", "sp-wasm-interface", @@ -3604,7 +3109,7 @@ dependencies = [ "scale-info", "sp-core", "sp-runtime", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ -3621,18 +3126,12 @@ dependencies = [ "sp-core", "sp-externalities", "sp-panic-handler", - "sp-std 5.0.0", + "sp-std", "sp-trie", "thiserror", "tracing", ] -[[package]] -name = "sp-std" -version = "4.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"14804d6069ee7a388240b665f17908d98386ffb0b5d39f89a4099fc7a2a4c03f" - [[package]] name = "sp-std" version = "5.0.0" @@ -3648,7 +3147,7 @@ dependencies = [ "ref-cast", "serde", "sp-debug-derive", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ -3657,7 +3156,7 @@ version = "6.0.0" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" dependencies = [ "parity-scale-codec", - "sp-std 5.0.0", + "sp-std", "tracing", "tracing-core", "tracing-subscriber", @@ -3679,7 +3178,7 @@ dependencies = [ "parking_lot", "scale-info", "sp-core", - "sp-std 5.0.0", + "sp-std", "thiserror", "tracing", "trie-db", @@ -3698,7 +3197,7 @@ dependencies = [ "serde", "sp-core-hashing-proc-macro", "sp-runtime", - "sp-std 5.0.0", + "sp-std", "sp-version-proc-macro", "thiserror", ] @@ -3722,7 +3221,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 5.0.0", + "sp-std", "wasmi", "wasmtime", ] @@ -3739,7 +3238,7 @@ dependencies = [ "sp-arithmetic", "sp-core", "sp-debug-derive", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ -3754,9 +3253,9 @@ dependencies = [ [[package]] name = "ss58-registry" -version = "1.40.0" +version = "1.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47a8ad42e5fc72d5b1eb104a5546937eaf39843499948bb666d6e93c62423b" +checksum = "bfc443bad666016e012538782d9e3006213a7db43e9fb1dda91657dc06a6fa08" dependencies = [ "Inflector", "num-format", @@ -3779,28 +3278,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" -[[package]] -name = "strum" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.24.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn 1.0.109", -] - [[package]] name = "substrate-bip39" version = "0.4.4" @@ -3833,9 +3310,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.16" +version = "2.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6f671d4b5ffdb8eadec19c0ae67fe2639df8684bd7bc4b83d986b8db549cf01" +checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737" dependencies = [ "proc-macro2", "quote", @@ -3850,9 +3327,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.7" +version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd1ba337640d60c3e96bc6f0638a939b9c9a7f2c316a1598c279828b3d1dc8c5" +checksum = "1b1c7f239eb94671427157bd93b3694320f3668d4e1eff08c7285366fd777fac" [[package]] name = "tempfile" @@ -3863,8 +3340,8 @@ dependencies = [ "autocfg", "cfg-if", "fastrand", - "redox_syscall 0.3.5", - "rustix 0.37.20", + "redox_syscall", + "rustix 0.37.22", "windows-sys 0.48.0", ] @@ -3883,22 +3360,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.40" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +checksum = "c16a64ba9387ef3fdae4f9c1a7f07a0997fce91985c0336f1ddc1822b3b37802" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.40" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +checksum = "d14928354b01c4d6a4f0e549069adef399a284e7995c7ccca94e8a07a5346c59" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 
2.0.23", ] [[package]] @@ -3952,22 +3429,13 @@ dependencies = [ "pbkdf2 0.11.0", "rand 0.8.5", "rustc-hash", - "sha2 0.10.6", + "sha2 0.10.7", "thiserror", "unicode-normalization", "wasm-bindgen", "zeroize", ] -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - [[package]] name = "tinytemplate" version = "1.2.1" @@ -3995,17 +3463,17 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml_datetime" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.10" +version = "0.19.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739" +checksum = "266f016b7f039eec8a1a80dfe6156b633d208b9fccca5e4db1d6775b0c4e34a7" dependencies = [ - "indexmap", + "indexmap 2.0.0", "toml_datetime", "winnow", ] @@ -4024,13 +3492,13 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.23", ] [[package]] @@ -4152,9 +3620,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" +checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" [[package]] name = "unicode-normalization" @@ -4165,28 +3633,12 @@ dependencies = [ "tinyvec", ] -[[package]] -name = "unicode-segmentation" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" - [[package]] name = "unicode-xid" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" -[[package]] -name = "uuid" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" -dependencies = [ - "getrandom 0.2.9", - "serde", -] - [[package]] name = "valuable" version = "0.1.0" @@ -4251,7 +3703,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.23", "wasm-bindgen-shared", ] @@ -4273,7 +3725,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.23", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4323,7 +3775,7 @@ version = "0.89.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab5d3e08b13876f96dd55608d03cd4883a0545884932d5adf11925876c96daef" dependencies = [ - "indexmap", + "indexmap 1.9.3", ] [[package]] @@ -4335,7 +3787,7 @@ dependencies = [ "anyhow", "bincode", "cfg-if", - "indexmap", + "indexmap 1.9.3", "libc", "log", "object 0.29.0", @@ -4369,7 +3821,7 @@ dependencies = [ "anyhow", "cranelift-entity", "gimli 0.26.2", - "indexmap", + "indexmap 1.9.3", "log", "object 0.29.0", "serde", @@ -4421,7 +3873,7 @@ dependencies = [ "anyhow", "cc", "cfg-if", - "indexmap", + "indexmap 1.9.3", "libc", "log", "mach", @@ -4505,7 +3957,7 @@ version = "0.48.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.0", + "windows-targets", ] [[package]] @@ -4536,44 +3988,20 @@ dependencies = [ "windows_x86_64_msvc 0.42.2", ] -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.0", + "windows-targets", ] [[package]] name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-targets" -version = "0.48.0" +version = "0.48.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" dependencies = [ "windows_aarch64_gnullvm 0.48.0", "windows_aarch64_msvc 0.48.0", @@ -4700,9 +4128,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699" +checksum = 
"ca0ace3845f0d96209f0375e6d367e3eb87eb65d27d445bdc9f1843a26f39448" dependencies = [ "memchr", ] @@ -4733,5 +4161,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.23", ] diff --git a/Cargo.toml b/Cargo.toml index 4ec7cda4..4ed51948 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,20 +1,17 @@ [workspace] members = [ - "primitives/avail", - "primitives/types", - "kate", + "core", "kate/recovery", - "primitives/nomad/signature", - "primitives/nomad/nomad-core", - "primitives/nomad/nomad-base", - "primitives/nomad/merkle", + "kate", ] [patch.crates-io] # Substrate (polkadot-v0.9.37). sp-core = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-core-hashing = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-io = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-api = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-std = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-application-crypto = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-storage = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-debug-derive = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-arithmetic = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } @@ -23,6 +20,13 @@ sp-trie = { git = "https://github.com/paritytech/substrate.git", branch = "polka sp-runtime-interface = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-weights = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } frame-support = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } 
+sp-externalities = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-inherents = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-staking = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-state-machine = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-tracing = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-version = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-wasm-interface = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } [profile.dev.package] nalgebra = { opt-level = 3 } diff --git a/primitives/avail/Cargo.toml b/core/Cargo.toml similarity index 52% rename from primitives/avail/Cargo.toml rename to core/Cargo.toml index 147ca9e4..16e603ac 100644 --- a/primitives/avail/Cargo.toml +++ b/core/Cargo.toml @@ -1,15 +1,15 @@ [package] -name = "da-primitives" -version = "0.4.6" +name = "avail-core" +version = "0.5.0" authors = [] edition = "2021" +license = "Apache-2.0" [dependencies] -# Internal -da-types = { path = "../types", default-features = false } - # Others +derive_more = { version = "0.99.17", default-features = false, features = ["constructor", "from", "add", "deref", "mul"] } hash256-std-hasher = { version = "0.15.2", default-features = false } +hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } log = { version = "0.4.8", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } thiserror-no-std = "2.0.2" @@ -17,13 +17,16 @@ thiserror-no-std = "2.0.2" # Substrate beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false } codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", 
"max-encoded-len"] } -frame-support = { version = "4.0.0-dev", default-features = false } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7", default-features = false } -sp-runtime = { version = "7", default-features = false } -sp-runtime-interface = { version = "7", default-features = false } -sp-std = { version = "5", default-features = false } -sp-trie = { version = "7.0.0", default-features = false } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "*", default-features = false } +sp-core = { version = "*", default-features = false } +sp-std = { version = "*", default-features = false } +sp-trie = { version = "*", default-features = false } + +# Substrate Runtime +frame-support = { version = "4.0.0-dev", default-features = false, optional = true } +sp-runtime = { version = "7", default-features = false, optional = true } +sp-runtime-interface = { version = "7", default-features = false, optional = true } [dev-dependencies] hex-literal = "0.3.4" @@ -34,21 +37,29 @@ test-case = "1.2.3" default = ["std"] std = [ "serde", + "hex", "codec/std", "scale-info/std", "log/std", "sp-core/std", "sp-std/std", - "sp-runtime/std", "sp-trie/std", - "sp-runtime-interface/std", + "sp-arithmetic/std", "hash256-std-hasher/std", - "frame-support/std", "beefy-merkle-tree/std", - "da-types/std", + "derive_more/display", + "sp-runtime-interface?/std", + "sp-runtime?/std", + "frame-support?/std", +] +runtime = [ + "sp-runtime-interface", + "sp-runtime", + "frame-support", ] header-backward-compatibility-test = [] try-runtime = [ + "runtime", "sp-runtime/try-runtime", ] diff --git a/core/src/app_extrinsic.rs b/core/src/app_extrinsic.rs new file mode 100644 index 00000000..76e8db2a --- /dev/null +++ b/core/src/app_extrinsic.rs @@ -0,0 +1,92 @@ +use crate::traits::GetAppId; +use codec::{Decode, Encode}; +use derive_more::Constructor; +use scale_info::TypeInfo; 
+#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_core::RuntimeDebug; +use sp_std::vec::Vec; + +use crate::AppId; + +/// Raw Extrinsic with application id. +#[derive(Clone, TypeInfo, Default, Encode, Decode, RuntimeDebug, Constructor)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct AppExtrinsic { + pub app_id: AppId, + #[cfg_attr(feature = "std", serde(with = "hex"))] + pub data: Vec, +} + +#[cfg(feature = "runtime")] +use crate::asdr::AppUncheckedExtrinsic; +#[cfg(feature = "runtime")] +use sp_runtime::{generic::UncheckedExtrinsic, traits::SignedExtension}; + +#[cfg(feature = "runtime")] +impl From> for AppExtrinsic +where + A: Encode, + C: Encode, + S: Encode, + E: SignedExtension + GetAppId, +{ + fn from(ue: sp_runtime::generic::UncheckedExtrinsic) -> Self { + let app_id = ue + .signature + .as_ref() + .map(|(_, _, extra)| extra.app_id()) + .unwrap_or_default(); + let data = ue.encode(); + + Self { app_id, data } + } +} + +impl GetAppId for AppExtrinsic { + fn app_id(&self) -> AppId { + self.app_id + } +} + +impl From> for AppExtrinsic { + #[inline] + fn from(data: Vec) -> Self { + Self { + data, + app_id: <_>::default(), + } + } +} + +#[cfg(feature = "runtime")] +impl From<&AppUncheckedExtrinsic> for AppExtrinsic +where + A: Encode, + C: Encode, + S: Encode, + E: SignedExtension + GetAppId, +{ + fn from(app_ext: &AppUncheckedExtrinsic) -> Self { + Self { + app_id: app_ext.app_id(), + data: app_ext.encode(), + } + } +} + +#[cfg(feature = "runtime")] +impl From> for AppExtrinsic +where + A: Encode, + C: Encode, + S: Encode, + E: SignedExtension + GetAppId, +{ + fn from(app_ext: AppUncheckedExtrinsic) -> Self { + Self { + app_id: app_ext.app_id(), + data: app_ext.encode(), + } + } +} diff --git a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs b/core/src/asdr.rs similarity index 98% rename from primitives/avail/src/asdr/app_unchecked_extrinsic.rs rename to core/src/asdr.rs index 16efb48d..c8d53123 100644 --- 
a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs +++ b/core/src/asdr.rs @@ -18,12 +18,9 @@ //! Generic implementation of an unchecked (pre-verification) extrinsic. use codec::{Compact, Decode, Encode, EncodeLike, Error, Input}; -use frame_support::{ - dispatch::{DispatchInfo, GetDispatchInfo}, - traits::ExtrinsicCall, -}; use scale_info::{build::Fields, meta_type, Path, StaticTypeInfo, Type, TypeInfo, TypeParameter}; use sp_core::blake2_256; +#[cfg(feature = "runtime")] use sp_runtime::{ generic::CheckedExtrinsic, traits::{ @@ -39,10 +36,7 @@ use sp_std::{ vec::Vec, }; -use crate::{ - asdr::{AppId, GetAppId}, - OpaqueExtrinsic, -}; +use crate::{traits::GetAppId, AppId, OpaqueExtrinsic}; /// Current version of the [`UncheckedExtrinsic`] encoded format. /// @@ -243,6 +237,10 @@ where type SignedExtensions = Extra; } +#[cfg(feature = "runtime")] +use frame_support::dispatch::{DispatchInfo, GetDispatchInfo}; + +#[cfg(feature = "runtime")] impl GetDispatchInfo for AppUncheckedExtrinsic where @@ -461,7 +459,8 @@ where } } -impl ExtrinsicCall +#[cfg(feature = "runtime")] +impl frame_support::traits::ExtrinsicCall for AppUncheckedExtrinsic where Extra: SignedExtension, diff --git a/core/src/bench_randomness.rs b/core/src/bench_randomness.rs new file mode 100644 index 00000000..2b308abf --- /dev/null +++ b/core/src/bench_randomness.rs @@ -0,0 +1,20 @@ +use frame_support::traits::Randomness; + +/// Provides an implementation of [`frame_support::traits::Randomness`] that should only be used in +/// on Benchmarks! 
+pub struct BenchRandomness(sp_std::marker::PhantomData); + +impl Randomness for BenchRandomness +where + Output: codec::Decode + Default, + T: Default, +{ + fn random(subject: &[u8]) -> (Output, T) { + use sp_runtime::traits::TrailingZeroInput; + + ( + Output::decode(&mut TrailingZeroInput::new(subject)).unwrap_or_default(), + T::default(), + ) + } +} diff --git a/core/src/constants.rs b/core/src/constants.rs new file mode 100644 index 00000000..e4b8c462 --- /dev/null +++ b/core/src/constants.rs @@ -0,0 +1,28 @@ +use sp_arithmetic::Perbill; + +pub mod well_known_keys { + /// Public params used to generate Kate commitment + pub const KATE_PUBLIC_PARAMS: &[u8] = b":kate_public_params:"; +} + +/// We allow `Normal` extrinsics to fill up the block up to 90%, the rest can be used +/// by Operational extrinsics. +pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(90); + +pub const BLOCK_CHUNK_SIZE: u32 = 32; + +/// Money matters. +pub mod currency { + + pub type Balance = u128; + + /// AVL has 18 decimal positions. + pub const AVL: Balance = 1_000_000_000_000_000_000; + + /// Cents of AVL has 16 decimal positions (100 Cents = $1) + /// 1 DOLLARS = 10_000_000_000_000_000 + pub const CENTS: Balance = AVL / 100; + + /// Millicent of AVL has 13 decimal positions( 100 mCents = 1 cent). 
+ pub const MILLICENTS: Balance = CENTS / 1_000; +} diff --git a/primitives/types/src/data_lookup.rs b/core/src/data_lookup.rs similarity index 91% rename from primitives/types/src/data_lookup.rs rename to core/src/data_lookup.rs index 24d960a5..c445d7ea 100644 --- a/primitives/types/src/data_lookup.rs +++ b/core/src/data_lookup.rs @@ -1,12 +1,12 @@ -use alloc::vec::Vec; +use codec::{Decode, Encode}; use core::convert::TryFrom; use derive_more::Constructor; -use num_traits::{CheckedAdd, Zero}; -use parity_scale_codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; +use sp_arithmetic::traits::{CheckedAdd, Zero}; use sp_core::RuntimeDebug; +use sp_std::vec::Vec; use thiserror_no_std::Error; use crate::{ensure, AppId}; @@ -138,20 +138,20 @@ mod test { fn into_lookup_items(vals: I) -> Vec where I: IntoIterator, - T: Into, + T: Into, { - vals.into_iter().map(Into::into).collect::>() + vals.into_iter() + .map(|v| (AppId(v.0.into()), v.1).into()) + .collect::>() } - #[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(DataLookup::new(185, into_lookup_items([(1, 15), (2, 35)]))); "Valid case")] + #[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(DataLookup::new(185, into_lookup_items([(1u32, 15), (2, 35)]))); "Valid case")] #[test_case( vec![(0, usize::MAX)] => Err(Error::OffsetOverflows); "Offset overflows at zero")] #[test_case( vec![(0, (u32::MAX -1) as usize), (1, 2)] => Err(Error::OffsetOverflows); "Offset overflows at non zero")] #[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsortend data")] #[test_case( vec![] => Ok(DataLookup::new(0, vec![])); "Empty data")] fn from_len(id_len_data: Vec<(u32, usize)>) -> Result { - let iter = id_len_data - .into_iter() - .map(|(id, len)| (AppId::from(id), len)); + let iter = id_len_data.into_iter().map(|(id, len)| (AppId(id), len)); DataLookup::new_from_id_lenght(iter) } diff --git a/primitives/avail/src/data_proof.rs b/core/src/data_proof.rs similarity 
index 99% rename from primitives/avail/src/data_proof.rs rename to core/src/data_proof.rs index fea610b9..d2437402 100644 --- a/primitives/avail/src/data_proof.rs +++ b/core/src/data_proof.rs @@ -1,6 +1,6 @@ +use crate::ensure; use beefy_merkle_tree::MerkleProof; use codec::{Decode, Encode}; -use frame_support::ensure; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{hashing::sha2_256, H256}; @@ -95,7 +95,7 @@ where } } -#[cfg(test)] +#[cfg(all(test, feature = "runtime"))] mod test { use crate::ShaTwo256; use hex_literal::hex; diff --git a/primitives/avail/src/header/extension/mod.rs b/core/src/header/extension/mod.rs similarity index 95% rename from primitives/avail/src/header/extension/mod.rs rename to core/src/header/extension/mod.rs index e336c416..d6fb76a8 100644 --- a/primitives/avail/src/header/extension/mod.rs +++ b/core/src/header/extension/mod.rs @@ -1,11 +1,13 @@ -use crate::asdr::DataLookup; use codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{RuntimeDebug, H256}; +#[cfg(feature = "runtime")] use sp_runtime_interface::pass_by::PassByCodec; +use crate::DataLookup; + pub mod v1; pub mod v2; @@ -13,8 +15,9 @@ pub mod v2; pub mod v_test; /// Header extension data. 
-#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode, PassByCodec)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "runtime", derive(PassByCodec))] pub enum HeaderExtension { V1(v1::HeaderExtension), V2(v2::HeaderExtension), diff --git a/primitives/avail/src/header/extension/v1.rs b/core/src/header/extension/v1.rs similarity index 92% rename from primitives/avail/src/header/extension/v1.rs rename to core/src/header/extension/v1.rs index 53c0d7ed..251b8b64 100644 --- a/primitives/avail/src/header/extension/v1.rs +++ b/core/src/header/extension/v1.rs @@ -4,7 +4,7 @@ use scale_info::TypeInfo; use serde::{Deserialize, Serialize}; use sp_core::{RuntimeDebug, H256}; -use crate::{asdr::DataLookup, v1::KateCommitment}; +use crate::{v1::KateCommitment, DataLookup}; #[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode, Default)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] diff --git a/primitives/avail/src/header/extension/v2.rs b/core/src/header/extension/v2.rs similarity index 93% rename from primitives/avail/src/header/extension/v2.rs rename to core/src/header/extension/v2.rs index 5fe3a2de..ff9984d5 100644 --- a/primitives/avail/src/header/extension/v2.rs +++ b/core/src/header/extension/v2.rs @@ -4,7 +4,7 @@ use scale_info::TypeInfo; use serde::{Deserialize, Serialize}; use sp_core::{RuntimeDebug, H256}; -use crate::{asdr::DataLookup, v2::KateCommitment}; +use crate::{v2::KateCommitment, DataLookup}; #[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode, Default)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] diff --git a/primitives/avail/src/header/extension/v_test.rs b/core/src/header/extension/v_test.rs similarity index 100% rename from primitives/avail/src/header/extension/v_test.rs rename to core/src/header/extension/v_test.rs diff --git a/primitives/avail/src/header/mod.rs 
b/core/src/header/mod.rs similarity index 98% rename from primitives/avail/src/header/mod.rs rename to core/src/header/mod.rs index fe54bc53..6ec9cb5e 100644 --- a/primitives/avail/src/header/mod.rs +++ b/core/src/header/mod.rs @@ -30,8 +30,7 @@ use sp_runtime::{ Digest, }; use sp_runtime_interface::pass_by::{Codec as PassByCodecImpl, PassBy}; -use sp_std::fmt; -use sp_std::{convert::TryFrom, fmt::Debug}; +use sp_std::{convert::TryFrom, fmt}; use crate::traits::{ExtendedHeader, HeaderBlockNumber, HeaderHash}; @@ -159,7 +158,7 @@ impl HeaderT for Header where Number: Member + MaybeSerializeDeserialize - + Debug + + fmt::Debug + sp_std::hash::Hash + MaybeDisplay + AtLeast32BitUnsigned @@ -175,7 +174,7 @@ where + Member + Ord + MaybeSerialize - + Debug + + fmt::Debug + MaybeDisplay + SimpleBitOps + Codec, @@ -244,7 +243,7 @@ where } } -impl ExtendedHeader for Header { +impl ExtendedHeader for Header { type Hash = ::Output; type Number = N; @@ -269,7 +268,7 @@ impl ExtendedHeader for Header { } } -#[cfg(test)] +#[cfg(all(test, feature = "runtime"))] mod tests { use codec::Error; use hex_literal::hex; @@ -282,8 +281,8 @@ mod tests { use super::*; use crate::{ - asdr::DataLookup, kate_commitment::{v1, v2}, + AppId, DataLookup, }; type THeader = Header; @@ -453,7 +452,7 @@ mod tests { }; let extension = extension::v1::HeaderExtension { commitment, - app_lookup: DataLookup::new_from_id_lenght(vec![(0, 1)].into_iter()) + app_lookup: DataLookup::new_from_id_lenght(vec![(AppId(0), 1)].into_iter()) .expect("Valid DataLookup .qed"), }; let digest = Digest { diff --git a/primitives/avail/src/kate_commitment.rs b/core/src/kate_commitment.rs similarity index 100% rename from primitives/avail/src/kate_commitment.rs rename to core/src/kate_commitment.rs diff --git a/core/src/lib.rs b/core/src/lib.rs new file mode 100644 index 00000000..0e5ee175 --- /dev/null +++ b/core/src/lib.rs @@ -0,0 +1,150 @@ +#![cfg_attr(not(feature = "std"), no_std)] + +use codec::{Decode, Encode, 
MaxEncodedLen}; +#[cfg(feature = "std")] +use derive_more::Display; +use derive_more::{Add, Constructor, Deref, Into, Mul}; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_arithmetic::traits::Zero; +use sp_core::RuntimeDebug; + +pub mod opaque_extrinsic; +pub use opaque_extrinsic::*; + +/// Customized headers. +#[cfg(feature = "runtime")] +pub mod header; + +/// Kate Commitment on Headers. +pub mod kate_commitment; +pub use kate_commitment::*; + +/// Application Specific Data Retrieval +#[cfg(feature = "runtime")] +pub mod asdr; + +pub mod sha2; +pub use sha2::ShaTwo256; + +pub mod traits; + +pub mod data_proof; +pub use data_proof::DataProof; + +pub mod data_lookup; +pub use data_lookup::{DataLookup, DataLookupIndexItem}; + +pub mod app_extrinsic; +pub use app_extrinsic::*; + +pub mod constants; +pub use constants::*; + +#[cfg(feature = "runtime")] +pub mod bench_randomness; + +#[repr(u8)] +pub enum InvalidTransactionCustomId { + /// The AppId is not registered. + InvalidAppId = 137, + /// Extrinsic is not allowed for the given `AppId`. + ForbiddenAppId, + /// Max padded length was exceeded. 
+ MaxPaddedLenExceeded, +} + +#[derive( + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Add, + Deref, + TypeInfo, + Encode, + Decode, + Default, + Into, + MaxEncodedLen, + RuntimeDebug, +)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Display))] +pub struct AppId(#[codec(compact)] pub u32); + +impl Zero for AppId { + fn zero() -> Self { + AppId(Zero::zero()) + } + + fn is_zero(&self) -> bool { + self.0.is_zero() + } +} + +/// Strong type for `BlockLength::cols` +#[derive( + Clone, + Copy, + Add, + Mul, + PartialEq, + Eq, + Encode, + Decode, + TypeInfo, + PartialOrd, + Ord, + Into, + Constructor, + MaxEncodedLen, +)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Display, Debug))] +#[mul(forward)] +pub struct BlockLengthColumns(#[codec(compact)] pub u32); + +/// Strong type for `BlockLength::rows` +#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Display, Debug))] +#[derive( + Encode, + Decode, + TypeInfo, + MaxEncodedLen, + Clone, + Copy, + Add, + Mul, + PartialEq, + Eq, + PartialOrd, + Ord, + Into, + Constructor, +)] +#[mul(forward)] +pub struct BlockLengthRows(#[codec(compact)] pub u32); + +/// Return Err of the expression: `return Err($expression);`. +/// +/// Used as `fail!(expression)`. +#[macro_export] +macro_rules! fail { + ( $y:expr ) => {{ + return Err($y.into()); + }}; +} + +/// Evaluate `$x:expr` and if not true return `Err($y:expr)`. +/// +/// Used as `ensure!(expression_to_ensure, expression_to_return_on_false)`. +#[macro_export] +macro_rules! ensure { + ( $x:expr, $y:expr $(,)? 
) => {{ + if !$x { + $crate::fail!($y); + } + }}; +} diff --git a/primitives/avail/src/opaque_extrinsic.rs b/core/src/opaque_extrinsic.rs similarity index 95% rename from primitives/avail/src/opaque_extrinsic.rs rename to core/src/opaque_extrinsic.rs index 926b3c6f..7d14e459 100644 --- a/primitives/avail/src/opaque_extrinsic.rs +++ b/core/src/opaque_extrinsic.rs @@ -1,6 +1,5 @@ use codec::{Decode, Encode}; use scale_info::TypeInfo; -use sp_runtime::traits::Extrinsic; use sp_std::vec::Vec; /// Simple blob to hold an extrinsic without committing to its format and ensure it is serialized @@ -51,7 +50,8 @@ impl<'a> ::serde::Deserialize<'a> for OpaqueExtrinsic { } } -impl Extrinsic for OpaqueExtrinsic { +#[cfg(feature = "runtime")] +impl sp_runtime::traits::Extrinsic for OpaqueExtrinsic { type Call = (); type SignaturePayload = (); } diff --git a/core/src/sha2.rs b/core/src/sha2.rs new file mode 100644 index 00000000..9b16040e --- /dev/null +++ b/core/src/sha2.rs @@ -0,0 +1,48 @@ +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_core::{hashing::sha2_256, Hasher, RuntimeDebug}; + +/// Sha2 256 wrapper which supports `beefy-merkle-tree::Hasher`. 
+#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct ShaTwo256 {} + +impl Hasher for ShaTwo256 { + type Out = sp_core::H256; + type StdHasher = hash256_std_hasher::Hash256StdHasher; + const LENGTH: usize = 32; + + fn hash(s: &[u8]) -> Self::Out { + sha2_256(s).into() + } +} + +#[cfg(feature = "runtime")] +pub mod hash { + use super::*; + use sp_core::storage::StateVersion; + use sp_std::vec::Vec; + use sp_trie::{LayoutV0, LayoutV1, TrieConfiguration as _}; + + impl sp_runtime::traits::Hash for ShaTwo256 { + type Output = sp_core::H256; + + fn trie_root(input: Vec<(Vec, Vec)>, version: StateVersion) -> Self::Output { + match version { + StateVersion::V0 => LayoutV0::::trie_root(input), + StateVersion::V1 => LayoutV1::::trie_root(input), + } + } + + fn ordered_trie_root(input: Vec>, version: StateVersion) -> Self::Output { + match version { + StateVersion::V0 => LayoutV0::::ordered_trie_root(input), + StateVersion::V1 => LayoutV1::::ordered_trie_root(input), + } + } + } +} + +#[cfg(feature = "runtime")] +pub use hash::*; diff --git a/core/src/traits.rs b/core/src/traits.rs new file mode 100644 index 00000000..b58f127e --- /dev/null +++ b/core/src/traits.rs @@ -0,0 +1,31 @@ +use codec::{Codec, Decode}; +use sp_arithmetic::traits::AtLeast32BitUnsigned; +use sp_core::U256; +use sp_std::{convert::TryFrom, fmt::Debug, hash::Hash as StdHash}; + +pub mod get_app_id; +pub use get_app_id::*; + +pub mod extended_header; +pub use extended_header::*; + +/// Header block number trait. +pub trait HeaderBlockNumber: + AtLeast32BitUnsigned + Codec + StdHash + Copy + Into + TryFrom + Debug + Eq +{ +} + +impl< + T: AtLeast32BitUnsigned + Codec + StdHash + Copy + Into + TryFrom + Debug + Eq, + > HeaderBlockNumber for T +{ +} + +/// Header hash. 
+#[cfg(feature = "runtime")] +pub trait HeaderHash: sp_runtime::traits::Hash {} +#[cfg(feature = "runtime")] +impl HeaderHash for T {} + +pub trait HeaderHashOutput: Decode + Ord {} +impl HeaderHashOutput for T {} diff --git a/core/src/traits/extended_header.rs b/core/src/traits/extended_header.rs new file mode 100644 index 00000000..43f7629e --- /dev/null +++ b/core/src/traits/extended_header.rs @@ -0,0 +1,22 @@ +/// Extended header access +pub trait ExtendedHeader { + /// Header number. + type Number; + + /// Header hash type + type Hash; + + /// Creates new header. + fn new( + number: Self::Number, + extrinsics_root: Self::Hash, + state_root: Self::Hash, + parent_hash: Self::Hash, + digest: D, + extension: E, + ) -> Self; + + fn extension(&self) -> &E; + + fn set_extension(&mut self, extension: E); +} diff --git a/primitives/types/src/get_app_id.rs b/core/src/traits/get_app_id.rs similarity index 93% rename from primitives/types/src/get_app_id.rs rename to core/src/traits/get_app_id.rs index c9b9541f..88f84a96 100644 --- a/primitives/types/src/get_app_id.rs +++ b/core/src/traits/get_app_id.rs @@ -28,7 +28,7 @@ mod tests { impl GetAppId for CustomAppId { fn app_id(&self) -> AppId { - 7.into() + AppId(7) } } @@ -40,7 +40,7 @@ mod tests { let custom_app_id = (0, 1, 2, 3, 4, 5, 6, CustomAppId {}); let default_app_id = (0, 1, 2, 3, 4, 5, 6, DefaultGetAppId {}); - assert_eq!(custom_app_id.app_id(), 7.into()); + assert_eq!(custom_app_id.app_id(), AppId(7)); assert_eq!(default_app_id.app_id(), Default::default()); } } diff --git a/deny.toml b/deny.toml new file mode 100644 index 00000000..e16848b1 --- /dev/null +++ b/deny.toml @@ -0,0 +1,290 @@ +# This template contains all of the possible sections and their default values + +# Note that all fields that take a lint level have these possible values: +# * deny - An error will be produced and the check will fail +# * warn - A warning will be produced, but the check will not fail +# * allow - No warning or error will be 
produced, though in some cases a note +# will be + +# The values provided in this template are the default values that will be used +# when any section or field is not specified in your own configuration + +# Root options + +# If 1 or more target triples (and optionally, target_features) are specified, +# only the specified targets will be checked when running `cargo deny check`. +# This means, if a particular package is only ever used as a target specific +# dependency, such as, for example, the `nix` crate only being used via the +# `target_family = "unix"` configuration, that only having windows targets in +# this list would mean the nix crate, as well as any of its exclusive +# dependencies not shared by any other crates, would be ignored, as the target +# list here is effectively saying which targets you are building for. +targets = [ + + + # The triple can be any string, but only the target triples built in to + # rustc (as of 1.40) can be checked against actual config expressions + # { triple = "x86_64-unknown-linux-musl" }, + # You can also specify which target_features you promise are enabled for a + # particular target. target_features are currently not validated against + # the actual valid features supported by the target architecture. + # { triple = "wasm32-unknown-unknown", features = ["atomics"] }, +] +# When creating the dependency graph used as the source of truth when checks are +# executed, this field can be used to prune crates from the graph, removing them +# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate +# is pruned from the graph, all of its dependencies will also be pruned unless +# they are connected to another crate in the graph that hasn't been pruned, +# so it should be used with care. The identifiers are [Package ID Specifications] +# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) +# exclude = [] +# If true, metadata will be collected with `--all-features`. 
Note that this can't +# be toggled off if true, if you want to conditionally enable `--all-features` it +# is recommended to pass `--all-features` on the cmd line instead +all-features = true +# If true, metadata will be collected with `--no-default-features`. The same +# caveat with `all-features` applies +no-default-features = false +# If set, these feature will be enabled when collecting metadata. If `--features` +# is specified on the cmd line they will take precedence over this option. +# features = [] +# When outputting inclusion graphs in diagnostics that include features, this +# option can be used to specify the depth at which feature edges will be added. +# This option is included since the graphs can be quite large and the addition +# of features from the crate(s) to all of the graph roots can be far too verbose. +# This option can be overridden via `--feature-depth` on the cmd line +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +# More documentation for the advisories section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html +[advisories] +# The path where the advisory database is cloned/fetched into +db-path = "~/.cargo/advisory-db" +# The url(s) of the advisory databases to use +db-urls = ["https://github.com/rustsec/advisory-db"] +# The lint level for security vulnerabilities +vulnerability = "deny" +# The lint level for unmaintained crates +unmaintained = "warn" +# The lint level for crates that have been yanked from their source registry +yanked = "warn" +# The lint level for crates with security notices. Note that as of +# 2019-12-17 there are no security notice advisories in +# https://github.com/rustsec/advisory-db +notice = "warn" +# A list of advisory IDs to ignore. Note that ignored advisories will still +# output a note when they are encountered. 
+ignore = [ + + + # "RUSTSEC-0000-0000", +] +# Threshold for security vulnerabilities, any vulnerability with a CVSS score +# lower than the range specified will be ignored. Note that ignored advisories +# will still output a note when they are encountered. +# * None - CVSS Score 0.0 +# * Low - CVSS Score 0.1 - 3.9 +# * Medium - CVSS Score 4.0 - 6.9 +# * High - CVSS Score 7.0 - 8.9 +# * Critical - CVSS Score 9.0 - 10.0 +# severity-threshold = + +# If this is true, then cargo deny will use the git executable to fetch advisory database. +# If this is false, then it uses a built-in git library. +# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. +# See Git Authentication for more information about setting up git authentication. +# git-fetch-with-cli = true + +# This section is considered when running `cargo deny check licenses` +# More documentation for the licenses section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html +[licenses] +# The lint level for crates which do not have a detectable license +unlicensed = "deny" +# List of explicitly allowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. +allow = [ + "MIT", + "Apache-2.0", + "Apache-2.0 WITH LLVM-exception", + "BSD-2-Clause", + "BSD-3-Clause", + "MPL-2.0", +] +# List of explicitly disallowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 
+deny = [ + + + # "Nokia", +] +# Lint level for licenses considered copyleft +copyleft = "warn" +# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses +# * both - The license will be approved if it is both OSI-approved *AND* FSF +# * either - The license will be approved if it is either OSI-approved *OR* FSF +# * osi-only - The license will be approved if is OSI-approved *AND NOT* FSF +# * fsf-only - The license will be approved if is FSF *AND NOT* OSI-approved +# * neither - This predicate is ignored and the default lint level is used +allow-osi-fsf-free = "neither" +# Lint level used when no other predicates are matched +# 1. License isn't in the allow or deny lists +# 2. License isn't copyleft +# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither" +default = "deny" +# The confidence threshold for detecting a license from license text. +# The higher the value, the more closely the license text must be to the +# canonical license text of a valid SPDX license file. +# [possible values: any between 0.0 and 1.0]. +confidence-threshold = 0.8 +# Allow 1 or more licenses on a per-crate basis, so that particular licenses +# aren't accepted for every possible crate as with the normal allow list +exceptions = [ + + + # Each entry is the crate and version constraint, and its specific allow + # list + # { allow = ["Zlib"], name = "adler32", version = "*" }, +] + +# Some crates don't have (easily) machine readable licensing information, +# adding a clarification entry for it allows you to manually specify the +# licensing information +# [[licenses.clarify]] +# The name of the crate the clarification applies to +# name = "ring" +# The optional version constraint for the crate +# version = "*" +# The SPDX expression for the license requirements of the crate +# expression = "MIT AND ISC AND OpenSSL" +# One or more files in the crate's source used as the "source of truth" for +# the license expression. 
If the contents match, the clarification will be used +# when running the license check, otherwise the clarification will be ignored +# and the crate will be checked normally, which may produce warnings or errors +# depending on the rest of your configuration +# license-files = [ +# Each entry is a crate relative path, and the (opaque) hash of its contents +# { path = "LICENSE", hash = 0xbd0eed23 } +# ] + +[licenses.private] +# If true, ignores workspace crates that aren't published, or are only +# published to private registries. +# To see how to mark a crate as unpublished (to the official registry), +# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. +ignore = false +# One or more private registries that you might publish crates to, if a crate +# is only published to private registries, and ignore is true, the crate will +# not have its license(s) checked +registries = [ + + + # "https://sekretz.com/registry +] + +# This section is considered when running `cargo deny check bans`. +# More documentation about the 'bans' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html +[bans] +# Lint level for when multiple versions of the same crate are detected +multiple-versions = "warn" +# Lint level for when a crate version requirement is `*` +wildcards = "allow" +# The graph highlighting used when creating dotgraphs for crates +# with multiple versions +# * lowest-version - The path to the lowest versioned duplicate is highlighted +# * simplest-path - The path to the version with the fewest edges is highlighted +# * all - Both lowest-version and simplest-path are used +highlight = "all" +# The default lint level for `default` features for crates that are members of +# the workspace that is being checked. This can be overriden by allowing/denying +# `default` on a crate-by-crate basis if desired. 
+workspace-default-features = "allow" +# The default lint level for `default` features for external crates that are not +# members of the workspace. This can be overriden by allowing/denying `default` +# on a crate-by-crate basis if desired. +external-default-features = "allow" +# List of crates that are allowed. Use with care! +allow = [ + + + # { name = "ansi_term", version = "=0.11.0" }, +] +# List of crates to deny +deny = [ + + + # Each entry the name of a crate and a version range. If version is + # not specified, all versions will be matched. + # { name = "ansi_term", version = "=0.11.0" }, + # + # Wrapper crates can optionally be specified to allow the crate when it + # is a direct dependency of the otherwise banned crate + # { name = "ansi_term", version = "=0.11.0", wrappers = [] }, +] + +# List of features to allow/deny +# Each entry the name of a crate and a version range. If version is +# not specified, all versions will be matched. +# [[bans.features]] +# name = "reqwest" +# Features to not allow +# deny = ["json"] +# Features to allow +# allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +# ] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +# exact = true + +# Certain crates/versions that will be skipped when doing duplicate detection. +skip = [ + + + # { name = "ansi_term", version = "=0.11.0" }, +] +# Similarly to `skip` allows you to skip certain crates during duplicate +# detection. Unlike skip, it also includes the entire tree of transitive +# dependencies starting at the specified crate, up to a certain depth, which is +# by default infinite. +skip-tree = [ + + + # { name = "ansi_term", version = "=0.11.0", depth = 20 }, +] + +# This section is considered when running `cargo deny check sources`. 
+# More documentation about the 'sources' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html +[sources] +# Lint level for what to happen when a crate from a crate registry that is not +# in the allow list is encountered +unknown-registry = "warn" +# Lint level for what to happen when a crate from a git repository that is not +# in the allow list is encountered +unknown-git = "warn" +# List of URLs for allowed crate registries. Defaults to the crates.io index +# if not specified. If it is specified but empty, no registries are allowed. +allow-registry = ["https://github.com/rust-lang/crates.io-index"] +# List of URLs for allowed Git repositories +allow-git = [] + +[sources.allow-org] +# 1 or more github.com organizations to allow git sources for +github = ["paritytech", "availproject"] +# 1 or more gitlab.com organizations to allow git sources for +gitlab = [] +# 1 or more bitbucket.org organizations to allow git sources for +bitbucket = [] diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 8a0689ac..998b1f92 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -3,6 +3,7 @@ name = "kate" version = "0.7.1" authors = ["Denis Ermolin "] edition = "2021" +license = "Apache-2.0" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -11,15 +12,14 @@ edition = "2021" poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", default-features = false, tag = "v0.0.1" } # Internal -da-types = { path = "../primitives/types", default-features = false } +avail-core = { path = "../core", default-features = false, feature = "runtime" } dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2", optional = true } -kate-recovery = { path = "recovery", default-features = false, optional = true } -# kate-grid = { path = "grid" } +kate-recovery = { path = "recovery", default-features = false } # Parity & Substrate codec = { package = 
"parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -sp-arithmetic = { version = "6", default-features = false } -sp-core = { version = "7.0.0", default-features = false, optional = true } +sp-arithmetic = { version = "*", default-features = false } +sp-core = { version = "*", default-features = false, optional = true } # 3rd-party derive_more = { version = "0.99.17", default-features = false, features = ["constructor"] } @@ -29,7 +29,7 @@ hex-literal = { version = "0.3.4", optional = true } log = { version = "0.4.8", optional = true } nalgebra = { version = "0.32.2", default-features = false } once_cell = { version = "1.8.0", optional = true } -rand = { version = "0.8.4", default-features = false, optional = true } +rand = { version = "0.8.5", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } serde = { version = "1", optional = true, features = ["derive"] } @@ -39,7 +39,6 @@ thiserror-no-std = "2.0.2" [dev-dependencies] criterion = "0.5.1" -da-primitives = { path = "../primitives/avail" } proptest = "1" serde_json = "1" test-case = "1.2.3" @@ -58,11 +57,11 @@ std = [ "codec/std", "serde", "serde_json", - "rand", "rand_chacha/std", + "rand/std", "log", "dusk-plonk/std", - "da-types/std", + "avail-core/std", "sp-arithmetic/std", "sp-core/std", "poly-multiproof/blst", diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index 07cfa1e7..6049baee 100644 --- a/kate/examples/multiproof_verification.rs +++ b/kate/examples/multiproof_verification.rs @@ -1,5 +1,5 @@ +use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; use core::num::NonZeroU16; -use da_types::{AppExtrinsic, AppId}; use hex_literal::hex; use kate::{ gridgen::EvaluationGrid, @@ -62,10 +62,7 @@ fn multiproof_verification() -> Result { let multiproof = polys .multiproof( &pmp, - 
&kate::com::Cell { - row: 0.into(), - col: 0.into(), - }, + &kate::com::Cell::new(BlockLengthRows(0), BlockLengthColumns(0)), &grid, target_dims, ) diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index d722aba1..045c71ed 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -3,22 +3,23 @@ name = "kate-recovery" version = "0.8.1" authors = ["Denis Ermolin "] edition = "2018" +license = "Apache-2.0" [dependencies] # Internals -da-types = { path = "../../primitives/types", default-features = false } +avail-core = { path = "../../core", default-features = false } dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2" } # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -sp-arithmetic = { version = "6", default-features = false } +sp-arithmetic = { version = "*", default-features = false } # 3rd-parties derive_more = "0.99.17" dusk-bytes = { version = "0.1.6", default-features = false } once_cell = { version = "1.9.0", optional = true } -rand = { version = "0.8.4", optional = true } -rand_chacha = { version = "0.3", optional = true } +rand = { version = "0.8.5", default-features = false, features = ["alloc", "small_rng"], optional = true } +rand_chacha = { version = "0.3", default-features = false, optional = true } serde = { version = "1", optional = true, features = ["derive"] } static_assertions = "1.1.0" thiserror-no-std = "2.0.2" @@ -31,9 +32,9 @@ test-case = "1.2.3" default = ["std"] std = [ "once_cell", - "rand", - "rand_chacha", "serde", "sp-arithmetic/std", - "da-types/std", + "avail-core/std", + "rand/std", + "rand_chacha/std", ] diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 6c301f1b..cadbd487 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,9 +1,8 @@ +use avail_core::ensure; use codec::{Decode, IoReader}; use core::num::TryFromIntError; -use da_types::ensure; use 
dusk_bytes::Serializable as _; use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; -use rand::seq::SliceRandom; use sp_arithmetic::{traits::SaturatedConversion, Percent}; use static_assertions::const_assert_ne; use std::{ @@ -37,19 +36,32 @@ pub enum ReconstructionError { RowCountExceeded, } +#[cfg(feature = "std")] +impl std::error::Error for ReconstructionError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match &self { + Self::DataDecodingError(unflatten) => Some(unflatten), + _ => None, + } + } +} + /// From given positions, constructs related columns positions, up to given factor. /// E.g. if factor is 0.66, 66% of matched columns will be returned. /// Positions in columns are random. /// Function panics if factor is above 1.0. -pub fn columns_positions( +#[cfg(feature = "std")] +pub fn columns_positions( dimensions: matrix::Dimensions, positions: &[matrix::Position], factor: Percent, + rng: &mut R, ) -> Vec { + use rand::seq::SliceRandom; + let cells = factor .mul_ceil(dimensions.extended_rows()) .saturated_into::(); - let rng = &mut rand::thread_rng(); let columns: HashSet = HashSet::from_iter(positions.iter().map(|position| position.col)); @@ -295,6 +307,17 @@ pub enum UnflattenError { InvalidLen, } +#[cfg(feature = "std")] +impl std::error::Error for UnflattenError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match &self { + Self::RangeConversion(try_int) => Some(try_int), + Self::Codec(codec) => Some(codec), + _ => None, + } + } +} + use std::{collections::VecDeque, io}; /// It is a Codec Reader which allows decoding from non-sequential data. 
diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 039815df..a8140510 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -4,7 +4,7 @@ use std::{ num::TryFromIntError, }; -use da_types::ensure; +use avail_core::ensure; #[cfg(feature = "std")] use dusk_bytes::Serializable; use dusk_plonk::{ @@ -41,6 +41,18 @@ pub enum Error { IntError(#[from] TryFromIntError), } +#[cfg(feature = "std")] +impl std::error::Error for Error { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match &self { + Self::SliceError(slice) => Some(slice), + Self::PlonkError(plonk) => Some(plonk), + Self::IntError(try_int) => Some(try_int), + _ => None, + } + } +} + #[cfg(feature = "std")] impl From for Error { fn from(e: dusk_bytes::Error) -> Self { diff --git a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index f09385f5..791566ca 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ -20,6 +20,9 @@ pub enum Error { InvalidDegree(String), } +#[cfg(feature = "std")] +impl std::error::Error for Error {} + #[cfg(feature = "std")] impl From for Error { fn from(error: dusk_bytes::Error) -> Self { diff --git a/kate/recovery/src/testnet.rs b/kate/recovery/src/testnet.rs index 4739490c..0ead1dca 100644 --- a/kate/recovery/src/testnet.rs +++ b/kate/recovery/src/testnet.rs @@ -2,8 +2,6 @@ use std::{collections::HashMap, sync::Mutex}; use dusk_plonk::commitment_scheme::kzg10::PublicParameters; use once_cell::sync::Lazy; -use rand::SeedableRng; -use rand_chacha::ChaChaRng; static SRS_DATA: Lazy>> = Lazy::new(|| Mutex::new(HashMap::new())); @@ -13,6 +11,8 @@ pub fn public_params(max_degree: usize) -> PublicParameters { srs_data_locked .entry(max_degree) .or_insert_with(|| { + use rand_chacha::{rand_core::SeedableRng as _, ChaChaRng}; + let mut rng = ChaChaRng::seed_from_u64(42); PublicParameters::setup(max_degree, &mut rng).unwrap() }) diff --git a/kate/src/com.rs b/kate/src/com.rs index 
41e75441..e8e95e47 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -1,4 +1,4 @@ -use core::num::{NonZeroU32, NonZeroUsize}; +use core::num::NonZeroU32; use std::{ convert::{TryFrom, TryInto}, mem::size_of, @@ -6,8 +6,11 @@ use std::{ time::Instant, }; +use avail_core::{ + data_lookup::Error as DataLookupError, ensure, AppExtrinsic, AppId, BlockLengthColumns, + BlockLengthRows, +}; use codec::Encode; -use da_types::{ensure, AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows, DataLookupError}; use derive_more::Constructor; use dusk_bytes::Serializable; use dusk_plonk::{ @@ -19,8 +22,10 @@ use dusk_plonk::{ #[cfg(feature = "std")] use kate_recovery::matrix::Dimensions; use nalgebra::base::DMatrix; -use rand::{Rng, SeedableRng}; -use rand_chacha::ChaChaRng; +use rand_chacha::{ + rand_core::{Error as ChaChaError, RngCore, SeedableRng}, + ChaChaRng, +}; use rayon::prelude::*; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -36,6 +41,7 @@ use crate::{ }, metrics::Metrics, padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, TryFromBlockDimensionsError, LOG_TARGET, + U32_USIZE_ERR, }; #[cfg(feature = "std")] use kate_recovery::testnet; @@ -61,6 +67,7 @@ pub enum Error { InvalidDimensionExtension, DomainSizeInvalid, InvalidDataLookup(#[from] DataLookupError), + Rng(#[from] ChaChaError), } impl From for Error { @@ -105,7 +112,7 @@ fn app_extrinsics_group_by_app_id(extrinsics: &[AppExtrinsic]) -> Vec<(AppId, Ve pub fn flatten_and_pad_block( max_rows: BlockLengthRows, max_cols: BlockLengthColumns, - chunk_size: u32, + chunk_size: NonZeroU32, extrinsics: &[AppExtrinsic], rng_seed: Seed, ) -> Result<(XtsLayout, FlatData, BlockDimensions), Error> { @@ -143,19 +150,16 @@ pub fn flatten_and_pad_block( // Determine the block size after padding let block_dims = get_block_dimensions(padded_block_len, max_rows, max_cols, chunk_size)?; + let chunk_size = usize::try_from(NonZeroU32::get(block_dims.chunk_size)).expect(U32_USIZE_ERR); - if padded_block.len() > 
block_dims.size() { - return Err(Error::BlockTooBig); - } + let block_dims_size = block_dims.size().ok_or(Error::BlockTooBig)?; + ensure!(padded_block.len() <= block_dims_size, Error::BlockTooBig); let mut rng = ChaChaRng::from_seed(rng_seed); // SAFETY: `padded_block.len() <= block_dims.size()` checked some lines above. if cfg!(debug_assertions) { - let chunk_size: usize = - usize::try_from(block_dims.chunk_size).expect("Cast to `usize` overflows"); - let dims_sub_pad = block_dims - .size() + let dims_sub_pad = block_dims_size .checked_sub(padded_block.len()) .expect("`padded_block.len() <= block_dims.size() .qed"); let rem = dims_sub_pad @@ -164,13 +168,12 @@ pub fn flatten_and_pad_block( assert_eq!(rem, 0); } - let nz_chunk_size: NonZeroUsize = usize::try_from(block_dims.chunk_size) - .map_err(|_| Error::CellLengthExceeded)? - .try_into() - .map_err(|_| Error::ZeroDimension)?; - - for _ in 0..(block_dims.size().saturating_sub(padded_block.len()) / nz_chunk_size) { - let rnd_values: DataChunk = rng.gen(); + #[allow(clippy::integer_arithmetic)] + // SAFETY: `chunk_size` comes from `NonZeroU32::get(...)` so we can safetly use `/`. 
+ let last = block_dims_size.saturating_sub(padded_block.len()) / chunk_size; + for _ in 0..last { + let mut rnd_values = DataChunk::default(); + rng.try_fill_bytes(&mut rnd_values)?; padded_block.append(&mut pad_with_zeroes(rnd_values.to_vec(), chunk_size)); } @@ -181,16 +184,15 @@ pub fn get_block_dimensions( block_size: u32, max_rows: BlockLengthRows, max_cols: BlockLengthColumns, - chunk_size: u32, + chunk_size: NonZeroU32, ) -> Result { let max_block_dimensions = BlockDimensions::new(max_rows, max_cols, chunk_size); + let max_block_dimensions_size = max_block_dimensions.size().ok_or(Error::BlockTooBig)?; + let block_size = usize::try_from(block_size)?; - ensure!( - block_size <= max_block_dimensions.size(), - Error::BlockTooBig - ); + ensure!(block_size <= max_block_dimensions_size, Error::BlockTooBig); - if block_size == max_block_dimensions.size() || MAXIMUM_BLOCK_SIZE { + if block_size == max_block_dimensions_size || MAXIMUM_BLOCK_SIZE { return Ok(max_block_dimensions); } @@ -201,7 +203,7 @@ pub fn get_block_dimensions( nearest_power_2_size = MINIMUM_BLOCK_SIZE; } - let total_cells = (nearest_power_2_size as f32 / chunk_size as f32).ceil() as u32; + let total_cells = (nearest_power_2_size as f32 / chunk_size.get() as f32).ceil() as u32; // we must minimize number of rows, to minimize header size // (performance wise it doesn't matter) @@ -209,7 +211,7 @@ pub fn get_block_dimensions( let (cols, rows) = if total_cells > max_cols.0 { (max_cols, BlockLengthRows(total_cells / nz_max_cols)) } else { - (total_cells.into(), 1.into()) + (BlockLengthColumns(total_cells), BlockLengthRows(1)) }; Ok(BlockDimensions { @@ -220,20 +222,21 @@ pub fn get_block_dimensions( } #[inline] -fn pad_with_zeroes(mut chunk: Vec, length: u32) -> Vec { - chunk.resize(length as usize, 0); +fn pad_with_zeroes(mut chunk: Vec, len: usize) -> Vec { + chunk.resize(len, 0); chunk } -fn pad_to_chunk(chunk: DataChunk, chunk_size: u32) -> Vec { +fn pad_to_chunk(chunk: DataChunk, chunk_size: 
NonZeroU32) -> Vec { const_assert_eq!(DATA_CHUNK_SIZE, size_of::()); + let chunk_size = usize::try_from(chunk_size.get()).expect(U32_USIZE_ERR); debug_assert!( - chunk_size as usize >= DATA_CHUNK_SIZE, + chunk_size >= DATA_CHUNK_SIZE, "`BlockLength.chunk_size` is valid by design .qed" ); let mut padded = chunk.to_vec(); - padded.resize(chunk_size as usize, 0); + padded.resize(chunk_size, 0); padded } @@ -295,7 +298,8 @@ pub fn par_extend_data_matrix( let (rows, cols) = dims.into(); // simple length with mod check would work... - let chunk_size: usize = block_dims.chunk_size.try_into()?; + let chunk_size = + usize::try_from(block_dims.chunk_size.get()).map_err(|_| Error::BlockTooBig)?; let chunks = block.par_chunks_exact(chunk_size); ensure!(chunks.remainder().is_empty(), Error::DimensionsMismatch); @@ -373,12 +377,12 @@ pub fn build_proof( // #[cfg(not(feature = "parallel"))] let cell_iter = cells.iter().zip(result_bytes.chunks_exact_mut(SPROOF_SIZE)); - cell_iter.for_each(|(cell, res)| { - let r_index = cell.row.as_usize(); + for (cell, res) in cell_iter { + let r_index = usize::try_from(cell.row.0)?; if r_index >= ext_rows || cell.col >= block_dims.cols { res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! 
} else { - let c_index = cell.col.as_usize(); + let c_index = usize::try_from(cell.col.0)?; // construct polynomial per extended matrix row #[cfg(feature = "parallel")] @@ -406,7 +410,7 @@ pub fn build_proof( }, }; } - }); + } metrics.proof_build_time(total_start.elapsed(), cells.len().saturated_into()); @@ -417,7 +421,7 @@ pub fn build_proof( pub fn par_build_commitments( rows: BlockLengthRows, cols: BlockLengthColumns, - chunk_size: u32, + chunk_size: NonZeroU32, extrinsics_by_key: &[AppExtrinsic], rng_seed: Seed, metrics: &M, @@ -496,7 +500,6 @@ fn commit( mod tests { use std::{convert::TryInto, iter::repeat}; - use da_types::AppExtrinsic; use dusk_bytes::Serializable; use dusk_plonk::bls12_381::BlsScalar; use hex_literal::hex; @@ -528,6 +531,8 @@ mod tests { padded_len, }; + const TCHUNK: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) }; + fn app_data_index_try_from_layout( layout: Vec<(AppId, u32)>, ) -> Result { @@ -574,21 +579,25 @@ mod tests { .collect() } - #[test_case(0, 256, 256 => BlockDimensions::new(1, 4, 32) ; "block size zero")] - #[test_case(11, 256, 256 => BlockDimensions::new(1, 4, 32) ; "below minimum block size")] - #[test_case(300, 256, 256 => BlockDimensions::new(1, 16, 32) ; "regular case")] - #[test_case(513, 256, 256 => BlockDimensions::new(1, 32, 32) ; "minimum overhead after 512")] - #[test_case(8192, 256, 256 => BlockDimensions::new(1, 256, 32) ; "maximum cols")] - #[test_case(8224, 256, 256 => BlockDimensions::new(2, 256, 32) ; "two rows")] - #[test_case(2097152, 256, 256 => BlockDimensions::new(256, 256, 32) ; "max block size")] + #[test_case(0, 256, 256 => (1, 4, 32) ; "block size zero")] + #[test_case(11, 256, 256 => (1, 4, 32) ; "below minimum block size")] + #[test_case(300, 256, 256 => (1, 16, 32) ; "regular case")] + #[test_case(513, 256, 256 => (1, 32, 32) ; "minimum overhead after 512")] + #[test_case(8192, 256, 256 => (1, 256, 32) ; "maximum cols")] + #[test_case(8224, 256, 256 => (2, 256, 32) ; "two rows")] + 
#[test_case(2097152, 256, 256 => (256, 256, 32) ; "max block size")] #[test_case(2097155, 256, 256 => panics "BlockTooBig" ; "too much data")] // newapi done - fn test_get_block_dimensions(size: u32, rows: R, cols: C) -> BlockDimensions - where - R: Into, - C: Into, - { - get_block_dimensions(size, rows.into(), cols.into(), 32).unwrap() + fn test_get_block_dimensions(size: u32, rows: u32, cols: u32) -> (u32, u32, u32) { + let dims = get_block_dimensions( + size, + BlockLengthRows(rows), + BlockLengthColumns(cols), + TCHUNK, + ) + .unwrap(); + + (dims.rows.0, dims.cols.0, dims.chunk_size.get()) } // newapi done @@ -622,11 +631,12 @@ mod tests { .expect("Invalid Expected result"); let expected = DMatrix::from_iterator(4, 4, expected.into_iter()); - let block_dims = BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), 32); + let block_dims = BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), TCHUNK); + let chunk_size = usize::try_from(block_dims.chunk_size.get()).unwrap(); let block = (0..=247) .collect::>() .chunks_exact(DATA_CHUNK_SIZE) - .flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), block_dims.chunk_size)) + .flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), chunk_size)) .collect::>(); let ext_matrix = par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {}).unwrap(); assert_eq!(ext_matrix, expected); @@ -651,40 +661,28 @@ mod tests { // newapi done #[test] fn test_flatten_block() { - let chunk_size = 32; let extrinsics: Vec = vec![ - AppExtrinsic { - app_id: 0.into(), - data: (1..=29).collect(), - }, - AppExtrinsic { - app_id: 1.into(), - data: (1..=30).collect(), - }, - AppExtrinsic { - app_id: 2.into(), - data: (1..=31).collect(), - }, - AppExtrinsic { - app_id: 3.into(), - data: (1..=60).collect(), - }, + AppExtrinsic::new(AppId(0), (1..=29).collect()), + AppExtrinsic::new(AppId(1), (1..=30).collect()), + AppExtrinsic::new(AppId(2), (1..=31).collect()), + AppExtrinsic::new(AppId(3), (1..=60).collect()), ]; - let expected_dims = 
BlockDimensions::new(1, 16, chunk_size); + let expected_dims = + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(16), TCHUNK); let (layout, data, dims) = flatten_and_pad_block( - 128.into(), - 256.into(), - chunk_size, + BlockLengthRows(128), + BlockLengthColumns(256), + TCHUNK, extrinsics.as_slice(), Seed::default(), ) .unwrap(); - let expected_layout = vec![(0.into(), 2), (1.into(), 2), (2.into(), 2), (3.into(), 3)]; + let expected_layout = vec![(AppId(0), 2), (AppId(1), 2), (AppId(2), 2), (AppId(3), 3)]; assert_eq!(layout, expected_layout, "The layouts don't match"); - let expected_data = hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae281300"); + let expected_data = 
hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc8b770d00da41597c5157488d7724e03fb8d84a376a43b8f41518a11cc387b669b2ee65009f07e7be5551387a98ba977c732d080dcb0f29a048e3656912c6533e32ee7a0029b721769ce64e43d57133b074d839d531ed1f28510afb45ace10a1f4b794d002d09a0e663266ce1ae7ed1081968a0758e718e997bd362c6b0c34634a9a0b300012737681f7b5d0f281e3afde458bc1e73d2d313c9cf94c05ff3716240a248001320a058d7b3566bd520daaa3ed2bf0ac5b8b120fb852773c3639734b45c9100"); assert_eq!(dims, expected_dims, "Dimensions don't match the expected"); assert_eq!(data, expected_data, "Data doesn't match the expected data"); @@ -753,7 +751,7 @@ mod tests { any_with::>(size_range(1..2048).lift()), ) .prop_map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), + app_id: AppId(app_id), data, }) } @@ -788,14 +786,13 @@ mod tests { } proptest! 
{ - #![proptest_config(ProptestConfig::with_cases(20))] + #![proptest_config(ProptestConfig::with_cases(10))] #[test] - #[ignore] // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( - BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &metrics).unwrap(); + BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &metrics).unwrap(); let columns = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); @@ -806,9 +803,10 @@ mod tests { prop_assert_eq!(result.1[0].as_slice(), &xt.data); } - let public_params = testnet::public_params(dims.cols.as_usize()); + let dims_cols = usize::try_from(dims.cols.0).unwrap(); + let public_params = testnet::public_params(dims_cols); for cell in random_cells(dims.cols, dims.rows, Percent::one() ) { - let row = cell.row.as_usize(); + let row = usize::try_from(cell.row.0).unwrap(); let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap(); prop_assert!(proof.len() == 80); @@ -831,10 +829,11 @@ mod tests { #[test] // newapi done fn test_commitments_verify(ref xts in app_extrinsics_strategy()) { - let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); + let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); - let public_params = testnet::public_params(dims.cols.as_usize()); + let dims_cols = usize::try_from(dims.cols.0).unwrap(); + let public_params = testnet::public_params(dims_cols); let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { @@ -850,10 +849,11 @@ mod tests { 
#[test] // newapi done fn verify_commitmnets_missing_row(ref xts in app_extrinsics_strategy()) { - let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); + let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); let index = app_data_index_try_from_layout(layout).unwrap(); - let public_params = testnet::public_params(dims.cols.as_usize()); + let dims_cols = usize::try_from(dims.cols.0).unwrap(); + let public_params = testnet::public_params(dims_cols); let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { @@ -872,14 +872,13 @@ mod tests { fn test_build_commitments_simple_commitment_check() { let block_rows = BlockLengthRows(256); let block_cols = BlockLengthColumns(256); - let chunk_size = 32; let original_data = br#"test"#; let hash: Seed = hex!("4c29ae91bb0c61204b6f95d1f3c3a50aa6ac2f29da18d4423e05bbbf81056903"); let (_, commitments, dimensions, _) = par_build_commitments( block_rows, block_cols, - chunk_size, + TCHUNK, &[AppExtrinsic::from(original_data.to_vec())], hash, &IgnoreMetrics {}, @@ -888,13 +887,9 @@ mod tests { assert_eq!( dimensions, - BlockDimensions { - rows: 1.into(), - cols: 4.into(), - chunk_size: 32 - } + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK), ); - let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + let expected_commitments = hex!("9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d"); assert_eq!(commitments, 
expected_commitments); } @@ -908,24 +903,18 @@ get erasure coded to ensure redundancy."#; let hash = Seed::default(); let xts = vec![ - AppExtrinsic { - app_id: 0.into(), - data: vec![0], - }, - AppExtrinsic { - app_id: 1.into(), - data: app_id_1_data.to_vec(), - }, - AppExtrinsic { - app_id: 2.into(), - data: app_id_2_data.to_vec(), - }, + AppExtrinsic::new(AppId(0), vec![0]), + AppExtrinsic::new(AppId(1), app_id_1_data.to_vec()), + AppExtrinsic::new(AppId(2), app_id_2_data.to_vec()), ]; - let chunk_size = 32; - - let (layout, data, dims) = - flatten_and_pad_block(32.into(), 4.into(), chunk_size, &xts, hash)?; + let (layout, data, dims) = flatten_and_pad_block( + BlockLengthRows(32), + BlockLengthColumns(4), + TCHUNK, + &xts, + hash, + )?; let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; let cols_1 = sample_cells_from_matrix(&matrix, Some(&[0, 1, 2, 3])); @@ -956,18 +945,13 @@ get erasure coded to ensure redundancy."#; let hash = Seed::default(); let xts = (0..=2) .zip(data) - .map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), - data, - }) + .map(|(app_id, data)| AppExtrinsic::new(AppId(app_id), data)) .collect::>(); - let chunk_size = 32; - let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(32), BlockLengthColumns(4), - chunk_size, + TCHUNK, &xts, hash, )?; @@ -1006,11 +990,10 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); - let chunk_size = 32; let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(128), BlockLengthColumns(2), - chunk_size, + TCHUNK, &[AppExtrinsic::from(orig_data.to_vec())], hash, )?; @@ -1035,22 +1018,15 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; let xts = [ - AppExtrinsic { - app_id: 1.into(), - data: xt1.clone(), - }, - AppExtrinsic { - app_id: 1.into(), - 
data: xt2.clone(), - }, + AppExtrinsic::new(AppId(1), xt1.clone()), + AppExtrinsic::new(AppId(1), xt2.clone()), ]; // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); - let chunk_size = 32; let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(128), BlockLengthColumns(2), - chunk_size, + TCHUNK, &xts, hash, )?; @@ -1076,28 +1052,16 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let xt3 = vec![7]; let xt4 = vec![]; let xts = [ - AppExtrinsic { - app_id: 1.into(), - data: xt1.clone(), - }, - AppExtrinsic { - app_id: 1.into(), - data: xt2.clone(), - }, - AppExtrinsic { - app_id: 2.into(), - data: xt3.clone(), - }, - AppExtrinsic { - app_id: 3.into(), - data: xt4.clone(), - }, + AppExtrinsic::new(AppId(1), xt1.clone()), + AppExtrinsic::new(AppId(1), xt2.clone()), + AppExtrinsic::new(AppId(2), xt3.clone()), + AppExtrinsic::new(AppId(3), xt4.clone()), ]; let expected = vec![ - (1.into(), vec![xt1, xt2]), - (2.into(), vec![xt3]), - (3.into(), vec![xt4]), + (AppId(1), vec![xt1, xt2]), + (AppId(2), vec![xt3]), + (AppId(3), vec![xt4]), ]; let rez = app_extrinsics_group_by_app_id(&xts); println!("{:?}", rez); @@ -1121,6 +1085,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat #[test_case( build_extrinsics(&[]), 32 => padded_len_group(&[], 32) ; "Empty chunk list")] #[test_case( build_extrinsics(&[4096]), 32 => padded_len_group(&[4096], 32) ; "4K chunk")] fn test_padding_len(extrinsics: Vec>, chunk_size: u32) -> u32 { + let chunk_size = NonZeroU32::new(chunk_size).expect("Invalid chunk size .qed"); extrinsics .into_iter() .flat_map(pad_iec_9797_1) @@ -1146,7 +1111,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat par_build_commitments( BlockLengthRows(4), BlockLengthColumns(4), - 32, + TCHUNK, &xts, hash, &IgnoreMetrics {}, @@ -1167,7 +1132,7 @@ Let's see how this gets encoded and then reconstructed by 
sampling only some dat par_build_commitments( BlockLengthRows(4), BlockLengthColumns(4), - 32, + TCHUNK, &xts, hash, &IgnoreMetrics {}, @@ -1218,11 +1183,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat }; let proof = build_proof( &public_params, - BlockDimensions { - rows: BlockLengthRows(1), - cols: BlockLengthColumns(4), - chunk_size: 32, - }, + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK), &ext_m, &[cell], &metrics, @@ -1243,8 +1204,8 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } } - #[test_case( r#"{ "row": 42, "col": 99 }"# => Cell::new(42.into(),99.into()) ; "Simple" )] - #[test_case( r#"{ "row": 4294967295, "col": 99 }"# => Cell::new(4_294_967_295.into(),99.into()) ; "Max row" )] + #[test_case( r#"{ "row": 42, "col": 99 }"# => Cell::new(BlockLengthRows(42), BlockLengthColumns(99)) ; "Simple" )] + #[test_case( r#"{ "row": 4294967295, "col": 99 }"# => Cell::new(BlockLengthRows(4_294_967_295),BlockLengthColumns(99)) ; "Max row" )] // newapi ignore fn serde_block_length_types_untagged(data: &str) -> Cell { serde_json::from_str(data).unwrap() diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 1c9b28a4..875c93e8 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -4,21 +4,23 @@ use crate::pmp::{ merlin::Transcript, traits::Committer, }; +use avail_core::{ensure, AppExtrinsic, AppId, DataLookup}; use codec::Encode; use core::{ cmp::{max, min}, iter, num::NonZeroU16, }; -use da_types::{ensure, AppExtrinsic, AppId, DataLookup}; use kate_recovery::{config::PADDING_TAIL_VALUE, matrix::Dimensions}; use nalgebra::base::DMatrix; use poly_multiproof::{ m1_blst::Proof, traits::{KZGProof, PolyMultiProofNoPrecomp}, }; -use rand::{CryptoRng, Rng, SeedableRng}; -use rand_chacha::ChaChaRng; +use rand_chacha::{ + rand_core::{RngCore, SeedableRng}, + ChaChaRng, +}; use static_assertions::const_assert; use std::collections::BTreeMap; use 
thiserror_no_std::Error; @@ -464,17 +466,17 @@ pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result ArkScalar::from_bytes(&buf).map_err(Error::MultiproofError) } -pub(crate) fn random_scalar(rng: &mut R) -> ArkScalar { - /* - let mut random = [0u8; SCALAR_SIZE]; - rng.fill(&mut random[..SCALAR_SIZE - 1]); - debug_assert!(random[SCALAR_SIZE - 1] == 0u8); +#[allow(clippy::integer_arithmetic)] +pub(crate) fn random_scalar(rng: &mut ChaChaRng) -> ArkScalar { + let mut raw_scalar = [0u8; SCALAR_SIZE]; + + const_assert!(SCALAR_SIZE >= 1); + rng.try_fill_bytes(&mut raw_scalar[..SCALAR_SIZE - 1]) + .expect("ChaChaRng::try_fill_bytes failed"); + debug_assert!(raw_scalar[SCALAR_SIZE - 1] == 0u8); - ArkScalar::from_bytes(&random) + ArkScalar::from_bytes(&raw_scalar) .expect("ArkScalar can be generated from SCALAR_SIZE -1 bytes .qed") - */ - let rnd_values: [u8; SCALAR_SIZE - 1] = rng.gen(); - pad_to_bls_scalar(rnd_values).unwrap() } #[cfg(test)] diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index ee4e8e0e..2196e2ee 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -1,11 +1,6 @@ use super::*; -use crate::gridgen::*; -use crate::testnet; -use crate::Seed; -use da_types::AppExtrinsic; -use da_types::AppId; -use da_types::BlockLengthColumns; -use da_types::BlockLengthRows; +use crate::{gridgen::*, testnet, Seed}; +use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; use hex_literal::hex; use kate_recovery::{ commitments::verify_equality, @@ -52,7 +47,7 @@ fn test_build_commitments_simple_commitment_check() { .collect::>(); assert_eq!(ext_evals.dims(), Dimensions::new_from(2, 4).unwrap()); - let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + let expected_commitments = 
hex!("9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d"); assert_eq!(commits, expected_commitments); assert_eq!(commits_fft_extended, expected_commitments); } @@ -120,7 +115,7 @@ proptest! { .collect::>(); let index = app_data_index_from_lookup(&grid.lookup); - let public_params = testnet::public_params((g_cols as u32).into()); + let public_params = testnet::public_params( BlockLengthColumns(g_cols.into())); for xt in xts { let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap().unwrap(); diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index bf16e19d..46b17f37 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -1,4 +1,4 @@ -use da_types::{AppExtrinsic, DataLookup}; +use avail_core::{AppExtrinsic, AppId, DataLookup}; use hex_literal::hex; use kate_recovery::{ com::{app_specific_cells, decode_app_extrinsics, reconstruct_extrinsics}, @@ -21,29 +21,19 @@ use core::num::NonZeroU16; #[test] fn newapi_test_flatten_block() { let extrinsics: Vec = vec![ - AppExtrinsic { - app_id: 0.into(), - data: (1..=29).collect(), - }, - AppExtrinsic { - app_id: 1.into(), - data: (1..=30).collect(), - }, - AppExtrinsic { - app_id: 2.into(), - data: (1..=31).collect(), - }, - AppExtrinsic { - app_id: 3.into(), - data: (1..=60).collect(), - }, + AppExtrinsic::new(AppId(0), (1..=29).collect()), + AppExtrinsic::new(AppId(1), (1..=30).collect()), + AppExtrinsic::new(AppId(2), (1..=31).collect()), + AppExtrinsic::new(AppId(3), (1..=60).collect()), ]; let expected_dims = Dimensions::new_from(1, 16).unwrap(); let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); - let expected_lookup = - DataLookup::new_from_id_lenght([(0, 2), (1, 2), (2, 2), (3, 3)].into_iter()).unwrap(); + let expected_lookup = 
DataLookup::new_from_id_lenght( + [(AppId(0), 2), (AppId(1), 2), (AppId(2), 2), (AppId(3), 3)].into_iter(), + ) + .unwrap(); assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); assert_eq!( @@ -52,7 +42,7 @@ fn newapi_test_flatten_block() { "Dimensions don't match the expected" ); - let expected_data = hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae281300"); + let expected_data = 
hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc8b770d00da41597c5157488d7724e03fb8d84a376a43b8f41518a11cc387b669b2ee65009f07e7be5551387a98ba977c732d080dcb0f29a048e3656912c6533e32ee7a0029b721769ce64e43d57133b074d839d531ed1f28510afb45ace10a1f4b794d002d09a0e663266ce1ae7ed1081968a0758e718e997bd362c6b0c34634a9a0b300012737681f7b5d0f281e3afde458bc1e73d2d313c9cf94c05ff3716240a248001320a058d7b3566bd520daaa3ed2bf0ac5b8b120fb852773c3639734b45c9100"); let data = evals .evals @@ -124,10 +114,7 @@ get erasure coded to ensure redundancy."#; let hash = Seed::default(); let xts = (0..=2) .zip(data) - .map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), - data, - }) + .map(|(id, data)| AppExtrinsic::new(AppId(id), data)) .collect::>(); let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 32, 4, hash) diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index bfed8059..790d9c5f 100644 --- a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -1,4 +1,4 @@ -use da_types::{AppExtrinsic, DataLookup}; +use avail_core::{AppExtrinsic, AppId, DataLookup}; use kate_recovery::{data::DataCell, index::AppDataIndex, matrix::Position}; use once_cell::sync::Lazy; use poly_multiproof::{m1_blst::M1NoPrecomp, traits::AsBytes}; @@ -22,7 +22,7 @@ fn app_extrinsic_strategy() -> impl Strategy { any_with::>(size_range(1..2048).lift()), ) .prop_map(|(app_id, data)| 
AppExtrinsic { - app_id: app_id.into(), + app_id: AppId(app_id), data, }) } diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 2cbbfa8a..e61c36ec 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -4,8 +4,8 @@ use crate::{ gridgen::{tests::sample_cells, EvaluationGrid}, Seed, }; +use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; use core::num::NonZeroU16; -use da_types::AppExtrinsic; use kate_recovery::{ com::reconstruct_extrinsics, data::Cell as DCell, @@ -21,14 +21,8 @@ fn test_multiple_extrinsics_for_same_app_id() { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; let xts = [ - AppExtrinsic { - app_id: 1.into(), - data: xt1.clone(), - }, - AppExtrinsic { - app_id: 1.into(), - data: xt2.clone(), - }, + AppExtrinsic::new(AppId(1), xt1.clone()), + AppExtrinsic::new(AppId(1), xt2.clone()), ]; // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); @@ -75,7 +69,9 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { let mut rng = ChaChaRng::from_seed(RNG_SEED); let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i]); for (x, y) in sampled { - let cell = Cell { row: (y as u32).into(), col: (x as u32).into() }; + let row = BlockLengthRows(u32::try_from(y).unwrap()); + let col = BlockLengthColumns(u32::try_from(x).unwrap()); + let cell = Cell::new( row, col); let proof = polys.proof(pp, &cell).unwrap(); let mut content = [0u8; 80]; content[..48].copy_from_slice(&proof.to_bytes().unwrap()[..]); @@ -98,18 +94,9 @@ get erasure coded to ensure redundancy."#; br#""Let's see how this gets encoded and then reconstructed by sampling only some data."#; let xts = vec![ - AppExtrinsic { - app_id: 0.into(), - data: vec![0], - }, - AppExtrinsic { - app_id: 1.into(), - data: app_id_1_data.to_vec(), - }, - AppExtrinsic { - app_id: 2.into(), - data: 
app_id_2_data.to_vec(), - }, + AppExtrinsic::new(AppId(0), vec![0]), + AppExtrinsic::new(AppId(1), app_id_1_data.to_vec()), + AppExtrinsic::new(AppId(2), app_id_2_data.to_vec()), ]; let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 4, 32, Seed::default()) diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 1e00fef7..c9cb2180 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -1,8 +1,12 @@ #![cfg_attr(not(feature = "std"), no_std)] #![deny(clippy::integer_arithmetic)] -use core::{convert::TryInto, num::TryFromIntError}; -use da_types::{BlockLengthColumns, BlockLengthRows}; +use avail_core::{BlockLengthColumns, BlockLengthRows}; +use core::{ + convert::TryInto, + num::{NonZeroU32, TryFromIntError}, +}; +use derive_more::Constructor; #[cfg(feature = "std")] pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsScalar}; use kate_recovery::matrix::Dimensions; @@ -13,6 +17,8 @@ use thiserror_no_std::Error; use crate::config::DATA_CHUNK_SIZE; pub const LOG_TARGET: &str = "kate"; +pub const U32_USIZE_ERR: &str = "`u32` cast to `usize` overflows, unsupported platform"; + pub type Seed = [u8; 32]; #[cfg(feature = "std")] @@ -53,27 +59,28 @@ pub mod config { /// - Dedup this from `kate-recovery` once that library support `no-std`. 
#[cfg(feature = "std")] pub mod testnet { - use super::{BlockLengthColumns, PublicParameters}; + use super::*; use hex_literal::hex; use once_cell::sync::Lazy; use poly_multiproof::ark_ff::{BigInt, Fp}; use poly_multiproof::ark_serialize::CanonicalDeserialize; use poly_multiproof::m1_blst; use poly_multiproof::m1_blst::{Fr, G1, G2}; - use rand::SeedableRng; - use rand_chacha::ChaChaRng; + use rand_chacha::{rand_core::SeedableRng, ChaChaRng}; use std::{collections::HashMap, sync::Mutex}; static SRS_DATA: Lazy>> = Lazy::new(|| Mutex::new(HashMap::new())); pub fn public_params(max_degree: BlockLengthColumns) -> PublicParameters { + let max_degree: u32 = max_degree.into(); let mut srs_data_locked = SRS_DATA.lock().unwrap(); srs_data_locked - .entry(max_degree.0) + .entry(max_degree) .or_insert_with(|| { let mut rng = ChaChaRng::seed_from_u64(42); - PublicParameters::setup(max_degree.as_usize(), &mut rng).unwrap() + let max_degree = usize::try_from(max_degree).unwrap(); + PublicParameters::setup(max_degree, &mut rng).unwrap() }) .clone() } @@ -144,7 +151,7 @@ pub mod testnet { let pmp_ev = GeneralEvaluationDomain::::new(1024).unwrap(); let pmp_poly = pmp_ev.ifft(&pmp_evals); - let pubs = testnet::public_params(da_types::BlockLengthColumns(1024)); + let pubs = testnet::public_params(BlockLengthColumns(1024)); let dp_commit = pubs.commit_key().commit(&dp_poly).unwrap().0.to_bytes(); let mut pmp_commit = [0u8; 48]; @@ -203,31 +210,18 @@ pub fn padded_len(len: u32, chunk_size: u32) -> u32 { iec_9797_1_len + pad_to_chunk_extra } -#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Constructor)] pub struct BlockDimensions { pub rows: BlockLengthRows, pub cols: BlockLengthColumns, - pub chunk_size: u32, + pub chunk_size: NonZeroU32, } impl BlockDimensions { - pub fn size(&self) -> usize { - self.rows - .0 - .saturating_mul(self.cols.0) - .saturating_mul(self.chunk_size) as usize - } - - pub fn new(rows: R, cols: C, chunk_size: u32) -> Self 
- where - R: Into, - C: Into, - { - Self { - rows: rows.into(), - cols: cols.into(), - chunk_size, - } + pub fn size(&self) -> Option { + let rows_cols = self.rows.0.checked_mul(self.cols.0)?; + let rows_cols_chunk = rows_cols.checked_mul(self.chunk_size.get())?; + usize::try_from(rows_cols_chunk).ok() } } diff --git a/primitives/avail/src/asdr.rs b/primitives/avail/src/asdr.rs deleted file mode 100644 index 5064fdba..00000000 --- a/primitives/avail/src/asdr.rs +++ /dev/null @@ -1,36 +0,0 @@ -use codec::Encode; -pub use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem, GetAppId}; -use sp_runtime::traits::SignedExtension; - -mod app_unchecked_extrinsic; -pub use app_unchecked_extrinsic::*; - -impl From<&AppUncheckedExtrinsic> for AppExtrinsic -where - A: Encode, - C: Encode, - S: Encode, - E: SignedExtension + GetAppId, -{ - fn from(app_ext: &AppUncheckedExtrinsic) -> Self { - Self { - app_id: app_ext.app_id(), - data: app_ext.encode(), - } - } -} - -impl From> for AppExtrinsic -where - A: Encode, - C: Encode, - S: Encode, - E: SignedExtension + GetAppId, -{ - fn from(app_ext: AppUncheckedExtrinsic) -> Self { - Self { - app_id: app_ext.app_id(), - data: app_ext.encode(), - } - } -} diff --git a/primitives/avail/src/lib.rs b/primitives/avail/src/lib.rs deleted file mode 100644 index 1ecd2825..00000000 --- a/primitives/avail/src/lib.rs +++ /dev/null @@ -1,81 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] - -pub use da_types::{BlockLengthColumns, BlockLengthRows}; -use sp_runtime::Perbill; - -pub mod opaque_extrinsic; -pub use opaque_extrinsic::*; - -/// Customized headers. -pub mod header; -pub use header::*; - -/// Kate Commitment on Headers. 
-pub mod kate_commitment; -pub use kate_commitment::*; - -/// Application Specific Data Retrieval -pub mod asdr; - -pub mod sha2; -pub mod traits; -pub use sha2::ShaTwo256; - -pub mod data_proof; -pub use data_proof::DataProof; - -pub mod well_known_keys { - /// Public params used to generate Kate commitment - pub const KATE_PUBLIC_PARAMS: &[u8] = b":kate_public_params:"; -} - -/// We allow `Normal` extrinsics to fill up the block up to 90%, the rest can be used -/// by Operational extrinsics. -pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(90); - -pub const BLOCK_CHUNK_SIZE: u32 = 32; - -/// Money matters. -pub mod currency { - - pub type Balance = u128; - - /// AVL has 18 decimal positions. - pub const AVL: Balance = 1_000_000_000_000_000_000; - - /// Cents of AVL has 16 decimal positions (100 Cents = $1) - /// 1 DOLLARS = 10_000_000_000_000_000 - pub const CENTS: Balance = AVL / 100; - - /// Millicent of AVL has 13 decimal positions( 100 mCents = 1 cent). - pub const MILLICENTS: Balance = CENTS / 1_000; -} - -#[repr(u8)] -pub enum InvalidTransactionCustomId { - /// The AppId is not registered. - InvalidAppId = 137, - /// Extrinsic is not allowed for the given `AppId`. - ForbiddenAppId, - /// Max padded length was exceeded. - MaxPaddedLenExceeded, -} - -/// Provides an implementation of [`frame_support::traits::Randomness`] that should only be used in -/// on Benchmarks! 
-pub struct BenchRandomness(sp_std::marker::PhantomData); - -impl frame_support::traits::Randomness for BenchRandomness -where - Output: codec::Decode + Default, - T: Default, -{ - fn random(subject: &[u8]) -> (Output, T) { - use sp_runtime::traits::TrailingZeroInput; - - ( - Output::decode(&mut TrailingZeroInput::new(subject)).unwrap_or_default(), - T::default(), - ) - } -} diff --git a/primitives/avail/src/sha2.rs b/primitives/avail/src/sha2.rs deleted file mode 100644 index 39caa232..00000000 --- a/primitives/avail/src/sha2.rs +++ /dev/null @@ -1,40 +0,0 @@ -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::{hashing::sha2_256, storage::StateVersion, Hasher, RuntimeDebug}; -use sp_runtime::traits::Hash; -use sp_std::vec::Vec; -use sp_trie::{LayoutV0, LayoutV1, TrieConfiguration as _}; - -/// Sha2 256 wrapper which supports `beefy-merkle-tree::Hasher`. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct ShaTwo256 {} - -impl Hasher for ShaTwo256 { - type Out = sp_core::H256; - type StdHasher = hash256_std_hasher::Hash256StdHasher; - const LENGTH: usize = 32; - - fn hash(s: &[u8]) -> Self::Out { - sha2_256(s).into() - } -} - -impl Hash for ShaTwo256 { - type Output = sp_core::H256; - - fn trie_root(input: Vec<(Vec, Vec)>, version: StateVersion) -> Self::Output { - match version { - StateVersion::V0 => LayoutV0::::trie_root(input), - StateVersion::V1 => LayoutV1::::trie_root(input), - } - } - - fn ordered_trie_root(input: Vec>, version: StateVersion) -> Self::Output { - match version { - StateVersion::V0 => LayoutV0::::ordered_trie_root(input), - StateVersion::V1 => LayoutV1::::ordered_trie_root(input), - } - } -} diff --git a/primitives/avail/src/traits.rs b/primitives/avail/src/traits.rs deleted file mode 100644 index 5b951267..00000000 --- a/primitives/avail/src/traits.rs +++ /dev/null @@ -1,76 +0,0 @@ -use codec::{Codec, Decode}; 
-use sp_core::U256; -use sp_runtime::{ - traits::{ - AtLeast32BitUnsigned, Hash as HashT, MaybeDisplay, MaybeFromStr, MaybeSerializeDeserialize, - Member, SimpleBitOps, - }, - Digest, -}; -use sp_std::{convert::TryFrom, fmt::Debug, hash::Hash as StdHash}; - -use crate::header::HeaderExtension; - -/// Header block number trait. -pub trait HeaderBlockNumber: - Member - + AtLeast32BitUnsigned - + Codec - + MaybeSerializeDeserialize - + MaybeDisplay - + MaybeFromStr - + MaybeFromStr - + StdHash - + Copy - + Into - + TryFrom - + Debug - + Eq -{ -} -impl< - T: Member - + AtLeast32BitUnsigned - + Codec - + MaybeSerializeDeserialize - + MaybeDisplay - + MaybeFromStr - + StdHash - + Copy - + Into - + TryFrom - + Debug - + Eq, - > HeaderBlockNumber for T -{ -} - -/// Header hash. -pub trait HeaderHash: HashT {} -impl HeaderHash for T {} - -pub trait HeaderHashOutput: MaybeDisplay + Decode + SimpleBitOps + Ord {} -impl HeaderHashOutput for T {} - -/// Extended header access -pub trait ExtendedHeader { - /// Header number. - type Number; - - /// Header hash type - type Hash; - - /// Creates new header. 
- fn new( - number: Self::Number, - extrinsics_root: Self::Hash, - state_root: Self::Hash, - parent_hash: Self::Hash, - digest: Digest, - extension: HeaderExtension, - ) -> Self; - - fn extension(&self) -> &HeaderExtension; - - fn set_extension(&mut self, extension: HeaderExtension); -} diff --git a/primitives/nomad/signature/src/signature.rs b/primitives/nomad/signature/src/signature.rs index 70844cbe..6f924fe1 100644 --- a/primitives/nomad/signature/src/signature.rs +++ b/primitives/nomad/signature/src/signature.rs @@ -1,7 +1,9 @@ // Code adapted from: https://github.com/gakonst/ethers-rs/blob/master/ethers-core/src/types/signature.rs use alloc::{borrow::ToOwned, string::String, vec::Vec}; -use core::{convert::TryFrom, fmt, str::FromStr}; +use core::convert::TryFrom; +#[cfg(feature = "std")] +use core::{fmt, str::FromStr}; use elliptic_curve::{consts::U32, sec1::ToEncodedPoint as _}; use frame_support::{pallet_prelude::*, sp_runtime::traits::Keccak256}; diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml deleted file mode 100644 index f011034b..00000000 --- a/primitives/types/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "da-types" -version = "0.4.4" -authors = [] -edition = "2018" - -[dependencies] -# Substrate -parity-scale-codec = { version = "3", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "2.3.1", default-features = false, features = ["derive"] } -sp-core = { version = "7", default-features = false } - -# 3rd-parties -derive_more = "0.99.17" -hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } -num-traits = { version = "0.2", default-features = false } -serde = { version = "1.0", features = ["derive"], optional = true } -thiserror-no-std = "2.0.2" - -[dev-dependencies] -test-case = "1.2.3" - -[features] -default = ["std"] -std = [ - "serde", - "hex", - "sp-core/std", - "parity-scale-codec/std", - "scale-info/std", - 
"num-traits/std", -] diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs deleted file mode 100644 index 534da907..00000000 --- a/primitives/types/src/lib.rs +++ /dev/null @@ -1,174 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] -extern crate alloc; -use alloc::vec::Vec; -use derive_more::{Add, Constructor, Deref, Display, From, Into, Mul}; -use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::RuntimeDebug; - -mod data_lookup; -mod get_app_id; -pub use data_lookup::{DataLookup, DataLookupIndexItem, Error as DataLookupError}; -pub use get_app_id::*; - -/// Raw Extrinsic with application id. -#[derive(Clone, TypeInfo, Default, Encode, Decode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct AppExtrinsic { - pub app_id: AppId, - #[cfg_attr(feature = "std", serde(with = "hex"))] - pub data: Vec, -} -#[cfg(feature = "substrate")] -impl From> for AppExtrinsic -where - A: Encode, - C: Encode, - S: Encode, - E: sp_runtime::traits::SignedExtension + crate::GetAppId, -{ - fn from(ue: sp_runtime::generic::UncheckedExtrinsic) -> Self { - let app_id = ue - .signature - .as_ref() - .map(|(_, _, extra)| extra.app_id()) - .unwrap_or_default(); - let data = ue.encode(); - - Self { app_id, data } - } -} - -impl GetAppId for AppExtrinsic { - fn app_id(&self) -> AppId { - self.app_id - } -} - -#[derive( - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - Add, - From, - Deref, - TypeInfo, - Encode, - Decode, - Display, - Into, - Default, - MaxEncodedLen, - RuntimeDebug, -)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct AppId(#[codec(compact)] pub u32); - -impl num_traits::Zero for AppId { - fn zero() -> Self { - AppId(num_traits::Zero::zero()) - } - - fn is_zero(&self) -> bool { - self.0.is_zero() - } -} - -impl From> for AppExtrinsic { - #[inline] - fn from(data: Vec) -> Self { - 
Self { - data, - app_id: <_>::default(), - } - } -} - -/// Strong type for `BlockLength::cols` -#[derive( - Clone, - Copy, - Debug, - From, - Into, - Add, - Mul, - Display, - PartialEq, - Eq, - Encode, - Decode, - TypeInfo, - PartialOrd, - Ord, - Constructor, - MaxEncodedLen, -)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[mul(forward)] -pub struct BlockLengthColumns(#[codec(compact)] pub u32); - -impl BlockLengthColumns { - #[inline] - pub fn as_usize(&self) -> usize { - self.0 as usize - } -} - -/// Strong type for `BlockLength::rows` -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive( - Clone, - Copy, - Debug, - From, - Into, - Add, - Mul, - Display, - PartialEq, - Eq, - Encode, - Decode, - TypeInfo, - PartialOrd, - Ord, - Constructor, - MaxEncodedLen, -)] -#[mul(forward)] -pub struct BlockLengthRows(#[codec(compact)] pub u32); - -impl BlockLengthRows { - #[inline] - pub fn as_usize(&self) -> usize { - self.0 as usize - } -} - -/// Return Err of the expression: `return Err($expression);`. -/// -/// Used as `fail!(expression)`. -#[macro_export] -macro_rules! fail { - ( $y:expr ) => {{ - return Err($y.into()); - }}; -} - -/// Evaluate `$x:expr` and if not true return `Err($y:expr)`. -/// -/// Used as `ensure!(expression_to_ensure, expression_to_return_on_false)`. -#[macro_export] -macro_rules! ensure { - ( $x:expr, $y:expr $(,)? 
) => {{ - if !$x { - $crate::fail!($y); - } - }}; -} From a0679eee8d1236d0dbff1f33668568501a86773f Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Wed, 5 Jul 2023 16:32:11 +0200 Subject: [PATCH 65/87] MerkleProof under 'runtime' feature --- .github/workflows/default.yml | 2 +- core/Cargo.toml | 7 ++++--- core/src/data_proof.rs | 14 +++++++++----- 3 files changed, 14 insertions(+), 9 deletions(-) diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index 4ca4a8dd..0c68abdb 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -49,7 +49,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: test - args: --workspace + args: --workspace --features "avail-core/runtime" env: RUSTFLAGS: "-C instrument-coverage" LLVM_PROFILE_FILE: "profile-%p-%m.profraw" diff --git a/core/Cargo.toml b/core/Cargo.toml index 16e603ac..84e8258c 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -7,7 +7,7 @@ license = "Apache-2.0" [dependencies] # Others -derive_more = { version = "0.99.17", default-features = false, features = ["constructor", "from", "add", "deref", "mul"] } +derive_more = { version = "0.99.17", default-features = false, features = ["constructor", "from", "add", "deref", "mul", "into"] } hash256-std-hasher = { version = "0.15.2", default-features = false } hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } log = { version = "0.4.8", default-features = false } @@ -15,7 +15,7 @@ serde = { version = "1", optional = true, features = ["derive"] } thiserror-no-std = "2.0.2" # Substrate -beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false } +beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false, optional = true } codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", 
"max-encoded-len"] } scale-info = { version = "2", default-features = false, features = ["derive"] } sp-arithmetic = { version = "*", default-features = false } @@ -46,7 +46,7 @@ std = [ "sp-trie/std", "sp-arithmetic/std", "hash256-std-hasher/std", - "beefy-merkle-tree/std", + "beefy-merkle-tree?/std", "derive_more/display", "sp-runtime-interface?/std", "sp-runtime?/std", @@ -56,6 +56,7 @@ runtime = [ "sp-runtime-interface", "sp-runtime", "frame-support", + "beefy-merkle-tree", ] header-backward-compatibility-test = [] diff --git a/core/src/data_proof.rs b/core/src/data_proof.rs index d2437402..d61e9d5b 100644 --- a/core/src/data_proof.rs +++ b/core/src/data_proof.rs @@ -1,10 +1,11 @@ -use crate::ensure; +#[cfg(feature = "runtime")] use beefy_merkle_tree::MerkleProof; use codec::{Decode, Encode}; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{hashing::sha2_256, H256}; -use sp_std::{convert::TryFrom, vec::Vec}; +use sp_core::H256; +use sp_std::vec::Vec; + use thiserror_no_std::Error; /// Wrapper of `beefy-merkle-tree::MerkleProof` with codec support. 
@@ -54,7 +55,8 @@ pub enum DataProofTryFromError { InvalidLeafIndex, } -impl TryFrom<&MerkleProof> for DataProof +#[cfg(feature = "runtime")] +impl core::convert::TryFrom<&MerkleProof> for DataProof where T: AsRef<[u8]>, H: PartialEq + Eq + AsRef<[u8]>, @@ -62,6 +64,8 @@ where type Error = DataProofTryFromError; fn try_from(merkle_proof: &MerkleProof) -> Result { + use crate::ensure; + use sp_core::hashing::sha2_256; use DataProofTryFromError::*; let root = <[u8; 32]>::try_from(merkle_proof.root.as_ref()) @@ -99,7 +103,7 @@ where mod test { use crate::ShaTwo256; use hex_literal::hex; - use sp_core::H512; + use sp_core::{hashing::sha2_256, H512}; use sp_std::cmp::min; use test_case::test_case; From 890b8b48b361e2b8a29381e1392a5eddf94e7dfe Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Thu, 6 Jul 2023 14:26:48 +0200 Subject: [PATCH 66/87] DataLookup simplification --- .github/workflows/default.yml | 8 +- core/src/asdr.rs | 6 +- core/src/data_lookup.rs | 158 ------------------------ core/src/data_lookup/mod.rs | 222 ++++++++++++++++++++++++++++++++++ core/src/header/mod.rs | 2 +- core/src/lib.rs | 16 +-- 6 files changed, 244 insertions(+), 168 deletions(-) delete mode 100644 core/src/data_lookup.rs create mode 100644 core/src/data_lookup/mod.rs diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index 0c68abdb..fa3aae9f 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -43,7 +43,13 @@ jobs: uses: actions-rs/cargo@v1 with: command: fmt - args: --check + args: --check --features "avail-core/runtime" + + - name: Check Clippy + uses: actions-rs/cargo@v1 + with: + command: clippy + args: --check --features "avail-core/runtime" - name: Run tests uses: actions-rs/cargo@v1 diff --git a/core/src/asdr.rs b/core/src/asdr.rs index c8d53123..b66c2066 100644 --- a/core/src/asdr.rs +++ b/core/src/asdr.rs @@ -383,7 +383,11 @@ where let compact_len = codec::Compact::(tmp.len() as u32); // Allocate the output buffer with the 
correct length - let mut output = Vec::with_capacity(compact_len.size_hint() + tmp.len()); + let output_len = compact_len + .size_hint() + .checked_add(tmp.len()) + .expect("Cannot encode this `AppUncheckedExtrinsic` into memory"); + let mut output = Vec::with_capacity(output_len); compact_len.encode_to(&mut output); output.extend(tmp); diff --git a/core/src/data_lookup.rs b/core/src/data_lookup.rs deleted file mode 100644 index c445d7ea..00000000 --- a/core/src/data_lookup.rs +++ /dev/null @@ -1,158 +0,0 @@ -use codec::{Decode, Encode}; -use core::convert::TryFrom; -use derive_more::Constructor; -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_arithmetic::traits::{CheckedAdd, Zero}; -use sp_core::RuntimeDebug; -use sp_std::vec::Vec; -use thiserror_no_std::Error; - -use crate::{ensure, AppId}; - -#[derive(PartialEq, Eq, Clone, Encode, Decode, Default, TypeInfo, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[cfg_attr(test, derive(Constructor))] -pub struct DataLookup { - /// size of the look up - #[codec(compact)] - size: u32, - /// sorted vector of tuples(key, start index) - index: Vec, -} - -#[derive(Error, Debug, Clone, Copy, PartialEq, Eq)] -pub enum Error { - #[error("Input data is not sorted by AppId")] - DataNotSorted, - #[error("Data is empty on AppId {0}")] - DataEmptyOn(AppId), - #[error("Offset overflows")] - OffsetOverflows, -} - -impl DataLookup { - /// Creates the `DataLookup` from an iterator sorted by `AppId` - pub fn new_from_id_lenght(data: I) -> Result - where - I: Iterator, - AppId: From, - L: Zero + CheckedAdd, - u32: TryFrom, - { - let mut offset = 0; - let mut maybe_prev_id = None; - - let index = data - // .skip_while(|(id, _)| id.is_zero()) - .map(|(id, len)| { - // Check sorted by AppId - let id = AppId::from(id); - if let Some(prev_id) = maybe_prev_id.replace(id) { - ensure!(prev_id < id, Error::DataNotSorted); - } - - // Check non-empty data per AppId - let 
len = u32::try_from(len).map_err(|_| Error::OffsetOverflows)?; - ensure!(len > 0, Error::DataEmptyOn(id)); - - let item = DataLookupIndexItem::new(id, offset); - offset = offset.checked_add(len).ok_or(Error::OffsetOverflows)?; - - Ok::(item) - }) - .filter(|res_item| { - // Filter valid items where AppId == 0 - if let Ok(item) = res_item.as_ref() { - !item.app_id.is_zero() - } else { - true - } - }) - .collect::>()?; - - Ok(Self { - size: offset, - index, - }) - } - - pub fn len(&self) -> u32 { - self.size - } - - pub fn is_empty(&self) -> bool { - self.size == 0 - } - - pub fn index(&self) -> &Vec { - &self.index - } - - pub fn range_of(&self, app_id: AppId) -> Option<(u32, u32)> { - self.index - .iter() - .position(|item| item.app_id == app_id) - .map(|pos| { - let start_idx = unsafe { self.index.get_unchecked(pos).start }; - let end_idx = self - .index - .get(pos.saturating_add(1)) - .map(|item| item.start) - .unwrap_or(self.size); - debug_assert!(start_idx < end_idx); - (start_idx, end_idx) - }) - } -} - -#[derive( - PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo, RuntimeDebug, Constructor, -)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct DataLookupIndexItem { - pub app_id: AppId, - #[codec(compact)] - pub start: u32, -} - -impl From<(A, S)> for DataLookupIndexItem -where - A: Into, - S: Into, -{ - fn from(value: (A, S)) -> Self { - Self { - app_id: value.0.into(), - start: value.1.into(), - } - } -} - -#[cfg(test)] -mod test { - use super::*; - use test_case::test_case; - - fn into_lookup_items(vals: I) -> Vec - where - I: IntoIterator, - T: Into, - { - vals.into_iter() - .map(|v| (AppId(v.0.into()), v.1).into()) - .collect::>() - } - - #[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(DataLookup::new(185, into_lookup_items([(1u32, 15), (2, 35)]))); "Valid case")] - #[test_case( vec![(0, usize::MAX)] => Err(Error::OffsetOverflows); "Offset overflows at zero")] - #[test_case( vec![(0, (u32::MAX -1) as usize), (1, 
2)] => Err(Error::OffsetOverflows); "Offset overflows at non zero")] - #[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsortend data")] - #[test_case( vec![] => Ok(DataLookup::new(0, vec![])); "Empty data")] - fn from_len(id_len_data: Vec<(u32, usize)>) -> Result { - let iter = id_len_data.into_iter().map(|(id, len)| (AppId(id), len)); - - DataLookup::new_from_id_lenght(iter) - } -} diff --git a/core/src/data_lookup/mod.rs b/core/src/data_lookup/mod.rs new file mode 100644 index 00000000..032e1dca --- /dev/null +++ b/core/src/data_lookup/mod.rs @@ -0,0 +1,222 @@ +use codec::{Decode, Encode, Input}; +use core::convert::TryFrom; +use scale_info::{Type, TypeInfo}; +use sp_core::RuntimeDebug; +use sp_std::{ops::Range, vec::Vec}; +use thiserror_no_std::Error; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +use crate::{ensure, AppId}; + +mod compact; +use compact::CompactDataLookup; + +pub type DataLookupRange = Range; + +#[derive(Error, Debug, Clone, Copy, PartialEq, Eq)] +pub enum Error { + #[error("Input data is not sorted by AppId")] + DataNotSorted, + #[error("Data is empty on AppId {0}")] + DataEmptyOn(AppId), + #[error("Offset overflows")] + OffsetOverflows, +} + +#[derive(PartialEq, Eq, Clone, Default, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr( + feature = "std", + serde(try_from = "CompactDataLookup", into = "CompactDataLookup") +)] +pub struct DataLookup { + pub(crate) index: Vec<(AppId, DataLookupRange)>, +} + +impl DataLookup { + pub fn len(&self) -> u32 { + self.index.last().map(|(_id, range)| range.end).unwrap_or(0) + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + pub fn range_of(&self, app_id: AppId) -> Option { + self.index + .iter() + .find(|(id, _)| *id == app_id) + .map(|(_, range)| range) + .cloned() + } + + pub fn projected_range_of(&self, app_id: AppId, chunk_size: u32) -> Option { + self.range_of(app_id).and_then(|range| { + let start = 
range.start.checked_mul(chunk_size)?; + let end = range.end.checked_mul(chunk_size)?; + Some(start..end) + }) + } + + pub fn projected_ranges(&self, chunk_size: u32) -> Result)>, Error> { + self.index + .iter() + .map(|(id, range)| { + let start = range + .start + .checked_mul(chunk_size) + .ok_or(Error::OffsetOverflows)?; + let end = range + .end + .checked_mul(chunk_size) + .ok_or(Error::OffsetOverflows)?; + Ok((*id, start..end)) + }) + .collect() + } +} + +impl DataLookup { + pub fn from_id_and_len_iter(iter: I) -> Result + where + I: Iterator, + u32: From, + u32: TryFrom, + { + let mut offset: u32 = 0; + let mut maybe_prev_id = None; + + let index = iter + .map(|(id, len)| { + // Check sorted by AppId + let id = AppId(id.into()); + if let Some(prev_id) = maybe_prev_id.replace(id) { + ensure!(prev_id < id, Error::DataNotSorted); + } + + // Check non-empty data per AppId + let len = u32::try_from(len).map_err(|_| Error::OffsetOverflows)?; + ensure!(len > 0, Error::DataEmptyOn(id)); + + // Create range and update `offset`. 
+ let end = offset.checked_add(len).ok_or(Error::OffsetOverflows)?; + let range = offset..end; + offset = end; + + Ok((id, range)) + }) + .collect::>()?; + + Ok(Self { index }) + } +} + +impl TryFrom for DataLookup { + type Error = Error; + + fn try_from(compacted: CompactDataLookup) -> Result { + let mut offset = 0; + let mut prev_id = AppId(0); + let mut index = Vec::with_capacity( + compacted + .index + .len() + .checked_add(1) + .ok_or(Error::OffsetOverflows)?, + ); + + for c_item in compacted.index.into_iter() { + index.push((prev_id, offset..c_item.start)); + prev_id = c_item.app_id; + offset = c_item.start; + } + + let last_range = offset..compacted.size; + if !last_range.is_empty() { + index.push((prev_id, offset..compacted.size)); + } + + let lookup = DataLookup { index }; + ensure!(lookup.len() == compacted.size, Error::DataNotSorted); + + Ok(lookup) + } +} + +// Encoding +// ================================== + +impl Encode for DataLookup { + /// Encodes as a `compact::DataLookup`. + fn encode(&self) -> Vec { + let compacted = CompactDataLookup::from_expanded(self); + compacted.encode() + } +} + +impl Decode for DataLookup { + /// Decodes from a `compact::DataLookup`. 
+ fn decode(input: &mut I) -> Result { + let compacted = CompactDataLookup::decode(input)?; + DataLookup::try_from(compacted).map_err(|_| codec::Error::from("Invalid `DataLookup`")) + } +} + +impl TypeInfo for DataLookup { + type Identity = Self; + + fn type_info() -> Type { + CompactDataLookup::type_info() + } +} + +#[cfg(test)] +mod test { + use super::*; + use test_case::test_case; + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(vec![(0,0..15),(1, 15..35), (2, 35..185)]); "Valid case")] + #[test_case( vec![(0, usize::MAX)] => Err(Error::OffsetOverflows); "Offset overflows at zero")] + #[test_case( vec![(0, (u32::MAX -1) as usize), (1, 2)] => Err(Error::OffsetOverflows); "Offset overflows at non zero")] + #[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsortend data")] + #[test_case( vec![] => Ok(vec![]); "Empty data")] + fn from_id_and_len( + id_len_data: Vec<(u32, usize)>, + ) -> Result, Error> { + let iter = id_len_data.into_iter().map(|(id, len)| (AppId(id), len)); + + DataLookup::from_id_and_len_iter(iter).map(|lookup| { + lookup + .index + .iter() + .map(|(id, range)| (id.0, range.clone())) + .collect::>() + }) + } + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] => CompactDataLookup::new(185, vec![(1u32, 15u32).into(),(2u32,35u32).into()]).encode(); "Valid case")] + #[test_case( vec![(0, 100)] => CompactDataLookup::new(100, vec![]).encode(); "Only Zero AppId")] + #[test_case( vec![] => CompactDataLookup::new(0, vec![]).encode(); "Empty")] + + fn check_compressed_encode(id_lens: Vec<(u32, usize)>) -> Vec { + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + lookup.encode() + } + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] ; "Valid case")] + #[test_case( vec![(0, 15)] ; "Only Zero AppId")] + #[test_case( vec![] ; "Empty")] + fn compressed_conversions(id_lens: Vec<(u32, usize)>) { + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + + let compact_lookup = 
CompactDataLookup::from_expanded(&lookup); + let expanded_lookup = DataLookup::try_from(compact_lookup.clone()).unwrap(); + + assert_eq!( + lookup, expanded_lookup, + "Lookup: {lookup:?} -> Compacted: {compact_lookup:?} -> Expanded: {expanded_lookup:?}" + ); + } +} diff --git a/core/src/header/mod.rs b/core/src/header/mod.rs index 6ec9cb5e..3ec17f13 100644 --- a/core/src/header/mod.rs +++ b/core/src/header/mod.rs @@ -452,7 +452,7 @@ mod tests { }; let extension = extension::v1::HeaderExtension { commitment, - app_lookup: DataLookup::new_from_id_lenght(vec![(AppId(0), 1)].into_iter()) + app_lookup: DataLookup::from_id_and_len_iter([(AppId(0), 1)].into_iter()) .expect("Valid DataLookup .qed"), }; let digest = Digest { diff --git a/core/src/lib.rs b/core/src/lib.rs index 0e5ee175..f081dfe4 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -1,9 +1,8 @@ #![cfg_attr(not(feature = "std"), no_std)] +#![deny(clippy::integer_arithmetic)] use codec::{Decode, Encode, MaxEncodedLen}; -#[cfg(feature = "std")] -use derive_more::Display; -use derive_more::{Add, Constructor, Deref, Into, Mul}; +use derive_more::{Add, Constructor, Deref, Display, Into, Mul}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -34,7 +33,7 @@ pub mod data_proof; pub use data_proof::DataProof; pub mod data_lookup; -pub use data_lookup::{DataLookup, DataLookupIndexItem}; +pub use data_lookup::*; pub mod app_extrinsic; pub use app_extrinsic::*; @@ -71,8 +70,9 @@ pub enum InvalidTransactionCustomId { Into, MaxEncodedLen, RuntimeDebug, + Display, )] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Display))] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct AppId(#[codec(compact)] pub u32); impl Zero for AppId { @@ -101,13 +101,14 @@ impl Zero for AppId { Into, Constructor, MaxEncodedLen, + Display, )] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Display, Debug))] +#[cfg_attr(feature = "std", derive(Serialize, 
Deserialize, Debug))] #[mul(forward)] pub struct BlockLengthColumns(#[codec(compact)] pub u32); /// Strong type for `BlockLength::rows` -#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Display, Debug))] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))] #[derive( Encode, Decode, @@ -123,6 +124,7 @@ pub struct BlockLengthColumns(#[codec(compact)] pub u32); Ord, Into, Constructor, + Display, )] #[mul(forward)] pub struct BlockLengthRows(#[codec(compact)] pub u32); From a3b994c746a9ff6ae03bfd0dea1ad7a4e8aa7d3b Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Thu, 6 Jul 2023 14:36:08 +0200 Subject: [PATCH 67/87] Add UT for `DataLookup`'s serde --- core/src/data_lookup/mod.rs | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/core/src/data_lookup/mod.rs b/core/src/data_lookup/mod.rs index 032e1dca..2375e61b 100644 --- a/core/src/data_lookup/mod.rs +++ b/core/src/data_lookup/mod.rs @@ -219,4 +219,16 @@ mod test { "Lookup: {lookup:?} -> Compacted: {compact_lookup:?} -> Expanded: {expanded_lookup:?}" ); } + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] ; "Valid case")] + #[test_case( vec![(0, 15)] ; "Only Zero AppId")] + #[test_case( vec![] ; "Empty")] + fn serialization_compatibility(id_lens: Vec<(u32, usize)>) { + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + let lookup_json = serde_json::to_string(&lookup).unwrap(); + let compressed_from_json = serde_json::from_str::(&lookup_json).unwrap(); + let expanded_lookup = DataLookup::try_from(compressed_from_json.clone()).unwrap(); + + assert_eq!(lookup, expanded_lookup); + } } From efc32301a7326d72c2e7d92a11011a3e3482e871 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Thu, 6 Jul 2023 19:05:01 +0200 Subject: [PATCH 68/87] Remove `AppIndex` & enforce `AppId` --- kate/recovery/Cargo.toml | 2 + kate/recovery/src/com.rs | 231 +++++++++---------------------- kate/recovery/src/commitments.rs | 54 +++----- kate/recovery/src/data.rs | 5 +- 
kate/recovery/src/index.rs | 121 ---------------- kate/recovery/src/lib.rs | 3 +- kate/recovery/src/matrix.rs | 4 +- 7 files changed, 100 insertions(+), 320 deletions(-) delete mode 100644 kate/recovery/src/index.rs diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 045c71ed..f25f875c 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -13,6 +13,7 @@ dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0 # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } sp-arithmetic = { version = "*", default-features = false } +sp-std = { version = "*", default-features = false } # 3rd-parties derive_more = "0.99.17" @@ -34,6 +35,7 @@ std = [ "once_cell", "serde", "sp-arithmetic/std", + "sp-std/std", "avail-core/std", "rand/std", "rand_chacha/std", diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index cadbd487..80b7f7f5 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,21 +1,25 @@ -use avail_core::ensure; +use avail_core::{data_lookup::Error as DataLookupError, ensure, AppId, DataLookup}; use codec::{Decode, IoReader}; use core::num::TryFromIntError; -use dusk_bytes::Serializable as _; -use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; -use sp_arithmetic::{traits::SaturatedConversion, Percent}; -use static_assertions::const_assert_ne; -use std::{ - collections::{HashMap, HashSet}, +use core::{ convert::{TryFrom, TryInto}, - iter::FromIterator, ops::Range, }; +use dusk_bytes::Serializable as _; +use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; +use sp_arithmetic::{traits::SaturatedConversion, Percent}; +use sp_std::{iter::FromIterator, vec::Vec}; +use static_assertions::{const_assert, const_assert_ne}; use thiserror_no_std::Error; +#[cfg(feature = "std")] +use crate::sparse_slice_read::SparseSliceRead; +#[cfg(feature = "std")] +use std::collections::{HashMap, HashSet}; + use crate::{ 
config::{self, CHUNK_SIZE, DATA_CHUNK_SIZE}, - data, index, matrix, + data, matrix, }; #[derive(Debug, Error)] @@ -34,6 +38,10 @@ pub enum ReconstructionError { DataDecodingError(#[from] UnflattenError), #[error("Column reconstruction supports up to {}", u16::MAX)] RowCountExceeded, + #[error("Missing AppId {0}")] + MissingId(AppId), + #[error("DataLookup {0}")] + DataLookup(#[from] DataLookupError), } #[cfg(feature = "std")] @@ -74,6 +82,7 @@ pub fn columns_positions( /// Creates hash map of columns, each being hash map of cells, from vector of cells. /// Intention is to be able to find duplicates and to group cells by column. +#[cfg(feature = "std")] fn map_cells( dimensions: matrix::Dimensions, cells: Vec, @@ -101,12 +110,12 @@ fn map_cells( /// * `dimensions` - Extended matrix dimensions /// * `app_id` - Application ID pub fn app_specific_rows( - index: &index::AppDataIndex, + index: &DataLookup, dimensions: matrix::Dimensions, - app_id: u32, + app_id: AppId, ) -> Vec { index - .app_cells_range(app_id) + .range_of(app_id) .and_then(|range| dimensions.extended_data_rows(range)) .unwrap_or_default() } @@ -121,12 +130,12 @@ pub fn app_specific_rows( /// * `dimensions` - Extended matrix dimensions /// * `app_id` - Application ID pub fn app_specific_cells( - index: &index::AppDataIndex, + index: &DataLookup, dimensions: matrix::Dimensions, - app_id: u32, + id: AppId, ) -> Option> { index - .app_cells_range(app_id) + .range_of(id) .and_then(|range| dimensions.extended_data_positions(range)) } @@ -144,15 +153,18 @@ pub type AppData = Vec>; /// * `cells` - Cells from required columns, at least 50% cells per column /// * `app_id` - Application ID pub fn reconstruct_app_extrinsics( - index: &index::AppDataIndex, + index: &DataLookup, dimensions: matrix::Dimensions, cells: Vec, - app_id: u32, + app_id: AppId, ) -> Result { let data = reconstruct_available(dimensions, cells)?; - let ranges = index.app_data_ranges(app_id); + const_assert!(config::CHUNK_SIZE as u64 <= 
u32::MAX as u64); + let range = index + .projected_range_of(app_id, config::CHUNK_SIZE as u32) + .ok_or(ReconstructionError::MissingId(app_id))?; - Ok(unflatten_padded_data(ranges, data)? + Ok(unflatten_padded_data(vec![(app_id, range)], data)? .into_iter() .flat_map(|(_, xts)| xts) .collect::>()) @@ -166,12 +178,14 @@ pub fn reconstruct_app_extrinsics( /// * `dimensions` - Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column pub fn reconstruct_extrinsics( - index: &index::AppDataIndex, + lookup: &DataLookup, dimensions: matrix::Dimensions, cells: Vec, -) -> Result, ReconstructionError> { +) -> Result, ReconstructionError> { let data = reconstruct_available(dimensions, cells)?; - let ranges = index.data_ranges(); + + const_assert!(config::CHUNK_SIZE as u64 <= u32::MAX as u64); + let ranges = lookup.projected_ranges(config::CHUNK_SIZE as u32)?; unflatten_padded_data(ranges, data).map_err(ReconstructionError::DataDecodingError) } @@ -258,10 +272,10 @@ fn reconstruct_available( /// * `cells` - Application specific data cells in extended matrix, without erasure coded data. /// * `app_id` - Application ID pub fn decode_app_extrinsics( - index: &index::AppDataIndex, + index: &DataLookup, dimensions: matrix::Dimensions, cells: Vec, - app_id: u32, + app_id: AppId, ) -> Result { let positions = app_specific_cells(index, dimensions, app_id).unwrap_or_default(); if positions.is_empty() { @@ -288,7 +302,12 @@ pub fn decode_app_extrinsics( Some(cell) => app_data.extend(cell.data), } } - let ranges = index.app_data_ranges(app_id); + + const_assert!((config::CHUNK_SIZE as u64) <= (u32::MAX as u64)); + let ranges = index + .projected_range_of(app_id, config::CHUNK_SIZE as u32) + .map(|range| vec![(app_id, range)]) + .unwrap_or_default(); Ok(unflatten_padded_data(ranges, app_data) .map_err(ReconstructionError::DataDecodingError)? 
@@ -318,59 +337,12 @@ impl std::error::Error for UnflattenError { } } -use std::{collections::VecDeque, io}; - -/// It is a Codec Reader which allows decoding from non-sequential data. -struct SparseSliceRead<'a> { - parts: VecDeque<&'a [u8]>, -} - -impl<'a> FromIterator<&'a [u8]> for SparseSliceRead<'a> { - fn from_iter>(iter: I) -> Self { - let parts = VecDeque::from_iter(iter); - Self { parts } - } -} - -impl<'a> io::Read for SparseSliceRead<'a> { - fn read(&mut self, mut buf: &mut [u8]) -> io::Result { - let mut bytes = 0usize; - - loop { - let buf_len = buf.len(); - if buf_len == 0 || self.parts.is_empty() { - break; - } - - if let Some(next_part) = self.parts.pop_front() { - // Define max copied bytes and pending for next iteration. - let copied_len = std::cmp::min(next_part.len(), buf_len); - bytes += copied_len; - - // Copy data into `buf`. - let (source, pending_next_part) = next_part.split_at(copied_len); - let (dest, pending_buf) = buf.split_at_mut(copied_len); - dest.copy_from_slice(source); - - // Advance output buffer. 
- buf = pending_buf; - - // Reinsert if it is still pending - if !pending_next_part.is_empty() { - self.parts.push_front(pending_next_part); - } - } - } - - Ok(bytes) - } -} - +#[cfg(feature = "std")] // Removes both extrinsics and block padding (iec_9797 and seeded random data) pub fn unflatten_padded_data( - ranges: Vec<(u32, AppDataRange)>, + ranges: Vec<(AppId, AppDataRange)>, data: Vec, -) -> Result, UnflattenError> { +) -> Result, UnflattenError> { ensure!(data.len() % CHUNK_SIZE == 0, UnflattenError::InvalidLen); fn extract_encoded_extrinsic(range_data: &[u8]) -> SparseSliceRead { @@ -395,7 +367,7 @@ pub fn unflatten_padded_data( Ok((app_id, extrinsic)) }) - .collect::, _>>() + .collect::, _>>() } // This module is taken from https://gist.github.com/itzmeanjan/4acf9338d9233e79cfbee5d311e7a0b4 @@ -597,102 +569,35 @@ mod tests { use super::*; use crate::{ data::DataCell, - index::AppDataIndex, matrix::{Dimensions, Position}, }; - #[test] - fn app_data_index_cell_ranges() { - let cases = vec![ - ( - AppDataIndex { - size: 8, - index: vec![], - }, - vec![(0, 0..8)], - ), - ( - AppDataIndex { - size: 4, - index: vec![(1, 0), (2, 2)], - }, - vec![(1, 0..2), (2, 2..4)], - ), - ( - AppDataIndex { - size: 15, - index: vec![(1, 3), (12, 8)], - }, - vec![(0, 0..3), (1, 3..8), (12, 8..15)], - ), - ]; - - for (index, result) in cases { - assert_eq!(index.cells_ranges(), result); - } - } - - #[test] - fn app_data_index_data_ranges() { - let cases = vec![ - ( - AppDataIndex { - size: 8, - index: vec![], - }, - vec![(0, 0..256)], - ), - ( - AppDataIndex { - size: 4, - index: vec![(1, 0), (2, 2)], - }, - vec![(1, 0..64), (2, 64..128)], - ), - ( - AppDataIndex { - size: 15, - index: vec![(1, 3), (12, 8)], - }, - vec![(0, 0..96), (1, 96..256), (12, 256..480)], - ), - ]; + #[test_case(0 => vec![0] ; "App 0 spans 2 rows form row 0")] + #[test_case(1 => vec![0, 2] ; "App 1 spans 2 rows from row 0")] + #[test_case(2 => vec![2] ; "App 2 spans 1 rows from row 2")] + #[test_case(3 => 
vec![4, 6] ; "App 3 spans 2 rows from row 4")] + #[test_case(4 => Vec::::new() ; "There is no app 4")] + fn test_app_specific_rows(id: u32) -> Vec { + let id_lens: Vec<(u32, u32)> = vec![(0, 2), (1, 3), (2, 3), (3, 8)]; + let index = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + let dimensions = Dimensions::new(8, 4).unwrap(); - for (index, result) in cases { - assert_eq!(index.data_ranges(), result); - } + app_specific_rows(&index, dimensions, AppId(id)) } - #[test_case(0, &[0] ; "App 0 spans 2 rows form row 0")] - #[test_case(1, &[0, 2] ; "App 1 spans 2 rows from row 0")] - #[test_case(2, &[2] ; "App 2 spans 1 rows from row 2")] - #[test_case(3, &[4, 6] ; "App 3 spans 2 rows from row 4")] - #[test_case(4, &[] ; "There is no app 4")] - fn test_app_specific_rows(app_id: u32, expected: &[u32]) { - let index = AppDataIndex { - size: 16, - index: vec![(1, 2), (2, 5), (3, 8)], - }; - let dimensions = Dimensions::new(8, 4).unwrap(); - let result = app_specific_rows(&index, dimensions, app_id); - assert_eq!(expected.len(), result.len()); + fn to_matrix_pos(data: &[(u32, u16)]) -> Vec { + data.iter().cloned().map(Position::from).collect() } - #[test_case(0, &[(0, 0), (0, 1), (0, 2), (0, 3), (2, 0)] ; "App 0 has five cells")] - #[test_case(1, &[(2, 1), (2, 2), (2, 3)] ; "App 1 has 3 cells")] - #[test_case(2, &[] ; "App 2 has no cells")] - fn test_app_specific_cells(app_id: u32, expected: &[(u32, u16)]) { - let index = AppDataIndex { - size: 8, - index: vec![(1, 5)], - }; + #[test_case(0 => to_matrix_pos(&[(0, 0), (0, 1), (0, 2), (0, 3), (2, 0)]) ; "App 0 has five cells")] + #[test_case(1 => to_matrix_pos(&[(2, 1), (2, 2), (2, 3)]) ; "App 1 has 3 cells")] + #[test_case(2 => Vec::::new() ; "App 2 has no cells")] + fn test_app_specific_cells(app_id: u32) -> Vec { + let id_lens: Vec<(u32, usize)> = vec![(0, 5), (1, 3)]; + let index = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); let dimensions = Dimensions::new(4, 4).unwrap(); - let result 
= app_specific_cells(&index, dimensions, app_id).unwrap_or_default(); - assert_eq!(expected.len(), result.len()); - result.iter().zip(expected).for_each(|(a, &(row, col))| { - assert_eq!(a.row, row); - assert_eq!(a.col, col); - }); + + app_specific_cells(&index, dimensions, AppId(app_id)).unwrap_or_default() } #[test] diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index a8140510..58a9d053 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -1,10 +1,10 @@ -use std::{ +use core::{ array::TryFromSliceError, convert::{TryFrom, TryInto}, num::TryFromIntError, }; -use avail_core::ensure; +use avail_core::{ensure, AppId, DataLookup}; #[cfg(feature = "std")] use dusk_bytes::Serializable; use dusk_plonk::{ @@ -16,7 +16,7 @@ use thiserror_no_std::Error; use crate::{ com, config::{self, COMMITMENT_SIZE}, - index, matrix, + matrix, }; #[derive(Error, Debug)] @@ -94,9 +94,9 @@ pub fn verify_equality( public_params: &PublicParameters, commitments: &[[u8; COMMITMENT_SIZE]], rows: &[Option>], - index: &index::AppDataIndex, + index: &DataLookup, dimensions: matrix::Dimensions, - app_id: u32, + app_id: AppId, ) -> Result<(Vec, Vec), Error> { let ext_rows: usize = dimensions.extended_rows().try_into()?; ensure!(commitments.len() == ext_rows, Error::BadCommitmentsData); @@ -159,18 +159,14 @@ pub fn from_slice(source: &[u8]) -> Result, TryFromSl #[cfg(test)] mod tests { + use super::verify_equality; + use avail_core::{AppId, DataLookup}; use dusk_plonk::prelude::PublicParameters; use once_cell::sync::Lazy; use rand::SeedableRng; use rand_chacha::ChaChaRng; - use crate::{ - commitments, - index::{self, AppDataIndex}, - matrix, - }; - - use super::verify_equality; + use crate::{commitments, matrix}; static PUBLIC_PARAMETERS: Lazy = Lazy::new(|| PublicParameters::setup(256, &mut ChaChaRng::seed_from_u64(42)).unwrap()); @@ -181,9 +177,9 @@ mod tests { &PUBLIC_PARAMETERS, &[], &[], - &index::AppDataIndex::default(), + 
&DataLookup::default(), matrix::Dimensions::new(1, 1).unwrap(), - 0, + AppId(0), ) .is_err()); } @@ -200,42 +196,38 @@ mod tests { let row_4 = Some(hex::decode("722c20416c65782073657473206f757420746f207265736375652074686520006b696e67646f6d2e204f6e206869732071756573742c206865206465666561007473204a616e6b656e27732068656e63686d656e20616e64207265747269650076657320766172696f7573206974656d73207768696368206c656164206869006d20746f77617264204a616e6b656e2077686f6d20686520646566656174730020616e642073656573207475726e656420746f2073746f6e652e20416c65780020726574726965766573207468652063726f776e2c20616e6420746865207000656f706c65206f6620526164617869616e2061726520726573746f7265642000756e64657220746865206e65776c792063726f776e6564204b696e67204567006c652e800000000000000000000000000000000000000000000000000000000004fd01412072656d616b65206f66207468652067616d652c207469746c65640020416c6578204b69646420696e204d697261636c6520576f726c642044582c002077617320616e6e6f756e636564206f6e204a756e652031302c2032303230002c20616e642072656c6561736564206f6e204a756e652032322c2032303231002e2054686520800000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae2813004925683890a942f63ce493f512f0b2cfb7c42a07ce9130cb6d059a388d886100536cb9c5b81a9a8dc46c2d64a7a5b1d93b2d8646805d8d2a122fccdb3bc7dc00975ab75fc865793536f66e64189050360f623dc88abb8300180cdd0a8f33d700d2159b3df296b46dd64bec57609a3f2fb4ad8b46e2fd4c9f25d44328dd50ce00514db7bbf50ef518c195a7053763d0a8dfdab6b946ee9f3954549319ac7dc600bac203232876b27b541433fb2f1438289799049b349f7a2c205d3a97f66ef4002800baa3cb78fb33130181775fb26a62630236bd8bc644a
3656489d135ba1800b11846029a9183d434593cbbc1e03a4f8dba40cf6cfa07ba043c83f6a4888700364c233191a4b99aff1e9b8ab2aba54ecc61a6a8d2a50043e8948be1e76a43007d348990b99e55fee2a4bc79b29b27f2f9720e96840517dc8a0be65757110400").unwrap()); - let size = 79; - let index = vec![(1, 1), (2, 74)]; + let id_lens: Vec<(u32, u32)> = vec![(0, 1), (1, 73), (2, 6)]; + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + let dimension = matrix::Dimensions::new(4, 32).unwrap(); + let id = AppId(1); let result = verify_equality( &PUBLIC_PARAMETERS, &commitments, &[row_0.clone(), None, row_2, None, row_4, None, None, None], - &AppDataIndex { size, index }, - matrix::Dimensions::new(4, 32).unwrap(), - 1, + &lookup, + dimension, + id, ); assert_eq!(result.unwrap(), (vec![0, 2, 4], vec![])); - let size = 79; - let index = vec![(1, 1), (2, 74)]; - let result = verify_equality( &PUBLIC_PARAMETERS, &commitments, &[row_0, None, None, None, None, None, None, None], - &AppDataIndex { size, index }, - matrix::Dimensions::new(4, 32).unwrap(), - 1, + &lookup, + dimension, + id, ); assert_eq!(result.unwrap(), (vec![0], vec![2, 4])); - let size = 79; - let index = vec![(1, 1), (2, 74)]; - let result = verify_equality( &PUBLIC_PARAMETERS, &commitments, &[None, None, None, None, None, None, None, None], - &AppDataIndex { size, index }, - matrix::Dimensions::new(4, 32).unwrap(), - 1, + &lookup, + dimension, + id, ); assert_eq!(result.unwrap(), (vec![], vec![0, 2, 4])); } diff --git a/kate/recovery/src/data.rs b/kate/recovery/src/data.rs index d06c5e9c..ad8a03a0 100644 --- a/kate/recovery/src/data.rs +++ b/kate/recovery/src/data.rs @@ -1,5 +1,6 @@ +use core::convert::TryInto; use derive_more::Constructor; -use std::{collections::HashMap, convert::TryInto}; +use sp_std::collections::btree_map::BTreeMap; use crate::matrix::{Dimensions, Position, RowIndex}; @@ -43,7 +44,7 @@ pub fn rows(dimensions: Dimensions, cells: &[&Cell]) -> Vec<(RowIndex, Vec)> sorted_cells .sort_by(|a, b| 
(a.position.row, a.position.col).cmp(&(b.position.row, b.position.col))); - let mut rows = HashMap::new(); + let mut rows = BTreeMap::new(); for cell in sorted_cells { rows.entry(RowIndex(cell.position.row)) .or_insert_with(Vec::default) diff --git a/kate/recovery/src/index.rs b/kate/recovery/src/index.rs deleted file mode 100644 index 67cb8abb..00000000 --- a/kate/recovery/src/index.rs +++ /dev/null @@ -1,121 +0,0 @@ -use std::{convert::TryFrom, iter::once, ops::Range}; - -use serde::{Deserialize, Serialize}; - -use crate::config; - -/// Index is list of pairs (app_id, start_index), -/// where start index is index of first cell for that application. -/// -/// # TODO -/// - Replace it with `DataLookup`? -#[derive(Serialize, Deserialize, Default, Debug, Clone)] -pub struct AppDataIndex { - /// Number of the data cells in the matrix - pub size: u32, - /// Data index per application - pub index: Vec<(u32, u32)>, -} - -#[derive(PartialEq, Eq, Debug)] -pub enum AppDataIndexError { - SizeOverflow, - UnsortedLayout, -} - -impl AppDataIndex { - /// Calculates cell ranges per application from extrinsic offsets. - /// Range is from start index to end index in matrix. 
- pub fn cells_ranges(&self) -> Vec<(u32, Range)> { - // Case if first app_id in index is zero is ignored - // since it should be asserted elsewhere - let prepend = self.index.first().map_or(vec![(0, 0)], |&(_, offset)| { - if offset == 0 { - vec![] - } else { - vec![(0, 0)] - } - }); - - let starts = prepend.iter().chain(self.index.iter()); - - let ends = self - .index - .iter() - .skip_while(|&&(_, offset)| offset == 0) - .map(|&(_, offset)| offset) - .chain(once(self.size)); - - starts - .zip(ends) - .map(|(&(app_id, start), end)| (app_id, (start..end))) - .collect::>() - } - - pub fn app_cells_range(&self, app_id: u32) -> Option> { - self.cells_ranges() - .into_iter() - .find(|&(id, _)| app_id == id) - .map(|(_, range)| range) - } - - fn app_cells_ranges(&self, app_id: u32) -> Vec> { - self.cells_ranges() - .into_iter() - .filter(|&(id, _)| app_id == id) - .map(|(_, range)| range) - .collect::>() - } - - /// Calculates data range per application from extrinsics layout. - /// Range is from start index to end index in matrix flattened as byte array. 
- pub fn data_ranges(&self) -> Vec<(u32, Range)> { - const CHUNK_SIZE_U32: u32 = config::CHUNK_SIZE as u32; - self.cells_ranges() - .into_iter() - .map(|(app_id, Range { start, end })| { - (app_id, (start * CHUNK_SIZE_U32..end * CHUNK_SIZE_U32)) - }) - .collect::>() - } - - pub fn app_data_ranges(&self, app_id: u32) -> Vec<(u32, Range)> { - const CHUNK_SIZE_U32: u32 = config::CHUNK_SIZE as u32; - self.app_cells_ranges(app_id) - .iter() - .map(|Range { start, end }| (app_id, (start * CHUNK_SIZE_U32..end * CHUNK_SIZE_U32))) - .collect::>() - } -} - -impl TryFrom<&[(T, u32)]> for AppDataIndex -where - T: Clone + Into, -{ - type Error = AppDataIndexError; - - fn try_from(layout: &[(T, u32)]) -> Result { - let mut index = Vec::new(); - // transactions are ordered by application id - // skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = 0u32; - - for (app_id, data_len) in layout { - let app_id: u32 = app_id.clone().into(); - if app_id != 0 && prev_app_id != app_id { - index.push((app_id, size)); - } - - size = size - .checked_add(*data_len) - .ok_or(Self::Error::SizeOverflow)?; - if prev_app_id > app_id { - return Err(Self::Error::UnsortedLayout); - } - prev_app_id = app_id; - } - - Ok(AppDataIndex { size, index }) - } -} diff --git a/kate/recovery/src/lib.rs b/kate/recovery/src/lib.rs index 33c4012c..fcc8c3a4 100644 --- a/kate/recovery/src/lib.rs +++ b/kate/recovery/src/lib.rs @@ -4,9 +4,10 @@ pub mod com; pub mod commitments; pub mod config; pub mod data; -pub mod index; pub mod matrix; pub mod proof; +#[cfg(feature = "std")] +pub mod sparse_slice_read; #[cfg(feature = "std")] pub mod testnet; diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index d34d4475..88173b45 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -22,8 +22,8 @@ pub struct Position { impl From<(R, C)> for Position where - R: Into, - C: Into, + u32: From, + u16: From, { fn from(row_col: (R, C)) 
-> Self { Self { From 3ad206e460b559cc75e9b9a41468164ca5cb87b9 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Thu, 6 Jul 2023 19:06:24 +0200 Subject: [PATCH 69/87] Add `CompactDataLookup` --- core/src/data_lookup/compact.rs | 57 +++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 core/src/data_lookup/compact.rs diff --git a/core/src/data_lookup/compact.rs b/core/src/data_lookup/compact.rs new file mode 100644 index 00000000..a67b40ed --- /dev/null +++ b/core/src/data_lookup/compact.rs @@ -0,0 +1,57 @@ +use crate::{AppId, DataLookup}; + +use codec::{Decode, Encode}; +use derive_more::Constructor; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +#[derive(Copy, Clone, Encode, Decode, TypeInfo, Constructor, Debug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct DataLookupItem { + pub app_id: AppId, + #[codec(compact)] + pub start: u32, +} + +impl From<(A, S)> for DataLookupItem +where + u32: From, + u32: From, +{ + fn from(value: (A, S)) -> Self { + Self { + app_id: AppId(value.0.into()), + start: value.1.into(), + } + } +} + +#[derive(Encode, Decode, TypeInfo, Constructor, Debug, Clone)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct CompactDataLookup { + /// size of the look up + #[codec(compact)] + pub(crate) size: u32, + /// sorted vector of tuples(key, start index) + pub(crate) index: Vec, +} + +impl CompactDataLookup { + pub fn from_expanded(lookup: &DataLookup) -> Self { + let index = lookup + .index + .iter() + .filter(|(id, _)| *id != AppId(0)) + .map(|(id, range)| DataLookupItem::new(*id, range.start)) + .collect(); + let size = lookup.index.last().map(|(_, range)| range.end).unwrap_or(0); + Self { size, index } + } +} + +impl From for CompactDataLookup { + fn from(lookup: DataLookup) -> Self { + CompactDataLookup::from_expanded(&lookup) + } +} From 8f8dc28751ef166a5a9164c72464533e4fb7a248 Mon Sep 17 00:00:00 2001 From: 
fmiguelgarcia Date: Mon, 10 Jul 2023 09:57:36 +0200 Subject: [PATCH 70/87] Moving `nomad` code --- Cargo.toml | 4 ++++ core/src/data_lookup/compact.rs | 2 ++ core/src/kate_commitment.rs | 7 +++++-- {primitives/nomad/nomad-base => nomad/base}/Cargo.toml | 2 +- {primitives/nomad/nomad-base => nomad/base}/src/lib.rs | 0 .../nomad/nomad-base => nomad/base}/src/testing.rs | 0 {primitives/nomad/nomad-core => nomad/core}/Cargo.toml | 0 {primitives/nomad/nomad-core => nomad/core}/src/lib.rs | 0 .../nomad-core => nomad/core}/src/nomad_message.rs | 0 .../nomad/nomad-core => nomad/core}/src/state.rs | 0 .../nomad/nomad-core => nomad/core}/src/test_utils.rs | 0 .../nomad-core => nomad/core}/src/typed_message.rs | 0 .../nomad/nomad-core => nomad/core}/src/update.rs | 0 .../nomad/nomad-core => nomad/core}/src/update_v2.rs | 0 .../nomad/nomad-core => nomad/core}/src/utils.rs | 0 {primitives/nomad => nomad}/merkle/Cargo.toml | 10 +++++++--- .../nomad => nomad}/merkle/fixtures/merkle.json | 0 {primitives/nomad => nomad}/merkle/src/error.rs | 0 {primitives/nomad => nomad}/merkle/src/lib.rs | 0 {primitives/nomad => nomad}/merkle/src/light.rs | 0 {primitives/nomad => nomad}/merkle/src/proof.rs | 0 {primitives/nomad => nomad}/merkle/src/test_utils.rs | 0 {primitives/nomad => nomad}/merkle/src/utils.rs | 0 {primitives/nomad => nomad}/signature/Cargo.toml | 0 {primitives/nomad => nomad}/signature/README.md | 0 {primitives/nomad => nomad}/signature/src/lib.rs | 0 {primitives/nomad => nomad}/signature/src/signature.rs | 0 {primitives/nomad => nomad}/signature/src/utils.rs | 0 28 files changed, 19 insertions(+), 6 deletions(-) rename {primitives/nomad/nomad-base => nomad/base}/Cargo.toml (94%) rename {primitives/nomad/nomad-base => nomad/base}/src/lib.rs (100%) rename {primitives/nomad/nomad-base => nomad/base}/src/testing.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/Cargo.toml (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/lib.rs (100%) rename 
{primitives/nomad/nomad-core => nomad/core}/src/nomad_message.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/state.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/test_utils.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/typed_message.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/update.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/update_v2.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/utils.rs (100%) rename {primitives/nomad => nomad}/merkle/Cargo.toml (88%) rename {primitives/nomad => nomad}/merkle/fixtures/merkle.json (100%) rename {primitives/nomad => nomad}/merkle/src/error.rs (100%) rename {primitives/nomad => nomad}/merkle/src/lib.rs (100%) rename {primitives/nomad => nomad}/merkle/src/light.rs (100%) rename {primitives/nomad => nomad}/merkle/src/proof.rs (100%) rename {primitives/nomad => nomad}/merkle/src/test_utils.rs (100%) rename {primitives/nomad => nomad}/merkle/src/utils.rs (100%) rename {primitives/nomad => nomad}/signature/Cargo.toml (100%) rename {primitives/nomad => nomad}/signature/README.md (100%) rename {primitives/nomad => nomad}/signature/src/lib.rs (100%) rename {primitives/nomad => nomad}/signature/src/signature.rs (100%) rename {primitives/nomad => nomad}/signature/src/utils.rs (100%) diff --git a/Cargo.toml b/Cargo.toml index 4ed51948..78b33c31 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,10 @@ members = [ "core", "kate/recovery", "kate", + "nomad/signature", + "nomad/core", + "nomad/base", + "nomad/merkle", ] [patch.crates-io] # Substrate (polkadot-v0.9.37). 
diff --git a/core/src/data_lookup/compact.rs b/core/src/data_lookup/compact.rs index a67b40ed..5da2db89 100644 --- a/core/src/data_lookup/compact.rs +++ b/core/src/data_lookup/compact.rs @@ -3,6 +3,8 @@ use crate::{AppId, DataLookup}; use codec::{Decode, Encode}; use derive_more::Constructor; use scale_info::TypeInfo; +use sp_std::vec::Vec; + #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; diff --git a/core/src/kate_commitment.rs b/core/src/kate_commitment.rs index 47347171..d2c2710b 100644 --- a/core/src/kate_commitment.rs +++ b/core/src/kate_commitment.rs @@ -1,11 +1,14 @@ use codec::{Decode, Encode}; use scale_info::TypeInfo; +use sp_core::H256; +use sp_std::vec::Vec; + #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{hexdisplay::HexDisplay, H256}; +#[cfg(feature = "std")] +use sp_core::hexdisplay::HexDisplay; #[cfg(feature = "std")] use sp_std::fmt; -use sp_std::vec::Vec; pub mod v1 { use super::*; diff --git a/primitives/nomad/nomad-base/Cargo.toml b/nomad/base/Cargo.toml similarity index 94% rename from primitives/nomad/nomad-base/Cargo.toml rename to nomad/base/Cargo.toml index 74e78694..1ca91515 100644 --- a/primitives/nomad/nomad-base/Cargo.toml +++ b/nomad/base/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -nomad-core = { path = "../nomad-core", default-features = false } +nomad-core = { path = "../core", default-features = false } nomad-signature = { path = "../signature", default-features = false } # Substrate diff --git a/primitives/nomad/nomad-base/src/lib.rs b/nomad/base/src/lib.rs similarity index 100% rename from primitives/nomad/nomad-base/src/lib.rs rename to nomad/base/src/lib.rs diff --git a/primitives/nomad/nomad-base/src/testing.rs b/nomad/base/src/testing.rs similarity index 100% rename from primitives/nomad/nomad-base/src/testing.rs rename to nomad/base/src/testing.rs diff --git 
a/primitives/nomad/nomad-core/Cargo.toml b/nomad/core/Cargo.toml similarity index 100% rename from primitives/nomad/nomad-core/Cargo.toml rename to nomad/core/Cargo.toml diff --git a/primitives/nomad/nomad-core/src/lib.rs b/nomad/core/src/lib.rs similarity index 100% rename from primitives/nomad/nomad-core/src/lib.rs rename to nomad/core/src/lib.rs diff --git a/primitives/nomad/nomad-core/src/nomad_message.rs b/nomad/core/src/nomad_message.rs similarity index 100% rename from primitives/nomad/nomad-core/src/nomad_message.rs rename to nomad/core/src/nomad_message.rs diff --git a/primitives/nomad/nomad-core/src/state.rs b/nomad/core/src/state.rs similarity index 100% rename from primitives/nomad/nomad-core/src/state.rs rename to nomad/core/src/state.rs diff --git a/primitives/nomad/nomad-core/src/test_utils.rs b/nomad/core/src/test_utils.rs similarity index 100% rename from primitives/nomad/nomad-core/src/test_utils.rs rename to nomad/core/src/test_utils.rs diff --git a/primitives/nomad/nomad-core/src/typed_message.rs b/nomad/core/src/typed_message.rs similarity index 100% rename from primitives/nomad/nomad-core/src/typed_message.rs rename to nomad/core/src/typed_message.rs diff --git a/primitives/nomad/nomad-core/src/update.rs b/nomad/core/src/update.rs similarity index 100% rename from primitives/nomad/nomad-core/src/update.rs rename to nomad/core/src/update.rs diff --git a/primitives/nomad/nomad-core/src/update_v2.rs b/nomad/core/src/update_v2.rs similarity index 100% rename from primitives/nomad/nomad-core/src/update_v2.rs rename to nomad/core/src/update_v2.rs diff --git a/primitives/nomad/nomad-core/src/utils.rs b/nomad/core/src/utils.rs similarity index 100% rename from primitives/nomad/nomad-core/src/utils.rs rename to nomad/core/src/utils.rs diff --git a/primitives/nomad/merkle/Cargo.toml b/nomad/merkle/Cargo.toml similarity index 88% rename from primitives/nomad/merkle/Cargo.toml rename to nomad/merkle/Cargo.toml index 451e457f..62ec4113 100644 --- 
a/primitives/nomad/merkle/Cargo.toml +++ b/nomad/merkle/Cargo.toml @@ -6,17 +6,21 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +# Internal +nomad-core = { path = "../core", default-features = false } + +# Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false } -hex-literal = "0.3.4" -nomad-core = { path = "../nomad-core", default-features = false } -# parity-util-mem = { version = "0.10.2", default-features = false, features = ["primitive-types"] } primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } sp-core = { version = "7.0.0", default-features = false } sp-io = { version = "7.0.0", default-features = false } sp-runtime = { version = "7.0.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false } + +# 3rd-party +hex-literal = "0.3.4" static_assertions = "1.1.0" thiserror-no-std = "2.0.2" tiny-keccak = { version = "2.0.2", default-features = false, features = ["keccak"] } diff --git a/primitives/nomad/merkle/fixtures/merkle.json b/nomad/merkle/fixtures/merkle.json similarity index 100% rename from primitives/nomad/merkle/fixtures/merkle.json rename to nomad/merkle/fixtures/merkle.json diff --git a/primitives/nomad/merkle/src/error.rs b/nomad/merkle/src/error.rs similarity index 100% rename from primitives/nomad/merkle/src/error.rs rename to nomad/merkle/src/error.rs diff --git a/primitives/nomad/merkle/src/lib.rs b/nomad/merkle/src/lib.rs similarity index 100% rename from primitives/nomad/merkle/src/lib.rs rename to nomad/merkle/src/lib.rs diff --git a/primitives/nomad/merkle/src/light.rs b/nomad/merkle/src/light.rs similarity index 100% rename from primitives/nomad/merkle/src/light.rs 
rename to nomad/merkle/src/light.rs diff --git a/primitives/nomad/merkle/src/proof.rs b/nomad/merkle/src/proof.rs similarity index 100% rename from primitives/nomad/merkle/src/proof.rs rename to nomad/merkle/src/proof.rs diff --git a/primitives/nomad/merkle/src/test_utils.rs b/nomad/merkle/src/test_utils.rs similarity index 100% rename from primitives/nomad/merkle/src/test_utils.rs rename to nomad/merkle/src/test_utils.rs diff --git a/primitives/nomad/merkle/src/utils.rs b/nomad/merkle/src/utils.rs similarity index 100% rename from primitives/nomad/merkle/src/utils.rs rename to nomad/merkle/src/utils.rs diff --git a/primitives/nomad/signature/Cargo.toml b/nomad/signature/Cargo.toml similarity index 100% rename from primitives/nomad/signature/Cargo.toml rename to nomad/signature/Cargo.toml diff --git a/primitives/nomad/signature/README.md b/nomad/signature/README.md similarity index 100% rename from primitives/nomad/signature/README.md rename to nomad/signature/README.md diff --git a/primitives/nomad/signature/src/lib.rs b/nomad/signature/src/lib.rs similarity index 100% rename from primitives/nomad/signature/src/lib.rs rename to nomad/signature/src/lib.rs diff --git a/primitives/nomad/signature/src/signature.rs b/nomad/signature/src/signature.rs similarity index 100% rename from primitives/nomad/signature/src/signature.rs rename to nomad/signature/src/signature.rs diff --git a/primitives/nomad/signature/src/utils.rs b/nomad/signature/src/utils.rs similarity index 100% rename from primitives/nomad/signature/src/utils.rs rename to nomad/signature/src/utils.rs From 442cb8989ea6226e10b7f9f2a9fab9a4545860ba Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 10:01:53 +0200 Subject: [PATCH 71/87] Improve `no-std` segragation --- kate/recovery/src/com.rs | 103 ++++++++++++++----------- kate/recovery/src/commitments.rs | 6 +- kate/recovery/src/data.rs | 3 +- kate/recovery/src/matrix.rs | 18 +++-- kate/recovery/src/proof.rs | 26 +++---- 
kate/recovery/src/sparse_slice_read.rs | 51 ++++++++++++ kate/src/com.rs | 96 +++++++++-------------- 7 files changed, 173 insertions(+), 130 deletions(-) create mode 100644 kate/recovery/src/sparse_slice_read.rs diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 80b7f7f5..e7ca656d 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,47 +1,56 @@ +use crate::{data, matrix}; +use core::{convert::TryFrom, num::TryFromIntError, ops::Range}; + use avail_core::{data_lookup::Error as DataLookupError, ensure, AppId, DataLookup}; -use codec::{Decode, IoReader}; -use core::num::TryFromIntError; -use core::{ - convert::{TryFrom, TryInto}, - ops::Range, -}; use dusk_bytes::Serializable as _; use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; -use sp_arithmetic::{traits::SaturatedConversion, Percent}; -use sp_std::{iter::FromIterator, vec::Vec}; -use static_assertions::{const_assert, const_assert_ne}; +use sp_arithmetic::{traits::SaturatedConversion as _, Percent}; +use sp_std::prelude::*; use thiserror_no_std::Error; #[cfg(feature = "std")] -use crate::sparse_slice_read::SparseSliceRead; +use crate::{config, sparse_slice_read::SparseSliceRead}; #[cfg(feature = "std")] -use std::collections::{HashMap, HashSet}; - -use crate::{ - config::{self, CHUNK_SIZE, DATA_CHUNK_SIZE}, - data, matrix, +use codec::{Decode, IoReader}; +#[cfg(feature = "std")] +use static_assertions::{const_assert, const_assert_ne}; +#[cfg(feature = "std")] +use std::{ + collections::{HashMap, HashSet}, + convert::TryInto, + iter::FromIterator, }; #[derive(Debug, Error)] pub enum ReconstructionError { - #[error("Missing cell (col {}, row {})", .position.col, .position.row)] - MissingCell { position: matrix::Position }, - #[error("Invalid cell (col {}, row {})", .position.col, .position.row)] - InvalidCell { position: matrix::Position }, + #[error("Missing cell ({0})")] + MissingCell(matrix::Position), + #[error("Invalid cell ({0})")] + 
InvalidCell(matrix::Position), + #[error("Maximum cells allowed {0}")] + MaxCells(usize), + #[error("Minimum cells allowed {0}")] + MinCells(usize), #[error("Duplicate cell found")] DuplicateCellFound, #[error("Column {0} contains less than half rows")] InvalidColumn(u16), - #[error("Cannot reconstruct column: {0}")] - ColumnReconstructionError(String), #[error("Cannot decode data: {0}")] DataDecodingError(#[from] UnflattenError), #[error("Column reconstruction supports up to {}", u16::MAX)] RowCountExceeded, + #[error("Rows must be power of two")] + InvalidRowCount, #[error("Missing AppId {0}")] MissingId(AppId), #[error("DataLookup {0}")] DataLookup(#[from] DataLookupError), + #[error("Some cells are from different columns")] + CellsFromDifferentCols, + #[error("Invalid evaluation domain")] + InvalidEvaluationDomain, + #[error("Bad zero poly evaluation")] + BadZeroPoly, } #[cfg(feature = "std")] @@ -91,7 +100,7 @@ fn map_cells( for cell in cells { let position = cell.position; if !dimensions.extended_contains(&position) { - return Err(ReconstructionError::InvalidCell { position }); + return Err(ReconstructionError::InvalidCell(position)); } let cells = result.entry(position.col).or_insert_with(HashMap::new); if cells.insert(position.row, cell).is_some() { @@ -152,6 +161,7 @@ pub type AppData = Vec>; /// * `dimensions` - Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column /// * `app_id` - Application ID +#[cfg(feature = "std")] pub fn reconstruct_app_extrinsics( index: &DataLookup, dimensions: matrix::Dimensions, @@ -177,6 +187,7 @@ pub fn reconstruct_app_extrinsics( /// * `index` - Application data index /// * `dimensions` - Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column +#[cfg(feature = "std")] pub fn reconstruct_extrinsics( lookup: &DataLookup, dimensions: matrix::Dimensions, @@ -195,10 +206,11 @@ pub fn reconstruct_extrinsics( /// /// * `dimensions` - 
Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column +#[cfg(feature = "std")] pub fn reconstruct_columns( dimensions: matrix::Dimensions, cells: &[data::Cell], -) -> Result>, ReconstructionError> { +) -> Result>, ReconstructionError> { let cells: Vec = cells.iter().cloned().map(Into::into).collect::>(); let columns = map_cells(dimensions, cells)?; @@ -212,17 +224,17 @@ pub fn reconstruct_columns( let cells = cells.values().cloned().collect::>(); - let column = reconstruct_column(dimensions.extended_rows(), &cells) - .map_err(ReconstructionError::ColumnReconstructionError)? + let column = reconstruct_column(dimensions.extended_rows(), &cells)? .iter() .map(BlsScalar::to_bytes) - .collect::>(); + .collect::>(); Ok((col, column)) }) .collect::>() } +#[cfg(feature = "std")] fn reconstruct_available( dimensions: matrix::Dimensions, cells: Vec, @@ -242,7 +254,6 @@ fn reconstruct_available( reconstruct_column(dimensions.extended_rows(), &cells) .map(|scalars| scalars.into_iter().map(Some).collect::>()) - .map_err(ReconstructionError::ColumnReconstructionError) }, }) .collect::>, ReconstructionError>>()?; @@ -271,6 +282,7 @@ fn reconstruct_available( /// * `dimensions` - Extended matrix dimensions /// * `cells` - Application specific data cells in extended matrix, without erasure coded data. 
/// * `app_id` - Application ID +#[cfg(feature = "std")] pub fn decode_app_extrinsics( index: &DataLookup, dimensions: matrix::Dimensions, @@ -288,7 +300,7 @@ pub fn decode_app_extrinsics( .get(&position.col) .and_then(|column| column.get(&position.row)) .filter(|cell| !cell.data.is_empty()) - .ok_or(ReconstructionError::MissingCell { position })?; + .ok_or(ReconstructionError::MissingCell(position))?; } let mut app_data: Vec = vec![]; @@ -343,17 +355,20 @@ pub fn unflatten_padded_data( ranges: Vec<(AppId, AppDataRange)>, data: Vec, ) -> Result, UnflattenError> { - ensure!(data.len() % CHUNK_SIZE == 0, UnflattenError::InvalidLen); + ensure!( + data.len() % config::CHUNK_SIZE == 0, + UnflattenError::InvalidLen + ); fn extract_encoded_extrinsic(range_data: &[u8]) -> SparseSliceRead { - const_assert_ne!(CHUNK_SIZE, 0); - const_assert_ne!(DATA_CHUNK_SIZE, 0); + const_assert_ne!(config::CHUNK_SIZE, 0); + const_assert_ne!(config::DATA_CHUNK_SIZE, 0); // INTERNAL: Chunk into 32 bytes (CHUNK_SIZE), then remove padding (0..30 bytes). 
SparseSliceRead::from_iter( range_data - .chunks_exact(CHUNK_SIZE) - .map(|chunk| &chunk[0..DATA_CHUNK_SIZE]), + .chunks_exact(config::CHUNK_SIZE) + .map(|chunk| &chunk[0..config::DATA_CHUNK_SIZE]), ) } @@ -379,7 +394,7 @@ fn reconstruct_poly( eval_domain: EvaluationDomain, // subset of available data subset: Vec>, -) -> Result, String> { +) -> Result, ReconstructionError> { let missing_indices = subset .iter() .enumerate() @@ -390,7 +405,7 @@ fn reconstruct_poly( zero_poly_fn(eval_domain, missing_indices.as_slice(), subset.len() as u64); for i in 0..subset.len() { if subset[i].is_none() && zero_eval[i] != BlsScalar::zero() { - return Err("bad zero poly evaluation !".to_owned()); + return Err(ReconstructionError::BadZeroPoly); } } let mut poly_evals_with_zero: Vec = Vec::new(); @@ -489,6 +504,7 @@ fn unshift_poly(poly: &mut [BlsScalar]) { } pub type AppDataRange = Range; + // use this function for reconstructing back all cells of certain column // when at least 50% of them are available // @@ -500,7 +516,7 @@ pub type AppDataRange = Range; pub fn reconstruct_column( row_count: u32, cells: &[data::DataCell], -) -> Result, String> { +) -> Result, ReconstructionError> { // just ensures all rows are from same column ! // it's required as that's how it's erasure coded during // construction in validator node @@ -527,25 +543,24 @@ pub fn reconstruct_column( } // row count of data matrix must be power of two ! 
- let row_count_sz: usize = row_count - .try_into() - .map_err(|_| "Row count overflows `usize`")?; - ensure!(row_count % 2 == 0, "`row_count` must be power of two"); + let row_count_sz = + usize::try_from(row_count).map_err(|_| ReconstructionError::RowCountExceeded)?; + ensure!(row_count % 2 == 0, ReconstructionError::InvalidRowCount); ensure!( cells.len() >= row_count_sz / 2, - "Number of `cells` must be equal or greater than the half of `row_count`" + ReconstructionError::MinCells(row_count_sz / 2) ); ensure!( cells.len() <= row_count_sz, - "Number of `cells` must be equal or less than `row_count`" + ReconstructionError::MaxCells(row_count_sz) ); ensure!( check_cells(cells), - "At least one row is not from same column" + ReconstructionError::CellsFromDifferentCols ); let eval_domain = EvaluationDomain::new(row_count_sz) - .map_err(|e| format!("Evaluation domain cannot be created: {e:?}"))?; + .map_err(|_| ReconstructionError::InvalidEvaluationDomain)?; let mut subset: Vec> = Vec::with_capacity(row_count_sz); // fill up vector in ordered fashion diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 58a9d053..544bfc29 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -1,16 +1,15 @@ +use avail_core::{ensure, AppId, DataLookup}; use core::{ array::TryFromSliceError, convert::{TryFrom, TryInto}, num::TryFromIntError, }; - -use avail_core::{ensure, AppId, DataLookup}; -#[cfg(feature = "std")] use dusk_bytes::Serializable; use dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, prelude::{BlsScalar, CommitKey, PublicParameters}, }; +use sp_std::prelude::*; use thiserror_no_std::Error; use crate::{ @@ -53,7 +52,6 @@ impl std::error::Error for Error { } } -#[cfg(feature = "std")] impl From for Error { fn from(e: dusk_bytes::Error) -> Self { match e { diff --git a/kate/recovery/src/data.rs b/kate/recovery/src/data.rs index ad8a03a0..d55fab75 100644 --- a/kate/recovery/src/data.rs +++ 
b/kate/recovery/src/data.rs @@ -1,6 +1,6 @@ use core::convert::TryInto; use derive_more::Constructor; -use sp_std::collections::btree_map::BTreeMap; +use sp_std::{collections::btree_map::BTreeMap, vec::Vec}; use crate::matrix::{Dimensions, Position, RowIndex}; @@ -23,6 +23,7 @@ pub struct Cell { } impl Cell { + #[cfg(feature = "std")] pub fn reference(&self, block: u32) -> String { self.position.reference(block) } diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index 88173b45..f87d70cc 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -1,14 +1,15 @@ -use derive_more::Constructor; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use std::{ +use crate::config::{self, CHUNK_SIZE}; +use core::{ convert::TryInto, fmt::{Display, Formatter, Result}, num::NonZeroU16, ops::{Mul, Range}, }; +use derive_more::Constructor; +use sp_std::prelude::*; -use crate::config::{self, CHUNK_SIZE}; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; const EXTENSION_FACTOR_U32: u32 = config::EXTENSION_FACTOR as u32; @@ -51,6 +52,7 @@ impl Display for Position { impl Position { /// Refrence in format `block_number:column_number:row_number` + #[cfg(feature = "std")] pub fn reference(&self, block_number: u32) -> String { format!("{}:{}", block_number, self) } @@ -62,18 +64,20 @@ impl Position { } /// Matrix partition (column-wise) -#[derive(Serialize, Deserialize, Clone, Debug)] +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Partition { pub number: u8, pub fraction: u8, } /// Matrix row index -#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct RowIndex(pub u32); impl RowIndex { /// Refrence in format `block_number:row_number` + #[cfg(feature = "std")] pub fn reference(&self, block_number: u32) -> String { format!("{}:{}", block_number, self.0) } diff --git 
a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index 791566ca..b0c97657 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ -1,4 +1,3 @@ -#[cfg(feature = "std")] use dusk_bytes::Serializable; use dusk_plonk::{ bls12_381::G1Affine, @@ -12,21 +11,22 @@ use crate::{config::COMMITMENT_SIZE, data::Cell, matrix::Dimensions}; #[derive(Error, Debug)] pub enum Error { - #[error("Proof, data or commitment is not valid: {0}")] - InvalidData(String), - #[error("Evaluation domain is not valid for given dimensions: {0}")] - InvalidDomain(String), - #[error("Public parameters degree is to small for given dimensions: {0}")] - InvalidDegree(String), + #[error("Proof, data or commitment is not valid")] + InvalidData, + #[error("Evaluation domain is not valid for given dimensions")] + InvalidDomain, + #[error("Public parameters degree is to small for given dimensions")] + InvalidDegree, + #[error("Position isn't in domain")] + InvalidPositionInDomain, } #[cfg(feature = "std")] impl std::error::Error for Error {} -#[cfg(feature = "std")] impl From for Error { - fn from(error: dusk_bytes::Error) -> Self { - Error::InvalidData(format!("{error:?}")) + fn from(_: dusk_bytes::Error) -> Self { + Error::InvalidData } } @@ -51,13 +51,13 @@ pub fn verify( let cols: usize = dimensions.cols().get().into(); let point = EvaluationDomain::new(cols) - .map_err(|error| Error::InvalidDomain(format!("{error:?}")))? + .map_err(|_| Error::InvalidDomain)? 
.elements() .nth(cell.position.col.into()) - .ok_or_else(|| Error::InvalidDomain("Position isn't in domain".to_string()))?; + .ok_or(Error::InvalidPositionInDomain)?; public_parameters .trim(cols) .map(|(_, verifier_key)| verifier_key.check(point, proof)) - .map_err(|error| Error::InvalidDegree(format!("{error:?}"))) + .map_err(|_| Error::InvalidDegree) } diff --git a/kate/recovery/src/sparse_slice_read.rs b/kate/recovery/src/sparse_slice_read.rs new file mode 100644 index 00000000..7a1bdb8a --- /dev/null +++ b/kate/recovery/src/sparse_slice_read.rs @@ -0,0 +1,51 @@ +use core::iter::FromIterator; +use std::{ + collections::VecDeque, + io::{Read, Result}, +}; + +/// It is a Codec Reader which allows decoding from non-sequential data. +pub struct SparseSliceRead<'a> { + parts: VecDeque<&'a [u8]>, +} + +impl<'a> FromIterator<&'a [u8]> for SparseSliceRead<'a> { + fn from_iter>(iter: I) -> Self { + let parts = VecDeque::from_iter(iter); + Self { parts } + } +} + +impl<'a> Read for SparseSliceRead<'a> { + fn read(&mut self, mut buf: &mut [u8]) -> Result { + let mut bytes = 0usize; + + loop { + let buf_len = buf.len(); + if buf_len == 0 || self.parts.is_empty() { + break; + } + + if let Some(next_part) = self.parts.pop_front() { + // Define max copied bytes and pending for next iteration. + let copied_len = std::cmp::min(next_part.len(), buf_len); + bytes += copied_len; + + // Copy data into `buf`. + let (source, pending_next_part) = next_part.split_at(copied_len); + let (dest, pending_buf) = buf.split_at_mut(copied_len); + dest.copy_from_slice(source); + + // Advance output buffer. 
+ buf = pending_buf; + + // Reinsert if it is still pending + if !pending_next_part.is_empty() { + self.parts.push_front(pending_next_part); + } + } + } + + Ok(bytes) + } +} diff --git a/kate/src/com.rs b/kate/src/com.rs index e8e95e47..ca02e3a2 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -498,20 +498,15 @@ fn commit( #[cfg(test)] mod tests { - use std::{convert::TryInto, iter::repeat}; - + use avail_core::DataLookup; use dusk_bytes::Serializable; use dusk_plonk::bls12_381::BlsScalar; use hex_literal::hex; use kate_recovery::{ - com::{ - app_specific_cells, app_specific_rows, decode_app_extrinsics, - reconstruct_app_extrinsics, reconstruct_extrinsics, unflatten_padded_data, - ReconstructionError, - }, + com::*, commitments, config, + config::CHUNK_SIZE, data::{self, DataCell}, - index::{AppDataIndex, AppDataIndexError}, matrix::{Dimensions, Position}, proof, }; @@ -521,6 +516,8 @@ mod tests { }; use rand::{prelude::IteratorRandom, Rng, SeedableRng}; use sp_arithmetic::Percent; + use static_assertions::const_assert; + use std::{convert::TryInto, iter::repeat}; use test_case::test_case; use super::*; @@ -533,39 +530,13 @@ mod tests { const TCHUNK: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) }; - fn app_data_index_try_from_layout( - layout: Vec<(AppId, u32)>, - ) -> Result { - let mut index = Vec::new(); - // transactions are ordered by application id - // skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = AppId(0u32); - - for (app_id, data_len) in layout { - if app_id.0 != 0 && prev_app_id != app_id { - index.push((app_id.0, size)); - } - - size = size - .checked_add(data_len) - .ok_or(AppDataIndexError::SizeOverflow)?; - if prev_app_id > app_id { - return Err(AppDataIndexError::UnsortedLayout); - } - prev_app_id = app_id; - } - - Ok(AppDataIndex { size, index }) - } - fn scalars_to_app_rows( - app_id: u32, - index: &AppDataIndex, + id: AppId, + lookup: &DataLookup, dimensions: Dimensions, 
matrix: &DMatrix, ) -> Vec>> { - let app_rows = app_specific_rows(index, dimensions, app_id); + let app_rows = app_specific_rows(lookup, dimensions, id); dimensions .iter_extended_rows() .map(|i| { @@ -686,17 +657,20 @@ mod tests { assert_eq!(dims, expected_dims, "Dimensions don't match the expected"); assert_eq!(data, expected_data, "Data doesn't match the expected data"); - let index = app_data_index_try_from_layout(layout).unwrap(); - let res = unflatten_padded_data(index.data_ranges(), data).unwrap(); + let lookup = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + + const_assert!((CHUNK_SIZE as u64) <= (u32::MAX as u64)); + let data_lookup = lookup.projected_ranges(CHUNK_SIZE as u32).unwrap(); + let res = unflatten_padded_data(data_lookup, data).unwrap(); assert_eq!( res.len(), extrinsics.len(), "Number of extrinsics is not as expected." ); - for (res, exp) in res.iter().zip(extrinsics.iter()) { - assert_eq!(res.0, *exp.app_id); - assert_eq!(res.1[0], exp.data); + for ((id, data), exp) in res.iter().zip(extrinsics.iter()) { + assert_eq!(id.0, *exp.app_id); + assert_eq!(data[0], exp.data); } } @@ -796,11 +770,11 @@ mod tests { let columns = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); let reconstructed = reconstruct_extrinsics(&index, extended_dims, columns).unwrap(); - for (result, xt) in reconstructed.iter().zip(xts) { - prop_assert_eq!(result.0, *xt.app_id); - prop_assert_eq!(result.1[0].as_slice(), &xt.data); + for ((app_id, data), xt) in reconstructed.iter().zip(xts) { + prop_assert_eq!(app_id.0, *xt.app_id); + prop_assert_eq!(data[0].as_slice(), &xt.data); } let dims_cols = usize::try_from(dims.cols.0).unwrap(); @@ -831,14 +805,14 @@ mod tests { fn test_commitments_verify(ref xts in app_extrinsics_strategy()) { let (layout, commitments, dims, matrix) = 
par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); let dims_cols = usize::try_from(dims.cols.0).unwrap(); let public_params = testnet::public_params(dims_cols); let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { - let rows = scalars_to_app_rows(xt.app_id.0, &index, extended_dims, &matrix); - let (_, missing) = commitments::verify_equality(&public_params, &commitments, rows.as_slice(), &index, extended_dims, xt.app_id.0).unwrap(); + let rows = scalars_to_app_rows(xt.app_id, &index, extended_dims, &matrix); + let (_, missing) = commitments::verify_equality(&public_params, &commitments, rows.as_slice(), &index, extended_dims, xt.app_id).unwrap(); prop_assert!(missing.is_empty()); } } @@ -851,16 +825,16 @@ mod tests { fn verify_commitmnets_missing_row(ref xts in app_extrinsics_strategy()) { let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); let dims_cols = usize::try_from(dims.cols.0).unwrap(); let public_params = testnet::public_params(dims_cols); let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { - let mut rows = scalars_to_app_rows(xt.app_id.0, &index, extended_dims, &matrix); + let mut rows = scalars_to_app_rows(xt.app_id, &index, extended_dims, &matrix); let app_row_index = rows.iter().position(Option::is_some).unwrap(); rows.remove(app_row_index); - let (_, missing) = commitments::verify_equality(&public_params, &commitments, &rows,&index, 
extended_dims,xt.app_id.0).unwrap(); + let (_, missing) = commitments::verify_equality(&public_params, &commitments, &rows,&index, extended_dims,xt.app_id).unwrap(); prop_assert!(!missing.is_empty()); } } @@ -921,13 +895,13 @@ get erasure coded to ensure redundancy."#; let extended_dims = dims.try_into()?; - let index = app_data_index_try_from_layout(layout).unwrap(); - let res_1 = reconstruct_app_extrinsics(&index, extended_dims, cols_1, 1).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let res_1 = reconstruct_app_extrinsics(&index, extended_dims, cols_1, AppId(1)).unwrap(); assert_eq!(res_1[0], app_id_1_data); let cols_2 = sample_cells_from_matrix(&matrix, Some(&[0, 2, 3])); - let res_2 = reconstruct_app_extrinsics(&index, extended_dims, cols_2, 2).unwrap(); + let res_2 = reconstruct_app_extrinsics(&index, extended_dims, cols_2, AppId(2)).unwrap(); assert_eq!(res_2[0], app_id_2_data); Ok(()) } @@ -958,9 +932,9 @@ get erasure coded to ensure redundancy."#; let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; let dimensions: Dimensions = dims.try_into()?; - let index = app_data_index_try_from_layout(layout).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); for xt in xts { - let positions = app_specific_cells(&index, dimensions, xt.app_id.0).unwrap(); + let positions = app_specific_cells(&index, dimensions, xt.app_id).unwrap(); let cells = positions .into_iter() .map(|position| { @@ -970,12 +944,12 @@ get erasure coded to ensure redundancy."#; DataCell::new(position, data) }) .collect::>(); - let data = &decode_app_extrinsics(&index, dimensions, cells, xt.app_id.0).unwrap()[0]; + let data = &decode_app_extrinsics(&index, dimensions, cells, xt.app_id).unwrap()[0]; assert_eq!(data, &xt.data); } assert!(matches!( - decode_app_extrinsics(&index, dimensions, vec![], 0), + decode_app_extrinsics(&index, dimensions, vec![], AppId(0)), Err(ReconstructionError::MissingCell { .. 
}) )); Ok(()) @@ -1003,7 +977,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let cols = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into()?; - let index = app_data_index_try_from_layout(layout).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); let res = reconstruct_extrinsics(&index, extended_dims, cols).unwrap(); let s = String::from_utf8_lossy(res[0].1[0].as_slice()); @@ -1036,7 +1010,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let cols = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); let res = reconstruct_extrinsics(&index, extended_dims, cols).unwrap(); assert_eq!(res[0].1[0], xt1); From e9a6d9cb3baafea8ab1dbabc0c0ef8dcb431f4dc Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 10:13:12 +0200 Subject: [PATCH 72/87] Dedup `Index` related stuff --- kate/src/gridgen/mod.rs | 8 ++++---- kate/src/gridgen/tests/commitments.rs | 6 ++---- kate/src/gridgen/tests/formatting.rs | 21 +++++++-------------- kate/src/gridgen/tests/mod.rs | 15 ++------------- kate/src/gridgen/tests/reconstruction.rs | 22 +++++++++------------- 5 files changed, 24 insertions(+), 48 deletions(-) diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 875c93e8..89794448 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -114,7 +114,7 @@ impl EvaluationGrid { .map(|(app, scalars)| (*app, scalars.len())); // make the index of app info - let lookup = DataLookup::new_from_id_lenght(len_by_app)?; + let lookup = DataLookup::from_id_and_len_iter(len_by_app)?; let grid_size = usize::try_from(lookup.len())?; let (rows, cols): (usize, usize) = get_block_dims(grid_size, min_width, max_width, max_height)?.into(); @@ -189,12 +189,12 @@ impl 
EvaluationGrid { Ok(lineal_index / cols) }; - let (data_begin, data_end) = self + let range = self .lookup .range_of(app_id) .ok_or(AppRowError::IdNotFound(app_id))?; - let start_y: usize = row_from_lineal_index(orig_dims.cols(), data_begin)?; - let end_y: usize = row_from_lineal_index(orig_dims.cols(), data_end.saturating_sub(1))?; + let start_y: usize = row_from_lineal_index(orig_dims.cols(), range.start)?; + let end_y: usize = row_from_lineal_index(orig_dims.cols(), range.end.saturating_sub(1))?; // SAFETY: This won't overflow because `h_mul = rows / orig_dim.rows()` and `*_y < rows) debug_assert!(start_y < rows); diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index 2196e2ee..ccdd46f2 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -86,7 +86,6 @@ proptest! { .map(|c| c.to_bytes().unwrap()) .collect::>(); - let index = app_data_index_from_lookup(&grid.lookup); let public_params = testnet::public_params(BlockLengthColumns(g_cols as u32)); for xt in exts.iter() { @@ -98,7 +97,7 @@ proptest! { } // Need to provide the original dimensions here too let extended_dims = orig_dims.clone(); - let (_, missing) = verify_equality(&public_params, &commits, &app_rows, &index, extended_dims, xt.app_id.0).unwrap(); + let (_, missing) = verify_equality(&public_params, &commits, &app_rows, &grid.lookup, extended_dims, xt.app_id).unwrap(); prop_assert!(missing.is_empty()); } } @@ -114,7 +113,6 @@ proptest! { .map(|c| c.to_bytes().unwrap()) .collect::>(); - let index = app_data_index_from_lookup(&grid.lookup); let public_params = testnet::public_params( BlockLengthColumns(g_cols.into())); for xt in xts { @@ -127,7 +125,7 @@ proptest! 
{ row_elems.remove(first_index); let extended_dims = orig_dims.transpose(); - let (_, missing) = verify_equality(&public_params, &commits, &row_elems,&index,extended_dims,xt.app_id.0).unwrap(); + let (_, missing) = verify_equality(&public_params, &commits, &row_elems,&grid.lookup,extended_dims,xt.app_id).unwrap(); prop_assert!(!missing.is_empty()); } } diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index 46b17f37..c8cef4c1 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -10,10 +10,7 @@ use poly_multiproof::traits::AsBytes; use crate::{ config::DATA_CHUNK_SIZE, - gridgen::{ - tests::{app_data_index_from_lookup, sample_cells}, - ArkScalar, EvaluationGrid, - }, + gridgen::{tests::sample_cells, ArkScalar, EvaluationGrid}, Seed, }; use core::num::NonZeroU16; @@ -30,10 +27,8 @@ fn newapi_test_flatten_block() { let expected_dims = Dimensions::new_from(1, 16).unwrap(); let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); - let expected_lookup = DataLookup::new_from_id_lenght( - [(AppId(0), 2), (AppId(1), 2), (AppId(2), 2), (AppId(3), 3)].into_iter(), - ) - .unwrap(); + let id_lens: Vec<(u32, usize)> = vec![(0, 2), (1, 2), (2, 2), (3, 3)]; + let expected_lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); assert_eq!( @@ -122,10 +117,9 @@ get erasure coded to ensure redundancy."#; .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); - let index = app_data_index_from_lookup(&grid.lookup); let bdims = grid.dims(); for xt in &xts { - let positions = app_specific_cells(&index, bdims, xt.app_id.0).unwrap(); + let positions = app_specific_cells(&grid.lookup, bdims, xt.app_id).unwrap(); let cells = positions .iter() .map(|pos| DataCell { @@ -138,12 +132,12 @@ get erasure coded to ensure redundancy."#; .unwrap(), }) .collect::>(); - let data = 
&decode_app_extrinsics(&index, bdims, cells, xt.app_id.0).unwrap()[0]; + let data = &decode_app_extrinsics(&grid.lookup, bdims, cells, xt.app_id).unwrap()[0]; assert_eq!(data, &xt.data); } assert!(matches!( - decode_app_extrinsics(&index, bdims, vec![], 0), + decode_app_extrinsics(&grid.lookup, bdims, vec![], AppId(0)), Err(kate_recovery::com::ReconstructionError::MissingCell { .. }) )); } @@ -165,8 +159,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let cols = sample_cells(&grid, None); let bdims = grid.dims(); - let index = app_data_index_from_lookup(&grid.lookup); - let res = reconstruct_extrinsics(&index, bdims, cols).unwrap(); + let res = reconstruct_extrinsics(&grid.lookup, bdims, cols).unwrap(); let s = String::from_utf8_lossy(res[0].1[0].as_slice()); assert_eq!(s, orig_data); diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index 790d9c5f..5475647e 100644 --- a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -1,5 +1,5 @@ -use avail_core::{AppExtrinsic, AppId, DataLookup}; -use kate_recovery::{data::DataCell, index::AppDataIndex, matrix::Position}; +use avail_core::{AppExtrinsic, AppId}; +use kate_recovery::{data::DataCell, matrix::Position}; use once_cell::sync::Lazy; use poly_multiproof::{m1_blst::M1NoPrecomp, traits::AsBytes}; use proptest::{collection, prelude::*, sample::size_range}; @@ -35,17 +35,6 @@ fn app_extrinsics_strategy() -> impl Strategy> { }) } -fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { - AppDataIndex { - size: lookup.len(), - index: lookup - .index() - .iter() - .map(|e| (e.app_id.0, e.start)) - .collect(), - } -} - fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { let mut sampled = vec![]; let u = Uniform::from(0..n); diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index e61c36ec..716a2352 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ 
b/kate/src/gridgen/tests/reconstruction.rs @@ -1,4 +1,4 @@ -use super::{app_data_index_from_lookup, PMP}; +use super::PMP; use crate::{ com::Cell, gridgen::{tests::sample_cells, EvaluationGrid}, @@ -7,7 +7,7 @@ use crate::{ use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; use core::num::NonZeroU16; use kate_recovery::{ - com::reconstruct_extrinsics, + com::{reconstruct_app_extrinsics, reconstruct_extrinsics}, data::Cell as DCell, matrix::{Dimensions, Position}, }; @@ -32,10 +32,9 @@ fn test_multiple_extrinsics_for_same_app_id() { .unwrap(); let cells = sample_cells(&ev, None); - let index = app_data_index_from_lookup(&ev.lookup); let (rows, cols): (u16, u16) = ev.dims().into(); let bdims = Dimensions::new_from(rows, cols).unwrap(); - let res = reconstruct_extrinsics(&index, bdims, cells).unwrap(); + let res = reconstruct_extrinsics(&ev.lookup, bdims, cells).unwrap(); assert_eq!(res[0].1[0], xt1); assert_eq!(res[0].1[1], xt2); @@ -52,12 +51,11 @@ fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { const RNG_SEED: Seed = [42u8; 32]; let cells = sample_cells(&grid, None); - let index = app_data_index_from_lookup(&grid.lookup); let bdims = Dimensions::new_from(rows, cols).unwrap(); - let reconstructed = reconstruct_extrinsics(&index, bdims, cells).unwrap(); - for (result, xt) in reconstructed.iter().zip(exts) { - prop_assert_eq!(result.0, *xt.app_id); - prop_assert_eq!(result.1[0].as_slice(), &xt.data); + let reconstructed = reconstruct_extrinsics(&grid.lookup, bdims, cells).unwrap(); + for ((id,data), xt) in reconstructed.iter().zip(exts) { + prop_assert_eq!(id.0, *xt.app_id); + prop_assert_eq!(data[0].as_slice(), &xt.data); } let pp = &*PMP; @@ -106,14 +104,12 @@ get erasure coded to ensure redundancy."#; let cols_1 = sample_cells(&grid, Some(vec![0, 1, 2, 3])); - let index = app_data_index_from_lookup(&grid.lookup); - let bdims = grid.dims(); - let res_1 = kate_recovery::com::reconstruct_app_extrinsics(&index, bdims, 
cols_1, 1).unwrap(); + let res_1 = reconstruct_app_extrinsics(&grid.lookup, bdims, cols_1, AppId(1)).unwrap(); assert_eq!(res_1[0], app_id_1_data); let cols_2 = sample_cells(&grid, Some(vec![0, 2, 3])); - let res_2 = kate_recovery::com::reconstruct_app_extrinsics(&index, bdims, cols_2, 2).unwrap(); + let res_2 = reconstruct_app_extrinsics(&grid.lookup, bdims, cols_2, AppId(2)).unwrap(); assert_eq!(res_2[0], app_id_2_data); } From 3d456d70e80b2c7cf4b3cc48a24a7abd8fd59ad0 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 11:03:38 +0200 Subject: [PATCH 73/87] Clean some deps in `nomad` --- Cargo.lock | 680 ++++++++++++++++++++++++++++--- kate/benches/reconstruct.rs | 55 +-- nomad/base/Cargo.toml | 14 +- nomad/base/src/lib.rs | 8 +- nomad/core/Cargo.toml | 13 +- nomad/core/src/nomad_message.rs | 17 +- nomad/core/src/state.rs | 5 +- nomad/core/src/update.rs | 4 +- nomad/core/src/update_v2.rs | 4 +- nomad/merkle/Cargo.toml | 13 +- nomad/signature/Cargo.toml | 16 +- nomad/signature/src/signature.rs | 18 +- 12 files changed, 687 insertions(+), 160 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 92fb1764..77767f84 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -36,6 +36,17 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "aes" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + [[package]] name = "ahash" version = "0.4.7" @@ -268,13 +279,25 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "async-trait" -version = "0.1.70" +version = "0.1.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" 
+dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.25", +] + +[[package]] +name = "auto_impl" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79fa67157abdfd688a259b6648808757db9347af834624f27ec646da976aee5d" +checksum = "fee3da8ef1276b0bee5dd1c7258010d8fffd31801447323115a25560e1327b89" dependencies = [ + "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.23", + "syn 1.0.109", ] [[package]] @@ -329,12 +352,34 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" +[[package]] +name = "base58" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" + [[package]] name = "base58" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581" +[[package]] +name = "base58check" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ee2fe4c9a0c84515f136aaae2466744a721af6d63339c18689d9e995d74d99b" +dependencies = [ + "base58 0.1.0", + "sha2 0.8.2", +] + +[[package]] +name = "base64" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" + [[package]] name = "base64" version = "0.13.1" @@ -347,6 +392,12 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bech32" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dabbe35f96fb9507f7330793dc490461b2962659ac5d427181e451a623751d1" + [[package]] name = "beefy-merkle-tree" version = "4.0.0-dev" @@ -393,6 +444,16 @@ version = "2.3.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +[[package]] +name = "bitvec" +version = "0.17.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" +dependencies = [ + "either", + "radium 0.3.0", +] + [[package]] name = "bitvec" version = "1.0.1" @@ -400,7 +461,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ "funty", - "radium", + "radium 0.7.0", "tap", "wyz", ] @@ -464,6 +525,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "bs58" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" + [[package]] name = "bumpalo" version = "3.13.0" @@ -499,6 +566,9 @@ name = "bytes" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +dependencies = [ + "serde", +] [[package]] name = "cast" @@ -566,6 +636,16 @@ dependencies = [ "half", ] +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + [[package]] name = "ckb-merkle-mountain-range" version = "0.5.2" @@ -577,18 +657,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.10" +version = "4.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "384e169cc618c613d5e3ca6404dda77a8685a63e08660dcc64abaf7da7cb0c7a" +checksum = "1640e5cc7fb47dbb8338fd471b105e7ed6c3cb2aeb00c2e067127ffd3764a05d" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.3.10" 
+version = "4.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef137bbe35aab78bdb468ccfba75a5f4d8321ae011d34063770780545176af2d" +checksum = "98c59138d527eeaf9b53f35a77fcc1fad9d883116070c63d5de1c7dc7b00c72b" dependencies = [ "anstyle", "clap_lex", @@ -600,6 +680,63 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" +[[package]] +name = "coins-bip32" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634c509653de24b439672164bbf56f5f582a2ab0e313d3b0f6af0b7345cf2560" +dependencies = [ + "bincode", + "bs58", + "coins-core", + "digest 0.10.7", + "getrandom 0.2.10", + "hmac 0.12.1", + "k256", + "lazy_static", + "serde", + "sha2 0.10.7", + "thiserror", +] + +[[package]] +name = "coins-bip39" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" +dependencies = [ + "bitvec 0.17.4", + "coins-bip32", + "getrandom 0.2.10", + "hex", + "hmac 0.12.1", + "pbkdf2 0.11.0", + "rand 0.8.5", + "sha2 0.10.7", + "thiserror", +] + +[[package]] +name = "coins-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94090a6663f224feae66ab01e41a2555a8296ee07b5f20dab8888bdefc9f617" +dependencies = [ + "base58check", + "base64 0.12.3", + "bech32", + "blake2", + "digest 0.10.7", + "generic-array 0.14.7", + "hex", + "ripemd", + "serde", + "serde_derive", + "sha2 0.10.7", + "sha3", + "thiserror", +] + [[package]] name = "const-oid" version = "0.9.3" @@ -612,6 +749,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -629,9 +775,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03e69e28e9f7f77debdedbaafa2866e1de9ba56df55a8bd7cfc724c25a09987c" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -781,6 +927,15 @@ dependencies = [ "subtle", ] +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + [[package]] name = "curve25519-dalek" version = "2.1.3" @@ -845,7 +1000,7 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ - "convert_case", + "convert_case 0.4.0", "proc-macro2", "quote", "rustc_version", @@ -1030,6 +1185,7 @@ dependencies = [ "ff", "generic-array 0.14.7", "group", + "pkcs8", "rand_core 0.6.4", "sec1", "subtle", @@ -1080,6 +1236,123 @@ dependencies = [ "libc", ] +[[package]] +name = "eth-keystore" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" +dependencies = [ + "aes", + "ctr", + "digest 0.10.7", + "hex", + "hmac 0.12.1", + "pbkdf2 0.11.0", + "rand 0.8.5", + "scrypt", + "serde", + "serde_json", + "sha2 0.10.7", + "sha3", + "thiserror", + "uuid", +] + +[[package]] +name = "ethabi" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" +dependencies = [ + "ethereum-types", + "hex", + 
"once_cell", + "regex", + "serde", + "serde_json", + "sha3", + "thiserror", + "uint", +] + +[[package]] +name = "ethbloom" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" +dependencies = [ + "crunchy", + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "scale-info", + "tiny-keccak", +] + +[[package]] +name = "ethereum-types" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" +dependencies = [ + "ethbloom", + "fixed-hash", + "impl-codec", + "impl-rlp", + "impl-serde", + "primitive-types", + "scale-info", + "uint", +] + +[[package]] +name = "ethers-core" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" +dependencies = [ + "arrayvec 0.7.4", + "bytes", + "chrono", + "convert_case 0.6.0", + "elliptic-curve", + "ethabi", + "generic-array 0.14.7", + "hex", + "k256", + "open-fastrlp", + "proc-macro2", + "rand 0.8.5", + "rlp", + "rlp-derive", + "serde", + "serde_json", + "strum", + "syn 1.0.109", + "thiserror", + "tiny-keccak", + "unicode-xid", +] + +[[package]] +name = "ethers-signers" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f41ced186867f64773db2e55ffdd92959e094072a1d09a5e5e831d443204f98" +dependencies = [ + "async-trait", + "coins-bip32", + "coins-bip39", + "elliptic-curve", + "eth-keystore", + "ethers-core", + "hex", + "rand 0.8.5", + "sha2 0.10.7", + "thiserror", +] + [[package]] name = "fake-simd" version = "0.1.2" @@ -1272,7 +1545,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -1342,8 +1615,10 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -1433,6 +1708,12 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + [[package]] name = "hermit-abi" version = "0.3.2" @@ -1526,6 +1807,15 @@ dependencies = [ "parity-scale-codec", ] +[[package]] +name = "impl-rlp" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" +dependencies = [ + "rlp", +] + [[package]] name = "impl-serde" version = "0.4.0" @@ -1567,6 +1857,15 @@ dependencies = [ "hashbrown 0.14.0", ] +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array 0.14.7", +] + [[package]] name = "instant" version = "0.1.12" @@ -1604,12 +1903,12 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24fddda5af7e54bf7da53067d6e802dbcc381d0a8eef629df528e3ebf68755cb" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi", - "rustix 0.38.2", + "rustix 0.38.3", "windows-sys 0.48.0", ] @@ -1656,6 +1955,7 @@ dependencies = [ "ecdsa", "elliptic-curve", "sha2 0.10.7", + "sha3", ] [[package]] @@ -1703,6 +2003,7 @@ dependencies = [ "rand_chacha 0.3.1", "serde", "sp-arithmetic", + 
"sp-std", "static_assertions", "test-case", "thiserror-no-std", @@ -1742,7 +2043,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95b09eff1b35ed3b33b877ced3a691fc7a481919c7e29c53c906226fcf55e2a1" dependencies = [ "arrayref", - "base64", + "base64 0.13.1", "digest 0.9.0", "hmac-drbg", "libsecp256k1-core", @@ -1841,7 +2142,7 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1" dependencies = [ - "regex-automata", + "regex-automata 0.1.10", ] [[package]] @@ -1929,9 +2230,9 @@ dependencies = [ [[package]] name = "nalgebra" -version = "0.32.2" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d68d47bba83f9e2006d117a9a33af1524e655516b8919caac694427a6fb1e511" +checksum = "307ed9b18cc2423f29e83f84fd23a8e73628727990181f18641a8b5dc2ab1caa" dependencies = [ "approx", "matrixmultiply", @@ -1948,6 +2249,77 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" +[[package]] +name = "nomad-base" +version = "0.1.3" +dependencies = [ + "ethers-signers", + "nomad-core", + "nomad-signature", + "once_cell", + "parity-scale-codec", + "scale-info", + "serde", + "sp-core", + "sp-runtime", +] + +[[package]] +name = "nomad-core" +version = "0.1.3" +dependencies = [ + "async-trait", + "ethers-core", + "ethers-signers", + "nomad-signature", + "parity-scale-codec", + "primitive-types", + "scale-info", + "serde", + "sp-core", + "sp-runtime", + "sp-std", + "tiny-keccak", +] + +[[package]] +name = "nomad-merkle" +version = "0.1.1" +dependencies = [ + "ethers-core", + "frame-support", + "hex-literal", + "nomad-core", + "parity-scale-codec", + "scale-info", + "serde", + "serde_json", + "sp-core", + "static_assertions", + "thiserror-no-std", + "tiny-keccak", +] + +[[package]] +name = "nomad-signature" 
+version = "0.1.1" +dependencies = [ + "byte-slice-cast", + "elliptic-curve", + "ethers-core", + "frame-support", + "generic-array 0.14.7", + "hex", + "k256", + "parity-scale-codec", + "scale-info", + "serde", + "sp-core", + "sp-runtime", + "thiserror-no-std", + "tiny-keccak", +] + [[package]] name = "num-bigint" version = "0.4.3" @@ -2065,6 +2437,31 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +[[package]] +name = "open-fastrlp" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" +dependencies = [ + "arrayvec 0.7.4", + "auto_impl", + "bytes", + "ethereum-types", + "open-fastrlp-derive", +] + +[[package]] +name = "open-fastrlp-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" +dependencies = [ + "bytes", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "parity-scale-codec" version = "3.6.3" @@ -2072,7 +2469,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "756d439303e94fae44f288ba881ad29670c65b0c4b0e05674ca81061bb65f2c5" dependencies = [ "arrayvec 0.7.4", - "bitvec", + "bitvec 1.0.1", "byte-slice-cast", "bytes", "impl-trait-for-tuples", @@ -2121,6 +2518,17 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "password-hash" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + [[package]] name = "paste" version = "1.0.13" @@ -2143,6 +2551,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ "digest 0.10.7", + "hmac 0.12.1", + "password-hash", + "sha2 0.10.7", ] [[package]] @@ -2224,6 +2635,7 @@ checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" dependencies = [ "fixed-hash", "impl-codec", + "impl-rlp", "impl-serde", "scale-info", "uint", @@ -2239,11 +2651,35 @@ dependencies = [ "toml_edit", ] +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + [[package]] name = "proc-macro2" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" +checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" dependencies = [ "unicode-ident", ] @@ -2292,6 +2728,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" + [[package]] name = "radium" version = "0.7.0" @@ -2432,18 +2874,19 @@ checksum = "68bf53dad9b6086826722cdc99140793afd9f62faa14a1ad07eb4f955e7a7216" dependencies = [ "proc-macro2", "quote", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] name = "regex" -version = "1.8.4" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.2", + "regex-automata 0.3.2", + "regex-syntax 0.7.3", ] [[package]] @@ -2455,6 +2898,17 @@ dependencies = [ "regex-syntax 0.6.29", ] +[[package]] +name = "regex-automata" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.7.3", +] + [[package]] name = "regex-syntax" version = "0.6.29" @@ -2463,9 +2917,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" +checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" [[package]] name = "rfc6979" @@ -2478,6 +2932,36 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ripemd" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes", + "rustc-hex", +] + +[[package]] +name = "rlp-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "rustc-demangle" version = "0.1.23" @@ -2507,9 +2991,9 @@ dependencies = [ [[package]] 
name = "rustix" -version = "0.35.13" +version = "0.35.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727a1a6d65f786ec22df8a81ca3121107f235970dc1705ed681d3e6e8b9cd5f9" +checksum = "6380889b07a03b5ecf1d44dc9ede6fd2145d84b502a2a9ca0b03c48e0cc3220f" dependencies = [ "bitflags 1.3.2", "errno 0.2.8", @@ -2521,9 +3005,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.22" +version = "0.37.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8818fa822adcc98b18fedbb3632a6a33213c070556b5aa7c4c8cc21cff565c4c" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" dependencies = [ "bitflags 1.3.2", "errno 0.3.1", @@ -2535,9 +3019,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.2" +version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aabcb0461ebd01d6b79945797c27f8529082226cb630a9865a71870ff63532a4" +checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" dependencies = [ "bitflags 2.3.3", "errno 0.3.1", @@ -2546,6 +3030,12 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "rustversion" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" + [[package]] name = "rusty-fork" version = "0.3.0" @@ -2573,6 +3063,15 @@ dependencies = [ "bytemuck", ] +[[package]] +name = "salsa20" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" +dependencies = [ + "cipher", +] + [[package]] name = "same-file" version = "1.0.6" @@ -2588,7 +3087,7 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35c0a159d0c45c12b20c5a844feb1fe4bea86e28f17b92a5f0c42193634d3782" dependencies = [ - "bitvec", + "bitvec 1.0.1", "cfg-if", "derive_more", 
"parity-scale-codec", @@ -2632,6 +3131,18 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +[[package]] +name = "scrypt" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" +dependencies = [ + "hmac 0.12.1", + "pbkdf2 0.11.0", + "salsa20", + "sha2 0.10.7", +] + [[package]] name = "sec1" version = "0.3.0" @@ -2681,22 +3192,22 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" [[package]] name = "serde" -version = "1.0.166" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8" +checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.166" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6" +checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" dependencies = [ "proc-macro2", "quote", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -2799,9 +3310,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "sp-api" @@ -2883,7 +3394,7 @@ version = "7.0.0" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" dependencies = [ "array-bytes", - "base58", + "base58 0.2.0", "bitflags 1.3.2", "blake2", "dyn-clonable", @@ -3278,6 +3789,28 
@@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 1.0.109", +] + [[package]] name = "substrate-bip39" version = "0.4.4" @@ -3310,9 +3843,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.23" +version = "2.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737" +checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" dependencies = [ "proc-macro2", "quote", @@ -3341,7 +3874,7 @@ dependencies = [ "cfg-if", "fastrand", "redox_syscall", - "rustix 0.37.22", + "rustix 0.37.23", "windows-sys 0.48.0", ] @@ -3360,22 +3893,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.41" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c16a64ba9387ef3fdae4f9c1a7f07a0997fce91985c0336f1ddc1822b3b37802" +checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.41" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d14928354b01c4d6a4f0e549069adef399a284e7995c7ccca94e8a07a5346c59" +checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.23", + "syn 
2.0.25", ] [[package]] @@ -3436,6 +3969,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tinytemplate" version = "1.2.1" @@ -3469,9 +4011,9 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.11" +version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266f016b7f039eec8a1a80dfe6156b633d208b9fccca5e4db1d6775b0c4e34a7" +checksum = "c500344a19072298cd05a7224b3c0c629348b78692bf48466c5238656e315a78" dependencies = [ "indexmap 2.0.0", "toml_datetime", @@ -3498,7 +4040,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -3633,12 +4175,28 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + [[package]] name = "unicode-xid" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom 0.2.10", + "serde", +] + [[package]] name = "valuable" version = "0.1.0" @@ -3703,7 +4261,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.23", + "syn 2.0.25", "wasm-bindgen-shared", ] @@ -3725,7 +4283,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies 
= [ "proc-macro2", "quote", - "syn 2.0.23", + "syn 2.0.25", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3846,7 +4404,7 @@ dependencies = [ "log", "object 0.29.0", "rustc-demangle", - "rustix 0.35.13", + "rustix 0.35.14", "serde", "target-lexicon", "thiserror", @@ -3880,7 +4438,7 @@ dependencies = [ "memoffset 0.6.5", "paste", "rand 0.8.5", - "rustix 0.35.13", + "rustix 0.35.14", "thiserror", "wasmtime-asm-macros", "wasmtime-environ", @@ -4128,9 +4686,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca0ace3845f0d96209f0375e6d367e3eb87eb65d27d445bdc9f1843a26f39448" +checksum = "81a2094c43cc94775293eaa0e499fbc30048a6d824ac82c0351a8c0bf9112529" dependencies = [ "memchr", ] @@ -4161,5 +4719,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.23", + "syn 2.0.25", ] diff --git a/kate/benches/reconstruct.rs b/kate/benches/reconstruct.rs index 4460647f..92c14dd1 100644 --- a/kate/benches/reconstruct.rs +++ b/kate/benches/reconstruct.rs @@ -1,6 +1,6 @@ +use avail_core::{AppExtrinsic, BlockLengthColumns, BlockLengthRows, DataLookup}; +use core::num::NonZeroU32; use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; -use da_primitives::{BlockLengthColumns, BlockLengthRows}; -use da_types::{AppExtrinsic, AppId}; use dusk_plonk::prelude::BlsScalar; use kate::{ com::{Cell, *}, @@ -11,7 +11,6 @@ use kate_recovery::{ com::reconstruct_extrinsics, commitments, data::{self, DataCell}, - index::*, matrix::Position, proof, testnet, }; @@ -69,32 +68,6 @@ fn sample_cells_from_matrix(matrix: &DMatrix, columns: Option<&[u16]> .collect() } -fn app_data_index_try_from_layout( - layout: Vec<(AppId, u32)>, -) -> Result { - let mut index = Vec::new(); - // transactions are ordered by application id - 
// skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = AppId(0u32); - - for (app_id, data_len) in layout { - if app_id.0 != 0 && prev_app_id != app_id { - index.push((app_id.0, size)); - } - - size = size - .checked_add(data_len) - .ok_or(AppDataIndexError::SizeOverflow)?; - if prev_app_id > app_id { - return Err(AppDataIndexError::UnsortedLayout); - } - prev_app_id = app_id; - } - - Ok(AppDataIndex { size, index }) -} - fn random_cells( max_cols: BlockLengthColumns, max_rows: BlockLengthRows, @@ -140,7 +113,7 @@ fn reconstruct(xts: &[AppExtrinsic]) { let (layout, commitments, dims, matrix) = par_build_commitments( BlockLengthRows(64), BlockLengthColumns(16), - 32, + unsafe { NonZeroU32::new_unchecked(32) }, xts, Seed::default(), &metrics, @@ -149,16 +122,17 @@ fn reconstruct(xts: &[AppExtrinsic]) { let columns = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); - let reconstructed = reconstruct_extrinsics(&index, extended_dims, columns).unwrap(); - for (result, xt) in reconstructed.iter().zip(xts) { - assert_eq!(result.0, *xt.app_id); - assert_eq!(result.1[0].as_slice(), &xt.data); + let lookup = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let reconstructed = reconstruct_extrinsics(&lookup, extended_dims, columns).unwrap(); + for ((app_id, data), xt) in reconstructed.iter().zip(xts) { + assert_eq!(app_id.0, *xt.app_id); + assert_eq!(data[0].as_slice(), &xt.data); } - let public_params = testnet::public_params(dims.cols.as_usize()); + let dims_cols: u32 = dims.cols.into(); + let public_params = testnet::public_params(usize::try_from(dims_cols).unwrap()); for cell in random_cells(dims.cols, dims.rows, Percent::one()) { - let row = cell.row.as_usize(); + let row: u32 = cell.row.into(); let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap(); assert_eq!(proof.len(), 
80); @@ -168,17 +142,14 @@ fn reconstruct(xts: &[AppExtrinsic]) { .0 .try_into() .expect("`random_cells` function generates a valid `u16` for columns"); - let position = Position { - row: cell.row.0, - col, - }; + let position = Position { row, col }; let cell = data::Cell { position, content: proof.try_into().unwrap(), }; let extended_dims = dims.try_into().unwrap(); - let commitment = commitments::from_slice(&commitments).unwrap()[row]; + let commitment = commitments::from_slice(&commitments).unwrap()[row as usize]; let verification = proof::verify(&public_params, extended_dims, &commitment, &cell); assert!(verification.is_ok()); assert!(verification.unwrap()); diff --git a/nomad/base/Cargo.toml b/nomad/base/Cargo.toml index 1ca91515..ded2ac0a 100644 --- a/nomad/base/Cargo.toml +++ b/nomad/base/Cargo.toml @@ -11,12 +11,10 @@ nomad-signature = { path = "../signature", default-features = false } # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -frame-support = { version = "4.0.0-dev", default-features = false } -primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0", default-features = false } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } + +sp-runtime = { version = "7", default-features = false, optional = true } # Eth ethers-signers = { version = "1", optional = true } @@ -31,12 +29,10 @@ std = [ "serde", "ethers-signers", "once_cell", - "primitive-types/serde", "codec/std", "nomad-signature/std", "scale-info/std", - "frame-support/std", "nomad-core/std", - "sp-std/std", "sp-core/std", + "sp-runtime/std", ] diff --git 
a/nomad/base/src/lib.rs b/nomad/base/src/lib.rs index c5878520..0da94e9a 100644 --- a/nomad/base/src/lib.rs +++ b/nomad/base/src/lib.rs @@ -1,14 +1,16 @@ #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode, MaxEncodedLen}; use nomad_core::{home_domain_hash, to_eth_signed_message_hash, NomadState, SignedUpdate, Update}; use nomad_signature::SignatureError; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; +use scale_info::TypeInfo; use sp_core::{H160, H256}; +use sp_runtime::RuntimeDebug; #[cfg(feature = "std")] pub mod testing; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; #[derive(Clone, Copy, Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] diff --git a/nomad/core/Cargo.toml b/nomad/core/Cargo.toml index 2e719a28..9cbe7496 100644 --- a/nomad/core/Cargo.toml +++ b/nomad/core/Cargo.toml @@ -10,13 +10,12 @@ nomad-signature = { path = "../signature", default-features = false } # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -frame-support = { version = "4.0.0-dev", default-features = false } primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-runtime = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0", default-features = false } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } +sp-std = { version = "*", default-features = false } + +sp-runtime = { version = "7", default-features = false, optional = true } # Eth ethers-core = { version = "1", optional = true } @@ -39,8 
+38,8 @@ std = [ "nomad-signature/std", "codec/std", "scale-info/std", - "frame-support/std", "sp-runtime/std", + "sp-std/std", ] runtime-benchmarks = [] diff --git a/nomad/core/src/nomad_message.rs b/nomad/core/src/nomad_message.rs index 23572259..1902a3e8 100644 --- a/nomad/core/src/nomad_message.rs +++ b/nomad/core/src/nomad_message.rs @@ -1,5 +1,7 @@ -use frame_support::{pallet_prelude::*, traits::Get}; -use sp_core::H256; +use codec::{Decode, Encode}; +use scale_info::TypeInfo; +use sp_core::{bounded::BoundedVec, Get, H256}; +use sp_runtime::RuntimeDebug; use sp_std::{mem::size_of, vec::Vec}; /// Size of `NomadMessage` fields except `body`. @@ -53,16 +55,11 @@ impl> NomadMessage { #[cfg(test)] mod tests { - use core::convert::TryInto; - - use frame_support::{parameter_types, BoundedVec}; + use super::*; + use sp_core::ConstU32; use sp_std::mem::size_of_val; - use super::{NomadMessage, NON_BODY_LENGTH}; - - parameter_types! { - const MaxBodyLen :u32 = 1024; - } + type MaxBodyLen = ConstU32<1024>; /// Double checks that constant `NON_BODY_LENGTH` will be synchronized with actual #[test] diff --git a/nomad/core/src/state.rs b/nomad/core/src/state.rs index 7691b781..b5060ed0 100644 --- a/nomad/core/src/state.rs +++ b/nomad/core/src/state.rs @@ -1,4 +1,7 @@ -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; +use sp_runtime::RuntimeDebug; + #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; diff --git a/nomad/core/src/update.rs b/nomad/core/src/update.rs index befe19d0..18b984df 100644 --- a/nomad/core/src/update.rs +++ b/nomad/core/src/update.rs @@ -1,8 +1,10 @@ -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode}; use nomad_signature::{hash_message, Signature, SignatureError}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{H160, H256}; +use sp_runtime::RuntimeDebug; use crate::utils::home_domain_hash; diff --git 
a/nomad/core/src/update_v2.rs b/nomad/core/src/update_v2.rs index 6c6d277f..f4dd6e1f 100644 --- a/nomad/core/src/update_v2.rs +++ b/nomad/core/src/update_v2.rs @@ -1,10 +1,12 @@ #![allow(dead_code)] -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode}; use nomad_signature::{hash_message, Signature, SignatureError}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{H160, H256}; +use sp_runtime::RuntimeDebug; use crate::utils::home_domain_hash; diff --git a/nomad/merkle/Cargo.toml b/nomad/merkle/Cargo.toml index 62ec4113..29eb9d80 100644 --- a/nomad/merkle/Cargo.toml +++ b/nomad/merkle/Cargo.toml @@ -11,13 +11,10 @@ nomad-core = { path = "../core", default-features = false } # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -frame-support = { version = "4.0.0-dev", default-features = false } -primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-runtime = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0-dev", default-features = false } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } + +frame-support = { version = "4.0.0-dev", default-features = false, optional = true } # 3rd-party hex-literal = "0.3.4" @@ -41,8 +38,8 @@ default = ["std"] std = [ "serde", "nomad-core/std", - "primitive-types/serde", "codec/std", "scale-info/std", + "sp-core/std", "frame-support/std", ] diff --git a/nomad/signature/Cargo.toml b/nomad/signature/Cargo.toml index 4bc26073..e58d78ef 100644 --- a/nomad/signature/Cargo.toml +++ b/nomad/signature/Cargo.toml @@ -18,14 +18,11 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] # Substrate & Parity codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } + frame-support = { version = "4.0.0-dev", default-features = false } -primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -rlp = { version = "0.5.0", default-features = false } -rlp-derive = { version = "0.1.0", default-features = false } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0", default-features = false } +sp-runtime = { version = "7", default-features = false, optional = true } # Eth ethers-core = { version = "1", default-features = false, optional = true } @@ -47,9 +44,10 @@ default = ["std"] std = [ "serde", "hex/std", - "primitive-types/serde", - "codec/std", "scale-info/std", + "codec/std", + "sp-core/std", + "sp-runtime/std", "frame-support/std", "ethers-core", ] diff --git a/nomad/signature/src/signature.rs b/nomad/signature/src/signature.rs index 6f924fe1..6c87b074 100644 --- a/nomad/signature/src/signature.rs +++ b/nomad/signature/src/signature.rs @@ -1,12 +1,11 @@ // Code adapted from: https://github.com/gakonst/ethers-rs/blob/master/ethers-core/src/types/signature.rs +use crate::utils::hash_message; use alloc::{borrow::ToOwned, string::String, vec::Vec}; +use codec::{Decode, Encode}; use core::convert::TryFrom; -#[cfg(feature = "std")] -use core::{fmt, str::FromStr}; - use elliptic_curve::{consts::U32, sec1::ToEncodedPoint as _}; -use frame_support::{pallet_prelude::*, sp_runtime::traits::Keccak256}; +use frame_support::ensure; use generic_array::GenericArray; use k256::{ ecdsa::{ @@ -15,12 +14,15 @@ use k256::{ }, 
PublicKey as K256PublicKey, }; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::{Hasher, H160, H256, U256}; +use scale_info::TypeInfo; +use sp_core::{Hasher as _, H160, H256, U256}; +use sp_runtime::{traits::Keccak256, RuntimeDebug}; use thiserror_no_std::Error; -use crate::utils::hash_message; +#[cfg(feature = "std")] +use core::{fmt, str::FromStr}; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; type Address = H160; From 3cfa635e3a35e236b5ca32c4f6e2eb1917459730 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 11:16:19 +0200 Subject: [PATCH 74/87] Enable CI Benchmarks --- .github/workflows/default.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index fa3aae9f..0826f883 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -72,3 +72,26 @@ jobs: - name: Cleanup run: find . -name \*.profraw -type f -exec rm -f {} + + + # Run Bencmarks + - name: Run benchmark + run: cargo bench --features "avail-core/runtime" | tee output.txt + - name: Download previous benchmark data + uses: actions/cache@v1 + with: + path: ./cache + key: ${{ runner.os }}-benchmark + - name: Store benchmark result + uses: benchmark-action/github-action-benchmark@v1 + with: + tool: 'cargo' + output-file-path: output.txt + # GitHub API token to make a commit comment + github-token: ${{ secrets.GITHUB_TOKEN }} + # Enable alert commit comment + comment-on-alert: true + fail-on-alert: true + summary-always: true + # Mention users in the commit comment + # alert-comment-cc-users: '@fmiguelgarcia,@delroybosco,@prabal-banerjee,@kroos47,@jakubcech' + From 5b91d324a7aaec64d5b1645dfb45e87e5100883d Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 11:18:43 +0200 Subject: [PATCH 75/87] Add temporary Will's branch --- .github/workflows/default.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/.github/workflows/default.yml b/.github/workflows/default.yml index 0826f883..a66f2b3b 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -4,10 +4,12 @@ on: branches: - main - develop + - will/grid-refactor pull_request: branches: - main - develop + - will/grid-refactor jobs: build_and_test: From d6785177fafcd0637be5c8a0228ef7d700acdcba Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 11:53:10 +0200 Subject: [PATCH 76/87] Fix check fmt on CI --- .github/workflows/default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index a66f2b3b..b4df3782 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -45,7 +45,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: fmt - args: --check --features "avail-core/runtime" + args: --check - name: Check Clippy uses: actions-rs/cargo@v1 From 4e23332fde79b89a55f27e3452faa7d9088a5934 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 12:02:44 +0200 Subject: [PATCH 77/87] Fix clippy on CI --- .github/workflows/default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index b4df3782..dc6790d3 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -51,7 +51,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - args: --check --features "avail-core/runtime" + args: --check - name: Run tests uses: actions-rs/cargo@v1 From 486d9ce5e74305d89907a86fcc41e24c1bd457df Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 12:07:38 +0200 Subject: [PATCH 78/87] Fix check fmt on CI --- .github/workflows/default.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index dc6790d3..9409d332 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ 
-51,7 +51,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - args: --check + args: --workspace --features "avail-core/runtime" - name: Run tests uses: actions-rs/cargo@v1 From 4193fda0ee380b6cb7f50f9ec4efa48c55103c6f Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 14:43:32 +0200 Subject: [PATCH 79/87] Remove `Criterion` on CI --- .github/workflows/default.yml | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index 9409d332..3417eec6 100644 --- a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -74,26 +74,3 @@ jobs: - name: Cleanup run: find . -name \*.profraw -type f -exec rm -f {} + - - # Run Bencmarks - - name: Run benchmark - run: cargo bench --features "avail-core/runtime" | tee output.txt - - name: Download previous benchmark data - uses: actions/cache@v1 - with: - path: ./cache - key: ${{ runner.os }}-benchmark - - name: Store benchmark result - uses: benchmark-action/github-action-benchmark@v1 - with: - tool: 'cargo' - output-file-path: output.txt - # GitHub API token to make a commit comment - github-token: ${{ secrets.GITHUB_TOKEN }} - # Enable alert commit comment - comment-on-alert: true - fail-on-alert: true - summary-always: true - # Mention users in the commit comment - # alert-comment-cc-users: '@fmiguelgarcia,@delroybosco,@prabal-banerjee,@kroos47,@jakubcech' - From 30f57f742f77b83392726e96ae89bddc5e156af3 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 10 Jul 2023 16:30:08 +0200 Subject: [PATCH 80/87] Bump up versions --- Cargo.lock | 12 ++++++------ kate/Cargo.toml | 2 +- kate/recovery/Cargo.toml | 2 +- nomad/base/Cargo.toml | 2 +- nomad/core/Cargo.toml | 2 +- nomad/merkle/Cargo.toml | 2 +- nomad/signature/Cargo.toml | 2 +- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 77767f84..ccc6637a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1960,7 +1960,7 @@ 
dependencies = [ [[package]] name = "kate" -version = "0.7.1" +version = "0.8.0" dependencies = [ "avail-core", "criterion", @@ -1990,7 +1990,7 @@ dependencies = [ [[package]] name = "kate-recovery" -version = "0.8.1" +version = "0.9.0" dependencies = [ "avail-core", "derive_more", @@ -2251,7 +2251,7 @@ checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" [[package]] name = "nomad-base" -version = "0.1.3" +version = "0.1.4" dependencies = [ "ethers-signers", "nomad-core", @@ -2266,7 +2266,7 @@ dependencies = [ [[package]] name = "nomad-core" -version = "0.1.3" +version = "0.1.4" dependencies = [ "async-trait", "ethers-core", @@ -2284,7 +2284,7 @@ dependencies = [ [[package]] name = "nomad-merkle" -version = "0.1.1" +version = "0.1.2" dependencies = [ "ethers-core", "frame-support", @@ -2302,7 +2302,7 @@ dependencies = [ [[package]] name = "nomad-signature" -version = "0.1.1" +version = "0.1.2" dependencies = [ "byte-slice-cast", "elliptic-curve", diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 998b1f92..1e255123 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "kate" -version = "0.7.1" +version = "0.8.0" authors = ["Denis Ermolin "] edition = "2021" license = "Apache-2.0" diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index f25f875c..9c84a83e 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "kate-recovery" -version = "0.8.1" +version = "0.9.0" authors = ["Denis Ermolin "] edition = "2018" license = "Apache-2.0" diff --git a/nomad/base/Cargo.toml b/nomad/base/Cargo.toml index ded2ac0a..65907b40 100644 --- a/nomad/base/Cargo.toml +++ b/nomad/base/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nomad-base" -version = "0.1.3" +version = "0.1.4" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/nomad/core/Cargo.toml b/nomad/core/Cargo.toml index 9cbe7496..b1aa67c5 
100644 --- a/nomad/core/Cargo.toml +++ b/nomad/core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nomad-core" -version = "0.1.3" +version = "0.1.4" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/nomad/merkle/Cargo.toml b/nomad/merkle/Cargo.toml index 29eb9d80..fc49e9b6 100644 --- a/nomad/merkle/Cargo.toml +++ b/nomad/merkle/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nomad-merkle" -version = "0.1.1" +version = "0.1.2" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/nomad/signature/Cargo.toml b/nomad/signature/Cargo.toml index e58d78ef..25d6d5e1 100644 --- a/nomad/signature/Cargo.toml +++ b/nomad/signature/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nomad-signature" -version = "0.1.1" +version = "0.1.2" authors = ["Luke Tchang "] edition = "2021" license = "MIT OR Apache-2.0" From 886c6f239810a38164d99fc6ec69633ccc9de1eb Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Tue, 11 Jul 2023 00:56:55 +0200 Subject: [PATCH 81/87] Use `collect_into_vec` --- kate/src/com.rs | 30 ++++++++++++++++++------------ kate/src/gridgen/mod.rs | 11 ----------- 2 files changed, 18 insertions(+), 23 deletions(-) diff --git a/kate/src/com.rs b/kate/src/com.rs index ca02e3a2..7c0be71c 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -370,11 +370,6 @@ pub fn build_proof( let total_start = Instant::now(); // attempt to parallelly compute proof for all requested cells - // #[cfg(feature = "parallel")] - // let cell_iter = cells - // .into_par_iter() - // .zip(result_bytes.par_chunks_exact_mut(SPROOF_SIZE)); - // #[cfg(not(feature = "parallel"))] let cell_iter = cells.iter().zip(result_bytes.chunks_exact_mut(SPROOF_SIZE)); for (cell, res) in cell_iter { @@ -383,15 +378,23 @@ pub fn build_proof( res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! 
} else { let c_index = usize::try_from(cell.col.0)?; + let get_ext_data_matrix = + |j: usize| ext_data_matrix[r_index.saturating_add(j.saturating_mul(ext_rows))]; // construct polynomial per extended matrix row #[cfg(feature = "parallel")] - let ext_cols_iter = (0..ext_cols).into_par_iter(); + let row = { + let mut row = + Vec::with_capacity(ext_cols.checked_add(1).ok_or(Error::BlockTooBig)?); + (0..ext_cols) + .into_par_iter() + .map(get_ext_data_matrix) + .collect_into_vec(&mut row); + row + }; #[cfg(not(feature = "parallel"))] - let ext_cols_iter = 0..ext_cols; - - let row = ext_cols_iter - .map(|j| ext_data_matrix[r_index.saturating_add(j.saturating_mul(ext_rows))]) + let row = (0..ext_cols) + .map(get_ext_data_matrix) .collect::>(); // row has to be a power of 2, otherwise interpolate() function panics TODO: cache evaluations @@ -462,13 +465,16 @@ pub fn par_build_commitments( let row_eval_domain = EvaluationDomain::new(block_dims_cols)?; let start = Instant::now(); - let commitments = (0..extended_rows) + let mut commitments = + Vec::with_capacity(extended_rows.checked_add(1).ok_or(Error::BlockTooBig)?); + (0..extended_rows) .into_par_iter() .map(|row_idx| { let ext_row = get_row(&ext_matrix, row_idx); commit(&prover_key, row_eval_domain, ext_row) }) - .collect::>(); + .collect_into_vec(&mut commitments); + let commitments = commitments.into_iter().collect::, _>>()?; let commitments_bytes = commitments .into_par_iter() diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 89794448..f073b6d0 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ -44,17 +44,6 @@ macro_rules! cfg_iter { }}; } -/* -macro_rules! 
cfg_into_iter { - ($e: expr) => {{ - #[cfg(feature = "parallel")] - let result = $e.into_par_iter(); - #[cfg(not(feature = "parallel"))] - let result = $e.into_iter(); - result - }}; -}*/ - pub const SCALAR_SIZE: usize = 32; pub type ArkScalar = crate::pmp::m1_blst::Fr; pub type Commitment = crate::pmp::Commitment; From 2f8699698be8f5a7fa39e994385259ab5b124ab0 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Tue, 11 Jul 2023 11:21:18 +0200 Subject: [PATCH 82/87] Use `Dimension::weight/height()` --- kate/recovery/src/com.rs | 4 ++-- kate/recovery/src/matrix.rs | 6 +++--- kate/recovery/src/proof.rs | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index e7ca656d..a77f4972 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -218,7 +218,7 @@ pub fn reconstruct_columns( .iter() .map(|(&col, cells)| { ensure!( - cells.len() >= dimensions.rows().get().into(), + cells.len() >= dimensions.height(), ReconstructionError::InvalidColumn(col) ); @@ -240,7 +240,7 @@ fn reconstruct_available( cells: Vec, ) -> Result, ReconstructionError> { let columns = map_cells(dimensions, cells)?; - let rows: usize = dimensions.rows().get().into(); + let rows: usize = dimensions.height(); let scalars = (0..dimensions.cols().get()) .map(|col| match columns.get(&col) { diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index f87d70cc..a3ab97cb 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -208,7 +208,7 @@ impl Dimensions { /// Row size in bytes pub fn row_byte_size(&self) -> usize { - CHUNK_SIZE * usize::from(self.cols.get()) + CHUNK_SIZE * self.width() } /// Extended matrix rows count. @@ -301,8 +301,8 @@ impl Dimensions { /// Creates iterator over data cells in data matrix (used to retrieve data from the matrix). 
pub fn iter_data(&self) -> impl Iterator { - let rows = self.rows.get().into(); - let cols = self.cols.get().into(); + let rows = self.height(); + let cols = self.width(); (0..rows).flat_map(move |row| (0..cols).map(move |col| (row, col))) } diff --git a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index b0c97657..17acf619 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ -49,7 +49,7 @@ pub fn verify( commitment_to_polynomial, }; - let cols: usize = dimensions.cols().get().into(); + let cols: usize = dimensions.width(); let point = EvaluationDomain::new(cols) .map_err(|_| Error::InvalidDomain)? .elements() From 544cc3e9e0052a831034f4bd4010c158d55faae5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Miguel=20Garc=C3=ADa?= Date: Fri, 21 Jul 2023 01:49:37 +0200 Subject: [PATCH 83/87] NAlgebra replaces custom grid (#33) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Simplify `Debug` and `RuntimeDebug` * Clean some deps * Remove asserts from production code * CodeReview: Keep warning for future * Replace `Grid` by `NAlgebra` * Full replacement All UT pass * Fix example * Use keccak256. 
Signed-off-by: Sasa Prsic * Add benchmark on reconstruction * Add benchmark * Add benchmark * Reduce memory copies * Some improvements * Refactor `avail-core` add `runtime` feature * MerkleProof under 'runtime' feature * DataLookup simplification * Add UT for `DataLookup`'s serde * Remove `AppIndex` & enforce `AppId` * Add `CompactDataLookup` * Moving `nomad` code * Improve `no-std` segragation * Dedup `Index` related stuff * Clean some deps in `nomad` * Enable CI Benchmarks * Add temporary Will's branch * Fix check fmt on CI * Fix clippy on CI * Fix check fmt on CI * Remove `Criterion` on CI * Bump up versions * Use `collect_into_vec` * Use `Dimension::weight/height()` --------- Signed-off-by: Sasa Prsic Co-authored-by: Sasa Prsic Co-authored-by: Saša Pršić <93726535+0xSasaPrsic@users.noreply.github.com> --- .github/workflows/default.yml | 10 +- Cargo.lock | 1029 ++++++++--------- Cargo.toml | 41 +- core/Cargo.toml | 66 ++ core/src/app_extrinsic.rs | 92 ++ .../src/asdr.rs | 25 +- core/src/bench_randomness.rs | 20 + core/src/constants.rs | 28 + core/src/data_lookup/compact.rs | 59 + core/src/data_lookup/mod.rs | 234 ++++ {primitives/avail => core}/src/data_proof.rs | 65 +- .../src/header/extension/mod.rs | 16 +- .../avail => core}/src/header/extension/v1.rs | 11 +- .../avail => core}/src/header/extension/v2.rs | 11 +- .../src/header/extension/v_test.rs | 9 - {primitives/avail => core}/src/header/mod.rs | 33 +- .../avail => core}/src/kate_commitment.rs | 27 +- core/src/keccak256.rs | 49 + core/src/lib.rs | 155 +++ .../avail => core}/src/opaque_extrinsic.rs | 4 +- core/src/sha2.rs | 48 + core/src/traits.rs | 31 + core/src/traits/extended_header.rs | 22 + .../src => core/src/traits}/get_app_id.rs | 4 +- deny.toml | 290 +++++ kate/Cargo.toml | 74 +- kate/benches/kzg.rs | 210 ---- kate/benches/reconstruct.data.json | 135 +++ kate/benches/reconstruct.rs | 160 +++ kate/examples/multiproof_verification.rs | 58 +- kate/grid/Cargo.toml | 11 - kate/grid/src/dims.rs | 110 -- 
kate/grid/src/grid.rs | 285 ----- kate/grid/src/lib.rs | 12 - kate/recovery/Cargo.toml | 44 +- kate/recovery/src/com.rs | 421 +++---- kate/recovery/src/commitments.rs | 151 ++- kate/recovery/src/data.rs | 21 +- kate/recovery/src/index.rs | 118 -- kate/recovery/src/lib.rs | 6 +- kate/recovery/src/matrix.rs | 223 +++- kate/recovery/src/proof.rs | 36 +- kate/recovery/src/sparse_slice_read.rs | 51 + kate/recovery/src/testnet.rs | 4 +- kate/src/com.rs | 963 +++++++-------- kate/src/gridgen/mod.rs | 443 +++---- kate/src/gridgen/tests/commitments.rs | 79 +- kate/src/gridgen/tests/formatting.rs | 122 +- kate/src/gridgen/tests/mod.rs | 45 +- kate/src/gridgen/tests/reconstruction.rs | 105 +- kate/src/lib.rs | 75 +- kate/src/metrics.rs | 4 +- .../nomad-base => nomad/base}/Cargo.toml | 18 +- .../nomad-base => nomad/base}/src/lib.rs | 8 +- .../nomad-base => nomad/base}/src/testing.rs | 0 .../nomad-core => nomad/core}/Cargo.toml | 15 +- .../nomad-core => nomad/core}/src/lib.rs | 0 .../core}/src/nomad_message.rs | 17 +- .../nomad-core => nomad/core}/src/state.rs | 5 +- .../core}/src/test_utils.rs | 0 .../core}/src/typed_message.rs | 0 .../nomad-core => nomad/core}/src/update.rs | 4 +- .../core}/src/update_v2.rs | 4 +- .../nomad-core => nomad/core}/src/utils.rs | 0 {primitives/nomad => nomad}/merkle/Cargo.toml | 23 +- .../merkle/fixtures/merkle.json | 0 .../nomad => nomad}/merkle/src/error.rs | 0 {primitives/nomad => nomad}/merkle/src/lib.rs | 0 .../nomad => nomad}/merkle/src/light.rs | 0 .../nomad => nomad}/merkle/src/proof.rs | 0 .../nomad => nomad}/merkle/src/test_utils.rs | 0 .../nomad => nomad}/merkle/src/utils.rs | 0 .../nomad => nomad}/signature/Cargo.toml | 18 +- .../nomad => nomad}/signature/README.md | 0 .../nomad => nomad}/signature/src/lib.rs | 0 .../signature/src/signature.rs | 18 +- .../nomad => nomad}/signature/src/utils.rs | 0 primitives/avail/Cargo.toml | 57 - primitives/avail/src/asdr.rs | 36 - primitives/avail/src/lib.rs | 81 -- primitives/avail/src/sha2.rs | 41 
- primitives/avail/src/traits.rs | 76 -- primitives/types/Cargo.toml | 24 - primitives/types/src/data_lookup.rs | 150 --- primitives/types/src/lib.rs | 153 --- 85 files changed, 3577 insertions(+), 3516 deletions(-) create mode 100644 core/Cargo.toml create mode 100644 core/src/app_extrinsic.rs rename primitives/avail/src/asdr/app_unchecked_extrinsic.rs => core/src/asdr.rs (97%) create mode 100644 core/src/bench_randomness.rs create mode 100644 core/src/constants.rs create mode 100644 core/src/data_lookup/compact.rs create mode 100644 core/src/data_lookup/mod.rs rename {primitives/avail => core}/src/data_proof.rs (67%) rename {primitives/avail => core}/src/header/extension/mod.rs (88%) rename {primitives/avail => core}/src/header/extension/v1.rs (67%) rename {primitives/avail => core}/src/header/extension/v2.rs (68%) rename {primitives/avail => core}/src/header/extension/v_test.rs (73%) rename {primitives/avail => core}/src/header/mod.rs (95%) rename {primitives/avail => core}/src/kate_commitment.rs (82%) create mode 100644 core/src/keccak256.rs create mode 100644 core/src/lib.rs rename {primitives/avail => core}/src/opaque_extrinsic.rs (95%) create mode 100644 core/src/sha2.rs create mode 100644 core/src/traits.rs create mode 100644 core/src/traits/extended_header.rs rename {primitives/types/src => core/src/traits}/get_app_id.rs (93%) create mode 100644 deny.toml delete mode 100644 kate/benches/kzg.rs create mode 100644 kate/benches/reconstruct.data.json create mode 100644 kate/benches/reconstruct.rs delete mode 100644 kate/grid/Cargo.toml delete mode 100644 kate/grid/src/dims.rs delete mode 100644 kate/grid/src/grid.rs delete mode 100644 kate/grid/src/lib.rs delete mode 100644 kate/recovery/src/index.rs create mode 100644 kate/recovery/src/sparse_slice_read.rs rename {primitives/nomad/nomad-base => nomad/base}/Cargo.toml (56%) rename {primitives/nomad/nomad-base => nomad/base}/src/lib.rs (96%) rename {primitives/nomad/nomad-base => nomad/base}/src/testing.rs 
(100%) rename {primitives/nomad/nomad-core => nomad/core}/Cargo.toml (70%) rename {primitives/nomad/nomad-core => nomad/core}/src/lib.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/nomad_message.rs (90%) rename {primitives/nomad/nomad-core => nomad/core}/src/state.rs (79%) rename {primitives/nomad/nomad-core => nomad/core}/src/test_utils.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/typed_message.rs (100%) rename {primitives/nomad/nomad-core => nomad/core}/src/update.rs (96%) rename {primitives/nomad/nomad-core => nomad/core}/src/update_v2.rs (96%) rename {primitives/nomad/nomad-core => nomad/core}/src/utils.rs (100%) rename {primitives/nomad => nomad}/merkle/Cargo.toml (56%) rename {primitives/nomad => nomad}/merkle/fixtures/merkle.json (100%) rename {primitives/nomad => nomad}/merkle/src/error.rs (100%) rename {primitives/nomad => nomad}/merkle/src/lib.rs (100%) rename {primitives/nomad => nomad}/merkle/src/light.rs (100%) rename {primitives/nomad => nomad}/merkle/src/proof.rs (100%) rename {primitives/nomad => nomad}/merkle/src/test_utils.rs (100%) rename {primitives/nomad => nomad}/merkle/src/utils.rs (100%) rename {primitives/nomad => nomad}/signature/Cargo.toml (73%) rename {primitives/nomad => nomad}/signature/README.md (100%) rename {primitives/nomad => nomad}/signature/src/lib.rs (100%) rename {primitives/nomad => nomad}/signature/src/signature.rs (97%) rename {primitives/nomad => nomad}/signature/src/utils.rs (100%) delete mode 100644 primitives/avail/Cargo.toml delete mode 100644 primitives/avail/src/asdr.rs delete mode 100644 primitives/avail/src/lib.rs delete mode 100644 primitives/avail/src/sha2.rs delete mode 100644 primitives/avail/src/traits.rs delete mode 100644 primitives/types/Cargo.toml delete mode 100644 primitives/types/src/data_lookup.rs delete mode 100644 primitives/types/src/lib.rs diff --git a/.github/workflows/default.yml b/.github/workflows/default.yml index 4ca4a8dd..3417eec6 100644 --- 
a/.github/workflows/default.yml +++ b/.github/workflows/default.yml @@ -4,10 +4,12 @@ on: branches: - main - develop + - will/grid-refactor pull_request: branches: - main - develop + - will/grid-refactor jobs: build_and_test: @@ -45,11 +47,17 @@ jobs: command: fmt args: --check + - name: Check Clippy + uses: actions-rs/cargo@v1 + with: + command: clippy + args: --workspace --features "avail-core/runtime" + - name: Run tests uses: actions-rs/cargo@v1 with: command: test - args: --workspace + args: --workspace --features "avail-core/runtime" env: RUSTFLAGS: "-C instrument-coverage" LLVM_PROFILE_FILE: "profile-%p-%m.profraw" diff --git a/Cargo.lock b/Cargo.lock index 055256e3..5196d5ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -23,11 +23,11 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" dependencies = [ - "gimli 0.27.2", + "gimli 0.27.3", ] [[package]] @@ -38,9 +38,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aes" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433cfd6710c9986c576a25ca913c39d66a6474107b406f34f91d4a8923395241" +checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" dependencies = [ "cfg-if", "cipher", @@ -59,7 +59,7 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -77,13 +77,19 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" dependencies = [ "memchr", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + [[package]] name = "android_system_properties" version = "0.1.5" @@ -93,6 +99,12 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "ansi_term" version = "0.12.1" @@ -102,12 +114,27 @@ dependencies = [ "winapi", ] +[[package]] +name = "anstyle" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" + [[package]] name = "anyhow" version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +[[package]] +name = "approx" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" +dependencies = [ + "num-traits", +] + [[package]] name = "ark-bls12-381" version = "0.4.0" @@ -246,30 +273,19 @@ checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "arrayvec" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "async-trait" -version = "0.1.68" +version = "0.1.71" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" +checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", -] - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", + "syn 2.0.25", ] [[package]] @@ -290,18 +306,43 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "avail-core" +version = "0.5.0" +dependencies = [ + "beefy-merkle-tree", + "derive_more", + "frame-support", + "hash256-std-hasher", + "hex", + "hex-literal", + "log", + "parity-scale-codec", + "scale-info", + "serde", + "serde_json", + "sp-arithmetic", + "sp-core", + "sp-runtime", + "sp-runtime-interface", + "sp-std", + "sp-trie", + "test-case", + "thiserror-no-std", +] + [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ - "addr2line 0.19.0", + "addr2line 0.20.0", "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.30.3", + "object 0.31.1", "rustc-demangle", ] @@ -397,6 +438,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + [[package]] name = "bitvec" version = "0.17.4" @@ -470,7 +517,7 @@ dependencies = 
[ [[package]] name = "blst" version = "0.3.10" -source = "git+https://github.com/aphoh/blst?rev=556e037926d9c526c2eb6cb1522bea39690416ea#556e037926d9c526c2eb6cb1522bea39690416ea" +source = "git+https://github.com/availproject/blst?tag=v0.3.10#556e037926d9c526c2eb6cb1522bea39690416ea" dependencies = [ "cc", "glob", @@ -502,6 +549,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" +[[package]] +name = "bytemuck" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" + [[package]] name = "byteorder" version = "1.4.3" @@ -546,16 +599,43 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.24" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" dependencies = [ + "android-tzdata", "iana-time-zone", - "num-integer", "num-traits", "winapi", ] +[[package]] +name = "ciborium" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" + +[[package]] +name = "ciborium-ll" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "cipher" version = "0.4.4" @@ 
-577,15 +657,29 @@ dependencies = [ [[package]] name = "clap" -version = "2.34.0" +version = "4.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1640e5cc7fb47dbb8338fd471b105e7ed6c3cb2aeb00c2e067127ffd3764a05d" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +checksum = "98c59138d527eeaf9b53f35a77fcc1fad9d883116070c63d5de1c7dc7b00c72b" dependencies = [ - "bitflags", - "textwrap", - "unicode-width", + "anstyle", + "clap_lex", ] +[[package]] +name = "clap_lex" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" + [[package]] name = "coins-bip32" version = "0.7.0" @@ -596,12 +690,12 @@ dependencies = [ "bs58", "coins-core", "digest 0.10.7", - "getrandom 0.2.9", + "getrandom 0.2.10", "hmac 0.12.1", "k256", "lazy_static", "serde", - "sha2 0.10.6", + "sha2 0.10.7", "thiserror", ] @@ -613,12 +707,12 @@ checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" dependencies = [ "bitvec 0.17.4", "coins-bip32", - "getrandom 0.2.9", + "getrandom 0.2.10", "hex", "hmac 0.12.1", "pbkdf2 0.11.0", "rand 0.8.5", - "sha2 0.10.6", + "sha2 0.10.7", "thiserror", ] @@ -638,16 +732,16 @@ dependencies = [ "ripemd", "serde", "serde_derive", - "sha2 0.10.6", + "sha2 0.10.7", "sha3", "thiserror", ] [[package]] name = "const-oid" -version = "0.9.2" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913" +checksum = "795bc6e66a8e340f075fcf6227e417a2dc976b92b91f3cdc778bb858778b6747" [[package]] name = "convert_case" @@ -681,9 +775,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.7" +version = "0.2.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -708,24 +802,24 @@ dependencies = [ [[package]] name = "criterion" -version = "0.3.6" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" dependencies = [ - "atty", + "anes", "cast", + "ciborium", "clap", "criterion-plot", - "csv", + "is-terminal", "itertools 0.10.5", - "lazy_static", "num-traits", + "once_cell", "oorandom", "plotters", "rayon", "regex", "serde", - "serde_cbor", "serde_derive", "serde_json", "tinytemplate", @@ -734,9 +828,9 @@ dependencies = [ [[package]] name = "criterion-plot" -version = "0.4.5" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", "itertools 0.10.5", @@ -765,22 +859,22 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.8.0", + "memoffset 0.9.0", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" 
dependencies = [ "cfg-if", ] @@ -833,27 +927,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "csv" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad" -dependencies = [ - "csv-core", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "csv-core" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" -dependencies = [ - "memchr", -] - [[package]] name = "ctr" version = "0.9.2" @@ -889,46 +962,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "da-primitives" -version = "0.4.6" -dependencies = [ - "beefy-merkle-tree", - "da-types", - "derive_more", - "frame-support", - "hash256-std-hasher", - "hex-literal", - "log", - "parity-scale-codec", - "parity-util-mem", - "scale-info", - "serde", - "serde_json", - "sp-core", - "sp-io", - "sp-runtime", - "sp-runtime-interface", - "sp-std 4.0.0", - "sp-trie", - "test-case", - "thiserror-no-std", -] - -[[package]] -name = "da-types" -version = "0.4.4" -dependencies = [ - "derive_more", - "num-traits", - "parity-scale-codec", - "parity-util-mem", - "scale-info", - "serde", - "sp-debug-derive 5.0.0", - "sp-runtime", -] - [[package]] name = "der" version = "0.6.1" @@ -1165,6 +1198,12 @@ version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e48c92028aaa870e83d51c64e5d4e0b6981b360c522198c23959f219a4e1b15b" +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = "errno" version = "0.2.8" @@ -1213,7 +1252,7 @@ dependencies = [ "scrypt", "serde", "serde_json", - "sha2 0.10.6", + "sha2 0.10.7", "sha3", "thiserror", "uuid", @@ -1273,7 +1312,7 @@ version = "1.0.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" dependencies = [ - "arrayvec 0.7.2", + "arrayvec 0.7.4", "bytes", "chrono", "convert_case 0.6.0", @@ -1310,7 +1349,7 @@ dependencies = [ "ethers-core", "hex", "rand 0.8.5", - "sha2 0.10.6", + "sha2 0.10.7", "thiserror", ] @@ -1380,7 +1419,7 @@ name = "frame-support" version = "4.0.0-dev" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" dependencies = [ - "bitflags", + "bitflags 1.3.2", "frame-metadata", "frame-support-procedural", "impl-trait-for-tuples", @@ -1393,7 +1432,7 @@ dependencies = [ "serde", "smallvec", "sp-api", - "sp-arithmetic 6.0.0", + "sp-arithmetic", "sp-core", "sp-core-hashing-proc-macro", "sp-inherents", @@ -1401,7 +1440,7 @@ dependencies = [ "sp-runtime", "sp-staking", "sp-state-machine", - "sp-std 5.0.0", + "sp-std", "sp-tracing", "sp-weights", "tt-call", @@ -1506,7 +1545,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", ] [[package]] @@ -1571,9 +1610,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", "js-sys", @@ -1594,9 +1633,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "glob" @@ -1664,40 +1703,31 @@ dependencies = [ ] [[package]] -name = "heck" -version = "0.4.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "hermit-abi" -version = "0.1.19" +name = "hashbrown" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" [[package]] -name = "hermit-abi" -version = "0.2.6" +name = "heck" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +dependencies = [ + "serde", +] [[package]] name = "hex-literal" @@ -1747,9 +1777,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.56" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -1817,6 +1847,16 @@ dependencies = [ "serde", ] +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", +] + [[package]] name = "inout" version = "0.1.3" @@ -1852,15 +1892,26 @@ checksum = "59ce5ef949d49ee85593fc4d3f3f95ad61657076395cbbce23e2121fc5542074" [[package]] name = "io-lifetimes" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi 0.3.1", + "hermit-abi", "libc", "windows-sys 0.48.0", ] +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix 0.38.3", + "windows-sys 0.48.0", +] + [[package]] name = "itertools" version = "0.9.0" @@ -1881,15 +1932,15 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" +checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" [[package]] name = "js-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -1903,26 +1954,24 @@ dependencies = [ "cfg-if", "ecdsa", "elliptic-curve", - "sha2 0.10.6", + "sha2 0.10.7", "sha3", ] [[package]] name = "kate" -version = "0.7.1" +version = "0.8.0" dependencies = [ + "avail-core", "criterion", - "da-types", + "derive_more", "dusk-bytes", "dusk-plonk", - "frame-support", - "getrandom 0.2.9", "hex", "hex-literal", - 
"itertools 0.10.5", - "kate-grid", "kate-recovery", "log", + "nalgebra", "once_cell", "parity-scale-codec", "poly-multiproof", @@ -1932,36 +1981,32 @@ dependencies = [ "rayon", "serde", "serde_json", - "sp-arithmetic 7.0.0", - "sp-core-hashing", - "sp-std 4.0.0", + "sp-arithmetic", + "sp-core", "static_assertions", "test-case", -] - -[[package]] -name = "kate-grid" -version = "0.6.1" -dependencies = [ - "rayon", + "thiserror-no-std", ] [[package]] name = "kate-recovery" -version = "0.8.1" +version = "0.9.0" dependencies = [ + "avail-core", + "derive_more", "dusk-bytes", "dusk-plonk", - "getrandom 0.2.9", "hex", - "num", "once_cell", "parity-scale-codec", "rand 0.8.5", "rand_chacha 0.3.1", "serde", + "sp-arithmetic", + "sp-std", + "static_assertions", "test-case", - "thiserror", + "thiserror-no-std", ] [[package]] @@ -1981,9 +2026,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.144" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libm" @@ -2051,11 +2096,17 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + [[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg", "scopeguard", @@ -2063,12 +2114,9 @@ dependencies 
= [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "lru" @@ -2094,7 +2142,17 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1" dependencies = [ - "regex-automata", + "regex-automata 0.1.10", +] + +[[package]] +name = "matrixmultiply" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090126dc04f95dc0d1c1c91f61bdd474b3930ca064c1edc8a849da2c6cbe1e77" +dependencies = [ + "autocfg", + "rawpointer", ] [[package]] @@ -2114,9 +2172,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", ] @@ -2163,13 +2221,28 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.6.2" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" dependencies = [ "adler", ] +[[package]] +name = "nalgebra" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "307ed9b18cc2423f29e83f84fd23a8e73628727990181f18641a8b5dc2ab1caa" +dependencies = [ + "approx", + "matrixmultiply", + "num-complex", + "num-rational", + "num-traits", + "simba", + "typenum", +] + [[package]] name = "nohash-hasher" version = "0.2.0" @@ -2178,59 +2251,50 @@ 
checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" [[package]] name = "nomad-base" -version = "0.1.3" +version = "0.1.4" dependencies = [ "ethers-signers", - "frame-support", "nomad-core", "nomad-signature", "once_cell", "parity-scale-codec", - "primitive-types", "scale-info", "serde", "sp-core", - "sp-io", - "sp-std 4.0.0", + "sp-runtime", ] [[package]] name = "nomad-core" -version = "0.1.3" +version = "0.1.4" dependencies = [ "async-trait", "ethers-core", "ethers-signers", - "frame-support", "nomad-signature", "parity-scale-codec", "primitive-types", "scale-info", "serde", "sp-core", - "sp-io", "sp-runtime", - "sp-std 4.0.0", + "sp-std", "tiny-keccak", ] [[package]] name = "nomad-merkle" -version = "0.1.1" +version = "0.1.2" dependencies = [ "ethers-core", "frame-support", "hex-literal", "nomad-core", "parity-scale-codec", - "primitive-types", "scale-info", "serde", "serde_json", "sp-core", - "sp-io", - "sp-runtime", - "sp-std 4.0.0", "static_assertions", "thiserror-no-std", "tiny-keccak", @@ -2238,7 +2302,7 @@ dependencies = [ [[package]] name = "nomad-signature" -version = "0.1.1" +version = "0.1.2" dependencies = [ "byte-slice-cast", "elliptic-curve", @@ -2248,32 +2312,14 @@ dependencies = [ "hex", "k256", "parity-scale-codec", - "primitive-types", - "rlp", - "rlp-derive", "scale-info", "serde", "sp-core", - "sp-io", - "sp-std 4.0.0", + "sp-runtime", "thiserror-no-std", "tiny-keccak", ] -[[package]] -name = "num" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - [[package]] name = "num-bigint" version = "0.4.3" @@ -2300,7 +2346,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" dependencies 
= [ - "arrayvec 0.7.2", + "arrayvec 0.7.4", "itoa", ] @@ -2314,17 +2360,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-iter" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] name = "num-rational" version = "0.4.1" @@ -2349,11 +2384,11 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi", "libc", ] @@ -2365,24 +2400,24 @@ checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ "crc32fast", "hashbrown 0.12.3", - "indexmap", + "indexmap 1.9.3", "memchr", ] [[package]] name = "object" -version = "0.30.3" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "oorandom" @@ -2408,7 +2443,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" dependencies = [ - "arrayvec 0.7.2", + "arrayvec 0.7.4", "auto_impl", "bytes", "ethereum-types", @@ -2429,11 +2464,11 @@ dependencies = [ 
[[package]] name = "parity-scale-codec" -version = "3.5.0" +version = "3.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ddb756ca205bd108aee3c62c6d3c994e1df84a59b9d6d4a5ea42ee1fd5a9a28" +checksum = "756d439303e94fae44f288ba881ad29670c65b0c4b0e05674ca81061bb65f2c5" dependencies = [ - "arrayvec 0.7.2", + "arrayvec 0.7.4", "bitvec 1.0.1", "byte-slice-cast", "bytes", @@ -2444,9 +2479,9 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.1.4" +version = "3.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b26a931f824dd4eca30b3e43bb4f31cd5f0d3a403c5f5ff27106b805bfde7b" +checksum = "9d884d78fcf214d70b1e239fcd1c6e5e95aa3be1881918da2e488cc946c7a476" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2454,31 +2489,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "parity-util-mem" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d32c34f4f5ca7f9196001c0aba5a1f9a5a12382c8944b8b0f90233282d1e8f8" -dependencies = [ - "cfg-if", - "impl-trait-for-tuples", - "parity-util-mem-derive", - "parking_lot", - "primitive-types", - "winapi", -] - -[[package]] -name = "parity-util-mem-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f557c32c6d268a07c921471619c0295f5efad3a0e76d4f97a05c091a51d110b2" -dependencies = [ - "proc-macro2", - "syn 1.0.109", - "synstructure", -] - [[package]] name = "parity-wasm" version = "0.45.0" @@ -2497,15 +2507,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.7" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall", "smallvec", - "windows-sys 0.45.0", + 
"windows-targets", ] [[package]] @@ -2521,9 +2531,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" +checksum = "b4b27ab7be369122c218afc2079489cdcb4b517c0a3fc386ff11e1fedfcc2b35" [[package]] name = "pbkdf2" @@ -2543,14 +2553,14 @@ dependencies = [ "digest 0.10.7", "hmac 0.12.1", "password-hash", - "sha2 0.10.6", + "sha2 0.10.7", ] [[package]] name = "pin-project-lite" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" +checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" [[package]] name = "pin-utils" @@ -2570,9 +2580,9 @@ dependencies = [ [[package]] name = "plotters" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" dependencies = [ "num-traits", "plotters-backend", @@ -2583,15 +2593,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" [[package]] name = "plotters-svg" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" dependencies = [ "plotters-backend", ] @@ -2599,7 +2609,7 @@ dependencies = [ [[package]] name = "poly-multiproof" version = "0.0.1" 
-source = "git+https://github.com/aphoh/poly-multiproof?tag=v0.1.0#adca8b53aea6371981a289f9a74842b24fe0e197" +source = "git+https://github.com/availproject/poly-multiproof?tag=v0.0.1#cd8d31b7eb568dea2fddfc9237e2e31ea7ae7ed3" dependencies = [ "ark-bls12-381", "ark-ec", @@ -2667,9 +2677,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.58" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa1fb82fc0c281dd9671101b66b771ebbe1eaf967b96ac8740dcba4b70005ca8" +checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" dependencies = [ "unicode-ident", ] @@ -2681,7 +2691,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4e35c06b98bf36aba164cc17cb25f7e232f5c4aeea73baa14b8a9f0d92dbfa65" dependencies = [ "bit-set", - "bitflags", + "bitflags 1.3.2", "byteorder", "lazy_static", "num-traits", @@ -2711,9 +2721,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.27" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f4f29d145265ec1c483c7c654450edde0bfe043d3938d6972630663356d9500" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ "proc-macro2", ] @@ -2789,7 +2799,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", ] [[package]] @@ -2810,6 +2820,12 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rawpointer" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" + [[package]] name = "rayon" version = "1.7.0" @@ -2832,53 +2848,45 @@ dependencies = [ "num_cpus", ] -[[package]] -name = "redox_syscall" -version = 
"0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags", -] - [[package]] name = "redox_syscall" version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] name = "ref-cast" -version = "1.0.16" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43faa91b1c8b36841ee70e97188a869d37ae21759da6846d4be66de5bf7b12c" +checksum = "1641819477c319ef452a075ac34a4be92eb9ba09f6841f62d594d50fdcf0bf6b" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.16" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d2275aab483050ab2a7364c1a46604865ee7d6906684e08db0f090acf74f9e7" +checksum = "68bf53dad9b6086826722cdc99140793afd9f62faa14a1ad07eb4f955e7a7216" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", ] [[package]] name = "regex" -version = "1.8.2" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1a59b5d8e97dee33696bf13c5ba8ab85341c002922fba050069326b9c498974" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.2", + "regex-automata 0.3.2", + "regex-syntax 0.7.4", ] [[package]] @@ -2890,6 +2898,17 @@ dependencies = [ "regex-syntax 0.6.29", ] +[[package]] +name = "regex-automata" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.7.4", +] + [[package]] name = "regex-syntax" version = "0.6.29" @@ -2898,9 +2917,9 @@ checksum = 
"f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "rfc6979" @@ -2972,11 +2991,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.35.13" +version = "0.35.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727a1a6d65f786ec22df8a81ca3121107f235970dc1705ed681d3e6e8b9cd5f9" +checksum = "6380889b07a03b5ecf1d44dc9ede6fd2145d84b502a2a9ca0b03c48e0cc3220f" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno 0.2.8", "io-lifetimes 0.7.5", "libc", @@ -2986,23 +3005,36 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.19" +version = "0.37.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno 0.3.1", - "io-lifetimes 1.0.10", + "io-lifetimes 1.0.11", "libc", "linux-raw-sys 0.3.8", "windows-sys 0.48.0", ] +[[package]] +name = "rustix" +version = "0.38.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" +dependencies = [ + "bitflags 2.3.3", + "errno 0.3.1", + "libc", + "linux-raw-sys 0.4.3", + "windows-sys 0.48.0", +] + [[package]] name = "rustversion" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06" +checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" [[package]] name = "rusty-fork" @@ -3018,9 +3050,18 @@ 
dependencies = [ [[package]] name = "ryu" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" + +[[package]] +name = "safe_arch" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62a7484307bd40f8f7ccbacccac730108f2cae119a3b11c74485b48aa9ea650f" +dependencies = [ + "bytemuck", +] [[package]] name = "salsa20" @@ -3042,9 +3083,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "2.7.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b569c32c806ec3abdf3b5869fb8bf1e0d275a7c1c9b0b05603d9464632649edf" +checksum = "35c0a159d0c45c12b20c5a844feb1fe4bea86e28f17b92a5f0c42193634d3782" dependencies = [ "bitvec 1.0.1", "cfg-if", @@ -3056,9 +3097,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "2.6.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53012eae69e5aa5c14671942a5dd47de59d4cdcff8532a6dd0e081faf1119482" +checksum = "912e55f6d20e0e80d63733872b40e1227c0bce1e1ab81ba67d696339bfd7fd29" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -3099,7 +3140,7 @@ dependencies = [ "hmac 0.12.1", "pbkdf2 0.11.0", "salsa20", - "sha2 0.10.6", + "sha2 0.10.7", ] [[package]] @@ -3151,39 +3192,29 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" [[package]] name = "serde" -version = "1.0.163" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2" +checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" dependencies = [ "serde_derive", ] -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half", - "serde", -] - [[package]] name = "serde_derive" -version = "1.0.163" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e" +checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", ] [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" dependencies = [ "itoa", "ryu", @@ -3217,9 +3248,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ "cfg-if", "cpufeatures", @@ -3255,6 +3286,19 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "simba" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "061507c94fc6ab4ba1c9a0305018408e312e17c041eb63bef8aa726fa33aceae" +dependencies = [ + "approx", + "num-complex", + "num-traits", + "paste", + "wide", +] + [[package]] name = "slab" version = "0.4.8" @@ -3266,9 +3310,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "sp-api" 
@@ -3282,7 +3326,7 @@ dependencies = [ "sp-core", "sp-runtime", "sp-state-machine", - "sp-std 5.0.0", + "sp-std", "sp-trie", "sp-version", "thiserror", @@ -3310,7 +3354,7 @@ dependencies = [ "serde", "sp-core", "sp-io", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ -3323,23 +3367,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 5.0.0", - "static_assertions", -] - -[[package]] -name = "sp-arithmetic" -version = "7.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6413ad82d166d40d995aa95ca6e0cbb473f973d3a2f0b433ae19813048c6c1" -dependencies = [ - "integer-sqrt", - "num-traits", - "parity-scale-codec", - "scale-info", - "serde", - "sp-debug-derive 6.0.0", - "sp-std 6.0.0", + "sp-std", "static_assertions", ] @@ -3357,7 +3385,7 @@ dependencies = [ "sp-io", "sp-mmr-primitives", "sp-runtime", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ -3367,7 +3395,7 @@ source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.3 dependencies = [ "array-bytes", "base58 0.2.0", - "bitflags", + "bitflags 1.3.2", "blake2", "dyn-clonable", "ed25519-zebra", @@ -3390,10 +3418,10 @@ dependencies = [ "secrecy", "serde", "sp-core-hashing", - "sp-debug-derive 5.0.0", + "sp-debug-derive", "sp-externalities", "sp-runtime-interface", - "sp-std 5.0.0", + "sp-std", "sp-storage", "ss58-registry", "substrate-bip39", @@ -3410,9 +3438,9 @@ dependencies = [ "blake2", "byteorder", "digest 0.10.7", - "sha2 0.10.6", + "sha2 0.10.7", "sha3", - "sp-std 5.0.0", + "sp-std", "twox-hash", ] @@ -3437,17 +3465,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "sp-debug-derive" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66fb9dc63d54de7d7bed62a505b6e0bd66c122525ea1abb348f6564717c3df2d" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "sp-externalities" version = "0.13.0" @@ -3455,7 +3472,7 @@ source = 
"git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.3 dependencies = [ "environmental", "parity-scale-codec", - "sp-std 5.0.0", + "sp-std", "sp-storage", ] @@ -3469,7 +3486,7 @@ dependencies = [ "parity-scale-codec", "sp-core", "sp-runtime", - "sp-std 5.0.0", + "sp-std", "thiserror", ] @@ -3491,7 +3508,7 @@ dependencies = [ "sp-keystore", "sp-runtime-interface", "sp-state-machine", - "sp-std 5.0.0", + "sp-std", "sp-tracing", "sp-trie", "tracing", @@ -3526,9 +3543,9 @@ dependencies = [ "serde", "sp-api", "sp-core", - "sp-debug-derive 5.0.0", + "sp-debug-derive", "sp-runtime", - "sp-std 5.0.0", + "sp-std", "thiserror", ] @@ -3557,10 +3574,10 @@ dependencies = [ "scale-info", "serde", "sp-application-crypto", - "sp-arithmetic 6.0.0", + "sp-arithmetic", "sp-core", "sp-io", - "sp-std 5.0.0", + "sp-std", "sp-weights", ] @@ -3575,7 +3592,7 @@ dependencies = [ "primitive-types", "sp-externalities", "sp-runtime-interface-proc-macro", - "sp-std 5.0.0", + "sp-std", "sp-storage", "sp-tracing", "sp-wasm-interface", @@ -3603,7 +3620,7 @@ dependencies = [ "scale-info", "sp-core", "sp-runtime", - "sp-std 5.0.0", + "sp-std", ] [[package]] @@ -3620,29 +3637,17 @@ dependencies = [ "sp-core", "sp-externalities", "sp-panic-handler", - "sp-std 5.0.0", + "sp-std", "sp-trie", "thiserror", "tracing", ] -[[package]] -name = "sp-std" -version = "4.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14804d6069ee7a388240b665f17908d98386ffb0b5d39f89a4099fc7a2a4c03f" - [[package]] name = "sp-std" version = "5.0.0" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" -[[package]] -name = "sp-std" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af0ee286f98455272f64ac5bb1384ff21ac029fbb669afbaf48477faff12760e" - [[package]] name = "sp-storage" version = "7.0.0" @@ -3652,8 +3657,8 @@ dependencies = [ "parity-scale-codec", 
"ref-cast", "serde", - "sp-debug-derive 5.0.0", - "sp-std 5.0.0", + "sp-debug-derive", + "sp-std", ] [[package]] @@ -3662,7 +3667,7 @@ version = "6.0.0" source = "git+https://github.com/paritytech/substrate.git?branch=polkadot-v0.9.37#6fa7fe1326ecaab9921c2c3888530ad679cfbb87" dependencies = [ "parity-scale-codec", - "sp-std 5.0.0", + "sp-std", "tracing", "tracing-core", "tracing-subscriber", @@ -3684,7 +3689,7 @@ dependencies = [ "parking_lot", "scale-info", "sp-core", - "sp-std 5.0.0", + "sp-std", "thiserror", "tracing", "trie-db", @@ -3703,7 +3708,7 @@ dependencies = [ "serde", "sp-core-hashing-proc-macro", "sp-runtime", - "sp-std 5.0.0", + "sp-std", "sp-version-proc-macro", "thiserror", ] @@ -3727,7 +3732,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 5.0.0", + "sp-std", "wasmi", "wasmtime", ] @@ -3741,10 +3746,10 @@ dependencies = [ "scale-info", "serde", "smallvec", - "sp-arithmetic 6.0.0", + "sp-arithmetic", "sp-core", - "sp-debug-derive 5.0.0", - "sp-std 5.0.0", + "sp-debug-derive", + "sp-std", ] [[package]] @@ -3759,9 +3764,9 @@ dependencies = [ [[package]] name = "ss58-registry" -version = "1.40.0" +version = "1.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47a8ad42e5fc72d5b1eb104a5546937eaf39843499948bb666d6e93c62423b" +checksum = "bfc443bad666016e012538782d9e3006213a7db43e9fb1dda91657dc06a6fa08" dependencies = [ "Inflector", "num-format", @@ -3838,27 +3843,15 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.16" +version = "2.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6f671d4b5ffdb8eadec19c0ae67fe2639df8684bd7bc4b83d986b8db549cf01" +checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "unicode-xid", -] - [[package]] name = "tap" version = "1.0.1" @@ -3867,21 +3860,22 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.7" +version = "0.12.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd1ba337640d60c3e96bc6f0638a939b9c9a7f2c316a1598c279828b3d1dc8c5" +checksum = "1b1c7f239eb94671427157bd93b3694320f3668d4e1eff08c7285366fd777fac" [[package]] name = "tempfile" -version = "3.5.0" +version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" dependencies = [ + "autocfg", "cfg-if", "fastrand", - "redox_syscall 0.3.5", - "rustix 0.37.19", - "windows-sys 0.45.0", + "redox_syscall", + "rustix 0.37.23", + "windows-sys 0.48.0", ] [[package]] @@ -3897,33 +3891,24 @@ dependencies = [ "version_check", ] -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] - [[package]] name = "thiserror" -version = "1.0.40" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.40" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +checksum = 
"463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", ] [[package]] @@ -3977,7 +3962,7 @@ dependencies = [ "pbkdf2 0.11.0", "rand 0.8.5", "rustc-hash", - "sha2 0.10.6", + "sha2 0.10.7", "thiserror", "unicode-normalization", "wasm-bindgen", @@ -4020,17 +4005,17 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml_datetime" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.10" +version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739" +checksum = "c500344a19072298cd05a7224b3c0c629348b78692bf48466c5238656e315a78" dependencies = [ - "indexmap", + "indexmap 2.0.0", "toml_datetime", "winnow", ] @@ -4049,13 +4034,13 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", ] [[package]] @@ -4177,9 +4162,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" +checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" [[package]] name = "unicode-normalization" @@ -4196,12 +4181,6 @@ 
version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" -[[package]] -name = "unicode-width" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" - [[package]] name = "unicode-xid" version = "0.2.4" @@ -4214,7 +4193,7 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.9", + "getrandom 0.2.10", "serde", ] @@ -4263,9 +4242,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -4273,24 +4252,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4298,22 +4277,22 @@ dependencies = [ 
[[package]] name = "wasm-bindgen-macro-support" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasmi" @@ -4354,7 +4333,7 @@ version = "0.89.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab5d3e08b13876f96dd55608d03cd4883a0545884932d5adf11925876c96daef" dependencies = [ - "indexmap", + "indexmap 1.9.3", ] [[package]] @@ -4366,7 +4345,7 @@ dependencies = [ "anyhow", "bincode", "cfg-if", - "indexmap", + "indexmap 1.9.3", "libc", "log", "object 0.29.0", @@ -4400,7 +4379,7 @@ dependencies = [ "anyhow", "cranelift-entity", "gimli 0.26.2", - "indexmap", + "indexmap 1.9.3", "log", "object 0.29.0", "serde", @@ -4425,7 +4404,7 @@ dependencies = [ "log", "object 0.29.0", "rustc-demangle", - "rustix 0.35.13", + "rustix 0.35.14", "serde", "target-lexicon", "thiserror", @@ -4452,14 +4431,14 @@ dependencies = [ "anyhow", "cc", "cfg-if", - "indexmap", + "indexmap 1.9.3", "libc", "log", "mach", "memoffset 0.6.5", "paste", "rand 0.8.5", - "rustix 0.35.13", + "rustix 0.35.14", "thiserror", "wasmtime-asm-macros", "wasmtime-environ", @@ -4481,14 +4460,24 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2" +checksum = 
"9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", ] +[[package]] +name = "wide" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40018623e2dba2602a9790faba8d33f2ebdebf4b86561b83928db735f8784728" +dependencies = [ + "bytemuck", + "safe_arch", +] + [[package]] name = "winapi" version = "0.3.9" @@ -4526,7 +4515,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.0", + "windows-targets", ] [[package]] @@ -4557,44 +4546,20 @@ dependencies = [ "windows_x86_64_msvc 0.42.2", ] -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.0", + "windows-targets", ] [[package]] name = "windows-targets" -version = "0.42.2" +version = "0.48.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-targets" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = 
"05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" dependencies = [ "windows_aarch64_gnullvm 0.48.0", "windows_aarch64_msvc 0.48.0", @@ -4721,9 +4686,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.6" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699" +checksum = "81a2094c43cc94775293eaa0e499fbc30048a6d824ac82c0351a8c0bf9112529" dependencies = [ "memchr", ] @@ -4754,5 +4719,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.16", + "syn 2.0.25", ] diff --git a/Cargo.toml b/Cargo.toml index b833ca61..78b33c31 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,21 +1,21 @@ [workspace] members = [ - "primitives/avail", - "primitives/types", - "kate", + "core", "kate/recovery", - "kate/grid", - "primitives/nomad/signature", - "primitives/nomad/nomad-core", - "primitives/nomad/nomad-base", - "primitives/nomad/merkle", + "kate", + "nomad/signature", + "nomad/core", + "nomad/base", + "nomad/merkle", ] [patch.crates-io] # Substrate (polkadot-v0.9.37). 
sp-core = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-core-hashing = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-io = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-api = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-std = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-application-crypto = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-storage = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-debug-derive = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-arithmetic = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } @@ -24,3 +24,28 @@ sp-trie = { git = "https://github.com/paritytech/substrate.git", branch = "polka sp-runtime-interface = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } sp-weights = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } frame-support = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-externalities = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-inherents = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-staking = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-state-machine = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-tracing = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-version = { git = "https://github.com/paritytech/substrate.git", branch = "polkadot-v0.9.37" } +sp-wasm-interface = { git = "https://github.com/paritytech/substrate.git", branch = 
"polkadot-v0.9.37" } + +[profile.dev.package] +nalgebra = { opt-level = 3 } +blst = { opt-level = 3 } +dusk-bls12_381 = { opt-level = 3 } +dusk-plonk = { opt-level = 3 } +dusk-jubjub = { opt-level = 3 } +dusk-bytes = { opt-level = 3 } +rayon = { opt-level = 3 } +rayon-core = { opt-level = 3 } +poly-multiproof = { opt-level = 3 } +ark-bls12-381 = { opt-level = 3 } +ark-ec = { opt-level = 3 } +ark-ff = { opt-level = 3 } +ark-poly = { opt-level = 3 } +ark-serialize = { opt-level = 3 } +ark-std = { opt-level = 3 } +merlin = { opt-level = 3 } diff --git a/core/Cargo.toml b/core/Cargo.toml new file mode 100644 index 00000000..84e8258c --- /dev/null +++ b/core/Cargo.toml @@ -0,0 +1,66 @@ +[package] +name = "avail-core" +version = "0.5.0" +authors = [] +edition = "2021" +license = "Apache-2.0" + +[dependencies] +# Others +derive_more = { version = "0.99.17", default-features = false, features = ["constructor", "from", "add", "deref", "mul", "into"] } +hash256-std-hasher = { version = "0.15.2", default-features = false } +hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } +log = { version = "0.4.8", default-features = false } +serde = { version = "1", optional = true, features = ["derive"] } +thiserror-no-std = "2.0.2" + +# Substrate +beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false, optional = true } +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "*", default-features = false } +sp-core = { version = "*", default-features = false } +sp-std = { version = "*", default-features = false } +sp-trie = { version = "*", default-features = false } + +# Substrate Runtime +frame-support = { version = "4.0.0-dev", default-features = false, optional = true } +sp-runtime = 
{ version = "7", default-features = false, optional = true } +sp-runtime-interface = { version = "7", default-features = false, optional = true } + +[dev-dependencies] +hex-literal = "0.3.4" +serde_json = "1" +test-case = "1.2.3" + +[features] +default = ["std"] +std = [ + "serde", + "hex", + "codec/std", + "scale-info/std", + "log/std", + "sp-core/std", + "sp-std/std", + "sp-trie/std", + "sp-arithmetic/std", + "hash256-std-hasher/std", + "beefy-merkle-tree?/std", + "derive_more/display", + "sp-runtime-interface?/std", + "sp-runtime?/std", + "frame-support?/std", +] +runtime = [ + "sp-runtime-interface", + "sp-runtime", + "frame-support", + "beefy-merkle-tree", +] + +header-backward-compatibility-test = [] +try-runtime = [ + "runtime", + "sp-runtime/try-runtime", +] diff --git a/core/src/app_extrinsic.rs b/core/src/app_extrinsic.rs new file mode 100644 index 00000000..76e8db2a --- /dev/null +++ b/core/src/app_extrinsic.rs @@ -0,0 +1,92 @@ +use crate::traits::GetAppId; +use codec::{Decode, Encode}; +use derive_more::Constructor; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_core::RuntimeDebug; +use sp_std::vec::Vec; + +use crate::AppId; + +/// Raw Extrinsic with application id. 
+#[derive(Clone, TypeInfo, Default, Encode, Decode, RuntimeDebug, Constructor)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct AppExtrinsic { + pub app_id: AppId, + #[cfg_attr(feature = "std", serde(with = "hex"))] + pub data: Vec, +} + +#[cfg(feature = "runtime")] +use crate::asdr::AppUncheckedExtrinsic; +#[cfg(feature = "runtime")] +use sp_runtime::{generic::UncheckedExtrinsic, traits::SignedExtension}; + +#[cfg(feature = "runtime")] +impl From> for AppExtrinsic +where + A: Encode, + C: Encode, + S: Encode, + E: SignedExtension + GetAppId, +{ + fn from(ue: sp_runtime::generic::UncheckedExtrinsic) -> Self { + let app_id = ue + .signature + .as_ref() + .map(|(_, _, extra)| extra.app_id()) + .unwrap_or_default(); + let data = ue.encode(); + + Self { app_id, data } + } +} + +impl GetAppId for AppExtrinsic { + fn app_id(&self) -> AppId { + self.app_id + } +} + +impl From> for AppExtrinsic { + #[inline] + fn from(data: Vec) -> Self { + Self { + data, + app_id: <_>::default(), + } + } +} + +#[cfg(feature = "runtime")] +impl From<&AppUncheckedExtrinsic> for AppExtrinsic +where + A: Encode, + C: Encode, + S: Encode, + E: SignedExtension + GetAppId, +{ + fn from(app_ext: &AppUncheckedExtrinsic) -> Self { + Self { + app_id: app_ext.app_id(), + data: app_ext.encode(), + } + } +} + +#[cfg(feature = "runtime")] +impl From> for AppExtrinsic +where + A: Encode, + C: Encode, + S: Encode, + E: SignedExtension + GetAppId, +{ + fn from(app_ext: AppUncheckedExtrinsic) -> Self { + Self { + app_id: app_ext.app_id(), + data: app_ext.encode(), + } + } +} diff --git a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs b/core/src/asdr.rs similarity index 97% rename from primitives/avail/src/asdr/app_unchecked_extrinsic.rs rename to core/src/asdr.rs index 12661eb2..b66c2066 100644 --- a/primitives/avail/src/asdr/app_unchecked_extrinsic.rs +++ b/core/src/asdr.rs @@ -18,12 +18,9 @@ //! Generic implementation of an unchecked (pre-verification) extrinsic. 
use codec::{Compact, Decode, Encode, EncodeLike, Error, Input}; -use frame_support::{ - dispatch::{DispatchInfo, GetDispatchInfo}, - traits::ExtrinsicCall, -}; use scale_info::{build::Fields, meta_type, Path, StaticTypeInfo, Type, TypeInfo, TypeParameter}; -use sp_io::hashing::blake2_256; +use sp_core::blake2_256; +#[cfg(feature = "runtime")] use sp_runtime::{ generic::CheckedExtrinsic, traits::{ @@ -39,10 +36,7 @@ use sp_std::{ vec::Vec, }; -use crate::{ - asdr::{AppId, GetAppId}, - OpaqueExtrinsic, -}; +use crate::{traits::GetAppId, AppId, OpaqueExtrinsic}; /// Current version of the [`UncheckedExtrinsic`] encoded format. /// @@ -243,6 +237,10 @@ where type SignedExtensions = Extra; } +#[cfg(feature = "runtime")] +use frame_support::dispatch::{DispatchInfo, GetDispatchInfo}; + +#[cfg(feature = "runtime")] impl GetDispatchInfo for AppUncheckedExtrinsic where @@ -385,7 +383,11 @@ where let compact_len = codec::Compact::(tmp.len() as u32); // Allocate the output buffer with the correct length - let mut output = Vec::with_capacity(compact_len.size_hint() + tmp.len()); + let output_len = compact_len + .size_hint() + .checked_add(tmp.len()) + .expect("Cannot encode this `AppUncheckedExtrinsic` into memory"); + let mut output = Vec::with_capacity(output_len); compact_len.encode_to(&mut output); output.extend(tmp); @@ -461,7 +463,8 @@ where } } -impl ExtrinsicCall +#[cfg(feature = "runtime")] +impl frame_support::traits::ExtrinsicCall for AppUncheckedExtrinsic where Extra: SignedExtension, diff --git a/core/src/bench_randomness.rs b/core/src/bench_randomness.rs new file mode 100644 index 00000000..2b308abf --- /dev/null +++ b/core/src/bench_randomness.rs @@ -0,0 +1,20 @@ +use frame_support::traits::Randomness; + +/// Provides an implementation of [`frame_support::traits::Randomness`] that should only be used in +/// on Benchmarks! 
+pub struct BenchRandomness(sp_std::marker::PhantomData); + +impl Randomness for BenchRandomness +where + Output: codec::Decode + Default, + T: Default, +{ + fn random(subject: &[u8]) -> (Output, T) { + use sp_runtime::traits::TrailingZeroInput; + + ( + Output::decode(&mut TrailingZeroInput::new(subject)).unwrap_or_default(), + T::default(), + ) + } +} diff --git a/core/src/constants.rs b/core/src/constants.rs new file mode 100644 index 00000000..e4b8c462 --- /dev/null +++ b/core/src/constants.rs @@ -0,0 +1,28 @@ +use sp_arithmetic::Perbill; + +pub mod well_known_keys { + /// Public params used to generate Kate commitment + pub const KATE_PUBLIC_PARAMS: &[u8] = b":kate_public_params:"; +} + +/// We allow `Normal` extrinsics to fill up the block up to 90%, the rest can be used +/// by Operational extrinsics. +pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(90); + +pub const BLOCK_CHUNK_SIZE: u32 = 32; + +/// Money matters. +pub mod currency { + + pub type Balance = u128; + + /// AVL has 18 decimal positions. + pub const AVL: Balance = 1_000_000_000_000_000_000; + + /// Cents of AVL has 16 decimal positions (100 Cents = $1) + /// 1 DOLLARS = 10_000_000_000_000_000 + pub const CENTS: Balance = AVL / 100; + + /// Millicent of AVL has 13 decimal positions( 100 mCents = 1 cent). 
+ pub const MILLICENTS: Balance = CENTS / 1_000; +} diff --git a/core/src/data_lookup/compact.rs b/core/src/data_lookup/compact.rs new file mode 100644 index 00000000..5da2db89 --- /dev/null +++ b/core/src/data_lookup/compact.rs @@ -0,0 +1,59 @@ +use crate::{AppId, DataLookup}; + +use codec::{Decode, Encode}; +use derive_more::Constructor; +use scale_info::TypeInfo; +use sp_std::vec::Vec; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +#[derive(Copy, Clone, Encode, Decode, TypeInfo, Constructor, Debug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct DataLookupItem { + pub app_id: AppId, + #[codec(compact)] + pub start: u32, +} + +impl From<(A, S)> for DataLookupItem +where + u32: From, + u32: From, +{ + fn from(value: (A, S)) -> Self { + Self { + app_id: AppId(value.0.into()), + start: value.1.into(), + } + } +} + +#[derive(Encode, Decode, TypeInfo, Constructor, Debug, Clone)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct CompactDataLookup { + /// size of the look up + #[codec(compact)] + pub(crate) size: u32, + /// sorted vector of tuples(key, start index) + pub(crate) index: Vec, +} + +impl CompactDataLookup { + pub fn from_expanded(lookup: &DataLookup) -> Self { + let index = lookup + .index + .iter() + .filter(|(id, _)| *id != AppId(0)) + .map(|(id, range)| DataLookupItem::new(*id, range.start)) + .collect(); + let size = lookup.index.last().map(|(_, range)| range.end).unwrap_or(0); + Self { size, index } + } +} + +impl From for CompactDataLookup { + fn from(lookup: DataLookup) -> Self { + CompactDataLookup::from_expanded(&lookup) + } +} diff --git a/core/src/data_lookup/mod.rs b/core/src/data_lookup/mod.rs new file mode 100644 index 00000000..2375e61b --- /dev/null +++ b/core/src/data_lookup/mod.rs @@ -0,0 +1,234 @@ +use codec::{Decode, Encode, Input}; +use core::convert::TryFrom; +use scale_info::{Type, TypeInfo}; +use sp_core::RuntimeDebug; +use sp_std::{ops::Range, vec::Vec}; +use 
thiserror_no_std::Error; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +use crate::{ensure, AppId}; + +mod compact; +use compact::CompactDataLookup; + +pub type DataLookupRange = Range; + +#[derive(Error, Debug, Clone, Copy, PartialEq, Eq)] +pub enum Error { + #[error("Input data is not sorted by AppId")] + DataNotSorted, + #[error("Data is empty on AppId {0}")] + DataEmptyOn(AppId), + #[error("Offset overflows")] + OffsetOverflows, +} + +#[derive(PartialEq, Eq, Clone, Default, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr( + feature = "std", + serde(try_from = "CompactDataLookup", into = "CompactDataLookup") +)] +pub struct DataLookup { + pub(crate) index: Vec<(AppId, DataLookupRange)>, +} + +impl DataLookup { + pub fn len(&self) -> u32 { + self.index.last().map(|(_id, range)| range.end).unwrap_or(0) + } + + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + pub fn range_of(&self, app_id: AppId) -> Option { + self.index + .iter() + .find(|(id, _)| *id == app_id) + .map(|(_, range)| range) + .cloned() + } + + pub fn projected_range_of(&self, app_id: AppId, chunk_size: u32) -> Option { + self.range_of(app_id).and_then(|range| { + let start = range.start.checked_mul(chunk_size)?; + let end = range.end.checked_mul(chunk_size)?; + Some(start..end) + }) + } + + pub fn projected_ranges(&self, chunk_size: u32) -> Result)>, Error> { + self.index + .iter() + .map(|(id, range)| { + let start = range + .start + .checked_mul(chunk_size) + .ok_or(Error::OffsetOverflows)?; + let end = range + .end + .checked_mul(chunk_size) + .ok_or(Error::OffsetOverflows)?; + Ok((*id, start..end)) + }) + .collect() + } +} + +impl DataLookup { + pub fn from_id_and_len_iter(iter: I) -> Result + where + I: Iterator, + u32: From, + u32: TryFrom, + { + let mut offset: u32 = 0; + let mut maybe_prev_id = None; + + let index = iter + .map(|(id, len)| { + // Check sorted by AppId + let id = AppId(id.into()); + if let Some(prev_id) = 
maybe_prev_id.replace(id) { + ensure!(prev_id < id, Error::DataNotSorted); + } + + // Check non-empty data per AppId + let len = u32::try_from(len).map_err(|_| Error::OffsetOverflows)?; + ensure!(len > 0, Error::DataEmptyOn(id)); + + // Create range and update `offset`. + let end = offset.checked_add(len).ok_or(Error::OffsetOverflows)?; + let range = offset..end; + offset = end; + + Ok((id, range)) + }) + .collect::>()?; + + Ok(Self { index }) + } +} + +impl TryFrom for DataLookup { + type Error = Error; + + fn try_from(compacted: CompactDataLookup) -> Result { + let mut offset = 0; + let mut prev_id = AppId(0); + let mut index = Vec::with_capacity( + compacted + .index + .len() + .checked_add(1) + .ok_or(Error::OffsetOverflows)?, + ); + + for c_item in compacted.index.into_iter() { + index.push((prev_id, offset..c_item.start)); + prev_id = c_item.app_id; + offset = c_item.start; + } + + let last_range = offset..compacted.size; + if !last_range.is_empty() { + index.push((prev_id, offset..compacted.size)); + } + + let lookup = DataLookup { index }; + ensure!(lookup.len() == compacted.size, Error::DataNotSorted); + + Ok(lookup) + } +} + +// Encoding +// ================================== + +impl Encode for DataLookup { + /// Encodes as a `compact::DataLookup`. + fn encode(&self) -> Vec { + let compacted = CompactDataLookup::from_expanded(self); + compacted.encode() + } +} + +impl Decode for DataLookup { + /// Decodes from a `compact::DataLookup`. 
+ fn decode(input: &mut I) -> Result { + let compacted = CompactDataLookup::decode(input)?; + DataLookup::try_from(compacted).map_err(|_| codec::Error::from("Invalid `DataLookup`")) + } +} + +impl TypeInfo for DataLookup { + type Identity = Self; + + fn type_info() -> Type { + CompactDataLookup::type_info() + } +} + +#[cfg(test)] +mod test { + use super::*; + use test_case::test_case; + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] => Ok(vec![(0,0..15),(1, 15..35), (2, 35..185)]); "Valid case")] + #[test_case( vec![(0, usize::MAX)] => Err(Error::OffsetOverflows); "Offset overflows at zero")] + #[test_case( vec![(0, (u32::MAX -1) as usize), (1, 2)] => Err(Error::OffsetOverflows); "Offset overflows at non zero")] + #[test_case( vec![(1, 10), (0, 2)] => Err(Error::DataNotSorted); "Unsortend data")] + #[test_case( vec![] => Ok(vec![]); "Empty data")] + fn from_id_and_len( + id_len_data: Vec<(u32, usize)>, + ) -> Result, Error> { + let iter = id_len_data.into_iter().map(|(id, len)| (AppId(id), len)); + + DataLookup::from_id_and_len_iter(iter).map(|lookup| { + lookup + .index + .iter() + .map(|(id, range)| (id.0, range.clone())) + .collect::>() + }) + } + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] => CompactDataLookup::new(185, vec![(1u32, 15u32).into(),(2u32,35u32).into()]).encode(); "Valid case")] + #[test_case( vec![(0, 100)] => CompactDataLookup::new(100, vec![]).encode(); "Only Zero AppId")] + #[test_case( vec![] => CompactDataLookup::new(0, vec![]).encode(); "Empty")] + + fn check_compressed_encode(id_lens: Vec<(u32, usize)>) -> Vec { + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + lookup.encode() + } + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] ; "Valid case")] + #[test_case( vec![(0, 15)] ; "Only Zero AppId")] + #[test_case( vec![] ; "Empty")] + fn compressed_conversions(id_lens: Vec<(u32, usize)>) { + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + + let compact_lookup = 
CompactDataLookup::from_expanded(&lookup); + let expanded_lookup = DataLookup::try_from(compact_lookup.clone()).unwrap(); + + assert_eq!( + lookup, expanded_lookup, + "Lookup: {lookup:?} -> Compacted: {compact_lookup:?} -> Expanded: {expanded_lookup:?}" + ); + } + + #[test_case( vec![(0, 15), (1, 20), (2, 150)] ; "Valid case")] + #[test_case( vec![(0, 15)] ; "Only Zero AppId")] + #[test_case( vec![] ; "Empty")] + fn serialization_compatibility(id_lens: Vec<(u32, usize)>) { + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + let lookup_json = serde_json::to_string(&lookup).unwrap(); + let compressed_from_json = serde_json::from_str::(&lookup_json).unwrap(); + let expanded_lookup = DataLookup::try_from(compressed_from_json.clone()).unwrap(); + + assert_eq!(lookup, expanded_lookup); + } +} diff --git a/primitives/avail/src/data_proof.rs b/core/src/data_proof.rs similarity index 67% rename from primitives/avail/src/data_proof.rs rename to core/src/data_proof.rs index f1bbca53..b1607ec5 100644 --- a/primitives/avail/src/data_proof.rs +++ b/core/src/data_proof.rs @@ -1,13 +1,11 @@ +#[cfg(feature = "runtime")] use beefy_merkle_tree::MerkleProof; use codec::{Decode, Encode}; -use frame_support::ensure; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::H256; -use sp_io::hashing::sha2_256; -use sp_std::{convert::TryFrom, vec::Vec}; +use sp_std::vec::Vec; use thiserror_no_std::Error; - /// Wrapper of `beefy-merkle-tree::MerkleProof` with codec support. #[derive(Clone, Debug, PartialEq, Eq, Encode, Decode, Default)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] @@ -41,13 +39,13 @@ pub enum DataProofTryFromError { /// The given index of proofs cannot be converted into `H256`. #[error("Proof at {0} cannot be converted into `H256`")] InvalidProof(usize), - /// Number of leaves overflowed + /// Number of leaves overflowed. 
#[error("Number of leaves overflowed")] OverflowedNumberOfLeaves, /// Number of leaves must be greater than zero. #[error("Number of leaves cannot be zero")] InvalidNumberOfLeaves, - /// Leaf index overflowed + /// Leaf index overflowed. #[error("Leaf index overflowed")] OverflowedLeafIndex, /// Leaf index overflowed or invalid (greater or equal to `number_of_leaves`) @@ -55,7 +53,8 @@ pub enum DataProofTryFromError { InvalidLeafIndex, } -impl TryFrom<&MerkleProof> for DataProof +#[cfg(feature = "runtime")] +impl core::convert::TryFrom<&MerkleProof> for DataProof where T: AsRef<[u8]>, H: PartialEq + Eq + AsRef<[u8]>, @@ -63,12 +62,15 @@ where type Error = DataProofTryFromError; fn try_from(merkle_proof: &MerkleProof) -> Result { + use crate::ensure; + use sp_core::keccak_256; use DataProofTryFromError::*; let root = <[u8; 32]>::try_from(merkle_proof.root.as_ref()) .map_err(|_| InvalidRoot)? .into(); - let leaf = sha2_256(merkle_proof.leaf.as_ref()).into(); + let leaf = keccak_256(merkle_proof.leaf.as_ref()).into(); + let proof = merkle_proof .proof .iter() @@ -96,11 +98,11 @@ where } } -#[cfg(test)] +#[cfg(all(test, feature = "runtime"))] mod test { - use crate::ShaTwo256; + use crate::Keccak256; use hex_literal::hex; - use sp_core::H512; + use sp_core::{keccak_256, H512}; use sp_std::cmp::min; use test_case::test_case; @@ -121,8 +123,9 @@ mod test { let leaves = leaves(); let index = min(leaf_index, leaves.len() - 1); - let mut proof = beefy_merkle_tree::merkle_proof::(leaves, index); + let mut proof = beefy_merkle_tree::merkle_proof::(leaves, index); proof.leaf_index = leaf_index; + proof } @@ -138,50 +141,50 @@ mod test { fn expected_data_proof_1() -> Result { Ok(DataProof { - root: hex!("e18e5f531a15090555c2d3539b5d93a5a872ffc3422bd9b9410776549d71f6f6").into(), + root: hex!("08a1133e47edacdc5a7a37f7301aad3c725fbf5698ca5e35acb7915ad1784b95").into(), proof: vec![ - hex!("f5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b").into(), - 
hex!("e59d380e38bc66ab4e5452df8ee47bb4611e719efb8985c2a5e6598784e3d642").into(), - hex!("fc7ad74dc17cb03a8464bbfb12fd037cceaef8ef5973d9f1772b4913503bff6e").into(), + hex!("ad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5").into(), + hex!("8421b50025cb27f1412ed7103442ecdd09d4aa1e4a1ba777597ae921e48b31e1").into(), + hex!("08f1f28658e6a37fa6fd9be84bd7315c3ca1eceb0849ec88cbd5bf9a69160653").into(), ], number_of_leaves: 7, leaf_index: 1, - leaf: sha2_256(H512::repeat_byte(1).as_bytes()).into(), + leaf: keccak_256(H512::repeat_byte(1).as_bytes()).into(), }) } fn expected_data_proof_0() -> Result { Ok(DataProof { - root: hex!("e18e5f531a15090555c2d3539b5d93a5a872ffc3422bd9b9410776549d71f6f6").into(), + root: hex!("08a1133e47edacdc5a7a37f7301aad3c725fbf5698ca5e35acb7915ad1784b95").into(), proof: vec![ - hex!("7c8975e1e60a5c8337f28edf8c33c3b180360b7279644a9bc1af3c51e6220bf5").into(), - hex!("e59d380e38bc66ab4e5452df8ee47bb4611e719efb8985c2a5e6598784e3d642").into(), - hex!("fc7ad74dc17cb03a8464bbfb12fd037cceaef8ef5973d9f1772b4913503bff6e").into(), + hex!("401617bc4f769381f86be40df0207a0a3e31ae0839497a5ac6d4252dfc35577f").into(), + hex!("8421b50025cb27f1412ed7103442ecdd09d4aa1e4a1ba777597ae921e48b31e1").into(), + hex!("08f1f28658e6a37fa6fd9be84bd7315c3ca1eceb0849ec88cbd5bf9a69160653").into(), ], number_of_leaves: 7, leaf_index: 0, - leaf: sha2_256(H512::repeat_byte(0).as_bytes()).into(), + leaf: keccak_256(H512::repeat_byte(0).as_bytes()).into(), }) } fn expected_data_proof_6() -> Result { Ok(DataProof { - root: hex!("e18e5f531a15090555c2d3539b5d93a5a872ffc3422bd9b9410776549d71f6f6").into(), + root: hex!("08a1133e47edacdc5a7a37f7301aad3c725fbf5698ca5e35acb7915ad1784b95").into(), proof: vec![ - hex!("6b19c42f81575abc499679f91bb649e0aa8af83d9634aab78af04b5e13b04e5f").into(), - hex!("e4117bb4906266f46977187ca43a9151b88928ab1aa03283ddf5ead4b33c3e78").into(), + hex!("8663c7e2962f98579b883bf5e2179f9200ae3615ec6fc3bd8027a0de9973606a").into(), + 
hex!("b225b28cd9168524306b0d944342b11bb21d37e9156cdbf42073d4e51b2f0a41").into(), ], number_of_leaves: 7, leaf_index: 6, - leaf: sha2_256(H512::repeat_byte(6).as_bytes()).into(), + leaf: keccak_256(H512::repeat_byte(6).as_bytes()).into(), }) } - #[test_case( merkle_proof_idx(0) => expected_data_proof_0(); "From merkle proof 0")] - #[test_case( merkle_proof_idx(1) => expected_data_proof_1(); "From merkle proof 1")] - #[test_case( merkle_proof_idx(6) => expected_data_proof_6(); "From merkle proof 6")] - #[test_case( merkle_proof_idx(7) => Err(DataProofTryFromError::InvalidLeafIndex); "From invalid leaf index")] - #[test_case( invalid_merkle_proof_zero_leaves() => Err(DataProofTryFromError::InvalidNumberOfLeaves); "From invalid number of leaves")] + #[test_case(merkle_proof_idx(0) => expected_data_proof_0(); "From merkle proof 0")] + #[test_case(merkle_proof_idx(1) => expected_data_proof_1(); "From merkle proof 1")] + #[test_case(merkle_proof_idx(6) => expected_data_proof_6(); "From merkle proof 6")] + #[test_case(merkle_proof_idx(7) => Err(DataProofTryFromError::InvalidLeafIndex); "From invalid leaf index")] + #[test_case(invalid_merkle_proof_zero_leaves() => Err(DataProofTryFromError::InvalidNumberOfLeaves); "From invalid number of leaves")] fn from_beefy( beefy_proof: MerkleProof>, ) -> Result { diff --git a/primitives/avail/src/header/extension/mod.rs b/core/src/header/extension/mod.rs similarity index 88% rename from primitives/avail/src/header/extension/mod.rs rename to core/src/header/extension/mod.rs index ad5fd923..d6fb76a8 100644 --- a/primitives/avail/src/header/extension/mod.rs +++ b/core/src/header/extension/mod.rs @@ -1,13 +1,13 @@ -use crate::asdr::DataLookup; use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{RuntimeDebug, H256}; +#[cfg(feature = "runtime")] use 
sp_runtime_interface::pass_by::PassByCodec; +use crate::DataLookup; + pub mod v1; pub mod v2; @@ -15,8 +15,9 @@ pub mod v2; pub mod v_test; /// Header extension data. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode, PassByCodec)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "runtime", derive(PassByCodec))] pub enum HeaderExtension { V1(v1::HeaderExtension), V2(v2::HeaderExtension), @@ -70,13 +71,6 @@ impl Default for HeaderExtension { } } -#[cfg(feature = "std")] -impl MallocSizeOf for HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - forward_to_version!(self, size_of, ops) - } -} - impl From for HeaderExtension { #[inline] fn from(ext: v1::HeaderExtension) -> Self { diff --git a/primitives/avail/src/header/extension/v1.rs b/core/src/header/extension/v1.rs similarity index 67% rename from primitives/avail/src/header/extension/v1.rs rename to core/src/header/extension/v1.rs index f0f9b1ed..251b8b64 100644 --- a/primitives/avail/src/header/extension/v1.rs +++ b/core/src/header/extension/v1.rs @@ -1,12 +1,10 @@ use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{RuntimeDebug, H256}; -use crate::{asdr::DataLookup, v1::KateCommitment}; +use crate::{v1::KateCommitment, DataLookup}; #[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode, Default)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] @@ -32,10 +30,3 @@ impl HeaderExtension { self.commitment.cols } } - -#[cfg(feature = "std")] -impl MallocSizeOf for HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) + self.app_lookup.size_of(ops) - } -} diff --git a/primitives/avail/src/header/extension/v2.rs 
b/core/src/header/extension/v2.rs similarity index 68% rename from primitives/avail/src/header/extension/v2.rs rename to core/src/header/extension/v2.rs index f7a6855a..ff9984d5 100644 --- a/primitives/avail/src/header/extension/v2.rs +++ b/core/src/header/extension/v2.rs @@ -1,12 +1,10 @@ use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{RuntimeDebug, H256}; -use crate::{asdr::DataLookup, v2::KateCommitment}; +use crate::{v2::KateCommitment, DataLookup}; #[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo, Encode, Decode, Default)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] @@ -32,10 +30,3 @@ impl HeaderExtension { self.commitment.cols } } - -#[cfg(feature = "std")] -impl MallocSizeOf for HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) + self.app_lookup.size_of(ops) - } -} diff --git a/primitives/avail/src/header/extension/v_test.rs b/core/src/header/extension/v_test.rs similarity index 73% rename from primitives/avail/src/header/extension/v_test.rs rename to core/src/header/extension/v_test.rs index 23d989d6..a7e8b4bf 100644 --- a/primitives/avail/src/header/extension/v_test.rs +++ b/core/src/header/extension/v_test.rs @@ -1,6 +1,4 @@ use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use parity_util_mem::{MallocSizeOf, MallocSizeOfOps}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -23,13 +21,6 @@ impl HeaderExtension { } } -#[cfg(feature = "std")] -impl MallocSizeOf for HeaderExtension { - fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize { - self.new_field.size_of(ops) + self.commitment.size_of(ops) + self.app_lookup.size_of(ops) - } -} - impl From for HeaderExtension { fn from(ext: v1::HeaderExtension) -> Self { Self { diff --git a/primitives/avail/src/header/mod.rs 
b/core/src/header/mod.rs similarity index 95% rename from primitives/avail/src/header/mod.rs rename to core/src/header/mod.rs index 9f4682b5..3ec17f13 100644 --- a/primitives/avail/src/header/mod.rs +++ b/core/src/header/mod.rs @@ -30,8 +30,7 @@ use sp_runtime::{ Digest, }; use sp_runtime_interface::pass_by::{Codec as PassByCodecImpl, PassBy}; -use sp_std::fmt; -use sp_std::{convert::TryFrom, fmt::Debug}; +use sp_std::{convert::TryFrom, fmt}; use crate::traits::{ExtendedHeader, HeaderBlockNumber, HeaderHash}; @@ -159,7 +158,7 @@ impl HeaderT for Header where Number: Member + MaybeSerializeDeserialize - + Debug + + fmt::Debug + sp_std::hash::Hash + MaybeDisplay + AtLeast32BitUnsigned @@ -175,7 +174,7 @@ where + Member + Ord + MaybeSerialize - + Debug + + fmt::Debug + MaybeDisplay + SimpleBitOps + Codec, @@ -244,7 +243,7 @@ where } } -impl ExtendedHeader for Header { +impl ExtendedHeader for Header { type Hash = ::Output; type Number = N; @@ -269,7 +268,7 @@ impl ExtendedHeader for Header { } } -#[cfg(test)] +#[cfg(all(test, feature = "runtime"))] mod tests { use codec::Error; use hex_literal::hex; @@ -282,8 +281,8 @@ mod tests { use super::*; use crate::{ - asdr::DataLookup, kate_commitment::{v1, v2}, + AppId, DataLookup, }; type THeader = Header; @@ -453,10 +452,8 @@ mod tests { }; let extension = extension::v1::HeaderExtension { commitment, - app_lookup: DataLookup { - size: 1, - index: vec![], - }, + app_lookup: DataLookup::from_id_and_len_iter([(AppId(0), 1)].into_iter()) + .expect("Valid DataLookup .qed"), }; let digest = Digest { logs: vec![ @@ -564,19 +561,6 @@ mod tests { (header, hash) } - fn corrupted_app_lookup(header_and_hash: (THeader, H256)) -> (THeader, H256) { - let (mut header, hash) = header_and_hash; - - match header.extension { - extension::HeaderExtension::V1(ref mut ext) => ext.app_lookup.size += 1, - extension::HeaderExtension::V2(ref mut ext) => ext.app_lookup.size += 1, - #[cfg(feature = "header-backward-compatibility-test")] - _ => 
unreachable!(), - }; - - (header, hash) - } - fn corrupted_number(mut header_and_hash: (THeader, H256)) -> (THeader, H256) { header_and_hash.0.number += 1; header_and_hash @@ -596,7 +580,6 @@ mod tests { #[test_case( corrupted_kate_data_root(header()) => false; "Corrupted data root in kate")] #[test_case( corrupted_kate_cols(header()) => false; "Corrupted cols in kate")] #[test_case( corrupted_kate_rows(header()) => false; "Corrupted rows in kate")] - #[test_case( corrupted_app_lookup(header()) => false )] #[test_case( corrupted_number(header()) => false )] #[test_case( corrupted_state_root(header()) => false )] #[test_case( corrupted_parent(header()) => false )] diff --git a/primitives/avail/src/kate_commitment.rs b/core/src/kate_commitment.rs similarity index 82% rename from primitives/avail/src/kate_commitment.rs rename to core/src/kate_commitment.rs index 14d11a5c..d2c2710b 100644 --- a/primitives/avail/src/kate_commitment.rs +++ b/core/src/kate_commitment.rs @@ -1,11 +1,14 @@ use codec::{Decode, Encode}; use scale_info::TypeInfo; +use sp_core::H256; +use sp_std::vec::Vec; + #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{hexdisplay::HexDisplay, H256}; +#[cfg(feature = "std")] +use sp_core::hexdisplay::HexDisplay; #[cfg(feature = "std")] use sp_std::fmt; -use sp_std::vec::Vec; pub mod v1 { use super::*; @@ -39,16 +42,6 @@ pub mod v1 { .finish() } } - - #[cfg(feature = "std")] - impl parity_util_mem::MallocSizeOf for KateCommitment { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) - + self.rows.size_of(ops) - + self.cols.size_of(ops) - + self.data_root.size_of(ops) - } - } } pub mod v2 { @@ -103,16 +96,6 @@ pub mod v2 { } } - #[cfg(feature = "std")] - impl parity_util_mem::MallocSizeOf for KateCommitment { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.commitment.size_of(ops) - + self.rows.size_of(ops) - + self.cols.size_of(ops) - + 
self.data_root.size_of(ops) - } - } - #[cfg(test)] mod tests { use super::*; diff --git a/core/src/keccak256.rs b/core/src/keccak256.rs new file mode 100644 index 00000000..800b3b88 --- /dev/null +++ b/core/src/keccak256.rs @@ -0,0 +1,49 @@ +use scale_info::TypeInfo; +use sp_core::{keccak_256, Hasher, RuntimeDebug}; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; + +/// Keccak 256 wrapper which supports `beefy-merkle-tree::Hasher`. +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct Keccak256 {} + +impl Hasher for Keccak256 { + type Out = sp_core::H256; + type StdHasher = hash256_std_hasher::Hash256StdHasher; + const LENGTH: usize = 32; + + fn hash(s: &[u8]) -> Self::Out { + keccak_256(s).into() + } +} + +#[cfg(feature = "runtime")] +pub mod hash { + use super::*; + use sp_core::storage::StateVersion; + use sp_std::vec::Vec; + use sp_trie::{LayoutV0, LayoutV1, TrieConfiguration as _}; + + impl sp_runtime::traits::Hash for Keccak256 { + type Output = sp_core::H256; + + fn trie_root(input: Vec<(Vec, Vec)>, version: StateVersion) -> Self::Output { + match version { + StateVersion::V0 => LayoutV0::::trie_root(input), + StateVersion::V1 => LayoutV1::::trie_root(input), + } + } + + fn ordered_trie_root(input: Vec>, version: StateVersion) -> Self::Output { + match version { + StateVersion::V0 => LayoutV0::::ordered_trie_root(input), + StateVersion::V1 => LayoutV1::::ordered_trie_root(input), + } + } + } +} + +#[cfg(feature = "runtime")] +pub use hash::*; diff --git a/core/src/lib.rs b/core/src/lib.rs new file mode 100644 index 00000000..8a8d3a49 --- /dev/null +++ b/core/src/lib.rs @@ -0,0 +1,155 @@ +#![cfg_attr(not(feature = "std"), no_std)] +#![deny(clippy::integer_arithmetic)] + +use codec::{Decode, Encode, MaxEncodedLen}; +use derive_more::{Add, Constructor, Deref, Display, Into, Mul}; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; 
+use sp_arithmetic::traits::Zero; +use sp_core::RuntimeDebug; + +pub mod opaque_extrinsic; +pub use opaque_extrinsic::*; + +/// Customized headers. +#[cfg(feature = "runtime")] +pub mod header; + +/// Kate Commitment on Headers. +pub mod kate_commitment; +pub use kate_commitment::*; + +/// Application Specific Data Retrieval +#[cfg(feature = "runtime")] +pub mod asdr; + +pub mod sha2; +pub use sha2::ShaTwo256; + +pub mod traits; + +pub mod keccak256; +pub use keccak256::Keccak256; + +pub mod data_proof; +pub use data_proof::DataProof; + +pub mod data_lookup; +pub use data_lookup::*; + +pub mod app_extrinsic; +pub use app_extrinsic::*; + +pub mod constants; +pub use constants::*; + +#[cfg(feature = "runtime")] +pub mod bench_randomness; + +#[repr(u8)] +pub enum InvalidTransactionCustomId { + /// The AppId is not registered. + InvalidAppId = 137, + /// Extrinsic is not allowed for the given `AppId`. + ForbiddenAppId, + /// Max padded length was exceeded. + MaxPaddedLenExceeded, +} + +#[derive( + Clone, + Copy, + PartialEq, + Eq, + PartialOrd, + Ord, + Add, + Deref, + TypeInfo, + Encode, + Decode, + Default, + Into, + MaxEncodedLen, + RuntimeDebug, + Display, +)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct AppId(#[codec(compact)] pub u32); + +impl Zero for AppId { + fn zero() -> Self { + AppId(Zero::zero()) + } + + fn is_zero(&self) -> bool { + self.0.is_zero() + } +} + +/// Strong type for `BlockLength::cols` +#[derive( + Clone, + Copy, + Add, + Mul, + PartialEq, + Eq, + Encode, + Decode, + TypeInfo, + PartialOrd, + Ord, + Into, + Constructor, + MaxEncodedLen, + Display, +)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))] +#[mul(forward)] +pub struct BlockLengthColumns(#[codec(compact)] pub u32); + +/// Strong type for `BlockLength::rows` +#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))] +#[derive( + Encode, + Decode, + TypeInfo, + MaxEncodedLen, + Clone, + Copy, + Add, + Mul, + PartialEq, + Eq, + 
PartialOrd, + Ord, + Into, + Constructor, + Display, +)] +#[mul(forward)] +pub struct BlockLengthRows(#[codec(compact)] pub u32); + +/// Return Err of the expression: `return Err($expression);`. +/// +/// Used as `fail!(expression)`. +#[macro_export] +macro_rules! fail { + ( $y:expr ) => {{ + return Err($y.into()); + }}; +} + +/// Evaluate `$x:expr` and if not true return `Err($y:expr)`. +/// +/// Used as `ensure!(expression_to_ensure, expression_to_return_on_false)`. +#[macro_export] +macro_rules! ensure { + ( $x:expr, $y:expr $(,)? ) => {{ + if !$x { + $crate::fail!($y); + } + }}; +} diff --git a/primitives/avail/src/opaque_extrinsic.rs b/core/src/opaque_extrinsic.rs similarity index 95% rename from primitives/avail/src/opaque_extrinsic.rs rename to core/src/opaque_extrinsic.rs index 926b3c6f..7d14e459 100644 --- a/primitives/avail/src/opaque_extrinsic.rs +++ b/core/src/opaque_extrinsic.rs @@ -1,6 +1,5 @@ use codec::{Decode, Encode}; use scale_info::TypeInfo; -use sp_runtime::traits::Extrinsic; use sp_std::vec::Vec; /// Simple blob to hold an extrinsic without committing to its format and ensure it is serialized @@ -51,7 +50,8 @@ impl<'a> ::serde::Deserialize<'a> for OpaqueExtrinsic { } } -impl Extrinsic for OpaqueExtrinsic { +#[cfg(feature = "runtime")] +impl sp_runtime::traits::Extrinsic for OpaqueExtrinsic { type Call = (); type SignaturePayload = (); } diff --git a/core/src/sha2.rs b/core/src/sha2.rs new file mode 100644 index 00000000..9b16040e --- /dev/null +++ b/core/src/sha2.rs @@ -0,0 +1,48 @@ +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_core::{hashing::sha2_256, Hasher, RuntimeDebug}; + +/// Sha2 256 wrapper which supports `beefy-merkle-tree::Hasher`. 
+#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +pub struct ShaTwo256 {} + +impl Hasher for ShaTwo256 { + type Out = sp_core::H256; + type StdHasher = hash256_std_hasher::Hash256StdHasher; + const LENGTH: usize = 32; + + fn hash(s: &[u8]) -> Self::Out { + sha2_256(s).into() + } +} + +#[cfg(feature = "runtime")] +pub mod hash { + use super::*; + use sp_core::storage::StateVersion; + use sp_std::vec::Vec; + use sp_trie::{LayoutV0, LayoutV1, TrieConfiguration as _}; + + impl sp_runtime::traits::Hash for ShaTwo256 { + type Output = sp_core::H256; + + fn trie_root(input: Vec<(Vec, Vec)>, version: StateVersion) -> Self::Output { + match version { + StateVersion::V0 => LayoutV0::::trie_root(input), + StateVersion::V1 => LayoutV1::::trie_root(input), + } + } + + fn ordered_trie_root(input: Vec>, version: StateVersion) -> Self::Output { + match version { + StateVersion::V0 => LayoutV0::::ordered_trie_root(input), + StateVersion::V1 => LayoutV1::::ordered_trie_root(input), + } + } + } +} + +#[cfg(feature = "runtime")] +pub use hash::*; diff --git a/core/src/traits.rs b/core/src/traits.rs new file mode 100644 index 00000000..b58f127e --- /dev/null +++ b/core/src/traits.rs @@ -0,0 +1,31 @@ +use codec::{Codec, Decode}; +use sp_arithmetic::traits::AtLeast32BitUnsigned; +use sp_core::U256; +use sp_std::{convert::TryFrom, fmt::Debug, hash::Hash as StdHash}; + +pub mod get_app_id; +pub use get_app_id::*; + +pub mod extended_header; +pub use extended_header::*; + +/// Header block number trait. +pub trait HeaderBlockNumber: + AtLeast32BitUnsigned + Codec + StdHash + Copy + Into + TryFrom + Debug + Eq +{ +} + +impl< + T: AtLeast32BitUnsigned + Codec + StdHash + Copy + Into + TryFrom + Debug + Eq, + > HeaderBlockNumber for T +{ +} + +/// Header hash. 
+#[cfg(feature = "runtime")] +pub trait HeaderHash: sp_runtime::traits::Hash {} +#[cfg(feature = "runtime")] +impl HeaderHash for T {} + +pub trait HeaderHashOutput: Decode + Ord {} +impl HeaderHashOutput for T {} diff --git a/core/src/traits/extended_header.rs b/core/src/traits/extended_header.rs new file mode 100644 index 00000000..43f7629e --- /dev/null +++ b/core/src/traits/extended_header.rs @@ -0,0 +1,22 @@ +/// Extended header access +pub trait ExtendedHeader { + /// Header number. + type Number; + + /// Header hash type + type Hash; + + /// Creates new header. + fn new( + number: Self::Number, + extrinsics_root: Self::Hash, + state_root: Self::Hash, + parent_hash: Self::Hash, + digest: D, + extension: E, + ) -> Self; + + fn extension(&self) -> &E; + + fn set_extension(&mut self, extension: E); +} diff --git a/primitives/types/src/get_app_id.rs b/core/src/traits/get_app_id.rs similarity index 93% rename from primitives/types/src/get_app_id.rs rename to core/src/traits/get_app_id.rs index c9b9541f..88f84a96 100644 --- a/primitives/types/src/get_app_id.rs +++ b/core/src/traits/get_app_id.rs @@ -28,7 +28,7 @@ mod tests { impl GetAppId for CustomAppId { fn app_id(&self) -> AppId { - 7.into() + AppId(7) } } @@ -40,7 +40,7 @@ mod tests { let custom_app_id = (0, 1, 2, 3, 4, 5, 6, CustomAppId {}); let default_app_id = (0, 1, 2, 3, 4, 5, 6, DefaultGetAppId {}); - assert_eq!(custom_app_id.app_id(), 7.into()); + assert_eq!(custom_app_id.app_id(), AppId(7)); assert_eq!(default_app_id.app_id(), Default::default()); } } diff --git a/deny.toml b/deny.toml new file mode 100644 index 00000000..e16848b1 --- /dev/null +++ b/deny.toml @@ -0,0 +1,290 @@ +# This template contains all of the possible sections and their default values + +# Note that all fields that take a lint level have these possible values: +# * deny - An error will be produced and the check will fail +# * warn - A warning will be produced, but the check will not fail +# * allow - No warning or error will be 
produced, though in some cases a note +# will be + +# The values provided in this template are the default values that will be used +# when any section or field is not specified in your own configuration + +# Root options + +# If 1 or more target triples (and optionally, target_features) are specified, +# only the specified targets will be checked when running `cargo deny check`. +# This means, if a particular package is only ever used as a target specific +# dependency, such as, for example, the `nix` crate only being used via the +# `target_family = "unix"` configuration, that only having windows targets in +# this list would mean the nix crate, as well as any of its exclusive +# dependencies not shared by any other crates, would be ignored, as the target +# list here is effectively saying which targets you are building for. +targets = [ + + + # The triple can be any string, but only the target triples built in to + # rustc (as of 1.40) can be checked against actual config expressions + # { triple = "x86_64-unknown-linux-musl" }, + # You can also specify which target_features you promise are enabled for a + # particular target. target_features are currently not validated against + # the actual valid features supported by the target architecture. + # { triple = "wasm32-unknown-unknown", features = ["atomics"] }, +] +# When creating the dependency graph used as the source of truth when checks are +# executed, this field can be used to prune crates from the graph, removing them +# from the view of cargo-deny. This is an extremely heavy hammer, as if a crate +# is pruned from the graph, all of its dependencies will also be pruned unless +# they are connected to another crate in the graph that hasn't been pruned, +# so it should be used with care. The identifiers are [Package ID Specifications] +# (https://doc.rust-lang.org/cargo/reference/pkgid-spec.html) +# exclude = [] +# If true, metadata will be collected with `--all-features`. 
Note that this can't +# be toggled off if true, if you want to conditionally enable `--all-features` it +# is recommended to pass `--all-features` on the cmd line instead +all-features = true +# If true, metadata will be collected with `--no-default-features`. The same +# caveat with `all-features` applies +no-default-features = false +# If set, these feature will be enabled when collecting metadata. If `--features` +# is specified on the cmd line they will take precedence over this option. +# features = [] +# When outputting inclusion graphs in diagnostics that include features, this +# option can be used to specify the depth at which feature edges will be added. +# This option is included since the graphs can be quite large and the addition +# of features from the crate(s) to all of the graph roots can be far too verbose. +# This option can be overridden via `--feature-depth` on the cmd line +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +# More documentation for the advisories section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html +[advisories] +# The path where the advisory database is cloned/fetched into +db-path = "~/.cargo/advisory-db" +# The url(s) of the advisory databases to use +db-urls = ["https://github.com/rustsec/advisory-db"] +# The lint level for security vulnerabilities +vulnerability = "deny" +# The lint level for unmaintained crates +unmaintained = "warn" +# The lint level for crates that have been yanked from their source registry +yanked = "warn" +# The lint level for crates with security notices. Note that as of +# 2019-12-17 there are no security notice advisories in +# https://github.com/rustsec/advisory-db +notice = "warn" +# A list of advisory IDs to ignore. Note that ignored advisories will still +# output a note when they are encountered. 
+ignore = [ + + + # "RUSTSEC-0000-0000", +] +# Threshold for security vulnerabilities, any vulnerability with a CVSS score +# lower than the range specified will be ignored. Note that ignored advisories +# will still output a note when they are encountered. +# * None - CVSS Score 0.0 +# * Low - CVSS Score 0.1 - 3.9 +# * Medium - CVSS Score 4.0 - 6.9 +# * High - CVSS Score 7.0 - 8.9 +# * Critical - CVSS Score 9.0 - 10.0 +# severity-threshold = + +# If this is true, then cargo deny will use the git executable to fetch advisory database. +# If this is false, then it uses a built-in git library. +# Setting this to true can be helpful if you have special authentication requirements that cargo-deny does not support. +# See Git Authentication for more information about setting up git authentication. +# git-fetch-with-cli = true + +# This section is considered when running `cargo deny check licenses` +# More documentation for the licenses section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html +[licenses] +# The lint level for crates which do not have a detectable license +unlicensed = "deny" +# List of explicitly allowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. +allow = [ + "MIT", + "Apache-2.0", + "Apache-2.0 WITH LLVM-exception", + "BSD-2-Clause", + "BSD-3-Clause", + "MPL-2.0", +] +# List of explicitly disallowed licenses +# See https://spdx.org/licenses/ for list of possible licenses +# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 
+deny = [ + + + # "Nokia", +] +# Lint level for licenses considered copyleft +copyleft = "warn" +# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses +# * both - The license will be approved if it is both OSI-approved *AND* FSF +# * either - The license will be approved if it is either OSI-approved *OR* FSF +# * osi-only - The license will be approved if is OSI-approved *AND NOT* FSF +# * fsf-only - The license will be approved if is FSF *AND NOT* OSI-approved +# * neither - This predicate is ignored and the default lint level is used +allow-osi-fsf-free = "neither" +# Lint level used when no other predicates are matched +# 1. License isn't in the allow or deny lists +# 2. License isn't copyleft +# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither" +default = "deny" +# The confidence threshold for detecting a license from license text. +# The higher the value, the more closely the license text must be to the +# canonical license text of a valid SPDX license file. +# [possible values: any between 0.0 and 1.0]. +confidence-threshold = 0.8 +# Allow 1 or more licenses on a per-crate basis, so that particular licenses +# aren't accepted for every possible crate as with the normal allow list +exceptions = [ + + + # Each entry is the crate and version constraint, and its specific allow + # list + # { allow = ["Zlib"], name = "adler32", version = "*" }, +] + +# Some crates don't have (easily) machine readable licensing information, +# adding a clarification entry for it allows you to manually specify the +# licensing information +# [[licenses.clarify]] +# The name of the crate the clarification applies to +# name = "ring" +# The optional version constraint for the crate +# version = "*" +# The SPDX expression for the license requirements of the crate +# expression = "MIT AND ISC AND OpenSSL" +# One or more files in the crate's source used as the "source of truth" for +# the license expression. 
If the contents match, the clarification will be used +# when running the license check, otherwise the clarification will be ignored +# and the crate will be checked normally, which may produce warnings or errors +# depending on the rest of your configuration +# license-files = [ +# Each entry is a crate relative path, and the (opaque) hash of its contents +# { path = "LICENSE", hash = 0xbd0eed23 } +# ] + +[licenses.private] +# If true, ignores workspace crates that aren't published, or are only +# published to private registries. +# To see how to mark a crate as unpublished (to the official registry), +# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field. +ignore = false +# One or more private registries that you might publish crates to, if a crate +# is only published to private registries, and ignore is true, the crate will +# not have its license(s) checked +registries = [ + + + # "https://sekretz.com/registry +] + +# This section is considered when running `cargo deny check bans`. +# More documentation about the 'bans' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html +[bans] +# Lint level for when multiple versions of the same crate are detected +multiple-versions = "warn" +# Lint level for when a crate version requirement is `*` +wildcards = "allow" +# The graph highlighting used when creating dotgraphs for crates +# with multiple versions +# * lowest-version - The path to the lowest versioned duplicate is highlighted +# * simplest-path - The path to the version with the fewest edges is highlighted +# * all - Both lowest-version and simplest-path are used +highlight = "all" +# The default lint level for `default` features for crates that are members of +# the workspace that is being checked. This can be overriden by allowing/denying +# `default` on a crate-by-crate basis if desired. 
+workspace-default-features = "allow" +# The default lint level for `default` features for external crates that are not +# members of the workspace. This can be overriden by allowing/denying `default` +# on a crate-by-crate basis if desired. +external-default-features = "allow" +# List of crates that are allowed. Use with care! +allow = [ + + + # { name = "ansi_term", version = "=0.11.0" }, +] +# List of crates to deny +deny = [ + + + # Each entry the name of a crate and a version range. If version is + # not specified, all versions will be matched. + # { name = "ansi_term", version = "=0.11.0" }, + # + # Wrapper crates can optionally be specified to allow the crate when it + # is a direct dependency of the otherwise banned crate + # { name = "ansi_term", version = "=0.11.0", wrappers = [] }, +] + +# List of features to allow/deny +# Each entry the name of a crate and a version range. If version is +# not specified, all versions will be matched. +# [[bans.features]] +# name = "reqwest" +# Features to not allow +# deny = ["json"] +# Features to allow +# allow = [ +# "rustls", +# "__rustls", +# "__tls", +# "hyper-rustls", +# "rustls", +# "rustls-pemfile", +# "rustls-tls-webpki-roots", +# "tokio-rustls", +# "webpki-roots", +# ] +# If true, the allowed features must exactly match the enabled feature set. If +# this is set there is no point setting `deny` +# exact = true + +# Certain crates/versions that will be skipped when doing duplicate detection. +skip = [ + + + # { name = "ansi_term", version = "=0.11.0" }, +] +# Similarly to `skip` allows you to skip certain crates during duplicate +# detection. Unlike skip, it also includes the entire tree of transitive +# dependencies starting at the specified crate, up to a certain depth, which is +# by default infinite. +skip-tree = [ + + + # { name = "ansi_term", version = "=0.11.0", depth = 20 }, +] + +# This section is considered when running `cargo deny check sources`. 
+# More documentation about the 'sources' section can be found here: +# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html +[sources] +# Lint level for what to happen when a crate from a crate registry that is not +# in the allow list is encountered +unknown-registry = "warn" +# Lint level for what to happen when a crate from a git repository that is not +# in the allow list is encountered +unknown-git = "warn" +# List of URLs for allowed crate registries. Defaults to the crates.io index +# if not specified. If it is specified but empty, no registries are allowed. +allow-registry = ["https://github.com/rust-lang/crates.io-index"] +# List of URLs for allowed Git repositories +allow-git = [] + +[sources.allow-org] +# 1 or more github.com organizations to allow git sources for +github = ["paritytech", "availproject"] +# 1 or more gitlab.com organizations to allow git sources for +gitlab = [] +# 1 or more bitbucket.org organizations to allow git sources for +bitbucket = [] diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 33edbe83..1e255123 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -1,72 +1,76 @@ [package] name = "kate" -version = "0.7.1" +version = "0.8.0" authors = ["Denis Ermolin "] edition = "2021" +license = "Apache-2.0" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -da-types = { path = "../primitives/types", default-features = false } -dusk-bytes = { version = "0.1.6", default-features = false, optional = true } +# Pending to review +poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", default-features = false, tag = "v0.0.1" } + +# Internal +avail-core = { path = "../core", default-features = false, feature = "runtime" } dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2", optional = true } 
-frame-support = { version = "4.0.0-dev", default-features = false } -getrandom = { version = "0.2", features = ["js"], optional = true } -hex = { version = "0.4", default-features = false, features = ["alloc"] } -hex-literal = "0.3.4" -kate-grid = { path = "grid" } -kate-recovery = { path = "recovery", default-features = false, optional = true } +kate-recovery = { path = "recovery", default-features = false } + +# Parity & Substrate +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "*", default-features = false } +sp-core = { version = "*", default-features = false, optional = true } + +# 3rd-party +derive_more = { version = "0.99.17", default-features = false, features = ["constructor"] } +dusk-bytes = { version = "0.1.6", default-features = false } +hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } +hex-literal = { version = "0.3.4", optional = true } log = { version = "0.4.8", optional = true } +nalgebra = { version = "0.32.2", default-features = false } once_cell = { version = "1.8.0", optional = true } -poly-multiproof = { git = "https://github.com/aphoh/poly-multiproof", default-features = false, tag = "v0.1.0" } -rand = { version = "0.8.4", default-features = false, optional = true } +rand = { version = "0.8.5", default-features = false, optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } -serde = { version = "1.0.121", optional = true, features = ["derive"] } -sp-arithmetic = { version = "7.0.0", default-features = false } -sp-core-hashing = { version = "5.0.0", default-features = false, optional = true } -sp-std = { version = "4.0.0", default-features = false } +serde = { version = "1", optional = true, features = ["derive"] } +serde_json = { version = "1", optional = true } static_assertions = "1.1.0" +thiserror-no-std = "2.0.2" 
[dev-dependencies] -criterion = "0.3.5" -itertools = "0.10" -proptest = "1.0.0" -serde_json = "1.0" +criterion = "0.5.1" +proptest = "1" +serde_json = "1" test-case = "1.2.3" [features] default = ["std"] -alloc = ["dusk-plonk/alloc"] -parallel = ["std", "rayon", "kate-grid/parallel"] +alloc = ["dusk-plonk/alloc", "nalgebra/alloc"] +parallel = ["rayon"] std = [ + "parallel", "kate-recovery/std", - "hex/std", "once_cell", + "hex-literal", + "hex", "codec/std", - "alloc", "serde", - "rayon", - "rand", + "serde_json", "rand_chacha/std", + "rand/std", "log", "dusk-plonk/std", - "dusk-bytes", - "sp-std/std", - "da-types/std", + "avail-core/std", "sp-arithmetic/std", - "sp-core-hashing", - "sp-core-hashing/std", + "sp-core/std", "poly-multiproof/blst", + "nalgebra/std", ] -substrate = [ - "da-types/substrate", -] + extended-columns = [] maximum-block-size = [] [[bench]] -name = "kzg" +name = "reconstruct" harness = false diff --git a/kate/benches/kzg.rs b/kate/benches/kzg.rs deleted file mode 100644 index dacaddf3..00000000 --- a/kate/benches/kzg.rs +++ /dev/null @@ -1,210 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use da_primitives::{asdr::AppExtrinsic, BlockLengthColumns, BlockLengthRows}; -use itertools::Itertools; -use kate::{ - com::{build_proof, par_build_commitments, Cell}, - config::DATA_CHUNK_SIZE, -}; -use kate_recovery::{data, matrix::Position, proof, testnet}; -use rand::prelude::*; -use rand_chacha::ChaCha20Rng; - -fn variate_rc(rows: u32, cols: u32) -> Vec<(u32, u32)> { - assert_eq!(rows >= 64, true); - assert_eq!(cols >= 64, true); - - let mut dims = Vec::new(); - - let mut i = 64; - while i <= rows { - dims.push((i, cols * (rows / i))); - i <<= 1; - } - - let mut i = 64; - while i < cols { - dims.push((rows * (cols / i), i)); - i <<= 1; - } - - dims -} - -fn generate_matrix_dimensions() -> Vec<(u32, u32)> { - const MIN_ROWS: u32 = 256; - const MAX_ROWS: u32 = 2048; - - const MIN_COLS: u32 = 256; - const MAX_COLS: u32 
= 2048; - - let mut dims = Vec::new(); - - let mut r = MIN_ROWS; - while r <= MAX_ROWS { - let mut c = MIN_COLS; - while c <= MAX_COLS { - dims.extend(&variate_rc(r, c)); - c <<= 1; - } - r <<= 1; - } - - dims.into_iter().unique().collect::>() -} - -// Commitment builder routine candidate -fn bench_par_build_commitments(c: &mut Criterion) { - let mut rng = ChaCha20Rng::from_entropy(); - - const CHUNK: usize = DATA_CHUNK_SIZE as usize + 1; - let dims = generate_matrix_dimensions(); - - for dim in dims { - let dlen = (dim.0 * dim.1) as usize * (CHUNK - 2); - - let mut seed = [0u8; 32]; - let mut data = vec![0u8; dlen]; - - rng.fill_bytes(&mut seed); - rng.fill_bytes(&mut data); - - let tx = AppExtrinsic::from(data.to_vec()); - let txs = [tx]; - - c.bench_function( - &format!( - "par_build_commitments/{}x{}/{} MB", - dim.0, - dim.1, - ((dim.0 * dim.1) as usize * CHUNK) >> 20 - ), - |b| { - b.iter(|| { - let (_, _, _, _) = par_build_commitments( - black_box(BlockLengthRows(dim.0)), - black_box(BlockLengthColumns(dim.1)), - black_box(CHUNK.try_into().unwrap()), - black_box(&txs), - black_box(seed), - ) - .unwrap(); - }); - }, - ); - } -} - -fn bench_build_proof(c: &mut Criterion) { - let mut rng = ChaCha20Rng::from_entropy(); - - const CHUNK: usize = DATA_CHUNK_SIZE as usize + 1; - let mdims = generate_matrix_dimensions(); - - for dim in mdims { - let dlen = (dim.0 * dim.1) as usize * (CHUNK - 2); - - let mut seed = [0u8; 32]; - let mut data = vec![0u8; dlen]; - - rng.fill_bytes(&mut seed); - rng.fill_bytes(&mut data); - - let tx = AppExtrinsic::from(data.to_vec()); - let txs = [tx]; - - let public_params = testnet::public_params(dim.1 as usize); - - let (_, _, dims, mat) = par_build_commitments( - BlockLengthRows(dim.0), - BlockLengthColumns(dim.1), - CHUNK.try_into().unwrap(), - &txs, - seed, - ) - .unwrap(); - - c.bench_function( - &format!( - "build_proof/{}x{}/ {} MB", - dim.0, - dim.1, - ((dim.0 * dim.1) as usize * CHUNK) >> 20 - ), - |b| { - b.iter(|| { - let 
cell = Cell::new( - BlockLengthRows(rng.next_u32() % dims.rows.0), - BlockLengthColumns(rng.next_u32() % dims.cols.0), - ); - - let proof = build_proof(&public_params, dims, &mat, &[cell]).unwrap(); - assert_eq!(proof.len(), 80); - }); - }, - ); - } -} - -fn bench_verify_proof(c: &mut Criterion) { - let mut rng = ChaCha20Rng::from_entropy(); - - const CHUNK: usize = DATA_CHUNK_SIZE as usize + 1; - let mdims = generate_matrix_dimensions(); - - for dim in mdims { - let dlen = (dim.0 * dim.1) as usize * (CHUNK - 2); - - let mut seed = [0u8; 32]; - let mut data = vec![0u8; dlen]; - - rng.fill_bytes(&mut seed); - rng.fill_bytes(&mut data); - - let tx = AppExtrinsic::from(data.to_vec()); - let txs = [tx]; - - let pp = testnet::public_params(dim.1 as usize); - - let (_, comms, dims, mat) = par_build_commitments( - BlockLengthRows(dim.0), - BlockLengthColumns(dim.1), - CHUNK.try_into().unwrap(), - &txs, - seed, - ) - .unwrap(); - - let row = BlockLengthRows(rng.next_u32() % dims.rows.0); - let col = BlockLengthColumns(rng.next_u32() % dims.cols.0); - - let proof = build_proof(&pp, dims, &mat, &[Cell { row, col }]).unwrap(); - assert_eq!(proof.len(), 80); - - c.bench_function( - &format!( - "verify_proof/{}x{}/ {} MB", - dim.0, - dim.1, - ((dim.0 * dim.1) as usize * CHUNK) >> 20 - ), - |b| { - b.iter(|| { - let comm: [u8; 48] = comms[row.as_usize() * 48..(row.as_usize() + 1) * 48] - .try_into() - .unwrap(); - let dims = dims.try_into().unwrap(); - let cell = data::Cell { - position: Position { row: 0, col: 0 }, - content: proof.clone().try_into().unwrap(), - }; - let flg = proof::verify(&pp, &dims, &comm, &cell); - - assert!(flg.unwrap()); - }); - }, - ); - } -} - -criterion_group! 
{name = kzg; config = Criterion::default().sample_size(10); targets = bench_par_build_commitments, bench_build_proof, bench_verify_proof} -criterion_main!(kzg); diff --git a/kate/benches/reconstruct.data.json b/kate/benches/reconstruct.data.json new file mode 100644 index 00000000..dfa6a80e --- /dev/null +++ b/kate/benches/reconstruct.data.json @@ -0,0 +1,135 @@ +[ + [ + { + "app_id": 2867178220, + "data": "94461b692904e3288a40893d750f842f0f1f72e2d1d305c0e53e20b04abcb9224aa4c91c1dc391f2702ef3a0aab4f3488945bd0005807499f3f3fc0b395b607ed35d2d4bc6ac8b9ab0ff5cb36f58fd2237144c312dbc658e11fec7990febe8ffe4373e33bcfb5189a690b11e473aeb6d57787ef6ea0909e7988d993e583f589b31e8da63fe014db6d1fcadc4a6e99b15d21cfa5ff00cd93d89224b7bfccf7cb44f9b727bf7994b849300a8a4254feab27c9fc3918e4206febe64daa2b5f715fc6763d4fc1ece9be8424ab1db4bb843d097f66568101e586e47b220cf61a0ec635e0cb4490abaa4fefbdad6588eb3e670d1037845257f98971e014f9079ce507f660bf27d25704908dfa2520a92dd06feca0d8737a7c774ceaa1ba9887ff398da09b21bd78fa8dc835a5731d4a4914eddef16209d14e319a809306b62180fbf8d6fa5662e4f1ab09a1efe358a9a88a52393b825120648af932ba1dcd2d47a0ccadb0ba96e10d04d02afaeeee7c332560ebe54f7697ffb9a405398cc489dcf4812771731e9a39b375b37a35bdec4180fd0647f0daaad1327a7f1f6053125a8d64956123fa22d1cc2528f595465b924ed14142e97e0a92c34fbcf76a199c2fe84efb4cc7de2f0024ed5b29b0b81e2786652a8fceac787b23054466151600b5ecc47abd930b80cc78f6abf2811f6f93d33600fe3bf22bf8087d3d39df459170a7e7c26e3f143531208b2702002937eee2b5acdc2bda278c23455b14b060b01a8b9aa57e8ba499f0a38d429872e7701bd8b1f8161aaeec6f46d5f9c996fd83053f9dd787a4586107204d5d0bcb8abfe043bdb5c01d3b8fd667e8d8fc6e8ea7a2e8eb2fa9879b9d2ddaad1bf8550c61f7ac853eb8e9b708eb8eff4cc7adfab147dc355d27ddb0ce3cf78106c871855e1f9cbb340ed0652e691ef657f5a19f2f3f710f668121dea55727497633773b5e0abd7acf97d313139be57ff556a728933b1fccf3203071ac494686343530ac8b5a31951a9ba86048870cbaf626417b8278e8382dabf680da5d8d9de5dfaf6ed54b321c794dca67c10bb0e7e4e9e4b5a2e2edb9f5b5ed5188e7694488b39da2e8a0266569dc08e6e06a68e6980853
26b6b456d89993e72bddcf522c1f70a1d986c54bbb8328893e56a7fb58ec162dc5b31fefb94c417ce6bda86125b6b0ef4d97fef83bfa38b901f8b7bdce5d0b27c841dba04a99b6b0d88a9d5ae387f193bf4a40e2b4f301f7e63195a1102ec9f5779c9cdac0bcdc0c04c318a848bb018903e225df771fe92bca9b592681f584b9cb484eb2bf6cfdcc616cd08e16ff306b67f09b18279f3ee8fb30bddff62251452482b25980a08c6fa1d8d3e0118204269323e61f43e513f14c6a46a638a1159abe7b1acacbfae6d057e7eebcb03562aba7460a66fa1c547e857b31faea87a6e028fec4d3f05550e5e7af60fbb6e793ecd9bcf85b36a6244995ec33a85d627d9fdf47f185d4ad6fc90af245c6ba5b74bd69e28d29cb311da691308e7a89888dd54b8f4e760c8b809ef1a821507ba26dbdc411af54fddd9d8dd36062fa7f39b4b8293188813d7d93f74a7eade8b8132ab6a393fe4a92ee3eeb1526a0dab793ba41e6e92d9" + }, + { + "app_id": 3120385832, + "data": "978c8270499be4097e6873e513e863f307dd58362e735f8c63c43636c8d301fbc007fb7ec140abc2f1db6d90410e2a3a8fc2bd58cf2aef3545144acb66231943f7b8647872aa2869010aa7ba6a018239b3357574df6f831d9f0f4e6f2859b6387ad5faf7a736d9a88d61495d55091fea38480e956e5df3704ce43a0502ca21584ee570c7cf350f51b613419af2da7ef852a04d3663cd44d1ef07d61f8179f42d999c82c30b87beff1859375401b432cb9627a3d66bfbf32394824a1eaefae7792d6332c711f2ef7cf00b5b0c4b95954f510b9fae330751974a01563c74e6917c2e24e74ee74e35aec557ceb8a69b1d4340efb86acaac7f16d5eeb76126b6af0a966ddfdfc81149ecea188f7d519b50c58a82c2bbdd7d88b9285374d7db22f310a8488f21b238f6aebc9eb1c868b3ab5ba7026cc0c4849e6eb2c64c0df6684ba3523243ca63e044691b0f3e7a02aa4e751fa7fd4e11b69b6b76f302f1a82f86a8bda291875a5f65cb13d60f31a177f68eebd7703fdd6debc299e4f03ca40d0fae82ebb62d644d63608229b831c1f484f8c3c104a141f67d895a95cb4270253021e502d096f9bec316ce7add2dc733ce8a936189e9ce58cf2b6f4165392e6fcf6e162f0813ba7d1f5cc68b033418881c804f2c1988946b141a85212093a01395baa62e611a04169d5c1e03817695edc2f66190da077ab5b6efe8f1a20a294ded7334a054b22f722a7ace2d7b2a111819eab01e06447f2e1567dcf4de32998b3743dd64307a967f9c4d510a4fdd858f0394d4d69d4b7438a7c429c9d1d88fd9bbdd11e88e2e3aa70daa971b933d05c2b7931e3ddcf602c89f55ae58e5a5fb8fc990b9a722a73bfd2b33bc215dd2de3
920ce335193f51bdc69f6e1f2e627558a8bfc1acc3e4a4df7ecea489c43b6cbf747f975f9abfe82403ce9de4474baa06296fe71db533ca049d2fa0e29cd4dbe857520cc60209995e333a7ffed2cc8f5068a283d672394d4a5b1a06861a233727e8ca55abe385efa8265e10edb95ceff8b92860d6b9af584e0bcdebb019e471250d39e8dc741ea6aa9b041046102fe5ce71d732f3d86cd00199311964bfe0364533c2ee34bda9fad96d3965ab021bd4720542aa6992faf94745b37c03376e3b284c98911511cc487cfe6bb53223a0cdb45f615f8ca3f5354f2dc7124f139cbd5a0987a5085c9df8face25a209f10ae522ad32e3f286edf400288cf1b5dfe345e7e5d3a8fa9930e79921b9993b7e507537357a9749e3bf41c9121a34cf335d835f6a792a005be878b5e03f56dda628f5bcc20a79e4c0cf5991125274163fb87fd7aa9ab80ff8a232fa7b00f347b0d8d35dd95919bcb9448a755de2469da82fbd1189ab297b23e7e610a42de808dfe3985622f2aeab646f2467af690d3801272995573625fd2f579efa632a1c0f008ea17075e61a9018e9daf0bcbf5338e10217160cbf0f628c2255948cbfa5667fb8fd1895d5d354b8c09ce97a0658153c20136dfb04f71c2dc3fbcc60192428962552e637e5a8b2ed68d8c35ba11749f28f67bc99d83ae04d65c0b0117da75d170fd7c10d06bc86159322a2ccf129da67418ce4ecbd4b635c95941e9f9840fb124a0be6a94a4c1b7a20aeda0b5c23f1c6fa180eb3dacad9df04848a38ef687b4948e8787b2006eb6f3ee9ad666f9d3fc8953e3a94d44ce35ec66a9fb5489e5fe01b0016ffa1078ebe888ca282152155a31affaaf76f2" + } + ], + [ + { + "app_id": 3570869364, + "data": 
"e9fd48e7a698af62c4c88692b9bcc576b31b048b393d9eb36be9ca767b58a05fb486d05f15f803b0967418ca49aada021e29c52a3550b182645222d5118fc5fc80b30d9b496143c79f5ff67ca959dbee3624a8c6ecb0cf4352e01aeca6b4a738d9ec22c732ec9c7e74c3d0270e6cff9c30bcbbbb715c7b6c73991c3f6b5894526b49bfc9ca7db2cefc6c369c28f6ee213040f1787e2c2c9751e5c40b3c386ebb7c6268b9d6adf2f34df24ca03e8021b894463b1c1e93a47574b5c001fc9f281521f5ab905c9cefe593d30c16071f2b4fd27d3f204854da10a37a378fdb999e9cd97f22079a2ceb00a206c370e5df3e78e0a1b4ef9c5c1933817a4c9280826e35f1f7ae824482c4cec068d19ec80e735ddb94a6ccce117fd5853bc061e71ce04789b9235cf6314d05ad0a17ed2dfa3ce3d7bf1466a6a6b1fef798dc1408cf2c51eee6e5de0e4f12f105cf9b99d0285e48fbfa9330a0b3faa4cbd32829a1cf904400c77b47508dc68f43d9b7066dd41d0697b2dd6a5e2dfaffe17a6884fa58fc383f5e070cf6fe33afaf1762d4f0e204558444f51f6d71faedf49bc51f64744cfdac6dc2fbbc3dbaf7bbe7638b27bbd5f559d3a4fddcfd04cb5dc5ca16cd5dee85097d61f7421b9e86155b4571fc0d91a1a634e0f3b01e5fbca9de94873d8905bde6c54c619ad15d5aad3238db60093f43e383a028091c4c0d27b916ae6509461b51cf276787c04c28bba9718b2393a7e7c6b93cd89ff5c0d547df92bcdf0155ddea23fae8e78f040082986fdcf45ed7f4c1d29ad19d746e139919f8a30423224444225dcddbac71c012b21f791d42f31ef287d1cb7f9b6c44f4154875c2146830ab48a495a836cb9968b76fdd11feaf5a3dd979e7ffb780c6b9e018550a7532cbefcb65ce1dd0be2afda3782346cd0a239969805d541c5d329d5bcbbb3d3e0576940ffe7124c56331749432ad719f721eea19438cd3d8c4c3f9ef59d3e495293d061964e10d26edb44e94ab74fe6fdb0de1b51f7304d76fd9f77c1d50dc51d14a94d2eb6784de3692f61073c4725a2f" + }, + { + "app_id": 3698898287, + "data": 
"1e7edb5645169db65b5a4333d7e24a7d5b6506e775b18765b3edfb523f9c3a2cba797479cff44fa3f6e2332b1a473cf7f2a859aa404d3a777580d9d11f0566366b6247e417b6c57a9c1cab0e64f74411aa7539002e3af3c64c8af860778c6ee51926a3d377e1d342aed4c2e01d061de793d96a1b01d78a106d44c4d57b3db5228474c5059ea7ecf735aca67bde00d6b3e69d9b3cdddfa918c4772287ab357abac966e789ddf139d69cdca3807cd38819d7c253868669070a34bbbeac4b1ef92eba5974110695e8eb19c5e82f4d0dd9da0a06c7865566ab9864cadfed43edda9a2021f882fcbd84f71049cdb9cc2e420972c83ede19e9459b8adf268a721ab1f1c8664df331f729ec69fb2b79054401c414b5dc525e4dd981483225ebc1e31886711fd504f11ee09fe9e33ad513f28923476f42b7974945623d" + }, + { + "app_id": 4086489077, + "data": "a2227840b7b420b07595ce1508a118714f1e68ae67441f09ca2dc106f8b07dd537993827d3d269663330112b01e315c77bf001dd16eff454c9d05cd647acfda96c9a3fc8ab2ae041cf94b74193defa170889f467d994999252c35235e2fe55da08b9288f0d58baf742343cec985a5f8858c276bf3c831b7a999203667be28406e4dd791322e9d833c5da477085be2f1e8432cc063ed018184acf6613efcff7b2ff501005a54abf6afb0d9325c25771e09229c1338ae88506271ba9b7e448c3da7fddceb6b0f694cb3b39df2243dcaa727acd3024c5ca8d3e6ec25ea78377a4f48ed78886da41b59495b959ff3f333c0a8e265a1a2fd79a0e28af2d41b59abc632ff7c9a39fd64d53298e27506eb9022217af934b1c7c90ca2fd9de1912071f80b748d3f13d9f8f2bfaebe468775af842212c44010d0fa85f1d3202cd922c1e12b6568def37b71e66f4eb20d36de2a27ba398911a6e49bf2540c9d6055f87630711283d9a1b6debce7a1e4658251faf8763b9b1ff6a8604ddee4395e3b73afc720617c69d4906a0202e6bc0aafdab1737b205614e0b3fc65b0c068908ff4c890f24dc5ded60d1a73e5380998b171fb66a8be4cb808f6aeb98548263b5804335c630d40335da18072d34206ac845db8736016a2ccd4d0a6b4846eed735c55c9b8458ca40cd3b72ce1110f2701f10bf4b6c0337a0df239762184e02519e217780c9866749d29bae993681694f53ba170c0f7911acc850d3eb145f119d978026878d598357efb0122fdcc381eb54508d1deaa7c983bb7b1aadce3eb0e6d5a9e1c0eea7e39190f52050288853f61b618445281af9c93c0912db987210ef2e156ad136fd3d5e54af173f76aeb1f65e8e69d4e923a06b1b6b40f89104cc06c4e9e27266ecc1df1964cde62c1e465a2103b556e2e977574a68a8a96
106b196d6b21d278f91ea56810ffe5759ad1f5e174756872fe96408d4b765dd9609c91f86916d07095d9ddb631c41846948a779ffeefb39cb290da757edbbec73b28f86086e25b7b46ad428138ef5021201c4cf0ec57e4fbc8031e79ca60fda331e077fb015be6ebb382a098f85fdf15867a8a85ca896cd23a90ca7a7650cafd46a3089e8edfb22ea890551fba73205de8d07646995e6342d0ff911edae53ec050523f6bd6e080b76fa483d57ed75e825b2fa669bcb68394a5a3ca21a426ab2ca241beaae7f1d816b2079f60062ef2141cc86ba23edb39e19df5e2292a97e4a291c2580e1d8ec714d1243beda5fee6c46daac4d64ff9addba07a3268848ae90283e9ec10b21776aa261ea6f562a5bc69aebc01ba869d7b6b20ff86e2bb76ea29902c8725f5e5bb56929766798622671d4e9a5306e948848b701b4bef2d5b9a7a7ab88a831fc3ded843149c7c802481b84388aa6f78ff9923f5caa3b0ca9d157b9a72abae3150d3083e4291b81671d2379bde07865244aeb0023de0bc2845ba6c090880720c1d272e63aebeb5db8695840cd26c979797d8fe6d859eb7a37bf7e362a79a31f44321cb2d6f3d9c44109791deb40fa4a480ea0a1813b53525f24ad26193b6fc632f51729470de100ba90d27bfc5e05997a86be4bdbc03416d2ee4c4aaf9acfe0095a8d165bb799f479d2ac0d22ba211f6c1f61f99d26d333843baa7975dba9247f682bb2e204a16acfea127f359050818df01ba7f7619440406c0a4c8645421de5eb0ea9ebe70f96bd2efcb1218aa300d2ed7fe1c53dcfbab1619959b8d6a5810de8c5f30ebe1260a3607c2e3b249a97fd464edce42e0719a6396efe7b591eacfd76b1425de327882556b7fba82dec0b2ef6b1181ec13e4acbd007bdb4f17f2b0249b7cd660b00352f464a3cfce1a34c56a0602a9761f30d550a135a5326ddf1effe19b1587201f2bf7e3a132ccd16280695c5683b55703b1f8a4ce89897d9f9549c5bf53509cf4d6a43aabce807be0469a8c07125190db84f7f740f26438519bba0e05fbd6c987a42cade13977632d9d831a3a0fd04c3b588310e4db8599019573a2d724927154dacb3179178a9417f1b04e53192f587421334b31341cd77bbb2d6ff50e1b65d4d57b869cf8abda3764462a52b705a14918737e93f69a17433dec67bea6d1ecb91645ded5e5a1750a4ab395f4430a432538f38c9e50941c8000a2951727d9a523a97310bafd77af695fca81a2b72fda797537a9d4cde474faccb009a19c3a4ba15be4a1b9d49029abb94c30b20cac72e00abc2e42982f432b18622ebd67b13d9e1d92d3ceb50305bc3c67b896306a433a9457921c85f09b91e38cd1c50d03670c39a8c8c924848505e10202e6d4070875bc31a6a19c07c056520425b6
d0fdc8f828a004e57dee437a21cca505970bb7468418309c8caa63554cfa7feca46878ae60b3495b8eb0453302295561de840dc0aa9b6fa9136494a0243c995fd571cf4ae1e30d37b8a360faee63eea920186b297078ebc58e65b6d62977787da91c2ef99bcbc3f50abd680f9d712b5472783d9be572c6b49317046eb6e6f8fad8dc9755004e17b4c6fc5c752e27f46a863508c0a2008462436e3fca4a144e04d5a32132c2e99bb49aada2b8fe74af15060724e5005c26d76faee8be01a05cc5f986888c1e21b3d70e146abad8d9478112250cfbd91f7675ab7c10f1ec7f1c3e625d75ba8047cb73e534e4a53b09ab08cbe3b1cdeaf6a714da5657117a9e666cea5065fcf0a24f046e12b53b2fe206decfde30be5219b6c6e38354f31b78070fe01044a70d052b7522bacb070d3e4262a7fa3f7321797792286870e0b0e806ee9d4eabd92e0ed9193b7247a5c0bbb025435207ed100e26091e76fcc809d1efc5a765b2154d1d70fdbb5c08805bb200e047a" + } + ], + + [ + { + "app_id": 912747417, + "data": "5cfa2b9e618e7ae0cfb2af0e6ee9f33677e3b4e513efab8a7bb4c9befac1953d72f41198959e47a1b199a5aa213baeaf070fa517a92611a9e7ead08295c243f025ceb58da6678af48eb5743d6c3964247f2028e4ab0485df1c12a897fb98c411dba7414010b2f14ebbb16882bee720df5e55f73b628d7855d008c21e7788e289bfd99c3427a849594038e02a2faea45bb31d902876bae22da1722262c1536f6d42aa839e29ebcba7c6835c0bb47d7651e1495bd62c3f01d93c23778146f33feb8f0660623141e2212b2d15a3195410654f803f0c765378968aa111e70d2febc426bb14290188a67d5733c1630da41d932f48917b5e3e3ce05370d2de8ca28e8b970e9bffef2f135f0195d9f51220c3a6ce5bfbaeedb9f5b8f14686bbc88249329e258ce2dfc4375d906496df835edb26e65be0fcfd93f0146116c5b555b5ffb6009407841e9880fb95303f08bef10b4c98a1d592979f00fd0ef451583a4a3c60a9f2e1b4c1084fec2ed38fa2a06ff65dc736d9037b99143ce264511c9d2b9e16190f5ec05c49c4ed2b31d8037564f91b876123a0a6fa4cf2f29769e790764c97027ddf51eeee803744e5a2597f317f4241e476ff642269f8e32d76ab610cf52a7b4b3970259d5f93a0f5c8c2a1c38a4bf74517d56efa2247256a4170b1dca0514e84ff7df496aeef0ca2823f7bfdad2346ab6cdd17121ad005de6a7e8cd19d6d02311f1bfe7d085ff468e92deaf5d8ac27b5458a6205e2e2d899334578be9aeb2d8597bde5c4d4111082996c3b95759a681d45143ac3ffd88cca866d60a2b0301dc4636733cd0ca21eb01c52b99792397f69bad37ec84da9f3cd00
cc6f9489c6b60cc53aeb20475ce1d9a58aeb1bd5471203e2810d386edcd04a40f1dac82ed5104bdeb4c30df5b8a459d5ba00ff90ac9e05b487f7eba36860d6060a0d86253feb044e830c94d16bb0f6d4c153bacb3d541ac17f4d9d8bd46d6e367158d700e5e76d5c1f08c969b4c4d1ddc8025169f02c2412fb46452d3d729df59a4843b397e0c6d8421f87dbc4a065c2bc4012662b8e24e4c60087545f2e08a6029c07f9f579cff6d127d3d08a7b9bbb92e2392a92cdcd353958b148734cb209d1470bcf6fc01f97fa076acab2f698477d058b8048a1ae15b0afb89d7be071a9d2f636a1c65337ca5f603e6ec4c115f59266d12ecc8a6e6df351cb74d93b2479cbb5e2f0a9e42bf8df7bce0d4a8986328abf7792a141d160a177e5d9c9850d041225565bab3874675e578483ea12be82281d93fb8a879edf9f4000c3d682ad7bc123d3b7579b" + }, + { + "app_id": 1380428063, + "data": "7b4dccd49bb5dec165febc1915df957be3392886b2f8bea481737a397c3fed7c3bc593d52caadc83396c80cb755d3c0aa302f1e433e92b394509c380114a58aa25e6e2d7b609ef13e6d31a208ef0f881ced3cb0541e423cf4c401d1736bc31d978b52d242ebc906d59c996a91fd6916e095f11bea2adeb6b5c6ecc59d6e82e524376d7a9132cad915ba56ae17c854aa83282e33e726a03012843c943f82ec55180a54b6de9bf2e43c0eb6f388e9faab5d18d81aab1c5ed87f30467be76098a70570148f8ac020379e67521d02fae3b2a06bc22a422c21cb67aa675885373cf4c7bd2024378ee7a7329694048bd55fda1271d41bbff666dda6696b2ec823b43227184438a31adc2cb118e2c9ed150d64accd2efdc762bc359cda610aa55d9409f2fe910a1bb688b17e6e59d6f3743c6e0cc9c866f5b6748f8fd5e75517fca70ee4019ed04e0c6c11f678c61bc17d0588ac6fd1b5e9bafbf29ce8ffa20b1f39426123ca6603cc5dafda0dd9c27453ac026256368e6cce227dc55b58417939319436a1d9073c1b7116883c671503112f3309bcfac9cfbb3ae6f274ca24bf209721d4124e343f02cc8770affc7af5deb6fcff415a8ffd4b183386461b332195a08e3756687ef8cdad75f96277129fbf0d15ec574e75323adaeb7fa0bd0deab80163d21e5587a6a81f59ada826497b6ae20861304c8e3b3b1088bf07923515b37d1423d57e72f3b777d0a91259587e529b8c2b791089b3b31922c721f096b85f086d01448f0550f0fe7395dd8bb0c634e8210ac61f245dafd118f98838746c1dc744a4d957342afdb338191f14cd560ea41ebc8edfeac5761a9470772328666d8b744cd15bd47d297859dd8568cf56e8e4ed89a1c7b3361f704ae97df0990573770bb6a2a2ab04f89a34a24f7aa
b7036c2a2925249f7ca4900a55a3bd023be33c58b4220179bc87ce17855c9d70c3e39da0a59c36fcae1c49bd8ccd31fdd544c20ddb036c8cb9a1beb69228040f692dab92d40d8ea615c30cd44ad9b81b3ca9c3464dcf1f691fc44de5a4481366120dfd35936a1617b2ed92a6494cf4ca18a81cc99268d98dec85e0516df4e3dc8203371f8df4f38e037fd6433dbe10bac3efa09af4c478d5a2046869427caa1b058d64486bcffa4cf6cdd160174f46d35a31cf123a887c2b9c5c630cfd7d04fe6fb176e66b1eea16ac61421d274f7b6299b761361129fdbebfa9de6ae542de0dde62608d7f9954af8a5de5c33c4af138829183e64c2f1841caf3ef1b14d100f61742d12c46f667344a88cd46ee303fcfb5352e974dc7b9649e6a1f3419124c9b319cd847dc14f21f850e47fc5d27f6b1a42fbf188c151a237833961f42d5f90a642dffe327a0ca28fe73aaaddd538a4ec2a20e096e65c7959f3e10b8d7ec44a0125642c7b206b4f65c803d793962fcccf6e507b77c064bf7bc248e63333d698427d060f9a58eeafb579ba0382faa00db1be78a4cc4d508d51df1324861294f0bb1114b5995965f36a776a291a1e418aa12250664e8e69e0eea73fe73bd0912b357aeca72274a257487b250948f74a5ec120cc758864aa8962ba983f53730149df35db56e03f69e28816241c35a37eb7260b278133bee303b36f1c07d1bdc91f81e040cfd4d68987e3a702836eb6987cc1e23c260b7bd39f0431490fca7af8f0ef8d9d65bec10d24ef8144fbe95d4efe200205f314e460edd99495cabccd70bc4e44e983f0c73a013b667346c77eab2f309e5c580445c3db0dbeb8e49dede41c0fe0179fe1af106ede3e67e020424e48debd72a1fb95c79339e7db75fe8bc6270be72c869c6fe2be4f0caebfd6b685833572256dd7c070a4f1f06b33285de122cce7b1ef008001ac91bc102a941163041c70ae936a4f6b30a42549a73b8a1a1c69735d4ad652ef059dd3db665370e98d9707c6850ae86608b1ebeb9df6a62db29b18244ae5d8601b935890b02c8dfcec2525990e3c47c06720dd95d05ca3544ea4c16e4fcf21010edb9c06e3f953bca4aded0145da40dc0381a6477efaca487eb761081cccf5a8d06ae0962b7184937b49674fc95b146ce53d63deb23ee5e895b2c6c95127dab30934e77d8bef399a9d7c925e97c71ff963e9441b7e28b683c31a7d39fa62790e7e9dcd4d9e4a7fe48ee46e7e4edfdaf91013299bed0585906f2089b3fb4b8d2de4d86c56a886a26a0db91d9bd2c3fc6bd34ef5e80ba77a41b3c1b094848bf97cf1a84fa8a3308840f8f353af16c67a0397ceae0c49d38d07e80130e94e33bad83739accc8bb94689a02b82fcfd4cd588d81877eb3168a7c6f15f0a72912a8bdfaaa
5ced46674cea37312f51b469dbff9f1de68b54eeb683fee40d49e293a7d3d209bec81cd5dabc8f0016f2199ef82817fd259c4381fbece2a95d4394b66803b275d14a0456de141825b8f41418c431dbcdd0d2814afbfd860a3398eaa2a5ecc389ddc4e15bdf437ffe25b9653d06179a887db739026bcd4c24892de19bf665a3352e405363ea70b1900fa5ccc38faed147ecec15ac0d49a0b289cb4ad0f64901f72bf75abe70fb59785095159f685981f5b935698c0260f977d17ebe8f30cbdc56d94f901b" + }, + { + "app_id": 1432371538, + "data": "42fcccf25c2d1dcefe12c2eb00b918ad31407c3e12d182bd133a254015a9fdd10c2cac3edfde06aacb7e5e4394e6eca9444679e615e048a887032618a9048a0c97b8b0c0fb0b6ba09e7f0d32fedeb15bc08e9f3e3001d83cb199d9214d3622011d0ae03bbd5ea66cf9559cc2b1210a0a5a92383cd97764f82a388eec8f3c481b50343c145bfe7bee56c91ec18aa712375bb68dcf074abde3c0d4d3d5db54ac28c1c46d16090ff3e45d304d0c9bdd2ad1011588fcfd5dbeb343e6f6e4ef9e4cd66ac94857d15f782dc215d5e54084ec11c2ea6205c9db736d46568a32dc77abc83a9677bf9b77eaefcc6dd5176208a20fd49c5c6bb080d3905be31ffeaddcfe2c6a69d18a938eb4639950ec19ba0c18507e8312f99ebb6715d0ff5cd81ab84bc7c602f51907cf8515af05ef3f058eee2983a2709e792b210e86311ced80d4f197f7799ce6179387ea240b915f9f3c83549d64227c929c272fc315c61c6ae42fb17103a8e83a42968d97735e425379204002a6756c42f2cf7b15ec4f47804cb159aac93fc442e6f3ef49df245bdf242e90b746b63b031618e26394df9598bc9be24a9207be2fcd92cea283660b68479563622245c5ca25413db246134885631be68a29ac83cdb85b133148eee299289518669c4f02a35240483294fe12faec9f89749d92fd70d203b485168403566dd356ebaad4943f7add5688138e6bec3da426d387ea8e31e0af41927649bed1933d8d6c1c9a0498f8690967481b9a91906721b29ae49a13119825d1871cc76380b7bbaaf88a786bfe7b86003598898e86d76bc02df9a8d8924f7315f816d8bd6c53934a30365182c5ddad438e8369abf12acb95f5d9195b958e464b9d895966f9cd134e7fc88a83fd871740fda0711975a2a5b508e4e68e0bda2d066e7817ab85791703be37e93dd1fd3d26418f64b314f49745706a85e967725dee51fe836ed76ed9324e5ef8c4e2c5b9cd47954cf0321ed056f644e79b9ac85d13443e925201d8b4c31b587372745b2516c58ff69f5b3ce6e9fbe480ba3903d5caa78932e108f8d76b6e579ab45244e46d5bb4c95a24a53e3b14bcaccf57a7163b94d6a0fa
570e0de957c08668899bf74c0dbdb85e09c1331fbabdb310017c3d4eeb51ec0bf6afbdc86f8bf05efe4f6e751549894618d434333756735b94b02823f6c2ae10" + }, + { + "app_id": 1892003797, + "data": "278917d5da02c127e0f2f2e85fc4f66284117cd946a837861bda5c3c92534cc64ce4055f54362f48c407cf31c2d9ab05cd77f4d91f45b74ac4b510b4025a19d3205a3d34aea51c0d59fdcd7c45d67ea5bbcc6cb4d0a7223eabfe7a64e4fcafcb6de7b03d0f5581355d54364f50e6d56cec51fb8c6219422392bcc69925eaf1dc5ab5a9be8d34892c0ba9ee648705bd33d7af403fc34a883f96df7157022a56c0fb49b207cec2e1d754c61f2fafb87697b6561d77439dd50a79b657f6f4ac4fb9f954ebd1284d692aea8ffa5021b3691bac24e905c1fd83e5e4bc8ef84a1a44c8090103f97cf44484056d76c5335ac95b73944e0aab83900771a4f162b11c1392993bdc7996dfa133fd194f443143802be3304cfff6c6b85452f39225e9556d5aa5d28b397110dec0e8be3720a395f35daa526e7cdcec1b1075a0946f011ca197c136ae84d768863852804a0ecb501030f229b38920079d22b945b8e4a03fb1155f5445b207ba4f5a1b1868367e1002a9733abf56b9bc7371ae03f0102defc16d5930df4f79236e04a21f8d9bc6d8a2b9b2ac4ead3b18fa6054638520a5a5b8464a39a890b4483c9fb0f7a90ec1680364297e436fca55442b123947d8849170b7a161922a053caf3d6fcd015d9d9447dd146e9f69f97ed5cfcf19d8be240f9c43356ac7933e32ea99bae100bbf11e2fa20bfffed312f41f5fa6f77f7c0c08b3ff3e7266e3ad4bfe3a61cf36e77aafb16b6f9a8aa0db2d0a390a01c119d05142ae50d24399ced2411514db0123ec6b49c612c82bd3c3936bf98fc0af121da9d1c4d463dba46d708960ed6bbb7670fd72689d41c982fe812a540f08c0c08522ed0cdd2116e3da3a67c48d35bfb6b0a5cbf8bcbbbf66d36cd21e7186e0ea154552fbe6b83743d5f4befeb4766d5a8eaea800085fe0d388ac835c6323a2928a33cb403b94e9ac70ef213307f8b9abf1e395a444b42a9ee6914e99fe8e41d9cb621c894ce6b334520403079731f7657c0d61c030b1f6b71b90398f36d56ce8783eef17d3b46dd9fdea0dba83ca4937c3e777cfd18109ab5708bbe6f8e53adce4b0837a19abab1abcc98f127e21cc9a72288bf4234196061c3be4406bd44554c7235184d5c61cb949b174604c1485fb306106e3399120b11d1841bbc9dcf30d5fd13fb27d8242d305bb1cb07dde87a42ac09b3d8e4c9326b02d388fa88600743cee6c5c8a977bb7d3c5bcfe325e481f138ae8465848c28b11d3c4b6e2a301d03c4fa94c67386cb1633f1db81f0d7113cc4d086af9
04d606cb35165968533944d63873ea90bebd045bdc00b81c1612c154b96da3183854fed595f1e0b2317e7a76ac20569aa0d5e15dc63ae652d0997536a39fc91d04a4cf3dffe15a5db008b32c7b4b1ccd8af252c681e9da330233ed5146b25c0c38bf5d76c63faa42c0dbb3a75c41b0f1c1c75c7d28bdcd5e2a53161ed48dc48da965e7efea9c923e9295c8a7ed2a62035310851ad87b2feb30e8a435341d60c7d253db83294f368167fa352333" + }, + { + "app_id": 2008533748, + "data": "b1faa5a0adcdddadb9b67466a2b20eb2176a22116b108e1428b03c63e9a6e2cf8954aca0ce0c2307f499974a9c688db73751a2ea43d27a480b79d0b8d6da977034d61655f9621336f7bdc0a2c2bb2937bc172176fbc7c04f28fa80645210e0a8e06d1a0cb6fbdd3169ebf5c28d252ef4633026de48d48aae7405986f4d63ea1bd50e250c6ff19a825101a1cdd2bf4e4910104e19aacce296741fb5c5c5525c42e18e12fc16d182d0e2ee47fbb847d2b28b15c8e1018213d080c209e67332291ad625bed0a226acbf8b43e91df995661d289284c5757cd85d36f99c4284a9338e1b09677a61d3c7ecc2e35354ebd834c52612f14accfa55159c5523c9ed336a45659c818c2babd6edc26b1f2c23ec7a6890847503b1b30b5774ec1cd654f10d6278dafb9004f1d2ad0cd6aa59d55542cdc41c3a333205c0e2bf0fec1e88c01c49febdd958b35aa9c489aebd36f9142f5eacb52da5567109f86ffae1259d5721228fccc7b2ce85db68a56cc500909ba3539bace80e62f1e8d8e623e8e4c0476e33b25982e7d738b6f4057fd7eb9f0244e66cdc69c969b8024bc9c2dd787b375e77abc63106a082412f54e3888d55d06a05df3b72b54c0740dd49a67b72ea8a71f65afeab0caf9aa12cf1e45cb6caba12ac23f9af2fc2e3dc650012d4acdf04f88d5a73a9b674a10a759b4b1221bc4bade26af66cef78edf53cba127db3ed91ee1e4b7bdccf64c4f786f09c6a967c4a1abea6ec8561af72248bd4b11f357d694c94bcfb0a8c032019cf6ee3e59f9a6713ffe772f06138df626f59843f91f6cdaa97e0ea05fe1b98b06248021d5e43c41223da46489efca630c9001d23c1cf0b6346a3c982914c6ef588920f6712817ee262f66037ec84601f03336cc98cdf0f42f82f82459e0969c9a1cc3648ad3f5cd79d5a6613e9669cf8a819a6248025674f0fae6d4cd0015bb2edb586885a22310f11d78767ea98320f50ea95edfc9ea52225362c98d1f55ca6f1a7462bf622bd2360882830c7a3789a191e3f5e75ad3c11ebb7fa0c48c5b3a99b399906b78d8ee3305280de2c749d86658cc5fc9ad75383da78f7993c41526f9954b69f84852b333fee6c1e565e2cfce9fc604be1b27f6587f791f786
fb56adf6a50eeab7de228e47c009f8b17308eed67dbc2174fe8b583c60fa420c364333e7d632403207e8adc53b1a0504011032ed7684f34fc330a6a05d249facd978035eb3fca09e138ca83f3f77fdb80a28583f9889879c0a81080386df075faa633ec5f4dd56a5f31c7ab1d159d1ef3f31ca9a6be3f11f793ab5e6e9cf9f2eaf393236115684faad9d1c78aee5b8c0007bc181a25e847cb5e2a6d2ea91a5d091c765eb7a6804fcb05fa7c96f5433a22b070c6f67080738f48d28eea7b705270e2c6b0dbc00826995c77dbffc2632169c1b4c41027b642cdd34d3699c5b2222405dbe276cec437be68eb097054029fc541d6d10803b53d5a5ca0bea0afd5f4e86dd4b82c5172954bca2d6e252c70e569084ad1bff86ce062f7e5cde981c6a95cc" + }, + { + "app_id": 2085427659, + "data": "b423cb3a0881e77e1bc7a4498a8a012b58437455e04f597b44fe4050eb42cc12d698fb1c8da1a88cd21597787da1140275a4604445f7cfa746885fdfea22d797b4a7d6260a707df7706f948f9da6ea4552d8bdc86d09ee2a4e2d8f2f5d6f524fde92b42451aa428a151cd69a035b84ec12113f2f9929b073b87898a0d9adc7a86a83cc46c35a154ce4171665430a55d660b79c550ff66766756a1c7bf368794814dd02d40e59771e1ff9c3d59f1c3c09e760a8ddf90300695ffabae00ab566fdeb538db919422251cd84bb919b8644d53e511e254e980cd7fa74b0838e9bb2d78d8769463be4e1753c8fb086bd14f4234baa56f6a416be3e088dc011a07827c81eb658df8c3daac3a505cae3f5c7e343e67c857c583faa8ef169246ccc1bf4d19eedeacb3a5f6396b07e4a986e77070771fb29ea73923891d7b067ce484ec88c02bd041525b028210f125f314d0472874fcd7ebff343ecfacbc1abb420385595ccc37968215010af35dce339391233d3a638ff8d592b6fc361899955691715535c3e8b82a2d4ab7320bf69e30841a0e7f5f41fcce7abe0f9b1ba151e2499e796170dcd524b3d81accc6f2b3e4bcfbb6d6e7e6e2c0013d56bbe1f688c9a4f05af430f201a28e949e98ff0e5a8f8c4a775b220860cac1f42517e3d706252ce4af1f86d54abbfd705cbc058683a4c3f3f162e4d879c578f3a0a5f9294cb75592a5a2697aa22cfab11c0775472e01a5c718d9e8f5e76d24980c041403dd66afb8ea589a9a4a18f9d8bea3b5ae07291b0c192318d78b6977afbb252e68ef1c23804ebb312c8876d1bfbf79ffb8afbce4da13791c2d0d2977f5bc415a381c6e41b06c033c2d61a43291aa41897ad2d27c1b7d7bc93bff7dc64df62ddcca892014511ee80979c5ad38de7a5607eafad746edeee2d9b22c005bf4b19be8cdb0ca9b1ef8d16a0c8b5f77cd9632f0f7d66a90051a8
7feb589b9cbbae18e9b15f3fed8d13d81860592f1bed54b4c5b25bfaf9a7a5c8466933ad30ae3b64aedb97b657afded76018d7b5a20509dbef416a3deae359d04a62912d1d6418da3e5a8515de5009bb9a2827d8219f8e09604a5e35fee7086a40bc34a01adbe48a05f1d8725669a0364e5acecd127a5871508bf8fa493364d14a5b5a1c0e20c34bb925adc99810d31a5c6fe5b82033502361dcae4675b59eb3da3e89c26db383bb383693994007464569a3aa7cb480cbe077129efc43f4934433aab11cd859d9dbcdcf5d5d33089194e7db69c6b0bf1a9699f60c0ef26757a474caaa95dd198dff9ad142bcb9df3c44fd924b4fc50e20b6aae359821a4e58f55601dc56ced35d272af769e4a20afb8e6f1720ffd2f8786ae72d013b2b3996cb4a72d9d17bb7097eb0ec173ade8c002c9d9adead8eaad18e80783049634b7ea7d6c7e7272846b7ce7baa206b2494cd9ea96cbf66ccfe980ea9ca5650aed5b5bcffb906f1a3a0fb32126d48563d6335bc13f2dc8d38926c5f12cc9a89d7870527d7382a8730075b564a4d195b33835578fb9e4601644912e88228365d6384012dfa3876b652685602d8c0e7f7fba4156a5c8132b902b2049bd53fcf9330c86bdcd2174c5ef3ce9c3363a1964510febd003b4a1c67b6dd21ef6915fe758ef8f59f663af837976a184230542935440a4b64703c7d9b28bc80f815dc9d79f2f8953cf1e82d6e5d31" + }, + { + "app_id": 2231607266, + "data": 
"cb36a6d8bfbf94e77e6bcb7b4244f36761b0b9314f598b2f68f32f7c1bd3372e09ea2bccba7edbe0901a7df5aa11373cd90cf282a5caf17521e144b17fd519287e86fbf8717fb40930fc1cf8da43d7433037483e884b691b1ad73085b751fe5c95f050fd0116493d6455c556ea0daee80d9ee23d51e023e6a2769df8b84c29cd36caf01631c65bce632f374b015e805b3c91f4c945cd7ed8ed3c848d4216b6022df447240c08ae5351bb8ef5ea557303c20be5159dc7e2ea96ea1a29d441c7b0efdf2f0dc6d701a2ea089652fb1501d857f99b88465f1f487820833ccbd48983e687e4c17e6a92aa90b7698ee1693ee5341f5763b2a588071f625d38f57bfbde989bde531404fe2ed8dd98a4573b4040a3d647dccbbacc2bbb34ed058072112fceb2e26995c7b805d969aada8d6d6527e97dd2b5be4b4c5ffa8fb8e0d88c82f2561985fd5141b8391266e87b39fe8ab0a4b1e762c5a28d23896128d2949c7c57ab849758089df909bb3a9dc43393d55ed074b4a34b5e05f87fe1c309722da96d8de725ac09af0f54b949c7ec8010d70bd9c8e6062d764b6519b9a5487947fbb29ecc8629eaf9616ce007c5739138e321edaff310ab36e69d3caac11d152ff18494ed02b455b9010b19bba14e9b811f8c8dc36de6bad6514cc33cfbb6aa85889c3d40d87a7a796f37d0eb4abf570db6379f46b1c8aa23aeb0504a7bafa06926c1e0d88c0d19703ac6a9baaa91556a291ca842082d8ad35be4008bf08c0e9240bea67f3f7a1f04854b9d09a9c33207fa8d2ad4c1d656a7679f9b3add654076791aec99cc96bf6ac1dd816a6d3f08f2f27bd993bf843ced0dfe4d9ca56c13b0b9d6e6c31c6a20a4c3012fbcaa6d016a22148f0b8a65d783e77f6963f91aff4d00803a420ac9263b121843a5a13ce30235addfcf62848d95279c150a6aa2e20280b4f213d536c8daa57b5b0b8336e82aea38265cf20254446397ebfd4889d2af263bd808370a1d6ca50e5112292e57147cd8f3efd0b6396ebbf537b497fddeb14cce474dff57c903bfd16882ac503d5ab31bc03081fdf58b5e0936699d0b2779c0781aadee4c50289364e3f75f8cf1c91888e05f62cc32b328ba95b0206c7ad2249f38a9fea7acacbf8c3ccdec2b069ac2ba96b624456f1f346f85c94b91a784355986bd327fa304b25839cc2f503d3909c86622f2bf75a0a01c52cec89359d339c82fc423cfb835a6a48338af0d9af723cd07c3ceb8ad13a5ebcb3d50206a4d6b63624a20bae6a641da1b0d526413e6ed900b50d2cb11a4cf69df0fefc1b9d3c1a90852e2d5c812cfa97e30185a6d14ef5704e985ef6046ded518125941d5b29c14e1854a4d6930aa59ce059d1dc6d91d47f5bb9bb9300d5d1807bf35fddce1943941b4508d397ab74
a90025a9d6d7a4ad834aa79f320f47c19016e92a87f6a6899458912d032b9103117501a1180a0140047b0a8012e3f63724d0ffeec9111a7cd86c4c34c604b4ada056b2a2aa1ec1afeadc7018b9fe3199379570b57bc74df9ea74f1605a4f5f1f03bad061e8c0f138d2a901da7e1c3066af53b0f6887180d8063d8b9e7d0f934fb5ff66da3f1db60d135aae045d161c7e415aadea944e6749f962fc01e6159b6c871ce6821ab857224ac19c494cc8e416d3b2eb69f765e05958fe0e465dabe0455a352bdc4f19ea76894a70943830285555de6584af15dfa4e456359856b65ccf363fa3662a06f4e464136cb9f7e03c6472e6da8d47dd0f66eed08427e1bb0ad93902643573c30669f4d2c2b1b5b01947144a4285936db4f3649ddb6369420147e1d6b0218356d4e21cb2372eaee4c72a014525ca8ce551a83c419db081bb02d9cd914d2729d0978212dd8f1ad3953a870dec7f9065b551443e5cce33fbeac106fb0203eb9de067f8bcc7185a655a00b3fba88e32f722e42ea47415cc49341346d279aee8d437d837cba2baba3d4e76ea3e10fd13c41f1ba34" + }, + { + "app_id": 3255057126, + "data": "41ddeb850e4f9faf47c0fd5e82f7c299964c1b155f6267263d2d0cdc90064ae55032039a78dd9a155294e4121522c57c9100a96c2d6d28950bd3bf9c0d42b0f843ae2ca9e6b23ffec85a03ccab7826ccc099faf5353b844aee914835c7d02bd2ad65e16e741030b724e5975cf566eac98954a85f2f0e063f93190bcb2f9f638a5c1146a68fe971bee1d07a63efabaed01d415b683a5d03c1019a546ccfad298058d4ed6e930c03242d543d30cfdc6b6c41228bf227246a63080bd43ee0cf6611dfd0d00576b4de146f3a1534c7549623c2b308880870954f577694fa83c0d2be54ff690a59e26971e33e796181108409fc93a7c748df8c715f2b235a83d77f29f1504327bf9ab531a60535f8d8def0d9e0ad80b730791f3c599c56f385e991282fce7ca0de5da6c1955e3dc835b22cb30b5f26b0cd80947b8027bad457d3d997f593f34ef3d0d1f741ce486a02b402d29e4dcba69629425ad2cbfc2fe9d49fbe33bead46aa4ac1abe8946bc0c3432f1eff098f83ead1e07a8d6e4b9540d93e44e85a9db2ec703f05b95ccadd35b2850fbb31879d54ec18b9" + }, + { + "app_id": 3506639155, + "data": 
"c892f9474e222d50d8333e51fb08c90ce69aa46a574ed5a49e1e4321b7db794cc1190bacd49770dac07b6d452e516e833797edf591c5241279ddef33fb7d7a7f43e418db9f1d53b3878f70835cdd83945b15308dd2522c896d51c88d8ea3a203068fe4e7ffacc30acb5e1c6cde84306f4a30d645375210faf3293559a51871e4ad56a6c00571a2f264b96531d4b3151b8b2976e0d5cb77669a9f1b69f08381c6277d3144f3af1ddf1b2100ffb00fb953a7981e1a6f853f9398eb4a3c0db0b313548e0de4be72f8f876dcd893aa567073f368fba508467081cd739b53f6b381ba0360d88576c0c72ef0ddf355b4112b4834f60932ebc41e083ece51cd11ec16fb127bd06f39a6335b19c9b1d98ce10ce619c7253229fbe34c9f24969beb3987a15811541fed99b5b862d38fc4976f5667cf1550f036c156938d2f6056008eb2defe3bb1fee5209b18cb3f66939bea56965cd86faec4dea9e95a1da01a6fcce6c084b0390c4b0df3ddc410f509dceab6da4264d2c86a024269bca8e4e620a3b907f7ccb2ef624afd5da3b0cc75152e47ad0e05dc206bd602d454323df624affcad72f8b50a67a11bf755c6f23e2af76b63e6b5d34665f481c14ef9f584d8abfefa49955b262984e0fb2d4da8d56a18f7ed57027f68c4abdc8d88a8c90af99281cfed40bb9381780c70626f1e89681401e1b66cdc4c23514f4cbcc2f43e7af5eb61ee537e58205ec20c480d0061fa19d34a8920b142ef7d8b83515fae7570f3e2a4aca0038a5c507fdd48ecd009df1477a7cdc74ebaca7e04ce62654dce38e23f2708e1b8f008578d9064933ec27c5fbc08a6d2baea8e09ac5bed5cb91c137a7186ece4fe0b94ec5bafbe8789b6d358fcd273131bd2f0d5738b133a5f409d515334e2782439f944428f2d729db223ae952ed8134dc4e1c3ad65151e1fffa7e0db78c24dbba3509404a2b58791ec47e5dfb48e21a21ff333d65b24f083c10317bca7a1ac57ad9a47606483140d5941e726028074b6068994ad2654022eb60793999a813eba274daab9bc8376c0e6fcd4fb625496b3462e86b00dc62dbd69ce10c4f15c1e812ce8bb083ff402fa173d3ed384becf2449379bd535d5ea43d616ca2a068af4352f4051b5997a5c839f09172a90955452ee316b3e3df717c6457b32824778bf35c725380014363d082f72dbd53b9bceae659e6588bd0deafdf8347fc73bd83d57328a89b258b29345d8c08c4e1392c33810562eaf328260ef78dc0c513506f226a3f6ab94f428d82230b862b5e571083db9c2400678087f36f88ac38563af3e77656435ca3f4b4eceddde0ed8f4b6d0097264895783ac25edcc93f84f21ce3dca27da11fda51636fe2f43eed3f44f725aae577c0c6906986ec58f6a3fd5c16777c574efc1f
f4b276cbbe7a55abbdb6b9994ae087e4ff4f51df5d6f7fb44acb13e6974f8508efc807b80bef8e59ed3a891994990a14688f3d1d037881d18ecf0c2279be68a12c7604763705123aba81ddd474e3169be6e67ab3f941fe16682509d38834405ce9ccc03a405ee8684285ebd51c09eb4d887e79aca7dcf1b8500bbbd0091d7762c995153b6577c33ab9c9f6947c664c57dfa568fb5b8aa496a3a2225c1da3477826cf8f862b09322b4aaf699b8b086b05e9dbe1ce8713cd88884254d42895d5e38cecb6ca1fa7a38b0ad5fa9bc4b3474cfcca26a366f14da79ffdefe412ff806ab9f16cb91db5e163107be002628aea220e6dc91cbc781a0bf74eb085aeef4254abef12c5beff2c6c743b86f17b959b7dd7509853dce39fbb6a7b5d820160afca76c" + }, + { + "app_id": 3828286333, + "data": "1058fd5c9bb07edcbd37a6eaaeffba93eefeba03a356ca" + }, + { + "app_id": 3885071354, + "data": "e615278c6ad2e2c95debb922a754cc4d0fb0a55f85de2554cb9c336c25ec9be2dd98c3bf3c4cf2321df996478e7d10c7acf12bb9331d3d0c7b953db104f0a33c6f80f3fbf5bf25570ad3fb6a7b9b02829d39e5749394947187f981914e8a194ec1b4e6dd053d25d8171eb3bbab2fa7b5aa3a3f65635108ae29255c004143d9f297a59316e2dd83ce16baf68769207ad0deada67c1af71d2eebc9612ce78ce8b5731e763ebe2a5a22282db99e11c519d30b745b749b43ee02b4f8d35b92650ea038870bdca34bf18898f9f2521351a247745cd377eba4d00de0a290ff86274b7f20e33c1b01839a87c96de6ca54cb96e3640f468aeaa927993176a7107f4124e33da379c706e4c4b38d1d1c1e112341b1424e689a10e7cc9626d90437fd07cd20fcfd3866a1c493b3ebe37e4471c2f9bc1957ddbd540ed2f83fea4f5f9f567d0fed776bf17481912b9548a23900b653388dbef4b13dbac490047d05a73d4759b148e490d842ebeddd784a4fa12d5d6e05d7664cd1cbd30a5a3e379a750b13c7fe4fa997a5ba038db884fc34a816949ba9cd6dcdb328a9aebc48bc78cedc89243f5300a06d218938f6dd2c6b7192abb22822a1a3554eb93e6631e43004aed719726741dbbd6349045ca08238dbaa47085573b7db35ee8acc33fffa860db2a7030d3fcbe59b2676f03560eb2844ff5d4e2bd03f4ba892c757824b917e6489d8690866c0abfed6457e0cdd10aca947be5ab989de3a86826b2b4a4efe06da83970196da991a189a1a1c3b779017bed6ad957baa979a3663a2d679116407078805b9235620d7fd8c81a889c6a184638c766c049a7b43d14532bc2c3201d1ed9215a1fb59e373f258499eff95f8eb7b649de3e07197db2e7965f44881fff435ff21370fce6
5e1b15c329bebbb53e4d161f7813b4a920bae532162ea69b06f15fc42a4bffc84e615b05ddb4f8d519a89531632e8ba37f3448ebce7ae3b725574e4e193e02894a511f73669594ffd7c2dc2189df40511d8a18b3603086b1ae6182a00a59a5175287967fe5abec83600103ab227734bdbb6dfa210b09b6abe200be870d0aef6a2d4482e73ba61a4c15f26f0dfd264067ca6feccc5ade7e4f9641d3f1a52fd746c021ca49962af73e0e679b46cd27237065dcef944c2842a7036752ecea10498d408b52e683e0f67a7f7ec89cc06132b54a7ad28678cbcc5f3bf7ce1cfffe3fd29aa5fb683eec929129dafe3d0170941124997090c9a2d473bc82dc5347caed90952a91520948c40ec4022b73e7ba47de045e02f7a138f602c9ae01f46c4274cfac6aba179ab8a2b3ea7a8e131407911b8eaae2822bf64206407e5852e7d27f8a4f109ed4602f0c0760568847dbc028f359970dbf3a4634628e58c562b4eda2f450b0cd0e6a3" + } + ], + [ + { + "app_id": 75043118, + "data": "ad4abd4afcde9b0885c9b62329a7e8a89c81c5782df83fb233e12832b6b90f612194b2e51a5d786b9bfc21410435db18e41e96eb2dfe67c969b86923476f86011de95c6dd18eda17b0e8011bc26843f07ae3ca7b481444d768869afda07a2cad07d45cd8c5d22654043ddea902253bab2fa2271fd13a5a04f396dd4d49135d8489e554517d1c352aa87fdd25bba6fa91d75c37a99560612caf1691d039c04d7e79fb6e2b741a6a80b5f4e1078ebcb1ed60149db232ba2eccc65a4c85c9b19a67e6d95416f1b5c0c3d002b99e8f9d321ea07e1e6ec818996d779206158f94d2a9935544b52128ad5ec7a6153f3e0c38f3fa48c4ae5c27f1c151642412813069cebf902b5015ea22da450cb87fd8a6ad9f40957741ac1e9587e40a3bb256110bcebfa6df404049b069932c53cf230293f0cece8abb3d199c5a3291deae4ac493b1d861bce6d5704ccf3ddc82ede8c60c187ca8c184bd94ae71f4f0daf5ee06055d6a36a37ebe447e915325c12bbb7a9b6850899846a539aa8e2c404e397d165a113e829cd0abfd5dbdcc8abc0ad92f9f95dee19099c680431dba6aca58688b3db5745084f6aab71716c49bc236f000c3c3282263d849ff262519151dcf52dccbeae964f01d49dfb659fa22e1f8d6ad3b85c3d1a81415470210db4524b1cdbe353cbaf28c0e9b7fac955a8adaa848c4c12605f26f2f489bd8aa5fc7ea0aa0788320e7c567aee8921c420b32d67826a8758826ce4821d22bf513c71b58af910607535ef9c42cadf128af49f1aff7fc30b9bd4ba5be66068e7e9f8af29d1debc37d457d99468297ede0c12a3143a5fed2f3ca6a42d3bfd42f549e5133ca881b46571ecdbc20af2e0c479ea
0a2bfc7e12761f8037671b5cf8c1ec5d452d6710a6160ace32b28051f246d4780d7f26f0e30bac2dfe174926685b2c8f523dd82e2a520d8a703d5ec28186cfcb29152f6a3507543d67caa3cf2b44781be11c8a6625e642c96c8177ed92e749a375f24882c894f2fa92b1ee15a90c0c4e24c9cf36453f7ad4d76239eea15a9d09c147bcf0e5c8e35443b445462ede3f6176cbf6149c01781a861f9bdde9bb292d794cdc97ff028c6e6ecde3aeb917761f758e2f5ffeeccec6b54ea8ed7c7d83c9ab6b352ff627d4ac271ca84b841c00cde81535e3c519262d0e2080b46eff8f2c806ca29d0d77c7af8e082cb19a8d86fa51cbd70b0bb687fb58ff8d2d2b4be4f842424c3468e6fdaf24794b1078c6228fa28118ac731056cf663d5e0f69c60c6b0891987d733ea5d7cf4da26d59b552f6202da64e34215df24b8f3b522f7771127a7fc1094476bb264f780e12f91592f4502d06dfbc9117e73bfa2b6a4ce14580ed281613b9d3ae4083e3d3b682807326367a94898d9991b65d428da8e30dda697b5d9d4eba36286f762f0cffe7c85ecfc608c8724530ce5f0efa2ea0aa9e04076453e5c48c3f9fe82b19be84efe2248c97019ecce62c6d264b59248cbf754230620573380c7dd6cf0a5f59288e4b3f4fa59a3ec4cbb5a7532fc7414fb685ca4ae2c3f694a9ce6eba023be021403f4b607e58543a63971c80bc433d63884fd4534c0c42ead9b46ee401bc64e96bdba077b353492c0ceccb44b848749452ebeb37e4d41129a957761809f2ac837722ab2cd1e66640ac2a7d9ca14a1be882005d1491aadac0ad4e8f22eadb6f3864b0a6d13d6ab4a7998b57cf061864f46b5f1f7488b3a09ebe58c7fccb658e1e54671039ccba41d62259185e9a7448f7e77b21fed4716dae5de6991d8a6f7a5f25c899dbe4287fcedac2b0826608aae1df7b7082d2e3beb685225ae29ac170275771e244cde347036058eee2c3b570e123b62098de94100ca9269decd9d0e94b7ce2c02aef3f2dd27c09d97083fff75944c80c7" + }, + { + "app_id": 237249942, + "data": 
"35b0da313cc09598c70cff589ac1c602da0d9666984b608fc85a5489ba00344280ef7d76d9c2eb81bc9b02e7bc4bf28c721990d8abc078f5ac586fffe376fca27088bac1c6517ab07620f6da071330d9bf4e099251552af717e4a62bde4e735365a0f8b2c08ff69c90da11e1a7cd28cfe5cb90c69d206fce9e93191d388bf57e7e1e76665b7b555b7cd83674fc1e240302b4a02a0f4a1b0fc3c0ab45677a29bbf699218d140e397e9ea9ebc81fdfaf458deae0e0b8a9299f8262852195e2ae385a1f4acf14536d9ad6c067cd64f2de0c97d2a198160b57d6f8f8fa5274c80490d3ccfa318fcf0312355e2d6e83a42c9525f970854d1eeff138b2e94efa5388e63b5edc5c3ba343a0708e2a422ee8db6906a2f78025539a1d3fb6e700c0d70c65f19d7214b8a57858655b653c7e62f0a77ab287f9489a6891991d575b4a136658d7233651a454fcdd7366062ac9cfed294a6abe5a6868d54487707e24be00dc5b4ca3ab3680055a62484ac419ecbbeace7848d34ce69a25d4c5706ef8409daa54ef22a9b374ea87a287bb9ccf69426d2617b2a6dfa21e8bd5b46ef55bb039662d1d198084d1f43f1a6f8ead73534ac063e3687458bfa130d2cab1bf8e271ad014ac41e23711d47f6e4fbc5fbd53940bb1d5add280f3b670a24f6b04c2dbe746bb275e1d577c65fb6a547333f31593f1148b4b51afaf60989f2001c30856ebbbfa5996b1dbacd67c9bb010d5fbf77098db3355ac631fbcdc62fe03e673486441d8f4311e727ae6c1e1019c2179eda8781a6018ff9bcc6038c01ef9e6e60b6ac47328a0201518ccff05907a6f8d296f1a4bf30b694847e912d2b08f026963e605afd0eb13010fb811ec564d3bef297d2a5ca3139df5217e0dfe9f9a9d4e3747db7b722155e6c06ff7d679fd17db6b450cb2313917331d3a538cc63d418f2c164d5d738ab9a6205045bb957e685917c0011db3c068ca51b869e057e1e93fe23535ed56b5eac99bdaf92bf938594c18a15768258fef654fc7ebfe52d4677d2a4418260c7410590ec05891c33203a4e9bbce9ad71d2b1f6b9d649ea1142a410b58bebed56f01bedd77665f2c62a74e488a044d68c6beed4e70a6dba62cad1bb4536277bd3627f832bd88230405615ae29620db210664ad3b1f18c6eac9ea94b34c371799ecf7b51a7dcbf2d2bf1702d7e5a5e65a32e2a8355d88521be6110dc97cfacaec30009081db04ae49aeb82c228e93ce24524d3bf1ccdfc67d466a0ffb742bcf53cc8cc8902290f9694a027f70f2fb1d74b21e65ce05ca8695d82c15835be722c6a8ee28da56110f89985d75766d406ee0a40fb5432131779e656ccd16d992decc2f417d633eb5601aed906ce63cb3461e97a03b2763ba818972fa4554fe0519fbb502795b44590
919ecae42e201c3cc3edf479f5e3e54031a81060d8687e5b88875a6c796c483ad585a04eab0de1c858133f5840479c44cca5679002f4585fbc7c429e7706a66f405721fd69d39241473f666df30d742115779712d63342ef197dd1a06dd7fbb96c53f46ac4e57a934a6b26ff66f7c73ed8e5c2d090d67648fa9f6f0fbcc428fac1b325f35d4b775206eb56cbcbb07c70635ae3d47fcacfe5864300f6d4e8d14a04d272a1fce" + }, + { + "app_id": 554175156, + "data": "b510a4a0d25905790b2641b6f08015195f6ee894520f62d6c403e3d406168a0e05c6eafaf69e05121f105226c9497525362deebc047c578c7afda2dfdf45c2784a743bc9f95146ec7d864ce83329ba12877389f95ca902392676ab1e71fedf3e6477d07a81d15e13ec0a05ffb5e1bdefdfa089cc01531a2159f78e445792591d6af4dab408f45fa8c79c5ec4aca39387ef7d78a80aa05b887407a204f4dfcbbfb335b75da9bcc29f738acec3d25bfdc133fc6b86696a891c1a0b4f7cf4c389e7e6270b75c0ed7461172d4479a5950f924aa32e5414ec164aa433c5dc853b73d243b794bbcb7849315b46c45fe1f73d93a682310dad5b587dc4634af5a06dbdc1c03d51837cfdd46b111ea396b6d3ecb417d1983ee1d126c7fa483d648264b6ec447b5eba1c2978112da2a6750b661bec69e95594bd0e19e9ff97942ffea3c5d74c224bc791592f20c6b8e05690f5305634aee99dc595f841043f8fa856938a70820b2188f8cb4ac7511148425fb3fdcd0dbb8923fec648aba3594c9d52d27bf58b0846370a69c00bdf1fa26085131dcf1358ec108529b07df3238dc37289609dbc4438ea2f9c70e392eb62176a479312f1aa43a7879b5f59e1c28cd3a840ccc0a2205045f236d598eb6c72e5ee9f5a14a44b52a60de58894ed5a21de056c8085391140be0a753a8915bbe0257893ef43960ddc3690732455c9c680e79645edb00eb0da29020207181a82410e31d566f22b09f064e8eed1c33b4d0a4d9c45e155b56a98fa7f76db4f8381e73d5cfaa5ba128fd045e251fb65fd1825f5cc709ee7755e794c21a961cbc751060ccbb26d036f4bee41b7726d394d660707d97aabc9bbc20f2a9ff274b3d230e8baa1c5fc897563c1c50b02decfcfe18046885e8cefd17a634fe6bb11f49a27e48ce0ecfca3b888810c01360eca6f2f10b6fcf9a3c7caf59ceb228b2e51e43d8e3abb7e50930ba5354074f3a060691b731eac5ab73f150e3c6d9458176871d15d9e884bd4daa16f9c873549e73e43cba6578b18974c7fadba7b09d17f703f05b11d94777c12269a57ac95aecb612496fd0490a4b7e5c368f3db9ba53e04066bc7b48288ab3586f1b0d3787fcd548e40ed72f5ea554fba9b20fb76c066c2bd4bacb2
2fd836ccfb497e49563801116f1aa2edc2d630020fa48a66c90b6e032fac03cf0d6c243e5b81225612109633f9c7678fad9f750913560fb1098edbde9cc325254a7e2f73e6b565a8a69f8542e7d69ddfa0f9bbbee515dd85f88662196810f39d8fb9d7ac1a452e7be0a3f80b90f7982db0b1f4e46d69b4a03fad52b16e8000b888a7834d760abd2706d641a05614fda3b615631f5de77a000f6a2f979053308ddc51e6c1a12162b7e7e92915d460e2357e527bcda314ccbc278bfaf01a1b6aa66b91b6e5bab7c84e9d6a1a83030fad2bfa37ae628576e6d9fb7a1ce87c765d18920654a6cabb2d0528204eebcc99fa60075320d1a48f9fa4dd082cc18688d76e16767e3b8c430a844d46b0ff3b039d9904fc94951424b55486f0cd0205b1047c80c229113fba5556b4ac78e2e1467c999c1829c1839cfebf3fbae4c962a8866cbe775c471930634462166bef749d1cbe8cfdce1fe1313870c4eb5c5d076d38b7aea561273104a4ff6703428b3b4669e6ddd5613eb4758577627cc43784ebe954f5d85a203e4da5596ac5238bcc60a721c1fdd55afc8e529b3da0d1e5b8130cb19fea58b6b9f68dc45716cb331cb1f26bcf6b8b7a9774dca19448a0a73eb85c4b0f617e267907e9750d4eeb2728d4a8f76dab2deddae0393825c005d5522b86cdc007562b542123bfac3551fae5daf55b78fcd9e5c617d72544254609bf34ea633beeb8bd6122123f1565bc65c1758abd62a947767b156e955cbc99eefb9fe1a4aa9a0513e9b88c7798b38f96810a0a9cd074eceb842c3440f2e001704979044cfec9154035ca1f82ac28cc0cc64d77fbd6732890ba73d86f73adb79f063e8f6c033f51c38e5d68bd8ae7b6f9d5212193249b24f6e7d74568826d59e5f87b3237a14326cbf5f24108a89fd6ba2922069c21b9e3fd6a60215bd725945bcaee6c51a22f9f30bfce0fa92a467f68b47cb253a6482dfa329ef8a6d3f6" + }, + { + "app_id": 652717895, + "data": 
"98362e720ac67151052183f17ba4eb81c7c0ba767aa1ac37ac2b399899ca50bc1566cb8a08a08ae1b26c7ec778013f8b43996a86f780ac9c782f3d1a434d28617b996d3c1a58f6af6c9a35eb66cd1bef82f4bca8d89b9a2c99635fdf230510a77aab0a9497d722e73822a226811ef9fc25827ec09beeb70b5ec320602359b9d534e124b595f6639bc156eb10bc219cdb92f82bb0efeba14047ee86c7e3f01044ef931d6a64af48f6bd461f9a20aa9bc5b2b19b5590cc54dd79d1b0482dfe787eee14a00bd448779403fe0ba42bc2f73b30effbf6bcddf1f715cefd5416e14ad5cbdbc7654f5d0ef37e1289da6d4c57b377dfc29e93afb45990b519d08d599915a927f05b0403d53c6ee925e01c8a9902cd2535916aba6bdb26a1ac1cb9d54e8bec3281cdc48b529e161bf3edc98c3f85ba9457db44e4060fea108c58158e0f2940e2ce9e996739438de52e59f1ff5fd3adc63cab4a475b532998e209d4720337b24b6a45e8152ff626e425ab3e7159d6604a1d342259838bbd50625c71bfd0d4f167ba2881d8ef2d4c996716b8fe6775f76b6d09130a3e3a175af3b35163a4279811c015a5c50b6331064d1afa5395cf4265cb3d8f7a921d37893fcaa06ebd4462a09acc6e2cdb747f4891b9c780e33ce2961d7b968a03bb64b2d832139515af074f45703e6159e90f27bf473eedb492a8691ec0ba25ddf529536eabb6ec5ca977d328c966cccf655d1341b2854df6bc6811f7b7763d254afb68bd2bbe06cd1988e6def8827fa0391a27c29bbd1e8443aab008c79f02ca155ba5a1ce8988bb5ad64e9679a5ca7010a7cfd3d960618726963a8d7df458a21a001f59f0f9ddc0ede830a3f5273d903717bb2789da219541c0c0c7d8091831376637a551ac28797ab11219f52a809478da5575109955bbf07f43132f150e3c17bdc2fe550390123c9631a81b449d393576e04a74718ab1f96edec2765ed640dfe4836375438bbd10d5903116244dd31a1117c6649df40c13bee45bbb6fa135e2e6fe733aae257b4ab9db874051fd9ac29dca7f8438c0ef2907d7d1353dbd0ef61acbc8f2794e083346e09fbd968a8a56a73756cf6b346872b781c6b1bb9b6801cff514a0e308895f73ce8a0468d7a64ee9e1e162002941a65bc8e46c07786cf66e51d9ab8777082d1e5d84117cb879f554d1fcf810a8ccffdb512e9ee177d6b871af0be3f9ec972c85ec4605ef06c6b2a760c679bcb5d269cb5bb63acee30bd732a153c817f1b2fc80188e762251215472480a7af52bc74004ab0cc5abf8170e32cd74dd37923ef9e3550a0ecb7c22b18eff4cd2e1876e688f2cbc497cc98fe02ab104c63eefc592e6e413917f10defb4d96f7cbe55ffdd91a2fea8c5eda3e834877bde98b6d81e521814401a21c6c5
7395de424abbad71a4fc21203cbf434dcf12d3f022f243b601b4aec2744d3ec9c758d02242075f9835c081221ac67ed8f201d19d87fe89e52d7df27323a9638048e343e03b08c5b9f3acfd29bd8aba4787ccdefb7f6e42ba2445d35a80861dcaccba6e24161122d2aa6610372b4b8080250e679eb8a1f0668a6f3e66ea21b80de687e3fcf6f0663c3815a1bac5bd044e90b0171962140e090961d2d8284f4ce7a52279ec098dd88fc2ef0df718ff5090ea685ffaa401f2134775b469e5a315bec4b11db09c40bc7c2592f0c057d8345d4b6bf9bde371750da6a04331f74c7f7ba57ec7c8df047570e65ce1923c3dbb3ab8034b89be882c75b20dc6102ef5bfa9c77ce0dbea3ffdc3ed32f294d31d1abc53aac14060123bcdde3f9e818d8617583afc518619e8b4291ae15cdcd85bf33781363f82e122eff90f80854f55d9f9bfc26b2e09982027c452bdf21ee8d4ff43681a2643efed43f79128f4607183fa31ff4926a7c2002e00a8bc3cc3c3e2fefbbb3bf0d9a263f3814bbdb2a7ff1162ebe656318358f93e732f440004e339751754dd470119500ea5c82a0245f1d2e21eb1a046134b017d7931d84d73b139e6e46f7d6380a9561f2c67a2e123c9a4597b617876fe1029ac12765ac8cb2efa1170c929af5eec39cb86849651ccb871d4097bd493f3b605f39daf54594b051b686492ddfc252cc3f79ac24246263" + }, + { + "app_id": 1104624959, + "data": "1113b808ef2675ef89058cf67411a9e8a9352eaf7ad5158e5e407efe02644403e78743feb61c1ac56d1ba824dd6c34c464abd7c6de0e5140e8bf9781c16a5314af77cfa2765d411cfa077ca3a3725d8efd2fba36521b18d926a553e3f51c76264d8ac65ee2d33ac48c29051abfc280" + }, + { + "app_id": 1460565589, + "data": 
"ccb41188cb33352b57c3edb78687167ee38c7111833c4b7faae71e905acd5437574268b1c425c194291249ff3f39cbd667fc65d74354a5ecb8792fb6a75e27cbed3862556b22332afb33ed7531a0cb9dd619f1b81cd6509f8e27ead2440f3294d601fc9f5969043d00b758bc4d98de01b4531a613bdfcf95683a2b3f1a67596ca14627c2cddf67bda02edfe92c0df03ebf2ecb42ac497f1c229f71eb7e22d9024d8c3e45abd83921151b1bc8ea4d8d6ecfc79de2539eb3db3eac67ced4294235a8c23b1ffa4f10a7baac071f0ffb20628ca85381dab76a99befa2e86b70b37e0215e942234403c17ad68ea32ffa41badb2692aafd1342f7d9b2fb7ef7019527eb073b92ded03e84b44248e0ba03cb86b4efef34223be43afb484522757ddc3eb1051ff245b1f39b543eb05ddbf7e91ba16b806294e0e242cb15cecb0f28f4df8f4fe535ba08dfaf03776bd53ee7f68970b52704c2434a5bffe7559f5519f10b31d5ca5eefbff534f6568cf8276d4d3e975d51a9189fff8f13eaecf5f65b3697520a1ff80d86a58f1e8a4ba73262534ab7f1b9d9b5b2238033bf1d5819d40c3f684b50f270a72f4ec5acbe94162b5eea346c6dbf6c1a679e502f3a4c8436a9febe31200f883d3c0d289d23f702bbf555fdb5cad3178497472417cba7fb70d94f5ad32b87b937c89f73731d6aebafcb5d54a61afbbf601d322dcc81b670d33cf9bb153c3ced53cd2ce11ad61aa267f703939562577517670808e1698e1f965b857b49a6de72e32d7dff3a8601f733b8e955575e5da6920ca1f3505c9d09bcc67429b4252c4ce13d798bf79083cfd7159d7a39e937f879e5016b67a218fb177232c910b6a3149f5259e7b58fd5ca85501e1f2e9d9079ce2791adfb2647d620c929782f45e49a3092c14dcf1fb9b0ea4fff5df65ab2a78f211f21dfb89fcd5ec1f181d821bbe12b347eaea677bf7058bdf8ca348100e18c09de851c27e6d589c8ab8976fc45aede667f6188fd19158f64b650a8a0894e9ce6f3a035cd333fa7d22b6eac56d380bff7e1614e4eb72ff27f5d579f56a85dffb1f98790e1fd769e741f99df54cd988823ac7e22f8ae9a9d1bb5dfcddfe557371be4b47b7ae6fdbc8a6dd4eb3b3be8f1bdbc90504bf9274ff72221102faf2a9e0fe3dd69f65d52b3ec7aad864e0971d563e06154c43" + }, + { + "app_id": 1611396816, + "data": 
"6c98ff14bf46d39786bc6f4dee9c80e346c67be1e8c295879faafeb9331bc6c137931fc937948941bd6fee58de2dd2cd4cfa2333bf1586fd477d9d02b3d7bde788d0a5a6d316cffc3dd5fe65612778bd7cbcc1dd9211130ad2a66005086e59b4f39717f7e43846da1f4a2d192f6d4d4ad431e3adcdf287222bdc693e913e02d62400dfaf4c6b3d5a635a77ff40b71ab06cdce7da303cfce648dd711e7362b2479c7af042be174ba1c60361daa3f1e01be588238190a3429a52655ef2840aa9159d98e27c155a4d41b2af8419b6c65da2f265ff9e653552703f59efb3ddb8b337b2ae62d3b361d3e94139b89e7d775dee9076aab9ae254868a2a06af8ff8291058e92e0bd68fd3f1f49aae82e0be37bbf0fa52e6101b606d41afe316b1ba77f62ba470292d09d514cdb115465d38249cc4102f9d50a9be8b26170d0d057c23d595a5d2e7d7890b6128b8b32da1697dca65169ca19c3d7a7976a9c0102460e69bf565a855e5f83d7e065b1ef48ab3dd61a881c5a574e4ee80eb9189645763e691e772517f36bfa9c42b0caec8b369102ffad3a45a541bbce1f19dfe3f570c690d3a4e91468b4077c47f0d15d412646218fe1b3a0a90c445e8b74c032844a032034c69c21a4a8cbc9fb6695ee8cbfa1afb33494d42fb39e3f62df00eed81c6b8806a7401c3294cdb732394bc5b4a3ac7c5a289abddd5050fa5b000f8f1dded1fbb477292188ff391944e76863992447061efba7f18bce2a194040fb147097f9820d9330e20a3e38f40a63ee36c5ae3b9cf2474530836fbb1317aadcc8e21d981989de0caa567213737db6255c6bc5a4551da1deee308e164ce138ba9dc57ac9be96ffc034533f07851c7ffc94ea54f530f1f1ebda9a5e126905d16ee69c1866df2a5d141795d3da86ddaf5d6b967a9fc2c78811cc74f30140864bd2a08c532ad22abc641da977b68d3c2d27c573756b443aab5df80c105d8ebaf20b22fe028862d5d8f92453e5c0ae810ab9e9177070d6b6e5f2e7df3a695206db0f8dc66f96cc1029489ccecebad45bbea56b45bc47052c46e60bcb112009456bfb09b11d21e117bd8cfe6c79648014b2c228226d99136dc5921ca5f29469e250fde6cb2bc3691a70ffc594aee440dbda4053b9fe9c99924c584ee639634562a213784ff70d3a9695a0fc0c8c1309f2a2bd81d869006e40425f94d68dac090453a7f6e93d5c5efe30048261e7fe7151455c44f47a94f8e6e9c89af288b84d397b2bdddd2ec304f2ba5c5964d48301e631f0153cc6562f25fd8a7d902d3a2038b84004e6647170546c63aa7db500097f79912bae110f520e5f8b3285bd4add7a13313ef914dd0221fd838ba9d0a04da705610462c79448f565276bc6e078" + }, + { + "app_id": 2209933717, 
+ "data": "579e1f4b7be2155fa1930b4e5ecdffcc8a9aa8f7e694ca4e55a841c6a3486b282364f52be970411710600293a55431d37f869b62542a50636874839ed86218fdf5696afc9fed40f220bc0c6063dc7ad58cc6c95bf8c627f9f15a2e8b0c11dec18ab6d80454d8e03480124250288bdaf5167fa5085198ac72d5b20a092a283bbb50aa44e28c7c627dcd22a88ca99c28b65cfd243280c3d9e43b67424a814cea9ccd3ebcc0ac08afcbdd0abe561f390cbaa92f0212db1f040ecaca850aa09053ba62da6927eba5cf7d7a85b22ce8bc29152680980a99900d255c44441a3f080d6a7a0da4912518279f55e97f4d8e4b1d5c07ba769f5f3c4d2dab8e34f180f07e7709e90059b855d876ecd86cd1551517f5eb45504a22de5261d6f83cf945ec2de39e00be5157238f2a3eeb4a333fcda94d9f4e6789122944254925b36134b69128dc5ec173d98ee9817f2c6a0713285af92c1ef6534a8c95515edd452b765bc61df32206b4b84babd73ca6e499cfeeb1af37f07761e34fccab94799d3c67290c78958b77a933f1d76cad17a4e71456f25819c6fe94759331bff7b29ad8919f935a9cb55dbce91bf7d0afccdfa55ddc2abcf89406a1bc0a0407ce4ead2c2c22b784de7240b0ce12d861a744d7a4e4c320d18071242e8161409f40f6294676f459004b1f0b726a0b0bb379d7a8234cfdc86ab0440a7405b04ef81e38886a59a4eaa62796bb023c6b59397ea41d4118d1985302fd1593765c871ee87c72927d7c5f0a8f992336dda1bf0440c13b2eaa13f56d63978578ebd746d40d3c3760c0effe0699ce6b8d9bc1e448008babea6fbe92cf414eb21a338fdda1fdde9dd91a5cff689ccfbb37dedee63427f507b7f5cb202826f6ff053f80282d4844b2dc7f9924f8dd29e0d8f38013ddf2541b211d2d50f0ec7769ea3745c133346473c3b7e279c9bed03ccb82cb752518a67acfb1ef6aef47c9507cdf84af79fd989fccff8bbe2324fedaee56c247ca9217fe113915888cb4c9def8616f622a5cd9593e110a891c5d4797609b62eabcbfe00295f90f50b043693fcf23ce7d03da3d70eee4c9d13e80311a49f529fb1cd8186f89c37a045e3e7191bb157530ede14a3a033eabc42b7d168adc9743481471fd2148924a0114439bbaab6371c9ff78a3dca0a959d4cc492c2f25f97acb33956ec269e75ac8df83cd3c0bca5557fec8b398bb7e9b0f0975bd6f429a20c470963017ca7a4af335cc3cbe407bd31db208e9aaa21ec485f7439b711ad2a7fb843b799ef88bfa9f26f30c8a42ff43ec1557162fab66b29cba56a47cdce44f7a4915cba463d0eb86e87d24e625f457d2038fa34c3e6eee9fb5bbcb9ce8fb12c82b57d0196ca34b36329a99868c572ca10b39098852b8d56df42e72c
9c026ce285248dd093fdd2cc21fc185d264309c988defc45a4fa0f88f99c299f1f18739b72a35385f07baa94fe1097f3276a1456123a13b41b43f315ea7c801a90fcbd75bd0bb25163fc22bc9c5500315c1726c45a6013ea6d0f746b7a8e67a3b0bbf2b473edfb6f98ea6f91bba3f034fe96b4aff5cd310a782530305dbf6da4e7704c63aae877c3ea8c8a1faf73a277f5d1998e78ad6f182c85936f3b1f3758165432d13e852423a878dd6778541c3713936b055319a715ad3187c531db3180bde740066cfd8b922ef500c82c41676d403225dbdf2bccd32955cbc55bab749f031d4efc3834c9d7599e3bf9461a880f2e0f040de7f171057d00400a20f08b9d470a028ea3fe971997262a4fd62b13264d02b491f975c8053831307a414644dda482372c91b008409c13279aea4eba3e20118d60a9147cb9369d4121c81b85309dcbf82f22fbd9c79001b91095b6924bc93b423de6d50acbf78ef0029437428027437b16b0dcc9f1ec1db630f95deed06174f0abc440f989e66c5d04e9f58f4ff6e7702ecda5b8c5a3eebb239ddac85d4d7beba210cfecbd1f3e3b9eac140302914d7cf722d8bd7c1da0ca9f70dd98e8ae13dd7f7063dea38472072dfdc57048c2b54fb5b0d0774d971d45e443d2e3259a0f30a7145edce7a9fac3148d64cd4ec0202beace20d2d381c02b9cb07fcfc4efaf71ddbb21aae01e13e5f9bc12d3371a296d9ac775afeca05dc4dc63a4f24ee9a58a95b2c05bd53990ee3af10525e7eba72765fe94871f638214b1f7012c058819548445e8847be24b31b25ea4be813448b0276c4d9ad066296f00d0f64a79ca3ebafc154f2ecbe81971bba7be9871e37a45a59ef37454d723684a3b0c8149167fc52c266673e91da9b95bf86259d3daab5b0c4c7b8e991e496faaa1c9a2ce2d1d668ec214cd37df05173e6d8c94f940187d50708cce086a66043ba2ec4aa38dc9cda26add5f923807c9c487abf3c994e062c99089b6ed16d6d01d7391727e435d4e40d1e23d0dd5905939abe36b71e2389c644dca8088a18f3303a851dcda938ffc09e5d6e0d6b9283a798e40f51a4587fad87e7514d5e2f510ff304f06125b1d45c170e7a66de9329927150c835d3f410ac834f61c2c0bd20b3d1c0f6e50303e87d49cbc9c2c6518bc2072eac5e6f526a7408bc0f7e4ca34da66add82150e9a2f5a5d7977d920f038a50dffeaeca928f87ed03ef13df866ba67e472c0724ca470d7bf02b8c48eac27305b9abe74ce8bc174f3299ac0941400a65936b8eaaa6a3f4339bbb114b4b56bb28231bb374daa19923e28e3ce552d38cf218ceccbf8ac795c0d8230ed5d47463a9f69e3b0e2e2e03f8489c365ffdba83a33426147d0d0abcfcd2037b96ddf283395f8679285063c39529d7086c
1a64f45b2aa21b2f879ac34b60e7a6a54768226d8441131a60abe5e5ebe8e394af1ecea60b97640bccf275801185c83e5c0" + }, + { + "app_id": 2355063471, + "data": "f783115a39bb709592eb2197f6ba940ad77af14dbd366287ad353005d88ee023f5c93ee9fe3e816fc00ced35633ec3f9b4cb058e671547886b41c63f0f5a9605cf6f66ec689f6dabff294b8fc66e503710ef0160a14ebfa2f7f1eb9ffee24bd685ccb3e175a384eea22e2567a12054c4d599cf53f734a71732c0dddbdf00489a9bf08417303f9318d5a67bdf1789ebe8b812f257eb17ef251bbdcd6b158975f5f5c54c72aff3b65eabeae98cc7f93acb0daf19ef032044ac7563b0e0a7d14cb6abb2bac1255f4b14c8e8fe9fb9bf2b514c55e867076ff2be169f98ba59ad6e2267468befa2c69d448e909ce229feff6e7b25fa817df441e43cfbeb7dd75f84a9c2684d383972b5e491a870c6a1a85a0000a075bef3a974f03a4767f20ccee96aa6e15ab34d7207984a20cadafe3d1d3b7b4bb2482690fc09cf1c09dbcd7b44fc7001f103e71544988a4a52d4914b0faef9c08c1deedc147f2c3cc122732f32189f440b04782ff3035b9fc042ef23db788b52e868117dafbb1c6dcc358f28ba0df29d5c6d1c48e77eabeb742c73fb7ed12a057601e1b767745b6627bf2d31afc32172d27c" + }, + { + "app_id": 2492217952, + "data": "70d67e1aaeefb4e48c299a0240e8c5b98198931342344b4f5a1f4f308ece1e10dfd7ed93aecfe64390fd847e0208cd2d266b5fde7e12ff6c312150de9d1a30abafe8654b07226e7848280a14f5aea982c40f2dba6f17bcfb471b9bf4ddea0eba72cb4dd2f4dfb9fa935f576bb93b3e4fd1eb1b4afd33ed1715e567becaf3ffe3516c3a099582b9738353a7d9a099bb88e46af232ee19504d4d25db3252d20935921ae7f60c903664b8fc88d84896436bbe48dd" + }, + { + "app_id": 2802187347, + "data": 
"5d59a3919c30cb537c26fdbbe815bfa829df645fe3bb2102165379f3f50b1f47c6c47731311729439c2ddb17568ea346ccc9f32d1a67f9b325de5a9a2a6c1179ea22dc6fcaad144819daf387fab46276c2bbdcc7d0862f1c0d6f57a8cc7a4cfeb77ae4a81ee76833342453e0761a675fa24d984fe5657aed7eca76055da5c8eb0804f002a744d0188e6752a3c4f7076766a0f005a6480b03c9aca79404fdb83fc9b9af8312a346c7ff940242a80c24c231a3aa556f3ce30f4a5e53421037af7fcb3b14899adbbe1565ffd2a95618918c41e4ba7f469e298d6ec929f211c80f401c368e82906cdf4f514ed2ebfede7dfd24aa84d613ac0fdc459d56f2bcbe0096a431d829d76abc21eafa721bb2619bb793c67b951a283eebdfd0e5dcb4570809ac65137b0a428335ee3465df582274bdab2d7d502f70827590c2b7ebc5cb802b05f6ad3fda8443497920d5f39d23c26b374d483134c3efdb8254cb4aa1d31ac8e7d581cbe364defe8bb8c9a38b82972c07460ca5d93d247de2b57335f428002f93882775fba1010fae8b7ab704f8c3281ac2c7a671c8de1f83eaf1b4d23f7c49030facb43e1c6f837df672eb0bb01f7f1be60198c31785b3f4f743e721be9458cc29b9dde90680c0f332c3dc49157aa0fec1317a546ed784d6b827d60662e8a083ec2a36614abeb8f9a0eaf9620f7cd4d3dd2d6282d8d4bcb3ccd05b215fa7c667f14cab10b0ce1422b79ec55d32084a5458334a802efc9eedc73682c85ce8249cd922a5735f603584f74d0be6d108b560c96861f5f138d4de147942a8f8891052b65d9d5085c6a734739243e058a5fa35f4ffb98aca8eb5768e5e382ae3c2be85b8d8f55f1c9ab3fcfe9f5ee485430afbbaf48cb70c5bc8dd85f7066c1b72cc61ea75254ec043ed52ea74ff161855b89f1d968ac9b4bf0fb451fb5ad5766f434782cae42bc715d420a60c3ee39a75c0ae25280c94392f08604fbb97bf1071099c7b98d0e3d2092b5220031b11fe9ec56b30303f120d7b29430c052ac6d1090f90e614eecd53b52914b94b4ca0ae97e5cc59d75c9d2b274b2ef29cbaa5f9f8351d384b993a2915f55036252af2b62b7ef042c82e241eda801a3c509eb3742bbd1154f56ce6e48b2958f7bfe78006c7a47a3aff74424165a73bbc9c9b0b9b44a4ccf4e08ef03248694d798d931878351166606d8cd5df6c3e7582153cab7b15e644ce6bb69be08dce94cfc327bbebe92c1433a3f922dac4a10763164939540cf03a382f25e9772d32a152d62bbe3765e952a85e14ea6a0dc5f51ae2ba02f979a566186c5251a9a74f68c29e1d2bc3785d91315bd2fb321635a3c47cc0d16ef4d32da544ff8c8cd927c276c49a2b15eeef494e494aa0a8358c6d4b67f4acf1f044d270839f52fcc72
977f73009008ebd92fb9b68f937a9d75c52053f25f6bb06a09e2cf4b6189f76567b4d14802430375f97ebd1e232a5c6ca4870200b00afafb45ab54789ab988bcebf2640e6b44c586735705e2e38a6cf7266fcee028941d540511751d050afcb7b6bc88a3a593d79bc30bd54bfc3367a21e86a17f45801b932d5862f0aedecfbd4aef578bffd9dc6932df7d06f0d744843c7b899704ca33eb2368e6dc4c67ea56780ac9b7362b94bb1c7824ef38cd00ac05dcf88f710048caccff13edd9637da487a35a4ed840fc0cd841a798116e0864a345666e43cd85c0323f5feb34851bbf66ab70f517179c9b568acbd4a79bbe1b9ee77533f5c8077141998be6e280c44ee5ac7ebefd460fdb44a9cee921c58d2d934e85c4bcafc197bb6f816b451a5a22334f0d299c7b3a4d5eff63055d51f7521dc0f70f7b0aabb60012f091e522475221ace85c9e636cc5d69197c25eb7a0848e3401bb1b71b68052e3e918532b5902df8cbe8ec40aba0b39d3a81620def982ed63b1a3d47fa09b4fedaeb3260ae13e200782d35ee1af652b396d4f304342af6268f72464380431cac1a43061e050b08f7255af030a4f3c9c67dec8dd15bb854bd2b4fcefbcb49363ee09ff57a857f889485e5a4330e11e89c221b45649c45e80a53a036b3114432ba09ce43b5ad531a461337960668e6bc87f3cbd323b13c669e1568232b6f8b940632a05e5731fa2109889afafa58e6c02ee70ca123cfca2e05f1a2147fdf34bb4380c188b3e28033e5485d48d1c0c248f26b003582f2c2ee5c3b3c4cbd6e0d15dd00a4c5ec3fce30916f158329953014887f853242a507fda46cb7ef16062cdd2b84cd6b6d7d1b40f279c713851d7f5428ff7b6d9ca638c0ec0a60a6c00a2fcdb7c882ccd27c68e34e6fe425b34584cb43b699a603cf8e2b019a12708f4133f55c8511ade05325f457" + }, + { + "app_id": 2996444749, + "data": 
"19477d140b32d4cacdefe2cad3cad1942b0539de6b3a2fd321e66f005c38a5a9542d4d3f2f08a4e88f6acd7e4518156b3b138abbb19f2f3be961ceb725a8b8de7247d6151de5ca5f5a5e65339ceff3da2f5b7127f2ab348cfc908571315abedcb8a08357c1c459210ebb317d97853fd5b6d1741438799a5a650bb98028ba1e4b9d2902d97a9368939bdb85153624c3eaa13278dadf6adc88dd3608e4ef19063ff20f40342cb9bb9f9c0dc691c34cdbeafe767478ebafe4bf2ef043fd306b6ebcd2837b5948029c534d4bb76609da457bd2930b76a3e4236bf3bca314321f21be45da586179b853d80c49a5b95bf67d863052a6ad4dd4b081860e05da4ff2e1e81e8c7861509c0481fab6587781889554cbd9981286ec437b36065f56e5a0eac5f5ce683931e421121769787b7d49154bab6fb22a46aad8ebfc3a84cb791ef4b4a0dfbd4cb34554f204c21e5b975bd345e0c670906d3bfcfd4c1a3d1b0a73f0629e7437d75f511e43c9eab4f308bc06a59786c69e4b412471dc63413144035b37610f5f5fa1cc6503bdb6140b1e75dfe3d3b18b36ecdd78f6dfeeacc12c585bd191d1b32ba1c8330d335ff61ba5e2898f3cf466068e8d7a1b4b9ec42652794e5a810f80e234091443481c864f13fb3d5d27f3ec90ea8e4e3ba736e714b5d12ae2fbe975efe0b59a540c76af7a1a7929fb5f86bdf4ce5c84788cd919abad1ed0f091c0b33ca9d86e5d1773545d41f99b0ab2bfd4a6aacdd28edd3bc798360b6e72eebf55bc1e6a62f017b39965f4df4b7c5f39a55d8f95b058946652372b559b49ee367faa7008863990b83aebf1ce789c955f9aaa3b6d189623f710ce84ef7eaa344ce678a39b05c0ea37229932e459af010fe6d20ca9b8cfbf9914d560782c0162eca5ac82a06ce5b6755711b20f40d3c63ad065d06b821743c0cd380720a3892d6d38cf17889ff06d544df4d92935f5f68109074af8cf270e47bdf164f38626f047db1535f303827e56c82e4f028a52a99e6088d197e453fa0197a49bbc54f728a8ddd3ee9cddfa7bc97a1c1bc733275497e4df1e410e6cf007c6fe76c6ec23962602c123a4537964ca60e282dde80559d55c9779055339edf9291f415ad373c66cf1911ee20f9d1633476a54e78783ee7aed4ae2f0256a841c0a3b1fdcf289179420555a1275c625fc1c08b22fd3890e33d831fa5e1deace2d7137cab6fa095935637e6b04f8672061b0c7eb73f4e6b5c5bbef3871b8fc61a42cbf6f83191bf129197190a46fcf7a7bb24c37bc8a4208e592e2a8609c209b6e7e6dcf759d8f690666284d9fd06e176f2f94c2d98effd1d6b18c9721ea8a85a68651904c3e51a72699d0a922a94805b76f702f5925cba30984e16d3e8cf608dde11ef34b91c07636c056c6748a0
66809aa43ab5c5127b2dab00e32a1b7b0f526bb6083402ff42e366a39670d5a468ca2b2cf7b102744779b05661c8e973665d40426e3eaacbebc3f18c6161e3007fac1b1281d948bbcde2b316582906398eb1e2235e1d9d385e300b04df8d34801d95f8db315b1f6ed5e62429ac40927bd33f6fff6a5d1c729508d95365a3e1f5fb3ac1ef49f1fb75257457e4bcf89a1cfdeb2cec1f9d61ec578973756b7b3117bdbb767a0cd7df487b98122122f9917d45e9b8f571f109632db09f1b688b552bfa472b272286dd5dadc1427757f3f24e9c3fdaebf709341a525b0aa989458557365b5957f1264b79056a883153e551291cc8dcc4aa0559e06d5b22b3e56e1aa7f2c0d6013c57611cb095a157c2b10b43e0a7b1c61d05fe2f3f0c63829fc2477e3d94b0b2b60c5e45bbdc1f8a39e7e77a4ffd7f5cdea80fdb13663b9b06da92412fd052a08f8cc59d6b3eee2a680550d33de7a59108f78c18e584b7b6db4d9e4b184038794294a9fa6738a293d71b760812e10b959088d5aa157cc18f10ed3e59d2ce577e244ee89bf11e15f7d430da8248929a62902a192ae1b2badf28494bc77c66e9db50bf598fa9165e752729d78e7de69ea57b6be609ccea8d83d2b827bb87640e6e888865858d29a8a2e132cf4c8c43472e18d6d9f22ed1e42ea48488db3aa54d08a9cc64d14377f06c4d2267c2e709b597cfbcfb284c6d27b2d701517102d3d612d9dcc8391c5b99af67ce85e822381037245e15ea533b6bfcbbedd4755b549b4e6f52931029475b2cd5bd5b9907e7a9c5f0a9178e7e1bee3895a860f92960894eab8bce437794356579ed0b1c21fb2ada821b28fb1e32d4141db90c6289118d76c442d85da08f78b459d323a3935174661098bcd6c567cb0d6683677303a4412648db184fa3d440461efc67e3deab668758d1c8976984294199b28134ce02de6b68b46d2297a5df5ed40705806f43836064e29912a6a43e4871c2a643848fcbc7553de6db91edc743de6c821b57049c937c6e652fb0a55ae94d5cfbed5d4b4a9b59cd3c0282becc86efb1f99328241d4c59e63dbafdf01f4b4c11e1e37fcd0b352f26c1d07a81683dc5d593fcdc96ae0e22ab66749a0c7e9dcd0c81ceaf40241093591a65f9c57dec137dd48707f4db223ac13b483c1e1cb941ee7d5a7093eac21906310d4d8c86ec7db30aa11cef179257ef050c239f3ec7110c404c29389b0ae3f2c07eecd4a690fc2f606a96278f0d2052591cf72ca2ade6ce644170140c27bb498d7972c7df34ce62d6a4f30e2ff9c35254743e9eca1894492e6c28fdbecf52282e2881dc7b85d5b34762f826856a2de5cad687dab30455ae721d687ff21603a848335d901835993c9365f7ffd0af803bb672993e69b7cfda5c76e32a6d2f09d13487
193c5" + }, + { + "app_id": 3315510934, + "data": "79249977e3d11b007f11355f602905ae2915f9ef77b5dbbf7f29f6c6ef38cdda86c24b9181e77fc617db2a7b14811c3270b174e44c8f0789d710297636e9bd2cddc780d94e049bef7d9003cdd26115137e351c6adbd29897cb6bc449d31c502af84351c76487c2ee4652dba1c688711eadd45aacca46ec8feb59200af90150ef25c01f2ac1aa43120f79c99f6cd698b7619e1c7a5c3e7726d90d42737c24364db4d33a87faf6d88e05831906d89e6f36bf066a8ce765782c9aeec2795be308a3e5656d1f41d62cc035ecf653c5c930ff9e6ca21deedbd02010bad3df9cfa9f41ebd8400cbaf7fec514f46c93be1912594c7decd09d78528345b676778d1b9017813c55f467e41fd8f1a5870b33696246760b489f1460d50e6d4e7b902daa3bd527d11eff647454ae5ea2171383962c7443588a86aa88864ae703ba5b7b9a6c440b98222a27f46a15c61db166b2fa000efb7423beacb97eec80bfd7294f06acae1d409f79b9536cc9e6b7d4f5ae5fd537fe19bf0a667aca789186fa1f449c7a638c396cba1c19881e16c5c2e1613a6382e04df54ebcb9282ffd6bc6df71cfecb3629811015c7d63525e2d1a0a2b7b32352055a03c1d8dabe27a4142b7648a5813645612c8d47dc8760cdffc1effe33849c104bad0c089f9b589e297bb3eb41d2e53d7372b8154802b2f158e34d6c6468a6286bc9b30d9dfeca62698f89331c46c2c1d9dc68360aedd48a8bb4ca636598366f03f99ff138bfcf47289d97ea9959b4b0f925d5b04e8c15a8e9c6b813157ac2174bdaf09da67a853d9fbff62cef069c6d1c515740c" + }, + { + "app_id": 3408444956, + "data": 
"bb89f92fb327746b2a2b9e86266d583b6e587755ebfcdb1cbfea1cf59bcfe7e222b8fd73e3156aa054b0ffea1495f47083597ef699955b23dd18ef533757a7a455fc386039ddcf396facd016a166f022b69882d0a428b03c7db597bf432e086e13212787bf208ff60a5b18fc62ed0bf6f4b9fa793e7dff4006a5d7afbc66c84ec9ac19eca8dfa5a3aed00aa011404a47d7567795f6d4e6181d1eb2e7687afcd70ddf3bcf91f707532737f16caf045fbfed9a25de66543be0d9258a176b4ca0d5d5a0c2f9df6ad4e51698569c1fb0920ec8d4a98b1ea63baac061dc6baad62691ad8940f75d2b65ab38d0fa839d6f491f97f20cebc618f177a1364e2893f3d815e8a3e0fe69141271a7d1740864de4a5c5275a6e04342212b0478ec4e9f510b6bce7febf7ebfdfa81de4fb88b6d749c22e61c531dcc7974a4cce574fa1c8ad181b0f6b5798b30c6cc118ed94c0056b3fa62e3b6ae774bfb8a3dc43b306e0f4f73ed32c04ee51b0350fd08016618acd12fd231a1a4324696796128948ff64618a6fc68788cbf0896741af2521a8e00e7d0b7aa835ce97934921bfd28ed18eb8391109d95692d2335f8add32fb505c769d0961dea5e3f40d1651ea41b66ba78f987294ef310a41a3a96f75f8d9e6c898f5655c79fef35ffc2b3905181f11f3ef438aab778dcce85bde52fb5f9b862338d9b1f473f734a16ed67f34e65df7bbec84f8b20e268963682102be15ace8ec18c9e0a879d1005e0fd7308b0803464b9f1fd82c4b3540bc503d3d2eb6a461f7dec768800db56ee22c496cab71444b3a75ee676af1d687c9f2fabdae0aac2f6d6e65facc4730281dda985eaaf56f02244f1b04a634b2523da86610017b60a60b3f588c1052edd46f8bd03464d8b488cfb2d39c0cc140adbaf16397cba217cafabd6f69a17fff5e97082d6618130ef896e227db7b90965f7e485c083d8caf43c591e51fc84f74d11ba727469c1979c48bfa07c3c4c999aedbb2fd64208b013d516e3626a1b3f03e6cd712bb003004ea3bf84a699dc04a6b8d270e3be5df14c99b5c7c5ea587d17fda0800288569ceb20caa4841c1f77b91c2c0a8a7cbf13caa7fc47ee309076027b930c01892066c1797e365d2f62401ad456b30b980a8afe52ab2524ee739b3efd03e9a2dc30b3a6c59dd7acfb7480fa349e80b119f77f98a29fdb095ac67716cf44d4c7727ba84301d0f28d490f158b4145be93db9969ca2502e23b33a09f7a170041797b989ffcef656ca2ced67b5d0106d53f189dcf4820abe3d97b5c74e4f2ec20bad690cf2b314889fb1fc10bc5b2cd92783330bba8460d8c757107f5331ae4e4fb335416d65b59450a281ab643c9f26fdac4738454feaf175a780041e83d90f90254eb2e409141448dbd3403fe9c06057
1a2d22851316eab5bb601720420d09d3d2b1d793fb4456fb798d97ee99c2a4d9c3df15820eef414801be4484f3ed8e340b556a5d6526f25e41dcb4c098c655895310721f2eecd6ed87e65f745795da3aff6d8dd92b1673783fbc1a57fe2b091f1eace76d33ae2229f9b8e9a16aac64c527c34ce423a9f4dec816117ad7c9928f5b747502688e643e73a01d48936594b0a8997b76a5ab8016b743324f2fe9ddf8fa3ae1ed39737863d732efaa1917ff80f9c6424205143652bebb9c82d27130ae4690bebb6169edf5a6493f67b5adda352ec7fb7fe9a0e8ae5c43744a0ba899bef675ab456ff39ec1bce4a95a56b84787f8f0c790c0caf2e5e78b6505d5af4d570239c98a5a5cafd1096a8f2f91fdf3350266dc30f1d188764cbe7bdfa90f581186aac25ca71a5ca1b90c9ccaca0b474f4c48a345e7dd0d22627ded5f4de932d63e6c3cfcf0240b916469cab20c8b097577bfa916587fbbaf38ee0f5bbf12f10ddb50b2b85a114e874b06154eb34e89f75c1a3edcef52a08ebc2c52935c8d29e0854bb2eff3f8524a9770e9877139cbc240187f5d1db651140353ed040282b43e2e887584ab85a1f0e9cecfb643054e5cf3c987bc28cc3d8903d3e37d9c5b01a55ab06a2b7f131ffd7fc9e8e8d10a8df00fd7e0e3229065287c1b26e857221ed459b63bc313886b75c53d51b58f221117ffbb19edf83d122fd514edb6f4d0499f880bbe1ab237e1a3c0fb4b6f9ec87b09f27ffd9a02f5c7993cd01c97ff3a4faaa63a362fc675a19c9ee314248d57caf3504337e331dc7590ebde2a469" + }, + { + "app_id": 3878605408, + "data": 
"f318643f745891fef0e0547a94b02430a2e0717db2b3b5f0140c1107c727469f607313471f90c9f6e3c201090a4b0a7b19ed7f1fe8e33e0e9f6460f215abab05a48c571e876cbe73de43f885410e36369e182ae1a320942d963706fbc805dbdfe964e43d89938d85d5352132928592b4bbc20cc5775f27ec5f5c27fa42f7c6231fb278ff17bae6ca92b980b279691439ccacb65c267c24572f6c4435318dae6ee134249457d29e0eeb54ac83d73c55139f9cc01585a622190c3ba17f1d49ab54a4e7b997183427689067767503573c9fa3bafa5b1e2ba38631b6499174d5f0d9077fde4e6b3e4e3e317f842307ca7aa5ecd6447800bca1eceed88ee58247ff04dec7604a5a82f90919a887c994fb4a95272cb0906ac9d47ff9a4e168024ad41033ec17bd98d9a39a49c51c2298d055e3cccb8e7279717506190a8e05f3627e91fabae803063da84147f6c0ebb889a479f37599d4e9feaf700dac1d664a29b086625e5309c1010f46f14f620535bb5ef904d7bd8bc4177ce15eca8f537c8af7a9504d52ef3c3959243c209e41383949d8184fa52fb0cb8a8df88f45268d82da3b93b2da77c868409d56586e6339b757771865fdc6b92047ea3267ff4a7090b4f4b6f86e2bb1bb07ddb2eea6b97c2dedfbd8736c1f8d790a60879bfc3b650cf3d6ef13da00415389db0030da5c026386cb5bb0884eb709e8b1bcb96bcdb090b1312165202a346495ccffe493c03b8f41d1f4ffc4e4c31f7fe9a1b89b85a17896af6e9f89b6c21b967157a1a77d1e840ec20940a0bc348caa10471946eb30f0f1798797fac9282bcd29adc2d1dbc3fa65435fcc6ac161aff3bf8b6ff574bc2884d7115e1c9df3358b24c7ad232d3a615eb01c732e28a496ebb7e8e8b06e84241528340270fae38b6a0033d960bb410acbb881de7d683fc8c8b65a69515e35911957f94de935832d08aafc7b98145d13e2418e085cf0148dff17733fa457285c6f2f665b7ce2da7d7e87883a67ded8c4211e15b9f41bf49c56c3038ac534adbf8e47530e194478446fa728717f3d1d06472b72999842825cffd8a5415089d5843839fe05e58dc9a0695f5c1cefb26bb70576a53353be42195498f62327703f87a281c2bbe99b61c85aec8f1c2258a5149716ab84ec5a77d45ea56323e717da0fe1d9c47dcbf4be9d02bf5953ed34187fc1c2e383ab4c492e025d0078665b9d834a538e2a2ed018e5bb5a0fd292164972a8983f2a7614f17fa6f751418193807bc3d568f910c15ad369ab10affbbf254758392d6a1cbdd6ce0563b2d014a7ab8c5a96b2e96679191b6959843037c2280f6735715c547855594fb63c50310cf9cabccf0b44d8ffd53f1add0cf35cab35eb8d06b1cc3eb46e526583d156a9d8dc648f015dae376af15709d
7e092707f520d9858bc97ee4317e1b8b4871be537223e9abc98e63e451c31b173066fe9fddb9a0b39707e3ca1c9b77455d9fe4d26f16d6791aabc1d8b1c75da3fcf09716f6cec19dcff4fc869ce7c73584569e51f666bd90d39a930ec4305cf1015cd1f09716c9bcb02fe7b23c0af55e84af1505156e1b58f453b4881bd51436581a559b84841c641da6112a7b8b0c86af0b97741f7b899ce68db0c174a39865c1afa0c1a48ac4590925f5f07becac1a3774b87e252b3ec7ca5378a175b44374138419058064836f63ccf967704dcf7270a20b8e6f6e2ee86d62c3b0c34d29e3ee54df5c4ed5f4e3551a3caa00ae0cd75a729a8fe7ebd78c1e5e4354a399399a9dbefbf42a39e45f726c128e23a3f8f73749d985e290a08a5cd05e6a98ffbe5c0072b8a04867019488b79a4e640cb15ffeb6231078e5e0ede5453dd6fc94f3fc475a9ff1fb2666d559fc170f4b80aa98a633b6d0ffbb605d09c6b7efe4808d2ee4d49dc28748c769f745df6009624ab334fdff90b0e029a5ac51083dec1a62c7c9b0dc18e0f281fd54fac7f607f9b22d09a4fd5d17e70ff28181068b37af137a61e8ae0fa8cc0482bf4398bad275353a9c539f3ca72e47803efb7d2fc4732fa898cc556f96023186196bb56a153bd57ba70977f775f51faf1995f0811e63967b9b7551a1286021eb6956f56e673" + } + ] +] diff --git a/kate/benches/reconstruct.rs b/kate/benches/reconstruct.rs new file mode 100644 index 00000000..92c14dd1 --- /dev/null +++ b/kate/benches/reconstruct.rs @@ -0,0 +1,160 @@ +use avail_core::{AppExtrinsic, BlockLengthColumns, BlockLengthRows, DataLookup}; +use core::num::NonZeroU32; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; +use dusk_plonk::prelude::BlsScalar; +use kate::{ + com::{Cell, *}, + metrics::IgnoreMetrics, + Seed, Serializable as _, +}; +use kate_recovery::{ + com::reconstruct_extrinsics, + commitments, + data::{self, DataCell}, + matrix::Position, + proof, testnet, +}; +use nalgebra::DMatrix; +use rand::{prelude::IteratorRandom, Rng, SeedableRng}; +use rand_chacha::ChaChaRng; +use sp_arithmetic::{traits::SaturatedConversion, Percent}; + +const XTS_JSON_SETS: &str = include_str!("reconstruct.data.json"); + +#[rustfmt::skip] +fn load_xts() -> Vec> { + serde_json::from_str(XTS_JSON_SETS).expect("Autogenerated Json file 
.qed") +} + +fn sample_cells_from_matrix(matrix: &DMatrix, columns: Option<&[u16]>) -> Vec { + fn random_indexes(length: usize, seed: Seed) -> Vec { + // choose random len/2 (unique) indexes + let mut idx = (0..length).collect::>(); + let mut chosen_idx = Vec::::new(); + let mut rng = ChaChaRng::from_seed(seed); + + for _ in 0..length / 2 { + let i = rng.gen_range(0..idx.len()); + let v = idx.remove(i); + chosen_idx.push(v); + } + chosen_idx + } + const RNG_SEED: Seed = [42u8; 32]; + + let (rows, cols) = matrix.shape(); + let cols = u16::try_from(cols).unwrap(); + let indexes = random_indexes(rows, RNG_SEED); + + (0u16..cols) + .filter(|col_idx| match &columns { + None => true, + Some(allowed) => allowed.contains(&col_idx), + }) + .flat_map(|col_idx| { + let col_view = matrix.column(col_idx.into()).data.into_slice(); + + indexes + .iter() + .map(|row_idx| { + let row_pos = u32::try_from(*row_idx).unwrap(); + let position = Position::new(row_pos, col_idx); + debug_assert!(*row_idx < col_view.len()); + let data = col_view[*row_idx].to_bytes(); + DataCell::new(position, data) + }) + .collect::>() + }) + .collect() +} + +fn random_cells( + max_cols: BlockLengthColumns, + max_rows: BlockLengthRows, + percents: Percent, +) -> Vec { + let max_cols = max_cols.into(); + let max_rows = max_rows.into(); + + let rng = &mut ChaChaRng::from_seed([0u8; 32]); + let amount: usize = percents + .mul_ceil::(max_cols * max_rows) + .saturated_into(); + + (0..max_cols) + .flat_map(move |col| { + (0..max_rows).map(move |row| Cell::new(BlockLengthRows(row), BlockLengthColumns(col))) + }) + .choose_multiple(rng, amount) +} + +fn bench_reconstruct(c: &mut Criterion) { + let xts_sets = load_xts(); + + let mut group = c.benchmark_group("reconstruct from xts"); + for xts in xts_sets.into_iter() { + let size = xts + .iter() + .map(|app| app.data.len()) + .sum::() + .try_into() + .unwrap(); + group.throughput(Throughput::Bytes(size)); + group.sample_size(10); + 
group.bench_with_input(BenchmarkId::from_parameter(size), &xts, |b, xts| { + b.iter(|| reconstruct(xts.as_slice())) + }); + } + group.finish(); +} + +fn reconstruct(xts: &[AppExtrinsic]) { + let metrics = IgnoreMetrics {}; + let (layout, commitments, dims, matrix) = par_build_commitments( + BlockLengthRows(64), + BlockLengthColumns(16), + unsafe { NonZeroU32::new_unchecked(32) }, + xts, + Seed::default(), + &metrics, + ) + .unwrap(); + + let columns = sample_cells_from_matrix(&matrix, None); + let extended_dims = dims.try_into().unwrap(); + let lookup = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let reconstructed = reconstruct_extrinsics(&lookup, extended_dims, columns).unwrap(); + for ((app_id, data), xt) in reconstructed.iter().zip(xts) { + assert_eq!(app_id.0, *xt.app_id); + assert_eq!(data[0].as_slice(), &xt.data); + } + + let dims_cols: u32 = dims.cols.into(); + let public_params = testnet::public_params(usize::try_from(dims_cols).unwrap()); + for cell in random_cells(dims.cols, dims.rows, Percent::one()) { + let row: u32 = cell.row.into(); + + let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap(); + assert_eq!(proof.len(), 80); + + let col: u16 = cell + .col + .0 + .try_into() + .expect("`random_cells` function generates a valid `u16` for columns"); + let position = Position { row, col }; + let cell = data::Cell { + position, + content: proof.try_into().unwrap(), + }; + + let extended_dims = dims.try_into().unwrap(); + let commitment = commitments::from_slice(&commitments).unwrap()[row as usize]; + let verification = proof::verify(&public_params, extended_dims, &commitment, &cell); + assert!(verification.is_ok()); + assert!(verification.unwrap()); + } +} + +criterion_group! 
{ benches, bench_reconstruct } +criterion_main!(benches); diff --git a/kate/examples/multiproof_verification.rs b/kate/examples/multiproof_verification.rs index 9ef785d7..6049baee 100644 --- a/kate/examples/multiproof_verification.rs +++ b/kate/examples/multiproof_verification.rs @@ -1,17 +1,35 @@ -use da_types::{AppExtrinsic, AppId}; +use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; +use core::num::NonZeroU16; use hex_literal::hex; use kate::{ + gridgen::EvaluationGrid, pmp::{merlin::Transcript, traits::PolyMultiProofNoPrecomp}, + testnet::multiproof_params, Seed, }; +use kate_recovery::matrix::Dimensions; use poly_multiproof::traits::AsBytes; use rand::thread_rng; +use thiserror_no_std::Error; -fn main() { - let target_dims = kate::grid::Dimensions::new_unchecked(16, 64); - let pp = kate::testnet::multiproof_params(256, 256); +#[derive(Error, Debug)] +enum AppError { + Kate(#[from] kate::com::Error), + MultiProof(#[from] poly_multiproof::Error), +} + +fn main() -> Result<(), AppError> { + let verified = multiproof_verification()?; + println!("Multiproof verfication is {verified}"); + + Ok(()) +} + +fn multiproof_verification() -> Result { + let target_dims = Dimensions::new_from(16, 64).unwrap(); + let pp = multiproof_params(256, 256); let pmp = poly_multiproof::m1_blst::M1NoPrecomp::new(256, 256, &mut thread_rng()); - let points = kate::gridgen::domain_points(256).unwrap(); + let points = kate::gridgen::domain_points(256)?; let (proof, evals, commitments, dims) = { let exts = vec![ AppExtrinsic { @@ -28,13 +46,11 @@ fn main() { }, ]; let seed = Seed::default(); - let grid = kate::gridgen::EvaluationGrid::from_extrinsics(exts, 4, 256, 256, seed) - .unwrap() - .extend_columns(2) - .unwrap(); + let grid = EvaluationGrid::from_extrinsics(exts, 4, 256, 256, seed)? 
+ .extend_columns(unsafe { NonZeroU16::new_unchecked(2) })?; // Setup, serializing as bytes - let polys = grid.make_polynomial_grid().unwrap(); + let polys = grid.make_polynomial_grid()?; let commitments = polys .commitments(&pp) @@ -46,25 +62,22 @@ fn main() { let multiproof = polys .multiproof( &pmp, - &kate::com::Cell { - row: 0.into(), - col: 0.into(), - }, + &kate::com::Cell::new(BlockLengthRows(0), BlockLengthColumns(0)), &grid, - &target_dims, + target_dims, ) .unwrap(); - let proof_bytes = multiproof.proof.to_bytes().unwrap(); + let proof_bytes = multiproof.proof.to_bytes()?; let evals_bytes = multiproof .evals .iter() .flat_map(|row| row.iter().flat_map(|e| e.to_bytes().unwrap())) .collect::>(); - (proof_bytes, evals_bytes, commitments, grid.dims) + (proof_bytes, evals_bytes, commitments, grid.dims()) }; - let mp_block = kate::gridgen::multiproof_block(0, 0, &dims, &target_dims).unwrap(); + let mp_block = kate::gridgen::multiproof_block(0, 0, dims, target_dims).unwrap(); let commits = commitments .chunks_exact(48) .skip(mp_block.start_y) @@ -83,14 +96,15 @@ fn main() { .chunks_exact(mp_block.end_x - mp_block.start_x) .collect::>(); - let proof = kate::pmp::m1_blst::Proof::from_bytes(&proof).unwrap(); + let proof = kate::pmp::m1_blst::Proof::from_bytes(&proof)?; - pmp.verify( + let verified = pmp.verify( &mut Transcript::new(b"avail-mp"), block_commits, &points[mp_block.start_x..mp_block.end_x], &evals_grid, &proof, - ) - .unwrap(); + )?; + + Ok(verified) } diff --git a/kate/grid/Cargo.toml b/kate/grid/Cargo.toml deleted file mode 100644 index 82cf3469..00000000 --- a/kate/grid/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "kate-grid" -version = "0.6.1" -authors = ["William Arnold warnold@polygon.technology"] -edition = "2021" - -[dependencies] -rayon = { version = "1.5.2", optional = true } - -[features] -parallel = ["rayon"] diff --git a/kate/grid/src/dims.rs b/kate/grid/src/dims.rs deleted file mode 100644 index c7e89593..00000000 --- 
a/kate/grid/src/dims.rs +++ /dev/null @@ -1,110 +0,0 @@ -use core::num::NonZeroUsize; - -/// The dimensions of a grid -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct Dimensions { - width: NonZeroUsize, - height: NonZeroUsize, -} - -impl Dimensions { - pub const fn new(width: NonZeroUsize, height: NonZeroUsize) -> Self { - Dimensions { width, height } - } - - /// Make a new `Dimensions` panicking if either width or height are zero. - /// Again, **this will panic if a zero width or zero height are given**. - pub const fn new_unchecked(width: usize, height: usize) -> Self { - Self { - width: nonzero_unchecked(width), - height: nonzero_unchecked(height), - } - } - - pub fn width(&self) -> usize { - self.width.get() - } - - pub fn width_nz(&self) -> NonZeroUsize { - self.width - } - - pub fn height(&self) -> usize { - self.height.get() - } - - pub fn height_nz(&self) -> NonZeroUsize { - self.height - } - - pub fn n_cells(&self) -> usize { - self.width.saturating_mul(self.height).get() - } - - pub fn divides(&self, other: &Self) -> bool { - other.width.get() % self.width == 0 && other.height.get() % self.height == 0 - } - - pub fn extend(&self, e: Extension) -> Self { - Self { - width: e.width_factor.saturating_mul(self.width), - height: e.height_factor.saturating_mul(self.height), - } - } -} - -/// The ways a set of dimensions can be extended -#[derive(Debug, Clone)] -pub struct Extension { - /// This means extending the height of the grid by some factor. - /// `2` would mean doubling the grid upwards, increasing the height by a factor of - /// 2 and multiplying the number of rows by 2 - pub height_factor: NonZeroUsize, - /// This means extending the width of the grid by some factor. 
- /// `2` would mean doubling the grid sideways, increasing the width by a factor of - /// 2 and multiplying the number of columns by 2 - pub width_factor: NonZeroUsize, -} - -impl Extension { - pub const fn height(factor: NonZeroUsize) -> Self { - Self { - height_factor: factor, - width_factor: nonzero_unchecked(1), - } - } - - /// Make a new height extension without checking if `factor` is nonzero. - /// Again, **this will panic if a zero `factor` is given**. - pub const fn height_unchecked(factor: usize) -> Self { - Self { - height_factor: nonzero_unchecked(factor), - width_factor: nonzero_unchecked(1), - } - } - - pub const fn width(factor: NonZeroUsize) -> Self { - Self { - height_factor: nonzero_unchecked(1), - width_factor: factor, - } - } - - /// Make a new width extension without checking if `factor` is nonzero. - /// Again, **this will panic if a zero `factor` is given**. - pub const fn width_unchecked(factor: usize) -> Self { - Self { - height_factor: nonzero_unchecked(1), - width_factor: nonzero_unchecked(factor), - } - } -} - -#[allow(unconditional_panic)] -const fn nonzero_unchecked(a: usize) -> NonZeroUsize { - // Hack to get around not being able to unwrap in a const context - match NonZeroUsize::new(a) { - Some(a) => a, - None => [][0], - } -} diff --git a/kate/grid/src/grid.rs b/kate/grid/src/grid.rs deleted file mode 100644 index f0f5b94a..00000000 --- a/kate/grid/src/grid.rs +++ /dev/null @@ -1,285 +0,0 @@ -use alloc::vec::Vec; - -use crate::Dimensions; - -pub trait Grid { - fn width(&self) -> usize; - fn height(&self) -> usize; - fn dims(&self) -> &Dimensions; - fn inner(&self) -> &Vec; - // x indexes within a row, y indexes within a column - // 0 <= x < width, 0 <= y < height - fn get(&self, x: usize, y: usize) -> Option<&A> { - let i = Self::coord_to_ind(self.dims(), x, y)?; - self.get_ind(i) - } - fn get_ind(&self, i: usize) -> Option<&A>; - fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize); - fn coord_to_ind(dims: &Dimensions, 
x: usize, y: usize) -> Option; -} - -pub struct RowMajor { - dims: Dimensions, - inner: Vec, -} - -pub struct ColumnMajor { - dims: Dimensions, - inner: Vec, -} - -impl Grid for RowMajor { - fn width(&self) -> usize { - self.dims.width() - } - - fn height(&self) -> usize { - self.dims.height() - } - - fn dims(&self) -> &Dimensions { - &self.dims - } - - fn get_ind(&self, i: usize) -> Option<&A> { - self.inner.get(i) - } - - fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - (i % dims.width_nz(), i / dims.width_nz()) - } - - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> Option { - x.checked_add(y.checked_mul(dims.width())?) - } - - fn inner(&self) -> &Vec { - &self.inner - } -} - -impl Grid for ColumnMajor { - fn width(&self) -> usize { - self.dims.width() - } - - fn height(&self) -> usize { - self.dims.height() - } - - fn dims(&self) -> &Dimensions { - &self.dims - } - - fn get_ind(&self, i: usize) -> Option<&A> { - self.inner.get(i) - } - - fn ind_to_coord(dims: &Dimensions, i: usize) -> (usize, usize) { - (i / dims.height_nz(), i % dims.height_nz()) - } - - fn coord_to_ind(dims: &Dimensions, x: usize, y: usize) -> Option { - y.checked_add(x.checked_mul(dims.height())?) - } - - fn inner(&self) -> &Vec { - &self.inner - } -} - -#[cfg(feature = "parallel")] -use rayon::prelude::*; - -impl RowMajor { - pub fn new(width: usize, height: usize, data: Vec) -> Option { - if data.len() == usize::checked_mul(width, height)? { - Some(Self { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), - inner: data, - }) - } else { - None - } - } - pub fn row(&self, y: usize) -> Option<&[A]> { - if y >= self.height() { - return None; - } - // SAFETY: `y < height` (just one line up) and `height * width` **is already checked** at `new / into_column_mayor` fns - // as invariant of this type, then we can omit `checked_` operations. 
- let start = y * self.width(); - let end = (y + 1) * self.width(); - Some(&self.inner[start..end]) - } - - pub fn iter_col(&self, x: usize) -> Option + '_> { - if x >= self.width() { - return None; - } - Some((0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked"))) - } - - pub fn rows(&self) -> impl Iterator + '_ { - (0..self.height()).map(|y| (y, self.row(y).expect("Bounds already checked"))) - } - // TODO: this return type is kinda gross, should it just iterate over vecs? - pub fn columns(&self) -> impl Iterator)> + '_ { - (0..self.width()).map(|x| (x, self.iter_col(x).expect("Bounds already checked"))) - } - - pub fn iter_row_wise(&self) -> impl Iterator + '_ { - (0..self.height()).flat_map(move |y| { - (0..self.width()).map(move |x| self.get(x, y).expect("Bounds already checked")) - }) - } - - pub fn iter_column_wise(&self) -> impl Iterator + '_ { - (0..self.width()).flat_map(move |x| { - (0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) - }) - } -} - -impl RowMajor { - pub fn to_column_major(&self) -> ColumnMajor { - self.iter_column_wise() - .map(Clone::clone) - .collect::>() - .into_column_major(self.width(), self.height()) - .expect("Bounds already checked") - } -} - -#[cfg(feature = "parallel")] -impl RowMajor { - pub fn rows_par_iter(&self) -> impl ParallelIterator + '_ { - (0..self.height()) - .into_par_iter() - .map(|y| (y, self.row(y).expect("Bounds already checked"))) - } -} - -impl ColumnMajor { - pub fn new(width: usize, height: usize, data: Vec) -> Option { - if data.len() == usize::checked_mul(width, height)? 
{ - Some(Self { - dims: Dimensions::new(width.try_into().ok()?, height.try_into().ok()?), - inner: data, - }) - } else { - None - } - } - pub fn col(&self, x: usize) -> Option<&[A]> { - if x >= self.width() { - return None; - } - let start = x.checked_mul(self.height())?; - let end = x.checked_add(1)?.checked_mul(self.height())?; - Some(&self.inner[start..end]) - } - - pub fn iter_row(&self, y: usize) -> Option + '_> { - if y >= self.height() { - return None; - } - Some((0..self.width()).map(move |x| self.get(x, y).expect("Size checked at instantiation"))) - } - - pub fn iter_row_wise(&self) -> impl Iterator + '_ { - (0..self.height()).flat_map(move |y| { - (0..self.width()).map(move |x| self.get(x, y).expect("Bounds already checked")) - }) - } - - pub fn iter_column_wise(&self) -> impl Iterator + '_ { - (0..self.width()).flat_map(move |x| { - (0..self.height()).map(move |y| self.get(x, y).expect("Bounds already checked")) - }) - } -} - -impl ColumnMajor { - pub fn to_row_major(&self) -> RowMajor { - self.iter_row_wise() - .map(Clone::clone) - .collect::>() - .into_row_major(self.width(), self.height()) - .expect("Bounds already checked") - } -} - -pub trait IntoRowMajor { - /// Convert the underlying data structure to be row-major. This likely involves - /// re-allocating the array or re-arranging its elements. - fn into_row_major(self, width: usize, height: usize) -> Option>; -} - -pub trait IntoColumnMajor { - /// Convert the underlying data structure to be column-major. This likely involves - /// re-allocating the array or re-arranging its elements. 
- fn into_column_major(self, width: usize, height: usize) -> Option>; -} - -impl>> IntoRowMajor for B { - fn into_row_major(self, width: usize, height: usize) -> Option> { - RowMajor::new(width, height, self.into()) - } -} - -impl>> IntoColumnMajor for B { - fn into_column_major(self, width: usize, height: usize) -> Option> { - ColumnMajor::new(width, height, self.into()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec::Vec; - - #[test] - fn test_row_major() { - let data = [1, 2, 3, 4, 5, 6]; - let rm = data.into_row_major(3, 2).unwrap(); - - assert_eq!(rm.get(0, 0), Some(&1)); - assert_eq!(rm.get(1, 0), Some(&2)); - assert_eq!(rm.get(2, 0), Some(&3)); - assert_eq!(rm.get(0, 1), Some(&4)); - assert_eq!(rm.get(1, 1), Some(&5)); - assert_eq!(rm.get(2, 1), Some(&6)); - - assert_eq!([1, 2, 3].as_slice(), rm.row(0).unwrap()); - assert_eq!([4, 5, 6].as_slice(), rm.row(1).unwrap()); - assert_eq!(vec![&1, &4], rm.iter_col(0).unwrap().collect::>()); - assert_eq!(vec![&2, &5], rm.iter_col(1).unwrap().collect::>()); - assert_eq!(vec![&3, &6], rm.iter_col(2).unwrap().collect::>()); - } - - #[test] - fn test_column_major() { - let data = [1, 4, 2, 5, 3, 6]; - let cm = data.into_column_major(3, 2).unwrap(); - - assert_eq!(cm.get(0, 0), Some(&1)); - assert_eq!(cm.get(1, 0), Some(&2)); - assert_eq!(cm.get(2, 0), Some(&3)); - assert_eq!(cm.get(0, 1), Some(&4)); - assert_eq!(cm.get(1, 1), Some(&5)); - assert_eq!(cm.get(2, 1), Some(&6)); - - assert_eq!([1, 4].as_slice(), cm.col(0).unwrap()); - assert_eq!([2, 5].as_slice(), cm.col(1).unwrap()); - assert_eq!([3, 6].as_slice(), cm.col(2).unwrap()); - assert_eq!( - vec![&1, &2, &3], - cm.iter_row(0).unwrap().collect::>() - ); - assert_eq!( - vec![&4, &5, &6], - cm.iter_row(1).unwrap().collect::>() - ); - } -} diff --git a/kate/grid/src/lib.rs b/kate/grid/src/lib.rs deleted file mode 100644 index 0cf77e61..00000000 --- a/kate/grid/src/lib.rs +++ /dev/null @@ -1,12 +0,0 @@ -#![no_std] 
-#![deny(clippy::integer_arithmetic)] -//! Nice grid API, dealing with grids of different sizes and different orders -//! (column-major/row-major) - -#[cfg_attr(test, macro_use)] -extern crate alloc; - -mod dims; -mod grid; -pub use dims::*; -pub use grid::*; diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 013f0b43..9c84a83e 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -1,28 +1,42 @@ [package] name = "kate-recovery" -version = "0.8.1" +version = "0.9.0" authors = ["Denis Ermolin "] edition = "2018" +license = "Apache-2.0" [dependencies] -codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -dusk-bytes = "0.1.6" +# Internals +avail-core = { path = "../../core", default-features = false } dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2" } -getrandom = { version = "0.2", features = ["js"] } -hex = "0.4" -num = "0.4.0" -once_cell = { version = "1.9.0", default-features = false } -rand = "0.8.4" -rand_chacha = "0.3" -serde = { version = "1.0", features = ["derive"] } -thiserror = "1.0.37" + +# Substrate +codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "*", default-features = false } +sp-std = { version = "*", default-features = false } + +# 3rd-parties +derive_more = "0.99.17" +dusk-bytes = { version = "0.1.6", default-features = false } +once_cell = { version = "1.9.0", optional = true } +rand = { version = "0.8.5", default-features = false, features = ["alloc", "small_rng"], optional = true } +rand_chacha = { version = "0.3", default-features = false, optional = true } +serde = { version = "1", optional = true, features = ["derive"] } +static_assertions = "1.1.0" +thiserror-no-std = "2.0.2" [dev-dependencies] -once_cell = "1.9.0" -rand = "0.8.4" -rand_chacha = "0.3" +hex = "0.4" test-case = "1.2.3" [features] default = ["std"] -std = [] 
+std = [ + "once_cell", + "serde", + "sp-arithmetic/std", + "sp-std/std", + "avail-core/std", + "rand/std", + "rand_chacha/std", +] diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 1a42be9d..a77f4972 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,54 +1,84 @@ -use codec::Decode; -use dusk_bytes::Serializable; +use crate::{data, matrix}; +use core::{convert::TryFrom, num::TryFromIntError, ops::Range}; + +use avail_core::{data_lookup::Error as DataLookupError, ensure, AppId, DataLookup}; +use dusk_bytes::Serializable as _; use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; -use num::ToPrimitive; -use rand::seq::SliceRandom; +use sp_arithmetic::{traits::SaturatedConversion as _, Percent}; +use sp_std::prelude::*; +use thiserror_no_std::Error; + +#[cfg(feature = "std")] +use crate::{config, sparse_slice_read::SparseSliceRead}; +#[cfg(feature = "std")] +use codec::{Decode, IoReader}; +#[cfg(feature = "std")] +use static_assertions::{const_assert, const_assert_ne}; +#[cfg(feature = "std")] use std::{ collections::{HashMap, HashSet}, - convert::TryFrom, + convert::TryInto, iter::FromIterator, }; -use thiserror::Error; - -use crate::{ - config::{self, CHUNK_SIZE}, - data, index, matrix, -}; #[derive(Debug, Error)] pub enum ReconstructionError { - #[error("Missing cell (col {}, row {})", .position.col, .position.row)] - MissingCell { position: matrix::Position }, - #[error("Invalid cell (col {}, row {})", .position.col, .position.row)] - InvalidCell { position: matrix::Position }, + #[error("Missing cell ({0})")] + MissingCell(matrix::Position), + #[error("Invalid cell ({0})")] + InvalidCell(matrix::Position), + #[error("Maximum cells allowed {0}")] + MaxCells(usize), + #[error("Minimum cells allowed {0}")] + MinCells(usize), #[error("Duplicate cell found")] DuplicateCellFound, #[error("Column {0} contains less than half rows")] InvalidColumn(u16), - #[error("Cannot reconstruct column: {0}")] - 
ColumnReconstructionError(String), #[error("Cannot decode data: {0}")] - DataDecodingError(String), + DataDecodingError(#[from] UnflattenError), #[error("Column reconstruction supports up to {}", u16::MAX)] RowCountExceeded, + #[error("Rows must be power of two")] + InvalidRowCount, + #[error("Missing AppId {0}")] + MissingId(AppId), + #[error("DataLookup {0}")] + DataLookup(#[from] DataLookupError), + #[error("Some cells are from different columns")] + CellsFromDifferentCols, + #[error("Invalid evaluation domain")] + InvalidEvaluationDomain, + #[error("Bad zero poly evaluation")] + BadZeroPoly, +} + +#[cfg(feature = "std")] +impl std::error::Error for ReconstructionError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match &self { + Self::DataDecodingError(unflatten) => Some(unflatten), + _ => None, + } + } } /// From given positions, constructs related columns positions, up to given factor. /// E.g. if factor is 0.66, 66% of matched columns will be returned. /// Positions in columns are random. /// Function panics if factor is above 1.0. -pub fn columns_positions( - dimensions: &matrix::Dimensions, +#[cfg(feature = "std")] +pub fn columns_positions( + dimensions: matrix::Dimensions, positions: &[matrix::Position], - factor: f64, + factor: Percent, + rng: &mut R, ) -> Vec { - assert!(factor <= 1.0); - - let cells = (factor * dimensions.extended_rows() as f64) - .to_usize() - .expect("result is lesser than usize maximum"); + use rand::seq::SliceRandom; - let rng = &mut rand::thread_rng(); + let cells = factor + .mul_ceil(dimensions.extended_rows()) + .saturated_into::(); let columns: HashSet = HashSet::from_iter(positions.iter().map(|position| position.col)); @@ -61,15 +91,16 @@ pub fn columns_positions( /// Creates hash map of columns, each being hash map of cells, from vector of cells. /// Intention is to be able to find duplicates and to group cells by column. 
+#[cfg(feature = "std")] fn map_cells( - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: Vec, ) -> Result>, ReconstructionError> { let mut result: HashMap> = HashMap::new(); for cell in cells { - let position = cell.position.clone(); + let position = cell.position; if !dimensions.extended_contains(&position) { - return Err(ReconstructionError::InvalidCell { position }); + return Err(ReconstructionError::InvalidCell(position)); } let cells = result.entry(position.col).or_insert_with(HashMap::new); if cells.insert(position.row, cell).is_some() { @@ -88,14 +119,14 @@ fn map_cells( /// * `dimensions` - Extended matrix dimensions /// * `app_id` - Application ID pub fn app_specific_rows( - index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, - app_id: u32, + index: &DataLookup, + dimensions: matrix::Dimensions, + app_id: AppId, ) -> Vec { index - .app_cells_range(app_id) - .map(|range| dimensions.extended_data_rows(range)) - .unwrap_or_else(std::vec::Vec::new) + .range_of(app_id) + .and_then(|range| dimensions.extended_data_rows(range)) + .unwrap_or_default() } /// Generates empty cell positions in extended data matrix, @@ -108,13 +139,13 @@ pub fn app_specific_rows( /// * `dimensions` - Extended matrix dimensions /// * `app_id` - Application ID pub fn app_specific_cells( - index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, - app_id: u32, + index: &DataLookup, + dimensions: matrix::Dimensions, + id: AppId, ) -> Option> { index - .app_cells_range(app_id) - .map(|range| dimensions.extended_data_positions(range)) + .range_of(id) + .and_then(|range| dimensions.extended_data_positions(range)) } /// Application data, represents list of extrinsics encoded in a block. 
@@ -130,17 +161,20 @@ pub type AppData = Vec>; /// * `dimensions` - Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column /// * `app_id` - Application ID +#[cfg(feature = "std")] pub fn reconstruct_app_extrinsics( - index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + index: &DataLookup, + dimensions: matrix::Dimensions, cells: Vec, - app_id: u32, + app_id: AppId, ) -> Result { let data = reconstruct_available(dimensions, cells)?; - let ranges = index.app_data_ranges(app_id); + const_assert!(config::CHUNK_SIZE as u64 <= u32::MAX as u64); + let range = index + .projected_range_of(app_id, config::CHUNK_SIZE as u32) + .ok_or(ReconstructionError::MissingId(app_id))?; - Ok(unflatten_padded_data(ranges, data) - .map_err(ReconstructionError::DataDecodingError)? + Ok(unflatten_padded_data(vec![(app_id, range)], data)? .into_iter() .flat_map(|(_, xts)| xts) .collect::>()) @@ -153,13 +187,16 @@ pub fn reconstruct_app_extrinsics( /// * `index` - Application data index /// * `dimensions` - Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column +#[cfg(feature = "std")] pub fn reconstruct_extrinsics( - index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + lookup: &DataLookup, + dimensions: matrix::Dimensions, cells: Vec, -) -> Result, ReconstructionError> { +) -> Result, ReconstructionError> { let data = reconstruct_available(dimensions, cells)?; - let ranges = index.data_ranges(); + + const_assert!(config::CHUNK_SIZE as u64 <= u32::MAX as u64); + let ranges = lookup.projected_ranges(config::CHUNK_SIZE as u32)?; unflatten_padded_data(ranges, data).map_err(ReconstructionError::DataDecodingError) } @@ -169,51 +206,54 @@ pub fn reconstruct_extrinsics( /// /// * `dimensions` - Extended matrix dimensions /// * `cells` - Cells from required columns, at least 50% cells per column +#[cfg(feature = "std")] pub fn reconstruct_columns( - dimensions: &matrix::Dimensions, + 
dimensions: matrix::Dimensions, cells: &[data::Cell], -) -> Result>, ReconstructionError> { +) -> Result>, ReconstructionError> { let cells: Vec = cells.iter().cloned().map(Into::into).collect::>(); let columns = map_cells(dimensions, cells)?; columns .iter() .map(|(&col, cells)| { - if cells.len() < dimensions.rows().into() { - return Err(ReconstructionError::InvalidColumn(col)); - } + ensure!( + cells.len() >= dimensions.height(), + ReconstructionError::InvalidColumn(col) + ); let cells = cells.values().cloned().collect::>(); - let column = reconstruct_column(dimensions.extended_rows(), &cells) - .map_err(ReconstructionError::ColumnReconstructionError)? + let column = reconstruct_column(dimensions.extended_rows(), &cells)? .iter() .map(BlsScalar::to_bytes) - .collect::>(); + .collect::>(); Ok((col, column)) }) .collect::>() } +#[cfg(feature = "std")] fn reconstruct_available( - dimensions: &matrix::Dimensions, + dimensions: matrix::Dimensions, cells: Vec, ) -> Result, ReconstructionError> { let columns = map_cells(dimensions, cells)?; + let rows: usize = dimensions.height(); - let scalars = (0..dimensions.cols()) + let scalars = (0..dimensions.cols().get()) .map(|col| match columns.get(&col) { - None => Ok(vec![None; dimensions.rows() as usize]), + None => Ok(vec![None; rows]), Some(column_cells) => { - if column_cells.len() < dimensions.rows() as usize { - return Err(ReconstructionError::InvalidColumn(col)); - } + ensure!( + column_cells.len() >= rows, + ReconstructionError::InvalidColumn(col) + ); let cells = column_cells.values().cloned().collect::>(); reconstruct_column(dimensions.extended_rows(), &cells) .map(|scalars| scalars.into_iter().map(Some).collect::>()) - .map_err(ReconstructionError::ColumnReconstructionError) }, }) .collect::>, ReconstructionError>>()?; @@ -242,11 +282,12 @@ fn reconstruct_available( /// * `dimensions` - Extended matrix dimensions /// * `cells` - Application specific data cells in extended matrix, without erasure coded data. 
/// * `app_id` - Application ID +#[cfg(feature = "std")] pub fn decode_app_extrinsics( - index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, + index: &DataLookup, + dimensions: matrix::Dimensions, cells: Vec, - app_id: u32, + app_id: AppId, ) -> Result { let positions = app_specific_cells(index, dimensions, app_id).unwrap_or_default(); if positions.is_empty() { @@ -259,7 +300,7 @@ pub fn decode_app_extrinsics( .get(&position.col) .and_then(|column| column.get(&position.row)) .filter(|cell| !cell.data.is_empty()) - .ok_or(ReconstructionError::MissingCell { position })?; + .ok_or(ReconstructionError::MissingCell(position))?; } let mut app_data: Vec = vec![]; @@ -273,7 +314,12 @@ pub fn decode_app_extrinsics( Some(cell) => app_data.extend(cell.data), } } - let ranges = index.app_data_ranges(app_id); + + const_assert!((config::CHUNK_SIZE as u64) <= (u32::MAX as u64)); + let ranges = index + .projected_range_of(app_id, config::CHUNK_SIZE as u32) + .map(|range| vec![(app_id, range)]) + .unwrap_or_default(); Ok(unflatten_padded_data(ranges, app_data) .map_err(ReconstructionError::DataDecodingError)? 
@@ -282,50 +328,61 @@ pub fn decode_app_extrinsics( .collect::>()) } -// Removes both extrinsics and block padding (iec_9797 and seeded random data) -pub fn unflatten_padded_data( - ranges: Vec<(u32, AppDataRange)>, - data: Vec, -) -> Result, String> { - if data.len() % config::CHUNK_SIZE > 0 { - return Err("Invalid data size".to_string()); - } +#[derive(Error, Clone, Debug)] +pub enum UnflattenError { + #[error("`AppDataRange` cannot be converted into `Range`")] + RangeConversion(#[from] TryFromIntError), + #[error("`AppData` cannot be decoded due to {0}")] + Codec(#[from] codec::Error), + #[error("Invalid data size, it needs to be a multiple of CHUNK_SIZE")] + InvalidLen, +} - fn trim_to_data_chunks(range_data: &[u8]) -> Result, String> { - range_data - .chunks_exact(config::CHUNK_SIZE) - .map(|chunk| chunk.get(0..config::DATA_CHUNK_SIZE)) - .collect::>>() - .map(|data_chunks| data_chunks.concat()) - .ok_or_else(|| format!("Chunk data size less than {}", config::DATA_CHUNK_SIZE)) +#[cfg(feature = "std")] +impl std::error::Error for UnflattenError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match &self { + Self::RangeConversion(try_int) => Some(try_int), + Self::Codec(codec) => Some(codec), + _ => None, + } } +} - fn trim_padding(mut data: Vec) -> Result, String> { - while data.last() == Some(&0) { - data.pop(); - } +#[cfg(feature = "std")] +// Removes both extrinsics and block padding (iec_9797 and seeded random data) +pub fn unflatten_padded_data( + ranges: Vec<(AppId, AppDataRange)>, + data: Vec, +) -> Result, UnflattenError> { + ensure!( + data.len() % config::CHUNK_SIZE == 0, + UnflattenError::InvalidLen + ); - match data.pop() { - None => Err("Cannot trim padding on empty data".to_string()), - Some(config::PADDING_TAIL_VALUE) => Ok(data), - Some(_) => Err("Invalid padding tail value".to_string()), - } - } + fn extract_encoded_extrinsic(range_data: &[u8]) -> SparseSliceRead { + const_assert_ne!(config::CHUNK_SIZE, 0); + 
const_assert_ne!(config::DATA_CHUNK_SIZE, 0); - fn decode_extrinsics(data: Vec) -> Result { - ::decode(&mut data.as_slice()).map_err(|err| format!("Cannot decode data: {err}")) + // INTERNAL: Chunk into 32 bytes (CHUNK_SIZE), then remove padding (0..30 bytes). + SparseSliceRead::from_iter( + range_data + .chunks_exact(config::CHUNK_SIZE) + .map(|chunk| &chunk[0..config::DATA_CHUNK_SIZE]), + ) } ranges .into_iter() .map(|(app_id, range)| { - let range = range.start as usize..range.end as usize; - trim_to_data_chunks(&data[range]) - .and_then(trim_padding) - .and_then(decode_extrinsics) - .map(|data| (app_id, data)) + //let range = range.start as usize..range.end as usize; + let range: Range = range.start.try_into()?..range.end.try_into()?; + let reader = extract_encoded_extrinsic(&data[range]); + let extrinsic = ::decode(&mut IoReader(reader))?; + + Ok((app_id, extrinsic)) }) - .collect::, String>>() + .collect::, _>>() } // This module is taken from https://gist.github.com/itzmeanjan/4acf9338d9233e79cfbee5d311e7a0b4 @@ -337,7 +394,7 @@ fn reconstruct_poly( eval_domain: EvaluationDomain, // subset of available data subset: Vec>, -) -> Result, String> { +) -> Result, ReconstructionError> { let missing_indices = subset .iter() .enumerate() @@ -348,7 +405,7 @@ fn reconstruct_poly( zero_poly_fn(eval_domain, missing_indices.as_slice(), subset.len() as u64); for i in 0..subset.len() { if subset[i].is_none() && zero_eval[i] != BlsScalar::zero() { - return Err("bad zero poly evaluation !".to_owned()); + return Err(ReconstructionError::BadZeroPoly); } } let mut poly_evals_with_zero: Vec = Vec::new(); @@ -446,7 +503,8 @@ fn unshift_poly(poly: &mut [BlsScalar]) { } } -pub type AppDataRange = std::ops::Range; +pub type AppDataRange = Range; + // use this function for reconstructing back all cells of certain column // when at least 50% of them are available // @@ -458,14 +516,16 @@ pub type AppDataRange = std::ops::Range; pub fn reconstruct_column( row_count: u32, cells: 
&[data::DataCell], -) -> Result, String> { +) -> Result, ReconstructionError> { // just ensures all rows are from same column ! // it's required as that's how it's erasure coded during // construction in validator node - fn check_cells(cells: &[data::DataCell]) { - assert!(!cells.is_empty()); + fn check_cells(cells: &[data::DataCell]) -> bool { + if cells.is_empty() { + return false; + } let first_col = cells[0].position.col; - assert!(cells.iter().all(|c| c.position.col == first_col)); + cells.iter().all(|c| c.position.col == first_col) } // given row index in column of interest, finds it if present @@ -474,20 +534,34 @@ pub fn reconstruct_column( cells .iter() .find(|cell| cell.position.row == idx) - .map(|cell| { + .and_then(|cell| { <[u8; BlsScalar::SIZE]>::try_from(&cell.data[..]) - .expect("didn't find u8 array of length 32") + .map(|data| BlsScalar::from_bytes(&data).ok()) + .ok() + .flatten() }) - .and_then(|data| BlsScalar::from_bytes(&data).ok()) } // row count of data matrix must be power of two ! 
- assert!(row_count % 2 == 0); - assert!(cells.len() >= (row_count / 2) as usize && cells.len() <= row_count as usize); - check_cells(cells); - - let eval_domain = EvaluationDomain::new(row_count as usize).unwrap(); - let mut subset: Vec> = Vec::with_capacity(row_count as usize); + let row_count_sz = + usize::try_from(row_count).map_err(|_| ReconstructionError::RowCountExceeded)?; + ensure!(row_count % 2 == 0, ReconstructionError::InvalidRowCount); + ensure!( + cells.len() >= row_count_sz / 2, + ReconstructionError::MinCells(row_count_sz / 2) + ); + ensure!( + cells.len() <= row_count_sz, + ReconstructionError::MaxCells(row_count_sz) + ); + ensure!( + check_cells(cells), + ReconstructionError::CellsFromDifferentCols + ); + + let eval_domain = EvaluationDomain::new(row_count_sz) + .map_err(|_| ReconstructionError::InvalidEvaluationDomain)?; + let mut subset: Vec> = Vec::with_capacity(row_count_sz); // fill up vector in ordered fashion // @note the way it's done should be improved @@ -510,102 +584,35 @@ mod tests { use super::*; use crate::{ data::DataCell, - index::AppDataIndex, matrix::{Dimensions, Position}, }; - #[test] - fn app_data_index_cell_ranges() { - let cases = vec![ - ( - AppDataIndex { - size: 8, - index: vec![], - }, - vec![(0, 0..8)], - ), - ( - AppDataIndex { - size: 4, - index: vec![(1, 0), (2, 2)], - }, - vec![(1, 0..2), (2, 2..4)], - ), - ( - AppDataIndex { - size: 15, - index: vec![(1, 3), (12, 8)], - }, - vec![(0, 0..3), (1, 3..8), (12, 8..15)], - ), - ]; + #[test_case(0 => vec![0] ; "App 0 spans 2 rows form row 0")] + #[test_case(1 => vec![0, 2] ; "App 1 spans 2 rows from row 0")] + #[test_case(2 => vec![2] ; "App 2 spans 1 rows from row 2")] + #[test_case(3 => vec![4, 6] ; "App 3 spans 2 rows from row 4")] + #[test_case(4 => Vec::::new() ; "There is no app 4")] + fn test_app_specific_rows(id: u32) -> Vec { + let id_lens: Vec<(u32, u32)> = vec![(0, 2), (1, 3), (2, 3), (3, 8)]; + let index = 
DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + let dimensions = Dimensions::new(8, 4).unwrap(); - for (index, result) in cases { - assert_eq!(index.cells_ranges(), result); - } + app_specific_rows(&index, dimensions, AppId(id)) } - #[test] - fn app_data_index_data_ranges() { - let cases = vec![ - ( - AppDataIndex { - size: 8, - index: vec![], - }, - vec![(0, 0..256)], - ), - ( - AppDataIndex { - size: 4, - index: vec![(1, 0), (2, 2)], - }, - vec![(1, 0..64), (2, 64..128)], - ), - ( - AppDataIndex { - size: 15, - index: vec![(1, 3), (12, 8)], - }, - vec![(0, 0..96), (1, 96..256), (12, 256..480)], - ), - ]; - - for (index, result) in cases { - assert_eq!(index.data_ranges(), result); - } + fn to_matrix_pos(data: &[(u32, u16)]) -> Vec { + data.iter().cloned().map(Position::from).collect() } - #[test_case(0, &[0] ; "App 0 spans 2 rows form row 0")] - #[test_case(1, &[0, 2] ; "App 1 spans 2 rows from row 0")] - #[test_case(2, &[2] ; "App 2 spans 1 rows from row 2")] - #[test_case(3, &[4, 6] ; "App 3 spans 2 rows from row 4")] - #[test_case(4, &[] ; "There is no app 4")] - fn test_app_specific_rows(app_id: u32, expected: &[u32]) { - let index = AppDataIndex { - size: 16, - index: vec![(1, 2), (2, 5), (3, 8)], - }; - let dimensions = Dimensions::new(8, 4).unwrap(); - let result = app_specific_rows(&index, &dimensions, app_id); - assert_eq!(expected.len(), result.len()); - } - - #[test_case(0, &[(0, 0), (0, 1), (0, 2), (0, 3), (2, 0)] ; "App 0 has five cells")] - #[test_case(1, &[(2, 1), (2, 2), (2, 3)] ; "App 1 has 3 cells")] - #[test_case(2, &[] ; "App 2 has no cells")] - fn test_app_specific_cells(app_id: u32, expected: &[(u32, u16)]) { - let index = AppDataIndex { - size: 8, - index: vec![(1, 5)], - }; + #[test_case(0 => to_matrix_pos(&[(0, 0), (0, 1), (0, 2), (0, 3), (2, 0)]) ; "App 0 has five cells")] + #[test_case(1 => to_matrix_pos(&[(2, 1), (2, 2), (2, 3)]) ; "App 1 has 3 cells")] + #[test_case(2 => Vec::::new() ; "App 2 has no cells")] + fn 
test_app_specific_cells(app_id: u32) -> Vec { + let id_lens: Vec<(u32, usize)> = vec![(0, 5), (1, 3)]; + let index = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); let dimensions = Dimensions::new(4, 4).unwrap(); - let result = app_specific_cells(&index, &dimensions, app_id).unwrap_or_default(); - assert_eq!(expected.len(), result.len()); - result.iter().zip(expected).for_each(|(a, &(row, col))| { - assert_eq!(a.row, row); - assert_eq!(a.col, col); - }); + + app_specific_cells(&index, dimensions, AppId(app_id)).unwrap_or_default() } #[test] diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 419ab885..544bfc29 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -1,26 +1,27 @@ -use std::{ +use avail_core::{ensure, AppId, DataLookup}; +use core::{ array::TryFromSliceError, convert::{TryFrom, TryInto}, num::TryFromIntError, }; - use dusk_bytes::Serializable; use dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, - prelude::{BlsScalar, PublicParameters}, + prelude::{BlsScalar, CommitKey, PublicParameters}, }; -use thiserror::Error; +use sp_std::prelude::*; +use thiserror_no_std::Error; use crate::{ com, config::{self, COMMITMENT_SIZE}, - index, matrix, + matrix, }; #[derive(Error, Debug)] -pub enum DataError { +pub enum Error { #[error("Scalar slice error: {0}")] - SliceError(TryFromSliceError), + SliceError(#[from] TryFromSliceError), #[error("Scalar data is not valid")] ScalarDataError, #[error("Invalid scalar data length")] @@ -30,47 +31,37 @@ pub enum DataError { #[error("Bad data len")] BadLen, #[error("Plonk error: {0}")] - PlonkError(dusk_plonk::error::Error), + PlonkError(#[from] dusk_plonk::error::Error), #[error("Bad commitments data")] BadCommitmentsData, #[error("Bad rows data")] BadRowsData, + #[error("Integer conversion error")] + IntError(#[from] TryFromIntError), } -#[derive(Error, Debug)] -pub enum Error { - #[error("Invalid data: {0}")] - InvalidData(DataError), 
-} - -impl From for Error { - fn from(e: TryFromSliceError) -> Self { - Self::InvalidData(DataError::SliceError(e)) - } -} - -impl From for Error { - fn from(_: TryFromIntError) -> Self { - Self::InvalidData(DataError::BadCommitmentsData) +#[cfg(feature = "std")] +impl std::error::Error for Error { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match &self { + Self::SliceError(slice) => Some(slice), + Self::PlonkError(plonk) => Some(plonk), + Self::IntError(try_int) => Some(try_int), + _ => None, + } } } impl From for Error { fn from(e: dusk_bytes::Error) -> Self { match e { - dusk_bytes::Error::InvalidData => Self::InvalidData(DataError::ScalarDataError), - dusk_bytes::Error::BadLength { .. } => Self::InvalidData(DataError::BadScalarDataLen), - dusk_bytes::Error::InvalidChar { .. } => Self::InvalidData(DataError::BadScalarData), + dusk_bytes::Error::InvalidData => Self::ScalarDataError, + dusk_bytes::Error::BadLength { .. } => Self::BadScalarDataLen, + dusk_bytes::Error::InvalidChar { .. 
} => Self::BadScalarData, } } } -impl From for Error { - fn from(e: dusk_plonk::error::Error) -> Self { - Self::InvalidData(DataError::PlonkError(e)) - } -} - fn try_into_scalar(chunk: &[u8]) -> Result { let sized_chunk = <[u8; config::CHUNK_SIZE]>::try_from(chunk)?; BlsScalar::from_bytes(&sized_chunk).map_err(From::from) @@ -78,9 +69,7 @@ fn try_into_scalar(chunk: &[u8]) -> Result { fn try_into_scalars(data: &[u8]) -> Result, Error> { let chunks = data.chunks_exact(config::CHUNK_SIZE); - if !chunks.remainder().is_empty() { - return Err(Error::InvalidData(DataError::BadLen)); - } + ensure!(chunks.remainder().is_empty(), Error::BadLen); chunks .map(try_into_scalar) .collect::, Error>>() @@ -103,22 +92,22 @@ pub fn verify_equality( public_params: &PublicParameters, commitments: &[[u8; COMMITMENT_SIZE]], rows: &[Option>], - index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, - app_id: u32, + index: &DataLookup, + dimensions: matrix::Dimensions, + app_id: AppId, ) -> Result<(Vec, Vec), Error> { - if commitments.len() != dimensions.extended_rows().try_into()? { - return Err(Error::InvalidData(DataError::BadCommitmentsData)); - } - + let ext_rows: usize = dimensions.extended_rows().try_into()?; + ensure!(commitments.len() == ext_rows, Error::BadCommitmentsData); let mut app_rows = com::app_specific_rows(index, dimensions, app_id); - if rows.len() != dimensions.extended_rows().try_into()? { + if rows.len() != ext_rows { return Ok((vec![], app_rows)); } - let (prover_key, _) = public_params.trim(dimensions.cols() as usize)?; - let domain = EvaluationDomain::new(dimensions.cols() as usize)?; + let dim_cols = dimensions.width(); + // @TODO Opening Key here??? + let (prover_key, _) = public_params.trim(dim_cols)?; + let domain = EvaluationDomain::new(dim_cols)?; // This is a single-threaded implementation. // At some point we should benchmark and decide @@ -128,11 +117,8 @@ pub fn verify_equality( .zip(rows.iter()) .zip(0u32..) 
.filter(|(.., index)| app_rows.contains(index)) - .filter_map(|((&commitment, row), index)| { - try_into_scalars(row.as_ref()?) - .map(|scalars| Evaluations::from_vec_and_domain(scalars, domain).interpolate()) - .and_then(|polynomial| prover_key.commit(&polynomial).map_err(From::from)) - .map(|result| (result.to_bytes() == commitment).then_some(index)) + .filter_map(|((commitment, maybe_row), index)| { + row_index_commitment_verification(&prover_key, domain, commitment, maybe_row, index) .transpose() }) .collect::, Error>>()?; @@ -142,6 +128,25 @@ pub fn verify_equality( Ok((verified, app_rows)) } +fn row_index_commitment_verification( + prover_key: &CommitKey, + domain: EvaluationDomain, + commitment: &[u8], + maybe_row: &Option>, + index: u32, +) -> Result, Error> { + if let Some(row) = maybe_row.as_ref() { + let scalars = try_into_scalars(row)?; + let polynomial = Evaluations::from_vec_and_domain(scalars, domain).interpolate(); + let result = prover_key.commit(&polynomial)?; + + if result.to_bytes() == commitment { + return Ok(Some(index)); + } + } + Ok(None) +} + /// Creates vector of exact size commitments, from commitments slice pub fn from_slice(source: &[u8]) -> Result, TryFromSliceError> { source @@ -152,18 +157,14 @@ pub fn from_slice(source: &[u8]) -> Result, TryFromSl #[cfg(test)] mod tests { + use super::verify_equality; + use avail_core::{AppId, DataLookup}; use dusk_plonk::prelude::PublicParameters; use once_cell::sync::Lazy; use rand::SeedableRng; use rand_chacha::ChaChaRng; - use crate::{ - commitments, - index::{self, AppDataIndex}, - matrix, - }; - - use super::verify_equality; + use crate::{commitments, matrix}; static PUBLIC_PARAMETERS: Lazy = Lazy::new(|| PublicParameters::setup(256, &mut ChaChaRng::seed_from_u64(42)).unwrap()); @@ -174,9 +175,9 @@ mod tests { &PUBLIC_PARAMETERS, &[], &[], - &index::AppDataIndex::default(), - &matrix::Dimensions::new(1, 1).unwrap(), - 0, + &DataLookup::default(), + matrix::Dimensions::new(1, 1).unwrap(), + 
AppId(0), ) .is_err()); } @@ -193,42 +194,38 @@ mod tests { let row_4 = Some(hex::decode("722c20416c65782073657473206f757420746f207265736375652074686520006b696e67646f6d2e204f6e206869732071756573742c206865206465666561007473204a616e6b656e27732068656e63686d656e20616e64207265747269650076657320766172696f7573206974656d73207768696368206c656164206869006d20746f77617264204a616e6b656e2077686f6d20686520646566656174730020616e642073656573207475726e656420746f2073746f6e652e20416c65780020726574726965766573207468652063726f776e2c20616e6420746865207000656f706c65206f6620526164617869616e2061726520726573746f7265642000756e64657220746865206e65776c792063726f776e6564204b696e67204567006c652e800000000000000000000000000000000000000000000000000000000004fd01412072656d616b65206f66207468652067616d652c207469746c65640020416c6578204b69646420696e204d697261636c6520576f726c642044582c002077617320616e6e6f756e636564206f6e204a756e652031302c2032303230002c20616e642072656c6561736564206f6e204a756e652032322c2032303231002e2054686520800000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae2813004925683890a942f63ce493f512f0b2cfb7c42a07ce9130cb6d059a388d886100536cb9c5b81a9a8dc46c2d64a7a5b1d93b2d8646805d8d2a122fccdb3bc7dc00975ab75fc865793536f66e64189050360f623dc88abb8300180cdd0a8f33d700d2159b3df296b46dd64bec57609a3f2fb4ad8b46e2fd4c9f25d44328dd50ce00514db7bbf50ef518c195a7053763d0a8dfdab6b946ee9f3954549319ac7dc600bac203232876b27b541433fb2f1438289799049b349f7a2c205d3a97f66ef4002800baa3cb78fb33130181775fb26a62630236bd8bc644a3656489d135ba1800b11846029a9183d434593cbbc1e03a4f8dba40cf6cfa07ba043c83
f6a4888700364c233191a4b99aff1e9b8ab2aba54ecc61a6a8d2a50043e8948be1e76a43007d348990b99e55fee2a4bc79b29b27f2f9720e96840517dc8a0be65757110400").unwrap()); - let size = 79; - let index = vec![(1, 1), (2, 74)]; + let id_lens: Vec<(u32, u32)> = vec![(0, 1), (1, 73), (2, 6)]; + let lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); + let dimension = matrix::Dimensions::new(4, 32).unwrap(); + let id = AppId(1); let result = verify_equality( &PUBLIC_PARAMETERS, &commitments, &[row_0.clone(), None, row_2, None, row_4, None, None, None], - &AppDataIndex { size, index }, - &matrix::Dimensions::new(4, 32).unwrap(), - 1, + &lookup, + dimension, + id, ); assert_eq!(result.unwrap(), (vec![0, 2, 4], vec![])); - let size = 79; - let index = vec![(1, 1), (2, 74)]; - let result = verify_equality( &PUBLIC_PARAMETERS, &commitments, &[row_0, None, None, None, None, None, None, None], - &AppDataIndex { size, index }, - &matrix::Dimensions::new(4, 32).unwrap(), - 1, + &lookup, + dimension, + id, ); assert_eq!(result.unwrap(), (vec![0], vec![2, 4])); - let size = 79; - let index = vec![(1, 1), (2, 74)]; - let result = verify_equality( &PUBLIC_PARAMETERS, &commitments, &[None, None, None, None, None, None, None, None], - &AppDataIndex { size, index }, - &matrix::Dimensions::new(4, 32).unwrap(), - 1, + &lookup, + dimension, + id, ); assert_eq!(result.unwrap(), (vec![], vec![0, 2, 4])); } diff --git a/kate/recovery/src/data.rs b/kate/recovery/src/data.rs index 7d47d049..d55fab75 100644 --- a/kate/recovery/src/data.rs +++ b/kate/recovery/src/data.rs @@ -1,9 +1,11 @@ -use std::{collections::HashMap, convert::TryInto}; +use core::convert::TryInto; +use derive_more::Constructor; +use sp_std::{collections::btree_map::BTreeMap, vec::Vec}; use crate::matrix::{Dimensions, Position, RowIndex}; /// Position and data of a cell in extended matrix -#[derive(Default, Debug, Clone)] +#[derive(Default, Debug, Clone, Constructor)] pub struct DataCell { /// Cell's position pub position: 
Position, @@ -12,7 +14,7 @@ pub struct DataCell { } /// Position and content of a cell in extended matrix -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Constructor)] pub struct Cell { /// Cell's position pub position: Position, @@ -21,6 +23,7 @@ pub struct Cell { } impl Cell { + #[cfg(feature = "std")] pub fn reference(&self, block: u32) -> String { self.position.reference(block) } @@ -36,13 +39,13 @@ impl Cell { /// Merges cells data per row. /// Cells are sorted before merge. -pub fn rows(dimensions: &Dimensions, cells: &[&Cell]) -> Vec<(RowIndex, Vec)> { +pub fn rows(dimensions: Dimensions, cells: &[&Cell]) -> Vec<(RowIndex, Vec)> { let mut sorted_cells = cells.to_vec(); sorted_cells .sort_by(|a, b| (a.position.row, a.position.col).cmp(&(b.position.row, b.position.col))); - let mut rows = HashMap::new(); + let mut rows = BTreeMap::new(); for cell in sorted_cells { rows.entry(RowIndex(cell.position.row)) .or_insert_with(Vec::default) @@ -56,7 +59,7 @@ pub fn rows(dimensions: &Dimensions, cells: &[&Cell]) -> Vec<(RowIndex, Vec) impl From for DataCell { fn from(cell: Cell) -> Self { DataCell { - position: cell.position.clone(), + position: cell.position, data: cell.data(), } } @@ -95,7 +98,7 @@ mod tests { &cell(position(0, 1), content([1; 32])), ]; - let mut rows = rows(&dimensions, &cells); + let mut rows = rows(dimensions, &cells); rows.sort_by_key(|(key, _)| key.0); let expected = [ @@ -120,10 +123,10 @@ mod tests { &cell(position(0, 1), content([1; 32])), ]; - let mut rows = rows(&dimensions, &cells); + let mut rows = rows(dimensions, &cells); rows.sort_by_key(|(key, _)| key.0); - assert!(rows.len() == 1); + assert_eq!(rows.len(), 1); let (row_index, row) = &rows[0]; assert_eq!(row_index.0, 0); assert_eq!(*row, [[0u8; 32], [1u8; 32]].concat()); diff --git a/kate/recovery/src/index.rs b/kate/recovery/src/index.rs deleted file mode 100644 index 61145ac5..00000000 --- a/kate/recovery/src/index.rs +++ /dev/null @@ -1,118 +0,0 @@ -use std::{convert::TryFrom, 
iter::once, ops::Range}; - -use serde::{Deserialize, Serialize}; - -use crate::config; - -/// Index is list of pairs (app_id, start_index), -/// where start index is index of first cell for that application. -#[derive(Serialize, Deserialize, Default, Debug, Clone)] -pub struct AppDataIndex { - /// Number of the data cells in the matrix - pub size: u32, - /// Data index per application - pub index: Vec<(u32, u32)>, -} - -#[derive(PartialEq, Eq, Debug)] -pub enum AppDataIndexError { - SizeOverflow, - UnsortedLayout, -} - -impl AppDataIndex { - /// Calculates cell ranges per application from extrinsic offsets. - /// Range is from start index to end index in matrix. - pub fn cells_ranges(&self) -> Vec<(u32, Range)> { - // Case if first app_id in index is zero is ignored - // since it should be asserted elsewhere - let prepend = self.index.first().map_or(vec![(0, 0)], |&(_, offset)| { - if offset == 0 { - vec![] - } else { - vec![(0, 0)] - } - }); - - let starts = prepend.iter().chain(self.index.iter()); - - let ends = self - .index - .iter() - .skip_while(|&&(_, offset)| offset == 0) - .map(|&(_, offset)| offset) - .chain(once(self.size)); - - starts - .zip(ends) - .map(|(&(app_id, start), end)| (app_id, (start..end))) - .collect::>() - } - - pub fn app_cells_range(&self, app_id: u32) -> Option> { - self.cells_ranges() - .into_iter() - .find(|&(id, _)| app_id == id) - .map(|(_, range)| range) - } - - fn app_cells_ranges(&self, app_id: u32) -> Vec> { - self.cells_ranges() - .into_iter() - .filter(|&(id, _)| app_id == id) - .map(|(_, range)| range) - .collect::>() - } - - /// Calculates data range per application from extrinsics layout. - /// Range is from start index to end index in matrix flattened as byte array. 
- pub fn data_ranges(&self) -> Vec<(u32, Range)> { - const CHUNK_SIZE_U32: u32 = config::CHUNK_SIZE as u32; - self.cells_ranges() - .into_iter() - .map(|(app_id, Range { start, end })| { - (app_id, (start * CHUNK_SIZE_U32..end * CHUNK_SIZE_U32)) - }) - .collect::>() - } - - pub fn app_data_ranges(&self, app_id: u32) -> Vec<(u32, Range)> { - const CHUNK_SIZE_U32: u32 = config::CHUNK_SIZE as u32; - self.app_cells_ranges(app_id) - .iter() - .map(|Range { start, end }| (app_id, (start * CHUNK_SIZE_U32..end * CHUNK_SIZE_U32))) - .collect::>() - } -} - -impl TryFrom<&[(T, u32)]> for AppDataIndex -where - T: Clone + Into, -{ - type Error = AppDataIndexError; - - fn try_from(layout: &[(T, u32)]) -> Result { - let mut index = Vec::new(); - // transactions are ordered by application id - // skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = 0u32; - - for (app_id, data_len) in layout { - let app_id: u32 = app_id.clone().into(); - if app_id != 0 && prev_app_id != app_id { - index.push((app_id, size)); - } - - size = size - .checked_add(*data_len) - .ok_or(Self::Error::SizeOverflow)?; - if prev_app_id > app_id { - return Err(Self::Error::UnsortedLayout); - } - prev_app_id = app_id; - } - - Ok(AppDataIndex { size, index }) - } -} diff --git a/kate/recovery/src/lib.rs b/kate/recovery/src/lib.rs index 48f158ea..fcc8c3a4 100644 --- a/kate/recovery/src/lib.rs +++ b/kate/recovery/src/lib.rs @@ -1,9 +1,13 @@ +#![cfg_attr(not(feature = "std"), no_std)] + pub mod com; pub mod commitments; pub mod config; pub mod data; -pub mod index; pub mod matrix; pub mod proof; +#[cfg(feature = "std")] +pub mod sparse_slice_read; + #[cfg(feature = "std")] pub mod testnet; diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index 5c81697a..a3ab97cb 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -1,22 +1,60 @@ -use std::ops::Range; - -use serde::{Deserialize, Serialize}; - use 
crate::config::{self, CHUNK_SIZE}; +use core::{ + convert::TryInto, + fmt::{Display, Formatter, Result}, + num::NonZeroU16, + ops::{Mul, Range}, +}; +use derive_more::Constructor; +use sp_std::prelude::*; + +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; const EXTENSION_FACTOR_U32: u32 = config::EXTENSION_FACTOR as u32; /// Position of a cell in the the matrix. -#[derive(Default, Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Default, Debug, Clone, Copy, Hash, Eq, PartialEq, Constructor)] pub struct Position { pub row: u32, pub col: u16, } +impl From<(R, C)> for Position +where + u32: From, + u16: From, +{ + fn from(row_col: (R, C)) -> Self { + Self { + row: row_col.0.into(), + col: row_col.1.into(), + } + } +} + +impl From for (R, C) +where + R: From, + C: From, +{ + fn from(p: Position) -> (R, C) { + (p.row.into(), p.col.into()) + } +} + +impl Display for Position { + fn fmt(&self, f: &mut Formatter<'_>) -> Result { + f.write_fmt(format_args!("{}:{}", self.col, self.row)) + } +} + impl Position { /// Refrence in format `block_number:column_number:row_number` + #[cfg(feature = "std")] pub fn reference(&self, block_number: u32) -> String { - format!("{}:{}:{}", block_number, self.col, self.row) + format!("{}:{}", block_number, self) } /// Checks if position is from extended row @@ -26,18 +64,20 @@ impl Position { } /// Matrix partition (column-wise) -#[derive(Serialize, Deserialize, Clone, Debug)] +#[derive(Clone, Copy, Debug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Partition { pub number: u8, pub fraction: u8, } /// Matrix row index -#[derive(Debug, Hash, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] pub struct RowIndex(pub u32); impl RowIndex { /// Refrence in format `block_number:row_number` + #[cfg(feature = "std")] pub fn reference(&self, block_number: u32) -> String { 
format!("{}:{}", block_number, self.0) } @@ -57,72 +97,145 @@ impl RowIndex { /// Extended columns (EC is erasure code): [1,EC,5,EC], [2,EC,6,EC], [3,EC,7,EC], [4,EC,8,EC] /// Matrix representation: [1,5,2,6,3,7,4,8] /// Extended matrix representation: [1,EC,5,EC,2,EC,6,EC,3,EC,7,EC,4,EC,8,EC] -#[derive(Debug, Clone)] +#[derive(Copy, Debug, Clone, PartialEq, Eq)] pub struct Dimensions { - rows: u16, - cols: u16, + rows: NonZeroU16, + cols: NonZeroU16, +} + +impl From<(R, C)> for Dimensions +where + R: Into, + C: Into, +{ + fn from(rows_cols: (R, C)) -> Self { + let (rows, cols) = rows_cols; + Self { + rows: rows.into(), + cols: cols.into(), + } + } +} + +impl From for (R, C) +where + R: From, + C: From, +{ + fn from(d: Dimensions) -> Self { + (d.rows.get().into(), d.cols.get().into()) + } } impl Dimensions { - /// Creates new matrix dimensions. - /// Data layout is assumed to be row-wise. - /// Returns `None` if rows or cols is 0. - pub const fn new(rows: u16, cols: u16) -> Option { - if rows == 0 || cols == 0 { - return None; + pub fn new, C: TryInto>(rows: R, cols: C) -> Option { + let rows = rows.try_into().ok()?; + let cols = cols.try_into().ok()?; + + Some(Self { rows, cols }) + } + + pub fn new_from, C: TryInto>(rows: R, cols: C) -> Option { + let rows: u16 = rows.try_into().ok()?; + let cols: u16 = cols.try_into().ok()?; + + Self::new(rows, cols) + } + + /// Creates a `Dimension` without checking whether parameters are non-zero. This results in + /// undefined behaviour if any parameter is zero. + /// + /// # Safety + /// Parameters `rows` and `cols` must not be zero. + pub const unsafe fn new_unchecked(rows: u16, cols: u16) -> Self { + Self { + rows: NonZeroU16::new_unchecked(rows), + cols: NonZeroU16::new_unchecked(cols), } - Some(Dimensions { rows, cols }) } /// Returns number of rows - pub fn rows(&self) -> u16 { + #[inline] + pub fn rows(&self) -> NonZeroU16 { self.rows } + /// Returns number of rows, which is always greater than zero. 
+ /// + /// # SAFETY + /// As internal member is `NonZeroU16`, this always returns greater than zero. + #[inline] + pub fn height(&self) -> usize { + NonZeroU16::get(self.rows).into() + } + /// Returns number of columns - pub fn cols(&self) -> u16 { + #[inline] + pub fn cols(&self) -> NonZeroU16 { self.cols } + /// Returns number of cols, which is always greater than zero. + /// + /// # SAFETY + /// As internal member is `NonZeroU16`, this always returns greater than zero. + #[inline] + pub fn width(&self) -> usize { + NonZeroU16::get(self.cols).into() + } + /// Matrix size. - pub fn size(&self) -> u32 { - self.rows as u32 * self.cols as u32 + pub fn size + Mul>(&self) -> T { + T::from(self.rows.get()) * T::from(self.cols.get()) + } + + pub fn divides(&self, other: &Self) -> bool { + other.cols.get() % self.cols == 0u16 && other.rows.get() % self.rows == 0u16 + } + + /// Extends rows by `row_factor` and cols by `col_factor`. + pub fn extend(&self, row_factor: NonZeroU16, col_factor: NonZeroU16) -> Option { + let rows = self.rows.checked_mul(row_factor)?; + let cols = self.cols.checked_mul(col_factor)?; + + Some(Self { rows, cols }) } /// Extended matrix size. - pub fn extended_size(&self) -> u64 { - self.extended_rows() as u64 * self.cols as u64 + pub fn extended_size(&self) -> u32 { + self.extended_rows() * u32::from(self.cols.get()) } /// Row size in bytes pub fn row_byte_size(&self) -> usize { - CHUNK_SIZE * self.cols as usize + CHUNK_SIZE * self.width() } /// Extended matrix rows count. pub fn extended_rows(&self) -> u32 { - (self.rows as u32) * EXTENSION_FACTOR_U32 + u32::from(self.rows.get()) * EXTENSION_FACTOR_U32 } /// List of data row indexes in the extended matrix. 
- pub fn extended_data_rows(&self, cells: Range) -> Vec { - assert!(cells.end <= self.size()); - if cells.end == 0 { - return vec![]; + pub fn extended_data_rows(&self, cells: Range) -> Option> { + // Invalid range returns `None` + if cells.end > self.size() || cells.end == 0 { + return None; } let first_row = self.extended_data_row(cells.start); let last_row = self.extended_data_row(cells.end - 1); - (first_row..=last_row) + let data = (first_row..=last_row) .step_by(config::EXTENSION_FACTOR) - .collect::>() + .collect::>(); + Some(data) } /// Cell positions for given column in extended matrix. /// Empty if column index is not valid. pub fn col_positions(&self, col: u16) -> Vec { - if self.cols() <= col { + if self.cols().get() <= col { return vec![]; } (0..self.extended_rows()) @@ -136,7 +249,7 @@ impl Dimensions { if self.extended_rows() <= row { return vec![]; } - (0..self.cols()) + (0..self.cols().get()) .map(|col| Position { col, row }) .collect::>() } @@ -151,12 +264,12 @@ impl Dimensions { /// Column index of a cell in the matrix. fn col(&self, cell: u32) -> u16 { - (cell % self.cols as u32) as u16 + (cell % u32::from(self.cols.get())) as u16 } /// Extended matrix data row index of cell in the data matrix. fn extended_data_row(&self, cell: u32) -> u32 { - (cell / self.cols as u32) * EXTENSION_FACTOR_U32 + (cell / u32::from(self.cols.get())) * EXTENSION_FACTOR_U32 } /// Extended matrix data position of a cell in the data matrix. @@ -168,16 +281,17 @@ impl Dimensions { } /// Extended matrix data positions for given data matrix cells range. - pub fn extended_data_positions(&self, cells: Range) -> Vec { - assert!(cells.end <= self.size()); - cells - .map(|cell| self.extended_data_position(cell)) - .collect::>() + pub fn extended_data_positions(&self, cells: Range) -> Option> { + (cells.end <= self.size()).then(|| { + cells + .map(|cell| self.extended_data_position(cell)) + .collect::>() + }) } /// Checks if extended matrix contains given position. 
pub fn extended_contains(&self, position: &Position) -> bool { - position.row < self.extended_rows() && position.col < self.cols + position.row < self.extended_rows() && position.col < self.cols.get() } /// Creates iterator over rows in extended matrix. @@ -187,22 +301,22 @@ impl Dimensions { /// Creates iterator over data cells in data matrix (used to retrieve data from the matrix). pub fn iter_data(&self) -> impl Iterator { - let rows = self.rows as usize; - let cols = self.cols as usize; + let rows = self.height(); + let cols = self.width(); (0..rows).flat_map(move |row| (0..cols).map(move |col| (row, col))) } /// Creates iterator over cell indexes in data matrix (used to store data in the matrix). pub fn iter_cells(&self) -> impl Iterator { - let rows = self.rows as u32; - let cols = self.cols; - (0..cols).flat_map(move |col| (0..rows).map(move |row| row * cols as u32 + col as u32)) + let rows: u32 = self.rows.get().into(); + let cols: u32 = self.cols.get().into(); + (0..cols).flat_map(move |col| (0..rows).map(move |row| row * cols + col)) } /// Creates iterator over data positions by row in extended matrix. 
pub fn iter_extended_data_positions(&self) -> impl Iterator { - let rows = self.rows as u32; - let cols = self.cols; + let rows: u32 = self.rows.get().into(); + let cols = self.cols.get(); (0..rows).flat_map(move |row| (0..cols).map(move |col| (row * EXTENSION_FACTOR_U32, col))) } @@ -214,13 +328,20 @@ impl Dimensions { let size = (self.extended_size() as f64 / partition.fraction as f64).ceil() as u32; let start = size * (partition.number - 1) as u32; let end = size * (partition.number as u32); - let cols: u32 = self.cols().into(); + let cols: u32 = self.cols.get().into(); (start..end).map(move |cell| Position { row: cell / cols, col: (cell % cols) as u16, }) } + + pub fn transpose(self) -> Self { + Self { + rows: self.cols, + cols: self.rows, + } + } } #[cfg(test)] @@ -274,6 +395,6 @@ mod tests { .unwrap() .iter_extended_partition_positions(&Partition { number, fraction }) .zip(expected.iter().map(|&(row, col)| Position { row, col })) - .for_each(|(p1, p2)| assert!(p1 == p2)); + .for_each(|(p1, p2)| assert_eq!(p1, p2)); } } diff --git a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index c30c60fe..17acf619 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ -5,30 +5,35 @@ use dusk_plonk::{ fft::EvaluationDomain, prelude::BlsScalar, }; -use thiserror::Error; +use thiserror_no_std::Error; use crate::{config::COMMITMENT_SIZE, data::Cell, matrix::Dimensions}; #[derive(Error, Debug)] pub enum Error { - #[error("Proof, data or commitment is not valid: {0}")] - InvalidData(String), - #[error("Evaluation domain is not valid for given dimensions: {0}")] - InvalidDomain(String), - #[error("Public parameters degree is to small for given dimensions: {0}")] - InvalidDegree(String), + #[error("Proof, data or commitment is not valid")] + InvalidData, + #[error("Evaluation domain is not valid for given dimensions")] + InvalidDomain, + #[error("Public parameters degree is to small for given dimensions")] + InvalidDegree, + #[error("Position isn't 
in domain")] + InvalidPositionInDomain, } +#[cfg(feature = "std")] +impl std::error::Error for Error {} + impl From for Error { - fn from(error: dusk_bytes::Error) -> Self { - Error::InvalidData(format!("{error:?}")) + fn from(_: dusk_bytes::Error) -> Self { + Error::InvalidData } } /// Verifies proof for given cell pub fn verify( public_parameters: &PublicParameters, - dimensions: &Dimensions, + dimensions: Dimensions, commitment: &[u8; COMMITMENT_SIZE], cell: &Cell, ) -> Result { @@ -44,14 +49,15 @@ pub fn verify( commitment_to_polynomial, }; - let point = EvaluationDomain::new(dimensions.cols().into()) - .map_err(|error| Error::InvalidDomain(format!("{error:?}")))? + let cols: usize = dimensions.width(); + let point = EvaluationDomain::new(cols) + .map_err(|_| Error::InvalidDomain)? .elements() .nth(cell.position.col.into()) - .ok_or_else(|| Error::InvalidDomain("Position isn't in domain".to_string()))?; + .ok_or(Error::InvalidPositionInDomain)?; public_parameters - .trim(dimensions.cols().into()) + .trim(cols) .map(|(_, verifier_key)| verifier_key.check(point, proof)) - .map_err(|error| Error::InvalidDegree(format!("{error:?}"))) + .map_err(|_| Error::InvalidDegree) } diff --git a/kate/recovery/src/sparse_slice_read.rs b/kate/recovery/src/sparse_slice_read.rs new file mode 100644 index 00000000..7a1bdb8a --- /dev/null +++ b/kate/recovery/src/sparse_slice_read.rs @@ -0,0 +1,51 @@ +use core::iter::FromIterator; +use std::{ + collections::VecDeque, + io::{Read, Result}, +}; + +/// It is a Codec Reader which allows decoding from non-sequential data. 
+pub struct SparseSliceRead<'a> { + parts: VecDeque<&'a [u8]>, +} + +impl<'a> FromIterator<&'a [u8]> for SparseSliceRead<'a> { + fn from_iter>(iter: I) -> Self { + let parts = VecDeque::from_iter(iter); + Self { parts } + } +} + +impl<'a> Read for SparseSliceRead<'a> { + fn read(&mut self, mut buf: &mut [u8]) -> Result { + let mut bytes = 0usize; + + loop { + let buf_len = buf.len(); + if buf_len == 0 || self.parts.is_empty() { + break; + } + + if let Some(next_part) = self.parts.pop_front() { + // Define max copied bytes and pending for next iteration. + let copied_len = std::cmp::min(next_part.len(), buf_len); + bytes += copied_len; + + // Copy data into `buf`. + let (source, pending_next_part) = next_part.split_at(copied_len); + let (dest, pending_buf) = buf.split_at_mut(copied_len); + dest.copy_from_slice(source); + + // Advance output buffer. + buf = pending_buf; + + // Reinsert if it is still pending + if !pending_next_part.is_empty() { + self.parts.push_front(pending_next_part); + } + } + } + + Ok(bytes) + } +} diff --git a/kate/recovery/src/testnet.rs b/kate/recovery/src/testnet.rs index 4739490c..0ead1dca 100644 --- a/kate/recovery/src/testnet.rs +++ b/kate/recovery/src/testnet.rs @@ -2,8 +2,6 @@ use std::{collections::HashMap, sync::Mutex}; use dusk_plonk::commitment_scheme::kzg10::PublicParameters; use once_cell::sync::Lazy; -use rand::SeedableRng; -use rand_chacha::ChaChaRng; static SRS_DATA: Lazy>> = Lazy::new(|| Mutex::new(HashMap::new())); @@ -13,6 +11,8 @@ pub fn public_params(max_degree: usize) -> PublicParameters { srs_data_locked .entry(max_degree) .or_insert_with(|| { + use rand_chacha::{rand_core::SeedableRng as _, ChaChaRng}; + let mut rng = ChaChaRng::seed_from_u64(42); PublicParameters::setup(max_degree, &mut rng).unwrap() }) diff --git a/kate/src/com.rs b/kate/src/com.rs index 8fdba31a..158300e5 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -1,12 +1,17 @@ -use core::num::{NonZeroU32, NonZeroUsize, TryFromIntError}; +use 
core::num::NonZeroU32; use std::{ convert::{TryFrom, TryInto}, mem::size_of, + num::TryFromIntError, time::Instant, }; +use avail_core::{ + data_lookup::Error as DataLookupError, ensure, AppExtrinsic, AppId, BlockLengthColumns, + BlockLengthRows, +}; use codec::Encode; -use da_types::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; +use derive_more::Constructor; use dusk_bytes::Serializable; use dusk_plonk::{ commitment_scheme::kzg10, @@ -14,51 +19,55 @@ use dusk_plonk::{ fft::{EvaluationDomain, Evaluations}, prelude::{BlsScalar, CommitKey}, }; -use kate_grid::{Dimensions, IntoRowMajor}; #[cfg(feature = "std")] -use kate_recovery::{com::app_specific_rows, index, matrix}; -use rand::{Rng, SeedableRng}; -use rand_chacha::ChaChaRng; +use kate_recovery::matrix::Dimensions; +use nalgebra::base::DMatrix; +use rand_chacha::{ + rand_core::{Error as ChaChaError, RngCore, SeedableRng}, + ChaChaRng, +}; use rayon::prelude::*; +#[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_arithmetic::traits::SaturatedConversion; use static_assertions::const_assert_eq; +use thiserror_no_std::Error; use crate::{ + com::kzg10::commitment::Commitment, config::{ - DATA_CHUNK_SIZE, EXTENSION, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, - PROOF_SIZE, PROVER_KEY_SIZE, SCALAR_SIZE, + COL_EXTENSION, DATA_CHUNK_SIZE, EXTENSION_FACTOR, MAXIMUM_BLOCK_SIZE, MINIMUM_BLOCK_SIZE, + PROOF_SIZE, ROW_EXTENSION, SCALAR_SIZE, }, metrics::Metrics, - padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, LOG_TARGET, + padded_len_of_pad_iec_9797_1, BlockDimensions, Seed, TryFromBlockDimensionsError, LOG_TARGET, + U32_USIZE_ERR, }; #[cfg(feature = "std")] use kate_recovery::testnet; -#[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] +#[derive(Constructor, Clone, Copy, PartialEq, Eq, Debug)] pub struct Cell { pub row: BlockLengthRows, pub col: BlockLengthColumns, } -impl Cell { - pub fn new(row: 
BlockLengthRows, col: BlockLengthColumns) -> Self { - Cell { row, col } - } -} - -#[derive(Debug)] +#[derive(Error, Debug)] pub enum Error { - PlonkError(PlonkError), - DuskBytesError(dusk_bytes::Error), - MultiproofError(poly_multiproof::Error), + PlonkError(#[from] PlonkError), + DuskBytesError(#[from] dusk_bytes::Error), + MultiproofError(#[from] poly_multiproof::Error), CellLengthExceeded, BadHeaderHash, BlockTooBig, InvalidChunkLength, DimensionsMismatch, ZeroDimension, + InvalidDimensionExtension, DomainSizeInvalid, + InvalidDataLookup(#[from] DataLookupError), + Rng(#[from] ChaChaError), /// The base grid width, before extension, does not fit cleanly into a domain for FFTs BaseGridDomainSizeInvalid(usize), /// The extended grid width does not fit cleanly into a domain for FFTs @@ -71,15 +80,19 @@ impl From for Error { } } -impl From for Error { - fn from(error: PlonkError) -> Self { - Self::PlonkError(error) +impl From for Error { + fn from(_: TryFromBlockDimensionsError) -> Self { + Self::BlockTooBig } } -impl From for Error { - fn from(err: poly_multiproof::Error) -> Self { - Self::MultiproofError(err) +/// We cannot derive `PartialEq` becasue `PlonkError` does not support it in the current version. +/// and we only need to double check its discriminat for testing. +/// Only needed on tests by now. 
+#[cfg(test)] +impl PartialEq for Error { + fn eq(&self, other: &Self) -> bool { + std::mem::discriminant(self) == std::mem::discriminant(other) } } @@ -100,54 +113,10 @@ fn app_extrinsics_group_by_app_id(extrinsics: &[AppExtrinsic]) -> Vec<(AppId, Ve }) } -#[cfg(feature = "std")] -pub fn scalars_to_rows( - rows: &[u32], - dimensions: &matrix::Dimensions, - data: &[BlsScalar], -) -> Vec>> { - let extended_rows = BlockLengthRows(dimensions.extended_rows()); - let cols = BlockLengthColumns(dimensions.cols() as u32); - dimensions - .iter_extended_rows() - .map(|i| { - rows.contains(&i).then(|| { - row(data, i as usize, cols, extended_rows) - .iter() - .flat_map(BlsScalar::to_bytes) - .collect::>() - }) - }) - .collect::>>>() -} - -#[cfg(feature = "std")] -pub fn scalars_to_app_rows( - app_id: u32, - index: &index::AppDataIndex, - dimensions: &matrix::Dimensions, - data: &[BlsScalar], -) -> Vec>> { - let extended_rows = BlockLengthRows(dimensions.extended_rows()); - let cols = BlockLengthColumns(dimensions.cols() as u32); - let app_rows = app_specific_rows(index, dimensions, app_id); - dimensions - .iter_extended_rows() - .map(|i| { - app_rows.iter().find(|&&row| row == i).map(|_| { - row(data, i as usize, cols, extended_rows) - .iter() - .flat_map(BlsScalar::to_bytes) - .collect::>() - }) - }) - .collect::>>>() -} - pub fn flatten_and_pad_block( max_rows: BlockLengthRows, max_cols: BlockLengthColumns, - chunk_size: u32, + chunk_size: NonZeroU32, extrinsics: &[AppExtrinsic], rng_seed: Seed, ) -> Result<(XtsLayout, FlatData, BlockDimensions), Error> { @@ -155,19 +124,19 @@ pub fn flatten_and_pad_block( let mut extrinsics = extrinsics.to_vec(); extrinsics.sort_by(|a, b| a.app_id.cmp(&b.app_id)); - let extrinsics = app_extrinsics_group_by_app_id(&extrinsics) - .iter() - .map(|e| (e.0, e.1.encode())) - .collect::>(); - // Pad data before determining exact block size // Padding occurs both inside a single chunk and with additional chunk (if needed) - let (tx_layout, 
padded_chunks): (Vec<_>, Vec<_>) = extrinsics + let (tx_layout, padded_chunks): (Vec<_>, Vec<_>) = app_extrinsics_group_by_app_id(&extrinsics) .iter() - .map(|(app_id, data)| { - let chunks = pad_iec_9797_1(data.clone()); - ((*app_id, chunks.len() as u32), chunks) + .map(|e| { + let app_id = e.0; + let data = e.1.encode(); + let chunks = pad_iec_9797_1(data); + let chunks_len = u32::try_from(chunks.len()).map_err(|_| Error::BlockTooBig)?; + Ok(((app_id, chunks_len), chunks)) }) + .collect::, Error>>()? + .into_iter() .unzip(); let mut padded_block = padded_chunks @@ -185,25 +154,30 @@ pub fn flatten_and_pad_block( // Determine the block size after padding let block_dims = get_block_dimensions(padded_block_len, max_rows, max_cols, chunk_size)?; + let chunk_size = usize::try_from(NonZeroU32::get(block_dims.chunk_size)).expect(U32_USIZE_ERR); - if padded_block.len() > block_dims.size() { - return Err(Error::BlockTooBig); - } + let block_dims_size = block_dims.size().ok_or(Error::BlockTooBig)?; + ensure!(padded_block.len() <= block_dims_size, Error::BlockTooBig); let mut rng = ChaChaRng::from_seed(rng_seed); - assert!( - (block_dims.size().saturating_sub(padded_block.len())) - .checked_rem(block_dims.chunk_size as usize) - == Some(0) - ); - let nz_chunk_size: NonZeroUsize = usize::try_from(block_dims.chunk_size) - .map_err(|_| Error::CellLengthExceeded)? - .try_into() - .map_err(|_| Error::ZeroDimension)?; + // SAFETY: `padded_block.len() <= block_dims.size()` checked some lines above. 
+ if cfg!(debug_assertions) { + let dims_sub_pad = block_dims_size + .checked_sub(padded_block.len()) + .expect("`padded_block.len() <= block_dims.size() .qed"); + let rem = dims_sub_pad + .checked_rem(chunk_size) + .expect("`chunk_size != 0 .qed"); + assert_eq!(rem, 0); + } - for _ in 0..(block_dims.size().saturating_sub(padded_block.len()) / nz_chunk_size) { - let rnd_values: DataChunk = rng.gen(); + #[allow(clippy::integer_arithmetic)] + // SAFETY: `chunk_size` comes from `NonZeroU32::get(...)` so we can safetly use `/`. + let last = block_dims_size.saturating_sub(padded_block.len()) / chunk_size; + for _ in 0..last { + let mut rnd_values = DataChunk::default(); + rng.try_fill_bytes(&mut rnd_values)?; padded_block.append(&mut pad_with_zeroes(rnd_values.to_vec(), chunk_size)); } @@ -214,14 +188,15 @@ pub fn get_block_dimensions( block_size: u32, max_rows: BlockLengthRows, max_cols: BlockLengthColumns, - chunk_size: u32, + chunk_size: NonZeroU32, ) -> Result { let max_block_dimensions = BlockDimensions::new(max_rows, max_cols, chunk_size); - if block_size as usize > max_block_dimensions.size() { - return Err(Error::BlockTooBig); - } + let max_block_dimensions_size = max_block_dimensions.size().ok_or(Error::BlockTooBig)?; + + let block_size = usize::try_from(block_size)?; + ensure!(block_size <= max_block_dimensions_size, Error::BlockTooBig); - if block_size as usize == max_block_dimensions.size() || MAXIMUM_BLOCK_SIZE { + if block_size == max_block_dimensions_size || MAXIMUM_BLOCK_SIZE { return Ok(max_block_dimensions); } @@ -232,7 +207,7 @@ pub fn get_block_dimensions( nearest_power_2_size = MINIMUM_BLOCK_SIZE; } - let total_cells = (nearest_power_2_size as f32 / chunk_size as f32).ceil() as u32; + let total_cells = (nearest_power_2_size as f32 / chunk_size.get() as f32).ceil() as u32; // we must minimize number of rows, to minimize header size // (performance wise it doesn't matter) @@ -240,7 +215,7 @@ pub fn get_block_dimensions( let (cols, rows) = if 
total_cells > max_cols.0 { (max_cols, BlockLengthRows(total_cells / nz_max_cols)) } else { - (total_cells.into(), 1.into()) + (BlockLengthColumns(total_cells), BlockLengthRows(1)) }; Ok(BlockDimensions { @@ -251,20 +226,21 @@ pub fn get_block_dimensions( } #[inline] -fn pad_with_zeroes(mut chunk: Vec, length: u32) -> Vec { - chunk.resize(length as usize, 0); +fn pad_with_zeroes(mut chunk: Vec, len: usize) -> Vec { + chunk.resize(len, 0); chunk } -fn pad_to_chunk(chunk: DataChunk, chunk_size: u32) -> Vec { +fn pad_to_chunk(chunk: DataChunk, chunk_size: NonZeroU32) -> Vec { const_assert_eq!(DATA_CHUNK_SIZE, size_of::()); + let chunk_size = usize::try_from(chunk_size.get()).expect(U32_USIZE_ERR); debug_assert!( - chunk_size as usize >= DATA_CHUNK_SIZE, + chunk_size >= DATA_CHUNK_SIZE, "`BlockLength.chunk_size` is valid by design .qed" ); let mut padded = chunk.to_vec(); - padded.resize(chunk_size as usize, 0); + padded.resize(chunk_size, 0); padded } @@ -283,9 +259,13 @@ fn pad_iec_9797_1(mut data: Vec) -> Vec { } fn extend_column_with_zeros(column: &[BlsScalar], height: usize) -> Vec { - let mut result = column.to_vec(); - result.resize(height, BlsScalar::zero()); - result + let mut extended = Vec::with_capacity(height); + let copied = core::cmp::min(height, column.len()); + + extended.extend_from_slice(&column[..copied]); + extended.resize(height, BlsScalar::zero()); + + extended } pub fn to_bls_scalar(chunk: &[u8]) -> Result { @@ -295,17 +275,8 @@ pub fn to_bls_scalar(chunk: &[u8]) -> Result { BlsScalar::from_bytes(&scalar_size_chunk).map_err(|_| Error::CellLengthExceeded) } -fn make_dims(bd: &BlockDimensions) -> Result { - Ok(Dimensions::new( - bd.cols - .as_usize() - .try_into() - .map_err(|_| Error::ZeroDimension)?, - bd.rows - .as_usize() - .try_into() - .map_err(|_| Error::ZeroDimension)?, - )) +fn make_dims(bd: BlockDimensions) -> Result { + Dimensions::new_from(bd.rows.0, bd.cols.0).ok_or(Error::ZeroDimension) } /// Build extended data matrix, by columns. 
@@ -316,52 +287,58 @@ fn make_dims(bd: &BlockDimensions) -> Result { /// instead of being in first k chunks of a column. /// /// `block` should be the raw data of a matrix, stored in row-major orientation. -#[cfg(feature = "std")] +#[cfg(feature = "parallel")] pub fn par_extend_data_matrix( block_dims: BlockDimensions, block: &[u8], metrics: &M, -) -> Result, Error> { +) -> Result, Error> { let start = Instant::now(); - let dims = make_dims(&block_dims)?; - let extended_dims = dims.extend(EXTENSION); + let dims = make_dims(block_dims)?; + let (ext_rows, _): (usize, usize) = dims + .extend(ROW_EXTENSION, COL_EXTENSION) + .ok_or(Error::InvalidDimensionExtension)? + .into(); + let (rows, cols) = dims.into(); // simple length with mod check would work... - let chunks = block.par_chunks_exact(block_dims.chunk_size as usize); - if !chunks.remainder().is_empty() { - return Err(Error::DimensionsMismatch); - } + let chunk_size = + usize::try_from(block_dims.chunk_size.get()).map_err(|_| Error::BlockTooBig)?; + + let chunks = block.par_chunks_exact(chunk_size); + ensure!(chunks.remainder().is_empty(), Error::DimensionsMismatch); let scalars = chunks .into_par_iter() .map(to_bls_scalar) .collect::, Error>>()?; + let extended_column_eval_domain = EvaluationDomain::new(ext_rows)?; + let column_eval_domain = EvaluationDomain::new(rows)?; // rows_num = column_length + // The data is currently row-major, so we need to put it into column-major - let rm = scalars - .into_row_major(dims.width(), dims.height()) - .ok_or(Error::DimensionsMismatch)?; - let col_wise_scalars = rm.iter_column_wise().map(Clone::clone).collect::>(); - - let mut chunk_elements = col_wise_scalars - .chunks_exact(dims.height_nz().get()) - .flat_map(|column| extend_column_with_zeros(column, extended_dims.height())) - .collect::>(); - - let extended_column_eval_domain = EvaluationDomain::new(extended_dims.height())?; - let column_eval_domain = EvaluationDomain::new(dims.height())?; // rows_num = column_length - - 
chunk_elements - .par_chunks_exact_mut(extended_dims.height()) - .for_each(|col| { + let col_wise_scalars = DMatrix::from_row_iterator(rows, cols, scalars.into_iter()); + + let ext_columns_wise = (0..cols) + .into_par_iter() + .flat_map(|col| { + let col_view = col_wise_scalars.column(col).data.into_slice(); + debug_assert_eq!(col_view.len(), rows); + let mut ext_col = extend_column_with_zeros(col_view, ext_rows); // (i)fft functions input parameter slice size has to be a power of 2, otherwise it panics - column_eval_domain.ifft_slice(&mut col[0..dims.height()]); - extended_column_eval_domain.fft_slice(col); - }); + column_eval_domain.ifft_slice(&mut ext_col[0..rows]); + extended_column_eval_domain.fft_slice(ext_col.as_mut_slice()); + debug_assert_eq!(ext_col.len(), ext_rows); + ext_col + }) + .collect::>(); + debug_assert_eq!(Some(ext_columns_wise.len()), cols.checked_mul(ext_rows)); + + let ext_matrix = DMatrix::from_iterator(ext_rows, cols, ext_columns_wise.into_iter()); metrics.extended_block_time(start.elapsed()); - Ok(chunk_elements) + Ok(ext_matrix) } //TODO cache extended data matrix @@ -369,19 +346,23 @@ pub fn par_extend_data_matrix( pub fn build_proof( public_params: &kzg10::PublicParameters, block_dims: BlockDimensions, - ext_data_matrix: &[BlsScalar], + ext_data_matrix: &DMatrix, cells: &[Cell], metrics: &M, ) -> Result, Error> { - let dims = make_dims(&block_dims)?; - let extended_dims = dims.extend(EXTENSION); + let dims = make_dims(block_dims)?; + let (ext_rows, ext_cols): (usize, usize) = dims + .extend(ROW_EXTENSION, COL_EXTENSION) + .ok_or(Error::InvalidDimensionExtension)? 
+ .into(); + let (_, cols): (usize, usize) = dims.into(); const SPROOF_SIZE: usize = PROOF_SIZE + SCALAR_SIZE; - let (prover_key, _) = public_params.trim(dims.width()).map_err(Error::from)?; + let (prover_key, _) = public_params.trim(cols).map_err(Error::from)?; // Generate all the x-axis points of the domain on which all the row polynomials reside - let row_eval_domain = EvaluationDomain::new(dims.width()).map_err(Error::from)?; + let row_eval_domain = EvaluationDomain::new(cols)?; let row_dom_x_pts = row_eval_domain.elements().collect::>(); let mut result_bytes: Vec = vec![0u8; SPROOF_SIZE.saturating_mul(cells.len())]; @@ -393,48 +374,50 @@ pub fn build_proof( let total_start = Instant::now(); // attempt to parallelly compute proof for all requested cells - cells - .into_par_iter() - .zip(result_bytes.par_chunks_exact_mut(SPROOF_SIZE)) - .for_each(|(cell, res)| { - let r_index = cell.row.as_usize(); - if r_index >= extended_dims.height() || cell.col >= block_dims.cols { - res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! - } else { - let c_index = cell.col.as_usize(); - - // construct polynomial per extended matrix row - let row = (0..extended_dims.width()) + let cell_iter = cells.iter().zip(result_bytes.chunks_exact_mut(SPROOF_SIZE)); + + for (cell, res) in cell_iter { + let r_index = usize::try_from(cell.row.0)?; + if r_index >= ext_rows || cell.col >= block_dims.cols { + res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! 
+ } else { + let c_index = usize::try_from(cell.col.0)?; + let get_ext_data_matrix = + |j: usize| ext_data_matrix[r_index.saturating_add(j.saturating_mul(ext_rows))]; + + // construct polynomial per extended matrix row + #[cfg(feature = "parallel")] + let row = { + let mut row = + Vec::with_capacity(ext_cols.checked_add(1).ok_or(Error::BlockTooBig)?); + (0..ext_cols) .into_par_iter() - .map(|j| { - ext_data_matrix - [r_index.saturating_add(j.saturating_mul(extended_dims.height()))] - }) - .collect::>(); - //let row = ext_data_matrix_cm - // .iter_row(r_index) - // .expect("Already checked row index") - // .map(Clone::clone) - // .collect::>(); - - // row has to be a power of 2, otherwise interpolate() function panics - // TODO: cache evaluations - let poly = Evaluations::from_vec_and_domain(row, row_eval_domain).interpolate(); - let witness = prover_key.compute_single_witness(&poly, &row_dom_x_pts[c_index]); - match prover_key.commit(&witness) { - Ok(commitment_to_witness) => { - let evaluated_point = ext_data_matrix[r_index - .saturating_add(c_index.saturating_mul(extended_dims.height()))]; - - res[0..PROOF_SIZE].copy_from_slice(&commitment_to_witness.to_bytes()); - res[PROOF_SIZE..].copy_from_slice(&evaluated_point.to_bytes()); - }, - Err(_) => { - res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! 
- }, - }; - } - }); + .map(get_ext_data_matrix) + .collect_into_vec(&mut row); + row + }; + #[cfg(not(feature = "parallel"))] + let row = (0..ext_cols) + .map(get_ext_data_matrix) + .collect::>(); + + // row has to be a power of 2, otherwise interpolate() function panics TODO: cache evaluations + let poly = Evaluations::from_vec_and_domain(row, row_eval_domain).interpolate(); + let witness = prover_key.compute_single_witness(&poly, &row_dom_x_pts[c_index]); + match prover_key.commit(&witness) { + Ok(commitment_to_witness) => { + let evaluated_point = + ext_data_matrix[r_index.saturating_add(c_index.saturating_mul(ext_rows))]; + + res[0..PROOF_SIZE].copy_from_slice(&commitment_to_witness.to_bytes()); + res[PROOF_SIZE..].copy_from_slice(&evaluated_point.to_bytes()); + }, + Err(_) => { + res.fill(0); // for bad cell identifier, fill whole proof with zero bytes ! + }, + }; + } + } metrics.proof_build_time(total_start.elapsed(), cells.len().saturated_into()); @@ -445,33 +428,34 @@ pub fn build_proof( pub fn par_build_commitments( rows: BlockLengthRows, cols: BlockLengthColumns, - chunk_size: u32, + chunk_size: NonZeroU32, extrinsics_by_key: &[AppExtrinsic], rng_seed: Seed, metrics: &M, -) -> Result<(XtsLayout, Vec, BlockDimensions, Vec), Error> { +) -> Result<(XtsLayout, Vec, BlockDimensions, DMatrix), Error> { let start = Instant::now(); // generate data matrix first let (tx_layout, block, block_dims) = flatten_and_pad_block(rows, cols, chunk_size, extrinsics_by_key, rng_seed)?; - metrics.block_dims_and_size(&block_dims, block.len().saturated_into()); + metrics.block_dims_and_size(block_dims, block.len().saturated_into()); + + let ext_matrix = par_extend_data_matrix(block_dims, &block, metrics)?; - let ext_data_matrix = par_extend_data_matrix(block_dims, &block, metrics)?; - let extended_rows_num = block_dims - .rows - .0 - .checked_mul(EXTENSION_FACTOR) + let block_dims_cols = usize::try_from(block_dims.cols.0)?; + let block_dims_rows = 
usize::try_from(block_dims.rows.0)?; + let extended_rows = block_dims_rows + .checked_mul(EXTENSION_FACTOR as usize) .ok_or(Error::BlockTooBig)?; metrics.preparation_block_time(start.elapsed()); - let public_params = testnet::public_params(block_dims.cols.as_usize()); + let public_params = testnet::public_params(block_dims_cols); if log::log_enabled!(target: LOG_TARGET, log::Level::Debug) { let raw_pp = public_params.to_raw_var_bytes(); - let hash_pp = hex::encode(sp_core_hashing::blake2_128(&raw_pp)); + let hash_pp = hex::encode(sp_core::hashing::blake2_128(&raw_pp)); let hex_pp = hex::encode(raw_pp); log::debug!( target: LOG_TARGET, @@ -481,86 +465,58 @@ pub fn par_build_commitments( ); } - let (prover_key, _) = public_params - .trim(block_dims.cols.as_usize()) - .map_err(Error::from)?; - let row_eval_domain = EvaluationDomain::new(block_dims.cols.as_usize()).map_err(Error::from)?; - - let mut result_bytes: Vec = Vec::new(); - let result_bytes_len = extended_rows_num - .checked_mul(PROVER_KEY_SIZE) - .ok_or(Error::BlockTooBig)? 
as usize; - result_bytes.reserve_exact(result_bytes_len); - unsafe { - result_bytes.set_len(result_bytes_len); - } + let (prover_key, _) = public_params.trim(block_dims_cols)?; + let row_eval_domain = EvaluationDomain::new(block_dims_cols)?; let start = Instant::now(); - - (0..extended_rows_num) + let mut commitments = + Vec::with_capacity(extended_rows.checked_add(1).ok_or(Error::BlockTooBig)?); + (0..extended_rows) .into_par_iter() - .map(|i| { - row( - &ext_data_matrix, - i as usize, - block_dims.cols, - BlockLengthRows(extended_rows_num), - ) + .map(|row_idx| { + let ext_row = get_row(&ext_matrix, row_idx); + commit(&prover_key, row_eval_domain, ext_row) }) - .zip(result_bytes.par_chunks_exact_mut(PROVER_KEY_SIZE as usize)) - .map(|(row, res)| commit(&prover_key, row_eval_domain, row, res)) - .collect::>()?; + .collect_into_vec(&mut commitments); + + let commitments = commitments.into_iter().collect::, _>>()?; + let commitments_bytes = commitments + .into_par_iter() + .flat_map(|c| c.to_bytes()) + .collect(); metrics.commitment_build_time(start.elapsed()); - Ok((tx_layout, result_bytes, block_dims, ext_data_matrix)) + Ok((tx_layout, commitments_bytes, block_dims, ext_matrix)) } #[cfg(feature = "std")] -fn row( - matrix: &[BlsScalar], - i: usize, - cols: BlockLengthColumns, - extended_rows: BlockLengthRows, -) -> Vec { - let mut row = Vec::with_capacity(cols.as_usize()); - (0..cols.as_usize().saturating_mul(extended_rows.as_usize())) - .step_by(extended_rows.as_usize()) - .for_each(|idx| row.push(matrix[i.saturating_add(idx)])); - - row +fn get_row(m: &DMatrix, row_idx: usize) -> Vec { + m.row(row_idx).iter().cloned().collect() } #[cfg(feature = "std")] -// Generate a commitment and store it into result +// Generate a commitment fn commit( prover_key: &CommitKey, domain: EvaluationDomain, row: Vec, - result: &mut [u8], -) -> Result<(), Error> { +) -> Result { let poly = Evaluations::from_vec_and_domain(row, domain).interpolate(); - let commitment = 
prover_key.commit(&poly).map_err(Error::from)?; - result.copy_from_slice(&commitment.to_bytes()); - Ok(()) + prover_key.commit(&poly).map_err(Error::from) } #[cfg(test)] mod tests { - use std::{convert::TryInto, iter::repeat, str::from_utf8}; - - use da_types::AppExtrinsic; + use avail_core::DataLookup; use dusk_bytes::Serializable; use dusk_plonk::bls12_381::BlsScalar; use hex_literal::hex; use kate_recovery::{ - com::{ - app_specific_cells, decode_app_extrinsics, reconstruct_app_extrinsics, - reconstruct_extrinsics, unflatten_padded_data, ReconstructionError, - }, + com::*, commitments, config, + config::CHUNK_SIZE, data::{self, DataCell}, - index::{AppDataIndex, AppDataIndexError}, matrix::{Dimensions, Position}, proof, }; @@ -569,6 +525,9 @@ mod tests { prelude::*, }; use rand::{prelude::IteratorRandom, Rng, SeedableRng}; + use sp_arithmetic::Percent; + use static_assertions::const_assert; + use std::{convert::TryInto, iter::repeat}; use test_case::test_case; use super::*; @@ -579,91 +538,89 @@ mod tests { padded_len, }; - fn app_data_index_try_from_layout( - layout: Vec<(AppId, u32)>, - ) -> Result { - let mut index = Vec::new(); - // transactions are ordered by application id - // skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = AppId(0u32); - - for (app_id, data_len) in layout { - if app_id.0 != 0 && prev_app_id != app_id { - index.push((app_id.0, size)); - } - - size = size - .checked_add(data_len) - .ok_or(AppDataIndexError::SizeOverflow)?; - if prev_app_id > app_id { - return Err(AppDataIndexError::UnsortedLayout); - } - prev_app_id = app_id; - } - - Ok(AppDataIndex { size, index }) + const TCHUNK: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) }; + + fn scalars_to_app_rows( + id: AppId, + lookup: &DataLookup, + dimensions: Dimensions, + matrix: &DMatrix, + ) -> Vec>> { + let app_rows = app_specific_rows(lookup, dimensions, id); + dimensions + .iter_extended_rows() + .map(|i| { + 
app_rows.iter().find(|&&row| row == i).map(|_| { + let row = get_row(&matrix, i as usize); + row.iter() + .flat_map(BlsScalar::to_bytes) + .collect::>() + }) + }) + .collect() } - #[test_case(0, 256, 256 => BlockDimensions::new(1, 4, 32) ; "block size zero")] - #[test_case(11, 256, 256 => BlockDimensions::new(1, 4, 32) ; "below minimum block size")] - #[test_case(300, 256, 256 => BlockDimensions::new(1, 16, 32) ; "regular case")] - #[test_case(513, 256, 256 => BlockDimensions::new(1, 32, 32) ; "minimum overhead after 512")] - #[test_case(8192, 256, 256 => BlockDimensions::new(1, 256, 32) ; "maximum cols")] - #[test_case(8224, 256, 256 => BlockDimensions::new(2, 256, 32) ; "two rows")] - #[test_case(2097152, 256, 256 => BlockDimensions::new(256, 256, 32) ; "max block size")] + #[test_case(0, 256, 256 => (1, 4, 32) ; "block size zero")] + #[test_case(11, 256, 256 => (1, 4, 32) ; "below minimum block size")] + #[test_case(300, 256, 256 => (1, 16, 32) ; "regular case")] + #[test_case(513, 256, 256 => (1, 32, 32) ; "minimum overhead after 512")] + #[test_case(8192, 256, 256 => (1, 256, 32) ; "maximum cols")] + #[test_case(8224, 256, 256 => (2, 256, 32) ; "two rows")] + #[test_case(2097152, 256, 256 => (256, 256, 32) ; "max block size")] #[test_case(2097155, 256, 256 => panics "BlockTooBig" ; "too much data")] // newapi done - fn test_get_block_dimensions(size: u32, rows: R, cols: C) -> BlockDimensions - where - R: Into, - C: Into, - { - get_block_dimensions(size, rows.into(), cols.into(), 32).unwrap() + fn test_get_block_dimensions(size: u32, rows: u32, cols: u32) -> (u32, u32, u32) { + let dims = get_block_dimensions( + size, + BlockLengthRows(rows), + BlockLengthColumns(cols), + TCHUNK, + ) + .unwrap(); + + (dims.rows.0, dims.cols.0, dims.chunk_size.get()) } // newapi done #[test] fn test_extend_data_matrix() { - let expected_result = vec![ - b"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00", - 
b"bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e", - b"7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00", - b"c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016", - b"1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00", - b"db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e", - b"9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900", - b"e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016", - b"3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00", - b"fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e", - b"babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800", - b"ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16", - b"5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00", - b"197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e", - b"d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700", - b"1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16", + let expected = [ + // Col 0 + hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), + hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), + hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), + hex!("c16115f73784be22106830c9bc6bbb469bf5026ee80325e403efe5ccc3f55016"), + // Col 1 + hex!("1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d00"), + hex!("db3b8aaa6a21e9869aa17de8f9edb9c625a05e5de399dc18105c872e6387745e"), + hex!("9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b900"), + hex!("e080341657a3dd412f874fe8db8ada65ba14228d07234403230e05ece2147016"), + // Col 2 + hex!("3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c00"), + hex!("fa5aa9c9894008a6b9c09c07190dd9e544bf7d7c02b9fb372f7ba64d82a6935e"), + hex!("babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d800"), + 
hex!("ff9f533576c2fc604ea66e07fba9f984d93341ac26426322422d240b02348f16"), + // Col 3 + hex!("5d5e5f606162636465666768696a6b6c6d6e6f707172737475767778797a7b00"), + hex!("197ac8e8a85f27c5d8dfbb26382cf80464de9c9b21d81a574e9ac56ca1c5b25e"), + hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), + hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), ] - .into_iter() - .map(|e| { - e.chunks_exact(2) - .map(|h| u8::from_str_radix(from_utf8(h).unwrap(), 16).unwrap()) - .collect::>() - }) - .map(|e| { - BlsScalar::from_bytes(e.as_slice().try_into().expect("wrong number of elems")).unwrap() - }) - .collect::>(); + .iter() + .map(BlsScalar::from_bytes) + .collect::, _>>() + .expect("Invalid Expected result"); + let expected = DMatrix::from_iterator(4, 4, expected.into_iter()); - let block_dims = BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), 32); + let block_dims = BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), TCHUNK); + let chunk_size = usize::try_from(block_dims.chunk_size.get()).unwrap(); let block = (0..=247) .collect::>() .chunks_exact(DATA_CHUNK_SIZE) - .flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), block_dims.chunk_size)) + .flat_map(|chunk| pad_with_zeroes(chunk.to_vec(), chunk_size)) .collect::>(); - let res = par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {}); - eprintln!("result={:?}", res); - eprintln!("expect={:?}", expected_result); - assert_eq!(res.unwrap(), expected_result); + let ext_matrix = par_extend_data_matrix(block_dims, &block, &IgnoreMetrics {}).unwrap(); + assert_eq!(ext_matrix, expected); } #[test_case( 1..=29 => "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d8000" ; "chunk more than 3 values shorter")] @@ -685,97 +642,91 @@ mod tests { // newapi done #[test] fn test_flatten_block() { - let chunk_size = 32; let extrinsics: Vec = vec![ - AppExtrinsic { - app_id: 0.into(), - data: (1..=29).collect(), - }, - AppExtrinsic { - app_id: 1.into(), - data: 
(1..=30).collect(), - }, - AppExtrinsic { - app_id: 2.into(), - data: (1..=31).collect(), - }, - AppExtrinsic { - app_id: 3.into(), - data: (1..=60).collect(), - }, + AppExtrinsic::new(AppId(0), (1..=29).collect()), + AppExtrinsic::new(AppId(1), (1..=30).collect()), + AppExtrinsic::new(AppId(2), (1..=31).collect()), + AppExtrinsic::new(AppId(3), (1..=60).collect()), ]; - let expected_dims = BlockDimensions::new(1, 16, chunk_size); + let expected_dims = + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(16), TCHUNK); let (layout, data, dims) = flatten_and_pad_block( - 128.into(), - 256.into(), - chunk_size, + BlockLengthRows(128), + BlockLengthColumns(256), + TCHUNK, extrinsics.as_slice(), Seed::default(), ) .unwrap(); - let expected_layout = vec![(0.into(), 2), (1.into(), 2), (2.into(), 2), (3.into(), 3)]; + let expected_layout = vec![(AppId(0), 2), (AppId(1), 2), (AppId(2), 2), (AppId(3), 3)]; assert_eq!(layout, expected_layout, "The layouts don't match"); - let expected_data = 
hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae281300"); + let expected_data = 
hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc8b770d00da41597c5157488d7724e03fb8d84a376a43b8f41518a11cc387b669b2ee65009f07e7be5551387a98ba977c732d080dcb0f29a048e3656912c6533e32ee7a0029b721769ce64e43d57133b074d839d531ed1f28510afb45ace10a1f4b794d002d09a0e663266ce1ae7ed1081968a0758e718e997bd362c6b0c34634a9a0b300012737681f7b5d0f281e3afde458bc1e73d2d313c9cf94c05ff3716240a248001320a058d7b3566bd520daaa3ed2bf0ac5b8b120fb852773c3639734b45c9100"); assert_eq!(dims, expected_dims, "Dimensions don't match the expected"); assert_eq!(data, expected_data, "Data doesn't match the expected data"); - let index = app_data_index_try_from_layout(layout).unwrap(); - let res = unflatten_padded_data(index.data_ranges(), data).unwrap(); + let lookup = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + + const_assert!((CHUNK_SIZE as u64) <= (u32::MAX as u64)); + let data_lookup = lookup.projected_ranges(CHUNK_SIZE as u32).unwrap(); + let res = unflatten_padded_data(data_lookup, data).unwrap(); assert_eq!( res.len(), extrinsics.len(), "Number of extrinsics is not as expected." 
); - for (res, exp) in res.iter().zip(extrinsics.iter()) { - assert_eq!(res.0, *exp.app_id); - assert_eq!(res.1[0], exp.data); + for ((id, data), exp) in res.iter().zip(extrinsics.iter()) { + assert_eq!(id.0, *exp.app_id); + assert_eq!(data[0], exp.data); } } fn sample_cells_from_matrix( - matrix: &[BlsScalar], - dimensions: &BlockDimensions, + matrix: &DMatrix, columns: Option<&[u16]>, ) -> Vec { - fn random_indexes(length: usize, seed: Seed) -> Vec { + fn random_indexes(length: usize, seed: Seed) -> Vec { // choose random len/2 (unique) indexes let mut idx = (0..length).collect::>(); - let mut chosen_idx = Vec::::new(); + let mut chosen_idx = Vec::::new(); let mut rng = ChaChaRng::from_seed(seed); for _ in 0..length / 2 { let i = rng.gen_range(0..idx.len()); let v = idx.remove(i); - chosen_idx.push(v as u16); + chosen_idx.push(v); } chosen_idx } - const RNG_SEED: Seed = [42u8; 32]; - matrix - .chunks_exact(dimensions.rows.as_usize().saturating_mul(2)) - .enumerate() - .map(|(col, e)| (col as u16, e)) - .flat_map(|(col, e)| { - random_indexes(e.len(), RNG_SEED) - .into_iter() - .map(|row| DataCell { - position: Position { - row: row as u32, - col, - }, - data: e[row as usize].to_bytes(), - }) - .filter(|cell| { - columns.is_none() || columns.unwrap_or(&[]).contains(&cell.position.col) + + let (rows, cols) = matrix.shape(); + let cols = u16::try_from(cols).unwrap(); + let indexes = random_indexes(rows, RNG_SEED); + + (0u16..cols) + .filter(|col_idx| match &columns { + None => true, + Some(allowed) => allowed.contains(&col_idx), + }) + .flat_map(|col_idx| { + let col_view = matrix.column(col_idx.into()).data.into_slice(); + + indexes + .iter() + .map(|row_idx| { + let row_pos = u32::try_from(*row_idx).unwrap(); + let position = Position::new(row_pos, col_idx); + debug_assert!(*row_idx < col_view.len()); + let data = col_view[*row_idx].to_bytes(); + DataCell::new(position, data) }) .collect::>() }) - .collect::>() + .collect() } fn app_extrinsic_strategy() -> impl 
Strategy { @@ -784,7 +735,7 @@ mod tests { any_with::>(size_range(1..2048).lift()), ) .prop_map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), + app_id: AppId(app_id), data, }) } @@ -800,14 +751,15 @@ mod tests { fn random_cells( max_cols: BlockLengthColumns, max_rows: BlockLengthRows, - percents: usize, + percents: Percent, ) -> Vec { - assert!(percents > 0 && percents <= 100); let max_cols = max_cols.into(); let max_rows = max_rows.into(); let rng = &mut ChaChaRng::from_seed([0u8; 32]); - let amount = ((max_cols * max_rows) as f32 * (percents as f32 / 100.0)).ceil() as usize; + let amount: usize = percents + .mul_ceil::(max_cols * max_rows) + .saturated_into(); (0..max_cols) .flat_map(move |col| { @@ -818,26 +770,27 @@ mod tests { } proptest! { - #![proptest_config(ProptestConfig::with_cases(20))] + #![proptest_config(ProptestConfig::with_cases(10))] #[test] // newapi done fn test_build_and_reconstruct(ref xts in app_extrinsics_strategy()) { let metrics = IgnoreMetrics {}; let (layout, commitments, dims, matrix) = par_build_commitments( - BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &metrics).unwrap(); + BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &metrics).unwrap(); - let columns = sample_cells_from_matrix(&matrix, &dims, None); + let columns = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); - let reconstructed = reconstruct_extrinsics(&index, &extended_dims, columns).unwrap(); - for (result, xt) in reconstructed.iter().zip(xts) { - prop_assert_eq!(result.0, *xt.app_id); - prop_assert_eq!(result.1[0].as_slice(), &xt.data); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let reconstructed = reconstruct_extrinsics(&index, extended_dims, columns).unwrap(); + for ((app_id, data), xt) in reconstructed.iter().zip(xts) { + prop_assert_eq!(app_id.0, *xt.app_id); + 
prop_assert_eq!(data[0].as_slice(), &xt.data); } - let public_params = testnet::public_params(dims.cols.as_usize()); - for cell in random_cells(dims.cols, dims.rows, 1) { - let row = cell.row.as_usize(); + let dims_cols = usize::try_from(dims.cols.0).unwrap(); + let public_params = testnet::public_params(dims_cols); + for cell in random_cells(dims.cols, dims.rows, Percent::one() ) { + let row = usize::try_from(cell.row.0).unwrap(); let proof = build_proof(&public_params, dims, &matrix, &[cell], &metrics).unwrap(); prop_assert!(proof.len() == 80); @@ -848,7 +801,7 @@ mod tests { let extended_dims = dims.try_into().unwrap(); let commitment = commitments::from_slice(&commitments).unwrap()[row]; - let verification = proof::verify(&public_params, &extended_dims, &commitment, &cell); + let verification = proof::verify(&public_params, extended_dims, &commitment, &cell); prop_assert!(verification.is_ok()); prop_assert!(verification.unwrap()); } @@ -860,15 +813,16 @@ mod tests { #[test] // newapi done fn test_commitments_verify(ref xts in app_extrinsics_strategy()) { - let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); + let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); - let public_params = testnet::public_params(dims.cols.as_usize()); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let dims_cols = usize::try_from(dims.cols.0).unwrap(); + let public_params = testnet::public_params(dims_cols); let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { - let rows = &scalars_to_app_rows(xt.app_id.0, &index, &extended_dims, &matrix); - let (_, missing) = 
commitments::verify_equality(&public_params, &commitments, rows, &index, &extended_dims, xt.app_id.0).unwrap(); + let rows = scalars_to_app_rows(xt.app_id, &index, extended_dims, &matrix); + let (_, missing) = commitments::verify_equality(&public_params, &commitments, rows.as_slice(), &index, extended_dims, xt.app_id).unwrap(); prop_assert!(missing.is_empty()); } } @@ -879,17 +833,18 @@ mod tests { #[test] // newapi done fn verify_commitmnets_missing_row(ref xts in app_extrinsics_strategy()) { - let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); + let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), TCHUNK, xts, Seed::default(), &IgnoreMetrics{}).unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); - let public_params = testnet::public_params(dims.cols.as_usize()); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let dims_cols = usize::try_from(dims.cols.0).unwrap(); + let public_params = testnet::public_params(dims_cols); let extended_dims = dims.try_into().unwrap(); let commitments = commitments::from_slice(&commitments).unwrap(); for xt in xts { - let mut rows = scalars_to_app_rows(xt.app_id.0, &index, &extended_dims, &matrix); + let mut rows = scalars_to_app_rows(xt.app_id, &index, extended_dims, &matrix); let app_row_index = rows.iter().position(Option::is_some).unwrap(); rows.remove(app_row_index); - let (_, missing) = commitments::verify_equality(&public_params, &commitments, &rows,&index,&extended_dims,xt.app_id.0).unwrap(); + let (_, missing) = commitments::verify_equality(&public_params, &commitments, &rows,&index, extended_dims,xt.app_id).unwrap(); prop_assert!(!missing.is_empty()); } } @@ -901,17 +856,13 @@ mod tests { fn test_build_commitments_simple_commitment_check() { let block_rows = BlockLengthRows(256); let block_cols = 
BlockLengthColumns(256); - let chunk_size = 32; let original_data = br#"test"#; - let hash: Seed = [ - 76, 41, 174, 145, 187, 12, 97, 32, 75, 111, 149, 209, 243, 195, 165, 10, 166, 172, 47, - 41, 218, 24, 212, 66, 62, 5, 187, 191, 129, 5, 105, 3, - ]; + let hash: Seed = hex!("4c29ae91bb0c61204b6f95d1f3c3a50aa6ac2f29da18d4423e05bbbf81056903"); let (_, commitments, dimensions, _) = par_build_commitments( block_rows, block_cols, - chunk_size, + TCHUNK, &[AppExtrinsic::from(original_data.to_vec())], hash, &IgnoreMetrics {}, @@ -920,19 +871,15 @@ mod tests { assert_eq!( dimensions, - BlockDimensions { - rows: 1.into(), - cols: 4.into(), - chunk_size: 32 - } + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK), ); - let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + let expected_commitments = hex!("9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d"); assert_eq!(commitments, expected_commitments); } #[test] // newapi wip - fn test_reconstruct_app_extrinsics_with_app_id() { + fn test_reconstruct_app_extrinsics_with_app_id() -> Result<(), Error> { let app_id_1_data = br#""This is mocked test data. 
It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -940,50 +887,38 @@ get erasure coded to ensure redundancy."#; let hash = Seed::default(); let xts = vec![ - AppExtrinsic { - app_id: 0.into(), - data: vec![0], - }, - AppExtrinsic { - app_id: 1.into(), - data: app_id_1_data.to_vec(), - }, - AppExtrinsic { - app_id: 2.into(), - data: app_id_2_data.to_vec(), - }, + AppExtrinsic::new(AppId(0), vec![0]), + AppExtrinsic::new(AppId(1), app_id_1_data.to_vec()), + AppExtrinsic::new(AppId(2), app_id_2_data.to_vec()), ]; - let chunk_size = 32; - let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(32), BlockLengthColumns(4), - chunk_size, + TCHUNK, &xts, hash, - ) - .unwrap(); - let coded: Vec = - par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); + )?; + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; - let cols_1 = sample_cells_from_matrix(&coded, &dims, Some(&[0, 1, 2, 3])); + let cols_1 = sample_cells_from_matrix(&matrix, Some(&[0, 1, 2, 3])); - let extended_dims = dims.try_into().unwrap(); + let extended_dims = dims.try_into()?; - let index = app_data_index_try_from_layout(layout).unwrap(); - let res_1 = reconstruct_app_extrinsics(&index, &extended_dims, cols_1, 1).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let res_1 = reconstruct_app_extrinsics(&index, extended_dims, cols_1, AppId(1)).unwrap(); assert_eq!(res_1[0], app_id_1_data); - let cols_2 = sample_cells_from_matrix(&coded, &dims, Some(&[0, 2, 3])); + let cols_2 = sample_cells_from_matrix(&matrix, Some(&[0, 2, 3])); - let res_2 = reconstruct_app_extrinsics(&index, &extended_dims, cols_2, 2).unwrap(); + let res_2 = reconstruct_app_extrinsics(&index, extended_dims, cols_2, AppId(2)).unwrap(); assert_eq!(res_2[0], app_id_2_data); + Ok(()) } #[test] // newapi done - fn test_decode_app_extrinsics() { + fn test_decode_app_extrinsics() -> 
Result<(), Error> { let app_id_1_data = br#""This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy."#; @@ -994,121 +929,103 @@ get erasure coded to ensure redundancy."#; let hash = Seed::default(); let xts = (0..=2) .zip(data) - .map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), - data, - }) + .map(|(app_id, data)| AppExtrinsic::new(AppId(app_id), data)) .collect::>(); - let chunk_size = 32; - let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(32), BlockLengthColumns(4), - chunk_size, + TCHUNK, &xts, hash, - ) - .unwrap(); - let coded = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); + )?; + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; + let dimensions: Dimensions = dims.try_into()?; - let dimensions: Dimensions = dims.try_into().unwrap(); - let extended_matrix = coded - .chunks(dimensions.extended_rows() as usize) - .collect::>(); - - let index = app_data_index_try_from_layout(layout).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); for xt in xts { - let positions = app_specific_cells(&index, &dimensions, xt.app_id.0).unwrap(); + let positions = app_specific_cells(&index, dimensions, xt.app_id).unwrap(); let cells = positions - .iter() - .map(|position| DataCell { - position: position.clone(), - data: extended_matrix[position.col as usize][position.row as usize].to_bytes(), + .into_iter() + .map(|position| { + let col: usize = position.col.into(); + let row = usize::try_from(position.row).unwrap(); + let data = matrix.get((row, col)).map(BlsScalar::to_bytes).unwrap(); + DataCell::new(position, data) }) .collect::>(); - let data = &decode_app_extrinsics(&index, &dimensions, cells, xt.app_id.0).unwrap()[0]; + let data = &decode_app_extrinsics(&index, dimensions, cells, xt.app_id).unwrap()[0]; assert_eq!(data, &xt.data); } assert!(matches!( - 
decode_app_extrinsics(&index, &dimensions, vec![], 0), + decode_app_extrinsics(&index, dimensions, vec![], AppId(0)), Err(ReconstructionError::MissingCell { .. }) )); + Ok(()) } #[test] // newapi done - fn test_extend_mock_data() { + fn test_extend_mock_data() -> Result<(), Error> { let orig_data = br#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy. Let's see how this gets encoded and then reconstructed by sampling only some data."#; // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); - let chunk_size = 32; let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(128), BlockLengthColumns(2), - chunk_size, + TCHUNK, &[AppExtrinsic::from(orig_data.to_vec())], hash, - ) - .unwrap(); + )?; - let coded: Vec = - par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; - let cols = sample_cells_from_matrix(&coded, &dims, None); + let cols = sample_cells_from_matrix(&matrix, None); - let extended_dims = dims.try_into().unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); - let res = reconstruct_extrinsics(&index, &extended_dims, cols).unwrap(); + let extended_dims = dims.try_into()?; + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let res = reconstruct_extrinsics(&index, extended_dims, cols).unwrap(); let s = String::from_utf8_lossy(res[0].1[0].as_slice()); assert_eq!(res[0].1[0], orig_data); - eprintln!("Decoded: {}", s); + Ok(()) } #[test] // newapi done - fn test_multiple_extrinsics_for_same_app_id() { + fn test_multiple_extrinsics_for_same_app_id() -> Result<(), Error> { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; let xts = [ - AppExtrinsic { - app_id: 1.into(), - data: xt1.clone(), - }, - AppExtrinsic { - app_id: 1.into(), - data: xt2.clone(), - }, + 
AppExtrinsic::new(AppId(1), xt1.clone()), + AppExtrinsic::new(AppId(1), xt2.clone()), ]; // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); - let chunk_size = 32; let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(128), BlockLengthColumns(2), - chunk_size, + TCHUNK, &xts, hash, - ) - .unwrap(); + )?; - let coded: Vec = - par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {}).unwrap(); + let matrix = par_extend_data_matrix(dims, &data[..], &IgnoreMetrics {})?; - let cols = sample_cells_from_matrix(&coded, &dims, None); + let cols = sample_cells_from_matrix(&matrix, None); let extended_dims = dims.try_into().unwrap(); - let index = app_data_index_try_from_layout(layout).unwrap(); - let res = reconstruct_extrinsics(&index, &extended_dims, cols).unwrap(); + let index = DataLookup::from_id_and_len_iter(layout.into_iter()).unwrap(); + let res = reconstruct_extrinsics(&index, extended_dims, cols).unwrap(); assert_eq!(res[0].1[0], xt1); assert_eq!(res[0].1[1], xt2); + Ok(()) } #[test] @@ -1119,28 +1036,16 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat let xt3 = vec![7]; let xt4 = vec![]; let xts = [ - AppExtrinsic { - app_id: 1.into(), - data: xt1.clone(), - }, - AppExtrinsic { - app_id: 1.into(), - data: xt2.clone(), - }, - AppExtrinsic { - app_id: 2.into(), - data: xt3.clone(), - }, - AppExtrinsic { - app_id: 3.into(), - data: xt4.clone(), - }, + AppExtrinsic::new(AppId(1), xt1.clone()), + AppExtrinsic::new(AppId(1), xt2.clone()), + AppExtrinsic::new(AppId(2), xt3.clone()), + AppExtrinsic::new(AppId(3), xt4.clone()), ]; let expected = vec![ - (1.into(), vec![xt1, xt2]), - (2.into(), vec![xt3]), - (3.into(), vec![xt4]), + (AppId(1), vec![xt1, xt2]), + (AppId(2), vec![xt3]), + (AppId(3), vec![xt4]), ]; let rez = app_extrinsics_group_by_app_id(&xts); println!("{:?}", rez); @@ -1164,6 +1069,7 @@ Let's see how this gets encoded and then reconstructed by sampling only 
some dat #[test_case( build_extrinsics(&[]), 32 => padded_len_group(&[], 32) ; "Empty chunk list")] #[test_case( build_extrinsics(&[4096]), 32 => padded_len_group(&[4096], 32) ; "4K chunk")] fn test_padding_len(extrinsics: Vec>, chunk_size: u32) -> u32 { + let chunk_size = NonZeroU32::new(chunk_size).expect("Invalid chunk size .qed"); extrinsics .into_iter() .flat_map(pad_iec_9797_1) @@ -1189,7 +1095,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat par_build_commitments( BlockLengthRows(4), BlockLengthColumns(4), - 32, + TCHUNK, &xts, hash, &IgnoreMetrics {}, @@ -1210,7 +1116,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat par_build_commitments( BlockLengthRows(4), BlockLengthColumns(4), - 32, + TCHUNK, &xts, hash, &IgnoreMetrics {}, @@ -1240,13 +1146,15 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat .collect::>(); assert_eq!(row.len(), len); - let mut result_bytes: Vec = vec![0u8; config::COMMITMENT_SIZE]; println!("Row: {:?}", row); - commit(&prover_key, row_eval_domain, row.clone(), &mut result_bytes).unwrap(); - println!("Commitment: {result_bytes:?}"); + let commitment = commit(&prover_key, row_eval_domain, row.clone()) + .map(|com| <[u8; config::COMMITMENT_SIZE]>::try_from(com.to_bytes()).unwrap()) + .unwrap(); + println!("Commitment: {commitment:?}"); // We artificially extend the matrix by doubling values, this is not proper erasure coding. 
- let ext_m = row.into_iter().flat_map(|e| vec![e, e]).collect::>(); + let ext_m = + DMatrix::from_row_iterator(1, row.len() * 2, row.into_iter().flat_map(|e| vec![e, e])); let rows: u16 = len.try_into().expect("rows length should be valid `u16`"); let metrics = IgnoreMetrics {}; @@ -1259,11 +1167,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat }; let proof = build_proof( &public_params, - BlockDimensions { - rows: BlockLengthRows(1), - cols: BlockLengthColumns(4), - chunk_size: 32, - }, + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK), &ext_m, &[cell], &metrics, @@ -1271,22 +1175,21 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat .unwrap(); println!("Proof: {proof:?}"); - assert!(proof.len() == 80); + assert_eq!(proof.len(), 80); - let commitment = result_bytes.clone().try_into().unwrap(); let dims = Dimensions::new(1, 4).unwrap(); let cell = data::Cell { position: Position { row: 0, col }, content: proof.try_into().unwrap(), }; - let verification = proof::verify(&public_params, &dims, &commitment, &cell); + let verification = proof::verify(&public_params, dims, &commitment, &cell); assert!(verification.is_ok()); assert!(verification.unwrap()) } } - #[test_case( r#"{ "row": 42, "col": 99 }"# => Cell::new(42.into(),99.into()) ; "Simple" )] - #[test_case( r#"{ "row": 4294967295, "col": 99 }"# => Cell::new(4_294_967_295.into(),99.into()) ; "Max row" )] + #[test_case( r#"{ "row": 42, "col": 99 }"# => Cell::new(BlockLengthRows(42), BlockLengthColumns(99)) ; "Simple" )] + #[test_case( r#"{ "row": 4294967295, "col": 99 }"# => Cell::new(BlockLengthRows(4_294_967_295),BlockLengthColumns(99)) ; "Max row" )] // newapi ignore fn serde_block_length_types_untagged(data: &str) -> Cell { serde_json::from_str(data).unwrap() diff --git a/kate/src/gridgen/mod.rs b/kate/src/gridgen/mod.rs index 3f84d751..70dc3fbf 100644 --- a/kate/src/gridgen/mod.rs +++ b/kate/src/gridgen/mod.rs @@ 
-4,18 +4,26 @@ use crate::pmp::{ merlin::Transcript, traits::Committer, }; +use avail_core::{ensure, AppExtrinsic, AppId, DataLookup}; use codec::Encode; -use core::num::NonZeroUsize; -use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem}; -use kate_grid::{Dimensions, Extension, Grid, IntoColumnMajor, IntoRowMajor, RowMajor}; -use kate_recovery::config::PADDING_TAIL_VALUE; +use core::{ + cmp::{max, min}, + iter, + num::NonZeroU16, +}; +use kate_recovery::{config::PADDING_TAIL_VALUE, matrix::Dimensions}; +use nalgebra::base::DMatrix; use poly_multiproof::{ m1_blst::Proof, traits::{KZGProof, PolyMultiProofNoPrecomp}, }; -use rand::{Rng, SeedableRng}; -use rand_chacha::ChaChaRng; +use rand_chacha::{ + rand_core::{RngCore, SeedableRng}, + ChaChaRng, +}; +use static_assertions::const_assert; use std::collections::BTreeMap; +use thiserror_no_std::Error; use crate::{ com::{Cell, Error}, @@ -36,16 +44,6 @@ macro_rules! cfg_iter { }}; } -macro_rules! cfg_into_iter { - ($e: expr) => {{ - #[cfg(feature = "parallel")] - let result = $e.into_par_iter(); - #[cfg(not(feature = "parallel"))] - let result = $e.into_iter(); - result - }}; -} - pub const SCALAR_SIZE: usize = 32; pub type ArkScalar = crate::pmp::m1_blst::Fr; pub type Commitment = crate::pmp::Commitment; @@ -55,9 +53,18 @@ pub use poly_multiproof::traits::AsBytes; mod tests; pub struct EvaluationGrid { - pub lookup: DataLookup, - pub evals: RowMajor, - pub dims: Dimensions, + lookup: DataLookup, + evals: DMatrix, +} + +#[derive(Error, Debug, Clone, Copy)] +pub enum AppRowError { + #[error("Original dimensions are not divisible by current ones")] + OrigDimNotDivisible, + #[error("AppId({0}) not found")] + IdNotFound(AppId), + #[error("Lineal index overflows")] + LinealIndexOverflows, } impl EvaluationGrid { @@ -80,7 +87,7 @@ impl EvaluationGrid { ); // Convert each grup of extrinsics into scalars - let encoded = grouped + let scalars_by_app = grouped .into_iter() .map(|(id, datas)| { let mut enc = 
datas.encode(); @@ -92,149 +99,154 @@ impl EvaluationGrid { }) .collect::, _>>()?; + let len_by_app = scalars_by_app + .iter() + .map(|(app, scalars)| (*app, scalars.len())); + // make the index of app info - let mut start = 0u32; - let mut index = vec![]; - for (app_id, scalars) in &encoded { - index.push(DataLookupIndexItem { - app_id: *app_id, - start, - }); - start = start - .checked_add(scalars.len() as u32) - .ok_or(Error::CellLengthExceeded)?; // next item should start after current one - } + let lookup = DataLookup::from_id_and_len_iter(len_by_app)?; + let grid_size = usize::try_from(lookup.len())?; + let (rows, cols): (usize, usize) = + get_block_dims(grid_size, min_width, max_width, max_height)?.into(); // Flatten the grid - let mut grid = encoded + let mut rng = ChaChaRng::from_seed(rng_seed); + let grid = scalars_by_app .into_iter() .flat_map(|(_, scalars)| scalars) - .collect::>(); + .chain(iter::repeat_with(|| random_scalar(&mut rng))); - let lookup = DataLookup { - size: grid.len() as u32, - index, - }; - - // Fit the grid to the desired grid size - let dims = get_block_dims(grid.len(), min_width, max_width, max_height)?; - let mut rng = ChaChaRng::from_seed(rng_seed); - while grid.len() != dims.n_cells() { - let rnd_values: [u8; SCALAR_SIZE - 1] = rng.gen(); - // TODO: can we just use zeros instead? - grid.push(pad_to_bls_scalar(rnd_values)?); - } + let row_major_evals = DMatrix::from_row_iterator(rows, cols, grid); Ok(EvaluationGrid { lookup, - evals: grid - .into_row_major(dims.width(), dims.height()) - .ok_or(Error::DimensionsMismatch)?, - dims, + evals: row_major_evals, }) } - pub fn row(&self, y: usize) -> Option<&[ArkScalar]> { - self.evals.row(y) + /// Get the row `y` of the evaluation. 
+ pub fn row(&self, y: usize) -> Option> { + let (rows, _cols) = self.evals.shape(); + (y < rows).then(|| self.evals.row(y).iter().cloned().collect()) } - /// Returns the start/end indices of the given app id *for the non-extended grid* - fn app_data_indices(&self, app_id: &AppId) -> Option<(usize, usize)> { - if self.lookup.size == 0 { - // Empty block, short circuit. - return None; - } - let (i, start_index) = self - .lookup - .index - .iter() - .enumerate() - .find(|(_i, item)| &item.app_id == app_id) - .map(|(i, item)| (i, item.start as usize))?; - let end_index = self - .lookup - .index - .get(i.saturating_add(1)) - .map(|elem| elem.start) - .unwrap_or(self.lookup.size) as usize; - Some((start_index, end_index)) + pub fn dims(&self) -> Dimensions { + let (rows, cols) = self.evals.shape(); + // SAFETY: We cannot construct an `EvaluationGrid` with any dimension `< 1` or `> u16::MAX` + debug_assert!(rows <= usize::from(u16::MAX) && cols <= usize::from(u16::MAX)); + unsafe { Dimensions::new_unchecked(rows as u16, cols as u16) } + } + + #[inline] + pub fn get(&self, row: R, col: C) -> Option<&ArkScalar> + where + usize: From, + usize: From, + { + self.evals.get::<(usize, usize)>((row.into(), col.into())) } /// Returns a list of `(index, row)` pairs for the underlying rows of an application. /// Returns `None` if the `app_id` cannot be found, or if the provided `orig_dims` are invalid. pub fn app_rows( &self, - app_id: &AppId, - orig_dims: Option<&Dimensions>, - ) -> Option)>> { - let orig_dims = orig_dims.unwrap_or(&self.dims); - if !orig_dims.divides(&self.dims) { - return None; - } - let h_mul = self.dims.height() / orig_dims.height_nz(); + app_id: AppId, + maybe_orig_dims: Option, + ) -> Result)>>, AppRowError> { + let dims = self.dims(); + let (rows, _cols): (usize, usize) = dims.into(); + + // Ensure `origin_dims` is divisible by `dims` if some. 
+ let orig_dims = match maybe_orig_dims { + Some(d) => { + ensure!(d.divides(&dims), AppRowError::OrigDimNotDivisible); + d + }, + None => dims, + }; - let (start_ind, end_ind) = self.app_data_indices(app_id)?; - let (_, start_y) = RowMajor::<()>::ind_to_coord(orig_dims, start_ind); - let (_, end_y) = RowMajor::<()>::ind_to_coord(orig_dims, end_ind.saturating_sub(1)); // Find y of last cell elt - let (new_start_y, new_end_y) = (start_y.saturating_mul(h_mul), end_y.saturating_mul(h_mul)); + // SAFETY: `origin_dims.rows is NonZeroU16` + // Compiler checks that `Dimensions::rows()` returns a `NonZeroU16` using the expression + // `NonZeroU16::get(x)` instead of `x.get()`. + #[allow(clippy::integer_arithmetic)] + let h_mul: usize = rows / usize::from(NonZeroU16::get(orig_dims.rows())); + #[allow(clippy::integer_arithmetic)] + let row_from_lineal_index = |cols, lineal_index| { + let lineal_index = + usize::try_from(lineal_index).map_err(|_| AppRowError::LinealIndexOverflows)?; + let cols = usize::from(NonZeroU16::get(cols)); + + Ok(lineal_index / cols) + }; - (new_start_y..=new_end_y) + let range = self + .lookup + .range_of(app_id) + .ok_or(AppRowError::IdNotFound(app_id))?; + let start_y: usize = row_from_lineal_index(orig_dims.cols(), range.start)?; + let end_y: usize = row_from_lineal_index(orig_dims.cols(), range.end.saturating_sub(1))?; + + // SAFETY: This won't overflow because `h_mul = rows / orig_dim.rows()` and `*_y < rows) + debug_assert!(start_y < rows); + debug_assert!(end_y < rows); + #[allow(clippy::integer_arithmetic)] + let (new_start_y, new_end_y) = (start_y * h_mul, end_y * h_mul); + + let app_rows = (new_start_y..=new_end_y) .step_by(h_mul) - .map(|y| self.evals.row(y).map(|a| (y, a.to_vec()))) - .collect() - } + .map(|y| self.row(y).map(|a| (y, a))) + .collect(); - pub fn extend_columns(&self, extension_factor: usize) -> Result { - let new_dims = self.dims.extend(Extension::height( - extension_factor - .try_into() - .map_err(|_| 
Error::CellLengthExceeded)?, - )); - - let domain = GeneralEvaluationDomain::::new(self.dims.height()) - .ok_or(Error::BaseGridDomainSizeInvalid(self.dims.width()))?; - let domain_new = GeneralEvaluationDomain::::new(new_dims.height()) - .ok_or(Error::ExtendedGridDomianSizeInvalid(new_dims.width()))?; - if domain_new.size() != new_dims.height() { - return Err(Error::DomainSizeInvalid); - } - - let cols: Vec> = self - .evals - .columns() - .map(|(_i, col)| col.map(|s| *s).collect::>()) - .collect::>(); - - let new_evals = cfg_into_iter!(cols) - .flat_map(|mut col| { - // ifft, resize, fft - domain.ifft_in_place(&mut col); - domain_new.fft_in_place(&mut col); - col - }) - .collect::>() - .into_column_major(new_dims.width(), new_dims.height()) - .expect("Each column should be expanded to news dims") - .to_row_major(); + Ok(app_rows) + } + pub fn extend_columns(&self, row_factor: NonZeroU16) -> Result { + let dims = self.dims(); + let (new_rows, new_cols): (usize, usize) = dims + .extend(row_factor, unsafe { NonZeroU16::new_unchecked(1) }) + .ok_or(Error::CellLengthExceeded)? 
+ .into(); + let (rows, _cols): (usize, usize) = dims.into(); + + let domain = + GeneralEvaluationDomain::::new(rows).ok_or(Error::DomainSizeInvalid)?; + let domain_new = + GeneralEvaluationDomain::::new(new_rows).ok_or(Error::DomainSizeInvalid)?; + ensure!(domain_new.size() == new_rows, Error::DomainSizeInvalid); + + let new_data = self.evals.column_iter().flat_map(|col| { + let mut col = col.iter().cloned().collect::>(); + domain.ifft_in_place(&mut col); + domain_new.fft_in_place(&mut col); + col + }); + + let row_major_evals = DMatrix::from_iterator(new_rows, new_cols, new_data); + debug_assert!(row_major_evals.shape() == (new_rows, new_cols)); Ok(Self { lookup: self.lookup.clone(), - evals: new_evals, - dims: new_dims, + evals: row_major_evals, }) } pub fn make_polynomial_grid(&self) -> Result { - let domain = GeneralEvaluationDomain::::new(self.dims.width()) - .ok_or(Error::DomainSizeInvalid)?; - #[cfg(not(feature = "parallel"))] - let rows = self.evals.rows(); - #[cfg(feature = "parallel")] - let rows = self.evals.rows_par_iter(); + let (_rows, cols): (usize, usize) = self.evals.shape(); + let domain = + GeneralEvaluationDomain::::new(cols).ok_or(Error::DomainSizeInvalid)?; + + let inner = self + .evals + .row_iter() + .map(|view| { + let row = view.iter().cloned().collect::>(); + domain.ifft(&row) + }) + .collect::>(); + Ok(PolynomialGrid { - dims: self.dims.clone(), + dims: self.dims(), points: domain.elements().collect(), - inner: rows.map(|(_, row)| domain.ifft(row)).collect::>(), + inner, }) } } @@ -264,7 +276,7 @@ impl PolynomialGrid { extension_factor: usize, ) -> Result, Error> { let res = cfg_iter!(self.inner) - .map(|coeffs| srs.commit(&coeffs).map_err(Error::MultiproofError)) + .map(|coeffs| srs.commit(coeffs).map_err(Error::MultiproofError)) .collect::, _>>()?; poly_multiproof::Commitment::::extend_commitments( &res, @@ -297,25 +309,27 @@ impl PolynomialGrid { srs: &M1NoPrecomp, cell: &Cell, eval_grid: &EvaluationGrid, - target_dims: &Dimensions, 
+ target_dims: Dimensions, ) -> Result { let block = multiproof_block( cell.col.0 as usize, cell.row.0 as usize, - &self.dims, + self.dims, target_dims, ) .ok_or(Error::CellLengthExceeded)?; let polys = &self.inner[block.start_y..block.end_y]; - let evals = (block.start_y..block.end_y) + let evals: Vec> = (block.start_y..block.end_y) .map(|y| { - eval_grid.evals.row(y).expect("Already bounds checked")[block.start_x..block.end_x] + eval_grid.row(y).expect("Already bounds checked .qed")[block.start_x..block.end_x] .to_vec() }) .collect::>(); + let evals_view = evals.iter().map(|row| row.as_slice()).collect::>(); + let points = &self.points[block.start_x..block.end_x]; let mut ts = Transcript::new(b"avail-mp"); - let proof = PolyMultiProofNoPrecomp::open(srs, &mut ts, &evals, &polys, points) + let proof = PolyMultiProofNoPrecomp::open(srs, &mut ts, &evals_view, polys, points) .map_err(Error::MultiproofError)?; Ok(Multiproof { @@ -345,20 +359,24 @@ pub struct CellBlock { /// `mp_grid_dims` is the size of the multiproof grid, which `x,y` lies in. /// For example, a 256x256 grid could be converted to a 4x4 target size multiproof grid, by making 16 multiproofs /// of size 64x64. +#[allow(clippy::integer_arithmetic)] pub fn multiproof_block( x: usize, y: usize, - grid_dims: &Dimensions, - target_dims: &Dimensions, + grid: Dimensions, + target: Dimensions, ) -> Option { - let mp_grid_dims = multiproof_dims(grid_dims, target_dims)?; + let mp_grid_dims = multiproof_dims(grid, target)?; + let (g_rows, g_cols): (usize, usize) = grid.into(); if x >= mp_grid_dims.width() || y >= mp_grid_dims.height() { return None; } - let block_width = grid_dims.width() / mp_grid_dims.width_nz(); - let block_height = grid_dims.height() / mp_grid_dims.height_nz(); - // SAFETY: values never overflow since x,y are always less than grid_dims.{width,height}(). + // SAFETY: Division is safe because `cols() != 0 && rows() != 0`. 
+ let block_width = g_cols / usize::from(NonZeroU16::get(mp_grid_dims.cols())); + let block_height = g_rows / usize::from(NonZeroU16::get(mp_grid_dims.rows())); + + // SAFETY: values never overflow since `x` and `y` are always less than grid_dims.{width,height}(). // This is because x,y < mp_grid_dims.{width, height} and block width is the quotient of // grid_dims and mp_grid_dims. Some(CellBlock { @@ -371,13 +389,14 @@ pub fn multiproof_block( /// Dimensions of the multiproof grid. These are guarenteed to cleanly divide `grid_dims`. /// `target_dims` must cleanly divide `grid_dims`. -pub fn multiproof_dims(grid_dims: &Dimensions, target_dims: &Dimensions) -> Option { - let target_width = grid_dims.width_nz().min(target_dims.width_nz()); - let target_height = grid_dims.height_nz().min(target_dims.height_nz()); - if grid_dims.width() % target_width != 0 || grid_dims.height() % target_height != 0 { +pub fn multiproof_dims(grid: Dimensions, target: Dimensions) -> Option { + let cols = min(grid.cols(), target.cols()); + let rows = min(grid.rows(), target.rows()); + if grid.cols().get() % cols != 0 || grid.rows().get() % rows != 0 { return None; } - Some(Dimensions::new(target_width, target_height)) + + Dimensions::new(rows, cols) } pub fn get_block_dims( @@ -390,19 +409,28 @@ pub fn get_block_dims( if n_scalars < max_width { let current_width = n_scalars; // Don't let the width get lower than the minimum provided - let width = core::cmp::max(round_up_power_of_2(current_width), min_width).try_into()?; - let height = 1.try_into()?; - Ok(Dimensions::new(width, height)) + let width = max( + current_width + .checked_next_power_of_two() + .ok_or(Error::BlockTooBig)?, + min_width, + ); + let height = unsafe { NonZeroU16::new_unchecked(1) }; + + Dimensions::new_from(height, width).ok_or(Error::ZeroDimension) } else { - let width = NonZeroUsize::try_from(max_width)?; - let current_height = round_up_to_multiple(n_scalars, width) / width; + let width = 
NonZeroU16::try_from(u16::try_from(max_width)?)?; + let current_height = round_up_to_multiple(n_scalars, width) + .checked_div(max_width) + .expect("`max_width` is non zero, checked one line before"); // Round the height up to a power of 2 for ffts - let height = round_up_power_of_2(current_height); + let height = current_height + .checked_next_power_of_two() + .ok_or(Error::BlockTooBig)?; // Error if height too big - if height > max_height { - return Err(Error::BlockTooBig); - } - Ok(Dimensions::new(width, height.try_into()?)) + ensure!(height <= max_height, Error::BlockTooBig); + + Dimensions::new_from(height, width).ok_or(Error::ZeroDimension) } } @@ -411,34 +439,36 @@ pub fn domain_points(n: usize) -> Result, Error> { Ok(domain.elements().collect()) } -fn round_up_to_multiple(input: usize, multiple: NonZeroUsize) -> usize { - let n_multiples = input.saturating_add(multiple.get()).saturating_sub(1) / multiple; - n_multiples.saturating_mul(multiple.get()) +/// SAFETY: As `multiple` is a `NonZeroU16` we can safetly make the following ops. +#[allow(clippy::integer_arithmetic)] +fn round_up_to_multiple(input: usize, multiple: NonZeroU16) -> usize { + let multiple: usize = multiple.get().into(); + let n_multiples = input.saturating_add(multiple - 1) / multiple; + n_multiples.saturating_mul(multiple) } pub(crate) fn pad_to_bls_scalar(a: impl AsRef<[u8]>) -> Result { - if a.as_ref().len() > DATA_CHUNK_SIZE { - return Err(Error::InvalidChunkLength); - } + let bytes = a.as_ref(); + ensure!(bytes.len() <= DATA_CHUNK_SIZE, Error::InvalidChunkLength); + const_assert!(DATA_CHUNK_SIZE <= SCALAR_SIZE); + let mut buf = [0u8; SCALAR_SIZE]; - buf[0..a.as_ref().len()].copy_from_slice(a.as_ref()); + buf[0..bytes.len()].copy_from_slice(bytes); + ArkScalar::from_bytes(&buf).map_err(Error::MultiproofError) } -// Round up. 
only valid for positive integers #[allow(clippy::integer_arithmetic)] -fn round_up_power_of_2(mut v: usize) -> usize { - if v == 0 { - return 1; - } - v -= 1; - v |= v >> 1; - v |= v >> 2; - v |= v >> 4; - v |= v >> 8; - v |= v >> 16; - v += 1; - v +pub(crate) fn random_scalar(rng: &mut ChaChaRng) -> ArkScalar { + let mut raw_scalar = [0u8; SCALAR_SIZE]; + + const_assert!(SCALAR_SIZE >= 1); + rng.try_fill_bytes(&mut raw_scalar[..SCALAR_SIZE - 1]) + .expect("ChaChaRng::try_fill_bytes failed"); + debug_assert!(raw_scalar[SCALAR_SIZE - 1] == 0u8); + + ArkScalar::from_bytes(&raw_scalar) + .expect("ArkScalar can be generated from SCALAR_SIZE -1 bytes .qed") } #[cfg(test)] @@ -449,8 +479,8 @@ mod unit_tests { use test_case::test_case; // parameters that will split a 256x256 grid into pieces of size 4x16 - const TARGET: Dimensions = Dimensions::new_unchecked(64, 16); - const GRID: Dimensions = Dimensions::new_unchecked(256, 256); + const TARGET: Dimensions = unsafe { Dimensions::new_unchecked(16, 64) }; + const GRID: Dimensions = unsafe { Dimensions::new_unchecked(256, 256) }; fn cb(start_x: usize, start_y: usize, end_x: usize, end_y: usize) -> CellBlock { CellBlock { @@ -467,7 +497,7 @@ mod unit_tests { #[test_case(64, 0 => None)] #[test_case(0, 16 => None)] fn multiproof_max_grid_size(x: usize, y: usize) -> Option { - multiproof_block(x, y, &GRID, &TARGET) + multiproof_block(x, y, GRID.clone(), TARGET) } #[test_case(256, 256, 64, 16 => Some((64, 16)))] @@ -477,16 +507,15 @@ mod unit_tests { #[test_case(256, 8, 32, 32 => Some((32, 8)))] #[test_case(4 , 1, 32, 32 => Some((4, 1)))] fn test_multiproof_dims( - grid_w: usize, - grid_h: usize, - target_w: usize, - target_h: usize, + grid_w: u16, + grid_h: u16, + target_w: u16, + target_h: u16, ) -> Option<(usize, usize)> { - multiproof_dims( - &Dimensions::new_unchecked(grid_w, grid_h), - &Dimensions::new_unchecked(target_w, target_h), - ) - .map(|i| (i.width(), i.height())) + let grid = unsafe { 
Dimensions::new_unchecked(grid_w, grid_h) }; + let target = unsafe { Dimensions::new_unchecked(target_w, target_h) }; + + multiproof_dims(grid, target).map(Into::into) } use proptest::prelude::*; @@ -495,10 +524,12 @@ mod unit_tests { cases: 200, .. ProptestConfig::default() })] #[test] - fn test_round_up_to_multiple(i in 1..1000usize, m in 1..32usize) { - for k in 0..m { - let a = i * m - k; - prop_assert_eq!(round_up_to_multiple(a, m.try_into().unwrap()), i * m) + fn test_round_up_to_multiple(i in 1..1000usize, m in 1..32u16) { + for k in 0..usize::from(m) { + let a :usize = i * usize::from(m) - k; + let output = round_up_to_multiple(a, m.try_into().unwrap()); + let expected :usize = i * usize::from(m); + prop_assert_eq!( output, expected) } } } @@ -509,19 +540,23 @@ mod unit_tests { #[test_case(6 => 8)] #[test_case(972 => 1024)] fn test_round_up_to_2(i: usize) -> usize { - round_up_power_of_2(i) + i.next_power_of_two() + } + + fn new_dim(rows: u16, cols: u16) -> Result { + Dimensions::new(rows, cols).ok_or(Error::BlockTooBig) } - #[test_case(0 => Dimensions::new_unchecked(4, 1) ; "block size zero")] - #[test_case(1 => Dimensions::new_unchecked(4, 1) ; "below minimum block size")] - #[test_case(10 => Dimensions::new_unchecked(16, 1) ; "regular case")] - #[test_case(17 => Dimensions::new_unchecked(32, 1) ; "minimum overhead after 512")] - #[test_case(256 => Dimensions::new_unchecked(256, 1) ; "maximum cols")] - #[test_case(257 => Dimensions::new_unchecked(256, 2) ; "two rows")] - #[test_case(256 * 256 => Dimensions::new_unchecked(256, 256) ; "max block size")] - #[test_case(256 * 256 + 1 => panics "BlockTooBig" ; "too much data")] - fn test_get_block_dims(size: usize) -> Dimensions + #[test_case(0 => new_dim(1,4) ; "block size zero")] + #[test_case(1 => new_dim(1,4) ; "below minimum block size")] + #[test_case(10 => new_dim(1, 16) ; "regular case")] + #[test_case(17 => new_dim(1, 32) ; "minimum overhead after 512")] + #[test_case(256 => new_dim(1, 256) ; "maximum 
cols")] + #[test_case(257 => new_dim(2, 256) ; "two rows")] + #[test_case(256 * 256 => new_dim(256, 256) ; "max block size")] + #[test_case(256 * 256 + 1 => Err(Error::BlockTooBig) ; "too much data")] + fn test_get_block_dims(size: usize) -> Result where { - get_block_dims(size, 4, 256, 256).unwrap() + get_block_dims(size, 4, 256, 256) } } diff --git a/kate/src/gridgen/tests/commitments.rs b/kate/src/gridgen/tests/commitments.rs index 202fa712..ccdd46f2 100644 --- a/kate/src/gridgen/tests/commitments.rs +++ b/kate/src/gridgen/tests/commitments.rs @@ -1,14 +1,11 @@ use super::*; -use crate::gridgen::*; -use crate::testnet; -use crate::Seed; -use da_types::AppExtrinsic; -use da_types::AppId; -use da_types::BlockLengthColumns; -use da_types::BlockLengthRows; +use crate::{gridgen::*, testnet, Seed}; +use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; use hex_literal::hex; -use kate_grid::Dimensions; -use kate_recovery::matrix::Position; +use kate_recovery::{ + commitments::verify_equality, + matrix::{Dimensions, Position}, +}; use test_case::test_case; #[test] @@ -30,7 +27,9 @@ fn test_build_commitments_simple_commitment_check() { hash, ) .unwrap(); - let ext_evals = evals.extend_columns(2).unwrap(); + let ext_evals = evals + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) + .unwrap(); let polys = ext_evals.make_polynomial_grid().unwrap(); let commits = polys .commitments(&*PMP) @@ -47,8 +46,8 @@ fn test_build_commitments_simple_commitment_check() { .flat_map(|p| p.to_bytes().unwrap()) .collect::>(); - assert_eq!(ext_evals.dims, Dimensions::new_unchecked(4, 2)); - let expected_commitments = hex!("960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D960F08F97D3A8BD21C3F5682366130132E18E375A587A1E5900937D7AA5F33C4E20A1C0ACAE664DCE1FD99EDC2693B8D"); + assert_eq!(ext_evals.dims(), Dimensions::new_from(2, 4).unwrap()); + let expected_commitments = 
hex!("9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d"); assert_eq!(commits, expected_commitments); assert_eq!(commits_fft_extended, expected_commitments); } @@ -64,7 +63,9 @@ fn par_build_commitments_row_wise_constant_row() { }]; let evals = EvaluationGrid::from_extrinsics(xts, 4, 4, 4, hash).unwrap(); - let evals = evals.extend_columns(2).unwrap(); + let evals = evals + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) + .unwrap(); let polys = evals.make_polynomial_grid().unwrap(); polys.commitments(&*PMP).unwrap(); } @@ -74,8 +75,10 @@ proptest! { #[test] fn commitments_verify(ref exts in app_extrinsics_strategy()) { //let (layout, commitments, dims, matrix) = par_build_commitments(BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); - let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); - let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 16, 64, Seed::default()).unwrap(); + let grid = grid.extend_columns( unsafe { NonZeroU16::new_unchecked(2)}).unwrap(); + let (g_rows, g_cols) :(u16,u16) = grid.dims().into(); + let orig_dims = Dimensions::new(g_rows / 2, g_cols).unwrap(); let polys = grid.make_polynomial_grid().unwrap(); let commits = polys.commitments(&*PMP) .unwrap() @@ -83,26 +86,26 @@ proptest! 
{ .map(|c| c.to_bytes().unwrap()) .collect::>(); - let index = app_data_index_from_lookup(&grid.lookup); - let public_params = testnet::public_params((grid.dims.width() as u32).into()); + let public_params = testnet::public_params(BlockLengthColumns(g_cols as u32)); - for xt in exts { - let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); + for xt in exts.iter() { + let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap().unwrap(); // Have to put the rows we find in this funky data structure - let mut app_rows = vec![None; grid.dims.height()]; + let mut app_rows = vec![None; g_rows.into()]; for (row_i, row) in rows { app_rows[row_i] = Some(row.iter().flat_map(|s| s.to_bytes().unwrap()).collect()); } // Need to provide the original dimensions here too - let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); - let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &app_rows, &index, &extended_dims, xt.app_id.0).unwrap(); + let extended_dims = orig_dims.clone(); + let (_, missing) = verify_equality(&public_params, &commits, &app_rows, &grid.lookup, extended_dims, xt.app_id).unwrap(); prop_assert!(missing.is_empty()); } } fn verify_commitments_missing_row(ref xts in app_extrinsics_strategy()) { - let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns(2).unwrap(); - let orig_dims = Dimensions::new(grid.dims.width_nz(), (grid.dims.height() / 2).try_into().unwrap()); + let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 16, 64, Seed::default()).unwrap().extend_columns( unsafe { NonZeroU16::new_unchecked(2) }).unwrap(); + let (g_rows, g_cols):(u16,u16) = grid.dims().into(); + let orig_dims = Dimensions::new_from(g_rows / 2, g_cols).unwrap(); let polys = grid.make_polynomial_grid().unwrap(); let commits = polys.commitments(&*PMP) .unwrap() @@ -110,28 +113,27 @@ proptest! 
{ .map(|c| c.to_bytes().unwrap()) .collect::>(); - let index = app_data_index_from_lookup(&grid.lookup); - let public_params = testnet::public_params((grid.dims.width() as u32).into()); + let public_params = testnet::public_params( BlockLengthColumns(g_cols.into())); for xt in xts { - let rows = grid.app_rows(&xt.app_id, Some(&orig_dims)).unwrap(); - let mut row_elems = vec![None; grid.dims.height()]; + let rows = grid.app_rows(xt.app_id, Some(orig_dims)).unwrap().unwrap(); + let mut row_elems = vec![None; g_rows.into()]; for (i, data) in &rows { row_elems[*i] = Some(data.iter().flat_map(|s| s.to_bytes().unwrap()).collect()); } let first_index = rows.iter().map(|(i, _)| *i).min().unwrap(); row_elems.remove(first_index); - let extended_dims = kate_recovery::matrix::Dimensions::new(orig_dims.height() as u16, orig_dims.width() as u16).unwrap(); - let (_, missing) = kate_recovery::commitments::verify_equality(&public_params, &commits, &row_elems,&index,&extended_dims,xt.app_id.0).unwrap(); + let extended_dims = orig_dims.transpose(); + let (_, missing) = verify_equality(&public_params, &commits, &row_elems,&grid.lookup,extended_dims,xt.app_id).unwrap(); prop_assert!(!missing.is_empty()); } } } -#[test_case( ([1,1,1,1]).to_vec(); "All values are non-zero but same")] -#[test_case( ([0,0,0,0]).to_vec(); "All values are zero")] -#[test_case( ([0,5,2,1]).to_vec(); "All values are different")] +#[test_case( vec![1;4]; "All values are non-zero but same")] +#[test_case( vec![0;4]; "All values are zero")] +#[test_case( vec![0,5,2,1]; "All values are different")] fn test_zero_deg_poly_commit(row_values: Vec) { // There are two main cases that generate a zero degree polynomial. One is for data that is non-zero, but the same. // The other is for all-zero data. They differ, as the former yields a polynomial with one coefficient, and latter generates zero coefficients. 
@@ -144,11 +146,10 @@ fn test_zero_deg_poly_commit(row_values: Vec) { //let ae = AppExtrinsic { 0.into(), vec![} let ev = EvaluationGrid { lookup: Default::default(), // Shouldn't need to care about this - dims: Dimensions::new_unchecked(row_values.len(), 1), - evals: row.into_row_major(row_values.len(), 1).unwrap(), + evals: DMatrix::from_row_iterator(len, 1, row.into_iter()).transpose(), }; - println!("Row: {:?}", ev.evals.inner()); + println!("Row: {:?}", ev.evals); let pg = ev.make_polynomial_grid().unwrap(); println!("Poly: {:?}", pg.inner[0]); @@ -164,9 +165,9 @@ fn test_zero_deg_poly_commit(row_values: Vec) { let proof = pg.proof(&*PMP, &cell).unwrap(); let proof_bytes = proof.to_bytes().unwrap(); - let cell_bytes = ev.evals.get(x, 0).unwrap().to_bytes().unwrap(); + let cell_bytes = ev.get(0usize, x).unwrap().to_bytes().unwrap(); let content = [&proof_bytes[..], &cell_bytes[..]].concat(); - let dims = kate_recovery::matrix::Dimensions::new(1, 4).unwrap(); + let dims = Dimensions::new(1, 4).unwrap(); let cell = kate_recovery::data::Cell { position: Position { row: 0, @@ -176,7 +177,7 @@ fn test_zero_deg_poly_commit(row_values: Vec) { }; let verification = kate_recovery::proof::verify( &kate_recovery::testnet::public_params(256), - &dims, + dims, &commitment, &cell, ); diff --git a/kate/src/gridgen/tests/formatting.rs b/kate/src/gridgen/tests/formatting.rs index 7ac4e3f3..c8cef4c1 100644 --- a/kate/src/gridgen/tests/formatting.rs +++ b/kate/src/gridgen/tests/formatting.rs @@ -1,68 +1,52 @@ -use da_types::{AppExtrinsic, DataLookup, DataLookupIndexItem}; +use avail_core::{AppExtrinsic, AppId, DataLookup}; use hex_literal::hex; -use kate_grid::{Dimensions, Grid, IntoColumnMajor, IntoRowMajor}; use kate_recovery::{ com::{app_specific_cells, decode_app_extrinsics, reconstruct_extrinsics}, data::DataCell, + matrix::Dimensions, }; +use nalgebra::base::DMatrix; use poly_multiproof::traits::AsBytes; use crate::{ config::DATA_CHUNK_SIZE, - gridgen::{ - 
tests::{app_data_index_from_lookup, sample_cells}, - ArkScalar, EvaluationGrid, - }, + gridgen::{tests::sample_cells, ArkScalar, EvaluationGrid}, Seed, }; +use core::num::NonZeroU16; #[test] fn newapi_test_flatten_block() { let extrinsics: Vec = vec![ - AppExtrinsic { - app_id: 0.into(), - data: (1..=29).collect(), - }, - AppExtrinsic { - app_id: 1.into(), - data: (1..=30).collect(), - }, - AppExtrinsic { - app_id: 2.into(), - data: (1..=31).collect(), - }, - AppExtrinsic { - app_id: 3.into(), - data: (1..=60).collect(), - }, + AppExtrinsic::new(AppId(0), (1..=29).collect()), + AppExtrinsic::new(AppId(1), (1..=30).collect()), + AppExtrinsic::new(AppId(2), (1..=31).collect()), + AppExtrinsic::new(AppId(3), (1..=60).collect()), ]; - let expected_dims = Dimensions::new_unchecked(16, 1); + let expected_dims = Dimensions::new_from(1, 16).unwrap(); let evals = EvaluationGrid::from_extrinsics(extrinsics, 4, 256, 256, Seed::default()).unwrap(); - let expected_index = [(0.into(), 0), (1.into(), 2), (2.into(), 4), (3.into(), 6)] - .into_iter() - .map(|(app_id, start)| DataLookupIndexItem { app_id, start }) - .collect::>(); - - let expected_lookup = DataLookup { - size: 9, - index: expected_index, - }; + let id_lens: Vec<(u32, usize)> = vec![(0, 2), (1, 2), (2, 2), (3, 3)]; + let expected_lookup = DataLookup::from_id_and_len_iter(id_lens.into_iter()).unwrap(); assert_eq!(evals.lookup, expected_lookup, "The layouts don't match"); assert_eq!( - evals.dims, expected_dims, + evals.dims(), + expected_dims, "Dimensions don't match the expected" ); - let expected_data = 
hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076a04053bda0a88bda5177b86a15c3b29f559873cb481232299cd5743151ac004b2d63ae198e7bb0a9011f28e473c95f4013d7d53ec5fbc3b42df8ed101f6d00e831e52bfb76e51cca8b4e9016838657edfae09cb9a71eb219025c4c87a67c004aaa86f20ac0aa792bc121ee42e2c326127061eda15599cb5db3db870bea5a00ecf353161c3cb528b0c5d98050c4570bfc942d8b19ed7b0cbba5725e03e5f000b7e30db36b6df82ac151f668f5f80a5e2a9cac7c64991dd6a6ce21c060175800edb9260d2a86c836efc05f17e5c59525e404c6a93d051651fe2e4eefae281300"); + let expected_data = 
hex!("04740102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d00800000000000000000000000000000000000000000000000000000000000000004780102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e80000000000000000000000000000000000000000000000000000000000000047c0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f80000000000000000000000000000000000000000000000000000000000004f00102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d001e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c00800000000000000000000000000000000000000000000000000000000000000076b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc8b770d00da41597c5157488d7724e03fb8d84a376a43b8f41518a11cc387b669b2ee65009f07e7be5551387a98ba977c732d080dcb0f29a048e3656912c6533e32ee7a0029b721769ce64e43d57133b074d839d531ed1f28510afb45ace10a1f4b794d002d09a0e663266ce1ae7ed1081968a0758e718e997bd362c6b0c34634a9a0b300012737681f7b5d0f281e3afde458bc1e73d2d313c9cf94c05ff3716240a248001320a058d7b3566bd520daaa3ed2bf0ac5b8b120fb852773c3639734b45c9100"); let data = evals .evals - .inner() - .iter() - .flat_map(|s| s.to_bytes().unwrap()) + .row_iter() + .flat_map(|row| { + row.iter() + .flat_map(|s| s.to_bytes().unwrap()) + .collect::>() + }) .collect::>(); assert_eq!(data, expected_data, "Data doesn't match the expected data"); } @@ -70,7 +54,7 @@ fn newapi_test_flatten_block() { #[test] fn newapi_test_extend_data_matrix() { // This test expects this result in column major - let expected_result = vec![ + let expected_data = vec![ hex!("000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e00"), hex!("bc1c6b8b4b02ca677b825ec9dace9aa706813f3ec47abdf9f03c680f4468555e"), hex!("7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a00"), @@ -88,14 +72,13 @@ fn newapi_test_extend_data_matrix() { hex!("d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f700"), hex!("1ebf725495e11b806dc58d261ac918a4f85260cb45618241614c432a2153ae16"), ] - .into_iter() - .map(|e| 
ArkScalar::from_bytes(e.as_slice().try_into().unwrap()).unwrap()) - .collect::>() - .into_column_major(4, 4) - .unwrap() - .to_row_major(); - - let block_dims = Dimensions::new_unchecked(4, 2); + .iter() + .map(ArkScalar::from_bytes) + .collect::, _>>() + .expect("Invalid Expected result"); + + let expected_result = DMatrix::from_column_slice(4, 4, &expected_data); + let scalars = (0..=247) .collect::>() .chunks_exact(DATA_CHUNK_SIZE) @@ -104,14 +87,13 @@ fn newapi_test_extend_data_matrix() { let grid = EvaluationGrid { lookup: DataLookup::default(), - evals: scalars - .into_row_major(block_dims.width(), block_dims.height()) - .unwrap(), - dims: block_dims, + evals: DMatrix::from_row_iterator(2, 4, scalars.into_iter()), }; - let extend = grid.extend_columns(2).unwrap(); + let extend = grid + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) + .unwrap(); - assert_eq!(extend.evals.inner(), expected_result.inner()); + assert_eq!(extend.evals, expected_result); } #[test] @@ -127,69 +109,61 @@ get erasure coded to ensure redundancy."#; let hash = Seed::default(); let xts = (0..=2) .zip(data) - .map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), - data, - }) + .map(|(id, data)| AppExtrinsic::new(AppId(id), data)) .collect::>(); let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 32, 4, hash) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); - let index = app_data_index_from_lookup(&grid.lookup); - let bdims = - kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) - .unwrap(); + let bdims = grid.dims(); for xt in &xts { - let positions = app_specific_cells(&index, &bdims, xt.app_id.0).unwrap(); + let positions = app_specific_cells(&grid.lookup, bdims, xt.app_id).unwrap(); let cells = positions .iter() .map(|pos| DataCell { position: pos.clone(), data: grid .evals - .get(pos.col as usize, pos.row as usize) + .get((pos.row as usize, pos.col as usize)) .unwrap() 
.to_bytes() .unwrap(), }) .collect::>(); - let data = &decode_app_extrinsics(&index, &bdims, cells, xt.app_id.0).unwrap()[0]; + let data = &decode_app_extrinsics(&grid.lookup, bdims, cells, xt.app_id).unwrap()[0]; assert_eq!(data, &xt.data); } assert!(matches!( - decode_app_extrinsics(&index, &bdims, vec![], 0), + decode_app_extrinsics(&grid.lookup, bdims, vec![], AppId(0)), Err(kate_recovery::com::ReconstructionError::MissingCell { .. }) )); } #[test] fn test_extend_mock_data() { - let orig_data = br#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns + let orig_data = r#"This is mocked test data. It will be formatted as a matrix of BLS scalar cells and then individual columns get erasure coded to ensure redundancy. Let's see how this gets encoded and then reconstructed by sampling only some data."#; - let exts = vec![AppExtrinsic::from(orig_data.to_vec())]; + let exts = vec![AppExtrinsic::from(orig_data.as_bytes().to_vec())]; // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 128, 2, hash) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); let cols = sample_cells(&grid, None); - let bdims = - kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) - .unwrap(); + let bdims = grid.dims(); - let index = app_data_index_from_lookup(&grid.lookup); - let res = reconstruct_extrinsics(&index, &bdims, cols).unwrap(); + let res = reconstruct_extrinsics(&grid.lookup, bdims, cols).unwrap(); let s = String::from_utf8_lossy(res[0].1[0].as_slice()); - assert_eq!(res[0].1[0], orig_data); + assert_eq!(s, orig_data); + assert_eq!(res[0].1[0], orig_data.as_bytes()); eprintln!("Decoded: {}", s); } diff --git a/kate/src/gridgen/tests/mod.rs b/kate/src/gridgen/tests/mod.rs index e0828278..5475647e 100644 --- 
a/kate/src/gridgen/tests/mod.rs +++ b/kate/src/gridgen/tests/mod.rs @@ -1,13 +1,12 @@ -use da_types::{AppExtrinsic, DataLookup}; -use kate_grid::Grid; -use kate_recovery::{data::DataCell, index::AppDataIndex}; +use avail_core::{AppExtrinsic, AppId}; +use kate_recovery::{data::DataCell, matrix::Position}; use once_cell::sync::Lazy; use poly_multiproof::{m1_blst::M1NoPrecomp, traits::AsBytes}; use proptest::{collection, prelude::*, sample::size_range}; use rand::{distributions::Uniform, prelude::Distribution, SeedableRng}; use rand_chacha::ChaChaRng; -use crate::testnet; +use crate::{gridgen::ArkScalar, testnet}; use super::EvaluationGrid; @@ -23,7 +22,7 @@ fn app_extrinsic_strategy() -> impl Strategy { any_with::>(size_range(1..2048).lift()), ) .prop_map(|(app_id, data)| AppExtrinsic { - app_id: app_id.into(), + app_id: AppId(app_id), data, }) } @@ -36,13 +35,6 @@ fn app_extrinsics_strategy() -> impl Strategy> { }) } -fn app_data_index_from_lookup(lookup: &DataLookup) -> AppDataIndex { - AppDataIndex { - size: lookup.size, - index: lookup.index.iter().map(|e| (e.app_id.0, e.start)).collect(), - } -} - fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { let mut sampled = vec![]; let u = Uniform::from(0..n); @@ -55,22 +47,27 @@ fn sample_unique(rng: &mut impl Rng, n_samples: usize, n: usize) -> Vec { sampled } -fn sample_cells(grid: &EvaluationGrid, columns: Option<&[usize]>) -> Vec { +fn sample_cells(grid: &EvaluationGrid, columns: Option>) -> Vec { let mut rng = ChaChaRng::from_seed([42u8; 32]); - let cols: Vec = match columns { - Some(cols) => cols.to_vec(), - None => (0..grid.dims.width()).into_iter().collect(), - }; + let (g_rows, g_cols): (usize, usize) = grid.dims().into(); + let cols = columns.unwrap_or_else(|| (0..g_cols).into_iter().collect()); + cols.iter() .flat_map(|x| { - sample_unique(&mut rng, grid.dims.height() / 2, grid.dims.height()) + debug_assert!(*x < g_cols); + sample_unique(&mut rng, g_rows / 2, g_rows) .into_iter() - 
.map(move |y| kate_recovery::data::DataCell { - position: kate_recovery::matrix::Position { - row: y as u32, - col: *x as u16, - }, - data: grid.evals.get(*x, y).unwrap().to_bytes().unwrap(), + .map(move |y| { + let data = grid + .evals + .get((y, *x)) + .and_then(|s: &ArkScalar| s.to_bytes().ok()) + .unwrap(); + // SAFETY: `y` and `x` can be casted safetly becasue `x < g_cols (u16)` and `y + // < g_rows(u16)` + let position = Position::from((y as u32, *x as u16)); + + DataCell::new(position, data) }) }) .collect::>() diff --git a/kate/src/gridgen/tests/reconstruction.rs b/kate/src/gridgen/tests/reconstruction.rs index 7c19d21b..716a2352 100644 --- a/kate/src/gridgen/tests/reconstruction.rs +++ b/kate/src/gridgen/tests/reconstruction.rs @@ -1,18 +1,19 @@ -use super::{app_data_index_from_lookup, PMP}; -use crate::com::Cell; -use crate::gridgen::tests::sample_cells; -use crate::gridgen::EvaluationGrid; -use crate::Seed; -use da_types::AppExtrinsic; -use kate_grid::Grid; -use kate_recovery::com::reconstruct_extrinsics; -use kate_recovery::data::Cell as DCell; -use kate_recovery::matrix::Position as DPosition; +use super::PMP; +use crate::{ + com::Cell, + gridgen::{tests::sample_cells, EvaluationGrid}, + Seed, +}; +use avail_core::{AppExtrinsic, AppId, BlockLengthColumns, BlockLengthRows}; +use core::num::NonZeroU16; +use kate_recovery::{ + com::{reconstruct_app_extrinsics, reconstruct_extrinsics}, + data::Cell as DCell, + matrix::{Dimensions, Position}, +}; use poly_multiproof::traits::AsBytes; use proptest::prelude::*; -use rand::distributions::Uniform; -use rand::prelude::Distribution; -use rand::SeedableRng; +use rand::{distributions::Uniform, prelude::Distribution, SeedableRng}; use rand_chacha::ChaChaRng; #[test] @@ -20,28 +21,20 @@ fn test_multiple_extrinsics_for_same_app_id() { let xt1 = vec![5, 5]; let xt2 = vec![6, 6]; let xts = [ - AppExtrinsic { - app_id: 1.into(), - data: xt1.clone(), - }, - AppExtrinsic { - app_id: 1.into(), - data: xt2.clone(), - }, + 
AppExtrinsic::new(AppId(1), xt1.clone()), + AppExtrinsic::new(AppId(1), xt2.clone()), ]; // The hash is used for seed for padding the block to next power of two value let hash = Seed::default(); let ev = EvaluationGrid::from_extrinsics(xts.into(), 4, 128, 2, hash) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); let cells = sample_cells(&ev, None); - let index = app_data_index_from_lookup(&ev.lookup); - let bdims = - kate_recovery::matrix::Dimensions::new(ev.dims.height() as u16, ev.dims.width() as u16) - .unwrap(); - let res = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); + let (rows, cols): (u16, u16) = ev.dims().into(); + let bdims = Dimensions::new_from(rows, cols).unwrap(); + let res = reconstruct_extrinsics(&ev.lookup, bdims, cells).unwrap(); assert_eq!(res[0].1[0], xt1); assert_eq!(res[0].1[1], xt2); @@ -51,38 +44,39 @@ proptest! { #![proptest_config(ProptestConfig::with_cases(5))] #[test] fn test_build_and_reconstruct(exts in super::app_extrinsics_strategy()) { - let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(2).unwrap(); - let dims = &grid.dims; + let grid = EvaluationGrid::from_extrinsics(exts.clone(), 4, 256, 256, Seed::default()).unwrap().extend_columns(unsafe { NonZeroU16::new_unchecked(2)}).unwrap(); + let (rows, cols) :(usize,usize)= grid.dims().into(); //let (layout, commitments, dims, matrix) = par_build_commitments( // BlockLengthRows(64), BlockLengthColumns(16), 32, xts, Seed::default()).unwrap(); const RNG_SEED: Seed = [42u8; 32]; let cells = sample_cells(&grid, None); - let index = app_data_index_from_lookup(&grid.lookup); - let bdims = kate_recovery::matrix::Dimensions::new(dims.height() as u16, dims.width() as u16).unwrap(); - let reconstructed = reconstruct_extrinsics(&index, &bdims, cells).unwrap(); - for (result, xt) in reconstructed.iter().zip(exts) { - prop_assert_eq!(result.0, *xt.app_id); - 
prop_assert_eq!(result.1[0].as_slice(), &xt.data); + let bdims = Dimensions::new_from(rows, cols).unwrap(); + let reconstructed = reconstruct_extrinsics(&grid.lookup, bdims, cells).unwrap(); + for ((id,data), xt) in reconstructed.iter().zip(exts) { + prop_assert_eq!(id.0, *xt.app_id); + prop_assert_eq!(data[0].as_slice(), &xt.data); } let pp = &*PMP; let polys = grid.make_polynomial_grid().unwrap(); let commitments = polys.commitments(pp).unwrap(); - let indices = (0..dims.width()).flat_map(|x| (0..dims.height()).map(move |y| (x, y))).collect::>(); + let indices = (0..cols).flat_map(|x| (0..rows).map(move |y| (x, y))).collect::>(); // Sample some number 10 of the indices, all is too slow for tests... let mut rng = ChaChaRng::from_seed(RNG_SEED); let sampled = Uniform::from(0..indices.len()).sample_iter(&mut rng).take(10).map(|i| indices[i]); for (x, y) in sampled { - let cell = Cell { row: (y as u32).into(), col: (x as u32).into() }; + let row = BlockLengthRows(u32::try_from(y).unwrap()); + let col = BlockLengthColumns(u32::try_from(x).unwrap()); + let cell = Cell::new( row, col); let proof = polys.proof(pp, &cell).unwrap(); let mut content = [0u8; 80]; content[..48].copy_from_slice(&proof.to_bytes().unwrap()[..]); - content[48..].copy_from_slice(&grid.evals.get(x, y).unwrap().to_bytes().unwrap()[..]); + content[48..].copy_from_slice(&grid.get(y, x).unwrap().to_bytes().unwrap()[..]); - let dcell = DCell{position: DPosition { row: y as u32, col: x as u16 }, content }; - let verification = kate_recovery::proof::verify(&kate_recovery::testnet::public_params(256), &bdims, &commitments[y].to_bytes().unwrap(), &dcell); + let dcell = DCell{position: Position { row: y as u32, col: x as u16 }, content }; + let verification = kate_recovery::proof::verify(&kate_recovery::testnet::public_params(256), bdims, &commitments[y].to_bytes().unwrap(), &dcell); prop_assert!(verification.is_ok()); prop_assert!(verification.unwrap()); } @@ -98,37 +92,24 @@ get erasure coded to ensure 
redundancy."#; br#""Let's see how this gets encoded and then reconstructed by sampling only some data."#; let xts = vec![ - AppExtrinsic { - app_id: 0.into(), - data: vec![0], - }, - AppExtrinsic { - app_id: 1.into(), - data: app_id_1_data.to_vec(), - }, - AppExtrinsic { - app_id: 2.into(), - data: app_id_2_data.to_vec(), - }, + AppExtrinsic::new(AppId(0), vec![0]), + AppExtrinsic::new(AppId(1), app_id_1_data.to_vec()), + AppExtrinsic::new(AppId(2), app_id_2_data.to_vec()), ]; let grid = EvaluationGrid::from_extrinsics(xts.clone(), 4, 4, 32, Seed::default()) .unwrap() - .extend_columns(2) + .extend_columns(unsafe { NonZeroU16::new_unchecked(2) }) .unwrap(); - let cols_1 = sample_cells(&grid, Some(&[0, 1, 2, 3])); + let cols_1 = sample_cells(&grid, Some(vec![0, 1, 2, 3])); - let index = app_data_index_from_lookup(&grid.lookup); - - let bdims = - kate_recovery::matrix::Dimensions::new(grid.dims.height() as u16, grid.dims.width() as u16) - .unwrap(); - let res_1 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_1, 1).unwrap(); + let bdims = grid.dims(); + let res_1 = reconstruct_app_extrinsics(&grid.lookup, bdims, cols_1, AppId(1)).unwrap(); assert_eq!(res_1[0], app_id_1_data); - let cols_2 = sample_cells(&grid, Some(&[0, 2, 3])); + let cols_2 = sample_cells(&grid, Some(vec![0, 2, 3])); - let res_2 = kate_recovery::com::reconstruct_app_extrinsics(&index, &bdims, cols_2, 2).unwrap(); + let res_2 = reconstruct_app_extrinsics(&grid.lookup, bdims, cols_2, AppId(2)).unwrap(); assert_eq!(res_2[0], app_id_2_data); } diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 34329b1e..c9cb2180 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -1,30 +1,34 @@ #![cfg_attr(not(feature = "std"), no_std)] #![deny(clippy::integer_arithmetic)] -use da_types::{BlockLengthColumns, BlockLengthRows}; +use avail_core::{BlockLengthColumns, BlockLengthRows}; +use core::{ + convert::TryInto, + num::{NonZeroU32, TryFromIntError}, +}; +use derive_more::Constructor; 
#[cfg(feature = "std")] pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsScalar}; -#[cfg(feature = "std")] use kate_recovery::matrix::Dimensions; use sp_arithmetic::traits::SaturatedConversion; use static_assertions::const_assert_ne; +use thiserror_no_std::Error; use crate::config::DATA_CHUNK_SIZE; pub const LOG_TARGET: &str = "kate"; +pub const U32_USIZE_ERR: &str = "`u32` cast to `usize` overflows, unsupported platform"; + pub type Seed = [u8; 32]; #[cfg(feature = "std")] pub use dusk_bytes::Serializable; #[cfg(feature = "std")] -pub use kate_grid as grid; -#[cfg(feature = "std")] pub use poly_multiproof as pmp; pub mod config { - use kate_grid::Extension; - use super::{BlockLengthColumns, BlockLengthRows}; + use core::num::NonZeroU16; // TODO: Delete this? not used anywhere pub const SCALAR_SIZE_WIDE: usize = 64; @@ -32,7 +36,8 @@ pub mod config { pub const SCALAR_SIZE: usize = 32; pub const DATA_CHUNK_SIZE: usize = 31; // Actual chunk size is 32 after 0 padding is done pub const EXTENSION_FACTOR: u32 = 2; - pub const EXTENSION: Extension = Extension::height_unchecked(2); + pub const ROW_EXTENSION: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(2) }; + pub const COL_EXTENSION: NonZeroU16 = unsafe { NonZeroU16::new_unchecked(1) }; pub const PROVER_KEY_SIZE: u32 = 48; pub const PROOF_SIZE: usize = 48; // MINIMUM_BLOCK_SIZE, MAX_BLOCK_ROWS and MAX_BLOCK_COLUMNS have to be a power of 2 because of the FFT functions requirements @@ -54,27 +59,28 @@ pub mod config { /// - Dedup this from `kate-recovery` once that library support `no-std`. 
#[cfg(feature = "std")] pub mod testnet { - use super::{BlockLengthColumns, PublicParameters}; + use super::*; use hex_literal::hex; use once_cell::sync::Lazy; use poly_multiproof::ark_ff::{BigInt, Fp}; use poly_multiproof::ark_serialize::CanonicalDeserialize; use poly_multiproof::m1_blst; use poly_multiproof::m1_blst::{Fr, G1, G2}; - use rand::SeedableRng; - use rand_chacha::ChaChaRng; + use rand_chacha::{rand_core::SeedableRng, ChaChaRng}; use std::{collections::HashMap, sync::Mutex}; static SRS_DATA: Lazy>> = Lazy::new(|| Mutex::new(HashMap::new())); pub fn public_params(max_degree: BlockLengthColumns) -> PublicParameters { + let max_degree: u32 = max_degree.into(); let mut srs_data_locked = SRS_DATA.lock().unwrap(); srs_data_locked - .entry(max_degree.0) + .entry(max_degree) .or_insert_with(|| { let mut rng = ChaChaRng::seed_from_u64(42); - PublicParameters::setup(max_degree.as_usize(), &mut rng).unwrap() + let max_degree = usize::try_from(max_degree).unwrap(); + PublicParameters::setup(max_degree, &mut rng).unwrap() }) .clone() } @@ -145,7 +151,7 @@ pub mod testnet { let pmp_ev = GeneralEvaluationDomain::::new(1024).unwrap(); let pmp_poly = pmp_ev.ifft(&pmp_evals); - let pubs = testnet::public_params(da_types::BlockLengthColumns(1024)); + let pubs = testnet::public_params(BlockLengthColumns(1024)); let dp_commit = pubs.commit_key().commit(&dp_poly).unwrap().0.to_bytes(); let mut pmp_commit = [0u8; 48]; @@ -204,55 +210,32 @@ pub fn padded_len(len: u32, chunk_size: u32) -> u32 { iec_9797_1_len + pad_to_chunk_extra } -#[derive(Clone, Copy, PartialEq, Eq, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Debug, Constructor)] pub struct BlockDimensions { pub rows: BlockLengthRows, pub cols: BlockLengthColumns, - pub chunk_size: u32, + pub chunk_size: NonZeroU32, } impl BlockDimensions { - pub fn size(&self) -> usize { - self.rows - .0 - .saturating_mul(self.cols.0) - .saturating_mul(self.chunk_size) as usize - } - - pub fn new(rows: R, cols: C, chunk_size: u32) -> Self 
- where - R: Into, - C: Into, - { - Self { - rows: rows.into(), - cols: cols.into(), - chunk_size, - } + pub fn size(&self) -> Option { + let rows_cols = self.rows.0.checked_mul(self.cols.0)?; + let rows_cols_chunk = rows_cols.checked_mul(self.chunk_size.get())?; + usize::try_from(rows_cols_chunk).ok() } } -#[derive(PartialEq, Eq, Debug)] +#[derive(Error, Copy, Clone, PartialEq, Eq, Debug)] pub enum TryFromBlockDimensionsError { - InvalidRowsOrColumns(sp_std::num::TryFromIntError), + InvalidRowsOrColumns(#[from] TryFromIntError), InvalidDimensions, } -impl From for TryFromBlockDimensionsError { - fn from(error: sp_std::num::TryFromIntError) -> Self { - TryFromBlockDimensionsError::InvalidRowsOrColumns(error) - } -} - -#[cfg(feature = "std")] -impl sp_std::convert::TryInto for BlockDimensions { +impl TryInto for BlockDimensions { type Error = TryFromBlockDimensionsError; fn try_into(self) -> Result { - let rows = self.rows.0.try_into()?; - let cols = self.cols.0.try_into()?; - - Dimensions::new(rows, cols).ok_or(TryFromBlockDimensionsError::InvalidDimensions) + Dimensions::new_from(self.rows.0, self.cols.0).ok_or(Self::Error::InvalidDimensions) } } diff --git a/kate/src/metrics.rs b/kate/src/metrics.rs index ea3e74a9..eefd2a0f 100644 --- a/kate/src/metrics.rs +++ b/kate/src/metrics.rs @@ -7,7 +7,7 @@ pub trait Metrics { fn preparation_block_time(&self, elapsed: Duration); fn commitment_build_time(&self, elapsed: Duration); fn proof_build_time(&self, elapsed: Duration, cells: u32); - fn block_dims_and_size(&self, block_dims: &BlockDimensions, block_len: u32); + fn block_dims_and_size(&self, block_dims: BlockDimensions, block_len: u32); } /// Adapter to ignore any measurements. 
@@ -20,5 +20,5 @@ impl Metrics for IgnoreMetrics { fn preparation_block_time(&self, _: Duration) {} fn commitment_build_time(&self, _: Duration) {} fn proof_build_time(&self, _: Duration, _: u32) {} - fn block_dims_and_size(&self, _: &BlockDimensions, _: u32) {} + fn block_dims_and_size(&self, _: BlockDimensions, _: u32) {} } diff --git a/primitives/nomad/nomad-base/Cargo.toml b/nomad/base/Cargo.toml similarity index 56% rename from primitives/nomad/nomad-base/Cargo.toml rename to nomad/base/Cargo.toml index 74e78694..65907b40 100644 --- a/primitives/nomad/nomad-base/Cargo.toml +++ b/nomad/base/Cargo.toml @@ -1,22 +1,20 @@ [package] name = "nomad-base" -version = "0.1.3" +version = "0.1.4" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -nomad-core = { path = "../nomad-core", default-features = false } +nomad-core = { path = "../core", default-features = false } nomad-signature = { path = "../signature", default-features = false } # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -frame-support = { version = "4.0.0-dev", default-features = false } -primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0", default-features = false } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } + +sp-runtime = { version = "7", default-features = false, optional = true } # Eth ethers-signers = { version = "1", optional = true } @@ -31,12 +29,10 @@ std = [ "serde", "ethers-signers", "once_cell", - "primitive-types/serde", "codec/std", "nomad-signature/std", "scale-info/std", - 
"frame-support/std", "nomad-core/std", - "sp-std/std", "sp-core/std", + "sp-runtime/std", ] diff --git a/primitives/nomad/nomad-base/src/lib.rs b/nomad/base/src/lib.rs similarity index 96% rename from primitives/nomad/nomad-base/src/lib.rs rename to nomad/base/src/lib.rs index c5878520..0da94e9a 100644 --- a/primitives/nomad/nomad-base/src/lib.rs +++ b/nomad/base/src/lib.rs @@ -1,14 +1,16 @@ #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode, MaxEncodedLen}; use nomad_core::{home_domain_hash, to_eth_signed_message_hash, NomadState, SignedUpdate, Update}; use nomad_signature::SignatureError; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; +use scale_info::TypeInfo; use sp_core::{H160, H256}; +use sp_runtime::RuntimeDebug; #[cfg(feature = "std")] pub mod testing; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; #[derive(Clone, Copy, Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] diff --git a/primitives/nomad/nomad-base/src/testing.rs b/nomad/base/src/testing.rs similarity index 100% rename from primitives/nomad/nomad-base/src/testing.rs rename to nomad/base/src/testing.rs diff --git a/primitives/nomad/nomad-core/Cargo.toml b/nomad/core/Cargo.toml similarity index 70% rename from primitives/nomad/nomad-core/Cargo.toml rename to nomad/core/Cargo.toml index 2e719a28..b1aa67c5 100644 --- a/primitives/nomad/nomad-core/Cargo.toml +++ b/nomad/core/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nomad-core" -version = "0.1.3" +version = "0.1.4" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -10,13 +10,12 @@ nomad-signature = { path = "../signature", default-features = false } # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -frame-support = { version = "4.0.0-dev", 
default-features = false } primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-runtime = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0", default-features = false } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } +sp-std = { version = "*", default-features = false } + +sp-runtime = { version = "7", default-features = false, optional = true } # Eth ethers-core = { version = "1", optional = true } @@ -39,8 +38,8 @@ std = [ "nomad-signature/std", "codec/std", "scale-info/std", - "frame-support/std", "sp-runtime/std", + "sp-std/std", ] runtime-benchmarks = [] diff --git a/primitives/nomad/nomad-core/src/lib.rs b/nomad/core/src/lib.rs similarity index 100% rename from primitives/nomad/nomad-core/src/lib.rs rename to nomad/core/src/lib.rs diff --git a/primitives/nomad/nomad-core/src/nomad_message.rs b/nomad/core/src/nomad_message.rs similarity index 90% rename from primitives/nomad/nomad-core/src/nomad_message.rs rename to nomad/core/src/nomad_message.rs index 23572259..1902a3e8 100644 --- a/primitives/nomad/nomad-core/src/nomad_message.rs +++ b/nomad/core/src/nomad_message.rs @@ -1,5 +1,7 @@ -use frame_support::{pallet_prelude::*, traits::Get}; -use sp_core::H256; +use codec::{Decode, Encode}; +use scale_info::TypeInfo; +use sp_core::{bounded::BoundedVec, Get, H256}; +use sp_runtime::RuntimeDebug; use sp_std::{mem::size_of, vec::Vec}; /// Size of `NomadMessage` fields except `body`. 
@@ -53,16 +55,11 @@ impl> NomadMessage { #[cfg(test)] mod tests { - use core::convert::TryInto; - - use frame_support::{parameter_types, BoundedVec}; + use super::*; + use sp_core::ConstU32; use sp_std::mem::size_of_val; - use super::{NomadMessage, NON_BODY_LENGTH}; - - parameter_types! { - const MaxBodyLen :u32 = 1024; - } + type MaxBodyLen = ConstU32<1024>; /// Double checks that constant `NON_BODY_LENGTH` will be synchronized with actual #[test] diff --git a/primitives/nomad/nomad-core/src/state.rs b/nomad/core/src/state.rs similarity index 79% rename from primitives/nomad/nomad-core/src/state.rs rename to nomad/core/src/state.rs index 7691b781..b5060ed0 100644 --- a/primitives/nomad/nomad-core/src/state.rs +++ b/nomad/core/src/state.rs @@ -1,4 +1,7 @@ -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; +use sp_runtime::RuntimeDebug; + #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; diff --git a/primitives/nomad/nomad-core/src/test_utils.rs b/nomad/core/src/test_utils.rs similarity index 100% rename from primitives/nomad/nomad-core/src/test_utils.rs rename to nomad/core/src/test_utils.rs diff --git a/primitives/nomad/nomad-core/src/typed_message.rs b/nomad/core/src/typed_message.rs similarity index 100% rename from primitives/nomad/nomad-core/src/typed_message.rs rename to nomad/core/src/typed_message.rs diff --git a/primitives/nomad/nomad-core/src/update.rs b/nomad/core/src/update.rs similarity index 96% rename from primitives/nomad/nomad-core/src/update.rs rename to nomad/core/src/update.rs index befe19d0..18b984df 100644 --- a/primitives/nomad/nomad-core/src/update.rs +++ b/nomad/core/src/update.rs @@ -1,8 +1,10 @@ -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode}; use nomad_signature::{hash_message, Signature, SignatureError}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{H160, H256}; +use sp_runtime::RuntimeDebug; 
use crate::utils::home_domain_hash; diff --git a/primitives/nomad/nomad-core/src/update_v2.rs b/nomad/core/src/update_v2.rs similarity index 96% rename from primitives/nomad/nomad-core/src/update_v2.rs rename to nomad/core/src/update_v2.rs index 6c6d277f..f4dd6e1f 100644 --- a/primitives/nomad/nomad-core/src/update_v2.rs +++ b/nomad/core/src/update_v2.rs @@ -1,10 +1,12 @@ #![allow(dead_code)] -use frame_support::pallet_prelude::*; +use codec::{Decode, Encode}; use nomad_signature::{hash_message, Signature, SignatureError}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{H160, H256}; +use sp_runtime::RuntimeDebug; use crate::utils::home_domain_hash; diff --git a/primitives/nomad/nomad-core/src/utils.rs b/nomad/core/src/utils.rs similarity index 100% rename from primitives/nomad/nomad-core/src/utils.rs rename to nomad/core/src/utils.rs diff --git a/primitives/nomad/merkle/Cargo.toml b/nomad/merkle/Cargo.toml similarity index 56% rename from primitives/nomad/merkle/Cargo.toml rename to nomad/merkle/Cargo.toml index 451e457f..fc49e9b6 100644 --- a/primitives/nomad/merkle/Cargo.toml +++ b/nomad/merkle/Cargo.toml @@ -1,22 +1,23 @@ [package] name = "nomad-merkle" -version = "0.1.1" +version = "0.1.2" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +# Internal +nomad-core = { path = "../core", default-features = false } + +# Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -frame-support = { version = "4.0.0-dev", default-features = false } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } + +frame-support = { version = "4.0.0-dev", default-features = false, optional = true } + +# 3rd-party hex-literal = "0.3.4" -nomad-core = { path = "../nomad-core", default-features = false } -# 
parity-util-mem = { version = "0.10.2", default-features = false, features = ["primitive-types"] } -primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-runtime = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0-dev", default-features = false } static_assertions = "1.1.0" thiserror-no-std = "2.0.2" tiny-keccak = { version = "2.0.2", default-features = false, features = ["keccak"] } @@ -37,8 +38,8 @@ default = ["std"] std = [ "serde", "nomad-core/std", - "primitive-types/serde", "codec/std", "scale-info/std", + "sp-core/std", "frame-support/std", ] diff --git a/primitives/nomad/merkle/fixtures/merkle.json b/nomad/merkle/fixtures/merkle.json similarity index 100% rename from primitives/nomad/merkle/fixtures/merkle.json rename to nomad/merkle/fixtures/merkle.json diff --git a/primitives/nomad/merkle/src/error.rs b/nomad/merkle/src/error.rs similarity index 100% rename from primitives/nomad/merkle/src/error.rs rename to nomad/merkle/src/error.rs diff --git a/primitives/nomad/merkle/src/lib.rs b/nomad/merkle/src/lib.rs similarity index 100% rename from primitives/nomad/merkle/src/lib.rs rename to nomad/merkle/src/lib.rs diff --git a/primitives/nomad/merkle/src/light.rs b/nomad/merkle/src/light.rs similarity index 100% rename from primitives/nomad/merkle/src/light.rs rename to nomad/merkle/src/light.rs diff --git a/primitives/nomad/merkle/src/proof.rs b/nomad/merkle/src/proof.rs similarity index 100% rename from primitives/nomad/merkle/src/proof.rs rename to nomad/merkle/src/proof.rs diff --git a/primitives/nomad/merkle/src/test_utils.rs b/nomad/merkle/src/test_utils.rs similarity index 100% rename from primitives/nomad/merkle/src/test_utils.rs rename to nomad/merkle/src/test_utils.rs diff --git 
a/primitives/nomad/merkle/src/utils.rs b/nomad/merkle/src/utils.rs similarity index 100% rename from primitives/nomad/merkle/src/utils.rs rename to nomad/merkle/src/utils.rs diff --git a/primitives/nomad/signature/Cargo.toml b/nomad/signature/Cargo.toml similarity index 73% rename from primitives/nomad/signature/Cargo.toml rename to nomad/signature/Cargo.toml index 4bc26073..25d6d5e1 100644 --- a/primitives/nomad/signature/Cargo.toml +++ b/nomad/signature/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "nomad-signature" -version = "0.1.1" +version = "0.1.2" authors = ["Luke Tchang "] edition = "2021" license = "MIT OR Apache-2.0" @@ -18,14 +18,11 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] # Substrate & Parity codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } +scale-info = { version = "2", default-features = false, features = ["derive"] } +sp-core = { version = "*", default-features = false } + frame-support = { version = "4.0.0-dev", default-features = false } -primitive-types = { version = "0.12", default-features = false, features = ["scale-info", "codec"] } -rlp = { version = "0.5.0", default-features = false } -rlp-derive = { version = "0.1.0", default-features = false } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-std = { version = "4.0.0", default-features = false } +sp-runtime = { version = "7", default-features = false, optional = true } # Eth ethers-core = { version = "1", default-features = false, optional = true } @@ -47,9 +44,10 @@ default = ["std"] std = [ "serde", "hex/std", - "primitive-types/serde", - "codec/std", "scale-info/std", + "codec/std", + "sp-core/std", + "sp-runtime/std", "frame-support/std", "ethers-core", ] diff --git a/primitives/nomad/signature/README.md b/nomad/signature/README.md similarity index 100% rename 
from primitives/nomad/signature/README.md rename to nomad/signature/README.md diff --git a/primitives/nomad/signature/src/lib.rs b/nomad/signature/src/lib.rs similarity index 100% rename from primitives/nomad/signature/src/lib.rs rename to nomad/signature/src/lib.rs diff --git a/primitives/nomad/signature/src/signature.rs b/nomad/signature/src/signature.rs similarity index 97% rename from primitives/nomad/signature/src/signature.rs rename to nomad/signature/src/signature.rs index 70844cbe..6c87b074 100644 --- a/primitives/nomad/signature/src/signature.rs +++ b/nomad/signature/src/signature.rs @@ -1,10 +1,11 @@ // Code adapted from: https://github.com/gakonst/ethers-rs/blob/master/ethers-core/src/types/signature.rs +use crate::utils::hash_message; use alloc::{borrow::ToOwned, string::String, vec::Vec}; -use core::{convert::TryFrom, fmt, str::FromStr}; - +use codec::{Decode, Encode}; +use core::convert::TryFrom; use elliptic_curve::{consts::U32, sec1::ToEncodedPoint as _}; -use frame_support::{pallet_prelude::*, sp_runtime::traits::Keccak256}; +use frame_support::ensure; use generic_array::GenericArray; use k256::{ ecdsa::{ @@ -13,12 +14,15 @@ use k256::{ }, PublicKey as K256PublicKey, }; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::{Hasher, H160, H256, U256}; +use scale_info::TypeInfo; +use sp_core::{Hasher as _, H160, H256, U256}; +use sp_runtime::{traits::Keccak256, RuntimeDebug}; use thiserror_no_std::Error; -use crate::utils::hash_message; +#[cfg(feature = "std")] +use core::{fmt, str::FromStr}; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; type Address = H160; diff --git a/primitives/nomad/signature/src/utils.rs b/nomad/signature/src/utils.rs similarity index 100% rename from primitives/nomad/signature/src/utils.rs rename to nomad/signature/src/utils.rs diff --git a/primitives/avail/Cargo.toml b/primitives/avail/Cargo.toml deleted file mode 100644 index 4e720255..00000000 --- a/primitives/avail/Cargo.toml +++ 
/dev/null @@ -1,57 +0,0 @@ -[package] -name = "da-primitives" -version = "0.4.6" -authors = [] -edition = "2021" - -[dependencies] -# Others -da-types = { path = "../types", default-features = false, features = ["substrate"] } -derive_more = "0.99.17" -log = { version = "0.4.8", default-features = false } -serde = { version = "1.0.121", optional = true, features = ["derive"] } -serde_json = { version = "1.0", optional = true } -thiserror-no-std = "2.0.2" - -# Substrate -beefy-merkle-tree = { git = "https://github.com/paritytech/substrate.git/", branch = "polkadot-v0.9.37", default-features = false } -codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } -frame-support = { version = "4.0.0-dev", default-features = false } -hash256-std-hasher = { version = "0.15.2", default-features = false } -parity-util-mem = { version = "0.12.0", default-features = false, features = ["primitive-types"] } -scale-info = { version = "2.1.1", default-features = false, features = ["derive"] } -sp-core = { version = "7.0.0", default-features = false } -sp-io = { version = "7.0.0", default-features = false } -sp-runtime = { version = "7.0.0", default-features = false } -sp-runtime-interface = { version = "7.0.0", default-features = false, features = ["disable_target_static_assertions"] } -sp-std = { version = "4.0.0", default-features = false } -sp-trie = { version = "7.0.0", default-features = false } - -[dev-dependencies] -hex-literal = "0.3.4" -test-case = "1.2.3" - -[features] -default = ["std"] -std = [ - "serde", - "serde_json", - "codec/std", - "scale-info/std", - "log/std", - "sp-core/std", - "sp-std/std", - "sp-io/std", - "sp-runtime/std", - "sp-trie/std", - "sp-runtime-interface/std", - "hash256-std-hasher/std", - "frame-support/std", - "parity-util-mem/std", - "beefy-merkle-tree/std", -] - -header-backward-compatibility-test = [] -try-runtime = [ - "sp-runtime/try-runtime", -] diff --git a/primitives/avail/src/asdr.rs 
b/primitives/avail/src/asdr.rs deleted file mode 100644 index 5064fdba..00000000 --- a/primitives/avail/src/asdr.rs +++ /dev/null @@ -1,36 +0,0 @@ -use codec::Encode; -pub use da_types::{AppExtrinsic, AppId, DataLookup, DataLookupIndexItem, GetAppId}; -use sp_runtime::traits::SignedExtension; - -mod app_unchecked_extrinsic; -pub use app_unchecked_extrinsic::*; - -impl From<&AppUncheckedExtrinsic> for AppExtrinsic -where - A: Encode, - C: Encode, - S: Encode, - E: SignedExtension + GetAppId, -{ - fn from(app_ext: &AppUncheckedExtrinsic) -> Self { - Self { - app_id: app_ext.app_id(), - data: app_ext.encode(), - } - } -} - -impl From> for AppExtrinsic -where - A: Encode, - C: Encode, - S: Encode, - E: SignedExtension + GetAppId, -{ - fn from(app_ext: AppUncheckedExtrinsic) -> Self { - Self { - app_id: app_ext.app_id(), - data: app_ext.encode(), - } - } -} diff --git a/primitives/avail/src/lib.rs b/primitives/avail/src/lib.rs deleted file mode 100644 index 1ecd2825..00000000 --- a/primitives/avail/src/lib.rs +++ /dev/null @@ -1,81 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] - -pub use da_types::{BlockLengthColumns, BlockLengthRows}; -use sp_runtime::Perbill; - -pub mod opaque_extrinsic; -pub use opaque_extrinsic::*; - -/// Customized headers. -pub mod header; -pub use header::*; - -/// Kate Commitment on Headers. -pub mod kate_commitment; -pub use kate_commitment::*; - -/// Application Specific Data Retrieval -pub mod asdr; - -pub mod sha2; -pub mod traits; -pub use sha2::ShaTwo256; - -pub mod data_proof; -pub use data_proof::DataProof; - -pub mod well_known_keys { - /// Public params used to generate Kate commitment - pub const KATE_PUBLIC_PARAMS: &[u8] = b":kate_public_params:"; -} - -/// We allow `Normal` extrinsics to fill up the block up to 90%, the rest can be used -/// by Operational extrinsics. -pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(90); - -pub const BLOCK_CHUNK_SIZE: u32 = 32; - -/// Money matters. 
-pub mod currency { - - pub type Balance = u128; - - /// AVL has 18 decimal positions. - pub const AVL: Balance = 1_000_000_000_000_000_000; - - /// Cents of AVL has 16 decimal positions (100 Cents = $1) - /// 1 DOLLARS = 10_000_000_000_000_000 - pub const CENTS: Balance = AVL / 100; - - /// Millicent of AVL has 13 decimal positions( 100 mCents = 1 cent). - pub const MILLICENTS: Balance = CENTS / 1_000; -} - -#[repr(u8)] -pub enum InvalidTransactionCustomId { - /// The AppId is not registered. - InvalidAppId = 137, - /// Extrinsic is not allowed for the given `AppId`. - ForbiddenAppId, - /// Max padded length was exceeded. - MaxPaddedLenExceeded, -} - -/// Provides an implementation of [`frame_support::traits::Randomness`] that should only be used in -/// on Benchmarks! -pub struct BenchRandomness(sp_std::marker::PhantomData); - -impl frame_support::traits::Randomness for BenchRandomness -where - Output: codec::Decode + Default, - T: Default, -{ - fn random(subject: &[u8]) -> (Output, T) { - use sp_runtime::traits::TrailingZeroInput; - - ( - Output::decode(&mut TrailingZeroInput::new(subject)).unwrap_or_default(), - T::default(), - ) - } -} diff --git a/primitives/avail/src/sha2.rs b/primitives/avail/src/sha2.rs deleted file mode 100644 index af72d32c..00000000 --- a/primitives/avail/src/sha2.rs +++ /dev/null @@ -1,41 +0,0 @@ -use frame_support::RuntimeDebug; -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; -use sp_core::{storage::StateVersion, Hasher}; -use sp_runtime::traits::Hash; -use sp_std::vec::Vec; -use sp_trie::{LayoutV0, LayoutV1, TrieConfiguration as _}; - -/// Sha2 256 wrapper which supports `beefy-merkle-tree::Hasher`. 
-#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct ShaTwo256 {} - -impl Hasher for ShaTwo256 { - type Out = sp_core::H256; - type StdHasher = hash256_std_hasher::Hash256StdHasher; - const LENGTH: usize = 32; - - fn hash(s: &[u8]) -> Self::Out { - sp_io::hashing::sha2_256(s).into() - } -} - -impl Hash for ShaTwo256 { - type Output = sp_core::H256; - - fn trie_root(input: Vec<(Vec, Vec)>, version: StateVersion) -> Self::Output { - match version { - StateVersion::V0 => LayoutV0::::trie_root(input), - StateVersion::V1 => LayoutV1::::trie_root(input), - } - } - - fn ordered_trie_root(input: Vec>, version: StateVersion) -> Self::Output { - match version { - StateVersion::V0 => LayoutV0::::ordered_trie_root(input), - StateVersion::V1 => LayoutV1::::ordered_trie_root(input), - } - } -} diff --git a/primitives/avail/src/traits.rs b/primitives/avail/src/traits.rs deleted file mode 100644 index 5b951267..00000000 --- a/primitives/avail/src/traits.rs +++ /dev/null @@ -1,76 +0,0 @@ -use codec::{Codec, Decode}; -use sp_core::U256; -use sp_runtime::{ - traits::{ - AtLeast32BitUnsigned, Hash as HashT, MaybeDisplay, MaybeFromStr, MaybeSerializeDeserialize, - Member, SimpleBitOps, - }, - Digest, -}; -use sp_std::{convert::TryFrom, fmt::Debug, hash::Hash as StdHash}; - -use crate::header::HeaderExtension; - -/// Header block number trait. -pub trait HeaderBlockNumber: - Member - + AtLeast32BitUnsigned - + Codec - + MaybeSerializeDeserialize - + MaybeDisplay - + MaybeFromStr - + MaybeFromStr - + StdHash - + Copy - + Into - + TryFrom - + Debug - + Eq -{ -} -impl< - T: Member - + AtLeast32BitUnsigned - + Codec - + MaybeSerializeDeserialize - + MaybeDisplay - + MaybeFromStr - + StdHash - + Copy - + Into - + TryFrom - + Debug - + Eq, - > HeaderBlockNumber for T -{ -} - -/// Header hash. 
-pub trait HeaderHash: HashT {} -impl HeaderHash for T {} - -pub trait HeaderHashOutput: MaybeDisplay + Decode + SimpleBitOps + Ord {} -impl HeaderHashOutput for T {} - -/// Extended header access -pub trait ExtendedHeader { - /// Header number. - type Number; - - /// Header hash type - type Hash; - - /// Creates new header. - fn new( - number: Self::Number, - extrinsics_root: Self::Hash, - state_root: Self::Hash, - parent_hash: Self::Hash, - digest: Digest, - extension: HeaderExtension, - ) -> Self; - - fn extension(&self) -> &HeaderExtension; - - fn set_extension(&mut self, extension: HeaderExtension); -} diff --git a/primitives/types/Cargo.toml b/primitives/types/Cargo.toml deleted file mode 100644 index e415b294..00000000 --- a/primitives/types/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -[package] -name = "da-types" -version = "0.4.4" -authors = [] -edition = "2018" - -[dependencies] -derive_more = "0.99.17" -num-traits = { version = "0.2", default-features = false } -parity-scale-codec = { version = "3", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "2.3.1", default-features = false, features = ["derive"] } - -serde = { version = "1.0", features = ["derive"], optional = true } - -parity-util-mem = { version = "*", default-features = false, optional = true } -sp-debug-derive = { version = "*", default-features = false, optional = true } -sp-runtime = { version = "*", default-features = false, optional = true } - -[dev-dependencies] - -[features] -default = ["std"] -std = ["serde", "parity-scale-codec/std", "scale-info/std", "num-traits/std"] -substrate = ["sp-debug-derive", "parity-util-mem", "sp-runtime"] diff --git a/primitives/types/src/data_lookup.rs b/primitives/types/src/data_lookup.rs deleted file mode 100644 index 6066194d..00000000 --- a/primitives/types/src/data_lookup.rs +++ /dev/null @@ -1,150 +0,0 @@ -use alloc::vec::Vec; -use num_traits::Zero; -use parity_scale_codec::{Decode, Encode}; -use 
scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; - -use crate::AppId; - -#[derive(PartialEq, Eq, Clone, Encode, Decode, Default, TypeInfo)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct DataLookup { - /// size of the look up - #[codec(compact)] - pub size: u32, - /// sorted vector of tuples(key, start index) - pub index: Vec, -} - -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, TypeInfo)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -pub struct DataLookupIndexItem { - pub app_id: AppId, - #[codec(compact)] - pub start: u32, -} - -impl From<(A, S)> for DataLookupIndexItem -where - A: Into, - S: Into, -{ - fn from(value: (A, S)) -> Self { - Self { - app_id: value.0.into(), - start: value.1.into(), - } - } -} - -#[cfg(all(feature = "std", feature = "substrate"))] -impl parity_util_mem::MallocSizeOf for DataLookupIndexItem { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.app_id.size_of(ops) + self.start.size_of(ops) - } -} - -#[derive(PartialEq, Eq)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] -/// Errors during the creation from `extrinsics`. -pub enum TryFromError { - /// Size overflows - SizeOverflow, - /// Extrinsics are not sorted. 
- UnsortedExtrinsics, -} - -use core::convert::TryFrom; -impl TryFrom<&[(AppId, u32)]> for DataLookup { - type Error = TryFromError; - - fn try_from(extrinsics: &[(AppId, u32)]) -> Result { - let mut index = Vec::new(); - // transactions are order by application id - // skip transactions with 0 application id - it's not a data txs - let mut size = 0u32; - let mut prev_app_id = AppId(0); - - for (app_id, data_len) in extrinsics { - if !app_id.is_zero() && prev_app_id != *app_id { - index.push(DataLookupIndexItem { - app_id: *app_id, - start: size, - }); - } - - size = size - .checked_add(*data_len) - .ok_or(Self::Error::SizeOverflow)?; - if prev_app_id > *app_id { - return Err(Self::Error::UnsortedExtrinsics); - } - prev_app_id = *app_id; - } - - Ok(DataLookup { size, index }) - } -} - -#[cfg(all(feature = "std", feature = "substrate"))] -impl parity_util_mem::MallocSizeOf for DataLookup { - fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { - self.size.size_of(ops) + self.index.size_of(ops) - } -} - -#[cfg(test)] -mod test { - use super::*; - - fn into_app_ids(vals: I) -> Vec<(AppId, u32)> - where - I: IntoIterator, - T: Into, - { - vals.into_iter() - .map(|(id, idx)| (id.into(), idx)) - .collect::>() - } - fn into_lookup_items(vals: I) -> Vec - where - I: IntoIterator, - T: Into, - { - vals.into_iter().map(Into::into).collect::>() - } - - fn from_extrinsics_data() -> Vec<(Vec<(AppId, u32)>, Result)> { - vec![ - ( - into_app_ids([(0, 5), (0, 10), (1, 5), (1, 10), (2, 100), (2, 50)]), - Ok(DataLookup { - size: 180, - index: into_lookup_items([(1, 15), (2, 30)]), - }), - ), - ( - into_app_ids([(0, 5), (0, 10), (1, u32::MAX)]), - Err(TryFromError::SizeOverflow), - ), - ( - into_app_ids([(0, 5), (0, 10), (1, 5), (2, 100), (1, 10), (2, 50)]), - Err(TryFromError::UnsortedExtrinsics), - ), - ] - } - - #[test] - fn from_extrinsics() { - for (extrinsic, expected) in from_extrinsics_data() { - let data_lookup = 
DataLookup::try_from(extrinsic.as_slice()); - assert_eq!(data_lookup, expected); - } - } -} diff --git a/primitives/types/src/lib.rs b/primitives/types/src/lib.rs deleted file mode 100644 index 786215b4..00000000 --- a/primitives/types/src/lib.rs +++ /dev/null @@ -1,153 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] -extern crate alloc; -use alloc::vec::Vec; -use derive_more::{Add, Constructor, Deref, Display, From, Into, Mul}; -use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; -use scale_info::TypeInfo; -#[cfg(feature = "std")] -use serde::{Deserialize, Serialize}; - -mod data_lookup; -mod get_app_id; -pub use data_lookup::*; -pub use get_app_id::*; - -/// Raw Extrinsic with application id. -#[derive(Clone, TypeInfo, Default, Encode, Decode)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] -pub struct AppExtrinsic { - pub app_id: AppId, - pub data: Vec, -} -#[cfg(feature = "substrate")] -impl From> for AppExtrinsic -where - A: Encode, - C: Encode, - S: Encode, - E: sp_runtime::traits::SignedExtension + crate::GetAppId, -{ - fn from(ue: sp_runtime::generic::UncheckedExtrinsic) -> Self { - let app_id = ue - .signature - .as_ref() - .map(|(_, _, extra)| extra.app_id()) - .unwrap_or_default(); - let data = ue.encode(); - - Self { app_id, data } - } -} - -impl GetAppId for AppExtrinsic { - fn app_id(&self) -> AppId { - self.app_id - } -} - -#[derive( - Clone, - Copy, - PartialEq, - Eq, - PartialOrd, - Ord, - Add, - From, - Deref, - TypeInfo, - Encode, - Decode, - Display, - Into, - Default, - MaxEncodedLen, -)] -#[cfg_attr(feature = "substrate", derive(sp_debug_derive::RuntimeDebug))] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[cfg_attr(all(feature = "std", not(feature = "substrate")), derive(Debug))] -pub struct AppId(#[codec(compact)] pub u32); - -impl num_traits::Zero 
for AppId { - fn zero() -> Self { - AppId(num_traits::Zero::zero()) - } - - fn is_zero(&self) -> bool { - self.0.is_zero() - } -} - -impl From> for AppExtrinsic { - #[inline] - fn from(data: Vec) -> Self { - Self { - data, - app_id: <_>::default(), - } - } -} - -/// Strong type for `BlockLength::cols` -#[derive( - Clone, - Copy, - Debug, - From, - Into, - Add, - Mul, - Display, - PartialEq, - Eq, - Encode, - Decode, - TypeInfo, - PartialOrd, - Ord, - Constructor, - MaxEncodedLen, -)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[mul(forward)] -pub struct BlockLengthColumns(#[codec(compact)] pub u32); - -impl BlockLengthColumns { - #[inline] - pub fn as_usize(&self) -> usize { - self.0 as usize - } -} - -/// Strong type for `BlockLength::rows` -#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive( - Clone, - Copy, - Debug, - From, - Into, - Add, - Mul, - Display, - PartialEq, - Eq, - Encode, - Decode, - TypeInfo, - PartialOrd, - Ord, - Constructor, - MaxEncodedLen, -)] -#[mul(forward)] -pub struct BlockLengthRows(#[codec(compact)] pub u32); - -impl BlockLengthRows { - #[inline] - pub fn as_usize(&self) -> usize { - self.0 as usize - } -} From 8ddf54cf388cc5a8fb1b8e1146284c49672de5fe Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 24 Jul 2023 12:32:26 +0200 Subject: [PATCH 84/87] Fix deps for WASM built --- Cargo.lock | 247 ++++++++++------------------- core/Cargo.toml | 2 + core/src/asdr.rs | 4 +- core/src/data_proof.rs | 6 +- core/src/header/mod.rs | 15 +- core/src/keccak256.rs | 5 +- core/src/sha2.rs | 5 +- core/src/traits.rs | 13 +- core/src/traits/extended_header.rs | 24 ++- kate/Cargo.toml | 26 +-- kate/recovery/Cargo.toml | 13 +- kate/recovery/src/com.rs | 29 +++- kate/recovery/src/commitments.rs | 33 ++-- kate/recovery/src/matrix.rs | 1 + kate/recovery/src/proof.rs | 4 + kate/src/com.rs | 57 ++++--- kate/src/lib.rs | 9 +- nomad/base/src/lib.rs | 3 +- nomad/core/src/nomad_message.rs | 3 +- nomad/core/src/state.rs | 
3 +- nomad/core/src/update.rs | 3 +- nomad/core/src/update_v2.rs | 5 +- nomad/merkle/Cargo.toml | 2 + nomad/merkle/src/lib.rs | 2 +- nomad/merkle/src/light.rs | 2 +- nomad/signature/Cargo.toml | 2 +- 26 files changed, 249 insertions(+), 269 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5196d5ff..ec3aa68a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -122,9 +122,9 @@ checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" [[package]] name = "anyhow" -version = "1.0.71" +version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" [[package]] name = "approx" @@ -279,13 +279,13 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "async-trait" -version = "0.1.71" +version = "0.1.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", ] [[package]] @@ -323,6 +323,7 @@ dependencies = [ "serde_json", "sp-arithmetic", "sp-core", + "sp-io", "sp-runtime", "sp-runtime-interface", "sp-std", @@ -657,18 +658,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.11" +version = "4.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1640e5cc7fb47dbb8338fd471b105e7ed6c3cb2aeb00c2e067127ffd3764a05d" +checksum = "5b0827b011f6f8ab38590295339817b0d26f344aa4932c3ced71b45b0c54b4a9" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.3.11" +version = "4.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98c59138d527eeaf9b53f35a77fcc1fad9d883116070c63d5de1c7dc7b00c72b" +checksum = 
"9441b403be87be858db6a23edb493e7f694761acdc3343d5a0fcaafd304cbc9e" dependencies = [ "anstyle", "clap_lex", @@ -816,7 +817,6 @@ dependencies = [ "num-traits", "once_cell", "oorandom", - "plotters", "rayon", "regex", "serde", @@ -1115,9 +1115,9 @@ dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30" +checksum = "304e6508efa593091e97a9abbc10f90aa7ca635b6d2784feff3c89d41dd12272" [[package]] name = "ecdsa" @@ -1367,12 +1367,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.9.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" [[package]] name = "ff" @@ -1545,7 +1542,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", ] [[package]] @@ -1866,15 +1863,6 @@ dependencies = [ "generic-array 0.14.7", ] -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - [[package]] name = "integer-sqrt" version = "0.1.5" @@ -1890,17 +1878,6 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "59ce5ef949d49ee85593fc4d3f3f95ad61657076395cbbce23e2121fc5542074" -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi", - "libc", 
- "windows-sys 0.48.0", -] - [[package]] name = "is-terminal" version = "0.4.9" @@ -1908,7 +1885,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi", - "rustix 0.38.3", + "rustix 0.38.4", "windows-sys 0.48.0", ] @@ -1932,9 +1909,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" @@ -2090,12 +2067,6 @@ version = "0.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d" -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - [[package]] name = "linux-raw-sys" version = "0.4.3" @@ -2286,6 +2257,7 @@ dependencies = [ name = "nomad-merkle" version = "0.1.2" dependencies = [ + "avail-core", "ethers-core", "frame-support", "hex-literal", @@ -2374,9 +2346,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -2464,9 +2436,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.6.3" +version = "3.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "756d439303e94fae44f288ba881ad29670c65b0c4b0e05674ca81061bb65f2c5" +checksum = 
"dd8e946cc0cc711189c0b0249fb8b599cbeeab9784d83c415719368bb8d4ac64" dependencies = [ "arrayvec 0.7.4", "bitvec 1.0.1", @@ -2479,9 +2451,9 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.6.3" +version = "3.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d884d78fcf214d70b1e239fcd1c6e5e95aa3be1881918da2e488cc946c7a476" +checksum = "2a296c3079b5fefbc499e1de58dc26c09b1b9a5952d26694ee89f04a43ebbb3e" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -2531,9 +2503,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4b27ab7be369122c218afc2079489cdcb4b517c0a3fc386ff11e1fedfcc2b35" +checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "pbkdf2" @@ -2578,34 +2550,6 @@ dependencies = [ "spki", ] -[[package]] -name = "plotters" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" -dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" - -[[package]] -name = "plotters-svg" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" -dependencies = [ - "plotters-backend", -] - [[package]] name = "poly-multiproof" version = "0.0.1" @@ -2677,9 +2621,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.64" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" 
+checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] @@ -2721,9 +2665,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.29" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" +checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" dependencies = [ "proc-macro2", ] @@ -2859,22 +2803,22 @@ dependencies = [ [[package]] name = "ref-cast" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1641819477c319ef452a075ac34a4be92eb9ba09f6841f62d594d50fdcf0bf6b" +checksum = "61ef7e18e8841942ddb1cf845054f8008410030a3997875d9e49b7a363063df1" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68bf53dad9b6086826722cdc99140793afd9f62faa14a1ad07eb4f955e7a7216" +checksum = "2dfaf0c85b766276c797f3791f5bc6d5bd116b41d53049af2789666b0c0bc9fa" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", ] [[package]] @@ -2885,7 +2829,7 @@ checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.2", + "regex-automata 0.3.3", "regex-syntax 0.7.4", ] @@ -2900,9 +2844,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" +checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" dependencies = [ "aho-corasick", "memchr", @@ -2997,7 +2941,7 @@ checksum = "6380889b07a03b5ecf1d44dc9ede6fd2145d84b502a2a9ca0b03c48e0cc3220f" 
dependencies = [ "bitflags 1.3.2", "errno 0.2.8", - "io-lifetimes 0.7.5", + "io-lifetimes", "libc", "linux-raw-sys 0.0.46", "windows-sys 0.42.0", @@ -3005,23 +2949,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" -dependencies = [ - "bitflags 1.3.2", - "errno 0.3.1", - "io-lifetimes 1.0.11", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.3" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" +checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" dependencies = [ "bitflags 2.3.3", "errno 0.3.1", @@ -3032,9 +2962,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "rusty-fork" @@ -3050,15 +2980,15 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "safe_arch" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62a7484307bd40f8f7ccbacccac730108f2cae119a3b11c74485b48aa9ea650f" +checksum = "f398075ce1e6a179b46f51bd88d0598b92b00d3551f1a2d4ac49e771b56ac354" dependencies = [ "bytemuck", ] @@ -3127,9 +3057,9 @@ dependencies = [ [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "scrypt" @@ -3186,35 +3116,35 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" +checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" [[package]] name = "serde" -version = "1.0.171" +version = "1.0.174" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" +checksum = "3b88756493a5bd5e5395d53baa70b194b05764ab85b59e43e4b8f4e1192fa9b1" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.171" +version = "1.0.174" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" +checksum = "6e5c3a298c7f978e53536f95a63bdc4c4a64550582f31a0359a9afda6aede62e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", ] [[package]] name = "serde_json" -version = "1.0.100" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" +checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" dependencies = [ "itoa", "ryu", @@ -3843,9 +3773,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.25" +version = "2.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" +checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" dependencies = [ "proc-macro2", "quote", @@ -3860,21 
+3790,20 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.8" +version = "0.12.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1c7f239eb94671427157bd93b3694320f3668d4e1eff08c7285366fd777fac" +checksum = "1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e" [[package]] name = "tempfile" -version = "3.6.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +checksum = "5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998" dependencies = [ - "autocfg", "cfg-if", "fastrand", "redox_syscall", - "rustix 0.37.23", + "rustix 0.38.4", "windows-sys 0.48.0", ] @@ -3893,22 +3822,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", ] [[package]] @@ -4011,9 +3940,9 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.12" +version = "0.19.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c500344a19072298cd05a7224b3c0c629348b78692bf48466c5238656e315a78" +checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" dependencies = [ "indexmap 
2.0.0", "toml_datetime", @@ -4040,7 +3969,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", ] [[package]] @@ -4162,9 +4091,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" [[package]] name = "unicode-normalization" @@ -4261,7 +4190,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", "wasm-bindgen-shared", ] @@ -4283,7 +4212,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4458,21 +4387,11 @@ dependencies = [ "wasmparser", ] -[[package]] -name = "web-sys" -version = "0.3.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - [[package]] name = "wide" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40018623e2dba2602a9790faba8d33f2ebdebf4b86561b83928db735f8784728" +checksum = "aa469ffa65ef7e0ba0f164183697b89b854253fd31aeb92358b7b6155177d62f" dependencies = [ "bytemuck", "safe_arch", @@ -4686,9 +4605,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.9" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81a2094c43cc94775293eaa0e499fbc30048a6d824ac82c0351a8c0bf9112529" +checksum = 
"81fac9742fd1ad1bd9643b991319f72dd031016d44b77039a26977eb667141e7" dependencies = [ "memchr", ] @@ -4719,5 +4638,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.25", + "syn 2.0.27", ] diff --git a/core/Cargo.toml b/core/Cargo.toml index 84e8258c..74ad2dc8 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -20,6 +20,7 @@ codec = { package = "parity-scale-codec", version = "3", default-features = fals scale-info = { version = "2", default-features = false, features = ["derive"] } sp-arithmetic = { version = "*", default-features = false } sp-core = { version = "*", default-features = false } +sp-io = { version = "*", default-features = false } sp-std = { version = "*", default-features = false } sp-trie = { version = "*", default-features = false } @@ -42,6 +43,7 @@ std = [ "scale-info/std", "log/std", "sp-core/std", + "sp-io/std", "sp-std/std", "sp-trie/std", "sp-arithmetic/std", diff --git a/core/src/asdr.rs b/core/src/asdr.rs index b66c2066..9f9e2bab 100644 --- a/core/src/asdr.rs +++ b/core/src/asdr.rs @@ -19,7 +19,6 @@ use codec::{Compact, Decode, Encode, EncodeLike, Error, Input}; use scale_info::{build::Fields, meta_type, Path, StaticTypeInfo, Type, TypeInfo, TypeParameter}; -use sp_core::blake2_256; #[cfg(feature = "runtime")] use sp_runtime::{ generic::CheckedExtrinsic, @@ -36,6 +35,8 @@ use sp_std::{ vec::Vec, }; +use sp_io::hashing::blake2_256; + use crate::{traits::GetAppId, AppId, OpaqueExtrinsic}; /// Current version of the [`UncheckedExtrinsic`] encoded format. 
@@ -537,7 +538,6 @@ where #[cfg(test)] mod tests { - use sp_core::blake2_256; use sp_runtime::{ codec::{Decode, Encode}, testing::TestSignature as TestSig, diff --git a/core/src/data_proof.rs b/core/src/data_proof.rs index b1607ec5..a59682b4 100644 --- a/core/src/data_proof.rs +++ b/core/src/data_proof.rs @@ -63,9 +63,10 @@ where fn try_from(merkle_proof: &MerkleProof) -> Result { use crate::ensure; - use sp_core::keccak_256; use DataProofTryFromError::*; + use sp_io::hashing::keccak_256; + let root = <[u8; 32]>::try_from(merkle_proof.root.as_ref()) .map_err(|_| InvalidRoot)? .into(); @@ -102,7 +103,8 @@ where mod test { use crate::Keccak256; use hex_literal::hex; - use sp_core::{keccak_256, H512}; + use sp_core::H512; + use sp_io::hashing::keccak_256; use sp_std::cmp::min; use test_case::test_case; diff --git a/core/src/header/mod.rs b/core/src/header/mod.rs index 3ec17f13..6798deb4 100644 --- a/core/src/header/mod.rs +++ b/core/src/header/mod.rs @@ -243,16 +243,15 @@ where } } -impl ExtendedHeader for Header { - type Hash = ::Output; - type Number = N; - +impl + ExtendedHeader::Output, Digest, HeaderExtension> for Header +{ /// Creates new header. 
fn new( - n: Self::Number, - extrinsics: Self::Hash, - state: Self::Hash, - parent: Self::Hash, + n: N, + extrinsics: ::Output, + state: ::Output, + parent: ::Output, digest: Digest, extension: HeaderExtension, ) -> Self { diff --git a/core/src/keccak256.rs b/core/src/keccak256.rs index 800b3b88..5df84883 100644 --- a/core/src/keccak256.rs +++ b/core/src/keccak256.rs @@ -1,5 +1,5 @@ use scale_info::TypeInfo; -use sp_core::{keccak_256, Hasher, RuntimeDebug}; +use sp_core::{Hasher, RuntimeDebug}; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -15,7 +15,8 @@ impl Hasher for Keccak256 { const LENGTH: usize = 32; fn hash(s: &[u8]) -> Self::Out { - keccak_256(s).into() + let keccak_out = sp_io::hashing::keccak_256(s); + keccak_out.into() } } diff --git a/core/src/sha2.rs b/core/src/sha2.rs index 9b16040e..f22519e3 100644 --- a/core/src/sha2.rs +++ b/core/src/sha2.rs @@ -1,7 +1,7 @@ use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{hashing::sha2_256, Hasher, RuntimeDebug}; +use sp_core::{Hasher, RuntimeDebug}; /// Sha2 256 wrapper which supports `beefy-merkle-tree::Hasher`. #[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] @@ -14,7 +14,8 @@ impl Hasher for ShaTwo256 { const LENGTH: usize = 32; fn hash(s: &[u8]) -> Self::Out { - sha2_256(s).into() + let sha2_out = sp_io::hashing::sha2_256(s); + sha2_out.into() } } diff --git a/core/src/traits.rs b/core/src/traits.rs index b58f127e..3be72d0f 100644 --- a/core/src/traits.rs +++ b/core/src/traits.rs @@ -1,5 +1,6 @@ use codec::{Codec, Decode}; use sp_arithmetic::traits::AtLeast32BitUnsigned; +use sp_arithmetic::traits::Saturating; use sp_core::U256; use sp_std::{convert::TryFrom, fmt::Debug, hash::Hash as StdHash}; @@ -11,12 +12,20 @@ pub use extended_header::*; /// Header block number trait. 
pub trait HeaderBlockNumber: - AtLeast32BitUnsigned + Codec + StdHash + Copy + Into + TryFrom + Debug + Eq + AtLeast32BitUnsigned + Codec + StdHash + Copy + Into + TryFrom + Debug + Eq + Saturating { } impl< - T: AtLeast32BitUnsigned + Codec + StdHash + Copy + Into + TryFrom + Debug + Eq, + T: AtLeast32BitUnsigned + + Codec + + StdHash + + Copy + + Into + + TryFrom + + Debug + + Eq + + Saturating, > HeaderBlockNumber for T { } diff --git a/core/src/traits/extended_header.rs b/core/src/traits/extended_header.rs index 43f7629e..82838750 100644 --- a/core/src/traits/extended_header.rs +++ b/core/src/traits/extended_header.rs @@ -1,22 +1,16 @@ /// Extended header access -pub trait ExtendedHeader { - /// Header number. - type Number; - - /// Header hash type - type Hash; - +pub trait ExtendedHeader { /// Creates new header. fn new( - number: Self::Number, - extrinsics_root: Self::Hash, - state_root: Self::Hash, - parent_hash: Self::Hash, - digest: D, - extension: E, + number: Number, + extrinsics_root: Hash, + state_root: Hash, + parent_hash: Hash, + digest: Digest, + extension: Extension, ) -> Self; - fn extension(&self) -> &E; + fn extension(&self) -> &Extension; - fn set_extension(&mut self, extension: E); + fn set_extension(&mut self, extension: Extension); } diff --git a/kate/Cargo.toml b/kate/Cargo.toml index 1e255123..1715bcd8 100644 --- a/kate/Cargo.toml +++ b/kate/Cargo.toml @@ -9,36 +9,38 @@ license = "Apache-2.0" [dependencies] # Pending to review -poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", default-features = false, tag = "v0.0.1" } +poly-multiproof = { git = "https://github.com/availproject/poly-multiproof", default-features = false, tag = "v0.0.1", optional = true } # Internal avail-core = { path = "../core", default-features = false, feature = "runtime" } -dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2", optional = true } kate-recovery = { path = "recovery", default-features = 
false } +dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2", optional = true } + # Parity & Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } sp-arithmetic = { version = "*", default-features = false } -sp-core = { version = "*", default-features = false, optional = true } +sp-core = { version = "*", default-features = false } # 3rd-party derive_more = { version = "0.99.17", default-features = false, features = ["constructor"] } -dusk-bytes = { version = "0.1.6", default-features = false } +static_assertions = "1.1.0" +thiserror-no-std = "2.0.2" + +dusk-bytes = { version = "0.1.6", default-features = false, optional = true } hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } hex-literal = { version = "0.3.4", optional = true } log = { version = "0.4.8", optional = true } -nalgebra = { version = "0.32.2", default-features = false } +nalgebra = { version = "0.32.2", default-features = false, optional = true } once_cell = { version = "1.8.0", optional = true } -rand = { version = "0.8.5", default-features = false, optional = true } +rand = { version = "0.8", default-features = false, features = ["alloc", "small_rng"], optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } rayon = { version = "1.5.2", optional = true } serde = { version = "1", optional = true, features = ["derive"] } serde_json = { version = "1", optional = true } -static_assertions = "1.1.0" -thiserror-no-std = "2.0.2" [dev-dependencies] -criterion = "0.5.1" +criterion = { version = "0.5.1", default-features = false } proptest = "1" serde_json = "1" test-case = "1.2.3" @@ -46,7 +48,10 @@ test-case = "1.2.3" [features] default = ["std"] alloc = ["dusk-plonk/alloc", "nalgebra/alloc"] -parallel = ["rayon"] +parallel = [ + "rayon", + "criterion/rayon", +] std = [ "parallel", @@ -61,6 +66,7 @@ std = [ "rand/std", 
"log", "dusk-plonk/std", + "dusk-bytes", "avail-core/std", "sp-arithmetic/std", "sp-core/std", diff --git a/kate/recovery/Cargo.toml b/kate/recovery/Cargo.toml index 9c84a83e..9b39eaaa 100644 --- a/kate/recovery/Cargo.toml +++ b/kate/recovery/Cargo.toml @@ -8,7 +8,7 @@ license = "Apache-2.0" [dependencies] # Internals avail-core = { path = "../../core", default-features = false } -dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2" } +dusk-plonk = { git = "https://github.com/availproject/plonk.git", tag = "v0.12.0-polygon-2", default-features = false, optional = true } # Substrate codec = { package = "parity-scale-codec", version = "3", default-features = false, features = ["derive"] } @@ -17,13 +17,14 @@ sp-std = { version = "*", default-features = false } # 3rd-parties derive_more = "0.99.17" -dusk-bytes = { version = "0.1.6", default-features = false } +static_assertions = "1.1.0" +thiserror-no-std = "2.0.2" + +dusk-bytes = { version = "0.1.6", default-features = false, optional = true } once_cell = { version = "1.9.0", optional = true } -rand = { version = "0.8.5", default-features = false, features = ["alloc", "small_rng"], optional = true } +rand = { version = "0.8", default-features = false, features = ["alloc", "small_rng"], optional = true } rand_chacha = { version = "0.3", default-features = false, optional = true } serde = { version = "1", optional = true, features = ["derive"] } -static_assertions = "1.1.0" -thiserror-no-std = "2.0.2" [dev-dependencies] hex = "0.4" @@ -39,4 +40,6 @@ std = [ "avail-core/std", "rand/std", "rand_chacha/std", + "dusk-bytes", + "dusk-plonk/std", ] diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index a77f4972..2808799f 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -1,23 +1,31 @@ -use crate::{data, matrix}; -use core::{convert::TryFrom, num::TryFromIntError, ops::Range}; +use crate::matrix; +use core::{num::TryFromIntError, ops::Range}; + +use 
avail_core::{data_lookup::Error as DataLookupError, AppId, DataLookup}; -use avail_core::{data_lookup::Error as DataLookupError, ensure, AppId, DataLookup}; -use dusk_bytes::Serializable as _; -use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; -use sp_arithmetic::{traits::SaturatedConversion as _, Percent}; use sp_std::prelude::*; use thiserror_no_std::Error; +#[cfg(feature = "std")] +use crate::data; #[cfg(feature = "std")] use crate::{config, sparse_slice_read::SparseSliceRead}; #[cfg(feature = "std")] +use avail_core::ensure; +#[cfg(feature = "std")] use codec::{Decode, IoReader}; #[cfg(feature = "std")] +use dusk_bytes::Serializable as _; +#[cfg(feature = "std")] +use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; +#[cfg(feature = "std")] +use sp_arithmetic::{traits::SaturatedConversion as _, Percent}; +#[cfg(feature = "std")] use static_assertions::{const_assert, const_assert_ne}; #[cfg(feature = "std")] use std::{ collections::{HashMap, HashSet}, - convert::TryInto, + convert::{TryFrom, TryInto}, iter::FromIterator, }; @@ -387,7 +395,7 @@ pub fn unflatten_padded_data( // This module is taken from https://gist.github.com/itzmeanjan/4acf9338d9233e79cfbee5d311e7a0b4 // which I wrote few months back when exploring polynomial based erasure coding technique ! 
- +#[cfg(feature = "std")] fn reconstruct_poly( // domain I'm working with // all (i)ffts to be performed on it @@ -434,6 +442,7 @@ fn reconstruct_poly( Ok(reconstructed_data) } +#[cfg(feature = "std")] fn expand_root_of_unity(eval_domain: EvaluationDomain) -> Vec { let root_of_unity = eval_domain.group_gen; let mut roots: Vec = vec![BlsScalar::one(), root_of_unity]; @@ -445,6 +454,7 @@ fn expand_root_of_unity(eval_domain: EvaluationDomain) -> Vec { roots } +#[cfg(feature = "std")] fn zero_poly_fn( eval_domain: EvaluationDomain, missing_indices: &[u64], @@ -476,6 +486,7 @@ fn zero_poly_fn( } // in-place shifting +#[cfg(feature = "std")] fn shift_poly(poly: &mut [BlsScalar]) { // primitive root of unity let shift_factor = BlsScalar::from(5); @@ -492,6 +503,7 @@ fn shift_poly(poly: &mut [BlsScalar]) { } // in-place unshifting +#[cfg(feature = "std")] fn unshift_poly(poly: &mut [BlsScalar]) { // primitive root of unity let shift_factor = BlsScalar::from(5); @@ -513,6 +525,7 @@ pub type AppDataRange = Range; // // performing one round of ifft should reveal original data which were // coded together +#[cfg(feature = "std")] pub fn reconstruct_column( row_count: u32, cells: &[data::DataCell], diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 544bfc29..0fe1d278 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -1,14 +1,8 @@ -use avail_core::{ensure, AppId, DataLookup}; use core::{ array::TryFromSliceError, convert::{TryFrom, TryInto}, num::TryFromIntError, }; -use dusk_bytes::Serializable; -use dusk_plonk::{ - fft::{EvaluationDomain, Evaluations}, - prelude::{BlsScalar, CommitKey, PublicParameters}, -}; use sp_std::prelude::*; use thiserror_no_std::Error; @@ -18,6 +12,16 @@ use crate::{ matrix, }; +#[cfg(feature = "std")] +use avail_core::{ensure, AppId, DataLookup}; +#[cfg(feature = "std")] +use dusk_bytes::Serializable; +#[cfg(feature = "std")] +use dusk_plonk::{ + fft::{EvaluationDomain, 
Evaluations}, + prelude::{BlsScalar, CommitKey, PublicParameters}, +}; + #[derive(Error, Debug)] pub enum Error { #[error("Scalar slice error: {0}")] @@ -30,8 +34,8 @@ pub enum Error { BadScalarData, #[error("Bad data len")] BadLen, - #[error("Plonk error: {0}")] - PlonkError(#[from] dusk_plonk::error::Error), + #[error("Plonk error")] + PlonkError, #[error("Bad commitments data")] BadCommitmentsData, #[error("Bad rows data")] @@ -45,13 +49,13 @@ impl std::error::Error for Error { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match &self { Self::SliceError(slice) => Some(slice), - Self::PlonkError(plonk) => Some(plonk), Self::IntError(try_int) => Some(try_int), _ => None, } } } +#[cfg(feature = "std")] impl From for Error { fn from(e: dusk_bytes::Error) -> Self { match e { @@ -62,11 +66,20 @@ impl From for Error { } } +#[cfg(feature = "std")] +impl From for Error { + fn from(_: dusk_plonk::error::Error) -> Self { + Self::PlonkError + } +} + +#[cfg(feature = "std")] fn try_into_scalar(chunk: &[u8]) -> Result { let sized_chunk = <[u8; config::CHUNK_SIZE]>::try_from(chunk)?; BlsScalar::from_bytes(&sized_chunk).map_err(From::from) } +#[cfg(feature = "std")] fn try_into_scalars(data: &[u8]) -> Result, Error> { let chunks = data.chunks_exact(config::CHUNK_SIZE); ensure!(chunks.remainder().is_empty(), Error::BadLen); @@ -88,6 +101,7 @@ fn try_into_scalars(data: &[u8]) -> Result, Error> { /// * `index` - Application data index /// * `dimensions` - Extended matrix dimensions /// * `app_id` - Application ID +#[cfg(feature = "std")] pub fn verify_equality( public_params: &PublicParameters, commitments: &[[u8; COMMITMENT_SIZE]], @@ -128,6 +142,7 @@ pub fn verify_equality( Ok((verified, app_rows)) } +#[cfg(feature = "std")] fn row_index_commitment_verification( prover_key: &CommitKey, domain: EvaluationDomain, diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index a3ab97cb..8242885d 100644 --- a/kate/recovery/src/matrix.rs +++ 
b/kate/recovery/src/matrix.rs @@ -321,6 +321,7 @@ impl Dimensions { } /// Generates cell positions for given block partition + #[cfg(feature = "std")] pub fn iter_extended_partition_positions( &self, partition: &Partition, diff --git a/kate/recovery/src/proof.rs b/kate/recovery/src/proof.rs index 17acf619..1e867d5e 100644 --- a/kate/recovery/src/proof.rs +++ b/kate/recovery/src/proof.rs @@ -1,4 +1,6 @@ +#[cfg(feature = "std")] use dusk_bytes::Serializable; +#[cfg(feature = "std")] use dusk_plonk::{ bls12_381::G1Affine, commitment_scheme::kzg10::{commitment::Commitment, proof::Proof, PublicParameters}, @@ -24,6 +26,7 @@ pub enum Error { #[cfg(feature = "std")] impl std::error::Error for Error {} +#[cfg(feature = "std")] impl From for Error { fn from(_: dusk_bytes::Error) -> Self { Error::InvalidData @@ -31,6 +34,7 @@ impl From for Error { } /// Verifies proof for given cell +#[cfg(feature = "std")] pub fn verify( public_parameters: &PublicParameters, dimensions: Dimensions, diff --git a/kate/src/com.rs b/kate/src/com.rs index 158300e5..b7a6adb0 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -8,7 +8,7 @@ use std::{ use avail_core::{ data_lookup::Error as DataLookupError, ensure, AppExtrinsic, AppId, BlockLengthColumns, - BlockLengthRows, + BlockLengthRows, DataLookup, }; use codec::Encode; use derive_more::Constructor; @@ -506,6 +506,39 @@ fn commit( prover_key.commit(&poly).map_err(Error::from) } +#[cfg(feature = "std")] +pub fn scalars_to_app_rows( + id: AppId, + lookup: &DataLookup, + dimensions: Dimensions, + matrix: &DMatrix, +) -> Vec>> { + let app_rows = kate_recovery::com::app_specific_rows(lookup, dimensions, id); + dimensions + .iter_extended_rows() + .map(|i| { + app_rows.iter().find(|&&row| row == i).map(|_| { + let row = get_row(&matrix, i as usize); + row.iter() + .flat_map(BlsScalar::to_bytes) + .collect::>() + }) + }) + .collect() +} + +#[cfg(feature = "std")] +pub fn scalars_to_rows(rows: &[u32], data: &DMatrix) -> Vec> { + rows.iter() + 
.map(|i| { + let row = get_row(data, *i as usize); + row.iter() + .flat_map(BlsScalar::to_bytes) + .collect::>() + }) + .collect::>>() +} + #[cfg(test)] mod tests { use avail_core::DataLookup; @@ -539,27 +572,6 @@ mod tests { }; const TCHUNK: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) }; - - fn scalars_to_app_rows( - id: AppId, - lookup: &DataLookup, - dimensions: Dimensions, - matrix: &DMatrix, - ) -> Vec>> { - let app_rows = app_specific_rows(lookup, dimensions, id); - dimensions - .iter_extended_rows() - .map(|i| { - app_rows.iter().find(|&&row| row == i).map(|_| { - let row = get_row(&matrix, i as usize); - row.iter() - .flat_map(BlsScalar::to_bytes) - .collect::>() - }) - }) - .collect() - } - #[test_case(0, 256, 256 => (1, 4, 32) ; "block size zero")] #[test_case(11, 256, 256 => (1, 4, 32) ; "below minimum block size")] #[test_case(300, 256, 256 => (1, 16, 32) ; "regular case")] @@ -1060,6 +1072,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat } fn padded_len_group(lens: &[u32], chunk_size: u32) -> u32 { + let chunk_size = NonZeroU32::new(chunk_size).unwrap(); lens.iter().map(|len| padded_len(*len, chunk_size)).sum() } diff --git a/kate/src/lib.rs b/kate/src/lib.rs index c9cb2180..5e5e7912 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -191,15 +191,15 @@ fn padded_len_of_pad_iec_9797_1(len: u32) -> u32 { /// Calculates the padded len based of initial `len`. 
#[allow(clippy::integer_arithmetic)] -pub fn padded_len(len: u32, chunk_size: u32) -> u32 { +pub fn padded_len(len: u32, chunk_size: NonZeroU32) -> u32 { let iec_9797_1_len = padded_len_of_pad_iec_9797_1(len); const_assert_ne!(DATA_CHUNK_SIZE, 0); debug_assert!( - chunk_size >= DATA_CHUNK_SIZE as u32, + chunk_size.get() >= DATA_CHUNK_SIZE as u32, "`BlockLength.chunk_size` is valid by design .qed" ); - let diff_per_chunk = chunk_size - DATA_CHUNK_SIZE as u32; + let diff_per_chunk = chunk_size.get() - DATA_CHUNK_SIZE as u32; let pad_to_chunk_extra = if diff_per_chunk != 0 { let chunks_count = iec_9797_1_len / DATA_CHUNK_SIZE as u32; chunks_count * diff_per_chunk @@ -210,7 +210,8 @@ pub fn padded_len(len: u32, chunk_size: u32) -> u32 { iec_9797_1_len + pad_to_chunk_extra } -#[derive(Clone, Copy, PartialEq, Eq, Debug, Constructor)] +#[cfg_attr(feature = "std", derive(Debug))] +#[derive(Clone, Copy, PartialEq, Eq, Constructor)] pub struct BlockDimensions { pub rows: BlockLengthRows, pub cols: BlockLengthColumns, diff --git a/nomad/base/src/lib.rs b/nomad/base/src/lib.rs index 0da94e9a..cd98456a 100644 --- a/nomad/base/src/lib.rs +++ b/nomad/base/src/lib.rs @@ -5,14 +5,13 @@ use nomad_core::{home_domain_hash, to_eth_signed_message_hash, NomadState, Signe use nomad_signature::SignatureError; use scale_info::TypeInfo; use sp_core::{H160, H256}; -use sp_runtime::RuntimeDebug; #[cfg(feature = "std")] pub mod testing; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -#[derive(Clone, Copy, Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo, MaxEncodedLen)] +#[derive(Clone, Copy, Encode, Decode, PartialEq, Eq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct NomadBase { pub state: NomadState, diff --git a/nomad/core/src/nomad_message.rs b/nomad/core/src/nomad_message.rs index 1902a3e8..af4915fd 100644 --- a/nomad/core/src/nomad_message.rs +++ b/nomad/core/src/nomad_message.rs @@ -1,14 +1,13 @@ use codec::{Decode, 
Encode}; use scale_info::TypeInfo; use sp_core::{bounded::BoundedVec, Get, H256}; -use sp_runtime::RuntimeDebug; use sp_std::{mem::size_of, vec::Vec}; /// Size of `NomadMessage` fields except `body`. pub const NON_BODY_LENGTH: usize = 3 * size_of::() + 2 * size_of::(); /// A full Nomad message -#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, TypeInfo)] pub struct NomadMessage> { /// 4 SLIP-44 ID pub origin: u32, diff --git a/nomad/core/src/state.rs b/nomad/core/src/state.rs index b5060ed0..289d4656 100644 --- a/nomad/core/src/state.rs +++ b/nomad/core/src/state.rs @@ -1,11 +1,10 @@ use codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; -use sp_runtime::RuntimeDebug; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -#[derive(Clone, Copy, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo, MaxEncodedLen)] +#[derive(Clone, Copy, Encode, Decode, Eq, PartialEq, TypeInfo, MaxEncodedLen)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum NomadState { /// Contract is active diff --git a/nomad/core/src/update.rs b/nomad/core/src/update.rs index 18b984df..3b0a3cd5 100644 --- a/nomad/core/src/update.rs +++ b/nomad/core/src/update.rs @@ -3,8 +3,7 @@ use nomad_signature::{hash_message, Signature, SignatureError}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_core::{H160, H256}; -use sp_runtime::RuntimeDebug; +use sp_core::{RuntimeDebug, H160, H256}; use crate::utils::home_domain_hash; diff --git a/nomad/core/src/update_v2.rs b/nomad/core/src/update_v2.rs index f4dd6e1f..e1cfba14 100644 --- a/nomad/core/src/update_v2.rs +++ b/nomad/core/src/update_v2.rs @@ -6,12 +6,11 @@ use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_core::{H160, H256}; -use sp_runtime::RuntimeDebug; use crate::utils::home_domain_hash; /// Nomad update -#[derive(Clone, Encode, Decode, PartialEq, 
Eq, RuntimeDebug, TypeInfo)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct UpdateV2 { /// The home chain @@ -32,7 +31,7 @@ impl UpdateV2 { } /// A Signed Nomad Update -#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct SignedUpdateV2 { /// The update diff --git a/nomad/merkle/Cargo.toml b/nomad/merkle/Cargo.toml index fc49e9b6..87d3aba2 100644 --- a/nomad/merkle/Cargo.toml +++ b/nomad/merkle/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" [dependencies] # Internal +avail-core = { path = "../../core", default-features = false } nomad-core = { path = "../core", default-features = false } # Substrate @@ -38,6 +39,7 @@ default = ["std"] std = [ "serde", "nomad-core/std", + "avail-core/std", "codec/std", "scale-info/std", "sp-core/std", diff --git a/nomad/merkle/src/lib.rs b/nomad/merkle/src/lib.rs index a9237ead..b5a288d2 100644 --- a/nomad/merkle/src/lib.rs +++ b/nomad/merkle/src/lib.rs @@ -20,7 +20,7 @@ pub mod proof; #[cfg(test)] pub(crate) mod test_utils; -use frame_support::ensure; +use avail_core::ensure; use sp_core::H256; /// Tree depth diff --git a/nomad/merkle/src/light.rs b/nomad/merkle/src/light.rs index ef43f629..eabfd154 100644 --- a/nomad/merkle/src/light.rs +++ b/nomad/merkle/src/light.rs @@ -1,5 +1,5 @@ +use avail_core::ensure; use codec::{Decode, Encode, MaxEncodedLen}; -use frame_support::ensure; use nomad_core::keccak256_concat; use scale_info::TypeInfo; #[cfg(feature = "std")] diff --git a/nomad/signature/Cargo.toml b/nomad/signature/Cargo.toml index 25d6d5e1..b0d51a0c 100644 --- a/nomad/signature/Cargo.toml +++ b/nomad/signature/Cargo.toml @@ -22,7 +22,7 @@ scale-info = { version = "2", default-features = false, features = ["derive"] } sp-core = { version = "*", default-features = false } frame-support = { version = 
"4.0.0-dev", default-features = false } -sp-runtime = { version = "7", default-features = false, optional = true } +sp-runtime = { version = "7", default-features = false } # Eth ethers-core = { version = "1", default-features = false, optional = true } From 32a71b5b77644d8ca2806545f36841d4e671eea6 Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Mon, 24 Jul 2023 13:28:18 +0200 Subject: [PATCH 85/87] `BLOCK_CHUNK_SIZE` as `NonZeroU32` --- Cargo.lock | 1 + core/Cargo.toml | 1 + core/src/constants.rs | 5 ++++- kate/recovery/src/commitments.rs | 16 ++++++---------- kate/src/com.rs | 2 +- 5 files changed, 13 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ec3aa68a..721e8ea7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -328,6 +328,7 @@ dependencies = [ "sp-runtime-interface", "sp-std", "sp-trie", + "static_assertions", "test-case", "thiserror-no-std", ] diff --git a/core/Cargo.toml b/core/Cargo.toml index 74ad2dc8..30bec54b 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -12,6 +12,7 @@ hash256-std-hasher = { version = "0.15.2", default-features = false } hex = { version = "0.4", optional = true, default-features = false, features = ["alloc", "serde"] } log = { version = "0.4.8", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } +static_assertions = "1.1.0" thiserror-no-std = "2.0.2" # Substrate diff --git a/core/src/constants.rs b/core/src/constants.rs index e4b8c462..c31cad26 100644 --- a/core/src/constants.rs +++ b/core/src/constants.rs @@ -1,4 +1,6 @@ +use core::num::NonZeroU32; use sp_arithmetic::Perbill; +use static_assertions::const_assert; pub mod well_known_keys { /// Public params used to generate Kate commitment @@ -9,7 +11,8 @@ pub mod well_known_keys { /// by Operational extrinsics. 
pub const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(90); -pub const BLOCK_CHUNK_SIZE: u32 = 32; +const_assert!(BLOCK_CHUNK_SIZE.get() > 0); +pub const BLOCK_CHUNK_SIZE: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(32) }; /// Money matters. pub mod currency { diff --git a/kate/recovery/src/commitments.rs b/kate/recovery/src/commitments.rs index 0fe1d278..59c8bb55 100644 --- a/kate/recovery/src/commitments.rs +++ b/kate/recovery/src/commitments.rs @@ -1,20 +1,16 @@ -use core::{ - array::TryFromSliceError, - convert::{TryFrom, TryInto}, - num::TryFromIntError, -}; +use core::{array::TryFromSliceError, convert::TryInto, num::TryFromIntError}; use sp_std::prelude::*; use thiserror_no_std::Error; -use crate::{ - com, - config::{self, COMMITMENT_SIZE}, - matrix, -}; +use crate::config::COMMITMENT_SIZE; +#[cfg(feature = "std")] +use crate::{com, config, matrix}; #[cfg(feature = "std")] use avail_core::{ensure, AppId, DataLookup}; #[cfg(feature = "std")] +use core::convert::TryFrom; +#[cfg(feature = "std")] use dusk_bytes::Serializable; #[cfg(feature = "std")] use dusk_plonk::{ diff --git a/kate/src/com.rs b/kate/src/com.rs index b7a6adb0..cb85178c 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -518,7 +518,7 @@ pub fn scalars_to_app_rows( .iter_extended_rows() .map(|i| { app_rows.iter().find(|&&row| row == i).map(|_| { - let row = get_row(&matrix, i as usize); + let row = get_row(matrix, i as usize); row.iter() .flat_map(BlsScalar::to_bytes) .collect::>() From c745a8b6e7fa2da580db32e31af88a53da1e78cf Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Tue, 25 Jul 2023 13:06:00 +0200 Subject: [PATCH 86/87] Minor fixes required by LC --- core/src/data_lookup/mod.rs | 2 +- kate/recovery/src/com.rs | 2 +- kate/recovery/src/matrix.rs | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/data_lookup/mod.rs b/core/src/data_lookup/mod.rs index 2375e61b..d6eb6572 100644 --- a/core/src/data_lookup/mod.rs +++ b/core/src/data_lookup/mod.rs 
@@ -10,7 +10,7 @@ use serde::{Deserialize, Serialize}; use crate::{ensure, AppId}; -mod compact; +pub mod compact; use compact::CompactDataLookup; pub type DataLookupRange = Range; diff --git a/kate/recovery/src/com.rs b/kate/recovery/src/com.rs index 2808799f..0934e8b2 100644 --- a/kate/recovery/src/com.rs +++ b/kate/recovery/src/com.rs @@ -19,7 +19,7 @@ use dusk_bytes::Serializable as _; #[cfg(feature = "std")] use dusk_plonk::{fft::EvaluationDomain, prelude::BlsScalar}; #[cfg(feature = "std")] -use sp_arithmetic::{traits::SaturatedConversion as _, Percent}; +pub use sp_arithmetic::{traits::SaturatedConversion as _, Percent}; #[cfg(feature = "std")] use static_assertions::{const_assert, const_assert_ne}; #[cfg(feature = "std")] diff --git a/kate/recovery/src/matrix.rs b/kate/recovery/src/matrix.rs index 8242885d..b86fbb00 100644 --- a/kate/recovery/src/matrix.rs +++ b/kate/recovery/src/matrix.rs @@ -7,6 +7,7 @@ use core::{ }; use derive_more::Constructor; use sp_std::prelude::*; +use sp_std::vec; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; From 70e15d157c1c8a7073cef97b1b2b69c2957f666b Mon Sep 17 00:00:00 2001 From: fmiguelgarcia Date: Thu, 27 Jul 2023 11:30:14 +0200 Subject: [PATCH 87/87] `BlockDimensions` improves its invariant --- kate/src/com.rs | 22 ++++++++++------------ kate/src/lib.rs | 44 +++++++++++++++++++++++++++++++++++--------- 2 files changed, 45 insertions(+), 21 deletions(-) diff --git a/kate/src/com.rs b/kate/src/com.rs index cb85178c..4a513143 100644 --- a/kate/src/com.rs +++ b/kate/src/com.rs @@ -156,7 +156,7 @@ pub fn flatten_and_pad_block( let block_dims = get_block_dimensions(padded_block_len, max_rows, max_cols, chunk_size)?; let chunk_size = usize::try_from(NonZeroU32::get(block_dims.chunk_size)).expect(U32_USIZE_ERR); - let block_dims_size = block_dims.size().ok_or(Error::BlockTooBig)?; + let block_dims_size = block_dims.size(); ensure!(padded_block.len() <= block_dims_size, Error::BlockTooBig); let mut rng = 
ChaChaRng::from_seed(rng_seed); @@ -190,8 +190,9 @@ pub fn get_block_dimensions( max_cols: BlockLengthColumns, chunk_size: NonZeroU32, ) -> Result { - let max_block_dimensions = BlockDimensions::new(max_rows, max_cols, chunk_size); - let max_block_dimensions_size = max_block_dimensions.size().ok_or(Error::BlockTooBig)?; + let max_block_dimensions = + BlockDimensions::new(max_rows, max_cols, chunk_size).ok_or(Error::BlockTooBig)?; + let max_block_dimensions_size = max_block_dimensions.size(); let block_size = usize::try_from(block_size)?; ensure!(block_size <= max_block_dimensions_size, Error::BlockTooBig); @@ -218,11 +219,7 @@ pub fn get_block_dimensions( (BlockLengthColumns(total_cells), BlockLengthRows(1)) }; - Ok(BlockDimensions { - cols, - rows, - chunk_size, - }) + BlockDimensions::new(rows, cols, chunk_size).ok_or(Error::BlockTooBig) } #[inline] @@ -624,7 +621,8 @@ mod tests { .expect("Invalid Expected result"); let expected = DMatrix::from_iterator(4, 4, expected.into_iter()); - let block_dims = BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), TCHUNK); + let block_dims = + BlockDimensions::new(BlockLengthRows(2), BlockLengthColumns(4), TCHUNK).unwrap(); let chunk_size = usize::try_from(block_dims.chunk_size.get()).unwrap(); let block = (0..=247) .collect::>() @@ -662,7 +660,7 @@ mod tests { ]; let expected_dims = - BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(16), TCHUNK); + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(16), TCHUNK).unwrap(); let (layout, data, dims) = flatten_and_pad_block( BlockLengthRows(128), BlockLengthColumns(256), @@ -883,7 +881,7 @@ mod tests { assert_eq!( dimensions, - BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK), + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK).unwrap(), ); let expected_commitments = 
hex!("9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d9046c691ce4c7ba93c9860746d6ff3dfb5560e119f1eac26aa9a10b6fe29d5c8e2b90f23e2ef3a7a950965b08035470d"); assert_eq!(commitments, expected_commitments); @@ -1180,7 +1178,7 @@ Let's see how this gets encoded and then reconstructed by sampling only some dat }; let proof = build_proof( &public_params, - BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK), + BlockDimensions::new(BlockLengthRows(1), BlockLengthColumns(4), TCHUNK).unwrap(), &ext_m, &[cell], &metrics, diff --git a/kate/src/lib.rs b/kate/src/lib.rs index 5e5e7912..fe936054 100644 --- a/kate/src/lib.rs +++ b/kate/src/lib.rs @@ -6,7 +6,6 @@ use core::{ convert::TryInto, num::{NonZeroU32, TryFromIntError}, }; -use derive_more::Constructor; #[cfg(feature = "std")] pub use dusk_plonk::{commitment_scheme::kzg10::PublicParameters, prelude::BlsScalar}; use kate_recovery::matrix::Dimensions; @@ -211,18 +210,45 @@ pub fn padded_len(len: u32, chunk_size: NonZeroU32) -> u32 { } #[cfg_attr(feature = "std", derive(Debug))] -#[derive(Clone, Copy, PartialEq, Eq, Constructor)] +#[derive(Clone, Copy, PartialEq, Eq)] pub struct BlockDimensions { - pub rows: BlockLengthRows, - pub cols: BlockLengthColumns, - pub chunk_size: NonZeroU32, + rows: BlockLengthRows, + cols: BlockLengthColumns, + chunk_size: NonZeroU32, + size: usize, } impl BlockDimensions { - pub fn size(&self) -> Option { - let rows_cols = self.rows.0.checked_mul(self.cols.0)?; - let rows_cols_chunk = rows_cols.checked_mul(self.chunk_size.get())?; - usize::try_from(rows_cols_chunk).ok() + pub fn new( + rows: BlockLengthRows, + cols: BlockLengthColumns, + chunk_size: NonZeroU32, + ) -> Option { + let rows_cols = rows.0.checked_mul(cols.0)?; + let size_u32 = rows_cols.checked_mul(chunk_size.get())?; + let size = usize::try_from(size_u32).ok()?; + + Some(Self { + rows, + cols, + chunk_size, + size, + }) + } + + #[inline] + pub fn size(&self) -> usize { 
+ self.size + } + + #[inline] + pub fn rows(&self) -> BlockLengthRows { + self.rows + } + + #[inline] + pub fn cols(&self) -> BlockLengthColumns { + self.cols } }