From 764db119407c29c70d62018e1b1bd3c26bae6c1b Mon Sep 17 00:00:00 2001 From: Jim Posen Date: Fri, 11 Oct 2019 16:18:36 +0200 Subject: [PATCH] New crate for compact trie proofs. --- Cargo.toml | 1 + trie-db/src/iter_build.rs | 4 +- trie-db/src/triedb.rs | 2 +- trie-db/src/triedbmut.rs | 3 +- trie-proof/Cargo.toml | 27 + trie-proof/src/lib.rs | 477 ++++++++++++++++++ trie-proof/src/node.rs | 136 +++++ trie-proof/src/reference_codec/mod.rs | 22 + trie-proof/src/reference_codec/util.rs | 142 ++++++ trie-proof/src/reference_codec/with_ext.rs | 238 +++++++++ trie-proof/src/reference_codec/without_ext.rs | 284 +++++++++++ trie-proof/src/tests.rs | 98 ++++ trie-proof/src/util.rs | 122 +++++ 13 files changed, 1552 insertions(+), 4 deletions(-) create mode 100644 trie-proof/Cargo.toml create mode 100644 trie-proof/src/lib.rs create mode 100644 trie-proof/src/node.rs create mode 100644 trie-proof/src/reference_codec/mod.rs create mode 100644 trie-proof/src/reference_codec/util.rs create mode 100644 trie-proof/src/reference_codec/with_ext.rs create mode 100644 trie-proof/src/reference_codec/without_ext.rs create mode 100644 trie-proof/src/tests.rs create mode 100644 trie-proof/src/util.rs diff --git a/Cargo.toml b/Cargo.toml index a974bf7e..444a5f93 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,5 +8,6 @@ members = [ "test-support/trie-standardmap", "test-support/trie-bench", "trie-db", + "trie-proof", "trie-root" ] diff --git a/trie-db/src/iter_build.rs b/trie-db/src/iter_build.rs index cf6f65ee..37224de3 100644 --- a/trie-db/src/iter_build.rs +++ b/trie-db/src/iter_build.rs @@ -38,7 +38,7 @@ macro_rules! exponential_out { type CacheNode = Option>; #[inline(always)] -fn new_vec_slice_buffer() -> [CacheNode; 16] { +fn new_vec_slice_buffer() -> [CacheNode; 16] { exponential_out!(@3, [None, None]) } @@ -314,7 +314,7 @@ pub fn trie_visit(input: I, callback: &mut F) } /// Visitor trait to implement when using `trie_visit`. 
-pub trait ProcessEncodedNode { +pub trait ProcessEncodedNode { /// Function call with prefix, encoded value and a boolean indicating if the /// node is the root for each node of the trie. /// diff --git a/trie-db/src/triedb.rs b/trie-db/src/triedb.rs index b2add430..f6e46860 100644 --- a/trie-db/src/triedb.rs +++ b/trie-db/src/triedb.rs @@ -303,7 +303,7 @@ impl Crumb { } } -/// Iterator for going through all values in the trie. +/// Iterator for going through all values in the trie in pre-order traversal order. pub struct TrieDBIterator<'a, L: TrieLayout> { db: &'a TrieDB<'a, L>, trail: Vec, diff --git a/trie-db/src/triedbmut.rs b/trie-db/src/triedbmut.rs index 592c99cc..9150ff67 100644 --- a/trie-db/src/triedbmut.rs +++ b/trie-db/src/triedbmut.rs @@ -270,7 +270,8 @@ enum Stored { } /// Used to build a collection of child nodes from a collection of `NodeHandle`s -pub enum ChildReference { // `HO` is e.g. `H256`, i.e. the output of a `Hasher` +#[derive(Clone, Copy)] +pub enum ChildReference { // `HO` is e.g. `H256`, i.e. 
the output of a `Hasher` Hash(HO), Inline(HO, usize), // usize is the length of the node data we store in the `H::Out` } diff --git a/trie-proof/Cargo.toml b/trie-proof/Cargo.toml new file mode 100644 index 00000000..c52e5682 --- /dev/null +++ b/trie-proof/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "trie-proof" +version = "0.15.2" +authors = ["Parity Technologies "] +description = "Compact proof generation and verification for Merkle-Patricia trie" +repository = "https://github.com/paritytech/trie" +license = "Apache-2.0" +edition = "2018" + +[dependencies] +hash-db = { path = "../hash-db", default-features = false, version = "0.15.2" } +memory-db = { path = "../memory-db", default-features = false, version = "0.15.2" } +parity-scale-codec = { version = "1.0.3", features = ["derive"] } +trie-db = { path = "../trie-db", default-features = false, version = "0.15.2" } + +[dev-dependencies] +keccak-hasher = { path = "../test-support/keccak-hasher", version = "0.15.2" } +reference-trie = { path = "../test-support/reference-trie", version = "0.15.2" } + +[features] +default = ["std"] +std = [ + "hash-db/std", + "memory-db/std", + "parity-scale-codec/std", + "trie-db/std", +] diff --git a/trie-proof/src/lib.rs b/trie-proof/src/lib.rs new file mode 100644 index 00000000..605b5bb0 --- /dev/null +++ b/trie-proof/src/lib.rs @@ -0,0 +1,477 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! 
Generation and verification of compact proofs for Merkle-Patricia tries. +//! +//! Using this library, it is possible to generate a logarithm-space proof of inclusion or +//! non-inclusion of certain key-value pairs in a trie with a known root. The proof contains +//! information so that the verifier can reconstruct the subset of nodes in the trie required to +//! lookup the keys. The trie nodes are not included in their entirety as data which the verifier +//! can compute for themselves is omitted. In particular, the values of included keys and hashes +//! of other trie nodes in the proof are omitted. +//! +//! The proof is a sequence of the subset of nodes in the trie traversed while performing lookups +//! on all keys. The trie nodes are listed in post-order traversal order with some values and +//! internal hashes omitted. In particular, values on leaf nodes, child references on extension +//! nodes, values on branch nodes corresponding to a key in the statement, and child references on +//! branch nodes corresponding to another node in the proof are all omitted. The proof is verified +//! by iteratively reconstructing the trie nodes using the values proven as part of the statement +//! and the hashes of other reconstructed nodes. Since the nodes in the proof are arranged in +//! post-order traversal order, the construction can be done efficiently using a stack.
+ +#[cfg(not(feature = "std"))] +extern crate alloc; + +#[cfg(feature = "std")] +extern crate std; + +#[cfg(not(feature = "std"))] +mod std { + pub use core::cmp; + pub use core::result; + pub use alloc::vec; + + pub mod error { + pub trait Error {} + impl Error for T {} + } +} + +pub mod node; +pub mod reference_codec; +mod util; + +#[cfg(test)] +mod tests; + +use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; +use trie_db::{ + ChildReference, DBValue, NodeCodec, Recorder, Trie, TrieDB, TrieLayout, + nibble_ops::NIBBLE_LENGTH, node::Node, +}; +use crate::std::{result::Result, vec::Vec}; + +use crate::node::{ + ProofNode, ProofBranchChild, ProofBranchValue, ProofNodeCodec, encode_proof_node, +}; +use crate::util::{LeftAlignedNibbleSlice, post_order_compare}; + +type MemoryDB = memory_db::MemoryDB, DBValue>; + +/// A compact proof of a set of key-value lookups in a trie with respect to a known root. +pub struct Proof { + nodes: Vec>, +} + +/// Generate a compact proof for key-value pairs in a trie given a set of keys. +pub fn generate_proof<'a, T, L, C, I, K>(trie: &T, keys: I) -> Result + where + T: Trie, + L: TrieLayout, + C: ProofNodeCodec, + I: IntoIterator, + K: 'a + AsRef<[u8]> +{ + // Sort items in post-order traversal order by key. + let mut keys = keys.into_iter() + .map(|key| key.as_ref()) + .collect::>(); + keys.sort_by(|a, b| post_order_compare(a, b)); + + // Look up all keys in order and record the nodes traversed during lookups. + // + // Ideally, we would only store the recorded nodes for one key at a time in memory, making the + // memory requirements O(d) where d is the depth of the tree. However, borrowck makes this + // difficult, so instead we store all recorded nodes for all keys in memory, making the memory + // requirements O(d * k), where k is the number of keys.
+ let mut recorder = Recorder::new(); + let values = keys.iter() + .map(|key| { + trie.get_with(key, &mut recorder) + .map_err(|_| "failed to lookup key in trie") + }) + .collect::, _>>()?; + + let recorded_nodes = recorder.drain(); + let mut recorded_nodes_iter = recorded_nodes.iter(); + + // Stack of trie nodes traversed with additional information to construct proof nodes. + struct NodeStackEntry<'a> { + key: LeftAlignedNibbleSlice<'a>, + node: Node<'a>, + omit_branch_value: bool, + omit_branch_children: [bool; NIBBLE_LENGTH], + } + let mut node_stack = >::new(); + + // The nodes composing the final proof. + let mut proof_nodes = Vec::new(); + + for (key, expected_value) in keys.iter().zip(values.into_iter()) { + let key_nibbles = LeftAlignedNibbleSlice::new(key); + + // Find last common trie node in the stack on the path to the new key. After this we are + // guaranteed that until the end of the loop body that all stack entries will have a key + // that is a prefix of the current key. + while let Some(entry) = node_stack.pop() { + if key_nibbles.starts_with(&entry.key) { + node_stack.push(entry); + break; + } else { + // Pop and finalize node from the stack that is not on the path to the current key. 
+ let proof_node = new_proof_node( + entry.node, entry.omit_branch_value, entry.omit_branch_children + ); + proof_nodes.push(encode_proof_node::(&proof_node)); + } + } + + enum Step<'a> { + FirstEntry, + Descend(usize, &'a [u8]), + FoundValue(Option<&'a [u8]>), + } + loop { + let step = match node_stack.last_mut() { + Some(entry) => match entry.node { + Node::Empty => Step::FoundValue(None), + Node::Leaf(partial_key, value) => { + if key_nibbles.contains(partial_key, entry.key.len()) && + key_nibbles.len() == entry.key.len() + partial_key.len() + { + Step::FoundValue(Some(value)) + } else { + Step::FoundValue(None) + } + } + Node::Extension(partial_key, child_data) => { + if key_nibbles.contains(partial_key, entry.key.len()) && + key_nibbles.len() >= entry.key.len() + partial_key.len() + { + let child_key_len = entry.key.len() + partial_key.len(); + Step::Descend(child_key_len, child_data) + } else { + Step::FoundValue(None) + } + } + Node::Branch(children, value) => { + if key_nibbles.len() == entry.key.len() { + entry.omit_branch_value = true; + Step::FoundValue(value) + } else { + let index = key_nibbles.at(entry.key.len()) + .expect( + "entry key is a prefix of key_nibbles due to stack invariant; \ + thus key_nibbles len is greater than equal to entry key; \ + also they are unequal due to else condition; + qed" + ) + as usize; + if let Some(child_data) = children[index] { + entry.omit_branch_children[index] = true; + let child_key_len = entry.key.len() + 1; + Step::Descend(child_key_len, child_data) + } else { + Step::FoundValue(None) + } + } + } + Node::NibbledBranch(partial_key, children, value) => { + if key_nibbles.contains(partial_key, entry.key.len()) { + if key_nibbles.len() == entry.key.len() + partial_key.len() { + entry.omit_branch_value = true; + Step::FoundValue(value) + } else { + let index = key_nibbles.at(entry.key.len() + partial_key.len()) + .expect( + "key_nibbles contains partial key after entry key offset; \ + thus key_nibbles len is greater 
than equal to entry key len plus partial key len; \ + also they are unequal due to else condition; + qed" + ) + as usize; + if let Some(child_data) = children[index] { + entry.omit_branch_children[index] = true; + let child_key_len = entry.key.len() + partial_key.len() + 1; + Step::Descend(child_key_len, child_data) + } else { + Step::FoundValue(None) + } + } + } else { + Step::FoundValue(None) + } + } + }, + None => Step::FirstEntry, + }; + + match step { + Step::FirstEntry => { + let record = recorded_nodes_iter.next() + .ok_or_else(|| "out of recorded nodes")?; + let trie_node = L::Codec::decode(&record.data) + .map_err(|_| "failure to decode trie node")?; + node_stack.push(NodeStackEntry { + key: LeftAlignedNibbleSlice::new(&[]), + node: trie_node, + omit_branch_value: false, + omit_branch_children: [false; NIBBLE_LENGTH], + }) + }, + Step::Descend(child_key_len, child_ref) => { + let node_data = match L::Codec::try_decode_hash(child_ref) { + Some(hash) => { + // Since recorded nodes are listed in traversal order, the one we are + // looking for must be later in the sequence. + let child_record = recorded_nodes_iter + .find(|record| record.hash == hash) + .ok_or_else(|| "out of recorded nodes")?; + &child_record.data + } + None => child_ref, + }; + let trie_node = L::Codec::decode(node_data) + .map_err(|_| "failure to decode trie node")?; + node_stack.push(NodeStackEntry { + key: key_nibbles.truncate(child_key_len), + node: trie_node, + omit_branch_value: false, + omit_branch_children: [false; NIBBLE_LENGTH], + }) + } + Step::FoundValue(value) => { + if value != expected_value.as_ref().map(|v| v.as_ref()) { + return Err("different values between trie traversal and lookup"); + } + break; + } + } + } + } + + // Pop and finalize remaining nodes in the stack. 
+ while let Some(entry) = node_stack.pop() { + let proof_node = new_proof_node( + entry.node, entry.omit_branch_value, entry.omit_branch_children + ); + proof_nodes.push(encode_proof_node::(&proof_node)); + } + + Ok(Proof { nodes: proof_nodes }) +} + +/// Verify a compact proof for key-value pairs in a trie given a root hash. +pub fn verify_proof<'a, L, C, I, K, V>(root: &::Out, proof: Proof, items: I) + -> Result<(), &'static str> + where + L: TrieLayout, + C: ProofNodeCodec, + I: IntoIterator)>, + K: 'a + AsRef<[u8]>, + V: 'a + AsRef<[u8]>, +{ + // Sort items in post-order traversal order by key. + let mut items = items.into_iter() + .map(|(k, v)| (k.as_ref(), v.as_ref().map(|v| v.as_ref()))) + .collect::>(); + items.sort_by(|(a_key, _), (b_key, _)| post_order_compare(a_key, b_key)); + + let mut items_iter = items.iter(); + + // A stack of child references to fill in omitted branch children for later trie nodes in the + // proof. + let mut node_ref_stack = Vec::new(); + + // A HashDB of the reconstructed trie nodes.
+ let mut db = >::default(); + + for encoded_proof_node in proof.nodes.iter() { + let proof_node = C::decode(encoded_proof_node) + .map_err(|_| "decoding failure")?; + let trie_node = match proof_node { + ProofNode::Empty => L::Codec::empty_node().to_vec(), + ProofNode::Leaf { partial_key } => { + let (_, value) = items_iter + .find(|(_key, value)| value.is_some()) + .ok_or_else(|| "out of values")?; + let value = value + .expect("value is guaranteed to be Some from find predicate; qed"); + L::Codec::leaf_node(partial_key.right(), value.as_ref()) + } + ProofNode::Extension { partial_key } => { + let child_ref = node_ref_stack.pop() + .ok_or_else(|| "referenced non-existent trie node")?; + L::Codec::extension_node( + partial_key.right_iter(), + partial_key.len(), + child_ref, + ) + }, + ProofNode::Branch { children, value } => { + let (trie_children, trie_value) = handle_branch_node::( + &mut node_ref_stack, &mut items_iter, children, value + )?; + L::Codec::branch_node( + trie_children.iter(), + trie_value, + ) + } + ProofNode::NibbledBranch { partial_key, children, value } => { + let (trie_children, trie_value) = handle_branch_node::( + &mut node_ref_stack, &mut items_iter, children, value + )?; + L::Codec::branch_node_nibbled( + partial_key.right_iter(), + partial_key.len(), + trie_children.iter(), + trie_value, + ) + } + }; + + let trie_node_len = trie_node.len(); + let node_ref = if trie_node_len < L::Hash::LENGTH { + let mut inline = ::Out::default(); + inline.as_mut()[..trie_node_len].copy_from_slice(&trie_node); + ChildReference::Inline(inline, trie_node_len) + } else { + let hash = db.insert(EMPTY_PREFIX, &trie_node); + ChildReference::Hash(hash) + }; + + node_ref_stack.push(node_ref); + } + + if node_ref_stack.len() != 1 { + return Err("proof does not contain a single root trie node"); + } + let root_ref = node_ref_stack.pop() + .expect("length of node_ref_stack is guaranteed to be 1 above; qed"); + + let root_hash = match root_ref { + 
ChildReference::Inline(data, _) => db.insert(EMPTY_PREFIX, data.as_ref()), + ChildReference::Hash(hash) => hash, + }; + + if root_hash != *root { + return Err("root hash mismatch"); + } + + // Perform the key lookups on the reconstructed trie to ensure the values are correct. + let trie = >::new(&db, &root_hash) + .map_err(|_| "could not construct trie")?; + for (key, expected_value) in items.iter() { + let actual_value = trie.get(key) + .map_err(|_| "could not find key in trie subset")?; + if actual_value.as_ref().map(|v| v.as_ref()) != *expected_value { + return Err("incorrect value for key"); + } + } + + Ok(()) +} + +fn handle_branch_node<'a, 'b, H, I>( + node_ref_stack: &'b mut Vec>, + items_iter: &'b mut I, + children: [ProofBranchChild<'a>; NIBBLE_LENGTH], + value: ProofBranchValue<'a>, +) + -> Result< + ([Option>; NIBBLE_LENGTH], Option<&'a [u8]>), + &'static str + > + where + H: Hasher, + I: Iterator)>, +{ + let mut trie_children = [None; NIBBLE_LENGTH]; + for i in (0..NIBBLE_LENGTH).rev() { + trie_children[i] = match children[i] { + ProofBranchChild::Empty => None, + ProofBranchChild::Omitted => { + let child_ref = node_ref_stack.pop() + .ok_or_else(|| "referenced non-existent trie node")?; + Some(child_ref) + } + ProofBranchChild::Included(node_data) => { + let node_len = node_data.len(); + if node_len >= H::LENGTH { + return Err("inline branch child exceeds hash length"); + } + let mut inline = H::Out::default(); + inline.as_mut()[..node_len].copy_from_slice(node_data); + Some(ChildReference::Inline(inline, node_len)) + } + }; + } + let trie_value = match value { + ProofBranchValue::Empty => None, + ProofBranchValue::Omitted => { + let (_key, value) = items_iter + .find(|(_key, value)| value.is_some()) + .ok_or_else(|| "out of values")?; + *value + } + ProofBranchValue::Included(value) => Some(value), + }; + Ok((trie_children, trie_value)) +} + +fn new_proof_node( + node: Node, + omit_branch_value: bool, + omit_branch_children: [bool; NIBBLE_LENGTH], +) 
-> ProofNode +{ + match node { + Node::Empty => ProofNode::Empty, + Node::Leaf(partial_key, _value) => ProofNode::Leaf { partial_key }, + Node::Extension(partial_key, _child) => ProofNode::Extension { partial_key }, + Node::Branch(children, value) => ProofNode::Branch { + children: to_proof_children(children, omit_branch_children), + value: to_proof_value(value, omit_branch_value), + }, + Node::NibbledBranch(partial_key, children, value) => ProofNode::NibbledBranch { + partial_key, + children: to_proof_children(children, omit_branch_children), + value: to_proof_value(value, omit_branch_value), + }, + } +} + +fn to_proof_children( + children: [Option<&[u8]>; NIBBLE_LENGTH], + omit_children: [bool; NIBBLE_LENGTH], +) -> [ProofBranchChild; NIBBLE_LENGTH] +{ + let mut proof_children = [ProofBranchChild::Empty; NIBBLE_LENGTH]; + for i in 0..NIBBLE_LENGTH { + proof_children[i] = match children[i] { + None => ProofBranchChild::Empty, + Some(_) if omit_children[i] => ProofBranchChild::Omitted, + Some(child_data) => ProofBranchChild::Included(child_data), + }; + } + proof_children +} + +fn to_proof_value(value: Option<&[u8]>, omit_value: bool) -> ProofBranchValue { + match value { + None => ProofBranchValue::Empty, + Some(_) if omit_value => ProofBranchValue::Omitted, + Some(value) => ProofBranchValue::Included(value), + } +} diff --git a/trie-proof/src/node.rs b/trie-proof/src/node.rs new file mode 100644 index 00000000..005803d5 --- /dev/null +++ b/trie-proof/src/node.rs @@ -0,0 +1,136 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! The data structures included in the proof. + +use crate::std::{self, vec::Vec}; + +use hash_db::Hasher; +use trie_db::{NibbleSlice, nibble_ops::NIBBLE_LENGTH}; + +/// A child entry in a proof branch node. +#[derive(Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "std", derive(Debug))] +pub enum ProofBranchChild<'a> { + Empty, + Omitted, + Included(&'a [u8]), +} + +/// A child value in a proof branch node. +#[derive(Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "std", derive(Debug))] +pub enum ProofBranchValue<'a> { + Empty, + Omitted, + Included(&'a [u8]), +} + +/// A proof node. +#[derive(Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "std", derive(Debug))] +pub enum ProofNode<'a> { + /// Null trie node; could be an empty root or an empty branch entry. + Empty, + /// Leaf node; has key slice and an omitted value. + Leaf { + partial_key: NibbleSlice<'a>, + }, + /// Extension node; has key slice and an omitted child node reference. + Extension { + partial_key: NibbleSlice<'a>, + }, + /// Branch node; has slice of children which may be empty, omitted, or included and a value, + /// which also may be empty, omitted, or included. + Branch { + children: [ProofBranchChild<'a>; NIBBLE_LENGTH], + value: ProofBranchValue<'a>, + }, + /// Branch node with key slice. This is used as an alternative to extension nodes in some trie + /// layouts. + NibbledBranch { + partial_key: NibbleSlice<'a>, + children: [ProofBranchChild<'a>; NIBBLE_LENGTH], + value: ProofBranchValue<'a>, + } +} + +/// Trait for proof node encoding/decoding. 
+pub trait ProofNodeCodec: Sized + where H: Hasher +{ + /// Codec error type. + type Error: std::error::Error; + + /// Decode bytes to a `ProofNode`. Returns `Self::Error` on failure. + fn decode(data: &[u8]) -> Result; + + /// Decode bytes to the `Hasher`s output type. Returns `None` on failure. + fn try_decode_hash(data: &[u8]) -> Option; + + /// Returns an encoded empty node. + fn empty_node() -> &'static [u8]; + + /// Returns an encoded leaf node. + fn leaf_node( + partial: impl Iterator, + number_nibble: usize, + ) -> Vec; + + /// Returns an encoded extension node. + fn extension_node( + partial: impl Iterator, + number_nibble: usize, + ) -> Vec; + + /// Returns an encoded branch node. + fn branch_node<'a>( + children: &'a [ProofBranchChild<'a>; NIBBLE_LENGTH], + value: &ProofBranchValue<'a>, + ) -> Vec; + + /// Returns an encoded branch node with a possible partial path. + fn branch_node_nibbled<'a>( + partial: impl Iterator, + number_nibble: usize, + children: &'a [ProofBranchChild<'a>; NIBBLE_LENGTH], + value: &ProofBranchValue<'a>, + ) -> Vec; +} + +/// Encode a proof node to a new byte vector. 
+pub fn encode_proof_node(node: &ProofNode) -> Vec + where + C: ProofNodeCodec, + H: Hasher, +{ + match node { + ProofNode::Empty => C::empty_node().to_vec(), + ProofNode::Leaf { partial_key } => C::leaf_node( + partial_key.right_iter(), + partial_key.len(), + ), + ProofNode::Extension { partial_key } => C::extension_node( + partial_key.right_iter(), + partial_key.len(), + ), + ProofNode::Branch { children, value } => C::branch_node(children, value), + ProofNode::NibbledBranch { partial_key, children, value } => C::branch_node_nibbled( + partial_key.right_iter(), + partial_key.len(), + children, + value + ), + } +} diff --git a/trie-proof/src/reference_codec/mod.rs b/trie-proof/src/reference_codec/mod.rs new file mode 100644 index 00000000..04c39ef9 --- /dev/null +++ b/trie-proof/src/reference_codec/mod.rs @@ -0,0 +1,22 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Reference implementations of `ProofNodeCodec` for common trie layouts. 
+ +mod with_ext; +mod without_ext; +mod util; + +pub use with_ext::ReferenceProofNodeCodec as ReferenceProofNodeCodecWithExt; +pub use without_ext::ReferenceProofNodeCodec as ReferenceProofNodeCodecWithoutExt; diff --git a/trie-proof/src/reference_codec/util.rs b/trie-proof/src/reference_codec/util.rs new file mode 100644 index 00000000..78c2c1f4 --- /dev/null +++ b/trie-proof/src/reference_codec/util.rs @@ -0,0 +1,142 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Common utilities for reference trie implementations. + +use parity_scale_codec::{Compact, Decode, Encode, Error as CodecError, Input}; +use trie_db::nibble_ops::NIBBLE_LENGTH; + +use crate::std::{result::Result, vec::Vec}; +use crate::node::{ProofBranchChild, ProofBranchValue}; + +pub fn take<'a>(input: &mut &'a[u8], count: usize) -> Option<&'a[u8]> { + if input.len() < count { + return None + } + let r = &(*input)[..count]; + *input = &(*input)[count..]; + Some(r) +} + +pub const BITMAP_LENGTH: usize = 2; + +/// Radix 16 trie, bitmap encoding implementation, +/// it contains children mapping information for a branch +/// (children presence only), it encodes into +/// a compact bitmap encoding representation.
+pub struct Bitmap(u16); + +impl Bitmap { + pub fn decode(data: &[u8]) -> Result { + Ok(Bitmap(u16::decode(&mut &data[..])?)) + } + + pub fn value_at(&self, i: usize) -> bool { + self.0 & (1u16 << i) != 0 + } + + pub fn encode>(has_children: I , dest: &mut [u8]) { + let mut bitmap: u16 = 0; + let mut cursor: u16 = 1; + for v in has_children { + if v { bitmap |= cursor } + cursor <<= 1; + } + dest[0] = (bitmap % 256) as u8; + dest[1] = (bitmap / 256) as u8; + } +} + +pub const BRANCH_VALUE_OMITTED: u8 = 0; +pub const BRANCH_VALUE_INLINE: u8 = 1; + +pub fn encode_branch_value(output: &mut Vec, value: &ProofBranchValue) { + match value { + ProofBranchValue::Empty => {}, + ProofBranchValue::Omitted => { + output.push(BRANCH_VALUE_OMITTED); + } + ProofBranchValue::Included(data) => { + output.push(BRANCH_VALUE_INLINE); + Compact(data.len() as u32).encode_to(output); + output.extend_from_slice(data); + } + } +} + +pub fn encode_branch_children(output: &mut Vec, children: &[ProofBranchChild; NIBBLE_LENGTH]) { + let offset = output.len(); + output.extend_from_slice(&[0; 2 * BITMAP_LENGTH][..]); + let (has_children, inline_children) = children.iter() + .map(|child| match child { + ProofBranchChild::Empty => (false, false), + ProofBranchChild::Omitted => (true, false), + ProofBranchChild::Included(data) => { + Compact(data.len() as u32).encode_to(output); + output.extend_from_slice(data); + (true, true) + } + }) + .unzip::<_, _, Vec<_>, Vec<_>>(); + Bitmap::encode( + has_children.iter().cloned(), + &mut output[offset..(offset + BITMAP_LENGTH)] + ); + Bitmap::encode( + inline_children.iter().cloned(), + &mut output[(offset + BITMAP_LENGTH)..(offset + 2 * BITMAP_LENGTH)] + ); +} + +pub fn decode_branch_value<'a>(input: &mut &'a [u8], has_value: bool) + -> Result, CodecError> +{ + if has_value { + match input.read_byte()? 
{ + BRANCH_VALUE_OMITTED => Ok(ProofBranchValue::Omitted), + BRANCH_VALUE_INLINE => { + let count = >::decode(input)?.0 as usize; + let data = take(input, count).ok_or(CodecError::from("Bad format"))?; + Ok(ProofBranchValue::Included(data)) + } + _ => Err(CodecError::from("Bad format")), + } + } else { + Ok(ProofBranchValue::Empty) + } +} + +pub fn decode_branch_children<'a>(input: &mut &'a [u8]) + -> Result<[ProofBranchChild<'a>; NIBBLE_LENGTH], CodecError> +{ + let bitmap_slice = take(input, BITMAP_LENGTH) + .ok_or(CodecError::from("Bad format"))?; + let has_children_bitmap = Bitmap::decode(&bitmap_slice[..])?; + + let bitmap_slice = take(input, BITMAP_LENGTH) + .ok_or(CodecError::from("Bad format"))?; + let inline_children_bitmap = Bitmap::decode(&bitmap_slice[..])?; + + let mut children = [ProofBranchChild::Empty; 16]; + for i in 0..NIBBLE_LENGTH { + if inline_children_bitmap.value_at(i) { + let count = >::decode(input)?.0 as usize; + let data = take(input, count).ok_or(CodecError::from("Bad format"))?; + children[i] = ProofBranchChild::Included(data); + } else if has_children_bitmap.value_at(i) { + children[i] = ProofBranchChild::Omitted; + } + } + Ok(children) +} diff --git a/trie-proof/src/reference_codec/with_ext.rs b/trie-proof/src/reference_codec/with_ext.rs new file mode 100644 index 00000000..140098b8 --- /dev/null +++ b/trie-proof/src/reference_codec/with_ext.rs @@ -0,0 +1,238 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +use hash_db::Hasher; +use parity_scale_codec::{Decode, Input, Output, Encode, Error as CodecError}; +use trie_db::{NibbleSlice, nibble_ops}; + +use crate::node::{ProofBranchChild, ProofBranchValue, ProofNode, ProofNodeCodec}; +use crate::std::{result::Result, vec::Vec}; +use crate::reference_codec::util::{ + decode_branch_value, decode_branch_children, encode_branch_children, encode_branch_value, take, +}; + +const EMPTY_TRIE: u8 = 0; +const LEAF_NODE_OFFSET: u8 = 1; +const EXTENSION_NODE_OFFSET: u8 = 128; +const BRANCH_NODE_NO_VALUE: u8 = 254; +const BRANCH_NODE_WITH_VALUE: u8 = 255; +const LEAF_NODE_OVER: u8 = EXTENSION_NODE_OFFSET - LEAF_NODE_OFFSET; +const EXTENSION_NODE_OVER: u8 = BRANCH_NODE_NO_VALUE - EXTENSION_NODE_OFFSET; +const LEAF_NODE_LAST: u8 = EXTENSION_NODE_OFFSET - 1; +const EXTENSION_NODE_LAST: u8 = BRANCH_NODE_NO_VALUE - 1; + +/// A `ProofNodeCodec` implementation for with extension and where branch nodes do not include +/// partial keys. The codec is compatible with any `Hasher`. +#[derive(Default, Clone)] +pub struct ReferenceProofNodeCodec; + +impl ProofNodeCodec for ReferenceProofNodeCodec { + type Error = CodecError; + + fn decode(data: &[u8]) -> Result { + let input = &mut &*data; + match NodeHeader::decode(input)? 
{ + NodeHeader::Null => Ok(ProofNode::Empty), + NodeHeader::Leaf(nibble_count) => { + let partial_key = decode_partial(input, nibble_count)?; + Ok(ProofNode::Leaf { partial_key }) + } + NodeHeader::Extension(nibble_count) => { + let partial_key = decode_partial(input, nibble_count)?; + Ok(ProofNode::Extension { partial_key }) + } + NodeHeader::Branch(has_value) => { + let value = decode_branch_value(input, has_value)?; + let children = decode_branch_children(input)?; + Ok(ProofNode::Branch { children, value }) + } + } + } + + fn try_decode_hash(data: &[u8]) -> Option<::Out> { + if data.len() == H::LENGTH { + let mut r = ::Out::default(); + r.as_mut().copy_from_slice(data); + Some(r) + } else { + None + } + } + + fn empty_node() -> &'static[u8] { + &[EMPTY_TRIE] + } + + fn leaf_node(partial: impl Iterator, number_nibble: usize) -> Vec { + assert!(number_nibble < LEAF_NODE_OVER as usize); + + let mut output = Vec::with_capacity(1 + (number_nibble / nibble_ops::NIBBLE_PER_BYTE)); + NodeHeader::Leaf(number_nibble).encode_to(&mut output); + output.extend(partial); + output + } + + fn extension_node(partial: impl Iterator, number_nibble: usize) -> Vec { + assert!(number_nibble < EXTENSION_NODE_OVER as usize); + + let mut output = Vec::with_capacity(1 + (number_nibble / nibble_ops::NIBBLE_PER_BYTE)); + NodeHeader::Extension(number_nibble).encode_to(&mut output); + output.extend(partial); + output + } + + fn branch_node<'a>( + children: &'a [ProofBranchChild<'a>; nibble_ops::NIBBLE_LENGTH], + value: &ProofBranchValue<'a>, + ) -> Vec + { + let has_value = *value != ProofBranchValue::Empty; + let mut output = NodeHeader::Branch(has_value).encode(); + encode_branch_value(&mut output, value); + encode_branch_children(&mut output, children); + output + } + + fn branch_node_nibbled<'a>( + _partial: impl Iterator, + _number_nibble: usize, + _children: &'a [ProofBranchChild<'a>; nibble_ops::NIBBLE_LENGTH], + _value: &ProofBranchValue<'a>, + ) -> Vec + { + unreachable!(); + } +} + 
+#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum NodeHeader { + Null, + Leaf(usize), + Extension(usize), + Branch(bool), +} + +impl Encode for NodeHeader { + fn encode_to(&self, output: &mut T) { + match self { + NodeHeader::Null => output.push_byte(EMPTY_TRIE), + NodeHeader::Leaf(nibble_count) => + output.push_byte(LEAF_NODE_OFFSET + *nibble_count as u8), + NodeHeader::Extension(nibble_count) => + output.push_byte(EXTENSION_NODE_OFFSET + *nibble_count as u8), + NodeHeader::Branch(true) => output.push_byte(BRANCH_NODE_WITH_VALUE), + NodeHeader::Branch(false) => output.push_byte(BRANCH_NODE_NO_VALUE), + } + } +} + +impl Decode for NodeHeader { + fn decode(input: &mut I) -> Result { + let byte = input.read_byte()?; + Ok(match byte { + EMPTY_TRIE => NodeHeader::Null, + LEAF_NODE_OFFSET..=LEAF_NODE_LAST => + NodeHeader::Leaf((byte - LEAF_NODE_OFFSET) as usize), + EXTENSION_NODE_OFFSET ..= EXTENSION_NODE_LAST => + NodeHeader::Extension((byte - EXTENSION_NODE_OFFSET) as usize), + BRANCH_NODE_NO_VALUE => NodeHeader::Branch(false), + BRANCH_NODE_WITH_VALUE => NodeHeader::Branch(true), + }) + } +} + +fn decode_partial<'a>(input: &mut &'a [u8], nibble_count: usize) + -> Result, CodecError> +{ + let nibble_data = take( + input, + (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE, + ).ok_or(CodecError::from("Bad format"))?; + let nibble_slice = NibbleSlice::new_offset( + nibble_data, + nibble_ops::number_padding(nibble_count), + ); + Ok(nibble_slice) +} + +#[cfg(test)] +mod tests { + use super::*; + use keccak_hasher::KeccakHasher; + + #[test] + fn empty_encode_decode() { + let encoded = > + ::empty_node(); + let decoded = > + ::decode(encoded).unwrap(); + assert_eq!(decoded, ProofNode::Empty); + } + + #[test] + fn leaf_encode_decode() { + let partial_key = NibbleSlice::new(b"tralala"); + let encoded = > + ::leaf_node(partial_key.right_iter(), partial_key.len()); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!(decoded, 
ProofNode::Leaf { partial_key }); + + let partial_key = NibbleSlice::new_offset(b"tralala", 1); + let encoded = > + ::leaf_node(partial_key.right_iter(), partial_key.len()); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!(decoded, ProofNode::Leaf { partial_key }); + } + + #[test] + fn extension_encode_decode() { + let partial_key = NibbleSlice::new(b"tralala"); + let encoded = > + ::extension_node(partial_key.right_iter(), partial_key.len()); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!(decoded, ProofNode::Extension { partial_key }); + + let partial_key = NibbleSlice::new_offset(b"tralala", 1); + let encoded = > + ::extension_node(partial_key.right_iter(), partial_key.len()); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!(decoded, ProofNode::Extension { partial_key }); + } + + #[test] + fn branch_encode_decode() { + let mut children = [ProofBranchChild::Empty; nibble_ops::NIBBLE_LENGTH]; + children[2] = ProofBranchChild::Omitted; + children[3] = ProofBranchChild::Included(b"value 3"); + children[7] = ProofBranchChild::Included(b"value 7"); + children[12] = ProofBranchChild::Omitted; + + let values = [ + ProofBranchValue::Empty, + ProofBranchValue::Omitted, + ProofBranchValue::Included(b"value"), + ]; + for value in values.iter() { + let encoded = > + ::branch_node(&children, &value); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!(decoded, ProofNode::Branch { children, value: *value }); + } + } +} diff --git a/trie-proof/src/reference_codec/without_ext.rs b/trie-proof/src/reference_codec/without_ext.rs new file mode 100644 index 00000000..1fb0c111 --- /dev/null +++ b/trie-proof/src/reference_codec/without_ext.rs @@ -0,0 +1,284 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use hash_db::Hasher; +use parity_scale_codec::{Decode, Input, Output, Encode, Error as CodecError}; +use trie_db::{NibbleSlice, nibble_ops}; + +use crate::node::{ProofBranchChild, ProofBranchValue, ProofNode, ProofNodeCodec}; +use crate::std::{cmp, iter, result::Result, vec::Vec}; +use crate::reference_codec::util::{ + decode_branch_value, decode_branch_children, encode_branch_value, encode_branch_children, take, +}; + +const EMPTY_TRIE: u8 = 0; +const NIBBLE_SIZE_BOUND: usize = u16::max_value() as usize; +const LEAF_PREFIX_MASK: u8 = 0b_01 << 6; +const BRANCH_WITHOUT_MASK: u8 = 0b_10 << 6; +const BRANCH_WITH_MASK: u8 = 0b_11 << 6; + +/// A `ProofNodeCodec` implementation for without extension and where branch nodes include partial +/// keys. The codec is compatible with any `Hasher`. +#[derive(Default, Clone)] +pub struct ReferenceProofNodeCodec; + +impl ProofNodeCodec for ReferenceProofNodeCodec { + type Error = CodecError; + + fn decode(data: &[u8]) -> Result { + let input = &mut &*data; + match NodeHeader::decode(input)? 
{ + NodeHeader::Null => Ok(ProofNode::Empty), + NodeHeader::Leaf(nibble_count) => { + let partial_key = decode_partial(input, nibble_count)?; + Ok(ProofNode::Leaf { partial_key }) + } + NodeHeader::Branch(has_value, nibble_count) => { + let partial_key = decode_partial(input, nibble_count)?; + let value = decode_branch_value(input, has_value)?; + let children = decode_branch_children(input)?; + Ok(ProofNode::NibbledBranch { partial_key, children, value }) + } + } + } + + fn try_decode_hash(data: &[u8]) -> Option<::Out> { + if data.len() == H::LENGTH { + let mut r = ::Out::default(); + r.as_mut().copy_from_slice(data); + Some(r) + } else { + None + } + } + + fn empty_node() -> &'static[u8] { + &[EMPTY_TRIE] + } + + fn leaf_node(partial: impl Iterator, number_nibble: usize) -> Vec { + assert!(number_nibble < NIBBLE_SIZE_BOUND as usize); + + let mut output = Vec::with_capacity(3 + (number_nibble / nibble_ops::NIBBLE_PER_BYTE)); + NodeHeader::Leaf(number_nibble).encode_to(&mut output); + output.extend(partial); + output + } + + fn extension_node(_partial: impl Iterator, _number_nibble: usize) -> Vec { + unreachable!() + } + + fn branch_node<'a>( + _children: &'a [ProofBranchChild<'a>; nibble_ops::NIBBLE_LENGTH], + _value: &ProofBranchValue<'a>, + ) -> Vec + { + unreachable!() + } + + fn branch_node_nibbled<'a>( + partial: impl Iterator, + number_nibble: usize, + children: &'a [ProofBranchChild<'a>; nibble_ops::NIBBLE_LENGTH], + value: &ProofBranchValue<'a>, + ) -> Vec + { + assert!(number_nibble < NIBBLE_SIZE_BOUND as usize); + + let has_value = *value != ProofBranchValue::Empty; + let mut output = NodeHeader::Branch(has_value, number_nibble).encode(); + output.extend(partial); + encode_branch_value(&mut output, value); + encode_branch_children(&mut output, children); + output + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum NodeHeader { + Null, + Leaf(usize), + Branch(bool, usize), +} + +impl Encode for NodeHeader { + fn encode_to(&self, output: &mut T) { 
+ match self { + NodeHeader::Null => output.push_byte(EMPTY_TRIE), + NodeHeader::Leaf(nibble_count) => + encode_size_and_prefix(*nibble_count, LEAF_PREFIX_MASK, output), + NodeHeader::Branch(true, nibble_count) => + encode_size_and_prefix(*nibble_count, BRANCH_WITH_MASK, output), + NodeHeader::Branch(false, nibble_count) => + encode_size_and_prefix(*nibble_count, BRANCH_WITHOUT_MASK, output), + } + } +} + +impl Decode for NodeHeader { + fn decode(input: &mut I) -> Result { + let i = input.read_byte()?; + if i == EMPTY_TRIE { + return Ok(NodeHeader::Null); + } + match i & (0b11 << 6) { + LEAF_PREFIX_MASK => + Ok(NodeHeader::Leaf(decode_size(i, input)?)), + BRANCH_WITHOUT_MASK => + Ok(NodeHeader::Branch(false, decode_size(i, input)?)), + BRANCH_WITH_MASK => + Ok(NodeHeader::Branch(true, decode_size(i, input)?)), + // do not allow any special encoding + _ => Err("Unknown type of node".into()), + } + } +} + +/// Encode and allocate node type header (type and size), and partial value. +/// It uses an iterator over encoded partial bytes as input. +fn size_and_prefix_iterator(size: usize, prefix: u8) -> impl Iterator { + let size = cmp::min(NIBBLE_SIZE_BOUND, size); + + let l1 = cmp::min(62, size); + let (first_byte, mut rem) = if size == l1 { + (iter::once(prefix + l1 as u8), 0) + } else { + (iter::once(prefix + 63), size - l1) + }; + let next_bytes = move || { + if rem > 0 { + if rem < 256 { + let result = rem - 1; + rem = 0; + Some(result as u8) + } else { + rem = rem.saturating_sub(255); + Some(255) + } + } else { + None + } + }; + first_byte.chain(iter::from_fn(next_bytes)) +} + +fn encode_size_and_prefix(size: usize, prefix: u8, out: &mut impl Output) { + for b in size_and_prefix_iterator(size, prefix) { + out.push_byte(b) + } +} + +fn decode_size(first: u8, input: &mut I) -> Result { + let mut result = (first & 255u8 >> 2) as usize; + if result < 63 { + return Ok(result); + } + result -= 1; + while result <= NIBBLE_SIZE_BOUND { + let n = input.read_byte()? 
as usize; + if n < 255 { + return Ok(result + n + 1); + } + result += 255; + } + Err("Size limit reached for a nibble slice".into()) +} + +fn decode_partial<'a>(input: &mut &'a [u8], nibble_count: usize) + -> Result, CodecError> +{ + let padding = nibble_count % nibble_ops::NIBBLE_PER_BYTE != 0; + // check that the padding is valid (if any) + if padding && nibble_ops::pad_left(input[0]) != 0 { + return Err(CodecError::from("Bad format")); + } + let nibble_data = take( + input, + (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE, + ).ok_or(CodecError::from("Bad format"))?; + let nibble_slice = NibbleSlice::new_offset( + nibble_data, + nibble_ops::number_padding(nibble_count), + ); + Ok(nibble_slice) +} + +#[cfg(test)] +mod tests { + use super::*; + use keccak_hasher::KeccakHasher; + + #[test] + fn empty_encode_decode() { + let encoded = > + ::empty_node(); + let decoded = > + ::decode(encoded).unwrap(); + assert_eq!(decoded, ProofNode::Empty); + } + + #[test] + fn leaf_encode_decode() { + let partial_key = NibbleSlice::new(b"tralala"); + let encoded = > + ::leaf_node(partial_key.right_iter(), partial_key.len()); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!(decoded, ProofNode::Leaf { partial_key }); + + let partial_key = NibbleSlice::new_offset(b"tralala", 1); + let encoded = > + ::leaf_node(partial_key.right_iter(), partial_key.len()); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!(decoded, ProofNode::Leaf { partial_key }); + } + + #[test] + fn branch_encode_decode() { + let mut children = [ProofBranchChild::Empty; nibble_ops::NIBBLE_LENGTH]; + children[2] = ProofBranchChild::Omitted; + children[3] = ProofBranchChild::Included(b"value 3"); + children[7] = ProofBranchChild::Included(b"value 7"); + children[12] = ProofBranchChild::Omitted; + + let partial_keys = [ + NibbleSlice::new_offset(b"tralala", 0), + NibbleSlice::new_offset(b"tralala", 1), + ]; + let values = [ + ProofBranchValue::Empty, + 
ProofBranchValue::Omitted, + ProofBranchValue::Included(b"value"), + ]; + for partial_key in partial_keys.iter() { + for value in values.iter() { + let encoded = > + ::branch_node_nibbled( + partial_key.right_iter(), + partial_key.len(), + &children, + &value, + ); + let decoded = > + ::decode(&encoded).unwrap(); + assert_eq!( + decoded, + ProofNode::NibbledBranch { partial_key: *partial_key, children, value: *value } + ); + } + } + } +} \ No newline at end of file diff --git a/trie-proof/src/tests.rs b/trie-proof/src/tests.rs new file mode 100644 index 00000000..447cb8ec --- /dev/null +++ b/trie-proof/src/tests.rs @@ -0,0 +1,98 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use hash_db::Hasher; +use trie_db::{DBValue, TrieMut}; +use keccak_hasher::KeccakHasher; +use reference_trie::{ + ExtensionLayout, NoExtensionLayout, RefTrieDB, RefTrieDBNoExt, RefTrieDBMut, RefTrieDBMutNoExt, +}; + +use crate::reference_codec::{ReferenceProofNodeCodecWithExt, ReferenceProofNodeCodecWithoutExt}; +use crate::{generate_proof, verify_proof}; + +type MemoryDB = memory_db::MemoryDB, DBValue>; + +fn test_entries() -> Vec<(&'static [u8], &'static [u8])> { + vec![ + (b"alfa", b"val alpha"), + (b"bravo", b"val bravo"), + (b"do", b"verb"), + (b"dog", b"puppy"), + (b"doge", b"coin"), + (b"horse", b"stallion"), + ] +} + +#[test] +fn trie_proofs_with_ext() { + let (db, root) = { + let mut root = ::Out::default(); + let mut db = MemoryDB::default(); + { + let mut trie = RefTrieDBMut::new(&mut db, &mut root); + for (key, value) in test_entries() { + trie.insert(key, value).unwrap(); + } + } + (db, root) + }; + let trie = RefTrieDB::new(&db, &root).unwrap(); + + let items = vec![ + (&b"dog"[..], Some(&b"puppy"[..])), + (&b"doge"[..], Some(&b"coin"[..])), + (&b"bravo"[..], Some(&b"val bravo"[..])), + (&b"do"[..], Some(&b"verb"[..])), + (&b"dag"[..], None), + ]; + let proof = generate_proof::<_, ExtensionLayout, ReferenceProofNodeCodecWithExt, _, _,>( + &trie, items.iter().map(|(k, _)| k) + ).unwrap(); + + verify_proof::( + &root, proof, items.iter() + ).unwrap(); +} + +#[test] +fn trie_proofs_without_ext() { + let (db, root) = { + let mut root = ::Out::default(); + let mut db = MemoryDB::default(); + { + let mut trie = RefTrieDBMutNoExt::new(&mut db, &mut root); + for (key, value) in test_entries() { + trie.insert(key, value).unwrap(); + } + } + (db, root) + }; + let trie = RefTrieDBNoExt::new(&db, &root).unwrap(); + + let items = vec![ + (&b"dog"[..], Some(&b"puppy"[..])), + (&b"doge"[..], Some(&b"coin"[..])), + (&b"bravo"[..], Some(&b"val bravo"[..])), + (&b"do"[..], Some(&b"verb"[..])), + (&b"dag"[..], None), + ]; + let proof = generate_proof::<_, 
NoExtensionLayout, ReferenceProofNodeCodecWithoutExt, _, _,>( + &trie, items.iter().map(|(k, _)| k) + ).unwrap(); + + verify_proof::( + &root, proof, items.iter() + ).unwrap(); +} diff --git a/trie-proof/src/util.rs b/trie-proof/src/util.rs new file mode 100644 index 00000000..11edcc24 --- /dev/null +++ b/trie-proof/src/util.rs @@ -0,0 +1,122 @@ +// Copyright 2019 Parity Technologies +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use trie_db::{NibbleSlice, nibble_ops::{self, NIBBLE_PER_BYTE}}; + +use crate::std::{fmt, cmp::{self, Ordering}}; + +/// A representation of a nibble slice which is left-aligned. The regular `trie_db::NibbleSlice` is +/// right-aligned meaning it does not support efficient truncation from the right side. +pub struct LeftAlignedNibbleSlice<'a> { + bytes: &'a [u8], + len: usize, +} + +impl<'a> LeftAlignedNibbleSlice<'a> { + pub fn new(bytes: &'a [u8]) -> Self { + LeftAlignedNibbleSlice { + bytes, + len: bytes.len() * NIBBLE_PER_BYTE, + } + } + + /// Returns the length of the slice in nibbles. + pub fn len(&self) -> usize { + self.len + } + + /// Get the nibble at a nibble index padding with a 0 nibble. Returns None if the index is + /// out of bounds. + pub fn at(&self, index: usize) -> Option { + if index < self.len() { + Some(nibble_ops::left_nibble_at(self.bytes, index)) + } else { + None + } + } + + /// Returns a new slice truncated from the right side to the given length. 
If the given length + /// is greater than that of this slice, the function just returns a copy. + pub fn truncate(&self, len: usize) -> Self { + LeftAlignedNibbleSlice { + bytes: self.bytes, + len: cmp::min(len, self.len), + } + } + + /// Returns whether the given slice is a prefix of this one. + pub fn starts_with(&self, prefix: &LeftAlignedNibbleSlice<'a>) -> bool { + self.truncate(prefix.len()) == *prefix + } + + /// Returns whether another regular (right-aligned) nibble slice is contained in this one at + /// the given offset. + pub fn contains(&self, partial: NibbleSlice, offset: usize) -> bool { + (0..partial.len()).all(|i| self.at(offset + i) == Some(partial.at(i))) + } +} + +impl<'a> PartialEq for LeftAlignedNibbleSlice<'a> { + fn eq(&self, other: &Self) -> bool { + let len = self.len(); + if other.len() != len { + return false; + } + + // Quickly compare the common prefix of the byte slices. + let byte_len = len / NIBBLE_PER_BYTE; + if self.bytes[..byte_len] != other.bytes[..byte_len] { + return false; + } + + // Compare nibble-by-nibble (either 0 or 1 nibbles) any after the common byte prefix. + for i in (byte_len * NIBBLE_PER_BYTE)..len { + let a = self.at(i).expect("i < len; len == self.len() qed"); + let b = other.at(i).expect("i < len; len == other.len(); qed"); + if a != b { + return false + } + } + + true + } +} + +impl<'a> Eq for LeftAlignedNibbleSlice<'a> {} + +#[cfg(feature = "std")] +impl<'a> fmt::Debug for LeftAlignedNibbleSlice<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for i in 0..self.len() { + let nibble = self.at(i).expect("i < self.len(); qed"); + match i { + 0 => write!(f, "{:01x}", nibble)?, + _ => write!(f, "'{:01x}", nibble)?, + } + } + Ok(()) + } +} + +/// A comparator function for keys in post-order traversal order in a trie. This is similar to +/// the regular lexographic order except that if one byte slice is a prefix of another, the longer +/// one comes first in the ordering. 
+pub fn post_order_compare(a: &[u8], b: &[u8]) -> Ordering { + let common_len = cmp::min(a.len(), b.len()); + match a[..common_len].cmp(&b[..common_len]) { + // If one is a prefix of the other, the longer string is lesser. + Ordering::Equal => b.len().cmp(&a.len()), + ordering => ordering, + } +}