diff --git a/Cargo.lock b/Cargo.lock index 2c3def1e28abc..24c458e18d885 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1107,11 +1107,11 @@ dependencies = [ [[package]] name = "finality-grandpa" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", - "hashmap_core 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", + "hashbrown 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1519,11 +1519,6 @@ dependencies = [ "autocfg 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "hashmap_core" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "heapsize" version = "0.4.2" @@ -4476,6 +4471,31 @@ dependencies = [ "substrate-primitives 2.0.0", ] +[[package]] +name = "srml-bridge" +version = "0.1.0" +dependencies = [ + "finality-grandpa 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-scale-codec 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", + "sr-io 2.0.0", + "sr-primitives 2.0.0", + "sr-std 2.0.0", + "srml-session 2.0.0", + "srml-support 2.0.0", + "srml-system 2.0.0", + "substrate-client 2.0.0", + "substrate-finality-grandpa 2.0.0", + "substrate-finality-grandpa-primitives 2.0.0", + "substrate-keyring 2.0.0", + "substrate-primitives 2.0.0", + "substrate-state-machine 2.0.0", + "substrate-test-runtime-client 2.0.0", + 
"substrate-trie 2.0.0", +] + [[package]] name = "srml-collective" version = "2.0.0" @@ -5706,7 +5726,7 @@ name = "substrate-finality-grandpa" version = "2.0.0" dependencies = [ "env_logger 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "finality-grandpa 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "finality-grandpa 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "fork-tree 2.0.0", "futures 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)", "futures-preview 0.3.0-alpha.19 (registry+https://github.com/rust-lang/crates.io-index)", @@ -7662,7 +7682,7 @@ dependencies = [ "checksum fallible-iterator 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" "checksum fdlimit 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1ee15a7050e5580b3712877157068ea713b245b080ff302ae2ca973cfcd9baa" "checksum file-per-thread-logger 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8505b75b31ef7285168dd237c4a7db3c1f3e0927e7d314e670bc98e854272fe9" -"checksum finality-grandpa 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9681c1f75941ea47584573dd2bc10558b2067d460612945887e00744e43393be" +"checksum finality-grandpa 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34754852da8d86bc509715292c73140a5b678656d0b16132acd6737bdb5fd5f8" "checksum fixed-hash 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6357b15872f8126e4ea7cf79d579473f132ccd2de239494ad1bf4aa892faea68" "checksum fixedbitset 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "86d4de0081402f5e88cdac65c8dcdcc73118c1a7a465e2a05f0da05843a8ea33" "checksum flate2 1.0.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ad3c5233c9a940c8719031b423d7e6c16af66e031cb0420b0896f5245bf181d3" @@ -7708,7 +7728,6 @@ dependencies = [ "checksum hash256-std-hasher 0.15.2 
(registry+https://github.com/rust-lang/crates.io-index)" = "92c171d55b98633f4ed3860808f004099b36c1cc29c42cfc53aa8591b21efcf2" "checksum hashbrown 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3bae29b6653b3412c2e71e9d486db9f9df5d701941d86683005efb9f2d28e3da" "checksum hashbrown 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6587d09be37fb98a11cb08b9000a3f592451c1b1b613ca69d949160e313a430a" -"checksum hashmap_core 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "2d6852e5a86250521973b0c1d39677166d8a9c0047c908d7e04f1aa04177973c" "checksum heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1679e6ea370dee694f91f1dc469bf94cf8f52051d147aec3e1f9497c6fc22461" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" diff --git a/Cargo.toml b/Cargo.toml index 42f78db493b50..846067ec1ec38 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -71,6 +71,7 @@ members = [ "srml/assets", "srml/aura", "srml/balances", + "srml/bridge", "srml/contracts", "srml/contracts/rpc", "srml/collective", diff --git a/core/finality-grandpa/Cargo.toml b/core/finality-grandpa/Cargo.toml index d9b7fd176d639..35677101566cc 100644 --- a/core/finality-grandpa/Cargo.toml +++ b/core/finality-grandpa/Cargo.toml @@ -26,10 +26,10 @@ inherents = { package = "substrate-inherents", path = "../../core/inherents" } network = { package = "substrate-network", path = "../network" } srml-finality-tracker = { path = "../../srml/finality-tracker" } fg_primitives = { package = "substrate-finality-grandpa-primitives", path = "primitives" } -grandpa = { package = "finality-grandpa", version = "0.9.0", features = ["derive-codec"] } +grandpa = { package = "finality-grandpa", version = "0.9.1", features = ["derive-codec"] } 
[dev-dependencies] -grandpa = { package = "finality-grandpa", version = "0.9.0", features = ["derive-codec", "test-helpers"] } +grandpa = { package = "finality-grandpa", version = "0.9.1", features = ["derive-codec", "test-helpers"] } network = { package = "substrate-network", path = "../network", features = ["test-helpers"] } keyring = { package = "substrate-keyring", path = "../keyring" } test-client = { package = "substrate-test-runtime-client", path = "../test-runtime/client"} diff --git a/srml/bridge/Cargo.toml b/srml/bridge/Cargo.toml new file mode 100644 index 0000000000000..c623144104a7e --- /dev/null +++ b/srml/bridge/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "srml-bridge" +version = "0.1.0" +authors = ["Parity Technologies "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +codec = { package = "parity-scale-codec", version = "1.0.0", default-features = false } +fg = { package = "substrate-finality-grandpa", path = "../../core/finality-grandpa/" } +fg_primitives = { package = "substrate-finality-grandpa-primitives", path ="../../core/finality-grandpa/primitives", default-features = false } +grandpa = { package = "finality-grandpa", version = "0.9.1", default-features = false, features = ["derive-codec"] } +hash-db = { version = "0.15.2", default-features = false } +primitives = { package = "substrate-primitives", path = "../../core/primitives", default-features = false } +num = { package = "num-traits", version = "0.2", default-features = false } +rstd = { package = "sr-std", path = "../../core/sr-std", default-features = false } +runtime-io = { package = "sr-io", path = "../../core/sr-io", default-features = false } +session = { package = "srml-session", path = "../session", default-features = false, features = ["historical"] } +serde = { version = "1.0", optional = true } +sr-primitives = { path = "../../core/sr-primitives", default-features = false } +support = 
{ package = "srml-support", path = "../support", default-features = false } +system = { package = "srml-system", path = "../system", default-features = false } +trie = { package = "substrate-trie", path = "../../core/trie", default-features = false } + +[dev-dependencies] +client = { package = "substrate-client", path = "../../core/client" } +keyring = { package = "substrate-keyring", path = "../../core/keyring" } +state-machine = { package = "substrate-state-machine", path = "../../core/state-machine" } +test-client = { package = "substrate-test-runtime-client", path = "../../core/test-runtime/client" } + +[features] +default = ["std"] +std = [ + "serde", + "codec/std", + "session/std", + "sr-primitives/std", + "support/std", + "system/std", + "trie/std", + "runtime-io/std", +] diff --git a/srml/bridge/src/justification.rs b/srml/bridge/src/justification.rs new file mode 100644 index 0000000000000..5c952ccc10b5e --- /dev/null +++ b/srml/bridge/src/justification.rs @@ -0,0 +1,316 @@ +// Copyright 2018-2019 Parity Technologies (UK) Ltd. +// This file is part of Substrate. + +// Substrate is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Substrate is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Substrate. If not, see . + +//! A module for creating a verifying the GRANDPA justifications. A justification +//! can be thought of as a finality proof. GRANDPA justifications consist +//! of a commit message plus an ancestry proof for pre-commits. 
+ +use codec::{Encode, Decode}; +use core::cmp::{Ord, Ordering}; +// TODO: Since I can't use types from `core/finality-grandpa`, wait until #3868 +// is merged as that'll move these into `primitives/finality-grandpa`. +use fg::{Commit, Message}; +use fg_primitives::{AuthorityId, RoundNumber, SetId as SetIdNumber, AuthoritySignature}; +use grandpa::voter_set::VoterSet; +use grandpa::{Error as GrandpaError}; + +// Might be able to get this from primitives re-export +use rstd::collections::{ + btree_map::BTreeMap, + btree_set::BTreeSet, +}; + +use sr_primitives::app_crypto::RuntimeAppPublic; +use sr_primitives::traits::{NumberFor, Block as BlockT, Header as HeaderT}; +use primitives::{H256}; + +#[cfg(test)] +use sr_primitives::generic::BlockId; +#[cfg(test)] +use primitives::Blake2Hasher; +#[cfg(test)] +use client::{CallExecutor, Client}; +#[cfg(test)] +use client::backend::Backend; +#[cfg(test)] +use client::error::Error as ClientError; + +/// A GRANDPA justification for block finality, it includes a commit message and +/// an ancestry proof including all headers routing all precommit target blocks +/// to the commit target block. Due to the current voting strategy the precommit +/// targets should be the same as the commit target, since honest voters don't +/// vote past authority set change blocks. +/// +/// This is meant to be stored in the db and passed around the network to other +/// nodes, and are used by syncing nodes to prove authority set handoffs. +#[derive(Encode, Decode)] +pub struct GrandpaJustification { + round: u64, + commit: Commit, + votes_ancestries: Vec, +} + +impl> GrandpaJustification { + /// Create a GRANDPA justification from the given commit. This method + /// assumes the commit is valid and well-formed. 
+ #[cfg(test)] + pub(crate) fn from_commit( + client: &Client, + round: u64, + commit: Commit, + ) -> Result, JustificationError> where + B: Backend, + E: CallExecutor + Send + Sync, + RA: Send + Sync, + { + let mut votes_ancestries_hashes = BTreeSet::new(); + let mut votes_ancestries = Vec::new(); + + let error = || { + Err(JustificationError::BadJustification) + }; + + for signed in commit.precommits.iter() { + let mut current_hash = signed.precommit.target_hash.clone(); + loop { + if current_hash == commit.target_hash { break; } + + match client.header(&BlockId::Hash(current_hash))? { + Some(current_header) => { + if *current_header.number() <= commit.target_number { + return error(); + } + + let parent_hash = current_header.parent_hash().clone(); + if votes_ancestries_hashes.insert(current_hash) { + votes_ancestries.push(current_header); + } + current_hash = parent_hash; + }, + _ => return error(), + } + } + } + + Ok(GrandpaJustification { round, commit, votes_ancestries }) + } + + /// Decode a GRANDPA justification and validate the commit and the votes' + /// ancestry proofs finalize the given block. + pub(crate) fn decode_and_verify_finalizes( + encoded: &[u8], + finalized_target: (Block::Hash, NumberFor), + set_id: u64, + voters: &VoterSet, + ) -> Result, JustificationError> where + NumberFor: grandpa::BlockNumberOps, + { + let justification = GrandpaJustification::::decode(&mut &*encoded) + .map_err(|_| JustificationError::JustificationDecode)?; + + if (justification.commit.target_hash, justification.commit.target_number) != finalized_target { + // let msg = "invalid commit target in grandpa justification".to_string(); + Err(JustificationError::BadJustification) + } else { + justification.verify(set_id, voters).map(|_| justification) + } + } + + /// Validate the commit and the votes' ancestry proofs. 
+ pub(crate) fn verify(&self, set_id: u64, voters: &VoterSet) -> Result<(), JustificationError> + where + NumberFor: grandpa::BlockNumberOps, + { + use grandpa::Chain; + + let ancestry_chain = AncestryChain::::new(&self.votes_ancestries); + + match grandpa::validate_commit( + &self.commit, + voters, + &ancestry_chain, + ) { + Ok(ref result) if result.ghost().is_some() => {}, + _ => { + return Err(JustificationError::BadJustification); + } + } + + let mut visited_hashes = BTreeSet::new(); + for signed in self.commit.precommits.iter() { + if let Err(_) = check_message_sig::( + &grandpa::Message::Precommit(signed.precommit.clone()), + &signed.id, + &signed.signature, + self.round, + set_id, + ) { + return Err(JustificationError::BadJustification) + } + + if self.commit.target_hash == signed.precommit.target_hash { + continue; + } + + match ancestry_chain.ancestry(self.commit.target_hash, signed.precommit.target_hash) { + Ok(route) => { + // ancestry starts from parent hash but the precommit target hash has been visited + visited_hashes.insert(signed.precommit.target_hash); + for hash in route { + visited_hashes.insert(hash); + } + }, + _ => { + return Err(JustificationError::BadJustification) + }, + } + } + + let ancestry_hashes = self.votes_ancestries + .iter() + .map(|h: &Block::Header| h.hash()) + .collect(); + + if visited_hashes != ancestry_hashes { + return Err(JustificationError::BadJustification) + } + + Ok(()) + } + + /// Get the current commit message from the GRANDPA justification + pub(crate) fn _get_commit(&self) -> Commit { + self.commit.clone() + } +} + +// Since keys in a `BTreeMap` need to implement `Ord` we can't use Block::Hash directly. 
+// Instead we'll use a wrapper which implements `Ord` by leveraging the fact that +// `Block::Hash` implements `AsRef`, which itself implements `Ord` +#[derive(Eq, PartialEq)] +struct BlockHashKey(Block::Hash); + +impl BlockHashKey { + fn new(hash: Block::Hash) -> Self { + Self(hash) + } +} + +impl Ord for BlockHashKey { + fn cmp(&self, other: &Self) -> Ordering { + self.0.as_ref().cmp(other.0.as_ref()) + } +} + +impl PartialOrd for BlockHashKey { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.0.as_ref().cmp(other.0.as_ref())) + } +} + +/// A utility trait implementing `grandpa::Chain` using a given set of headers. +/// This is useful when validating commits, using the given set of headers to +/// verify a valid ancestry route to the target commit block. +struct AncestryChain { + ancestry: BTreeMap, Block::Header>, +} + +impl AncestryChain { + fn new(ancestry: &[Block::Header]) -> AncestryChain { + let ancestry: BTreeMap<_, _> = ancestry + .iter() + .cloned() + .map(|h: Block::Header| (BlockHashKey::new(h.hash()), h)) + .collect(); + + AncestryChain { ancestry } + } +} + +impl grandpa::Chain> for AncestryChain where + NumberFor: grandpa::BlockNumberOps +{ + fn ancestry(&self, base: Block::Hash, block: Block::Hash) -> Result, GrandpaError> { + let mut route = Vec::new(); + let mut current_hash = block; + loop { + if current_hash == base { break; } + + let key = BlockHashKey::new(current_hash); + match self.ancestry.get(&key) { + Some(current_header) => { + current_hash = *current_header.parent_hash(); + route.push(current_hash); + }, + _ => return Err(GrandpaError::NotDescendent), + } + } + route.pop(); // remove the base + + Ok(route) + } + + fn best_chain_containing(&self, _block: Block::Hash) -> Option<(Block::Hash, NumberFor)> { + None + } +} + +#[cfg(not(test))] +fn localized_payload(round: RoundNumber, set_id: SetIdNumber, message: &E) -> Vec { + (message, round, set_id).encode() +} + +#[cfg(test)] +pub(crate) fn localized_payload(round: 
RoundNumber, set_id: SetIdNumber, message: &E) -> Vec { + (message, round, set_id).encode() +} + +// Check the signature of a Grandpa message. +// This was originally taken from `communication/mod.rs` +fn check_message_sig( + message: &Message, + id: &AuthorityId, + signature: &AuthoritySignature, + round: RoundNumber, + set_id: SetIdNumber, +) -> Result<(), ()> { + let as_public = id.clone(); + let encoded_raw = localized_payload(round, set_id, message); + + if as_public.verify(&encoded_raw, signature) { + Ok(()) + } else { + // debug!(target: "afg", "Bad signature on message from {:?}", id); + Err(()) + } +} + +#[cfg_attr(test, derive(Debug))] +pub(crate) enum JustificationError { + BadJustification, + JustificationDecode, +} + +#[cfg(test)] +impl From for JustificationError { + fn from(e: ClientError) -> Self { + match e { + ClientError::BadJustification(_) => JustificationError::BadJustification, + ClientError::JustificationDecode => JustificationError::JustificationDecode, + _ => unreachable!(), + } + } +} diff --git a/srml/bridge/src/lib.rs b/srml/bridge/src/lib.rs new file mode 100644 index 0000000000000..5baecef6f2219 --- /dev/null +++ b/srml/bridge/src/lib.rs @@ -0,0 +1,700 @@ +// Copyright 2017-2019 Parity Technologies (UK) Ltd. +// This file is part of Substrate. + +// Substrate is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Substrate is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Substrate. If not, see . + +//! # Bridge Module +//! +//! This will eventually have some useful documentation. 
+//! For now though, enjoy this cow's wisdom. +//! +//!```ignore +//!________________________________________ +//! / You are only young once, but you can \ +//! \ stay immature indefinitely. / +//! ---------------------------------------- +//! \ ^__^ +//! \ (oo)\_______ +//! (__)\ )\/\ +//! ||----w | +//! || || +//!``` + +// Ensure we're `no_std` when compiling for Wasm. +#![cfg_attr(not(feature = "std"), no_std)] + +mod storage_proof; +mod justification; + +use crate::justification::{GrandpaJustification, JustificationError}; +use crate::storage_proof::{StorageProof, StorageProofChecker}; + +use core::iter::FromIterator; +use codec::{Encode, Decode}; +use fg_primitives::{AuthorityId, AuthorityWeight, AuthorityList, SetId}; +use grandpa::voter_set::VoterSet; +use primitives::H256; +use num::AsPrimitive; +use sr_primitives::Justification; +use sr_primitives::traits::{Block as BlockT, Header, NumberFor}; +use support::{ + decl_error, decl_module, decl_storage, +}; +use system::{ensure_signed}; + +#[derive(Encode, Decode, Clone, PartialEq)] +#[cfg_attr(feature = "std", derive(Debug))] +pub struct BridgeInfo { + last_finalized_block_header: T::Header, + current_validator_set: AuthorityList, +} + +impl BridgeInfo { + pub fn new( + block_header: T::Header, + validator_set: AuthorityList, + ) -> Self + { + BridgeInfo { + last_finalized_block_header: block_header, + current_validator_set: validator_set, + } + } +} + +type BridgeId = u64; + +pub trait Trait: system::Trait { + type Block: BlockT; +} + +decl_storage! { + trait Store for Module as Bridge + where + NumberFor: AsPrimitive + { + /// The number of current bridges managed by the module. + pub NumBridges get(num_bridges) config(): BridgeId; + + /// Maps a bridge id to a bridge struct. Allows a single + /// `bridge` module to manage multiple bridges. + pub TrackedBridges get(tracked_bridges): map BridgeId => Option>; + } +} + +decl_module! 
{ + pub struct Module for enum Call + where + origin: T::Origin, + NumberFor: AsPrimitive + { + fn initialize_bridge( + origin, + block_header: T::Header, + validator_set: AuthorityList, + validator_set_proof: StorageProof, + ) { + // NOTE: Will want to make this a governance issued call + let _sender = ensure_signed(origin)?; + + let state_root = block_header.state_root(); + + Self::check_validator_set_proof(state_root, validator_set_proof, &validator_set)?; + + let bridge_info = BridgeInfo::new(block_header, validator_set); + + let new_bridge_id = NumBridges::get() + 1; + >::insert(new_bridge_id, bridge_info); + + NumBridges::put(new_bridge_id); + } + + fn submit_finalized_headers( + origin, + bridge_id: BridgeId, + header: T::Header, + ancestry_proof: Vec, + validator_set: AuthorityList, + validator_set_id: SetId, + grandpa_proof: Justification, + ) { + let _sender = ensure_signed(origin)?; + + // Check that the bridge exists + let bridge = >::get(bridge_id).ok_or(Error::NoSuchBridgeExists)?; + + // Check that the new header is a descendant of the old header + let last_header = bridge.last_finalized_block_header; + verify_ancestry(ancestry_proof, last_header.hash(), &header)?; + + let block_hash = header.hash(); + let block_num = *header.number(); + + // Check that the header has been finalized + let voter_set = VoterSet::from_iter(validator_set); + verify_grandpa_proof::( + grandpa_proof, + block_hash, + block_num, + validator_set_id, + &voter_set, + )?; + + // Update storage with current bridge's latest finalized header + >::mutate(bridge_id, |bridge| { + bridge + .as_mut() + .expect( + "We succesfully got this bridge earlier, therefore it exists; qed" + ).last_finalized_block_header = header; + }); + + // TODO: Update validator set if necessary. Still need to figure out details. + } + } +} + +decl_error!
{ + // Error for the Bridge module + pub enum Error { + InvalidStorageProof, + StorageRootMismatch, + StorageValueUnavailable, + InvalidValidatorSetProof, + ValidatorSetMismatch, + InvalidAncestryProof, + NoSuchBridgeExists, + InvalidFinalityProof, + UnknownClientError, + } +} + +impl From for Error { + fn from(e: JustificationError) -> Self { + match e { + JustificationError::BadJustification | JustificationError::JustificationDecode => { + Error::InvalidFinalityProof + }, + } + } +} + +impl Module + where + NumberFor: AsPrimitive +{ + fn check_validator_set_proof( + state_root: &T::Hash, + proof: StorageProof, + validator_set: &Vec<(AuthorityId, AuthorityWeight)>, + ) -> Result<(), Error> { + + let checker = ::Hasher>>::new( + *state_root, + proof.clone() + )?; + + // By encoding the given set we should have an easy way to compare + // with the stuff we get out of storage via `read_value` + let encoded_validator_set = validator_set.encode(); + let actual_validator_set = checker + .read_value(b":grandpa_authorities")? + .ok_or(Error::StorageValueUnavailable)?; + + if encoded_validator_set == actual_validator_set { + Ok(()) + } else { + Err(Error::ValidatorSetMismatch) + } + } +} + +// A naive way to check whether a `child` header is a descendant +// of an `ancestor` header. For this it requires a proof which +is a chain of headers between (but not including) the `child` +and `ancestor`. This could be updated to use something like +Log2 Ancestors (#2053) in the future.
+fn verify_ancestry(proof: Vec, ancestor_hash: H::Hash, child: &H) -> Result<(), Error> +where + H: Header +{ + let mut parent_hash = child.parent_hash(); + + // If we find that the header's parent hash matches our ancestor's hash we're done + for header in proof.iter() { + // Need to check that blocks are actually related + if header.hash() != *parent_hash { + break; + } + + parent_hash = header.parent_hash(); + if *parent_hash == ancestor_hash { + return Ok(()) + } + } + + Err(Error::InvalidAncestryProof) +} + +fn verify_grandpa_proof( + justification: Justification, + hash: B::Hash, + number: NumberFor, + set_id: u64, + voters: &VoterSet, +) -> Result<(), Error> +where + B: BlockT, + NumberFor: grandpa::BlockNumberOps, +{ + // We don't really care about the justification, as long as it's valid + let _ = GrandpaJustification::::decode_and_verify_finalizes( + &justification, + (hash, number), + set_id, + voters, + )?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + use fg_primitives::RoundNumber; + use keyring::Ed25519Keyring; + use primitives::{Blake2Hasher, H256, Public}; + use sr_primitives::{ + Perbill, + traits::{ + Block as BlockT, + Header as HeaderT, + IdentityLookup + }, + testing::{Block, Header, TestXt}, + generic::Digest, + }; + use support::{assert_ok, assert_err, impl_outer_origin, parameter_types}; + + impl_outer_origin! { + pub enum Origin for Test {} + } + + #[derive(Clone, PartialEq, Eq, Debug)] + pub struct Test; + + type _System = system::Module; + type MockBridge = Module; + + // TODO: Figure out what I actually need from here + parameter_types! 
{ + pub const BlockHashCount: u64 = 250; + pub const MaximumBlockWeight: u32 = 1024; + pub const MaximumBlockLength: u32 = 2 * 1024; + pub const MinimumPeriod: u64 = 5; + pub const AvailableBlockRatio: Perbill = Perbill::one(); + } + + type DummyAuthorityId = u64; + + impl system::Trait for Test { + type Origin = Origin; + type Index = u64; + type BlockNumber = u64; + type Call = (); + type Hash = H256; + type Hashing = sr_primitives::traits::BlakeTwo256; + type AccountId = DummyAuthorityId; + type Lookup = IdentityLookup; + type Header = Header; + type Event = (); + type BlockHashCount = (); + type MaximumBlockWeight = (); + type AvailableBlockRatio = (); + type MaximumBlockLength = (); + type Version = (); + } + + impl Trait for Test { + type Block = Block>; + } + + fn new_test_ext() -> runtime_io::TestExternalities { + let mut t = system::GenesisConfig::default().build_storage::().unwrap(); + GenesisConfig { + num_bridges: 0, + }.assimilate_storage(&mut t).unwrap(); + t.into() + } + + #[test] + fn it_works_for_default_value() { + new_test_ext().execute_with(|| { + assert_eq!(MockBridge::num_bridges(), 0); + }); + } + + fn get_dummy_authorities() -> AuthorityList { + let authority1 = (AuthorityId::from_slice(&[1; 32]), 1); + let authority2 = (AuthorityId::from_slice(&[2; 32]), 1); + let authority3 = (AuthorityId::from_slice(&[3; 32]), 1); + + vec![authority1, authority2, authority3] + } + + fn create_dummy_validator_proof(validator_set: Vec<(AuthorityId, AuthorityWeight)>) -> (H256, StorageProof) { + use state_machine::{prove_read, backend::{Backend, InMemory}}; + + let encoded_set = validator_set.encode(); + + // construct storage proof + let backend = >::from(vec![ + (None, b":grandpa_authorities".to_vec(), Some(encoded_set)), + ]); + let root = backend.storage_root(std::iter::empty()).0; + + // Generates a storage read proof + let proof: StorageProof = prove_read(backend, &[&b":grandpa_authorities"[..]]) + .unwrap() + .iter_nodes() + .collect(); + + (root, 
proof) + } + + #[test] + fn it_can_validate_validator_sets() { + let authorities = get_dummy_authorities(); + let (root, proof) = create_dummy_validator_proof(authorities.clone()); + + assert_ok!(MockBridge::check_validator_set_proof(&root, proof, &authorities)); + } + + #[test] + fn it_rejects_invalid_validator_sets() { + let mut authorities = get_dummy_authorities(); + let (root, proof) = create_dummy_validator_proof(authorities.clone()); + + // Do something to make the authority set invalid + authorities.reverse(); + let invalid_authorities = authorities; + + assert_err!( + MockBridge::check_validator_set_proof(&root, proof, &invalid_authorities), + Error::ValidatorSetMismatch + ); + } + + #[test] + fn it_creates_a_new_bridge() { + let authorities = get_dummy_authorities(); + let (root, proof) = create_dummy_validator_proof(authorities.clone()); + + let test_header = Header { + parent_hash: H256::default(), + number: 42, + state_root: root, + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + new_test_ext().execute_with(|| { + assert_eq!(MockBridge::num_bridges(), 0); + assert_ok!( + MockBridge::initialize_bridge( + Origin::signed(1), + test_header.clone(), + authorities.clone(), + proof, + )); + + assert_eq!( + MockBridge::tracked_bridges(1), + Some(BridgeInfo { + last_finalized_block_header: test_header, + current_validator_set: authorities, + })); + + assert_eq!(MockBridge::num_bridges(), 1); + }); + } + + fn build_header_chain(root_header: Header, len: usize) -> Vec
{ + let mut header_chain = vec![root_header]; + for i in 1..len { + let parent = &header_chain[i - 1]; + + let h = Header { + parent_hash: parent.hash(), + number: parent.number() + 1, + state_root: H256::default(), + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + header_chain.push(h); + } + + // We want our proofs to go from newest to older headers + header_chain.reverse(); + // We don't actually need the oldest header in the proof + header_chain.pop(); + header_chain + } + + #[test] + fn check_that_child_is_ancestor_of_grandparent() { + let ancestor = Header { + parent_hash: H256::default(), + number: 1, + state_root: H256::default(), + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + // A valid proof doesn't include the child header, so remove it + let mut proof = build_header_chain(ancestor.clone(), 10); + let child = proof.remove(0); + + assert_ok!(verify_ancestry(proof, ancestor.hash(), &child)); + } + + #[test] + fn fake_ancestor_is_not_found_in_child_ancestry() { + let ancestor = Header { + parent_hash: H256::default(), + number: 1, + state_root: H256::default(), + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + // A valid proof doesn't include the child header, so remove it + let mut proof = build_header_chain(ancestor, 10); + let child = proof.remove(0); + + let fake_ancestor = Header { + parent_hash: H256::from_slice(&[1u8; 32]), + number: 42, + state_root: H256::default(), + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + assert_err!( + verify_ancestry(proof, fake_ancestor.hash(), &child), + Error::InvalidAncestryProof + ); + } + + #[test] + fn checker_fails_if_given_an_unrelated_header() { + let ancestor = Header { + parent_hash: H256::default(), + number: 1, + state_root: H256::default(), + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + // A valid proof doesn't include the child header, so remove it + let mut invalid_proof = 
build_header_chain(ancestor.clone(), 10); + let child = invalid_proof.remove(0); + + let fake_ancestor = Header { + parent_hash: H256::from_slice(&[1u8; 32]), + number: 42, + state_root: H256::default(), + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + invalid_proof.insert(5, fake_ancestor); + + assert_err!( + verify_ancestry(invalid_proof, ancestor.hash(), &child), + Error::InvalidAncestryProof + ); + } + + // Currently stealing this from `core/finality-grandpa/src/test.rs` + fn create_grandpa_justification( + authority: Ed25519Keyring, + header: Header, + round: RoundNumber, + set_id: SetId, + ) -> GrandpaJustification { + let client = test_client::new(); + + let justification = { + let precommit = grandpa::Precommit { + target_hash: header.hash(), + target_number: *header.number(), + }; + + let msg = grandpa::Message::Precommit(precommit.clone()); + let encoded = justification::localized_payload(round, set_id, &msg); + let signature = authority.sign(&encoded[..]).into(); + + let precommit = grandpa::SignedPrecommit { + precommit, + signature, + id: authority.public().into(), + }; + + let commit = grandpa::Commit { + target_hash: header.hash(), + target_number: *header.number(), + precommits: vec![precommit], + }; + + GrandpaJustification::from_commit( + &client, + round, + commit, + ).unwrap() + }; + + justification + } + + #[test] + fn correctly_accepts_a_new_finalized_header() { + let signer = Ed25519Keyring::Alice; + let authorities = vec![(signer.public().into(), 1)]; + let (storage_root, validator_proof) = create_dummy_validator_proof(authorities.clone()); + + let ancestor = Header { + parent_hash: H256::default(), + number: 1, + state_root: storage_root, + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + // A valid proof doesn't include the child header, so remove it + let mut block_ancestry_proof = build_header_chain(ancestor.clone(), 5); + let child = block_ancestry_proof.remove(0); + + let round = 1; + let 
set_id = 0; + let justification = create_grandpa_justification(signer, child.clone(), round, set_id); + let encoded: Justification = justification.encode(); + + new_test_ext().execute_with(|| { + assert_eq!(MockBridge::num_bridges(), 0); + assert_ok!( + MockBridge::initialize_bridge( + Origin::signed(1), + ancestor.clone(), + authorities.clone(), + validator_proof, + )); + + // Check that the header we sent on initialization was stored + assert_eq!( + MockBridge::tracked_bridges(1), + Some(BridgeInfo { + last_finalized_block_header: ancestor.clone(), + current_validator_set: authorities.clone(), + }) + ); + + // Send over the new header + proofs + let bridge_id = 1; + assert_ok!(MockBridge::submit_finalized_headers( + Origin::signed(1), + bridge_id, + child.clone(), + block_ancestry_proof, + authorities.clone(), + set_id, + encoded, + )); + + // Check that the header was correctly updated + assert_eq!( + MockBridge::tracked_bridges(1), + Some(BridgeInfo { + last_finalized_block_header: child, + current_validator_set: authorities, + }) + ); + }); + } + + #[test] + fn rejects_header_if_proof_is_signed_by_wrong_authorities() { + let signer = Ed25519Keyring::Alice; + let bad_signer = Ed25519Keyring::Bob; + let authorities = vec![(signer.public().into(), 1)]; + + let (storage_root, validator_proof) = create_dummy_validator_proof(authorities.clone()); + + let ancestor = Header { + parent_hash: H256::default(), + number: 1, + state_root: storage_root, + extrinsics_root: H256::default(), + digest: Digest::default(), + }; + + // A valid proof doesn't include the child header, so remove it + let mut block_ancestry_proof = build_header_chain(ancestor.clone(), 5); + let child = block_ancestry_proof.remove(0); + + let round = 1; + let set_id = 0; + + // Create a justification with an authority that's *not* part of the authority set + let justification = create_grandpa_justification(bad_signer, child.clone(), round, set_id); + let encoded: Justification = justification.encode(); 
+ + new_test_ext().execute_with(|| { + assert_eq!(MockBridge::num_bridges(), 0); + assert_ok!( + MockBridge::initialize_bridge( + Origin::signed(1), + ancestor.clone(), + authorities.clone(), + validator_proof, + )); + + // Check that the header we sent on initialization was stored + assert_eq!( + MockBridge::tracked_bridges(1), + Some(BridgeInfo { + last_finalized_block_header: ancestor.clone(), + current_validator_set: authorities.clone(), + }) + ); + + // Send over the new header + proofs + let bridge_id = 1; + assert_err!( + MockBridge::submit_finalized_headers( + Origin::signed(1), + bridge_id, + child.clone(), + block_ancestry_proof, + authorities.clone(), + set_id, + encoded, + ), + Error::InvalidFinalityProof.into() + ); + }); + } +} diff --git a/srml/bridge/src/storage_proof.rs b/srml/bridge/src/storage_proof.rs new file mode 100644 index 0000000000000..b9a9baa9c36de --- /dev/null +++ b/srml/bridge/src/storage_proof.rs @@ -0,0 +1,107 @@ +// Copyright 2017-2019 Parity Technologies (UK) Ltd. +// This file is part of Substrate. + +// Substrate is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Substrate is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Substrate. If not, see <http://www.gnu.org/licenses/>. + +//! Logic for checking Substrate storage proofs. + +use hash_db::{Hasher, HashDB, EMPTY_PREFIX}; +use trie::{MemoryDB, Trie, trie_types::TrieDB}; + +use crate::Error; + +pub(crate) type StorageProof = Vec<Vec<u8>>; + +/// This struct is used to read storage values from a subset of a Merklized database.
The "proof" +/// is a subset of the nodes in the Merkle structure of the database, so that it provides +/// authentication against a known Merkle root as well as the values in the database themselves. +pub struct StorageProofChecker<H> + where H: Hasher +{ + root: H::Out, + db: MemoryDB<H>, +} + +impl<H> StorageProofChecker<H> + where H: Hasher +{ + /// Constructs a new storage proof checker. + /// + /// This returns an error if the given proof is invalid with respect to the given root. + pub fn new(root: H::Out, proof: StorageProof) -> Result<Self, Error> { + let mut db = MemoryDB::default(); + for item in proof { + db.insert(EMPTY_PREFIX, &item); + } + let checker = StorageProofChecker { + root, + db, + }; + // Return error if trie would be invalid. + let _ = checker.trie()?; + Ok(checker) + } + + /// Reads a value from the available subset of storage. If the value cannot be read due to an + /// incomplete or otherwise invalid proof, this returns an error. + pub fn read_value(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Error> { + self.trie()?
 + .get(key) + .map(|value| value.map(|value| value.into_vec())) + .map_err(|_| Error::StorageValueUnavailable) + } + + fn trie(&self) -> Result<TrieDB<H>, Error> { + TrieDB::new(&self.db, &self.root) + .map_err(|_| Error::StorageRootMismatch) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use primitives::{Blake2Hasher, H256}; + use state_machine::{prove_read, backend::{Backend, InMemory}}; + + #[test] + fn storage_proof_check() { + // construct storage proof + let backend = <InMemory<Blake2Hasher>>::from(vec![ + (None, b"key1".to_vec(), Some(b"value1".to_vec())), + (None, b"key2".to_vec(), Some(b"value2".to_vec())), + (None, b"key3".to_vec(), Some(b"value3".to_vec())), + // Value is too big to fit in a branch node + (None, b"key11".to_vec(), Some(vec![0u8; 32])), + ]); + let root = backend.storage_root(std::iter::empty()).0; + let proof: StorageProof = prove_read(backend, &[&b"key1"[..], &b"key2"[..], &b"key22"[..]]) + .unwrap() + .iter_nodes() + .collect(); + + // check proof in runtime + let checker = <StorageProofChecker<Blake2Hasher>>::new(root, proof.clone()).unwrap(); + assert_eq!(checker.read_value(b"key1"), Ok(Some(b"value1".to_vec()))); + assert_eq!(checker.read_value(b"key2"), Ok(Some(b"value2".to_vec()))); + assert_eq!(checker.read_value(b"key11111"), Err(Error::StorageValueUnavailable)); + assert_eq!(checker.read_value(b"key22"), Ok(None)); + + // checking proof against invalid commitment fails + assert_eq!( + <StorageProofChecker<Blake2Hasher>>::new(H256::random(), proof).err(), + Some(Error::StorageRootMismatch) + ); + } +}