From 598d74bc067ab4532ee008e339f48278d3bf568b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Drwi=C4=99ga?= Date: Wed, 15 Sep 2021 10:07:06 +0200 Subject: [PATCH 01/33] Emit log on Runtime Code change. (#9580) * Emit digest item on Runtime Code changes. * Add tests. * cargo +nightly fmt --all * Rename. * Add comment. * Move generic parameter to the trait. * cargo +nightly fmt --all * Elaborate in doc. * Revert to RuntimeUpdated name * cargo +nightly fmt --all * Rename to RuntimeEnvironmentUpdated --- frame/system/src/lib.rs | 37 +++++++++++++++++------- frame/system/src/tests.rs | 22 ++++++++++++++ primitives/runtime/src/generic/digest.rs | 16 ++++++++++ 3 files changed, 65 insertions(+), 10 deletions(-) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 7b6ec9856d9f4..3d89e09a25a7e 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -138,14 +138,14 @@ pub type ConsumedWeight = PerDispatchClass; pub use pallet::*; /// Do something when we should be setting the code. -pub trait SetCode { +pub trait SetCode { /// Set the code to the given blob. fn set_code(code: Vec) -> DispatchResult; } -impl SetCode for () { +impl SetCode for () { fn set_code(code: Vec) -> DispatchResult { - storage::unhashed::put_raw(well_known_keys::CODE, &code); + >::update_code_in_storage(&code)?; Ok(()) } } @@ -296,9 +296,13 @@ pub mod pallet { #[pallet::constant] type SS58Prefix: Get; - /// What to do if the user wants the code set to something. Just use `()` unless you are in - /// cumulus. - type OnSetCode: SetCode; + /// What to do if the runtime wants to change the code to something new. + /// + /// The default (`()`) implementation is responsible for setting the correct storage + /// entry and emitting corresponding event and log item. (see [`update_code_in_storage`]). + /// It's unlikely that this needs to be customized, unless you are writing a parachain using + /// `Cumulus`, where the actual code change is deferred. 
+ type OnSetCode: SetCode; } #[pallet::pallet] @@ -350,11 +354,13 @@ pub mod pallet { /// - 1 storage write. /// - Base Weight: 1.405 µs /// - 1 write to HEAP_PAGES + /// - 1 digest item /// # #[pallet::weight((T::SystemWeightInfo::set_heap_pages(), DispatchClass::Operational))] pub fn set_heap_pages(origin: OriginFor, pages: u64) -> DispatchResultWithPostInfo { ensure_root(origin)?; storage::unhashed::put_raw(well_known_keys::HEAP_PAGES, &pages.encode()); + Self::deposit_log(generic::DigestItem::RuntimeEnvironmentUpdated); Ok(().into()) } @@ -362,9 +368,10 @@ pub mod pallet { /// /// # /// - `O(C + S)` where `C` length of `code` and `S` complexity of `can_set_code` - /// - 1 storage write (codec `O(C)`). /// - 1 call to `can_set_code`: `O(S)` (calls `sp_io::misc::runtime_version` which is /// expensive). + /// - 1 storage write (codec `O(C)`). + /// - 1 digest item. /// - 1 event. /// The weight of this function is dependent on the runtime, but generally this is very /// expensive. We will treat this as a full block. @@ -373,9 +380,7 @@ pub mod pallet { pub fn set_code(origin: OriginFor, code: Vec) -> DispatchResultWithPostInfo { ensure_root(origin)?; Self::can_set_code(&code)?; - T::OnSetCode::set_code(code)?; - Self::deposit_event(Event::CodeUpdated); Ok(().into()) } @@ -384,6 +389,7 @@ pub mod pallet { /// # /// - `O(C)` where `C` length of `code` /// - 1 storage write (codec `O(C)`). + /// - 1 digest item. /// - 1 event. /// The weight of this function is dependent on the runtime. We will treat this as a full /// block. # @@ -394,7 +400,6 @@ pub mod pallet { ) -> DispatchResultWithPostInfo { ensure_root(origin)?; T::OnSetCode::set_code(code)?; - Self::deposit_event(Event::CodeUpdated); Ok(().into()) } @@ -1071,6 +1076,18 @@ impl Pallet { Account::::contains_key(who) } + /// Write code to the storage and emit related events and digest items. + /// + /// Note this function almost never should be used directly. 
It is exposed + /// for `OnSetCode` implementations that defer actual code being written to + /// the storage (for instance in case of parachains). + pub fn update_code_in_storage(code: &[u8]) -> DispatchResult { + storage::unhashed::put_raw(well_known_keys::CODE, code); + Self::deposit_log(generic::DigestItem::RuntimeEnvironmentUpdated); + Self::deposit_event(Event::CodeUpdated); + Ok(()) + } + /// Increment the reference counter on an account. #[deprecated = "Use `inc_consumers` instead"] pub fn inc_ref(who: &T::AccountId) { diff --git a/frame/system/src/tests.rs b/frame/system/src/tests.rs index f0a6a96ccc1e3..a4dd3403f2c3a 100644 --- a/frame/system/src/tests.rs +++ b/frame/system/src/tests.rs @@ -390,11 +390,24 @@ fn set_code_checks_works() { ext.execute_with(|| { let res = System::set_code(RawOrigin::Root.into(), vec![1, 2, 3, 4]); + assert_runtime_updated_digest(if res.is_ok() { 1 } else { 0 }); assert_eq!(expected.map_err(DispatchErrorWithPostInfo::from), res); }); } } +fn assert_runtime_updated_digest(num: usize) { + assert_eq!( + System::digest() + .logs + .into_iter() + .filter(|item| *item == generic::DigestItem::RuntimeEnvironmentUpdated) + .count(), + num, + "Incorrect number of Runtime Updated digest items", + ); +} + #[test] fn set_code_with_real_wasm_blob() { let executor = substrate_test_runtime_client::new_native_executor(); @@ -478,3 +491,12 @@ fn extrinsics_root_is_calculated_correctly() { assert_eq!(ext_root, *header.extrinsics_root()); }); } + +#[test] +fn runtime_updated_digest_emitted_when_heap_pages_changed() { + new_test_ext().execute_with(|| { + System::initialize(&1, &[0u8; 32].into(), &Default::default(), InitKind::Full); + System::set_heap_pages(RawOrigin::Root.into(), 5).unwrap(); + assert_runtime_updated_digest(1); + }); +} diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 390acb87f690d..99d27ad5826c4 100644 --- a/primitives/runtime/src/generic/digest.rs +++ 
b/primitives/runtime/src/generic/digest.rs @@ -118,6 +118,14 @@ pub enum DigestItem { /// Some other thing. Unsupported and experimental. Other(Vec), + + /// An indication for the light clients that the runtime execution + /// environment is updated. + /// + /// Currently this is triggered when: + /// 1. Runtime code blob is changed or + /// 2. `heap_pages` value is changed. + RuntimeEnvironmentUpdated, } /// Available changes trie signals. @@ -184,6 +192,8 @@ pub enum DigestItemRef<'a, Hash: 'a> { ChangesTrieSignal(&'a ChangesTrieSignal), /// Any 'non-system' digest item, opaque to the native code. Other(&'a Vec), + /// Runtime code or heap pages updated. + RuntimeEnvironmentUpdated, } /// Type of the digest item. Used to gain explicit control over `DigestItem` encoding @@ -199,6 +209,7 @@ pub enum DigestItemType { Seal = 5, PreRuntime = 6, ChangesTrieSignal = 7, + RuntimeEnvironmentUpdated = 8, } /// Type of a digest item that contains raw data; this also names the consensus engine ID where @@ -225,6 +236,7 @@ impl DigestItem { Self::Seal(ref v, ref s) => DigestItemRef::Seal(v, s), Self::ChangesTrieSignal(ref s) => DigestItemRef::ChangesTrieSignal(s), Self::Other(ref v) => DigestItemRef::Other(v), + Self::RuntimeEnvironmentUpdated => DigestItemRef::RuntimeEnvironmentUpdated, } } @@ -322,6 +334,7 @@ impl Decode for DigestItem { DigestItemType::ChangesTrieSignal => Ok(Self::ChangesTrieSignal(Decode::decode(input)?)), DigestItemType::Other => Ok(Self::Other(Decode::decode(input)?)), + DigestItemType::RuntimeEnvironmentUpdated => Ok(Self::RuntimeEnvironmentUpdated), } } } @@ -457,6 +470,9 @@ impl<'a, Hash: Encode> Encode for DigestItemRef<'a, Hash> { DigestItemType::Other.encode_to(&mut v); val.encode_to(&mut v); }, + Self::RuntimeEnvironmentUpdated => { + DigestItemType::RuntimeEnvironmentUpdated.encode_to(&mut v); + }, } v From e1ddbb3ffec3c8f05de6cb05d9f5f7a922788c3f Mon Sep 17 00:00:00 2001 From: Qinxuan Chen Date: Wed, 15 Sep 2021 17:24:13 +0800 Subject: [PATCH 
02/33] pallet-utility: use new pallet attribute macro for tests (#9780) Signed-off-by: koushiro --- frame/utility/src/tests.rs | 61 ++++++++++++++++++++++---------------- 1 file changed, 35 insertions(+), 26 deletions(-) diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index 2731b6ca0b8be..0a780550f3556 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -23,7 +23,7 @@ use super::*; use crate as utility; use frame_support::{ - assert_err_ignore_postinfo, assert_noop, assert_ok, decl_module, + assert_err_ignore_postinfo, assert_noop, assert_ok, dispatch::{DispatchError, DispatchErrorWithPostInfo, Dispatchable}, parameter_types, storage, traits::Contains, @@ -36,39 +36,48 @@ use sp_runtime::{ }; // example module to test behaviors. +#[frame_support::pallet] pub mod example { use super::*; - use frame_support::dispatch::{DispatchResultWithPostInfo, WithPostDispatchInfo}; - use frame_system::ensure_signed; + use frame_support::{dispatch::WithPostDispatchInfo, pallet_prelude::*}; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + pub struct Pallet(_); + + #[pallet::config] pub trait Config: frame_system::Config {} - decl_module! { - pub struct Module for enum Call where origin: ::Origin { - #[weight = *_weight] - fn noop(_origin, _weight: Weight) { } - - #[weight = *_start_weight] - fn foobar( - origin, - err: bool, - _start_weight: Weight, - end_weight: Option, - ) -> DispatchResultWithPostInfo { - let _ = ensure_signed(origin)?; - if err { - let error: DispatchError = "The cake is a lie.".into(); - if let Some(weight) = end_weight { - Err(error.with_weight(weight)) - } else { - Err(error)? 
- } + #[pallet::call] + impl Pallet { + #[pallet::weight(*_weight)] + pub fn noop(_origin: OriginFor, _weight: Weight) -> DispatchResult { + Ok(()) + } + + #[pallet::weight(*_start_weight)] + pub fn foobar( + origin: OriginFor, + err: bool, + _start_weight: Weight, + end_weight: Option, + ) -> DispatchResultWithPostInfo { + let _ = ensure_signed(origin)?; + if err { + let error: DispatchError = "The cake is a lie.".into(); + if let Some(weight) = end_weight { + Err(error.with_weight(weight)) } else { - Ok(end_weight.into()) + Err(error)? } + } else { + Ok(end_weight.into()) } + } - #[weight = 0] - fn big_variant(_origin, _arg: [u8; 400]) {} + #[pallet::weight(0)] + pub fn big_variant(_origin: OriginFor, _arg: [u8; 400]) -> DispatchResult { + Ok(()) } } } From eb4de697ccd45948254db422abbcd735ae3c295a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Sep 2021 10:20:07 +0000 Subject: [PATCH 03/33] Bump serde_json from 1.0.64 to 1.0.68 (#9783) Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.64 to 1.0.68. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.64...v1.0.68) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- bin/node/bench/Cargo.toml | 2 +- client/chain-spec/Cargo.toml | 2 +- client/cli/Cargo.toml | 2 +- client/consensus/babe/rpc/Cargo.toml | 2 +- client/finality-grandpa/Cargo.toml | 2 +- client/keystore/Cargo.toml | 2 +- client/network/Cargo.toml | 2 +- client/peerset/Cargo.toml | 2 +- client/rpc-api/Cargo.toml | 2 +- client/rpc-servers/Cargo.toml | 2 +- client/rpc/Cargo.toml | 2 +- client/service/Cargo.toml | 2 +- client/sync-state-rpc/Cargo.toml | 2 +- client/telemetry/Cargo.toml | 2 +- frame/merkle-mountain-range/rpc/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- primitives/rpc/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/serializer/Cargo.toml | 2 +- primitives/tracing/Cargo.toml | 2 +- test-utils/client/Cargo.toml | 2 +- 22 files changed, 23 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f1f3b7e00c65a..ff21ec8dd0109 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8490,9 +8490,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.64" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799e97dc9fdae36a5c8b8f2cae9ce2ee9fdce2058c57a93e6099d919fd982f79" +checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8" dependencies = [ "itoa", "ryu", diff --git a/bin/node/bench/Cargo.toml b/bin/node/bench/Cargo.toml index dee927cf944ba..b19a71966fb87 100644 --- a/bin/node/bench/Cargo.toml +++ b/bin/node/bench/Cargo.toml @@ -17,7 +17,7 @@ sc-client-api = { version = "4.0.0-dev", path = "../../../client/api/" } sp-runtime = { version = "4.0.0-dev", path = "../../../primitives/runtime" } sp-state-machine = { version = "0.10.0-dev", path = "../../../primitives/state-machine" } serde = "1.0.126" -serde_json = "1.0.41" +serde_json = "1.0.68" structopt = "0.3" derive_more = "0.99.2" kvdb = "0.10.0" diff 
--git a/client/chain-spec/Cargo.toml b/client/chain-spec/Cargo.toml index 3243430989c73..8af2996e968d8 100644 --- a/client/chain-spec/Cargo.toml +++ b/client/chain-spec/Cargo.toml @@ -18,7 +18,7 @@ impl-trait-for-tuples = "0.2.1" sc-network = { version = "0.10.0-dev", path = "../network" } sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } serde = { version = "1.0.126", features = ["derive"] } -serde_json = "1.0.41" +serde_json = "1.0.68" sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } sc-telemetry = { version = "4.0.0-dev", path = "../telemetry" } codec = { package = "parity-scale-codec", version = "2.0.0" } diff --git a/client/cli/Cargo.toml b/client/cli/Cargo.toml index 97058110ad920..e7a0330e76e0c 100644 --- a/client/cli/Cargo.toml +++ b/client/cli/Cargo.toml @@ -23,7 +23,7 @@ parity-scale-codec = "2.0.0" hex = "0.4.2" rand = "0.7.3" tiny-bip39 = "0.8.0" -serde_json = "1.0.41" +serde_json = "1.0.68" sc-keystore = { version = "4.0.0-dev", path = "../keystore" } sp-panic-handler = { version = "3.0.0", path = "../../primitives/panic-handler" } sc-client-api = { version = "4.0.0-dev", path = "../api" } diff --git a/client/consensus/babe/rpc/Cargo.toml b/client/consensus/babe/rpc/Cargo.toml index 0f6c411a730e4..8d5625705a48c 100644 --- a/client/consensus/babe/rpc/Cargo.toml +++ b/client/consensus/babe/rpc/Cargo.toml @@ -33,7 +33,7 @@ sp-keystore = { version = "0.10.0-dev", path = "../../../../primitives/keystore" [dev-dependencies] sc-consensus = { version = "0.10.0-dev", path = "../../../consensus/common" } -serde_json = "1.0.50" +serde_json = "1.0.68" sp-keyring = { version = "4.0.0-dev", path = "../../../../primitives/keyring" } sc-keystore = { version = "4.0.0-dev", path = "../../../keystore" } substrate-test-runtime-client = { version = "2.0.0", path = "../../../../test-utils/runtime/client" } diff --git a/client/finality-grandpa/Cargo.toml b/client/finality-grandpa/Cargo.toml index 40385a2faea57..ffdfc80c4eb9b 100644 
--- a/client/finality-grandpa/Cargo.toml +++ b/client/finality-grandpa/Cargo.toml @@ -35,7 +35,7 @@ sp-keystore = { version = "0.10.0-dev", path = "../../primitives/keystore" } sp-api = { version = "4.0.0-dev", path = "../../primitives/api" } sc-telemetry = { version = "4.0.0-dev", path = "../telemetry" } sc-keystore = { version = "4.0.0-dev", path = "../keystore" } -serde_json = "1.0.41" +serde_json = "1.0.68" sc-client-api = { version = "4.0.0-dev", path = "../api" } sp-blockchain = { version = "4.0.0-dev", path = "../../primitives/blockchain" } sc-network = { version = "0.10.0-dev", path = "../network" } diff --git a/client/keystore/Cargo.toml b/client/keystore/Cargo.toml index 11d6f5cb1e241..17c651a91decd 100644 --- a/client/keystore/Cargo.toml +++ b/client/keystore/Cargo.toml @@ -22,7 +22,7 @@ sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } sp-keystore = { version = "0.10.0-dev", path = "../../primitives/keystore" } hex = "0.4.0" parking_lot = "0.11.1" -serde_json = "1.0.41" +serde_json = "1.0.68" [dev-dependencies] tempfile = "3.1.0" diff --git a/client/network/Cargo.toml b/client/network/Cargo.toml index 283ac7c68f3e8..873c2a847a29a 100644 --- a/client/network/Cargo.toml +++ b/client/network/Cargo.toml @@ -47,7 +47,7 @@ sc-block-builder = { version = "0.10.0-dev", path = "../block-builder" } sc-client-api = { version = "4.0.0-dev", path = "../api" } sc-peerset = { version = "4.0.0-dev", path = "../peerset" } serde = { version = "1.0.126", features = ["derive"] } -serde_json = "1.0.41" +serde_json = "1.0.68" smallvec = "1.5.0" sp-arithmetic = { version = "4.0.0-dev", path = "../../primitives/arithmetic" } sp-blockchain = { version = "4.0.0-dev", path = "../../primitives/blockchain" } diff --git a/client/peerset/Cargo.toml b/client/peerset/Cargo.toml index 9e83ede675d02..5962620d6e06e 100644 --- a/client/peerset/Cargo.toml +++ b/client/peerset/Cargo.toml @@ -19,7 +19,7 @@ futures = "0.3.9" libp2p = { version = "0.39.1", default-features = 
false } sc-utils = { version = "4.0.0-dev", path = "../utils"} log = "0.4.8" -serde_json = "1.0.41" +serde_json = "1.0.68" wasm-timer = "0.2" [dev-dependencies] diff --git a/client/rpc-api/Cargo.toml b/client/rpc-api/Cargo.toml index 86fd24c24e7fa..6342abb1a3c41 100644 --- a/client/rpc-api/Cargo.toml +++ b/client/rpc-api/Cargo.toml @@ -28,7 +28,7 @@ sp-version = { version = "4.0.0-dev", path = "../../primitives/version" } sp-runtime = { path = "../../primitives/runtime", version = "4.0.0-dev" } sc-chain-spec = { path = "../chain-spec", version = "4.0.0-dev" } serde = { version = "1.0.126", features = ["derive"] } -serde_json = "1.0.41" +serde_json = "1.0.68" sc-transaction-pool-api = { version = "4.0.0-dev", path = "../transaction-pool/api" } sp-rpc = { version = "4.0.0-dev", path = "../../primitives/rpc" } sp-tracing = { version = "4.0.0-dev", path = "../../primitives/tracing" } diff --git a/client/rpc-servers/Cargo.toml b/client/rpc-servers/Cargo.toml index e249bb1ed8ae8..26a05a8263dc4 100644 --- a/client/rpc-servers/Cargo.toml +++ b/client/rpc-servers/Cargo.toml @@ -18,7 +18,7 @@ jsonrpc-core = "18.0.0" pubsub = { package = "jsonrpc-pubsub", version = "18.0.0" } log = "0.4.8" prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0"} -serde_json = "1.0.41" +serde_json = "1.0.68" tokio = "1.10" http = { package = "jsonrpc-http-server", version = "18.0.0" } ipc = { package = "jsonrpc-ipc-server", version = "18.0.0" } diff --git a/client/rpc/Cargo.toml b/client/rpc/Cargo.toml index 9957bc999a8bb..427800f74ddf2 100644 --- a/client/rpc/Cargo.toml +++ b/client/rpc/Cargo.toml @@ -23,7 +23,7 @@ log = "0.4.8" sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } rpc = { package = "jsonrpc-core", version = "18.0.0" } sp-version = { version = "4.0.0-dev", path = "../../primitives/version" } -serde_json = "1.0.41" +serde_json = "1.0.68" sp-session = { version = "4.0.0-dev", path = 
"../../primitives/session" } sp-offchain = { version = "4.0.0-dev", path = "../../primitives/offchain" } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } diff --git a/client/service/Cargo.toml b/client/service/Cargo.toml index ca81ede9a6a90..5120cc8f4dfaa 100644 --- a/client/service/Cargo.toml +++ b/client/service/Cargo.toml @@ -34,7 +34,7 @@ exit-future = "0.2.0" pin-project = "1.0.4" hash-db = "0.15.2" serde = "1.0.126" -serde_json = "1.0.41" +serde_json = "1.0.68" sc-keystore = { version = "4.0.0-dev", path = "../keystore" } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } sp-trie = { version = "4.0.0-dev", path = "../../primitives/trie" } diff --git a/client/sync-state-rpc/Cargo.toml b/client/sync-state-rpc/Cargo.toml index 9da9944a54547..b81fd1fd5c611 100644 --- a/client/sync-state-rpc/Cargo.toml +++ b/client/sync-state-rpc/Cargo.toml @@ -23,7 +23,7 @@ sc-consensus-babe = { version = "0.10.0-dev", path = "../consensus/babe" } sc-consensus-epochs = { version = "0.10.0-dev", path = "../consensus/epochs" } sc-finality-grandpa = { version = "0.10.0-dev", path = "../finality-grandpa" } sc-rpc-api = { version = "0.10.0-dev", path = "../rpc-api" } -serde_json = "1.0.58" +serde_json = "1.0.68" serde = { version = "1.0.126", features = ["derive"] } sp-blockchain = { version = "4.0.0-dev", path = "../../primitives/blockchain" } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } diff --git a/client/telemetry/Cargo.toml b/client/telemetry/Cargo.toml index 4dafeb205544b..f115017f09701 100644 --- a/client/telemetry/Cargo.toml +++ b/client/telemetry/Cargo.toml @@ -23,6 +23,6 @@ log = "0.4.8" pin-project = "1.0.4" rand = "0.7.2" serde = { version = "1.0.126", features = ["derive"] } -serde_json = "1.0.41" +serde_json = "1.0.68" chrono = "0.4.19" thiserror = "1.0.21" diff --git a/frame/merkle-mountain-range/rpc/Cargo.toml b/frame/merkle-mountain-range/rpc/Cargo.toml index 9182afbb1f5f5..5a0f114e50173 
100644 --- a/frame/merkle-mountain-range/rpc/Cargo.toml +++ b/frame/merkle-mountain-range/rpc/Cargo.toml @@ -27,4 +27,4 @@ sp-runtime = { version = "4.0.0-dev", path = "../../../primitives/runtime" } pallet-mmr-primitives = { version = "4.0.0-dev", path = "../primitives" } [dev-dependencies] -serde_json = "1.0.41" +serde_json = "1.0.68" diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 12b7622c1b18f..0bcc422bb8476 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -28,7 +28,7 @@ frame-support = { version = "4.0.0-dev", default-features = false, path = "../su frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } [dev-dependencies] -serde_json = "1.0.41" +serde_json = "1.0.68" pallet-balances = { version = "4.0.0-dev", path = "../balances" } [features] diff --git a/primitives/rpc/Cargo.toml b/primitives/rpc/Cargo.toml index 73c42555f1f1e..8e1b91a9acb21 100644 --- a/primitives/rpc/Cargo.toml +++ b/primitives/rpc/Cargo.toml @@ -18,4 +18,4 @@ sp-core = { version = "4.0.0-dev", path = "../core" } rustc-hash = "1.1.0" [dev-dependencies] -serde_json = "1.0.41" +serde_json = "1.0.68" diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index ad4b0477184e8..017c6a75efd99 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -31,7 +31,7 @@ hash256-std-hasher = { version = "0.15.2", default-features = false } either = { version = "1.5", default-features = false } [dev-dependencies] -serde_json = "1.0.41" +serde_json = "1.0.68" rand = "0.7.2" sp-state-machine = { version = "0.10.0-dev", path = "../state-machine" } sp-api = { version = "4.0.0-dev", path = "../api" } diff --git a/primitives/serializer/Cargo.toml b/primitives/serializer/Cargo.toml index 8f03d8f972934..2200274e0628d 100644 --- a/primitives/serializer/Cargo.toml +++ b/primitives/serializer/Cargo.toml @@ -15,4 +15,4 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] serde = "1.0.126" -serde_json = "1.0.41" +serde_json = "1.0.68" diff --git a/primitives/tracing/Cargo.toml b/primitives/tracing/Cargo.toml index aba918c9c5532..3be09dcd576df 100644 --- a/primitives/tracing/Cargo.toml +++ b/primitives/tracing/Cargo.toml @@ -31,7 +31,7 @@ tracing-subscriber = { version = "0.2.19", optional = true, features = [ parking_lot = { version = "0.10.0", optional = true } erased-serde = { version = "0.3.9", optional = true } serde = { version = "1.0.126", optional = true } -serde_json = { version = "1.0.41", optional = true } +serde_json = { version = "1.0.68", optional = true } slog = { version = "2.5.2", features = ["nested-values"], optional = true } [features] diff --git a/test-utils/client/Cargo.toml b/test-utils/client/Cargo.toml index a6f152edafaa2..34238872cad84 100644 --- a/test-utils/client/Cargo.toml +++ b/test-utils/client/Cargo.toml @@ -16,7 +16,7 @@ codec = { package = "parity-scale-codec", version = "2.0.0" } futures = "0.3.16" hex = "0.4" serde = "1.0.126" -serde_json = "1.0.55" +serde_json = "1.0.68" sc-client-api = { version = "4.0.0-dev", path = "../../client/api" } sc-client-db = { version = "0.10.0-dev", features = [ "test-helpers", From ba153b9ae050eda022f002d74d76f98d1e339a81 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 12:40:41 +0100 Subject: [PATCH 04/33] Enrich metadata with type information (#8615) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Cargo.lock after merge * Restore scale-info feature * Fully qualify TypeInfo derive * Skip PendingSwap T * Add missing skip_type_params attr * metadata docs features * Reduce pallet event attribute to struct * Cargo.lock * Update frame/balances/src/tests_composite.rs Co-authored-by: Guillaume Thiolliere * Line widths check * Cargo.lock * Add scale-info/std * Update frame/system/src/lib.rs Co-authored-by: Guillaume Thiolliere * Use `skip_type_params` to remove 
`TypeInfo` requirements on checks * Revert "Remove unused Call metadata stuff" This reverts commit 41311f85 * Skip BalanceSwapAction type parameter * Remove unused event metadata macro * Update frame-metadata * Update primitives/npos-elections/compact/src/codec.rs Co-authored-by: Guillaume Thiolliere * Manual TypeInfo for Header * Remove TypeInfo requirement for consts in BoundedVec etc. * Another TypeInfo bound removed * review: fix indentation * TypeInfo impls for Identity types * Add some todos to add custom TypeInfo impls * Update frame/support/procedural/src/pallet/expand/pallet_struct.rs Co-authored-by: Guillaume Thiolliere * Add some todos to add custom TypeInfo impls * Add a test for manual Data TypeInfo impl * Add custom TypeInfo impl for Vote * Era custom TypeInfo crimes * Revert finality-grandpa version to 0.14.z * review: renamed module to pallet_constants_metadata * New line at end of file * Add missing scale-info/std * Update frame/support/src/storage/types/mod.rs Co-authored-by: Guillaume Thiolliere * Remove StorageEntryType::Map unused flag * Add missing scale-info dependency after merge * SignedExtension::AdditionalSigned metadata * Update frame-metadata, use abbreviated docs and args fields * Update frame/example/Cargo.toml Co-authored-by: Keith Yeung * Add scale_info/std and remove unused scale-info dependency * Remove scale-info dependency * Remove treasury pallet::metadata * Remove redundant Event test * Add back scale-info as dev dependency * fix error metadata when no error defined in decl_module * Add Module3 to tests * Fix metadata test * Add docs feature to frame-support test * WIP fixing pallet metadata test * Remove redundant FunctionMetadata, FunctionArgumentMetadata as per https://github.com/paritytech/frame-metadata/pull/20 * Use main branch of frame-metadata * Use patch of scale-info for latest changes * Use latest patched scale-info * Manual TypeInfo for DigestItem * Manual TypeInfo for DigestItem * Update scale-info * Skip __Ignore 
variants for Error, depends on https://github.com/paritytech/scale-info/pull/117 * Named fields for FRAME v2 pallet Call variants * Named fields for FRAME v1 pallet Call variants * Add missing scale-info dependency * WIP expand benchmark call variant * fix benchmark with new function create a new function for each variant of a pallet call. This function is called by benchmarking macro in order not to break call creation with unnamed argument * fix tests * more fix * Fix staking tests * Fix offchain workers calls * Cherry pick rustfmt.toml from master * cargo +nightly-2021-06-22 fmt --all * Update to new call variant structs * More call variant struct updates * Remove unused import * More call variant structs * More call variant structs * Even more call variant structs * Mooar variant structs * Evermore variant structs * Call variant structs ad infinitum * Fmt * More call variants * Last call variant * Call variants all done? * Fix SS58Prefix type * Potential workaround for BitFlags TypeInfo * Enable docs capturing for Call, Event, and Error types * Fix IdentityFields TypeInfo * Remove metadata-docs feature * Add capture_docs = true for legacy Call, Event and Error types * Fmt * Fix metadata test type * Update benchmarks with call struct variants * Fmt * More test fixes * Fmt * Fix benches * Use latest capture_docs attr * Latest scale_info * Fmt * review: change &Vec to &[] * Remove pallet metadata attr * review: remove commented out test code * review: skip_type_params trailing comma suggestion * Update to scale-info 0.10.0 * Update construct_runtime ui tests, different because of metadata TypeInfo impls * Add some TypeInfo derives for UI tests * Update storage ensure span ui stderrs * Update call argument bound ui tests Possibly changed because change from tuple to struct variants? 
* Add scale-info dev dependency * Update to latest finality-grandpa release * review: missing newline * review: missing scale-info/std * review: remove duplicate scale-info/std * review: remove fully qualified TypeInfo * review: add missing scale-info/std * review: remove unnecessary imports. * Fmt * Use crates.io RC version of frame-metadata * Remove scale-info/std because it is a dev dependency * Add missing scale_info dev-dependency for test * Delete empty metadata folder * Fix sp_std import * review: improve manual UncheckedExtrinsic TypeInfo impl * review: use full scale-info for dev-dependency * Remove DefaultByteGetter impl * review: derive TypeInfo for generic header * Fmt * Update primitives/runtime/src/generic/unchecked_extrinsic.rs Co-authored-by: Keith Yeung * Update primitives/runtime/src/generic/unchecked_extrinsic.rs Co-authored-by: Keith Yeung * Update bin/node/executor/Cargo.toml Co-authored-by: Bastian Köcher * Update frame/identity/src/types.rs Co-authored-by: Bastian Köcher * Update frame/support/src/dispatch.rs Co-authored-by: Bastian Köcher * Remove redundant derive * Simplify scale-info dependency * Strip underscore prefix from call variant struct names * Another underscore field * More underscore fields * Another underscore field * Update to frame-metadata 14.0.0-rc.2 with combined StorageEntryType::Map * Fmt * Revert weights formatting * Fix up some tests * Fix up some tests for StorageEntryTypeMetadata * scale-info dev dependency * Fix test error * Add missing TypeInfo derives * Add back missing scale-info dependency * Add back missing scale-info dependency * Fix npos compact impls * Cargo.lock * Fmt * Fix errors * Fmt * Fix renamed raw_solution field * Fix error * Fmt * Fix some benchmarks * Fmt * Stray R * Fix * Add missing TypeInfos * ui test fix * Fix line widths * Revert "ui test fix" This reverts commit 2d15ec058a216e3f92d713f1174603a2bb1eac65. 
* Upgrade to scale-info 0.11.0 * Revert "Upgrade to scale-info 0.11.0" This reverts commit 047bb179085a0059c36cd20ab405f55cf0867e28. * Add Runtime type * Update to scale-info 0.12 * Update to scale-info 1.0 * Update frame-metadata to version 14.0.0 * Patch finality-grandpa until release available * Fix metadata tests * Fix metadata tests * Fmt * Remove patched finality-grandpa * Fix tests, use scale_info imports * Fix pallet tests * Add BlockNumber TypeInfo bound * ui test fix * Cargo.lock * Remove pallet metadata * Cargo.lock * Add missing scale-info dependency * Remove pallet event metadata * Fix error * Fix collective errors * Semicolol * Fmt * Remove another metadata attribute * Add new variant to custom digest TypeInfo * Fmt * Cargo.lock from master * Remove comma lol * Fix example call error * Fix example call error properly Co-authored-by: Guillaume Thiolliere Co-authored-by: Keith Yeung Co-authored-by: Shawn Tabrizi Co-authored-by: Bastian Köcher --- Cargo.lock | 126 ++- Cargo.toml | 4 +- bin/node-template/pallets/template/Cargo.toml | 2 + bin/node-template/pallets/template/src/lib.rs | 1 - bin/node-template/runtime/Cargo.toml | 2 + bin/node-template/runtime/src/lib.rs | 2 +- bin/node/cli/src/service.rs | 3 +- bin/node/executor/Cargo.toml | 1 + bin/node/executor/benches/bench.rs | 7 +- bin/node/executor/tests/basic.rs | 72 +- bin/node/executor/tests/common.rs | 2 +- bin/node/executor/tests/fees.rs | 18 +- bin/node/executor/tests/submit_transaction.rs | 26 +- bin/node/primitives/Cargo.toml | 2 + bin/node/runtime/Cargo.toml | 2 + bin/node/runtime/src/lib.rs | 18 +- bin/node/test-runner-example/src/lib.rs | 2 +- bin/node/testing/src/bench.rs | 18 +- client/finality-grandpa-warp-sync/Cargo.toml | 2 +- client/finality-grandpa/Cargo.toml | 2 +- client/finality-grandpa/rpc/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 + frame/assets/src/benchmarking.rs | 34 +- frame/assets/src/lib.rs | 1 - frame/assets/src/types.rs | 11 +- frame/atomic-swap/Cargo.toml | 2 + 
frame/atomic-swap/src/lib.rs | 8 +- frame/aura/Cargo.toml | 2 + frame/authority-discovery/Cargo.toml | 2 + frame/authorship/Cargo.toml | 2 + frame/authorship/src/lib.rs | 16 +- frame/babe/Cargo.toml | 2 + frame/babe/src/equivocation.rs | 10 +- frame/babe/src/tests.rs | 16 +- frame/balances/Cargo.toml | 2 + frame/balances/src/lib.rs | 15 +- frame/balances/src/tests.rs | 2 +- frame/benchmarking/Cargo.toml | 2 + frame/benchmarking/src/lib.rs | 52 +- frame/bounties/Cargo.toml | 2 + frame/bounties/src/lib.rs | 5 +- frame/collective/Cargo.toml | 2 + frame/collective/src/benchmarking.rs | 28 +- frame/collective/src/lib.rs | 7 +- frame/collective/src/tests.rs | 25 +- frame/contracts/Cargo.toml | 2 + frame/contracts/common/Cargo.toml | 2 + frame/contracts/rpc/runtime-api/Cargo.toml | 2 + frame/contracts/src/exec.rs | 19 +- frame/contracts/src/lib.rs | 1 - frame/contracts/src/schedule.rs | 12 +- frame/contracts/src/storage.rs | 5 +- frame/contracts/src/tests.rs | 7 +- frame/contracts/src/wasm/mod.rs | 5 +- frame/democracy/Cargo.toml | 2 + frame/democracy/src/benchmarking.rs | 24 +- frame/democracy/src/conviction.rs | 3 +- frame/democracy/src/lib.rs | 14 +- frame/democracy/src/tests.rs | 5 +- frame/democracy/src/types.rs | 9 +- frame/democracy/src/vote.rs | 22 +- frame/democracy/src/vote_threshold.rs | 3 +- .../election-provider-multi-phase/Cargo.toml | 2 + .../src/benchmarking.rs | 5 +- .../election-provider-multi-phase/src/lib.rs | 35 +- .../src/signed.rs | 2 +- .../src/unsigned.rs | 39 +- frame/election-provider-support/Cargo.toml | 1 + frame/elections-phragmen/Cargo.toml | 2 + frame/elections-phragmen/src/lib.rs | 12 +- frame/elections/Cargo.toml | 2 + frame/elections/src/lib.rs | 5 +- frame/example-offchain-worker/Cargo.toml | 2 + frame/example-offchain-worker/src/lib.rs | 24 +- frame/example-offchain-worker/src/tests.rs | 19 +- frame/example-parallel/Cargo.toml | 2 + frame/example-parallel/src/lib.rs | 2 +- frame/example/Cargo.toml | 2 + frame/example/src/lib.rs | 16 +- 
frame/example/src/tests.rs | 6 +- frame/executive/Cargo.toml | 2 + frame/executive/src/lib.rs | 66 +- frame/gilt/Cargo.toml | 2 + frame/gilt/src/benchmarking.rs | 6 +- frame/gilt/src/lib.rs | 11 +- frame/grandpa/Cargo.toml | 2 + frame/grandpa/src/equivocation.rs | 10 +- frame/grandpa/src/lib.rs | 6 +- frame/grandpa/src/tests.rs | 16 +- frame/identity/Cargo.toml | 2 + frame/identity/src/lib.rs | 4 - frame/identity/src/types.rs | 170 +++- frame/im-online/Cargo.toml | 2 + frame/im-online/src/benchmarking.rs | 4 +- frame/im-online/src/lib.rs | 16 +- frame/im-online/src/tests.rs | 21 +- frame/indices/Cargo.toml | 2 + frame/indices/src/lib.rs | 1 - frame/lottery/Cargo.toml | 2 + frame/lottery/src/benchmarking.rs | 20 +- frame/lottery/src/lib.rs | 3 +- frame/lottery/src/tests.rs | 45 +- frame/membership/Cargo.toml | 2 + frame/membership/src/lib.rs | 3 - frame/merkle-mountain-range/Cargo.toml | 2 + frame/merkle-mountain-range/src/lib.rs | 3 +- frame/metadata/Cargo.toml | 28 - frame/metadata/README.md | 7 - frame/metadata/src/lib.rs | 466 ----------- frame/multisig/Cargo.toml | 2 + frame/multisig/src/benchmarking.rs | 7 +- frame/multisig/src/lib.rs | 10 +- frame/multisig/src/tests.rs | 52 +- frame/nicks/Cargo.toml | 2 + frame/nicks/src/lib.rs | 1 - frame/node-authorization/Cargo.toml | 2 + frame/node-authorization/src/lib.rs | 1 - frame/offences/Cargo.toml | 2 + frame/offences/benchmarking/Cargo.toml | 2 + frame/proxy/Cargo.toml | 2 + frame/proxy/src/benchmarking.rs | 10 +- frame/proxy/src/lib.rs | 28 +- frame/proxy/src/tests.rs | 64 +- frame/randomness-collective-flip/Cargo.toml | 2 + frame/recovery/Cargo.toml | 2 + frame/recovery/src/lib.rs | 6 +- frame/recovery/src/tests.rs | 8 +- frame/scheduler/Cargo.toml | 2 + frame/scheduler/src/benchmarking.rs | 6 +- frame/scheduler/src/lib.rs | 120 +-- frame/scored-pool/Cargo.toml | 2 + frame/scored-pool/src/lib.rs | 3 +- frame/session/Cargo.toml | 2 + frame/session/benchmarking/Cargo.toml | 1 + frame/society/Cargo.toml | 2 + 
frame/society/src/lib.rs | 13 +- frame/staking/Cargo.toml | 2 + frame/staking/src/lib.rs | 29 +- frame/staking/src/pallet/mod.rs | 1 - frame/staking/src/slashing.rs | 7 +- frame/staking/src/tests.rs | 15 +- frame/sudo/Cargo.toml | 2 + frame/sudo/src/lib.rs | 1 - frame/sudo/src/mock.rs | 1 - frame/sudo/src/tests.rs | 24 +- frame/support/Cargo.toml | 5 +- .../src/construct_runtime/expand/call.rs | 1 + .../src/construct_runtime/expand/event.rs | 1 + .../src/construct_runtime/expand/metadata.rs | 80 +- .../src/construct_runtime/expand/origin.rs | 5 +- .../procedural/src/construct_runtime/mod.rs | 5 +- frame/support/procedural/src/key_prefix.rs | 4 +- .../procedural/src/pallet/expand/call.rs | 115 ++- .../procedural/src/pallet/expand/config.rs | 3 +- .../procedural/src/pallet/expand/constants.rs | 52 +- .../procedural/src/pallet/expand/error.rs | 31 +- .../procedural/src/pallet/expand/event.rs | 35 +- .../src/pallet/expand/genesis_config.rs | 6 +- .../procedural/src/pallet/expand/mod.rs | 3 +- .../src/pallet/expand/pallet_struct.rs | 31 +- .../procedural/src/pallet/expand/storage.rs | 109 +-- .../procedural/src/pallet/parse/call.rs | 5 +- .../procedural/src/pallet/parse/config.rs | 3 +- .../procedural/src/pallet/parse/error.rs | 3 +- .../procedural/src/pallet/parse/event.rs | 141 +--- .../src/pallet/parse/extra_constants.rs | 3 +- .../procedural/src/pallet/parse/helper.rs | 18 - .../procedural/src/pallet/parse/storage.rs | 3 +- .../procedural/src/storage/instance_trait.rs | 1 + .../procedural/src/storage/metadata.rs | 84 +- frame/support/procedural/tools/src/lib.rs | 18 + frame/support/src/dispatch.rs | 353 +++----- frame/support/src/error.rs | 23 +- frame/support/src/event.rs | 268 +----- frame/support/src/hash.rs | 17 +- frame/support/src/lib.rs | 254 +++--- .../support/src/storage/bounded_btree_map.rs | 3 +- frame/support/src/storage/bounded_vec.rs | 3 +- frame/support/src/storage/mod.rs | 1 + frame/support/src/storage/types/double_map.rs | 75 +- 
frame/support/src/storage/types/key.rs | 23 +- frame/support/src/storage/types/map.rs | 64 +- frame/support/src/storage/types/mod.rs | 30 +- frame/support/src/storage/types/nmap.rs | 49 +- frame/support/src/storage/types/value.rs | 32 +- frame/support/src/storage/weak_bounded_vec.rs | 3 +- frame/support/src/traits/tokens/misc.rs | 12 +- frame/support/src/weights.rs | 31 +- frame/support/test/Cargo.toml | 5 +- frame/support/test/pallet/Cargo.toml | 2 + frame/support/test/src/lib.rs | 4 +- frame/support/test/tests/construct_runtime.rs | 539 ++++-------- .../no_std_genesis_config.stderr | 22 + .../undefined_call_part.stderr | 24 + .../undefined_event_part.stderr | 32 +- .../undefined_genesis_config_part.stderr | 24 + .../undefined_inherent_part.stderr | 24 + .../undefined_origin_part.stderr | 24 + .../undefined_validate_unsigned_part.stderr | 24 + frame/support/test/tests/decl_storage.rs | 593 ++++++------- frame/support/test/tests/instance.rs | 90 +- frame/support/test/tests/issue2219.rs | 7 +- frame/support/test/tests/pallet.rs | 784 +++++++++++------- .../test/tests/pallet_compatibility.rs | 79 +- .../tests/pallet_compatibility_instance.rs | 73 +- frame/support/test/tests/pallet_instance.rs | 320 +++---- .../pallet_ui/call_argument_invalid_bound.rs | 2 +- .../call_argument_invalid_bound.stderr | 12 +- .../call_argument_invalid_bound_2.rs | 2 +- .../call_argument_invalid_bound_2.stderr | 22 +- .../call_argument_invalid_bound_3.rs | 2 +- .../call_argument_invalid_bound_3.stderr | 12 +- ...age_ensure_span_are_ok_on_wrong_gen.stderr | 26 +- ...re_span_are_ok_on_wrong_gen_unnamed.stderr | 26 +- .../pallet_ui/storage_info_unsatisfied.rs | 2 +- .../storage_info_unsatisfied_nmap.rs | 2 +- .../storage_info_unsatisfied_nmap.stderr | 4 +- frame/support/test/tests/system.rs | 6 +- frame/system/Cargo.toml | 2 + frame/system/benchmarking/Cargo.toml | 2 + frame/system/src/extensions/check_genesis.rs | 4 +- .../system/src/extensions/check_mortality.rs | 4 +- 
frame/system/src/extensions/check_nonce.rs | 4 +- .../src/extensions/check_spec_version.rs | 4 +- .../system/src/extensions/check_tx_version.rs | 4 +- frame/system/src/extensions/check_weight.rs | 4 +- frame/system/src/lib.rs | 19 +- frame/system/src/limits.rs | 7 +- frame/system/src/mock.rs | 3 +- frame/system/src/offchain.rs | 5 +- frame/timestamp/Cargo.toml | 2 + frame/timestamp/src/lib.rs | 9 +- frame/tips/Cargo.toml | 2 + frame/tips/src/lib.rs | 3 +- frame/transaction-payment/Cargo.toml | 2 + frame/transaction-payment/src/lib.rs | 10 +- frame/transaction-payment/src/payment.rs | 3 +- frame/transaction-storage/Cargo.toml | 2 + frame/transaction-storage/src/lib.rs | 6 +- frame/treasury/Cargo.toml | 2 + frame/treasury/src/lib.rs | 4 +- frame/uniques/Cargo.toml | 2 + frame/uniques/src/benchmarking.rs | 16 +- frame/uniques/src/lib.rs | 5 - frame/uniques/src/types.rs | 13 +- frame/utility/Cargo.toml | 2 + frame/utility/src/benchmarking.rs | 6 +- frame/utility/src/lib.rs | 2 +- frame/utility/src/tests.rs | 156 ++-- frame/vesting/Cargo.toml | 2 + frame/vesting/src/lib.rs | 6 +- frame/vesting/src/vesting_info.rs | 2 +- primitives/application-crypto/Cargo.toml | 2 + primitives/application-crypto/src/lib.rs | 6 + primitives/application-crypto/src/traits.rs | 2 +- primitives/arithmetic/Cargo.toml | 2 + primitives/arithmetic/src/fixed_point.rs | 12 +- primitives/arithmetic/src/per_things.rs | 2 +- primitives/authority-discovery/Cargo.toml | 2 + primitives/consensus/aura/Cargo.toml | 2 + primitives/consensus/babe/Cargo.toml | 2 + primitives/consensus/babe/src/digests.rs | 2 +- primitives/consensus/babe/src/lib.rs | 5 +- primitives/consensus/slots/Cargo.toml | 2 + primitives/consensus/slots/src/lib.rs | 5 +- primitives/core/Cargo.toml | 3 + primitives/core/src/changes_trie.rs | 2 +- primitives/core/src/crypto.rs | 6 +- primitives/core/src/ecdsa.rs | 5 +- primitives/core/src/ed25519.rs | 16 +- primitives/core/src/lib.rs | 15 +- primitives/core/src/offchain/mod.rs | 5 +- 
primitives/core/src/sr25519.rs | 16 +- primitives/finality-grandpa/Cargo.toml | 2 + primitives/finality-grandpa/src/lib.rs | 5 +- primitives/npos-elections/Cargo.toml | 2 + primitives/npos-elections/fuzzer/Cargo.toml | 1 + .../npos-elections/solution-type/Cargo.toml | 1 + .../npos-elections/solution-type/src/codec.rs | 90 +- .../solution-type/src/single_page.rs | 13 +- primitives/npos-elections/src/lib.rs | 4 +- primitives/runtime/Cargo.toml | 2 + primitives/runtime/src/curve.rs | 2 +- primitives/runtime/src/generic/digest.rs | 119 ++- primitives/runtime/src/generic/era.rs | 44 + primitives/runtime/src/generic/header.rs | 3 +- .../src/generic/unchecked_extrinsic.rs | 37 +- primitives/runtime/src/lib.rs | 13 +- primitives/runtime/src/multiaddress.rs | 2 +- primitives/runtime/src/runtime_string.rs | 8 + primitives/runtime/src/testing.rs | 6 +- primitives/runtime/src/traits.rs | 58 +- primitives/session/Cargo.toml | 2 + primitives/session/src/lib.rs | 2 +- primitives/staking/Cargo.toml | 2 + primitives/staking/src/offence.rs | 2 +- .../transaction-storage-proof/Cargo.toml | 2 + .../transaction-storage-proof/src/lib.rs | 2 +- primitives/trie/Cargo.toml | 2 + primitives/trie/src/storage_proof.rs | 5 +- primitives/version/Cargo.toml | 2 + primitives/version/src/lib.rs | 3 +- test-utils/runtime/Cargo.toml | 2 + test-utils/runtime/src/lib.rs | 5 +- utils/frame/rpc/support/Cargo.toml | 1 + 305 files changed, 3989 insertions(+), 3921 deletions(-) delete mode 100644 frame/metadata/Cargo.toml delete mode 100644 frame/metadata/README.md delete mode 100644 frame/metadata/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index ff21ec8dd0109..7754e0ae6b62f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1790,9 +1790,9 @@ dependencies = [ [[package]] name = "finality-grandpa" -version = "0.14.1" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74a1bfdcc776e63e49f741c7ce6116fa1b887e8ac2e3ccb14dd4aa113e54feb9" +checksum = 
"e8ac3ff5224ef91f3c97e03eb1de2db82743427e91aaa5ac635f454f0b164f5a" dependencies = [ "either", "futures 0.3.16", @@ -1802,6 +1802,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.4", + "scale-info", ] [[package]] @@ -1884,6 +1885,7 @@ dependencies = [ "log 0.4.14", "parity-scale-codec", "paste 1.0.4", + "scale-info", "sp-api", "sp-io", "sp-runtime", @@ -1924,6 +1926,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -1942,6 +1945,7 @@ dependencies = [ "pallet-balances", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -1953,18 +1957,21 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "14.0.0-dev" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96616f82e069102b95a72c87de4c84d2f87ef7f0f20630e78ce3824436483110" dependencies = [ + "cfg-if 1.0.0", "parity-scale-codec", + "scale-info", "serde", - "sp-core", - "sp-std", ] [[package]] name = "frame-support" version = "4.0.0-dev" dependencies = [ + "assert_matches", "bitflags", "frame-metadata", "frame-support-procedural", @@ -1976,6 +1983,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -2024,19 +2032,21 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata", "frame-support", "frame-support-test-pallet", "frame-system", "parity-scale-codec", "pretty_assertions 0.6.1", "rustversion", + "scale-info", "serde", + "sp-arithmetic", "sp-core", "sp-io", "sp-runtime", "sp-state-machine", "sp-std", + "sp-version", "trybuild", ] @@ -2047,6 +2057,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", ] [[package]] @@ -2057,6 +2068,7 @@ dependencies = [ "frame-support", "log 0.4.14", "parity-scale-codec", + "scale-info", 
"serde", "sp-core", "sp-externalities", @@ -2075,6 +2087,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4417,6 +4430,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4450,6 +4464,7 @@ version = "2.0.0" dependencies = [ "frame-system", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4555,6 +4570,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", + "scale-info", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4633,6 +4649,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", + "scale-info", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4899,6 +4916,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4913,6 +4931,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4927,6 +4946,7 @@ dependencies = [ "frame-system", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4943,6 +4963,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4959,6 +4980,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", + "scale-info", "sp-authorship", "sp-core", "sp-io", @@ -4983,6 +5005,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -5004,6 +5027,7 @@ dependencies = [ "log 0.4.14", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", 
"sp-core", "sp-io", "sp-runtime", @@ -5020,6 +5044,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5035,6 +5060,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5064,6 +5090,7 @@ dependencies = [ "pwasm-utils", "rand 0.7.3", "rand_pcg 0.2.1", + "scale-info", "serde", "smallvec 1.6.1", "sp-core", @@ -5081,6 +5108,7 @@ version = "4.0.0-dev" dependencies = [ "bitflags", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-runtime", @@ -5121,6 +5149,7 @@ version = "4.0.0-dev" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -5136,6 +5165,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5156,6 +5186,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.7.3", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5176,6 +5207,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5192,6 +5224,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-npos-elections", @@ -5210,6 +5243,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5225,6 +5259,7 @@ dependencies = [ "lite-json", "log 0.4.14", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-keystore", @@ -5239,6 +5274,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5255,6 +5291,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5280,6 +5317,7 @@ dependencies = [ 
"pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5301,6 +5339,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5318,6 +5357,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5335,6 +5375,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-keyring", @@ -5352,6 +5393,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5367,6 +5409,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5385,6 +5428,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5433,6 +5477,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5447,6 +5492,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5461,6 +5507,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5476,6 +5523,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5502,6 +5550,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5519,6 +5568,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5533,6 +5583,7 @@ dependencies = [ "frame-system", "parity-scale-codec", 
"safe-mix", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5547,6 +5598,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5562,6 +5614,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5577,6 +5630,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5593,6 +5647,7 @@ dependencies = [ "log 0.4.14", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5617,6 +5672,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5634,6 +5690,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5656,6 +5713,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5693,6 +5751,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5707,6 +5766,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5721,6 +5781,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -5740,6 +5801,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5756,6 +5818,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5801,6 +5864,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5820,6 +5884,7 @@ 
dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5836,6 +5901,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5851,6 +5917,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5867,6 +5934,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -6393,6 +6461,7 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", + "scale-info", "uint", ] @@ -8324,6 +8393,32 @@ dependencies = [ "prometheus", ] +[[package]] +name = "scale-info" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c55b744399c25532d63a0d2789b109df8d46fc93752d46b0782991a931a782f" +dependencies = [ + "bitvec 0.20.2", + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive", + "serde", +] + +[[package]] +name = "scale-info-derive" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baeb2780690380592f86205aa4ee49815feb2acad8c2f59e6dd207148c3f1fcd" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8779,6 +8874,7 @@ name = "sp-application-crypto" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -8807,6 +8903,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", + "scale-info", "serde", "sp-debug-derive", "sp-std", @@ -8828,6 +8925,7 @@ name = "sp-authority-discovery" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8898,6 +8996,7 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "parity-scale-codec", + 
"scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8915,6 +9014,7 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8945,6 +9045,7 @@ name = "sp-consensus-slots" version = "0.10.0-dev" dependencies = [ "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8987,6 +9088,7 @@ dependencies = [ "primitive-types", "rand 0.7.3", "regex", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -9041,6 +9143,7 @@ dependencies = [ "finality-grandpa", "log 0.4.14", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9129,6 +9232,7 @@ version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9145,6 +9249,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", + "scale-info", "sp-npos-elections", "sp-runtime", "structopt", @@ -9158,6 +9263,7 @@ dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", "quote", + "scale-info", "sp-arithmetic", "sp-npos-elections", "syn", @@ -9202,6 +9308,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-api", @@ -9314,6 +9421,7 @@ name = "sp-session" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9326,6 +9434,7 @@ name = "sp-staking" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", + "scale-info", "sp-runtime", "sp-std", ] @@ -9443,6 +9552,7 @@ dependencies = [ "async-trait", "log 0.4.14", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-runtime", @@ -9459,6 +9569,7 @@ dependencies = [ "hex-literal", "memory-db", "parity-scale-codec", + "scale-info", "sp-core", "sp-runtime", "sp-std", @@ -9475,6 +9586,7 @@ dependencies = [ "impl-serde", "parity-scale-codec", "parity-wasm 0.42.2", + "scale-info", "serde", "sp-runtime", "sp-std", @@ -9659,6 +9771,7 @@ 
dependencies = [ "jsonrpc-client-transports", "parity-scale-codec", "sc-rpc-api", + "scale-info", "serde", "sp-storage", "tokio", @@ -9745,6 +9858,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", + "scale-info", "serde", "sp-api", "sp-application-crypto", diff --git a/Cargo.toml b/Cargo.toml index 64cbbf38966c3..bca0c816217ee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -94,7 +94,6 @@ members = [ "frame/merkle-mountain-range", "frame/merkle-mountain-range/primitives", "frame/merkle-mountain-range/rpc", - "frame/metadata", "frame/multisig", "frame/nicks", "frame/node-authorization", @@ -261,7 +260,6 @@ wasmi = { opt-level = 3 } x25519-dalek = { opt-level = 3 } yamux = { opt-level = 3 } zeroize = { opt-level = 3 } - [profile.release] # Substrate runtime requires unwinding. -panic = "unwind" +panic = "unwind" \ No newline at end of file diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index bd4a91f0146af..b3eb747625b4f 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -17,6 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/support" } frame-system = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/benchmarking", optional = true } @@ -30,6 +31,7 @@ sp-runtime = { default-features = false, version = "4.0.0-dev", path = "../../.. 
default = ['std'] std = [ 'codec/std', + 'scale-info/std', 'frame-support/std', 'frame-system/std', 'frame-benchmarking/std', diff --git a/bin/node-template/pallets/template/src/lib.rs b/bin/node-template/pallets/template/src/lib.rs index 7a9830a21eb2a..ee3ca695b64da 100644 --- a/bin/node-template/pallets/template/src/lib.rs +++ b/bin/node-template/pallets/template/src/lib.rs @@ -41,7 +41,6 @@ pub mod pallet { // Pallets use events to inform users when important changes are made. // https://substrate.dev/docs/en/knowledgebase/runtime/events #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// Event documentation should end with an array that provides descriptive names for event diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 72e19cc62b0ba..47e67af2b9ae1 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } pallet-aura = { version = "4.0.0-dev", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../../../frame/balances" } @@ -54,6 +55,7 @@ substrate-wasm-builder = { version = "5.0.0-dev", path = "../../../utils/wasm-bu default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-executive/std", "frame-support/std", "frame-system-rpc-runtime-api/std", diff --git a/bin/node-template/runtime/src/lib.rs b/bin/node-template/runtime/src/lib.rs index eae40e1ab3564..eecc93e166666 100644 --- a/bin/node-template/runtime/src/lib.rs +++ b/bin/node-template/runtime/src/lib.rs @@ -340,7 +340,7 @@ impl_runtime_apis! 
{ impl sp_api::Metadata for Runtime { fn metadata() -> OpaqueMetadata { - Runtime::metadata().into() + OpaqueMetadata::new(Runtime::metadata().into()) } } diff --git a/bin/node/cli/src/service.rs b/bin/node/cli/src/service.rs index 9f48ab7e3ef3c..acc7df5b1e5a3 100644 --- a/bin/node/cli/src/service.rs +++ b/bin/node/cli/src/service.rs @@ -810,7 +810,8 @@ mod tests { }; let signer = charlie.clone(); - let function = Call::Balances(BalancesCall::transfer(to.into(), amount)); + let function = + Call::Balances(BalancesCall::transfer { dest: to.into(), value: amount }); let check_spec_version = frame_system::CheckSpecVersion::new(); let check_tx_version = frame_system::CheckTxVersion::new(); diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index 0db8a9e411bf7..f283a913915f3 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } +scale-info = { version = "1.0", features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "3.0.0-dev", path = "../runtime" } sc-executor = { version = "0.10.0-dev", path = "../../../client/executor" } diff --git a/bin/node/executor/benches/bench.rs b/bin/node/executor/benches/bench.rs index 0058a5c70340f..1a39c9decb321 100644 --- a/bin/node/executor/benches/bench.rs +++ b/bin/node/executor/benches/bench.rs @@ -162,11 +162,14 @@ fn test_blocks( let mut test_ext = new_test_ext(genesis_config); let mut block1_extrinsics = vec![CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(0)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: 0 }), }]; block1_extrinsics.extend((0..20).map(|i| CheckedExtrinsic { signed: Some((alice(), signed_extra(i, 0))), - function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 1 * DOLLARS)), + function: 
Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 1 * DOLLARS, + }), })); let block1 = construct_block(executor, &mut test_ext.ext(), 1, GENESIS_HASH.into(), block1_extrinsics); diff --git a/bin/node/executor/tests/basic.rs b/bin/node/executor/tests/basic.rs index e9e21e541e753..c1ab5e5a0fe13 100644 --- a/bin/node/executor/tests/basic.rs +++ b/bin/node/executor/tests/basic.rs @@ -84,14 +84,14 @@ fn changes_trie_block() -> (Vec, Hash) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - bob().into(), - 69 * DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 69 * DOLLARS, + }), }, ], (time / SLOT_DURATION).into(), @@ -111,14 +111,14 @@ fn blocks() -> ((Vec, Hash), (Vec, Hash)) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time1)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time1 }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - bob().into(), - 69 * DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 69 * DOLLARS, + }), }, ], (time1 / SLOT_DURATION).into(), @@ -131,21 +131,21 @@ fn blocks() -> ((Vec, Hash), (Vec, Hash)) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time2)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time2 }), }, CheckedExtrinsic { signed: Some((bob(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - alice().into(), - 5 * DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: 
alice().into(), + value: 5 * DOLLARS, + }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(1, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - bob().into(), - 15 * DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 15 * DOLLARS, + }), }, ], (time2 / SLOT_DURATION).into(), @@ -166,11 +166,11 @@ fn block_with_size(time: u64, nonce: u32, size: usize) -> (Vec, Hash) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time * 1000)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time * 1000 }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(nonce, 0))), - function: Call::System(frame_system::Call::remark(vec![0; size])), + function: Call::System(frame_system::Call::remark { remark: vec![0; size] }), }, ], (time * 1000 / SLOT_DURATION).into(), @@ -357,7 +357,7 @@ fn full_native_block_import_works() { let mut fees = t.execute_with(|| transfer_fee(&xt())); let transfer_weight = default_transfer_call().get_dispatch_info().weight; - let timestamp_weight = pallet_timestamp::Call::set::(Default::default()) + let timestamp_weight = pallet_timestamp::Call::set:: { now: Default::default() } .get_dispatch_info() .weight; @@ -646,28 +646,28 @@ fn deploying_wasm_contract_should_work() { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time }), }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(0, 0))), function: Call::Contracts( - pallet_contracts::Call::instantiate_with_code::( - 1000 * DOLLARS + subsistence, - 500_000_000, - transfer_code, - Vec::new(), - Vec::new(), - ), + pallet_contracts::Call::instantiate_with_code:: { + endowment: 1000 * DOLLARS + subsistence, + gas_limit: 500_000_000, + code: transfer_code, + data: Vec::new(), + salt: Vec::new(), + }, ), }, CheckedExtrinsic { signed: 
Some((charlie(), signed_extra(1, 0))), - function: Call::Contracts(pallet_contracts::Call::call::( - sp_runtime::MultiAddress::Id(addr.clone()), - 10, - 500_000_000, - vec![0x00, 0x01, 0x02, 0x03], - )), + function: Call::Contracts(pallet_contracts::Call::call:: { + dest: sp_runtime::MultiAddress::Id(addr.clone()), + value: 10, + gas_limit: 500_000_000, + data: vec![0x00, 0x01, 0x02, 0x03], + }), }, ], (time / SLOT_DURATION).into(), diff --git a/bin/node/executor/tests/common.rs b/bin/node/executor/tests/common.rs index a0edb46a0d6ae..d1c24c83c836d 100644 --- a/bin/node/executor/tests/common.rs +++ b/bin/node/executor/tests/common.rs @@ -88,7 +88,7 @@ pub fn sign(xt: CheckedExtrinsic) -> UncheckedExtrinsic { } pub fn default_transfer_call() -> pallet_balances::Call { - pallet_balances::Call::transfer::(bob().into(), 69 * DOLLARS) + pallet_balances::Call::::transfer { dest: bob().into(), value: 69 * DOLLARS } } pub fn from_block_number(n: u32) -> Header { diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index 3bc9179da2b3d..379cdda5b76a3 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -56,11 +56,13 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time1)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time1 }), }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(0, 0))), - function: Call::System(frame_system::Call::fill_block(Perbill::from_percent(60))), + function: Call::System(frame_system::Call::fill_block { + ratio: Perbill::from_percent(60), + }), }, ], (time1 / SLOT_DURATION).into(), @@ -75,11 +77,11 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time2)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time2 }), }, CheckedExtrinsic { 
signed: Some((charlie(), signed_extra(1, 0))), - function: Call::System(frame_system::Call::remark(vec![0; 1])), + function: Call::System(frame_system::Call::remark { remark: vec![0; 1] }), }, ], (time2 / SLOT_DURATION).into(), @@ -321,11 +323,9 @@ fn block_length_capacity_report() { }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(nonce, 0))), - function: Call::System(frame_system::Call::remark(vec![ - 0u8; - (block_number * factor) - as usize - ])), + function: Call::System(frame_system::Call::remark { + remark: vec![0u8; (block_number * factor) as usize], + }), }, ], (time * 1000 / SLOT_DURATION).into(), diff --git a/bin/node/executor/tests/submit_transaction.rs b/bin/node/executor/tests/submit_transaction.rs index c83e48c8c933b..19ca8e5677c43 100644 --- a/bin/node/executor/tests/submit_transaction.rs +++ b/bin/node/executor/tests/submit_transaction.rs @@ -42,7 +42,7 @@ fn should_submit_unsigned_transaction() { validators_len: 0, }; - let call = pallet_im_online::Call::heartbeat(heartbeat_data, signature); + let call = pallet_im_online::Call::heartbeat { heartbeat: heartbeat_data, signature }; SubmitTransaction::>::submit_unsigned_transaction( call.into(), ) @@ -84,7 +84,10 @@ fn should_submit_signed_transaction() { t.execute_with(|| { let results = Signer::::all_accounts().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); let len = results.len(); @@ -118,7 +121,10 @@ fn should_submit_signed_twice_from_the_same_account() { t.execute_with(|| { let result = Signer::::any_account().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); assert!(result.is_some()); @@ -127,7 +133,10 @@ fn should_submit_signed_twice_from_the_same_account() { // submit another 
one from the same account. The nonce should be incremented. let result = Signer::::any_account().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); assert!(result.is_some()); @@ -163,7 +172,7 @@ fn should_submit_signed_twice_from_all_accounts() { t.execute_with(|| { let results = Signer::::all_accounts() .send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); let len = results.len(); @@ -174,7 +183,7 @@ fn should_submit_signed_twice_from_all_accounts() { // submit another one from the same account. The nonce should be incremented. let results = Signer::::all_accounts() .send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); let len = results.len(); @@ -227,7 +236,10 @@ fn submitted_transaction_should_be_valid() { t.execute_with(|| { let results = Signer::::all_accounts().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); let len = results.len(); assert_eq!(len, 1); diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index de6000b602065..12ec57e4d55b6 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-system = { version = "4.0.0-dev", default-features = false, path = 
"../../../frame/system" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } @@ -23,6 +24,7 @@ sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../.. default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-system/std", "sp-application-crypto/std", "sp-core/std", diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 2b9accffc8c3a..dafd9db8bab96 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -18,6 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } log = { version = "0.4.14", default-features = false } @@ -113,6 +114,7 @@ std = [ "pallet-bounties/std", "sp-block-builder/std", "codec/std", + "scale-info/std", "pallet-collective/std", "pallet-contracts/std", "pallet-contracts-primitives/std", diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index d7257a9ea71b5..7c6475bd18d6a 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -255,7 +255,17 @@ parameter_types! { /// The type used to represent the kinds of proxying allowed. #[derive( - Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, MaxEncodedLen, + Copy, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Encode, + Decode, + RuntimeDebug, + MaxEncodedLen, + scale_info::TypeInfo, )] pub enum ProxyType { Any, @@ -276,8 +286,8 @@ impl InstanceFilter for ProxyType { c, Call::Balances(..) | Call::Assets(..) | Call::Uniques(..) 
| - Call::Vesting(pallet_vesting::Call::vested_transfer(..)) | - Call::Indices(pallet_indices::Call::transfer(..)) + Call::Vesting(pallet_vesting::Call::vested_transfer { .. }) | + Call::Indices(pallet_indices::Call::transfer { .. }) ), ProxyType::Governance => matches!( c, @@ -1314,7 +1324,7 @@ impl_runtime_apis! { impl sp_api::Metadata for Runtime { fn metadata() -> OpaqueMetadata { - Runtime::metadata().into() + OpaqueMetadata::new(Runtime::metadata().into()) } } diff --git a/bin/node/test-runner-example/src/lib.rs b/bin/node/test-runner-example/src/lib.rs index e7fe1ee002423..0de7f5a4e2b70 100644 --- a/bin/node/test-runner-example/src/lib.rs +++ b/bin/node/test-runner-example/src/lib.rs @@ -115,7 +115,7 @@ mod tests { let alice = MultiSigner::from(Alice.public()).into_account(); let _hash = node .submit_extrinsic( - frame_system::Call::remark((b"hello world").to_vec()), + frame_system::Call::remark { remark: (b"hello world").to_vec() }, Some(alice), ) .await diff --git a/bin/node/testing/src/bench.rs b/bin/node/testing/src/bench.rs index a1f9bc8710565..cf0a463cc3e99 100644 --- a/bin/node/testing/src/bench.rs +++ b/bin/node/testing/src/bench.rs @@ -299,19 +299,19 @@ impl<'a> Iterator for BlockContentIterator<'a> { )), function: match self.content.block_type { BlockType::RandomTransfersKeepAlive => - Call::Balances(BalancesCall::transfer_keep_alive( - sp_runtime::MultiAddress::Id(receiver), - node_runtime::ExistentialDeposit::get() + 1, - )), + Call::Balances(BalancesCall::transfer_keep_alive { + dest: sp_runtime::MultiAddress::Id(receiver), + value: node_runtime::ExistentialDeposit::get() + 1, + }), BlockType::RandomTransfersReaping => { - Call::Balances(BalancesCall::transfer( - sp_runtime::MultiAddress::Id(receiver), + Call::Balances(BalancesCall::transfer { + dest: sp_runtime::MultiAddress::Id(receiver), // Transfer so that ending balance would be 1 less than existential // deposit so that we kill the sender account. 
- 100 * DOLLARS - (node_runtime::ExistentialDeposit::get() - 1), - )) + value: 100 * DOLLARS - (node_runtime::ExistentialDeposit::get() - 1), + }) }, - BlockType::Noop => Call::System(SystemCall::remark(Vec::new())), + BlockType::Noop => Call::System(SystemCall::remark { remark: Vec::new() }), }, }, self.runtime_version.spec_version, diff --git a/client/finality-grandpa-warp-sync/Cargo.toml b/client/finality-grandpa-warp-sync/Cargo.toml index 6bb00b936574c..a444125fdfa11 100644 --- a/client/finality-grandpa-warp-sync/Cargo.toml +++ b/client/finality-grandpa-warp-sync/Cargo.toml @@ -26,7 +26,7 @@ sp-finality-grandpa = { version = "4.0.0-dev", path = "../../primitives/finality sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } [dev-dependencies] -finality-grandpa = { version = "0.14.1" } +finality-grandpa = { version = "0.14.4" } rand = "0.8" sc-block-builder = { version = "0.10.0-dev", path = "../block-builder" } sp-consensus = { version = "0.10.0-dev", path = "../../primitives/consensus/common" } diff --git a/client/finality-grandpa/Cargo.toml b/client/finality-grandpa/Cargo.toml index ffdfc80c4eb9b..7fdd91e557ab7 100644 --- a/client/finality-grandpa/Cargo.toml +++ b/client/finality-grandpa/Cargo.toml @@ -43,7 +43,7 @@ sc-network-gossip = { version = "0.10.0-dev", path = "../network-gossip" } sp-finality-grandpa = { version = "4.0.0-dev", path = "../../primitives/finality-grandpa" } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0" } sc-block-builder = { version = "0.10.0-dev", path = "../block-builder" } -finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.4", features = ["derive-codec"] } async-trait = "0.1.50" [dev-dependencies] diff --git a/client/finality-grandpa/rpc/Cargo.toml b/client/finality-grandpa/rpc/Cargo.toml index 4f989ad4964cd..d2976ee71275f 100644 --- a/client/finality-grandpa/rpc/Cargo.toml +++ 
b/client/finality-grandpa/rpc/Cargo.toml @@ -14,7 +14,7 @@ sc-rpc = { version = "4.0.0-dev", path = "../../rpc" } sp-blockchain = { version = "4.0.0-dev", path = "../../../primitives/blockchain" } sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } sp-runtime = { version = "4.0.0-dev", path = "../../../primitives/runtime" } -finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.4", features = ["derive-codec"] } jsonrpc-core = "18.0.0" jsonrpc-core-client = "18.0.0" jsonrpc-derive = "18.0.0" diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 504dd6957aeb9..05e7912dd07c6 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -33,6 +34,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/assets/src/benchmarking.rs b/frame/assets/src/benchmarking.rs index 89a1308db1712..43eadffbe8497 100644 --- a/frame/assets/src/benchmarking.rs +++ b/frame/assets/src/benchmarking.rs @@ -330,13 +330,13 @@ benchmarks_instance_pallet! 
{ create_default_asset::(true); let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_set_metadata( - Default::default(), - name.clone(), - symbol.clone(), + let call = Call::::force_set_metadata { + id: Default::default(), + name: name.clone(), + symbol: symbol.clone(), decimals, - false, - ); + is_frozen: false, + }; }: { call.dispatch_bypass_filter(origin)? } verify { let id = Default::default(); @@ -351,7 +351,7 @@ benchmarks_instance_pallet! { Assets::::set_metadata(origin, Default::default(), dummy.clone(), dummy, 12)?; let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_clear_metadata(Default::default()); + let call = Call::::force_clear_metadata { id: Default::default() }; }: { call.dispatch_bypass_filter(origin)? } verify { assert_last_event::(Event::MetadataCleared(Default::default()).into()); @@ -361,16 +361,16 @@ benchmarks_instance_pallet! { let (caller, caller_lookup) = create_default_asset::(true); let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_asset_status( - Default::default(), - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - 100u32.into(), - true, - false, - ); + let call = Call::::force_asset_status { + id: Default::default(), + owner: caller_lookup.clone(), + issuer: caller_lookup.clone(), + admin: caller_lookup.clone(), + freezer: caller_lookup.clone(), + min_balance: 100u32.into(), + is_sufficient: true, + is_frozen: false, + }; }: { call.dispatch_bypass_filter(origin)? 
} verify { assert_last_event::(Event::AssetStatusChanged(Default::default()).into()); diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index 386a3ea05c082..797a3ae7ee9fb 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -277,7 +277,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::Balance = "Balance", T::AssetId = "AssetId")] pub enum Event, I: 'static = ()> { /// Some asset class was created. \[asset_id, creator, owner\] Created(T::AssetId, T::AccountId, T::AccountId), diff --git a/frame/assets/src/types.rs b/frame/assets/src/types.rs index 5e867550b3801..bc2edce848a64 100644 --- a/frame/assets/src/types.rs +++ b/frame/assets/src/types.rs @@ -19,6 +19,7 @@ use super::*; use frame_support::pallet_prelude::*; +use scale_info::TypeInfo; use frame_support::traits::{fungible, tokens::BalanceConversion}; use sp_runtime::{traits::Convert, FixedPointNumber, FixedPointOperand, FixedU128}; @@ -26,7 +27,7 @@ use sp_runtime::{traits::Convert, FixedPointNumber, FixedPointOperand, FixedU128 pub(super) type DepositBalanceOf = <>::Currency as Currency<::AccountId>>::Balance; -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct AssetDetails { /// Can change `owner`, `issuer`, `freezer` and `admin` accounts. pub(super) owner: AccountId, @@ -66,7 +67,7 @@ impl AssetDetails { /// The amount of funds approved for the balance transfer from the owner to some delegated /// target. @@ -75,7 +76,7 @@ pub struct Approval { pub(super) deposit: DepositBalance, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, MaxEncodedLen)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, MaxEncodedLen, TypeInfo)] pub struct AssetBalance { /// The balance. 
pub(super) balance: Balance, @@ -87,7 +88,7 @@ pub struct AssetBalance { pub(super) extra: Extra, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, Default, RuntimeDebug, MaxEncodedLen)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, Default, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct AssetMetadata { /// The balance deposited for this metadata. /// @@ -104,7 +105,7 @@ pub struct AssetMetadata { } /// Witness data for the destroy transactions. -#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct DestroyWitness { /// The number of accounts holding the asset. #[codec(compact)] diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 2519772ed46de..53a8c3a81165b 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -28,6 +29,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "sp-runtime/std", diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index b068dc7ba1a96..9cf92c3bd2337 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -49,6 +49,7 @@ use frame_support::{ weights::Weight, RuntimeDebugNoBound, }; +use scale_info::TypeInfo; use sp_io::hashing::blake2_256; use sp_runtime::RuntimeDebug; use sp_std::{ 
@@ -58,7 +59,8 @@ use sp_std::{ }; /// Pending atomic swap operation. -#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode)] +#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct PendingSwap { /// Source of the swap. pub source: T::AccountId, @@ -91,7 +93,8 @@ pub trait SwapAction { } /// A swap action that only allows transferring balances. -#[derive(Clone, RuntimeDebug, Eq, PartialEq, Encode, Decode)] +#[derive(Clone, RuntimeDebug, Eq, PartialEq, Encode, Decode, TypeInfo)] +#[scale_info(skip_type_params(C))] pub struct BalanceSwapAction> { value: >::Balance, _marker: PhantomData, @@ -213,7 +216,6 @@ pub mod pallet { /// Event of atomic swap pallet. #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId", PendingSwap = "PendingSwap")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// Swap created. \[account, proof, swap\] diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index ee7b15f91e359..8f5c42bc3c465 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } @@ -31,6 +32,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git 
a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index f5f695b0a0644..80a320c31e77f 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -18,6 +18,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", features = [ "historical", @@ -36,6 +37,7 @@ std = [ "sp-application-crypto/std", "sp-authority-discovery/std", "codec/std", + "scale-info/std", "sp-std/std", "pallet-session/std", "sp-runtime/std", diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index a5b8d96160918..120b72f8e6511 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-authorship = { version = "4.0.0-dev", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -31,6 +32,7 @@ sp-io = { version = "4.0.0-dev", path = "../../primitives/io" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", "frame-support/std", diff --git a/frame/authorship/src/lib.rs b/frame/authorship/src/lib.rs index 325f80c74aa1e..5d36adabe888f 100644 --- a/frame/authorship/src/lib.rs +++ b/frame/authorship/src/lib.rs @@ -115,7 +115,7 @@ where } } -#[derive(Encode, Decode, 
sp_runtime::RuntimeDebug)] +#[derive(Encode, Decode, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(any(feature = "std", test), derive(PartialEq))] enum UncleEntryItem { InclusionHeight(BlockNumber), @@ -238,7 +238,7 @@ pub mod pallet { fn create_inherent(data: &InherentData) -> Option { let uncles = data.uncles().unwrap_or_default(); - let mut set_uncles = Vec::new(); + let mut new_uncles = Vec::new(); if !uncles.is_empty() { let prev_uncles = >::get(); @@ -257,10 +257,10 @@ pub mod pallet { match Self::verify_uncle(&uncle, &existing_hashes, &mut acc) { Ok(_) => { let hash = uncle.hash(); - set_uncles.push(uncle); + new_uncles.push(uncle); existing_hashes.push(hash); - if set_uncles.len() == MAX_UNCLES { + if new_uncles.len() == MAX_UNCLES { break } }, @@ -271,10 +271,10 @@ pub mod pallet { } } - if set_uncles.is_empty() { + if new_uncles.is_empty() { None } else { - Some(Call::set_uncles(set_uncles)) + Some(Call::set_uncles { new_uncles }) } } @@ -283,14 +283,14 @@ pub mod pallet { _data: &InherentData, ) -> result::Result<(), Self::Error> { match call { - Call::set_uncles(ref uncles) if uncles.len() > MAX_UNCLES => + Call::set_uncles { ref new_uncles } if new_uncles.len() > MAX_UNCLES => Err(InherentError::Uncles(Error::::TooManyUncles.as_str().into())), _ => Ok(()), } } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::set_uncles(_)) + matches!(call, Call::set_uncles { .. 
}) } } } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index f1a93bb418e9f..d95f1419fd035 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } @@ -42,6 +43,7 @@ frame-election-provider-support = { version = "4.0.0-dev", path = "../election-p default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-benchmarking/std", "frame-support/std", "frame-system/std", diff --git a/frame/babe/src/equivocation.rs b/frame/babe/src/equivocation.rs index 2558ca8a6e255..2397918d1ef13 100644 --- a/frame/babe/src/equivocation.rs +++ b/frame/babe/src/equivocation.rs @@ -155,8 +155,10 @@ where ) -> DispatchResult { use frame_system::offchain::SubmitTransaction; - let call = - Call::report_equivocation_unsigned(Box::new(equivocation_proof), key_owner_proof); + let call = Call::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof), + key_owner_proof, + }; match SubmitTransaction::>::submit_unsigned_transaction(call.into()) { Ok(()) => log::info!( @@ -184,7 +186,7 @@ where /// unsigned equivocation reports. 
impl Pallet { pub fn validate_unsigned(source: TransactionSource, call: &Call) -> TransactionValidity { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { // discard equivocation report not coming from the local node match source { TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ }, @@ -219,7 +221,7 @@ impl Pallet { } pub fn pre_dispatch(call: &Call) -> Result<(), TransactionValidityError> { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { is_known_offence::(equivocation_proof, key_owner_proof) } else { Err(InvalidTransaction::Call.into()) diff --git a/frame/babe/src/tests.rs b/frame/babe/src/tests.rs index edb3eeb059d83..dc2f74c719519 100644 --- a/frame/babe/src/tests.rs +++ b/frame/babe/src/tests.rs @@ -726,10 +726,10 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { let key = (sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public()); let key_owner_proof = Historical::prove(key).unwrap(); - let inner = Call::report_equivocation_unsigned( - Box::new(equivocation_proof.clone()), - key_owner_proof.clone(), - ); + let inner = Call::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof.clone()), + key_owner_proof: key_owner_proof.clone(), + }; // only local/inblock reports are allowed assert_eq!( @@ -822,10 +822,10 @@ fn valid_equivocation_reports_dont_pay_fees() { .unwrap(); // check the dispatch info for the call. 
- let info = Call::::report_equivocation_unsigned( - Box::new(equivocation_proof.clone()), - key_owner_proof.clone(), - ) + let info = Call::::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof.clone()), + key_owner_proof: key_owner_proof.clone(), + } .get_dispatch_info(); // it should have non-zero weight and the fee has to be paid. diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 8b66f08d45d97..2263387d6d8ef 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } @@ -30,6 +31,7 @@ pallet-transaction-payment = { version = "4.0.0-dev", path = "../transaction-pay default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-benchmarking/std", diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 7ab8a54de232d..f7102ad4895f9 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -177,6 +177,7 @@ use frame_support::{ WeakBoundedVec, }; use frame_system as system; +use scale_info::TypeInfo; use sp_runtime::{ traits::{ AtLeast32BitUnsigned, Bounded, CheckedAdd, CheckedSub, MaybeSerializeDeserialize, @@ -206,7 +207,8 @@ pub mod pallet { + Copy + MaybeSerializeDeserialize + Debug - + MaxEncodedLen; + + MaxEncodedLen + + TypeInfo; /// Handler for the unbalanced reduction when removing a dust account. 
type DustRemoval: OnUnbalanced>; @@ -435,7 +437,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::Balance = "Balance")] pub enum Event, I: 'static = ()> { /// An account was created with some free balance. \[account, free_balance\] Endowed(T::AccountId, T::Balance), @@ -599,7 +600,7 @@ impl, I: 'static> GenesisConfig { } /// Simplified reasons for withdrawing balance. -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub enum Reasons { /// Paying system transaction fees. Fee = 0, @@ -633,7 +634,7 @@ impl BitOr for Reasons { /// A single lock on a balance. There can be many of these on an account and they "overlap", so the /// same balance is frozen by multiple locks. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct BalanceLock { /// An identifier for this lock. Only one lock may be in existence for each identifier. pub id: LockIdentifier, @@ -644,7 +645,7 @@ pub struct BalanceLock { } /// Store named reserved balance. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct ReserveData { /// The identifier for the named reserve. pub id: ReserveIdentifier, @@ -653,7 +654,7 @@ pub struct ReserveData { } /// All balance information for an account. -#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct AccountData { /// Non-reserved part of the balance. 
There may still be restrictions on this, but it is the /// total pool what may in principle be transferred, reserved and used for tipping. @@ -700,7 +701,7 @@ impl AccountData { // A value placed in storage that represents the current version of the Balances storage. // This value is used by the `on_runtime_upgrade` logic to determine whether we run // storage migration logic. This should match directly with the semantic versions of the Rust crate. -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] enum Releases { V1_0_0, V2_0_0, diff --git a/frame/balances/src/tests.rs b/frame/balances/src/tests.rs index fd57371b3a16b..a08643821eba8 100644 --- a/frame/balances/src/tests.rs +++ b/frame/balances/src/tests.rs @@ -39,7 +39,7 @@ macro_rules! decl_tests { const ID_2: LockIdentifier = *b"2 "; pub const CALL: &<$test as frame_system::Config>::Call = - &Call::Balances(pallet_balances::Call::transfer(0, 0)); + &Call::Balances(pallet_balances::Call::transfer { dest: 0, value: 0 }); /// create a transaction info struct from weight. Handy to avoid building the whole struct. 
pub fn info_from_weight(w: Weight) -> DispatchInfo { diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index ad15b8e6042ef..ea690d966c979 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.3", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "4.0.0-dev", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime", default-features = false } @@ -33,6 +34,7 @@ hex-literal = "0.3.1" default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime-interface/std", "sp-runtime/std", "sp-api/std", diff --git a/frame/benchmarking/src/lib.rs b/frame/benchmarking/src/lib.rs index 11c755e06c95f..6c124a8a75761 100644 --- a/frame/benchmarking/src/lib.rs +++ b/frame/benchmarking/src/lib.rs @@ -331,30 +331,38 @@ macro_rules! benchmarks_iter { verify $postcode:block $( $rest:tt )* ) => { - $crate::benchmarks_iter! { - { $( $instance: $instance_bound )? } - { $( $where_clause )* } - ( $( $names )* ) - ( $( $names_extra )* ) - ( $( $names_skip_meta )* ) - $name { - $( $code )* - let __benchmarked_call_encoded = $crate::frame_support::codec::Encode::encode( - &>::$dispatch($( $arg ),*) - ); - }: { - let call_decoded = < - Call - as $crate::frame_support::codec::Decode - >::decode(&mut &__benchmarked_call_encoded[..]) - .expect("call is encoded above, encoding must be correct"); + $crate::paste::paste! { + $crate::benchmarks_iter! { + { $( $instance: $instance_bound )? 
} + { $( $where_clause )* } + ( $( $names )* ) + ( $( $names_extra )* ) + ( $( $names_skip_meta )* ) + $name { + $( $code )* + let __call = Call::< + T + $( , $instance )? + >:: [< new_call_variant_ $dispatch >] ( + $($arg),* + ); + let __benchmarked_call_encoded = $crate::frame_support::codec::Encode::encode( + &__call + ); + }: { + let call_decoded = < + Call + as $crate::frame_support::codec::Decode + >::decode(&mut &__benchmarked_call_encoded[..]) + .expect("call is encoded above, encoding must be correct"); - < - Call as $crate::frame_support::traits::UnfilteredDispatchable - >::dispatch_bypass_filter(call_decoded, $origin.into())?; + < + Call as $crate::frame_support::traits::UnfilteredDispatchable + >::dispatch_bypass_filter(call_decoded, $origin.into())?; + } + verify $postcode + $( $rest )* } - verify $postcode - $( $rest )* } }; // iteration arm: diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 84147b96f9104..3bb184d5b3393 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } @@ -33,6 +34,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/bounties/src/lib.rs b/frame/bounties/src/lib.rs index a5be4a00cb94c..77a8e47174019 100644 --- a/frame/bounties/src/lib.rs +++ b/frame/bounties/src/lib.rs @@ -97,6 +97,7 @@ use frame_support::weights::Weight; 
use codec::{Decode, Encode}; use frame_system::{self as system, ensure_signed}; +use scale_info::TypeInfo; pub use weights::WeightInfo; type BalanceOf = pallet_treasury::BalanceOf; @@ -136,7 +137,7 @@ pub trait Config: frame_system::Config + pallet_treasury::Config { pub type BountyIndex = u32; /// A bounty proposal. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Bounty { /// The account proposing it. proposer: AccountId, @@ -153,7 +154,7 @@ pub struct Bounty { } /// The status of a bounty proposal. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum BountyStatus { /// The bounty is proposed and waiting for approval. Proposed, diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index 8828c6a61cd3f..e88f28d417730 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -30,6 +31,7 @@ default = ["std"] std = [ "codec/std", "log/std", + "scale-info/std", "sp-core/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/collective/src/benchmarking.rs b/frame/collective/src/benchmarking.rs index 1ce7750278bc4..c7e695babf27d 100644 --- a/frame/collective/src/benchmarking.rs +++ b/frame/collective/src/benchmarking.rs @@ -65,7 +65,9 @@ benchmarks_instance_pallet! { let length = 100; for i in 0 .. 
p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; length]).into(); + let proposal: T::Proposal = SystemCall::::remark { + remark: vec![i as u8; length] + }.into(); Collective::::propose( SystemOrigin::Signed(last_old_member.clone()).into(), threshold, @@ -121,7 +123,7 @@ benchmarks_instance_pallet! { Collective::::set_members(SystemOrigin::Root.into(), members, None, T::MaxMembers::get())?; - let proposal: T::Proposal = SystemCall::::remark(vec![1; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![1; b as usize] }.into(); }: _(SystemOrigin::Signed(caller), Box::new(proposal.clone()), bytes_in_storage) verify { @@ -151,7 +153,7 @@ benchmarks_instance_pallet! { Collective::::set_members(SystemOrigin::Root.into(), members, None, T::MaxMembers::get())?; - let proposal: T::Proposal = SystemCall::::remark(vec![1; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![1; b as usize] }.into(); let threshold = 1; }: propose(SystemOrigin::Signed(caller), threshold, Box::new(proposal.clone()), bytes_in_storage) @@ -185,7 +187,7 @@ benchmarks_instance_pallet! { // Add previous proposals. for i in 0 .. p - 1 { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -196,7 +198,7 @@ benchmarks_instance_pallet! 
{ assert_eq!(Collective::::proposals().len(), (p - 1) as usize); - let proposal: T::Proposal = SystemCall::::remark(vec![p as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![p as u8; b as usize] }.into(); }: propose(SystemOrigin::Signed(caller.clone()), threshold, Box::new(proposal.clone()), bytes_in_storage) verify { @@ -233,7 +235,7 @@ benchmarks_instance_pallet! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(proposer.clone()).into(), threshold, @@ -308,7 +310,9 @@ benchmarks_instance_pallet! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; bytes as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { + remark: vec![i as u8; bytes as usize] + }.into(); Collective::::propose( SystemOrigin::Signed(proposer.clone()).into(), threshold, @@ -385,7 +389,7 @@ benchmarks_instance_pallet! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -471,7 +475,9 @@ benchmarks_instance_pallet! { let mut last_hash = T::Hash::default(); for i in 0 .. 
p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; bytes as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { + remark: vec![i as u8; bytes as usize] + }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -543,7 +549,7 @@ benchmarks_instance_pallet! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -614,7 +620,7 @@ benchmarks_instance_pallet! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index fa537a0a439a1..89d4c8a150c36 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -42,6 +42,7 @@ #![cfg_attr(not(feature = "std"), no_std)] #![recursion_limit = "128"] +use scale_info::TypeInfo; use sp_core::u32_trait::Value as U32; use sp_io::storage; use sp_runtime::{traits::Hash, RuntimeDebug}; @@ -124,7 +125,8 @@ impl DefaultVote for MoreThanMajorityThenPrimeDefaultVote { } /// Origin for the collective module. 
-#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, TypeInfo)] +#[scale_info(skip_type_params(I))] pub enum RawOrigin { /// It has been condoned by a given number of members of the collective from a given total. Members(MemberCount, MemberCount), @@ -144,7 +146,7 @@ impl GetBacking for RawOrigin { } /// Info for keeping track of a motion being voted on. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct Votes { /// The proposal's unique index. index: ProposalIndex, @@ -274,7 +276,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::Hash = "Hash")] pub enum Event, I: 'static = ()> { /// A motion (given hash) has been proposed (by given account) with a threshold (given /// `MemberCount`). diff --git a/frame/collective/src/tests.rs b/frame/collective/src/tests.rs index 5c662428fd992..b8feb64867cf8 100644 --- a/frame/collective/src/tests.rs +++ b/frame/collective/src/tests.rs @@ -172,7 +172,7 @@ pub fn new_test_ext() -> sp_io::TestExternalities { } fn make_proposal(value: u64) -> Call { - Call::System(frame_system::Call::remark(value.encode())) + Call::System(frame_system::Call::remark { remark: value.encode() }) } fn record(event: Event) -> EventRecord { @@ -229,8 +229,11 @@ fn close_works() { #[test] fn proposal_weight_limit_works_on_approve() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -256,8 +259,11 @@ 
fn proposal_weight_limit_works_on_approve() { #[test] fn proposal_weight_limit_ignored_on_disapprove() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -561,8 +567,11 @@ fn limit_active_proposals() { #[test] fn correct_validate_and_get_proposal() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let length = proposal.encode().len() as u32; assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), length)); @@ -782,7 +791,7 @@ fn motions_reproposing_disapproved_works() { #[test] fn motions_approval_with_enough_votes_and_lower_voting_threshold_works() { new_test_ext().execute_with(|| { - let proposal = Call::Democracy(mock_democracy::Call::external_propose_majority()); + let proposal = Call::Democracy(mock_democracy::Call::external_propose_majority {}); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash: H256 = proposal.blake2_256().into(); diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 36d05e35180be..80dc0b05e7511 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -18,6 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { 
version = "0.4", default-features = false } pwasm-utils = { version = "0.18.2", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } @@ -60,6 +61,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-core/std", "sp-runtime/std", "sp-io/std", diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index e353b3af04714..b441d88453ae2 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1", features = ["derive"], optional = true } # Substrate Dependencies (This crate should not rely on frame) @@ -26,6 +27,7 @@ sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../.. 
default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-core/std", "sp-runtime/std", "sp-std/std", diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index fb5addc5a437b..e5f6d1ec7eb8e 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } # Substrate Dependencies pallet-contracts-primitives = { version = "4.0.0-dev", default-features = false, path = "../../common" } @@ -26,6 +27,7 @@ default = ["std"] std = [ "sp-api/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "pallet-contracts-primitives/std", diff --git a/frame/contracts/src/exec.rs b/frame/contracts/src/exec.rs index 516de3a22d5ae..cc468466c2922 100644 --- a/frame/contracts/src/exec.rs +++ b/frame/contracts/src/exec.rs @@ -2060,7 +2060,9 @@ mod tests { #[test] fn call_runtime_works() { let code_hash = MockLoader::insert(Call, |ctx, _| { - let call = Call::System(frame_system::Call::remark_with_event(b"Hello World".to_vec())); + let call = Call::System(frame_system::Call::remark_with_event { + remark: b"Hello World".to_vec(), + }); ctx.ext.call_runtime(call).unwrap(); exec_success() }); @@ -2094,20 +2096,19 @@ mod tests { use pallet_utility::Call as UtilCall; // remark should still be allowed - let allowed_call = Call::System(SysCall::remark_with_event(b"Hello".to_vec())); + let allowed_call = + Call::System(SysCall::remark_with_event { remark: b"Hello".to_vec() }); // transfers are disallowed by the `TestFiler` (see below) - let forbidden_call = Call::Balances(BalanceCall::transfer(CHARLIE, 22)); + let forbidden_call = Call::Balances(BalanceCall::transfer { dest: CHARLIE, value: 22 }); // simple cases: direct call 
assert_err!(ctx.ext.call_runtime(forbidden_call.clone()), BadOrigin); // as part of a patch: return is OK (but it interrupted the batch) - assert_ok!(ctx.ext.call_runtime(Call::Utility(UtilCall::batch(vec![ - allowed_call.clone(), - forbidden_call, - allowed_call - ]))),); + assert_ok!(ctx.ext.call_runtime(Call::Utility(UtilCall::batch { + calls: vec![allowed_call.clone(), forbidden_call, allowed_call] + })),); // the transfer wasn't performed assert_eq!(get_balance(&CHARLIE), 0); @@ -2116,7 +2117,7 @@ mod tests { }); TestFilter::set_filter(|call| match call { - Call::Balances(pallet_balances::Call::transfer(_, _)) => false, + Call::Balances(pallet_balances::Call::transfer { .. }) => false, _ => true, }); diff --git a/frame/contracts/src/lib.rs b/frame/contracts/src/lib.rs index 7b165e51dcfe1..77efcc6986e64 100644 --- a/frame/contracts/src/lib.rs +++ b/frame/contracts/src/lib.rs @@ -386,7 +386,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::Hash = "Hash", BalanceOf = "Balance")] pub enum Event { /// Contract deployed by address at the specified address. \[deployer, contract\] Instantiated(T::AccountId, T::AccountId), diff --git a/frame/contracts/src/schedule.rs b/frame/contracts/src/schedule.rs index 51aefa8bdaf63..c14165b4c6aec 100644 --- a/frame/contracts/src/schedule.rs +++ b/frame/contracts/src/schedule.rs @@ -24,6 +24,7 @@ use codec::{Decode, Encode}; use frame_support::{weights::Weight, DefaultNoBound}; use pallet_contracts_proc_macro::{ScheduleDebug, WeightDebug}; use pwasm_utils::{parity_wasm::elements, rules}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_runtime::RuntimeDebug; @@ -72,7 +73,8 @@ pub const INSTR_BENCHMARK_BATCH_SIZE: u32 = 100; /// changes are made to its values. 
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(bound(serialize = "", deserialize = "")))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, ScheduleDebug, DefaultNoBound)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, ScheduleDebug, DefaultNoBound, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct Schedule { /// Describes the upper limits on various metrics. pub limits: Limits, @@ -92,7 +94,7 @@ pub struct Schedule { /// values will break existing contracts which are above the new limits when a /// re-instrumentation is triggered. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Limits { /// The maximum number of topics supported by an event. pub event_topics: u32, @@ -174,7 +176,8 @@ impl Limits { /// that use them as supporting instructions. Supporting means mainly pushing arguments /// and dropping return values in order to maintain a valid module. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct InstructionWeights { /// Version of the instruction weights. /// @@ -247,7 +250,8 @@ pub struct InstructionWeights { /// Describes the weight for each imported function that a contract is allowed to call. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct HostFnWeights { /// Weight of calling `seal_caller`. 
pub caller: Weight, diff --git a/frame/contracts/src/storage.rs b/frame/contracts/src/storage.rs index 510a1c95f9a31..41db0796717e4 100644 --- a/frame/contracts/src/storage.rs +++ b/frame/contracts/src/storage.rs @@ -29,6 +29,7 @@ use frame_support::{ traits::Get, weights::Weight, }; +use scale_info::TypeInfo; use sp_core::crypto::UncheckedFrom; use sp_io::hashing::blake2_256; use sp_runtime::{traits::Hash, RuntimeDebug}; @@ -38,7 +39,7 @@ pub type ContractInfo = RawContractInfo>; /// Information for managing an account and its sub trie abstraction. /// This is the required info to cache for an account. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct RawContractInfo { /// Unique ID for the subtree encoded as a bytes vector. pub trie_id: TrieId, @@ -61,7 +62,7 @@ fn child_trie_info(trie_id: &[u8]) -> ChildInfo { ChildInfo::new_default(trie_id) } -#[derive(Encode, Decode)] +#[derive(Encode, Decode, TypeInfo)] pub struct DeletedContract { pub(crate) trie_id: TrieId, } diff --git a/frame/contracts/src/tests.rs b/frame/contracts/src/tests.rs index 28f05fd390d5d..f5b95c192c42e 100644 --- a/frame/contracts/src/tests.rs +++ b/frame/contracts/src/tests.rs @@ -1781,7 +1781,12 @@ fn gas_estimation_call_runtime() { // Call something trivial with a huge gas limit so that we can observe the effects // of pre-charging. This should create a difference between consumed and required. 
- let call = Call::Contracts(crate::Call::call(addr_callee, 0, GAS_LIMIT / 3, vec![])); + let call = Call::Contracts(crate::Call::call { + dest: addr_callee, + value: 0, + gas_limit: GAS_LIMIT / 3, + data: vec![], + }); let result = Contracts::bare_call(ALICE, addr_caller.clone(), 0, GAS_LIMIT, call.encode(), false); assert_ok!(&result.result); diff --git a/frame/contracts/src/wasm/mod.rs b/frame/contracts/src/wasm/mod.rs index b92ed111e9881..855cb6e45091f 100644 --- a/frame/contracts/src/wasm/mod.rs +++ b/frame/contracts/src/wasm/mod.rs @@ -50,7 +50,8 @@ pub use tests::MockExt; /// `instruction_weights_version` and `code` when a contract with an outdated instrumention is /// called. Therefore one must be careful when holding any in-memory representation of this /// type while calling into a contract as those fields can get out of date. -#[derive(Clone, Encode, Decode)] +#[derive(Clone, Encode, Decode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct PrefabWasmModule { /// Version of the instruction weights with which the code was instrumented. 
#[codec(compact)] @@ -1967,7 +1968,7 @@ mod tests { #[cfg(feature = "unstable-interface")] fn call_runtime_works() { use std::convert::TryInto; - let call = Call::System(frame_system::Call::remark(b"Hello World".to_vec())); + let call = Call::System(frame_system::Call::remark { remark: b"Hello World".to_vec() }); let mut ext = MockExt::default(); let result = execute(CODE_CALL_RUNTIME, call.encode(), &mut ext).unwrap(); assert_eq!(*ext.runtime_calls.borrow(), vec![call]); diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 98b034c3ce7ee..94719553e28aa 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -17,6 +17,7 @@ serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -34,6 +35,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "frame-benchmarking/std", diff --git a/frame/democracy/src/benchmarking.rs b/frame/democracy/src/benchmarking.rs index 487a525719410..7d4d7aee140b9 100644 --- a/frame/democracy/src/benchmarking.rs +++ b/frame/democracy/src/benchmarking.rs @@ -73,7 +73,7 @@ fn add_referendum(n: u32) -> Result { None, 63, frame_system::RawOrigin::Root.into(), - Call::enact_proposal(proposal_hash, referendum_index).into(), + Call::enact_proposal { proposal_hash, index: referendum_index }.into(), ) .map_err(|_| "failed to schedule named")?; Ok(referendum_index) @@ -194,7 +194,7 @@ benchmarks! 
{ emergency_cancel { let origin = T::CancellationOrigin::successful_origin(); let referendum_index = add_referendum::(0)?; - let call = Call::::emergency_cancel(referendum_index); + let call = Call::::emergency_cancel { ref_index: referendum_index }; assert_ok!(Democracy::::referendum_status(referendum_index)); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -224,7 +224,7 @@ benchmarks! { let referendum_index = add_referendum::(0)?; assert_ok!(Democracy::::referendum_status(referendum_index)); - let call = Call::::blacklist(hash, Some(referendum_index)); + let call = Call::::blacklist { proposal_hash: hash, maybe_ref_index: Some(referendum_index) }; let origin = T::BlacklistOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -247,7 +247,7 @@ benchmarks! { (T::BlockNumber::zero(), vec![T::AccountId::default(); v as usize]) ); - let call = Call::::external_propose(proposal_hash); + let call = Call::::external_propose { proposal_hash }; }: { call.dispatch_bypass_filter(origin)? } verify { // External proposal created @@ -257,7 +257,7 @@ benchmarks! { external_propose_majority { let origin = T::ExternalMajorityOrigin::successful_origin(); let proposal_hash = T::Hashing::hash_of(&0); - let call = Call::::external_propose_majority(proposal_hash); + let call = Call::::external_propose_majority { proposal_hash }; }: { call.dispatch_bypass_filter(origin)? } verify { // External proposal created @@ -267,7 +267,7 @@ benchmarks! { external_propose_default { let origin = T::ExternalDefaultOrigin::successful_origin(); let proposal_hash = T::Hashing::hash_of(&0); - let call = Call::::external_propose_default(proposal_hash); + let call = Call::::external_propose_default { proposal_hash }; }: { call.dispatch_bypass_filter(origin)? } verify { // External proposal created @@ -283,7 +283,11 @@ benchmarks! 
{ let origin_fast_track = T::FastTrackOrigin::successful_origin(); let voting_period = T::FastTrackVotingPeriod::get(); let delay = 0u32; - let call = Call::::fast_track(proposal_hash, voting_period.into(), delay.into()); + let call = Call::::fast_track { + proposal_hash, + voting_period: voting_period.into(), + delay: delay.into() + }; }: { call.dispatch_bypass_filter(origin_fast_track)? } verify { @@ -306,7 +310,7 @@ benchmarks! { vetoers.sort(); Blacklist::::insert(proposal_hash, (T::BlockNumber::zero(), vetoers)); - let call = Call::::veto_external(proposal_hash); + let call = Call::::veto_external { proposal_hash }; let origin = T::VetoOrigin::successful_origin(); ensure!(NextExternal::::get().is_some(), "no external proposal"); }: { call.dispatch_bypass_filter(origin)? } @@ -356,7 +360,7 @@ benchmarks! { let origin = T::ExternalMajorityOrigin::successful_origin(); let proposal_hash = T::Hashing::hash_of(&r); - let call = Call::::external_propose_majority(proposal_hash); + let call = Call::::external_propose_majority { proposal_hash }; call.dispatch_bypass_filter(origin)?; // External proposal created ensure!(>::exists(), "External proposal didn't work"); @@ -739,7 +743,7 @@ benchmarks! { let b in 0 .. 
MAX_BYTES; let proposer = funded_account::("proposer", 0); - let raw_call = Call::note_preimage(vec![1; b as usize]); + let raw_call = Call::note_preimage { encoded_proposal: vec![1; b as usize] }; let generic_call: T::Proposal = raw_call.into(); let encoded_proposal = generic_call.encode(); let proposal_hash = T::Hashing::hash(&encoded_proposal[..]); diff --git a/frame/democracy/src/conviction.rs b/frame/democracy/src/conviction.rs index 6b77acfab5b00..b4f24c93bb40f 100644 --- a/frame/democracy/src/conviction.rs +++ b/frame/democracy/src/conviction.rs @@ -19,6 +19,7 @@ use crate::types::Delegations; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{Bounded, CheckedDiv, CheckedMul, Zero}, RuntimeDebug, @@ -26,7 +27,7 @@ use sp_runtime::{ use sp_std::{convert::TryFrom, result::Result}; /// A value denoting the strength of conviction of a vote. -#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo)] pub enum Conviction { /// 0.1x votes, unlocked. None, diff --git a/frame/democracy/src/lib.rs b/frame/democracy/src/lib.rs index 473ac964692cf..8bc6921c4f8ad 100644 --- a/frame/democracy/src/lib.rs +++ b/frame/democracy/src/lib.rs @@ -162,6 +162,7 @@ use frame_support::{ }, weights::Weight, }; +use scale_info::TypeInfo; use sp_runtime::{ traits::{Bounded, Dispatchable, Hash, Saturating, Zero}, ArithmeticError, DispatchError, DispatchResult, RuntimeDebug, @@ -205,7 +206,7 @@ type NegativeImbalanceOf = <::Currency as Currency< ::AccountId, >>::NegativeImbalance; -#[derive(Clone, Encode, Decode, RuntimeDebug)] +#[derive(Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum PreimageStatus { /// The preimage is imminently needed at the argument. 
Missing(BlockNumber), @@ -232,7 +233,7 @@ impl PreimageStatus = "Vec", - BalanceOf = "Balance", - T::BlockNumber = "BlockNumber", - T::Hash = "Hash", - )] pub enum Event { /// A motion has been proposed by a public account. \[proposal_index, deposit\] Proposed(PropIndex, BalanceOf), @@ -1714,7 +1708,7 @@ impl Pallet { None, 63, frame_system::RawOrigin::Root.into(), - Call::enact_proposal(status.proposal_hash, index).into(), + Call::enact_proposal { proposal_hash: status.proposal_hash, index }.into(), ) .is_err() { diff --git a/frame/democracy/src/tests.rs b/frame/democracy/src/tests.rs index 9a5e47c89ac77..75104db51b971 100644 --- a/frame/democracy/src/tests.rs +++ b/frame/democracy/src/tests.rs @@ -72,7 +72,7 @@ frame_support::construct_runtime!( pub struct BaseFilter; impl Contains for BaseFilter { fn contains(call: &Call) -> bool { - !matches!(call, &Call::Balances(pallet_balances::Call::set_balance(..))) + !matches!(call, &Call::Balances(pallet_balances::Call::set_balance { .. })) } } @@ -226,7 +226,8 @@ fn params_should_work() { } fn set_balance_proposal(value: u64) -> Vec { - Call::Balances(pallet_balances::Call::set_balance(42, value, 0)).encode() + Call::Balances(pallet_balances::Call::set_balance { who: 42, new_free: value, new_reserved: 0 }) + .encode() } #[test] diff --git a/frame/democracy/src/types.rs b/frame/democracy/src/types.rs index 5c4002a46dd38..2eb004ba61bc4 100644 --- a/frame/democracy/src/types.rs +++ b/frame/democracy/src/types.rs @@ -19,13 +19,14 @@ use crate::{AccountVote, Conviction, Vote, VoteThreshold}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{Bounded, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Saturating, Zero}, RuntimeDebug, }; /// Info regarding an ongoing referendum. 
-#[derive(Encode, Decode, Default, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Default, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Tally { /// The number of aye votes, expressed in terms of post-conviction lock-vote. pub ayes: Balance, @@ -36,7 +37,7 @@ pub struct Tally { } /// Amount of votes and capital placed in delegation for an account. -#[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Delegations { /// The number of votes (this is post-conviction). pub votes: Balance, @@ -159,7 +160,7 @@ impl< } /// Info regarding an ongoing referendum. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct ReferendumStatus { /// When voting on this referendum will end. pub end: BlockNumber, @@ -174,7 +175,7 @@ pub struct ReferendumStatus { } /// Info regarding a referendum, present or past. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum ReferendumInfo { /// Referendum is happening, the arg is the block number at which it will end. 
Ongoing(ReferendumStatus), diff --git a/frame/democracy/src/vote.rs b/frame/democracy/src/vote.rs index 7b1b32ea37f58..03ca020ca0949 100644 --- a/frame/democracy/src/vote.rs +++ b/frame/democracy/src/vote.rs @@ -19,6 +19,7 @@ use crate::{Conviction, Delegations, ReferendumIndex}; use codec::{Decode, Encode, EncodeLike, Input, Output}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{Saturating, Zero}, RuntimeDebug, @@ -51,8 +52,21 @@ impl Decode for Vote { } } +impl TypeInfo for Vote { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("Vote", module_path!())) + .composite( + scale_info::build::Fields::unnamed() + .field(|f| f.ty::().docs(&["Raw vote byte, encodes aye + conviction"])), + ) + } +} + /// A vote for a referendum of a particular account. -#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub enum AccountVote { /// A standard vote, one-way (approve or reject) with a given amount of conviction. Standard { vote: Vote, balance: Balance }, @@ -92,7 +106,9 @@ impl AccountVote { } /// A "prior" lock, i.e. a lock for some now-forgotten reason. -#[derive(Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug)] +#[derive( + Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo, +)] pub struct PriorLock(BlockNumber, Balance); impl PriorLock { @@ -115,7 +131,7 @@ impl PriorLock { /// The account is voting directly. `delegations` is the total amount of post-conviction voting /// weight that it controls from those that have delegated to it. 
diff --git a/frame/democracy/src/vote_threshold.rs b/frame/democracy/src/vote_threshold.rs index feaa596921c41..ad8bce290ed4f 100644 --- a/frame/democracy/src/vote_threshold.rs +++ b/frame/democracy/src/vote_threshold.rs @@ -19,13 +19,14 @@ use crate::Tally; use codec::{Decode, Encode}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use sp_runtime::traits::{IntegerSquareRoot, Zero}; use sp_std::ops::{Add, Div, Mul, Rem}; /// A means of determining if a vote is past pass threshold. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug)] +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum VoteThreshold { /// A supermajority of approvals is needed to pass this vote. diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 2dca7ed0a4f96..b2d50321e8cd3 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -17,6 +17,7 @@ static_assertions = "1.1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } @@ -55,6 +56,7 @@ frame-benchmarking = { version = "4.0.0-dev", path = "../benchmarking" } default = ["std"] std = [ "codec/std", + "scale-info/std", "log/std", "frame-support/std", diff --git a/frame/election-provider-multi-phase/src/benchmarking.rs b/frame/election-provider-multi-phase/src/benchmarking.rs index bca9c359d47f7..fb5adda52e166 100644 --- a/frame/election-provider-multi-phase/src/benchmarking.rs +++ b/frame/election-provider-multi-phase/src/benchmarking.rs @@ -353,7 +353,10 @@ 
frame_benchmarking::benchmarks! { // encode the most significant storage item that needs to be decoded in the dispatch. let encoded_snapshot = >::snapshot().ok_or("missing snapshot")?.encode(); - let encoded_call = >::submit_unsigned(Box::new(raw_solution.clone()), witness).encode(); + let encoded_call = Call::::submit_unsigned { + raw_solution: Box::new(raw_solution.clone()), + witness + }.encode(); }: { assert_ok!( >::submit_unsigned( diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index a3b6083914ca5..269057b55b094 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -217,6 +217,7 @@ use frame_support::{ weights::{DispatchClass, Weight}, }; use frame_system::{ensure_none, offchain::SendTransactionTypes}; +use scale_info::TypeInfo; use sp_arithmetic::{ traits::{CheckedAdd, Saturating, Zero}, UpperOf, @@ -311,7 +312,7 @@ impl ElectionProvider for NoFallback } /// Current phase of the pallet. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, Debug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] pub enum Phase { /// Nothing, the election is not happening. Off, @@ -373,7 +374,7 @@ impl Phase { } /// The type of `Computation` that provided this election data. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, Debug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] pub enum ElectionCompute { /// Election was computed on-chain. OnChain, @@ -399,7 +400,7 @@ impl Default for ElectionCompute { /// /// Such a solution should never become effective in anyway before being checked by the /// `Pallet::feasibility_check`. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, PartialOrd, Ord)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, PartialOrd, Ord, TypeInfo)] pub struct RawSolution { /// the solution itself. 
pub solution: S, @@ -417,7 +418,7 @@ impl Default for RawSolution { } /// A checked solution, ready to be enacted. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, TypeInfo)] pub struct ReadySolution { /// The final supports of the solution. /// @@ -436,7 +437,7 @@ pub struct ReadySolution { /// [`ElectionDataProvider`] and are kept around until the round is finished. /// /// These are stored together because they are often accessed together. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, TypeInfo)] pub struct RoundSnapshot { /// All of the voters. pub voters: Vec<(A, VoteWeight, Vec)>, @@ -449,7 +450,7 @@ pub struct RoundSnapshot { /// This is stored automatically on-chain, and it contains the **size of the entire snapshot**. /// This is also used in dispatchables as weight witness data and should **only contain the size of /// the presented solution**, not the entire snapshot. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, Debug, Default)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, Debug, Default, TypeInfo)] pub struct SolutionOrSnapshotSize { /// The length of voters. 
#[codec(compact)] @@ -643,7 +644,8 @@ pub mod pallet { + Clone + sp_std::fmt::Debug + Ord - + NposSolution; + + NposSolution + + TypeInfo; /// Configuration for the fallback type Fallback: ElectionProvider< @@ -949,7 +951,8 @@ pub mod pallet { // create the submission let deposit = Self::deposit_for(&raw_solution, size); let reward = { - let call = Call::submit(raw_solution.clone(), num_signed_submissions); + let call = + Call::submit { raw_solution: raw_solution.clone(), num_signed_submissions }; let call_fee = T::EstimateCallFee::estimate_call_fee(&call, None.into()); T::SignedRewardBase::get().saturating_add(call_fee) }; @@ -985,10 +988,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata( - ::AccountId = "AccountId", - BalanceOf = "Balance" - )] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A solution was stored with the given compute. @@ -1042,14 +1041,14 @@ pub mod pallet { impl ValidateUnsigned for Pallet { type Call = Call; fn validate_unsigned(source: TransactionSource, call: &Self::Call) -> TransactionValidity { - if let Call::submit_unsigned(solution, _) = call { + if let Call::submit_unsigned { raw_solution, .. } = call { // Discard solution not coming from the local OCW. match source { TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ }, _ => return InvalidTransaction::Call.into(), } - let _ = Self::unsigned_pre_dispatch_checks(solution) + let _ = Self::unsigned_pre_dispatch_checks(raw_solution) .map_err(|err| { log!(debug, "unsigned transaction validation failed due to {:?}", err); err @@ -1060,11 +1059,11 @@ pub mod pallet { // The higher the score[0], the better a solution is. .priority( T::MinerTxPriority::get() - .saturating_add(solution.score[0].saturated_into()), + .saturating_add(raw_solution.score[0].saturated_into()), ) // Used to deduplicate unsigned solutions: each validator should produce one // solution per round at most, and solutions are not propagate. 
- .and_provides(solution.round) + .and_provides(raw_solution.round) // Transaction should stay in the pool for the duration of the unsigned phase. .longevity(T::UnsignedPhase::get().saturated_into::()) // We don't propagate this. This can never be validated at a remote node. @@ -1076,8 +1075,8 @@ pub mod pallet { } fn pre_dispatch(call: &Self::Call) -> Result<(), TransactionValidityError> { - if let Call::submit_unsigned(solution, _) = call { - Self::unsigned_pre_dispatch_checks(solution) + if let Call::submit_unsigned { raw_solution, .. } = call { + Self::unsigned_pre_dispatch_checks(raw_solution) .map_err(dispatch_error_to_invalid) .map_err(Into::into) } else { diff --git a/frame/election-provider-multi-phase/src/signed.rs b/frame/election-provider-multi-phase/src/signed.rs index 72aa3e6680343..61215059c53a6 100644 --- a/frame/election-provider-multi-phase/src/signed.rs +++ b/frame/election-provider-multi-phase/src/signed.rs @@ -42,7 +42,7 @@ use sp_std::{ /// A raw, unchecked signed submission. /// /// This is just a wrapper around [`RawSolution`] and some additional info. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct SignedSubmission { /// Who submitted this solution. pub who: AccountId, diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 0afb6eee16121..af0b79177d86c 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -160,9 +160,9 @@ impl Pallet { let call = restore_solution::() .and_then(|call| { // ensure the cached call is still current before submitting - if let Call::submit_unsigned(solution, _) = &call { + if let Call::submit_unsigned { raw_solution, .. 
} = &call { // prevent errors arising from state changes in a forkful chain - Self::basic_checks(solution, "restored")?; + Self::basic_checks(raw_solution, "restored")?; Ok(call) } else { Err(MinerError::SolutionCallInvalid) @@ -213,7 +213,8 @@ impl Pallet { let (raw_solution, witness) = Self::mine_and_check()?; let score = raw_solution.score.clone(); - let call: Call = Call::submit_unsigned(Box::new(raw_solution), witness).into(); + let call: Call = + Call::submit_unsigned { raw_solution: Box::new(raw_solution), witness }.into(); log!( debug, @@ -763,7 +764,10 @@ mod tests { ExtBuilder::default().desired_targets(0).build_and_execute(|| { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(Box::new(solution.clone()), witness()); + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: witness(), + }; // initial assert_eq!(MultiPhase::current_phase(), Phase::Off); @@ -833,7 +837,10 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(Box::new(solution.clone()), witness()); + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: witness(), + }; // initial assert!(::validate_unsigned( @@ -870,7 +877,8 @@ mod tests { assert!(MultiPhase::current_phase().is_unsigned()); let raw = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(Box::new(raw.clone()), witness()); + let call = + Call::submit_unsigned { raw_solution: Box::new(raw.clone()), witness: witness() }; assert_eq!(raw.solution.unique_targets().len(), 0); // won't work anymore. 
@@ -896,7 +904,10 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(Box::new(solution.clone()), witness()); + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: witness(), + }; assert_eq!( ::validate_unsigned( @@ -923,7 +934,10 @@ mod tests { // This is in itself an invalid BS solution. let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(Box::new(solution.clone()), witness()); + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: witness(), + }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) @@ -944,7 +958,10 @@ mod tests { let mut correct_witness = witness(); correct_witness.voters += 1; correct_witness.targets -= 1; - let call = Call::submit_unsigned(Box::new(solution.clone()), correct_witness); + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: correct_witness, + }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) @@ -1350,7 +1367,7 @@ mod tests { let encoded = pool.read().transactions[0].clone(); let extrinsic: Extrinsic = codec::Decode::decode(&mut &*encoded).unwrap(); let call = extrinsic.call; - assert!(matches!(call, OuterCall::MultiPhase(Call::submit_unsigned(..)))); + assert!(matches!(call, OuterCall::MultiPhase(Call::submit_unsigned { .. }))); }) } @@ -1367,7 +1384,7 @@ mod tests { let encoded = pool.read().transactions[0].clone(); let extrinsic = Extrinsic::decode(&mut &*encoded).unwrap(); let call = match extrinsic.call { - OuterCall::MultiPhase(call @ Call::submit_unsigned(..)) => call, + OuterCall::MultiPhase(call @ Call::submit_unsigned { .. 
}) => call, _ => panic!("bad call: unexpected submission"), }; diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index d713b98fcefa1..dfe2b11024334 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 930c9c2c80832..f2771a9f72783 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -35,6 +36,7 @@ substrate-test-utils = { version = "4.0.0-dev", path = "../../test-utils" } default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "sp-runtime/std", "sp-npos-elections/std", diff --git a/frame/elections-phragmen/src/lib.rs b/frame/elections-phragmen/src/lib.rs index 8e0a31377f982..d7b42383da757 100644 
--- a/frame/elections-phragmen/src/lib.rs +++ b/frame/elections-phragmen/src/lib.rs @@ -108,6 +108,7 @@ use frame_support::{ }, weights::Weight, }; +use scale_info::TypeInfo; use sp_npos_elections::{ElectionResult, ExtendedBalance}; use sp_runtime::{ traits::{Saturating, StaticLookup, Zero}, @@ -135,7 +136,7 @@ type NegativeImbalanceOf = <::Currency as Currency< >>::NegativeImbalance; /// An indication that the renouncing account currently has which of the below roles. -#[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug, TypeInfo)] pub enum Renouncing { /// A member is renouncing. Member, @@ -146,7 +147,7 @@ pub enum Renouncing { } /// An active voter. -#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq)] +#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, TypeInfo)] pub struct Voter { /// The members being backed. pub votes: Vec, @@ -159,7 +160,7 @@ pub struct Voter { } /// A holder of a seat as either a member or a runner-up. -#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq)] +#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, TypeInfo)] pub struct SeatHolder { /// The holder. pub who: AccountId, @@ -531,11 +532,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata( - ::AccountId = "AccountId", - BalanceOf = "Balance", - Vec<(::AccountId, BalanceOf)> = "Vec<(AccountId, Balance)>", - )] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A new term with \[new_members\]. 
This indicates that enough candidates existed to run diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 29bc8f8a5cea4..8557cfba6b58c 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -30,6 +31,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/elections/src/lib.rs b/frame/elections/src/lib.rs index 5057a6e00f565..ac13bce31b0f6 100644 --- a/frame/elections/src/lib.rs +++ b/frame/elections/src/lib.rs @@ -111,7 +111,9 @@ mod tests; // entries before they increase the capacity. /// The activity status of a voter. -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, RuntimeDebug)] +#[derive( + PartialEq, Eq, Copy, Clone, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo, +)] pub struct VoterInfo { /// Last VoteIndex in which this voter assigned (or initialized) approvals. last_active: VoteIndex, @@ -462,7 +464,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", Vec = "Vec")] pub enum Event { /// Reaped \[voter, reaper\]. 
VoterReaped(T::AccountId, T::AccountId), diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index 69a5621748622..1ccd9f33f0318 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } @@ -28,6 +29,7 @@ log = { version = "0.4.14", default-features = false } default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "lite-json/std", diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index 9e043c8bfb250..644e1ca299a3c 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -294,8 +294,10 @@ pub mod pallet { /// are being whitelisted and marked as valid. fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { // Firstly let's check that we call the right function. 
- if let Call::submit_price_unsigned_with_signed_payload(ref payload, ref signature) = - call + if let Call::submit_price_unsigned_with_signed_payload { + price_payload: ref payload, + ref signature, + } = call { let signature_valid = SignedPayload::::verify::(payload, signature.clone()); @@ -303,7 +305,7 @@ pub mod pallet { return InvalidTransaction::BadProof.into() } Self::validate_transaction_parameters(&payload.block_number, &payload.price) - } else if let Call::submit_price_unsigned(block_number, new_price) = call { + } else if let Call::submit_price_unsigned { block_number, price: new_price } = call { Self::validate_transaction_parameters(block_number, new_price) } else { InvalidTransaction::Call.into() @@ -330,7 +332,7 @@ pub mod pallet { /// Payload used by this example crate to hold price /// data required to submit a transaction. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct PricePayload { block_number: BlockNumber, price: u32, @@ -443,7 +445,7 @@ impl Pallet { // Received price is wrapped into a call to `submit_price` public function of this // pallet. This means that the transaction, when executed, will simply call that // function passing `price` as an argument. - Call::submit_price(price) + Call::submit_price { price } }); for (acc, res) in &results { @@ -472,7 +474,7 @@ impl Pallet { // Received price is wrapped into a call to `submit_price_unsigned` public function of this // pallet. This means that the transaction, when executed, will simply call that function // passing `price` as an argument. - let call = Call::submit_price_unsigned(block_number, price); + let call = Call::submit_price_unsigned { block_number, price }; // Now let's create a transaction out of this call and submit it to the pool. 
// Here we showcase two ways to send an unsigned transaction / unsigned payload (raw) @@ -507,8 +509,9 @@ impl Pallet { let (_, result) = Signer::::any_account() .send_unsigned_transaction( |account| PricePayload { price, block_number, public: account.public.clone() }, - |payload, signature| { - Call::submit_price_unsigned_with_signed_payload(payload, signature) + |payload, signature| Call::submit_price_unsigned_with_signed_payload { + price_payload: payload, + signature, }, ) .ok_or("No local accounts accounts available.")?; @@ -536,8 +539,9 @@ impl Pallet { let transaction_results = Signer::::all_accounts() .send_unsigned_transaction( |account| PricePayload { price, block_number, public: account.public.clone() }, - |payload, signature| { - Call::submit_price_unsigned_with_signed_payload(payload, signature) + |payload, signature| Call::submit_price_unsigned_with_signed_payload { + price_payload: payload, + signature, }, ); for (_account_id, result) in transaction_results.into_iter() { diff --git a/frame/example-offchain-worker/src/tests.rs b/frame/example-offchain-worker/src/tests.rs index d0a3664abf4ac..1dde8a1df60c8 100644 --- a/frame/example-offchain-worker/src/tests.rs +++ b/frame/example-offchain-worker/src/tests.rs @@ -227,7 +227,7 @@ fn should_submit_signed_transaction_on_chain() { assert!(pool_state.read().transactions.is_empty()); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature.unwrap().0, 0); - assert_eq!(tx.call, Call::Example(crate::Call::submit_price(15523))); + assert_eq!(tx.call, Call::Example(crate::Call::submit_price { price: 15523 })); }); } @@ -273,10 +273,10 @@ fn should_submit_unsigned_transaction_on_chain_for_any_account() { let tx = pool_state.write().transactions.pop().unwrap(); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature, None); - if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload( - body, + if let 
Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { + price_payload: body, signature, - )) = tx.call + }) = tx.call { assert_eq!(body, price_payload); @@ -333,10 +333,10 @@ fn should_submit_unsigned_transaction_on_chain_for_all_accounts() { let tx = pool_state.write().transactions.pop().unwrap(); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature, None); - if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload( - body, + if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { + price_payload: body, signature, - )) = tx.call + }) = tx.call { assert_eq!(body, price_payload); @@ -373,7 +373,10 @@ fn should_submit_raw_unsigned_transaction_on_chain() { assert!(pool_state.read().transactions.is_empty()); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature, None); - assert_eq!(tx.call, Call::Example(crate::Call::submit_price_unsigned(1, 15523))); + assert_eq!( + tx.call, + Call::Example(crate::Call::submit_price_unsigned { block_number: 1, price: 15523 }) + ); }); } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index 4d14d635201ff..5e0f6d4bc255a 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } @@ -25,6 +26,7 @@ sp-tasks = { version = "4.0.0-dev", default-features = false, path = "../../prim default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", 
"sp-core/std", diff --git a/frame/example-parallel/src/lib.rs b/frame/example-parallel/src/lib.rs index c41cd2401dd25..c86cac4295684 100644 --- a/frame/example-parallel/src/lib.rs +++ b/frame/example-parallel/src/lib.rs @@ -95,7 +95,7 @@ pub mod pallet { } /// Request to enlist participant. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct EnlistedParticipant { pub account: Vec, pub signature: Vec, diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 08d7af4ef673f..58daaf1c75558 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } @@ -35,6 +36,7 @@ std = [ "frame-system/std", "log/std", "pallet-balances/std", + "scale-info/std", "sp-io/std", "sp-runtime/std", "sp-std/std" diff --git a/frame/example/src/lib.rs b/frame/example/src/lib.rs index f588e11a6164d..3f56b57dac8ca 100644 --- a/frame/example/src/lib.rs +++ b/frame/example/src/lib.rs @@ -277,6 +277,7 @@ use frame_support::{ }; use frame_system::ensure_signed; use log::info; +use scale_info::TypeInfo; use sp_runtime::{ traits::{Bounded, DispatchInfoOf, SaturatedConversion, Saturating, SignedExtension}, transaction_validity::{ @@ -688,12 +689,13 @@ impl Pallet { // types defined in the runtime. Lookup `pub type SignedExtra = (...)` in `node/runtime` and // `node-template` for an example of this. -// A simple signed extension that checks for the `set_dummy` call. 
In that case, it increases the -// priority and prints some log. -// -// Additionally, it drops any transaction with an encoded length higher than 200 bytes. No -// particular reason why, just to demonstrate the power of signed extensions. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] +/// A simple signed extension that checks for the `set_dummy` call. In that case, it increases the +/// priority and prints some log. +/// +/// Additionally, it drops any transaction with an encoded length higher than 200 bytes. No +/// particular reason why, just to demonstrate the power of signed extensions. +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct WatchDummy(PhantomData); impl sp_std::fmt::Debug for WatchDummy { @@ -730,7 +732,7 @@ where // check for `set_dummy` match call.is_sub_type() { - Some(Call::set_dummy(..)) => { + Some(Call::set_dummy { .. }) => { sp_runtime::print("set_dummy was received."); let mut valid_tx = ValidTransaction::default(); diff --git a/frame/example/src/tests.rs b/frame/example/src/tests.rs index 645b5c9bc13ab..87c2404f5b100 100644 --- a/frame/example/src/tests.rs +++ b/frame/example/src/tests.rs @@ -163,7 +163,7 @@ fn set_dummy_works() { #[test] fn signed_ext_watch_dummy_works() { new_test_ext().execute_with(|| { - let call = >::set_dummy(10).into(); + let call = pallet_example::Call::set_dummy { new_value: 10 }.into(); let info = DispatchInfo::default(); assert_eq!( @@ -183,14 +183,14 @@ fn signed_ext_watch_dummy_works() { #[test] fn weights_work() { // must have a defined weight. - let default_call = >::accumulate_dummy(10); + let default_call = pallet_example::Call::::accumulate_dummy { increase_by: 10 }; let info1 = default_call.get_dispatch_info(); // aka. `let info = as GetDispatchInfo>::get_dispatch_info(&default_call);` assert!(info1.weight > 0); // `set_dummy` is simpler than `accumulate_dummy`, and the weight // should be less. 
- let custom_call = >::set_dummy(20); + let custom_call = pallet_example::Call::::set_dummy { new_value: 20 }; let info2 = custom_call.get_dispatch_info(); assert!(info1.weight > info2.weight); } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index a809da6f1cd35..1abbf50e6a4c4 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -38,6 +39,7 @@ default = ["std"] with-tracing = ["sp-tracing/with-tracing"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "sp-core/std", diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index 5f1ae23c2f531..655a38fe1b540 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -645,7 +645,7 @@ mod tests { None } fn is_inherent(call: &Self::Call) -> bool { - *call == Call::::inherent_call() + *call == Call::::inherent_call {} } } @@ -658,7 +658,7 @@ mod tests { call: &Self::Call, ) -> TransactionValidity { match call { - Call::allowed_unsigned(..) => Ok(Default::default()), + Call::allowed_unsigned { .. } => Ok(Default::default()), _ => UnknownTransaction::NoUnsignedValidator.into(), } } @@ -666,8 +666,8 @@ mod tests { // Inherent call is accepted for being dispatched fn pre_dispatch(call: &Self::Call) -> Result<(), TransactionValidityError> { match call { - Call::allowed_unsigned(..) => Ok(()), - Call::inherent_call(..) => Ok(()), + Call::allowed_unsigned { .. } => Ok(()), + Call::inherent_call { .. 
} => Ok(()), _ => Err(UnknownTransaction::NoUnsignedValidator.into()), } } @@ -809,13 +809,17 @@ mod tests { Some((who, extra(nonce, fee))) } + fn call_transfer(dest: u64, value: u64) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) + } + #[test] fn balance_transfer_dispatch_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { balances: vec![(1, 211)] } .assimilate_storage(&mut t) .unwrap(); - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(2, 69)), sign_extra(1, 0, 0)); + let xt = TestXt::new(call_transfer(2, 69), sign_extra(1, 0, 0)); let weight = xt.get_dispatch_info().weight + ::BlockWeights::get() .get(DispatchClass::Normal) @@ -912,7 +916,7 @@ mod tests { fn bad_extrinsic_not_inserted() { let mut t = new_test_ext(1); // bad nonce check! - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(33, 69)), sign_extra(1, 30, 0)); + let xt = TestXt::new(call_transfer(33, 69), sign_extra(1, 30, 0)); t.execute_with(|| { Executive::initialize_block(&Header::new( 1, @@ -933,7 +937,10 @@ mod tests { fn block_weight_limit_enforced() { let mut t = new_test_ext(10000); // given: TestXt uses the encoded len as fixed Len: - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); + let xt = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); let encoded = xt.encode(); let encoded_len = encoded.len() as Weight; // on_initialize weight + base block execution weight @@ -954,7 +961,7 @@ mod tests { for nonce in 0..=num_to_exhaust_block { let xt = TestXt::new( - Call::Balances(BalancesCall::transfer(33, 0)), + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, nonce.into(), 0), ); let res = Executive::apply_extrinsic(xt); @@ -978,9 +985,18 @@ mod tests { #[test] fn block_weight_and_size_is_stored_per_tx() { - let xt = 
TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); - let x1 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 1, 0)); - let x2 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 2, 0)); + let xt = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); + let x1 = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 1, 0), + ); + let x2 = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 2, 0), + ); let len = xt.clone().encode().len() as u32; let mut t = new_test_ext(1); t.execute_with(|| { @@ -1034,8 +1050,8 @@ mod tests { #[test] fn validate_unsigned() { - let valid = TestXt::new(Call::Custom(custom::Call::allowed_unsigned()), None); - let invalid = TestXt::new(Call::Custom(custom::Call::unallowed_unsigned()), None); + let valid = TestXt::new(Call::Custom(custom::Call::allowed_unsigned {}), None); + let invalid = TestXt::new(Call::Custom(custom::Call::unallowed_unsigned {}), None); let mut t = new_test_ext(1); let mut default_with_prio_3 = ValidTransaction::default(); @@ -1074,8 +1090,10 @@ mod tests { as LockableCurrency>::set_lock( id, &1, 110, lock, ); - let xt = - TestXt::new(Call::System(SystemCall::remark(vec![1u8])), sign_extra(1, 0, 0)); + let xt = TestXt::new( + Call::System(SystemCall::remark { remark: vec![1u8] }), + sign_extra(1, 0, 0), + ); let weight = xt.get_dispatch_info().weight + ::BlockWeights::get() .get(DispatchClass::Normal) @@ -1222,7 +1240,10 @@ mod tests { /// used through the `ExecuteBlock` trait. 
#[test] fn custom_runtime_upgrade_is_called_when_using_execute_block_trait() { - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); + let xt = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); let header = new_test_ext(1).execute_with(|| { // Make sure `on_runtime_upgrade` is called. @@ -1326,7 +1347,7 @@ mod tests { #[test] fn calculating_storage_root_twice_works() { - let call = Call::Custom(custom::Call::calculate_storage_root()); + let call = Call::Custom(custom::Call::calculate_storage_root {}); let xt = TestXt::new(call, sign_extra(1, 0, 0)); let header = new_test_ext(1).execute_with(|| { @@ -1352,8 +1373,11 @@ mod tests { #[test] #[should_panic(expected = "Invalid inherent position for extrinsic at index 1")] fn invalid_inherent_position_fail() { - let xt1 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); - let xt2 = TestXt::new(Call::Custom(custom::Call::inherent_call()), None); + let xt1 = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); + let xt2 = TestXt::new(Call::Custom(custom::Call::inherent_call {}), None); let header = new_test_ext(1).execute_with(|| { // Let's build some fake block. @@ -1378,8 +1402,8 @@ mod tests { #[test] fn valid_inherents_position_works() { - let xt1 = TestXt::new(Call::Custom(custom::Call::inherent_call()), None); - let xt2 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); + let xt1 = TestXt::new(Call::Custom(custom::Call::inherent_call {}), None); + let xt2 = TestXt::new(call_transfer(33, 0), sign_extra(1, 0, 0)); let header = new_test_ext(1).execute_with(|| { // Let's build some fake block. 
diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index a6e59a2dcd65f..c275b693d8f27 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } @@ -30,6 +31,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "sp-arithmetic/std", diff --git a/frame/gilt/src/benchmarking.rs b/frame/gilt/src/benchmarking.rs index befa373e6e7fa..55d34a35a7ce4 100644 --- a/frame/gilt/src/benchmarking.rs +++ b/frame/gilt/src/benchmarking.rs @@ -81,7 +81,7 @@ benchmarks! { } set_target { - let call = Call::::set_target(Default::default()); + let call = Call::::set_target { target: Default::default() }; let origin = T::AdminOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } @@ -111,7 +111,7 @@ benchmarks! { Gilt::::place_bid(RawOrigin::Signed(caller.clone()).into(), T::MinFreeze::get(), 1)?; } - Call::::set_target(Perquintill::from_percent(100)) + Call::::set_target { target: Perquintill::from_percent(100) } .dispatch_bypass_filter(T::AdminOrigin::successful_origin())?; }: { Gilt::::pursue_target(b) } @@ -127,7 +127,7 @@ benchmarks! 
{ Gilt::::place_bid(RawOrigin::Signed(caller.clone()).into(), T::MinFreeze::get(), i + 1)?; } - Call::::set_target(Perquintill::from_percent(100)) + Call::::set_target { target: Perquintill::from_percent(100) } .dispatch_bypass_filter(T::AdminOrigin::successful_origin())?; }: { Gilt::::pursue_target(q) } diff --git a/frame/gilt/src/lib.rs b/frame/gilt/src/lib.rs index 7bfca872dc3f8..de114e4bb87de 100644 --- a/frame/gilt/src/lib.rs +++ b/frame/gilt/src/lib.rs @@ -82,6 +82,7 @@ pub mod pallet { traits::{Currency, OnUnbalanced, ReservableCurrency}, }; use frame_system::pallet_prelude::*; + use scale_info::TypeInfo; use sp_arithmetic::{PerThing, Perquintill}; use sp_runtime::traits::{Saturating, Zero}; use sp_std::prelude::*; @@ -111,7 +112,8 @@ pub mod pallet { + MaybeSerializeDeserialize + sp_std::fmt::Debug + Default - + From; + + From + + TypeInfo; /// Origin required for setting the target proportion to be under gilt. type AdminOrigin: EnsureOrigin; @@ -181,7 +183,7 @@ pub mod pallet { pub struct Pallet(_); /// A single bid on a gilt, an item of a *queue* in `Queues`. - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct GiltBid { /// The amount bid. pub amount: Balance, @@ -190,7 +192,7 @@ pub mod pallet { } /// Information representing an active gilt. - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ActiveGilt { /// The proportion of the effective total issuance (i.e. accounting for any eventual gilt /// expansion or contraction that may eventually be claimed). 
@@ -214,7 +216,7 @@ pub mod pallet { /// `issuance - frozen + proportion * issuance` /// /// where `issuance = total_issuance - IgnoredIssuance` - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ActiveGiltsTotal { /// The total amount of funds held in reserve for all active gilts. pub frozen: Balance, @@ -269,7 +271,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A bid was successfully placed. diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 9e60ad0fb3c8a..53ab443783e5d 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, path = "../../primitives/finality-grandpa" } @@ -44,6 +45,7 @@ frame-election-provider-support = { version = "4.0.0-dev", path = "../election-p default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-benchmarking/std", "sp-application-crypto/std", "sp-core/std", diff --git a/frame/grandpa/src/equivocation.rs b/frame/grandpa/src/equivocation.rs index 40d8535dabb60..8a23ce6e1ef1e 100644 --- a/frame/grandpa/src/equivocation.rs +++ b/frame/grandpa/src/equivocation.rs @@ -164,8 +164,10 @@ where ) -> DispatchResult { use frame_system::offchain::SubmitTransaction; - let call = - 
Call::report_equivocation_unsigned(Box::new(equivocation_proof), key_owner_proof); + let call = Call::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof), + key_owner_proof, + }; match SubmitTransaction::>::submit_unsigned_transaction(call.into()) { Ok(()) => log::info!( @@ -203,7 +205,7 @@ pub struct GrandpaTimeSlot { /// unsigned equivocation reports. impl Pallet { pub fn validate_unsigned(source: TransactionSource, call: &Call) -> TransactionValidity { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { // discard equivocation report not coming from the local node match source { TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ }, @@ -242,7 +244,7 @@ impl Pallet { } pub fn pre_dispatch(call: &Call) -> Result<(), TransactionValidityError> { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { is_known_offence::(equivocation_proof, key_owner_proof) } else { Err(InvalidTransaction::Call.into()) diff --git a/frame/grandpa/src/lib.rs b/frame/grandpa/src/lib.rs index 7cad0d477c9ee..cd75deea770b4 100644 --- a/frame/grandpa/src/lib.rs +++ b/frame/grandpa/src/lib.rs @@ -67,6 +67,8 @@ pub use equivocation::{ pub use pallet::*; +use scale_info::TypeInfo; + /// The current storage version. const STORAGE_VERSION: StorageVersion = StorageVersion::new(4); @@ -354,7 +356,7 @@ pub trait WeightInfo { } /// A stored pending change. -#[derive(Encode, Decode)] +#[derive(Encode, Decode, TypeInfo)] pub struct StoredPendingChange { /// The block number this was scheduled at. pub scheduled_at: N, @@ -370,7 +372,7 @@ pub struct StoredPendingChange { /// Current state of the GRANDPA authority set. State transitions must happen in /// the same order of states defined below, e.g. 
`Paused` implies a prior /// `PendingPause`. -#[derive(Decode, Encode)] +#[derive(Decode, Encode, TypeInfo)] #[cfg_attr(test, derive(Debug, PartialEq))] pub enum StoredState { /// The current authority set is live, and GRANDPA is enabled. diff --git a/frame/grandpa/src/tests.rs b/frame/grandpa/src/tests.rs index 034758e0a21b5..98f54f966fadc 100644 --- a/frame/grandpa/src/tests.rs +++ b/frame/grandpa/src/tests.rs @@ -681,10 +681,10 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { let key_owner_proof = Historical::prove((sp_finality_grandpa::KEY_TYPE, &equivocation_key)).unwrap(); - let call = Call::report_equivocation_unsigned( - Box::new(equivocation_proof.clone()), - key_owner_proof.clone(), - ); + let call = Call::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof.clone()), + key_owner_proof: key_owner_proof.clone(), + }; // only local/inblock reports are allowed assert_eq!( @@ -843,10 +843,10 @@ fn valid_equivocation_reports_dont_pay_fees() { Historical::prove((sp_finality_grandpa::KEY_TYPE, &equivocation_key)).unwrap(); // check the dispatch info for the call. - let info = Call::::report_equivocation_unsigned( - Box::new(equivocation_proof.clone()), - key_owner_proof.clone(), - ) + let info = Call::::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof.clone()), + key_owner_proof: key_owner_proof.clone(), + } .get_dispatch_info(); // it should have non-zero weight and the fee has to be paid. 
diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 489a01b27da69..598be25c5ef38 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -30,6 +31,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index b56df59e113c0..a91381f1edd8b 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -240,10 +240,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - BalanceOf = "Balance" - )] pub enum Event { /// A name was set or reset (which will remove all judgements). 
\[who\] IdentitySet(T::AccountId), diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index 86e461c644d10..ed6aeb18e96a1 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -22,6 +22,10 @@ use frame_support::{ traits::{ConstU32, Get}, BoundedVec, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, }; +use scale_info::{ + build::{Fields, Variants}, + meta_type, Path, Type, TypeInfo, TypeParameter, +}; use sp_runtime::{traits::Zero, RuntimeDebug}; use sp_std::{fmt::Debug, iter::once, ops::Add, prelude::*}; @@ -89,6 +93,81 @@ impl Encode for Data { } impl codec::EncodeLike for Data {} +/// Add a Raw variant with the given index and a fixed sized byte array +macro_rules! data_raw_variants { + ($variants:ident, $(($index:literal, $size:literal)),* ) => { + $variants + $( + .variant(concat!("Raw", stringify!($size)), |v| v + .index($index) + .fields(Fields::unnamed().field(|f| f.ty::<[u8; $size]>())) + ) + )* + } +} + +impl TypeInfo for Data { + type Identity = Self; + + fn type_info() -> Type { + let variants = Variants::new().variant("None", |v| v.index(0)); + + // create a variant for all sizes of Raw data from 0-32 + let variants = data_raw_variants!( + variants, + (1, 0), + (2, 1), + (3, 2), + (4, 3), + (5, 4), + (6, 5), + (7, 6), + (8, 7), + (9, 8), + (10, 9), + (11, 10), + (12, 11), + (13, 12), + (14, 13), + (15, 14), + (16, 15), + (17, 16), + (18, 17), + (19, 18), + (20, 19), + (21, 20), + (22, 21), + (23, 22), + (24, 23), + (25, 24), + (26, 25), + (27, 26), + (28, 27), + (29, 28), + (30, 29), + (31, 30), + (32, 31), + (33, 32) + ); + + let variants = variants + .variant("BlakeTwo256", |v| { + v.index(34).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }) + .variant("Sha256", |v| { + v.index(35).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }) + .variant("Keccak256", |v| { + v.index(36).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }) + .variant("ShaThree256", |v| { + 
v.index(37).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }); + + Type::builder().path(Path::new("Data", module_path!())).variant(variants) + } +} + impl Default for Data { fn default() -> Self { Self::None @@ -102,7 +181,7 @@ pub type RegistrarIndex = u32; /// /// NOTE: Registrars may pay little attention to some fields. Registrars may want to make clear /// which fields their attestation is relevant for by off-chain means. -#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub enum Judgement { /// The default value; no opinion is held. @@ -152,7 +231,7 @@ impl>::from_bits(field as u64).map_err(|_| "invalid value")?)) } } +impl TypeInfo for IdentityFields { + type Identity = Self; + + fn type_info() -> Type { + Type::builder() + .path(Path::new("BitFlags", module_path!())) + .type_params(vec![TypeParameter::new("T", Some(meta_type::()))]) + .composite(Fields::unnamed().field(|f| f.ty::().type_name("IdentityField"))) + } +} /// Information concerning the identity of the controller of an account. /// /// NOTE: This should be stored at the end of the storage item to facilitate the addition of extra /// fields in a backwards compatible way through a specialized `Decode` impl. #[derive( - CloneNoBound, Encode, Decode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, + CloneNoBound, Encode, Decode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, TypeInfo, )] #[codec(mel_bound(FieldLimit: Get))] #[cfg_attr(test, derive(frame_support::DefaultNoBound))] +#[scale_info(skip_type_params(FieldLimit))] pub struct IdentityInfo> { /// Additional fields of the identity that are not catered for with the struct's explicit /// fields. 
@@ -246,12 +336,15 @@ pub struct IdentityInfo> { /// /// NOTE: This is stored separately primarily to facilitate the addition of extra fields in a /// backwards compatible way through a specialized `Decode` impl. -#[derive(CloneNoBound, Encode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound)] +#[derive( + CloneNoBound, Encode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, TypeInfo, +)] #[codec(mel_bound( Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq + Zero + Add, MaxJudgements: Get, MaxAdditionalFields: Get, ))] +#[scale_info(skip_type_params(MaxJudgements, MaxAdditionalFields))] pub struct Registration< Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq, MaxJudgements: Get, @@ -296,7 +389,7 @@ impl< } /// Information concerning a registrar. -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct RegistrarInfo< Balance: Encode + Decode + Clone + Debug + Eq + PartialEq, AccountId: Encode + Decode + Clone + Debug + Eq + PartialEq, @@ -311,3 +404,70 @@ pub struct RegistrarInfo< /// these fields. 
pub fields: IdentityFields, } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn manual_data_type_info() { + let mut registry = scale_info::Registry::new(); + let type_id = registry.register_type(&scale_info::meta_type::()); + let registry: scale_info::PortableRegistry = registry.into(); + let type_info = registry.resolve(type_id.id()).unwrap(); + + let check_type_info = |data: &Data| { + let variant_name = match data { + Data::None => "None".to_string(), + Data::BlakeTwo256(_) => "BlakeTwo256".to_string(), + Data::Sha256(_) => "Sha256".to_string(), + Data::Keccak256(_) => "Keccak256".to_string(), + Data::ShaThree256(_) => "ShaThree256".to_string(), + Data::Raw(bytes) => format!("Raw{}", bytes.len()), + }; + if let scale_info::TypeDef::Variant(variant) = type_info.type_def() { + let variant = variant + .variants() + .iter() + .find(|v| v.name() == &variant_name) + .expect(&format!("Expected to find variant {}", variant_name)); + + let field_arr_len = variant + .fields() + .first() + .and_then(|f| registry.resolve(f.ty().id())) + .map(|ty| { + if let scale_info::TypeDef::Array(arr) = ty.type_def() { + arr.len() + } else { + panic!("Should be an array type") + } + }) + .unwrap_or(0); + + let encoded = data.encode(); + assert_eq!(encoded[0], variant.index()); + assert_eq!(encoded.len() as u32 - 1, field_arr_len); + } else { + panic!("Should be a variant type") + }; + }; + + let mut data = vec![ + Data::None, + Data::BlakeTwo256(Default::default()), + Data::Sha256(Default::default()), + Data::Keccak256(Default::default()), + Data::ShaThree256(Default::default()), + ]; + + // A Raw instance for all possible sizes of the Raw data + for n in 0..32 { + data.push(Data::Raw(vec![0u8; n as usize].try_into().unwrap())) + } + + for d in data.iter() { + check_type_info(d); + } + } +} diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 1208da3b3f3d5..a1efd626c0690 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,6 
+16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "4.0.0-dev", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -36,6 +37,7 @@ std = [ "sp-application-crypto/std", "pallet-authorship/std", "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/im-online/src/benchmarking.rs b/frame/im-online/src/benchmarking.rs index 2a2d837a4bd58..1043a97f67def 100644 --- a/frame/im-online/src/benchmarking.rs +++ b/frame/im-online/src/benchmarking.rs @@ -80,7 +80,7 @@ benchmarks! { let k in 1 .. MAX_KEYS; let e in 1 .. MAX_EXTERNAL_ADDRESSES; let (input_heartbeat, signature) = create_heartbeat::(k, e)?; - let call = Call::heartbeat(input_heartbeat, signature); + let call = Call::heartbeat { heartbeat: input_heartbeat, signature }; }: { ImOnline::::validate_unsigned(TransactionSource::InBlock, &call) .map_err(<&str>::from)?; @@ -90,7 +90,7 @@ benchmarks! { let k in 1 .. MAX_KEYS; let e in 1 .. 
MAX_EXTERNAL_ADDRESSES; let (input_heartbeat, signature) = create_heartbeat::(k, e)?; - let call = Call::heartbeat(input_heartbeat, signature); + let call = Call::heartbeat { heartbeat: input_heartbeat, signature }; }: { ImOnline::::validate_unsigned(TransactionSource::InBlock, &call) .map_err(<&str>::from)?; diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index 3a040c7b5e21d..ab4f7001574e5 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -80,6 +80,7 @@ use frame_support::traits::{ }; use frame_system::offchain::{SendTransactionTypes, SubmitTransaction}; pub use pallet::*; +use scale_info::TypeInfo; use sp_application_crypto::RuntimeAppPublic; use sp_core::offchain::OpaqueNetworkState; use sp_runtime::{ @@ -140,7 +141,7 @@ const INCLUDE_THRESHOLD: u32 = 3; /// This stores the block number at which heartbeat was requested and when the worker /// has actually managed to produce it. /// Note we store such status for every `authority_index` separately. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] struct HeartbeatStatus { /// An index of the session that we are supposed to send heartbeat for. 
pub session_index: SessionIndex, @@ -202,7 +203,7 @@ impl sp_std::fmt::Debug for OffchainErr where BlockNumber: PartialEq + Eq + Decode + Encode, @@ -297,7 +298,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AuthorityId = "AuthorityId", Vec> = "Vec")] pub enum Event { /// A new heartbeat was received from `AuthorityId` \[authority_id\] HeartbeatReceived(T::AuthorityId), @@ -459,7 +459,7 @@ pub mod pallet { type Call = Call; fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { - if let Call::heartbeat(heartbeat, signature) = call { + if let Call::heartbeat { heartbeat, signature } = call { if >::is_online(heartbeat.authority_index) { // we already received a heartbeat for this authority return InvalidTransaction::Stale.into() @@ -631,7 +631,7 @@ impl Pallet { let prepare_heartbeat = || -> OffchainResult> { let network_state = sp_io::offchain::network_state().map_err(|_| OffchainErr::NetworkState)?; - let heartbeat_data = Heartbeat { + let heartbeat = Heartbeat { block_number, network_state, session_index, @@ -639,9 +639,9 @@ impl Pallet { validators_len, }; - let signature = key.sign(&heartbeat_data.encode()).ok_or(OffchainErr::FailedSigning)?; + let signature = key.sign(&heartbeat.encode()).ok_or(OffchainErr::FailedSigning)?; - Ok(Call::heartbeat(heartbeat_data, signature)) + Ok(Call::heartbeat { heartbeat, signature }) }; if Self::is_online(authority_index) { @@ -820,7 +820,7 @@ impl OneSessionHandler for Pallet { } /// An offence that is filed if a validator didn't send a heartbeat message. -#[derive(RuntimeDebug)] +#[derive(RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Clone, PartialEq, Eq))] pub struct UnresponsivenessOffence { /// The current session index in which we report the unresponsive validators. 
diff --git a/frame/im-online/src/tests.rs b/frame/im-online/src/tests.rs index 2492e46ef18a0..bb2c4c7cae548 100644 --- a/frame/im-online/src/tests.rs +++ b/frame/im-online/src/tests.rs @@ -130,14 +130,15 @@ fn heartbeat( }; let signature = id.sign(&heartbeat.encode()).unwrap(); - ImOnline::pre_dispatch(&crate::Call::heartbeat(heartbeat.clone(), signature.clone())).map_err( - |e| match e { - TransactionValidityError::Invalid(InvalidTransaction::Custom( - INVALID_VALIDATORS_LEN, - )) => "invalid validators len", - e @ _ => <&'static str>::from(e), - }, - )?; + ImOnline::pre_dispatch(&crate::Call::heartbeat { + heartbeat: heartbeat.clone(), + signature: signature.clone(), + }) + .map_err(|e| match e { + TransactionValidityError::Invalid(InvalidTransaction::Custom(INVALID_VALIDATORS_LEN)) => + "invalid validators len", + e @ _ => <&'static str>::from(e), + })?; ImOnline::heartbeat(Origin::none(), heartbeat, signature) } @@ -237,7 +238,7 @@ fn should_generate_heartbeats() { // check stuff about the transaction. let ex: Extrinsic = Decode::decode(&mut &*transaction).unwrap(); let heartbeat = match ex.call { - crate::mock::Call::ImOnline(crate::Call::heartbeat(h, ..)) => h, + crate::mock::Call::ImOnline(crate::Call::heartbeat { heartbeat, .. }) => heartbeat, e => panic!("Unexpected call: {:?}", e), }; @@ -352,7 +353,7 @@ fn should_not_send_a_report_if_already_online() { // check stuff about the transaction. let ex: Extrinsic = Decode::decode(&mut &*transaction).unwrap(); let heartbeat = match ex.call { - crate::mock::Call::ImOnline(crate::Call::heartbeat(h, ..)) => h, + crate::mock::Call::ImOnline(crate::Call::heartbeat { heartbeat, .. 
}) => heartbeat, e => panic!("Unexpected call: {:?}", e), }; diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index c226ea2cf2350..17d04c43fa5d9 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -32,6 +33,7 @@ default = ["std"] std = [ "sp-keyring", "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index 331873d42451c..0901a89d41ad6 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -260,7 +260,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::AccountIndex = "AccountIndex")] pub enum Event { /// A account index was assigned. 
\[index, who\] IndexAssigned(T::AccountId, T::AccountIndex), diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 351e72a77a555..f14d65310cc70 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } @@ -33,6 +34,7 @@ sp-io = { version = "4.0.0-dev", path = "../../primitives/io" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "frame-support/std", "sp-runtime/std", diff --git a/frame/lottery/src/benchmarking.rs b/frame/lottery/src/benchmarking.rs index 706561471ee5d..3b7035c72deb0 100644 --- a/frame/lottery/src/benchmarking.rs +++ b/frame/lottery/src/benchmarking.rs @@ -35,11 +35,11 @@ fn setup_lottery(repeat: bool) -> Result<(), &'static str> { let delay = 5u32.into(); // Calls will be maximum length... let mut calls = vec![ - frame_system::Call::::set_code(vec![]).into(); + frame_system::Call::::set_code { code: vec![] }.into(); T::MaxCalls::get().saturating_sub(1) as usize ]; // Last call will be the match for worst case scenario. - calls.push(frame_system::Call::::remark(vec![]).into()); + calls.push(frame_system::Call::::remark { remark: vec![] }.into()); let origin = T::ManagerOrigin::successful_origin(); Lottery::::set_calls(origin.clone(), calls)?; Lottery::::start_lottery(origin, price, length, delay, repeat)?; @@ -53,7 +53,7 @@ benchmarks! 
{ setup_lottery::(false)?; // force user to have a long vec of calls participating let set_code_index: CallIndex = Lottery::::call_to_index( - &frame_system::Call::::set_code(vec![]).into() + &frame_system::Call::::set_code{ code: vec![] }.into() )?; let already_called: (u32, Vec) = ( LotteryIndex::::get(), @@ -64,7 +64,7 @@ benchmarks! { ); Participants::::insert(&caller, already_called); - let call = frame_system::Call::::remark(vec![]); + let call = frame_system::Call::::remark { remark: vec![] }; }: _(RawOrigin::Signed(caller), Box::new(call.into())) verify { assert_eq!(TicketsCount::::get(), 1); @@ -72,9 +72,9 @@ benchmarks! { set_calls { let n in 0 .. T::MaxCalls::get() as u32; - let calls = vec![frame_system::Call::::remark(vec![]).into(); n as usize]; + let calls = vec![frame_system::Call::::remark { remark: vec![] }.into(); n as usize]; - let call = Call::::set_calls(calls); + let call = Call::::set_calls { calls }; let origin = T::ManagerOrigin::successful_origin(); assert!(CallIndices::::get().is_empty()); }: { call.dispatch_bypass_filter(origin)? } @@ -89,7 +89,7 @@ benchmarks! { let end = 10u32.into(); let payout = 5u32.into(); - let call = Call::::start_lottery(price, end, payout, true); + let call = Call::::start_lottery { price, length: end, delay: payout, repeat: true }; let origin = T::ManagerOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -99,7 +99,7 @@ benchmarks! { stop_repeat { setup_lottery::(true)?; assert_eq!(crate::Lottery::::get().unwrap().repeat, true); - let call = Call::::stop_repeat(); + let call = Call::::stop_repeat {}; let origin = T::ManagerOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -115,7 +115,7 @@ benchmarks! 
{ let lottery_account = Lottery::::account_id(); T::Currency::make_free_balance_be(&lottery_account, T::Currency::minimum_balance() * 10u32.into()); // Buy a ticket - let call = frame_system::Call::::remark(vec![]); + let call = frame_system::Call::::remark { remark: vec![] }; Lottery::::buy_ticket(RawOrigin::Signed(winner.clone()).into(), Box::new(call.into()))?; // Kill user account for worst case T::Currency::make_free_balance_be(&winner, 0u32.into()); @@ -146,7 +146,7 @@ benchmarks! { let lottery_account = Lottery::::account_id(); T::Currency::make_free_balance_be(&lottery_account, T::Currency::minimum_balance() * 10u32.into()); // Buy a ticket - let call = frame_system::Call::::remark(vec![]); + let call = frame_system::Call::::remark { remark: vec![] }; Lottery::::buy_ticket(RawOrigin::Signed(winner.clone()).into(), Box::new(call.into()))?; // Kill user account for worst case T::Currency::make_free_balance_be(&winner, 0u32.into()); diff --git a/frame/lottery/src/lib.rs b/frame/lottery/src/lib.rs index c879a819b0b79..260b4c2d76ae9 100644 --- a/frame/lottery/src/lib.rs +++ b/frame/lottery/src/lib.rs @@ -76,7 +76,7 @@ type BalanceOf = // We use this to uniquely match someone's incoming call with the calls configured for the lottery. type CallIndex = (u8, u8); -#[derive(Encode, Decode, Default, Eq, PartialEq, RuntimeDebug)] +#[derive(Encode, Decode, Default, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct LotteryConfig { /// Price per entry. price: Balance, @@ -170,7 +170,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event { /// A lottery has been started! 
LotteryStarted, diff --git a/frame/lottery/src/tests.rs b/frame/lottery/src/tests.rs index 9cc4c582943e3..623beea4a6b5b 100644 --- a/frame/lottery/src/tests.rs +++ b/frame/lottery/src/tests.rs @@ -43,8 +43,8 @@ fn basic_end_to_end_works() { let length = 20; let delay = 5; let calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; // Set calls for the lottery @@ -55,7 +55,7 @@ fn basic_end_to_end_works() { assert!(crate::Lottery::::get().is_some()); assert_eq!(Balances::free_balance(&1), 100); - let call = Box::new(Call::Balances(BalancesCall::transfer(2, 20))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 20 })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); // 20 from the transfer, 10 from buying a ticket assert_eq!(Balances::free_balance(&1), 100 - 20 - 10); @@ -96,17 +96,17 @@ fn set_calls_works() { assert!(!CallIndices::::exists()); let calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); assert!(CallIndices::::exists()); let too_many_calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), - Call::System(SystemCall::remark(vec![])), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), + Call::System(SystemCall::remark { remark: vec![] }), ]; assert_noop!( @@ -150,7 +150,7 @@ fn buy_ticket_works_as_simple_passthrough() { // as a simple passthrough to the real call. 
new_test_ext().execute_with(|| { // No lottery set up - let call = Box::new(Call::Balances(BalancesCall::transfer(2, 20))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 20 })); // This is just a basic transfer then assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(Balances::free_balance(&1), 100 - 20); @@ -158,8 +158,8 @@ fn buy_ticket_works_as_simple_passthrough() { // Lottery is set up, but too expensive to enter, so `do_buy_ticket` fails. let calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); @@ -170,21 +170,24 @@ fn buy_ticket_works_as_simple_passthrough() { assert_eq!(TicketsCount::::get(), 0); // If call would fail, the whole thing still fails the same - let fail_call = Box::new(Call::Balances(BalancesCall::transfer(2, 1000))); + let fail_call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1000 })); assert_noop!( Lottery::buy_ticket(Origin::signed(1), fail_call), BalancesError::::InsufficientBalance, ); - let bad_origin_call = Box::new(Call::Balances(BalancesCall::force_transfer(0, 0, 0))); - assert_noop!(Lottery::buy_ticket(Origin::signed(1), bad_origin_call), BadOrigin); + let bad_origin_call = + Box::new(Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 })); + assert_noop!(Lottery::buy_ticket(Origin::signed(1), bad_origin_call), BadOrigin,); // User can call other txs, but doesn't get a ticket - let remark_call = Box::new(Call::System(SystemCall::remark(b"hello, world!".to_vec()))); + let remark_call = + Box::new(Call::System(SystemCall::remark { remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(2), remark_call)); assert_eq!(TicketsCount::::get(), 0); - 
let successful_call = Box::new(Call::Balances(BalancesCall::transfer(2, 1))); + let successful_call = + Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1 })); assert_ok!(Lottery::buy_ticket(Origin::signed(2), successful_call)); assert_eq!(TicketsCount::::get(), 1); }); @@ -195,13 +198,13 @@ fn buy_ticket_works() { new_test_ext().execute_with(|| { // Set calls for the lottery. let calls = vec![ - Call::System(SystemCall::remark(vec![])), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::System(SystemCall::remark { remark: vec![] }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); // Can't buy ticket before start - let call = Box::new(Call::Balances(BalancesCall::transfer(2, 1))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1 })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(TicketsCount::::get(), 0); @@ -214,12 +217,12 @@ fn buy_ticket_works() { assert_eq!(TicketsCount::::get(), 1); // Can't buy another of the same ticket (even if call is slightly changed) - let call = Box::new(Call::Balances(BalancesCall::transfer(3, 30))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 3, value: 30 })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call)); assert_eq!(TicketsCount::::get(), 1); // Buy ticket for remark - let call = Box::new(Call::System(SystemCall::remark(b"hello, world!".to_vec()))); + let call = Box::new(Call::System(SystemCall::remark { remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(TicketsCount::::get(), 2); diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 8136b818eac8e..acc82f7678de6 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = 
"2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } @@ -29,6 +30,7 @@ frame-system = { version = "4.0.0-dev", default-features = false, path = "../sys default = ["std"] std = [ "codec/std", + "scale-info/std", "log/std", "sp-core/std", "sp-io/std", diff --git a/frame/membership/src/lib.rs b/frame/membership/src/lib.rs index b66dc51b3b0ec..7922d9efaf569 100644 --- a/frame/membership/src/lib.rs +++ b/frame/membership/src/lib.rs @@ -131,9 +131,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - PhantomData<(T::AccountId, >::Event)> = "sp_std::marker::PhantomData<(AccountId, Event)>", - )] pub enum Event, I: 'static = ()> { /// The given member was added; see the transaction for who. MemberAdded, diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index eecdfd7a9e841..02b4be182ef82 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } mmr-lib = { package = "ckb-merkle-mountain-range", default-features = false, version = "0.3.1" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } @@ -34,6 +35,7 @@ hex-literal = "0.3" default = ["std"] std = [ "codec/std", + "scale-info/std", "mmr-lib/std", "sp-core/std", "sp-io/std", diff --git a/frame/merkle-mountain-range/src/lib.rs b/frame/merkle-mountain-range/src/lib.rs index 83fdc5b1715c9..01bf1b2254f09 100644 --- a/frame/merkle-mountain-range/src/lib.rs +++ 
b/frame/merkle-mountain-range/src/lib.rs @@ -124,7 +124,8 @@ pub mod pallet { + Copy + Default + codec::Codec - + codec::EncodeLike; + + codec::EncodeLike + + scale_info::TypeInfo; /// Data stored in the leaf nodes. /// diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml deleted file mode 100644 index 332ce5b70c26e..0000000000000 --- a/frame/metadata/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "frame-metadata" -version = "14.0.0-dev" -authors = ["Parity Technologies "] -edition = "2018" -license = "Apache-2.0" -homepage = "https://substrate.dev" -repository = "https://github.com/paritytech/substrate/" -description = "Decodable variant of the RuntimeMetadata." -readme = "README.md" - -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] - -[dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.126", optional = true, features = ["derive"] } -sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } -sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } - -[features] -default = ["std"] -std = [ - "codec/std", - "sp-std/std", - "sp-core/std", - "serde", -] diff --git a/frame/metadata/README.md b/frame/metadata/README.md deleted file mode 100644 index 423af8602e3f0..0000000000000 --- a/frame/metadata/README.md +++ /dev/null @@ -1,7 +0,0 @@ -Decodable variant of the RuntimeMetadata. - -This really doesn't belong here, but is necessary for the moment. In the future -it should be removed entirely to an external module for shimming on to the -codec-encoded metadata. - -License: Apache-2.0 \ No newline at end of file diff --git a/frame/metadata/src/lib.rs b/frame/metadata/src/lib.rs deleted file mode 100644 index 7dcf5932df283..0000000000000 --- a/frame/metadata/src/lib.rs +++ /dev/null @@ -1,466 +0,0 @@ -// This file is part of Substrate. 
- -// Copyright (C) 2018-2021 Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: Apache-2.0 - -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Decodable variant of the RuntimeMetadata. -//! -//! This really doesn't belong here, but is necessary for the moment. In the future -//! it should be removed entirely to an external module for shimming on to the -//! codec-encoded metadata. - -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(feature = "std")] -use codec::{Decode, Error, Input}; -use codec::{Encode, Output}; -#[cfg(feature = "std")] -use serde::Serialize; -use sp_core::RuntimeDebug; -use sp_std::vec::Vec; - -#[cfg(feature = "std")] -type StringBuf = String; - -/// Current prefix of metadata -pub const META_RESERVED: u32 = 0x6174656d; // 'meta' warn endianness - -/// On `no_std` we do not support `Decode` and thus `StringBuf` is just `&'static str`. -/// So, if someone tries to decode this stuff on `no_std`, they will get a compilation error. -#[cfg(not(feature = "std"))] -type StringBuf = &'static str; - -/// A type that decodes to a different type than it encodes. -/// The user needs to make sure that both types use the same encoding. -/// -/// For example a `&'static [ &'static str ]` can be decoded to a `Vec`. 
-#[derive(Clone)] -pub enum DecodeDifferent -where - B: 'static, - O: 'static, -{ - Encode(B), - Decoded(O), -} - -impl Encode for DecodeDifferent -where - B: Encode + 'static, - O: Encode + 'static, -{ - fn encode_to(&self, dest: &mut W) { - match self { - DecodeDifferent::Encode(b) => b.encode_to(dest), - DecodeDifferent::Decoded(o) => o.encode_to(dest), - } - } -} - -impl codec::EncodeLike for DecodeDifferent -where - B: Encode + 'static, - O: Encode + 'static, -{ -} - -#[cfg(feature = "std")] -impl Decode for DecodeDifferent -where - B: 'static, - O: Decode + 'static, -{ - fn decode(input: &mut I) -> Result { - ::decode(input).map(|val| DecodeDifferent::Decoded(val)) - } -} - -impl PartialEq for DecodeDifferent -where - B: Encode + Eq + PartialEq + 'static, - O: Encode + Eq + PartialEq + 'static, -{ - fn eq(&self, other: &Self) -> bool { - self.encode() == other.encode() - } -} - -impl Eq for DecodeDifferent -where - B: Encode + Eq + PartialEq + 'static, - O: Encode + Eq + PartialEq + 'static, -{ -} - -impl sp_std::fmt::Debug for DecodeDifferent -where - B: sp_std::fmt::Debug + Eq + 'static, - O: sp_std::fmt::Debug + Eq + 'static, -{ - fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { - match self { - DecodeDifferent::Encode(b) => b.fmt(f), - DecodeDifferent::Decoded(o) => o.fmt(f), - } - } -} - -#[cfg(feature = "std")] -impl serde::Serialize for DecodeDifferent -where - B: serde::Serialize + 'static, - O: serde::Serialize + 'static, -{ - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - match self { - DecodeDifferent::Encode(b) => b.serialize(serializer), - DecodeDifferent::Decoded(o) => o.serialize(serializer), - } - } -} - -pub type DecodeDifferentArray = DecodeDifferent<&'static [B], Vec>; - -type DecodeDifferentStr = DecodeDifferent<&'static str, StringBuf>; - -/// All the metadata about a function. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct FunctionMetadata { - pub name: DecodeDifferentStr, - pub arguments: DecodeDifferentArray, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about a function argument. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct FunctionArgumentMetadata { - pub name: DecodeDifferentStr, - pub ty: DecodeDifferentStr, -} - -/// Newtype wrapper for support encoding functions (actual the result of the function). -#[derive(Clone, Eq)] -pub struct FnEncode(pub fn() -> E) -where - E: Encode + 'static; - -impl Encode for FnEncode { - fn encode_to(&self, dest: &mut W) { - self.0().encode_to(dest); - } -} - -impl codec::EncodeLike for FnEncode {} - -impl PartialEq for FnEncode { - fn eq(&self, other: &Self) -> bool { - self.0().eq(&other.0()) - } -} - -impl sp_std::fmt::Debug for FnEncode { - fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { - self.0().fmt(f) - } -} - -#[cfg(feature = "std")] -impl serde::Serialize for FnEncode { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - self.0().serialize(serializer) - } -} - -/// All the metadata about an outer event. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct OuterEventMetadata { - pub name: DecodeDifferentStr, - pub events: DecodeDifferentArray< - (&'static str, FnEncode<&'static [EventMetadata]>), - (StringBuf, Vec), - >, -} - -/// All the metadata about an event. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct EventMetadata { - pub name: DecodeDifferentStr, - pub arguments: DecodeDifferentArray<&'static str, StringBuf>, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about one storage entry. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct StorageEntryMetadata { - pub name: DecodeDifferentStr, - pub modifier: StorageEntryModifier, - pub ty: StorageEntryType, - pub default: ByteGetter, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about one module constant. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ModuleConstantMetadata { - pub name: DecodeDifferentStr, - pub ty: DecodeDifferentStr, - pub value: ByteGetter, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about a module error. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ErrorMetadata { - pub name: DecodeDifferentStr, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about errors in a module. -pub trait ModuleErrorMetadata { - fn metadata() -> &'static [ErrorMetadata]; -} - -impl ModuleErrorMetadata for &'static str { - fn metadata() -> &'static [ErrorMetadata] { - &[] - } -} - -/// A technical trait to store lazy initiated vec value as static dyn pointer. -pub trait DefaultByte: Send + Sync { - fn default_byte(&self) -> Vec; -} - -/// Wrapper over dyn pointer for accessing a cached once byte value. -#[derive(Clone)] -pub struct DefaultByteGetter(pub &'static dyn DefaultByte); - -/// Decode different for static lazy initiated byte value. 
-pub type ByteGetter = DecodeDifferent>; - -impl Encode for DefaultByteGetter { - fn encode_to(&self, dest: &mut W) { - self.0.default_byte().encode_to(dest) - } -} - -impl codec::EncodeLike for DefaultByteGetter {} - -impl PartialEq for DefaultByteGetter { - fn eq(&self, other: &DefaultByteGetter) -> bool { - let left = self.0.default_byte(); - let right = other.0.default_byte(); - left.eq(&right) - } -} - -impl Eq for DefaultByteGetter {} - -#[cfg(feature = "std")] -impl serde::Serialize for DefaultByteGetter { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - self.0.default_byte().serialize(serializer) - } -} - -impl sp_std::fmt::Debug for DefaultByteGetter { - fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { - self.0.default_byte().fmt(f) - } -} - -/// Hasher used by storage maps -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum StorageHasher { - Blake2_128, - Blake2_256, - Blake2_128Concat, - Twox128, - Twox256, - Twox64Concat, - Identity, -} - -/// A storage entry type. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum StorageEntryType { - Plain(DecodeDifferentStr), - Map { - hasher: StorageHasher, - key: DecodeDifferentStr, - value: DecodeDifferentStr, - // is_linked flag previously, unused now to keep backwards compat - unused: bool, - }, - DoubleMap { - hasher: StorageHasher, - key1: DecodeDifferentStr, - key2: DecodeDifferentStr, - value: DecodeDifferentStr, - key2_hasher: StorageHasher, - }, - NMap { - keys: DecodeDifferentArray<&'static str, StringBuf>, - hashers: DecodeDifferentArray, - value: DecodeDifferentStr, - }, -} - -/// A storage entry modifier. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum StorageEntryModifier { - Optional, - Default, -} - -/// All metadata of the storage. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct StorageMetadata { - /// The common prefix used by all storage entries. - pub prefix: DecodeDifferent<&'static str, StringBuf>, - pub entries: DecodeDifferent<&'static [StorageEntryMetadata], Vec>, -} - -/// Metadata prefixed by a u32 for reserved usage -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct RuntimeMetadataPrefixed(pub u32, pub RuntimeMetadata); - -/// Metadata of the extrinsic used by the runtime. -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ExtrinsicMetadata { - /// Extrinsic version. - pub version: u8, - /// The signed extensions in the order they appear in the extrinsic. - pub signed_extensions: Vec, -} - -/// The metadata of a runtime. -/// The version ID encoded/decoded through -/// the enum nature of `RuntimeMetadata`. -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum RuntimeMetadata { - /// Unused; enum filler. - V0(RuntimeMetadataDeprecated), - /// Version 1 for runtime metadata. No longer used. - V1(RuntimeMetadataDeprecated), - /// Version 2 for runtime metadata. No longer used. - V2(RuntimeMetadataDeprecated), - /// Version 3 for runtime metadata. No longer used. - V3(RuntimeMetadataDeprecated), - /// Version 4 for runtime metadata. No longer used. - V4(RuntimeMetadataDeprecated), - /// Version 5 for runtime metadata. No longer used. - V5(RuntimeMetadataDeprecated), - /// Version 6 for runtime metadata. No longer used. - V6(RuntimeMetadataDeprecated), - /// Version 7 for runtime metadata. No longer used. 
- V7(RuntimeMetadataDeprecated), - /// Version 8 for runtime metadata. No longer used. - V8(RuntimeMetadataDeprecated), - /// Version 9 for runtime metadata. No longer used. - V9(RuntimeMetadataDeprecated), - /// Version 10 for runtime metadata. No longer used. - V10(RuntimeMetadataDeprecated), - /// Version 11 for runtime metadata. No longer used. - V11(RuntimeMetadataDeprecated), - /// Version 12 for runtime metadata. No longer used. - V12(RuntimeMetadataDeprecated), - /// Version 13 for runtime metadata. - V13(RuntimeMetadataV13), -} - -/// Enum that should fail. -#[derive(Eq, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Serialize))] -pub enum RuntimeMetadataDeprecated {} - -impl Encode for RuntimeMetadataDeprecated { - fn encode_to(&self, _dest: &mut W) {} -} - -impl codec::EncodeLike for RuntimeMetadataDeprecated {} - -#[cfg(feature = "std")] -impl Decode for RuntimeMetadataDeprecated { - fn decode(_input: &mut I) -> Result { - Err("Decoding is not supported".into()) - } -} - -/// The metadata of a runtime. -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct RuntimeMetadataV13 { - /// Metadata of all the modules. - pub modules: DecodeDifferentArray, - /// Metadata of the extrinsic. - pub extrinsic: ExtrinsicMetadata, -} - -/// The latest version of the metadata. -pub type RuntimeMetadataLastVersion = RuntimeMetadataV13; - -/// All metadata about an runtime module. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ModuleMetadata { - pub name: DecodeDifferentStr, - pub storage: Option, StorageMetadata>>, - pub calls: ODFnA, - pub event: ODFnA, - pub constants: DFnA, - pub errors: DFnA, - /// Define the index of the module, this index will be used for the encoding of module event, - /// call and origin variants. 
- pub index: u8, -} - -type ODFnA = Option>; -type DFnA = DecodeDifferent, Vec>; - -impl Into for RuntimeMetadataPrefixed { - fn into(self) -> sp_core::OpaqueMetadata { - sp_core::OpaqueMetadata::new(self.encode()) - } -} - -impl Into for RuntimeMetadataLastVersion { - fn into(self) -> RuntimeMetadataPrefixed { - RuntimeMetadataPrefixed(META_RESERVED, RuntimeMetadata::V13(self)) - } -} diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index 84314256499a3..177334d4ccf8d 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -30,6 +31,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git a/frame/multisig/src/benchmarking.rs b/frame/multisig/src/benchmarking.rs index 6847036ce4716..2e23dff156e07 100644 --- a/frame/multisig/src/benchmarking.rs +++ b/frame/multisig/src/benchmarking.rs @@ -40,7 +40,8 @@ fn setup_multi(s: u32, z: u32) -> Result<(Vec, Vec) } signatories.sort(); // Must first convert to outer call type. - let call: ::Call = frame_system::Call::::remark(vec![0; z as usize]).into(); + let call: ::Call = + frame_system::Call::::remark { remark: vec![0; z as usize] }.into(); let call_data = call.encode(); return Ok((signatories, call_data)) } @@ -51,7 +52,9 @@ benchmarks! { let z in 0 .. 
10_000; let max_signatories = T::MaxSignatories::get().into(); let (mut signatories, _) = setup_multi::(max_signatories, z)?; - let call: ::Call = frame_system::Call::::remark(vec![0; z as usize]).into(); + let call: ::Call = frame_system::Call::::remark { + remark: vec![0; z as usize] + }.into(); let call_hash = call.using_encoded(blake2_256); let multi_account_id = Multisig::::multi_account_id(&signatories, 1); let caller = signatories.pop().ok_or("signatories should have len 2 or more")?; diff --git a/frame/multisig/src/lib.rs b/frame/multisig/src/lib.rs index b1ef5f11a5e36..43040ada45a98 100644 --- a/frame/multisig/src/lib.rs +++ b/frame/multisig/src/lib.rs @@ -61,6 +61,7 @@ use frame_support::{ RuntimeDebug, }; use frame_system::{self as system, RawOrigin}; +use scale_info::TypeInfo; use sp_io::hashing::blake2_256; use sp_runtime::{ traits::{Dispatchable, Zero}, @@ -79,7 +80,7 @@ pub type OpaqueCall = Vec; /// A global extrinsic index, formed as the extrinsic index within a block, together with that /// block's height. This allows a transaction in which a multisig operation of a particular /// composite was created to be uniquely identified. -#[derive(Copy, Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Copy, Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct Timepoint { /// The height of the chain at the point in time. height: BlockNumber, @@ -88,7 +89,7 @@ pub struct Timepoint { } /// An open multisig operation. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct Multisig { /// The extrinsic when the multisig operation was opened. 
when: Timepoint, @@ -203,11 +204,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - T::BlockNumber = "BlockNumber", - Timepoint = "Timepoint" - )] pub enum Event { /// A new multisig operation has begun. \[approving, multisig, call_hash\] NewMultisig(T::AccountId, T::AccountId, CallHash), diff --git a/frame/multisig/src/tests.rs b/frame/multisig/src/tests.rs index 2809a106d66ea..3d311cf5d3dc8 100644 --- a/frame/multisig/src/tests.rs +++ b/frame/multisig/src/tests.rs @@ -99,7 +99,7 @@ impl Contains for TestBaseCallFilter { match *c { Call::Balances(_) => true, // Needed for benchmarking - Call::System(frame_system::Call::remark(_)) => true, + Call::System(frame_system::Call::remark { .. }) => true, _ => false, } } @@ -132,6 +132,10 @@ fn now() -> Timepoint { Multisig::timepoint() } +fn call_transfer(dest: u64, value: u64) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) +} + #[test] fn multisig_deposit_is_taken_and_returned() { new_test_ext().execute_with(|| { @@ -140,7 +144,7 @@ fn multisig_deposit_is_taken_and_returned() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); assert_ok!(Multisig::as_multi( @@ -177,7 +181,7 @@ fn multisig_deposit_is_taken_and_returned_with_call_storage() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -206,7 +210,7 @@ fn multisig_deposit_is_taken_and_returned_with_alt_call_storage() { 
assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -254,7 +258,7 @@ fn multisig_deposit_is_taken_and_returned_with_alt_call_storage() { #[test] fn cancel_multisig_returns_deposit() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -294,7 +298,7 @@ fn timepoint_checking_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_noop!( @@ -339,7 +343,7 @@ fn multisig_2_of_3_works_with_call_storing() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -366,7 +370,7 @@ fn multisig_2_of_3_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -394,7 +398,7 @@ fn multisig_3_of_3_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = 
Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -432,7 +436,7 @@ fn multisig_3_of_3_works() { #[test] fn cancel_multisig_works() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -467,7 +471,7 @@ fn cancel_multisig_works() { #[test] fn cancel_multisig_with_call_storage_works() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::as_multi(Origin::signed(1), 3, vec![2, 3], None, call, true, 0)); assert_eq!(Balances::free_balance(1), 4); @@ -497,7 +501,7 @@ fn cancel_multisig_with_call_storage_works() { #[test] fn cancel_multisig_with_alt_call_storage_works() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -532,7 +536,7 @@ fn multisig_2_of_3_as_multi_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); assert_ok!(Multisig::as_multi( @@ -567,10 +571,10 @@ fn multisig_2_of_3_as_multi_with_many_calls_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call1 = Call::Balances(BalancesCall::transfer(6, 10)); + let call1 = call_transfer(6, 10); let call1_weight = 
call1.get_dispatch_info().weight; let data1 = call1.encode(); - let call2 = Call::Balances(BalancesCall::transfer(7, 5)); + let call2 = call_transfer(7, 5); let call2_weight = call2.get_dispatch_info().weight; let data2 = call2.encode(); @@ -624,7 +628,7 @@ fn multisig_2_of_3_cannot_reissue_same_call() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 10)); + let call = call_transfer(6, 10); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -677,7 +681,7 @@ fn multisig_2_of_3_cannot_reissue_same_call() { #[test] fn minimum_threshold_check_works() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); assert_noop!( Multisig::as_multi(Origin::signed(1), 0, vec![2], None, call.clone(), false, 0), Error::::MinimumThreshold, @@ -692,7 +696,7 @@ fn minimum_threshold_check_works() { #[test] fn too_many_signatories_fails() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); assert_noop!( Multisig::as_multi(Origin::signed(1), 2, vec![2, 3, 4], None, call.clone(), false, 0), Error::::TooManySignatories, @@ -703,7 +707,7 @@ fn too_many_signatories_fails() { #[test] fn duplicate_approvals_are_ignored() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -754,7 +758,7 @@ fn multisig_1_of_3_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 
15).encode(); let hash = blake2_256(&call); assert_noop!( Multisig::approve_as_multi(Origin::signed(1), 1, vec![2, 3], None, hash.clone(), 0), @@ -764,7 +768,7 @@ fn multisig_1_of_3_works() { Multisig::as_multi(Origin::signed(1), 1, vec![2, 3], None, call.clone(), false, 0), Error::::MinimumThreshold, ); - let boxed_call = Box::new(Call::Balances(BalancesCall::transfer(6, 15))); + let boxed_call = Box::new(call_transfer(6, 15)); assert_ok!(Multisig::as_multi_threshold_1(Origin::signed(1), vec![2, 3], boxed_call)); assert_eq!(Balances::free_balance(6), 15); @@ -774,7 +778,7 @@ fn multisig_1_of_3_works() { #[test] fn multisig_filters() { new_test_ext().execute_with(|| { - let call = Box::new(Call::System(frame_system::Call::set_code(vec![]))); + let call = Box::new(Call::System(frame_system::Call::set_code { code: vec![] })); assert_noop!( Multisig::as_multi_threshold_1(Origin::signed(1), vec![2], call.clone()), DispatchError::BadOrigin, @@ -790,7 +794,7 @@ fn weight_check_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let data = call.encode(); assert_ok!(Multisig::as_multi( Origin::signed(1), @@ -821,7 +825,7 @@ fn multisig_handles_no_preimage_after_all_approve() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 02e64491650cd..431ee2c84157c 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = 
false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -28,6 +29,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/nicks/src/lib.rs b/frame/nicks/src/lib.rs index a5c22b619a5eb..16c7e2042dda0 100644 --- a/frame/nicks/src/lib.rs +++ b/frame/nicks/src/lib.rs @@ -91,7 +91,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event { /// A name was set. \[who\] NameSet(T::AccountId), diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 35b02747e4006..635e72e3a8b8a 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } @@ -25,6 +26,7 @@ log = { version = "0.4.14", default-features = false } default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "sp-core/std", diff --git a/frame/node-authorization/src/lib.rs 
b/frame/node-authorization/src/lib.rs index 5551ec2ad2c40..016f12d2eb838 100644 --- a/frame/node-authorization/src/lib.rs +++ b/frame/node-authorization/src/lib.rs @@ -126,7 +126,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { /// The given well known node was added. NodeAdded(PeerId, T::AccountId), diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index c4295747d649f..8fdcbf46fa3e1 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.126", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -32,6 +33,7 @@ default = ["std"] std = [ "pallet-balances/std", "codec/std", + "scale-info/std", "sp-std/std", "serde", "sp-runtime/std", diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index dc408ee8121dd..b21e6cf9b7e13 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../support" } 
frame-system = { version = "4.0.0-dev", default-features = false, path = "../../system" } @@ -57,4 +58,5 @@ std = [ "frame-election-provider-support/std", "sp-std/std", "codec/std", + "scale-info/std", ] diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 22edf1f3c20ac..83db82990d105 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["max-encoded-len"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -31,6 +32,7 @@ pallet-utility = { version = "4.0.0-dev", path = "../utility" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git a/frame/proxy/src/benchmarking.rs b/frame/proxy/src/benchmarking.rs index 77cdff11de9ce..e66f6782c19e1 100644 --- a/frame/proxy/src/benchmarking.rs +++ b/frame/proxy/src/benchmarking.rs @@ -83,7 +83,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); }: _(RawOrigin::Signed(caller), real, Some(T::ProxyType::default()), Box::new(call)) verify { assert_last_event::(Event::ProxyExecuted(Ok(())).into()) @@ -98,7 +98,7 @@ benchmarks! { T::Currency::make_free_balance_be(&delegate, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. 
let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(delegate.clone()).into(), real.clone(), @@ -118,7 +118,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(caller.clone()).into(), real.clone(), @@ -139,7 +139,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(caller.clone()).into(), real.clone(), @@ -161,7 +161,7 @@ benchmarks! { // ... and "real" is the traditional caller. This is not a typo. 
let real: T::AccountId = whitelisted_caller(); add_announcements::(a, Some(caller.clone()), None)?; - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); let call_hash = T::CallHasher::hash_of(&call); }: _(RawOrigin::Signed(caller.clone()), real.clone(), call_hash) verify { diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index 0537ed4a32392..b73101fa73486 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -42,6 +42,7 @@ use frame_support::{ RuntimeDebug, }; use frame_system::{self as system}; +use scale_info::TypeInfo; use sp_io::hashing::blake2_256; use sp_runtime::{ traits::{Dispatchable, Hash, Saturating, Zero}, @@ -60,7 +61,17 @@ type BalanceOf = /// The parameters under which a particular account has a proxy relationship with some other /// account. #[derive( - Encode, Decode, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, MaxEncodedLen, + Encode, + Decode, + Clone, + Copy, + Eq, + PartialEq, + Ord, + PartialOrd, + RuntimeDebug, + MaxEncodedLen, + TypeInfo, )] pub struct ProxyDefinition { /// The account which may act on behalf of another. @@ -73,7 +84,7 @@ pub struct ProxyDefinition { } /// Details surrounding a specific instance of an announcement to make a call. -#[derive(Encode, Decode, Clone, Copy, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Clone, Copy, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct Announcement { /// The account which made the announcement. real: AccountId, @@ -534,12 +545,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata( - T::AccountId = "AccountId", - T::ProxyType = "ProxyType", - CallHashOf = "Hash", - T::BlockNumber = "BlockNumber", - )] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A proxy was executed correctly, with the given \[result\]. 
@@ -782,12 +787,13 @@ impl Pallet { match c.is_sub_type() { // Proxy call cannot add or remove a proxy with more permissions than it already // has. - Some(Call::add_proxy(_, ref pt, _)) | Some(Call::remove_proxy(_, ref pt, _)) - if !def.proxy_type.is_superset(&pt) => + Some(Call::add_proxy { ref proxy_type, .. }) | + Some(Call::remove_proxy { ref proxy_type, .. }) + if !def.proxy_type.is_superset(&proxy_type) => false, // Proxy call cannot remove all proxies or kill anonymous proxies unless it has full // permissions. - Some(Call::remove_proxies(..)) | Some(Call::kill_anonymous(..)) + Some(Call::remove_proxies { .. }) | Some(Call::kill_anonymous { .. }) if def.proxy_type != T::ProxyType::default() => false, _ => def.proxy_type.filter(c), diff --git a/frame/proxy/src/tests.rs b/frame/proxy/src/tests.rs index eb4193a18d935..d319ebb1a5ab0 100644 --- a/frame/proxy/src/tests.rs +++ b/frame/proxy/src/tests.rs @@ -107,7 +107,17 @@ parameter_types! { pub const AnnouncementDepositFactor: u64 = 1; } #[derive( - Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, MaxEncodedLen, + Copy, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Encode, + Decode, + RuntimeDebug, + MaxEncodedLen, + scale_info::TypeInfo, )] pub enum ProxyType { Any, @@ -124,9 +134,9 @@ impl InstanceFilter for ProxyType { match self { ProxyType::Any => true, ProxyType::JustTransfer => { - matches!(c, Call::Balances(pallet_balances::Call::transfer(..))) + matches!(c, Call::Balances(pallet_balances::Call::transfer { .. })) }, - ProxyType::JustUtility => matches!(c, Call::Utility(..)), + ProxyType::JustUtility => matches!(c, Call::Utility { .. }), } } fn is_superset(&self, o: &Self) -> bool { @@ -138,7 +148,7 @@ impl Contains for BaseFilter { fn contains(c: &Call) -> bool { match *c { // Remark is used as a no-op call in the benchmarking - Call::System(SystemCall::remark(_)) => true, + Call::System(SystemCall::remark { .. 
}) => true, Call::System(_) => false, _ => true, } @@ -190,6 +200,10 @@ fn expect_events(e: Vec) { assert_eq!(last_events(e.len()), e); } +fn call_transfer(dest: u64, value: u64) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) +} + #[test] fn announcement_works() { new_test_ext().execute_with(|| { @@ -272,7 +286,7 @@ fn announcer_must_be_proxy() { fn delayed_requires_pre_announcement() { new_test_ext().execute_with(|| { assert_ok!(Proxy::add_proxy(Origin::signed(1), 2, ProxyType::Any, 1)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); let e = Error::::Unannounced; assert_noop!(Proxy::proxy(Origin::signed(2), 1, None, call.clone()), e); let e = Error::::Unannounced; @@ -289,7 +303,7 @@ fn proxy_announced_removes_announcement_and_returns_deposit() { new_test_ext().execute_with(|| { assert_ok!(Proxy::add_proxy(Origin::signed(1), 3, ProxyType::Any, 1)); assert_ok!(Proxy::add_proxy(Origin::signed(2), 3, ProxyType::Any, 1)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); let call_hash = BlakeTwo256::hash_of(&call); assert_ok!(Proxy::announce(Origin::signed(3), 1, call_hash)); assert_ok!(Proxy::announce(Origin::signed(3), 2, call_hash)); @@ -313,7 +327,7 @@ fn filtering_works() { assert_ok!(Proxy::add_proxy(Origin::signed(1), 3, ProxyType::JustTransfer, 0)); assert_ok!(Proxy::add_proxy(Origin::signed(1), 4, ProxyType::JustUtility, 0)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); @@ -323,9 +337,10 @@ fn filtering_works() { let derivative_id = Utility::derivative_account_id(1, 0); assert!(Balances::mutate_account(&derivative_id, |a| a.free = 
1000).is_ok()); - let inner = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let inner = Box::new(call_transfer(6, 1)); - let call = Box::new(Call::Utility(UtilityCall::as_derivative(0, inner.clone()))); + let call = + Box::new(Call::Utility(UtilityCall::as_derivative { index: 0, call: inner.clone() })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); @@ -333,7 +348,7 @@ fn filtering_works() { assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); - let call = Box::new(Call::Utility(UtilityCall::batch(vec![*inner]))); + let call = Box::new(Call::Utility(UtilityCall::batch { calls: vec![*inner] })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); expect_events(vec![ UtilityEvent::BatchCompleted.into(), @@ -347,8 +362,9 @@ fn filtering_works() { ProxyEvent::ProxyExecuted(Ok(())).into(), ]); - let inner = Box::new(Call::Proxy(ProxyCall::add_proxy(5, ProxyType::Any, 0))); - let call = Box::new(Call::Utility(UtilityCall::batch(vec![*inner]))); + let inner = + Box::new(Call::Proxy(ProxyCall::new_call_variant_add_proxy(5, ProxyType::Any, 0))); + let call = Box::new(Call::Utility(UtilityCall::batch { calls: vec![*inner] })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); expect_events(vec![ UtilityEvent::BatchCompleted.into(), @@ -362,7 +378,7 @@ fn filtering_works() { ProxyEvent::ProxyExecuted(Ok(())).into(), ]); - let call = Box::new(Call::Proxy(ProxyCall::remove_proxies())); + let call = Box::new(Call::Proxy(ProxyCall::remove_proxies {})); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, 
call.clone())); @@ -431,7 +447,7 @@ fn proxying_works() { assert_ok!(Proxy::add_proxy(Origin::signed(1), 2, ProxyType::JustTransfer, 0)); assert_ok!(Proxy::add_proxy(Origin::signed(1), 3, ProxyType::Any, 0)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); assert_noop!( Proxy::proxy(Origin::signed(4), 1, None, call.clone()), Error::::NotProxy @@ -444,14 +460,14 @@ fn proxying_works() { System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_eq!(Balances::free_balance(6), 1); - let call = Box::new(Call::System(SystemCall::set_code(vec![]))); + let call = Box::new(Call::System(SystemCall::set_code { code: vec![] })); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); - let call = Box::new(Call::Balances(BalancesCall::transfer_keep_alive(6, 1))); - assert_ok!( - Call::Proxy(super::Call::proxy(1, None, call.clone())).dispatch(Origin::signed(2)) - ); + let call = + Box::new(Call::Balances(BalancesCall::transfer_keep_alive { dest: 6, value: 1 })); + assert_ok!(Call::Proxy(super::Call::new_call_variant_proxy(1, None, call.clone())) + .dispatch(Origin::signed(2))); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); @@ -483,13 +499,19 @@ fn anonymous_works() { System::set_block_number(2); assert_ok!(Proxy::anonymous(Origin::signed(1), ProxyType::Any, 0, 0)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); assert_ok!(Balances::transfer(Origin::signed(3), anon, 5)); assert_ok!(Proxy::proxy(Origin::signed(1), anon, None, call)); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_eq!(Balances::free_balance(6), 1); - let call 
= Box::new(Call::Proxy(ProxyCall::kill_anonymous(1, ProxyType::Any, 0, 1, 0))); + let call = Box::new(Call::Proxy(ProxyCall::new_call_variant_kill_anonymous( + 1, + ProxyType::Any, + 0, + 1, + 0, + ))); assert_ok!(Proxy::proxy(Origin::signed(2), anon2, None, call.clone())); let de = DispatchError::from(Error::::NoPermission).stripped(); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(de)).into()); diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 016d56142ecad..5e8eb6b082879 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } @@ -30,6 +31,7 @@ default = ["std"] std = [ "safe-mix/std", "codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", "frame-system/std", diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index b8601d0852f63..40a89e9b59f89 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, 
path = "../../primitives/runtime" } @@ -28,6 +29,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/recovery/src/lib.rs b/frame/recovery/src/lib.rs index ad61baae60c96..797581788077b 100644 --- a/frame/recovery/src/lib.rs +++ b/frame/recovery/src/lib.rs @@ -155,6 +155,7 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::traits::{CheckedAdd, CheckedMul, Dispatchable, SaturatedConversion}; use sp_std::prelude::*; @@ -176,7 +177,7 @@ type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; /// An active recovery process. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct ActiveRecovery { /// The block number when the recovery process started. created: BlockNumber, @@ -188,7 +189,7 @@ pub struct ActiveRecovery { } /// Configuration for recovering an account. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct RecoveryConfig { /// The minimum number of blocks since the start of the recovery process before the account /// can be recovered. @@ -260,7 +261,6 @@ pub mod pallet { /// Events type. #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { /// A recovery process has been set up for an \[account\]. 
RecoveryCreated(T::AccountId), diff --git a/frame/recovery/src/tests.rs b/frame/recovery/src/tests.rs index 122088bf5ed63..fe971319bc97c 100644 --- a/frame/recovery/src/tests.rs +++ b/frame/recovery/src/tests.rs @@ -44,7 +44,7 @@ fn set_recovered_works() { // Root can set a recovered account though assert_ok!(Recovery::set_recovered(Origin::root(), 5, 1)); // Account 1 should now be able to make a call through account 5 - let call = Box::new(Call::Balances(BalancesCall::transfer(1, 100))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 1, value: 100 })); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 has successfully drained the funds from account 5 assert_eq!(Balances::free_balance(1), 200); @@ -76,15 +76,15 @@ fn recovery_life_cycle_works() { assert_ok!(Recovery::claim_recovery(Origin::signed(1), 5)); // Account 1 can use account 5 to close the active recovery process, claiming the deposited // funds used to initiate the recovery process into account 5. - let call = Box::new(Call::Recovery(RecoveryCall::close_recovery(1))); + let call = Box::new(Call::Recovery(RecoveryCall::close_recovery { rescuer: 1 })); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 can then use account 5 to remove the recovery configuration, claiming the // deposited funds used to create the recovery configuration into account 5. 
- let call = Box::new(Call::Recovery(RecoveryCall::remove_recovery())); + let call = Box::new(Call::Recovery(RecoveryCall::remove_recovery {})); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 should now be able to make a call through account 5 to get all of their funds assert_eq!(Balances::free_balance(5), 110); - let call = Box::new(Call::Balances(BalancesCall::transfer(1, 110))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 1, value: 110 })); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // All funds have been fully recovered! assert_eq!(Balances::free_balance(1), 200); diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 93f76b2369f2c..62b21fe04c9df 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,6 +11,7 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -28,6 +29,7 @@ substrate-test-utils = { version = "4.0.0-dev", path = "../../test-utils" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "frame-benchmarking/std", "frame-support/std", diff --git a/frame/scheduler/src/benchmarking.rs b/frame/scheduler/src/benchmarking.rs index c122bed71b1fd..2c164eaede229 100644 --- a/frame/scheduler/src/benchmarking.rs +++ b/frame/scheduler/src/benchmarking.rs @@ -33,7 +33,7 @@ const BLOCK_NUMBER: u32 = 2; // Add `n` named items to the schedule fn fill_schedule(when: T::BlockNumber, n: u32) -> Result<(), &'static str> { // Essentially a no-op call. 
- let call = frame_system::Call::set_storage(vec![]); + let call = frame_system::Call::set_storage { items: vec![] }; for i in 0..n { // Named schedule is strictly heavier than anonymous Scheduler::::do_schedule_named( @@ -58,7 +58,7 @@ benchmarks! { let periodic = Some((T::BlockNumber::one(), 100)); let priority = 0; // Essentially a no-op call. - let call = Box::new(frame_system::Call::set_storage(vec![]).into()); + let call = Box::new(frame_system::Call::set_storage { items: vec![] }.into()); fill_schedule::(when, s)?; }: _(RawOrigin::Root, when, periodic, priority, call) @@ -95,7 +95,7 @@ benchmarks! { let periodic = Some((T::BlockNumber::one(), 100)); let priority = 0; // Essentially a no-op call. - let call = Box::new(frame_system::Call::set_storage(vec![]).into()); + let call = Box::new(frame_system::Call::set_storage { items: vec![] }.into()); fill_schedule::(when, s)?; }: _(RawOrigin::Root, id, when, periodic, priority, call) diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index ceb163a432e7b..ca9e15812a76d 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -64,6 +64,7 @@ use frame_support::{ }; use frame_system::{self as system, ensure_signed}; pub use pallet::*; +use scale_info::TypeInfo; use sp_runtime::{ traits::{BadOrigin, One, Saturating, Zero}, RuntimeDebug, @@ -87,7 +88,7 @@ struct ScheduledV1 { /// Information regarding an item to be executed in the future. #[cfg_attr(any(feature = "std", test), derive(PartialEq, Eq))] -#[derive(Clone, RuntimeDebug, Encode, Decode)] +#[derive(Clone, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct ScheduledV2 { /// The unique identity for this task, if there is one. maybe_id: Option>, @@ -109,7 +110,7 @@ pub type Scheduled = // A value placed in storage that represents the current version of the Scheduler storage. // This value is used by the `on_runtime_upgrade` logic to determine whether we run // storage migration logic. 
-#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, TypeInfo)] enum Releases { V1, V2, @@ -143,7 +144,7 @@ pub mod pallet { + IsType<::Origin>; /// The caller origin, overarching type of all pallets origins. - type PalletsOrigin: From> + Codec + Clone + Eq; + type PalletsOrigin: From> + Codec + Clone + Eq + TypeInfo; /// The aggregated call type. type Call: Parameter @@ -192,7 +193,6 @@ pub mod pallet { /// Events type. #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::BlockNumber = "BlockNumber", TaskAddress = "TaskAddress")] pub enum Event { /// Scheduled some task. \[when, index\] Scheduled(T::BlockNumber, u32), @@ -926,7 +926,7 @@ mod tests { pub struct BaseFilter; impl Contains for BaseFilter { fn contains(call: &Call) -> bool { - !matches!(call, Call::Logger(LoggerCall::log(_, _))) + !matches!(call, Call::Logger(LoggerCall::log { .. })) } } @@ -1004,7 +1004,7 @@ mod tests { #[test] fn basic_scheduling_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::contains(&call)); assert_ok!(Scheduler::do_schedule(DispatchTime::At(4), None, 127, root(), call)); run_to_block(3); @@ -1020,7 +1020,7 @@ mod tests { fn schedule_after_works() { new_test_ext().execute_with(|| { run_to_block(2); - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::contains(&call)); // This will schedule the call 3 blocks after the next block... 
so block 3 + 3 = 6 assert_ok!(Scheduler::do_schedule(DispatchTime::After(3), None, 127, root(), call)); @@ -1037,7 +1037,7 @@ mod tests { fn schedule_after_zero_works() { new_test_ext().execute_with(|| { run_to_block(2); - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::contains(&call)); assert_ok!(Scheduler::do_schedule(DispatchTime::After(0), None, 127, root(), call)); // Will trigger on the next block. @@ -1057,7 +1057,7 @@ mod tests { Some((3, 3)), 127, root(), - Call::Logger(logger::Call::log(42, 1000)) + Call::Logger(logger::Call::log { i: 42, weight: 1000 }) )); run_to_block(3); assert!(logger::log().is_empty()); @@ -1079,7 +1079,7 @@ mod tests { #[test] fn reschedule_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::contains(&call)); assert_eq!( Scheduler::do_schedule(DispatchTime::At(4), None, 127, root(), call).unwrap(), @@ -1110,7 +1110,7 @@ mod tests { #[test] fn reschedule_named_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::contains(&call)); assert_eq!( Scheduler::do_schedule_named( @@ -1152,7 +1152,7 @@ mod tests { #[test] fn reschedule_named_perodic_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::contains(&call)); assert_eq!( Scheduler::do_schedule_named( @@ -1214,7 +1214,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, 1000)), + Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), ) .unwrap(); let i = Scheduler::do_schedule( @@ -1222,7 +1222,7 @@ mod tests { None, 127, root(), - 
Call::Logger(LoggerCall::log(42, 1000)), + Call::Logger(LoggerCall::log { i: 42, weight: 1000 }), ) .unwrap(); run_to_block(3); @@ -1244,7 +1244,7 @@ mod tests { Some((3, 3)), 127, root(), - Call::Logger(LoggerCall::log(42, 1000)), + Call::Logger(LoggerCall::log { i: 42, weight: 1000 }), ) .unwrap(); // same id results in error. @@ -1254,7 +1254,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, 1000)) + Call::Logger(LoggerCall::log { i: 69, weight: 1000 }) ) .is_err()); // different id is ok. @@ -1264,7 +1264,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, 1000)), + Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), ) .unwrap(); run_to_block(3); @@ -1286,14 +1286,14 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 127, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); // 69 and 42 do not fit together run_to_block(4); @@ -1311,14 +1311,14 @@ mod tests { None, 0, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 0, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); // With base weights, 69 and 42 should not fit together, but do because of hard // deadlines @@ -1335,14 +1335,14 @@ mod tests { None, 1, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( 
DispatchTime::At(4), None, 0, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); run_to_block(4); assert_eq!(logger::log(), vec![(root(), 69u32), (root(), 42u32)]); @@ -1357,21 +1357,24 @@ mod tests { None, 255, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 3)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 3 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 127, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 126, root(), - Call::Logger(LoggerCall::log(2600, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { + i: 2600, + weight: MaximumSchedulerWeight::get() / 2 + }) )); // 2600 does not fit with 69 or 42, but has higher priority, so will go through @@ -1400,7 +1403,7 @@ mod tests { None, 255, root(), - Call::Logger(LoggerCall::log(3, MaximumSchedulerWeight::get() / 3)) + Call::Logger(LoggerCall::log { i: 3, weight: MaximumSchedulerWeight::get() / 3 }) )); // Anon Periodic assert_ok!(Scheduler::do_schedule( @@ -1408,7 +1411,7 @@ mod tests { Some((1000, 3)), 128, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 3)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 3 }) )); // Anon assert_ok!(Scheduler::do_schedule( @@ -1416,7 +1419,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); // Named Periodic assert_ok!(Scheduler::do_schedule_named( @@ -1425,7 +1428,10 @@ mod tests { Some((1000, 3)), 126, root(), - Call::Logger(LoggerCall::log(2600, MaximumSchedulerWeight::get() / 2)) + 
Call::Logger(LoggerCall::log { + i: 2600, + weight: MaximumSchedulerWeight::get() / 2 + }) )); // Will include the named periodic only @@ -1469,8 +1475,8 @@ mod tests { #[test] fn root_calls_works() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 1000 })); assert_ok!(Scheduler::schedule_named( Origin::root(), 1u32.encode(), @@ -1497,8 +1503,8 @@ mod tests { new_test_ext().execute_with(|| { run_to_block(3); - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 1000 })); assert_err!( Scheduler::schedule_named(Origin::root(), 1u32.encode(), 2, None, 127, call), @@ -1520,8 +1526,8 @@ mod tests { #[test] fn should_use_orign() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 1000 })); assert_ok!(Scheduler::schedule_named( system::RawOrigin::Signed(1).into(), 1u32.encode(), @@ -1552,8 +1558,8 @@ mod tests { #[test] fn should_check_orign() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 1000 })); assert_noop!( Scheduler::schedule_named( system::RawOrigin::Signed(2).into(), @@ 
-1575,8 +1581,10 @@ mod tests { #[test] fn should_check_orign_for_cancel() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log_without_filter(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log_without_filter(42, 1000))); + let call = + Box::new(Call::Logger(LoggerCall::log_without_filter { i: 69, weight: 1000 })); + let call2 = + Box::new(Call::Logger(LoggerCall::log_without_filter { i: 42, weight: 1000 })); assert_ok!(Scheduler::schedule_named( system::RawOrigin::Signed(1).into(), 1u32.encode(), @@ -1626,14 +1634,14 @@ mod tests { Some(ScheduledV1 { maybe_id: None, priority: i as u8 + 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, }), None, Some(ScheduledV1 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), }), ]; @@ -1653,7 +1661,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: root(), _phantom: PhantomData::::default(), @@ -1662,7 +1670,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: root(), _phantom: PhantomData::::default(), @@ -1675,7 +1683,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 11, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: root(), _phantom: PhantomData::::default(), @@ -1684,7 +1692,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + 
call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: root(), _phantom: PhantomData::::default(), @@ -1697,7 +1705,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 12, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: root(), _phantom: PhantomData::::default(), @@ -1706,7 +1714,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: root(), _phantom: PhantomData::::default(), @@ -1729,7 +1737,7 @@ mod tests { Some(Scheduled { maybe_id: None, priority: i as u8 + 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), origin: 3u32, maybe_periodic: None, _phantom: Default::default(), @@ -1739,7 +1747,7 @@ mod tests { maybe_id: Some(b"test".to_vec()), priority: 123, origin: 2u32, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), _phantom: Default::default(), }), @@ -1768,7 +1776,7 @@ mod tests { Some(ScheduledV2::<_, _, OriginCaller, u64> { maybe_id: None, priority: 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: system::RawOrigin::Root.into(), _phantom: PhantomData::::default(), @@ -1777,7 +1785,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: system::RawOrigin::None.into(), _phantom: PhantomData::::default(), @@ -1790,7 +1798,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 11, - call: 
Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: system::RawOrigin::Root.into(), _phantom: PhantomData::::default(), @@ -1799,7 +1807,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: system::RawOrigin::None.into(), _phantom: PhantomData::::default(), @@ -1812,7 +1820,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 12, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: system::RawOrigin::Root.into(), _phantom: PhantomData::::default(), @@ -1821,7 +1829,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: system::RawOrigin::None.into(), _phantom: PhantomData::::default(), diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index dc85e8d2ca811..9d5f156c175d5 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } @@ -28,6 +29,7 @@ sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } default = ["std"] std = [ 
"codec/std", + "scale-info/std", "sp-io/std", "sp-runtime/std", "sp-std/std", diff --git a/frame/scored-pool/src/lib.rs b/frame/scored-pool/src/lib.rs index fc25004eda68d..a5cdb6274f995 100644 --- a/frame/scored-pool/src/lib.rs +++ b/frame/scored-pool/src/lib.rs @@ -136,7 +136,8 @@ pub mod pallet { + Default + FullCodec + MaybeSerializeDeserialize - + Debug; + + Debug + + scale_info::TypeInfo; /// The overarching event type. type Event: From> + IsType<::Event>; diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index b5841319000b7..8f07de2e7a6db 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } @@ -34,6 +35,7 @@ default = ["std", "historical"] historical = ["sp-trie"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "frame-support/std", diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index c559b29f14eec..cc242085bf5e4 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -29,6 +29,7 @@ rand = { version = "0.7.2", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", features = [ "derive", ] } +scale-info = "1.0" sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../../staking/reward-curve" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io" } diff --git a/frame/society/Cargo.toml 
b/frame/society/Cargo.toml index b058733b3ce40..942b2844195f2 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } @@ -30,6 +31,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "rand_chacha/std", "sp-std/std", diff --git a/frame/society/src/lib.rs b/frame/society/src/lib.rs index c19cd35ae7db6..c6d63eed20ac0 100644 --- a/frame/society/src/lib.rs +++ b/frame/society/src/lib.rs @@ -268,6 +268,7 @@ use rand_chacha::{ rand_core::{RngCore, SeedableRng}, ChaChaRng, }; +use scale_info::TypeInfo; use sp_runtime::{ traits::{ AccountIdConversion, CheckedSub, Hash, IntegerSquareRoot, Saturating, StaticLookup, @@ -334,7 +335,7 @@ pub trait Config: system::Config { } /// A vote by a member on a candidate application. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum Vote { /// The member has been chosen to be skeptic and has not yet taken any action. Skeptic, @@ -345,7 +346,7 @@ pub enum Vote { } /// A judgement by the suspension judgement origin on a suspended candidate. 
-#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum Judgement { /// The suspension judgement origin takes no direct judgment /// and places the candidate back into the bid pool. @@ -357,7 +358,7 @@ pub enum Judgement { } /// Details of a payout given as a per-block linear "trickle". -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, Default)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, Default, TypeInfo)] pub struct Payout { /// Total value of the payout. value: Balance, @@ -370,7 +371,7 @@ pub struct Payout { } /// Status of a vouching member. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum VouchingStatus { /// Member is currently vouching for a user. Vouching, @@ -382,7 +383,7 @@ pub enum VouchingStatus { pub type StrikeCount = u32; /// A bid for entry into society. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Bid { /// The bidder/candidate trying to enter society who: AccountId, @@ -393,7 +394,7 @@ pub struct Bid { } /// A vote by a member on a candidate application. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum BidKind { /// The CandidateDeposit was paid for this bid. 
Deposit(Balance), diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index 5859cf27788f6..aba19ba56357a 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -17,6 +17,7 @@ serde = { version = "1.0.126", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -51,6 +52,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "frame-support/std", diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index e424b724b4c25..31b35acdd99aa 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -288,6 +288,7 @@ use frame_support::{ traits::{Currency, Get}, weights::Weight, }; +use scale_info::TypeInfo; use sp_runtime::{ curve::PiecewiseLinear, traits::{AtLeast32BitUnsigned, Convert, Saturating, Zero}, @@ -333,7 +334,7 @@ type NegativeImbalanceOf = <::Currency as Currency< >>::NegativeImbalance; /// Information regarding the active era (era in used in session). -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ActiveEraInfo { /// Index of era. pub index: EraIndex, @@ -347,7 +348,7 @@ pub struct ActiveEraInfo { /// Reward points of an era. Used to split era total payout between validators. /// /// This points will be used to reward validators and their respective nominators. -#[derive(PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct EraRewardPoints { /// Total number of points. 
Equals the sum of reward points for each validator. total: RewardPoint, @@ -356,7 +357,7 @@ pub struct EraRewardPoints { } /// Indicates the initial status of the staker. -#[derive(RuntimeDebug)] +#[derive(RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] pub enum StakerStatus { /// Chilling. @@ -368,7 +369,7 @@ pub enum StakerStatus { } /// A destination account for payment. -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum RewardDestination { /// Pay into the stash account, increasing the amount at stake accordingly. Staked, @@ -389,7 +390,7 @@ impl Default for RewardDestination { } /// Preference of what happens regarding validation. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ValidatorPrefs { /// Reward that validator takes up-front; only the rest is split between themselves and /// nominators. @@ -408,7 +409,7 @@ impl Default for ValidatorPrefs { } /// Just a Balance/BlockNumber tuple to encode when a chunk of funds will be unlocked. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct UnlockChunk { /// Amount of funds to be unlocked. #[codec(compact)] @@ -419,7 +420,7 @@ pub struct UnlockChunk { } /// The ledger of a (bonded) stash. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct StakingLedger { /// The stash account whose balance is actually locked and at stake. pub stash: AccountId, @@ -547,7 +548,7 @@ where } /// A record of the nominations made by a specific account. 
-#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct Nominations { /// The targets of nomination. pub targets: Vec, @@ -563,7 +564,7 @@ pub struct Nominations { } /// The amount of exposure (to slashing) than an individual nominator has. -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct IndividualExposure { /// The stash account of the nominator in question. pub who: AccountId, @@ -573,7 +574,9 @@ pub struct IndividualExposure { } /// A snapshot of the stake backing a single validator in the system. -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug)] +#[derive( + PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug, TypeInfo, +)] pub struct Exposure { /// The total balance backing this validator. #[codec(compact)] @@ -587,7 +590,7 @@ pub struct Exposure { /// A pending slash record. The value of the slash has been computed but not applied yet, /// rather deferred for several eras. -#[derive(Encode, Decode, Default, RuntimeDebug)] +#[derive(Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct UnappliedSlash { /// The stash ID of the offending validator. validator: AccountId, @@ -691,7 +694,7 @@ impl = "Balance")] pub enum Event { /// The era payout has been set; the first balance is the validator-payout; the second is /// the remainder from the maximum amount of reward. 
diff --git a/frame/staking/src/slashing.rs b/frame/staking/src/slashing.rs index 3da79924d0a0e..15ca85b4d046f 100644 --- a/frame/staking/src/slashing.rs +++ b/frame/staking/src/slashing.rs @@ -58,6 +58,7 @@ use frame_support::{ ensure, traits::{Currency, Imbalance, OnUnbalanced}, }; +use scale_info::TypeInfo; use sp_runtime::{ traits::{Saturating, Zero}, DispatchResult, RuntimeDebug, @@ -72,7 +73,7 @@ const REWARD_F1: Perbill = Perbill::from_percent(50); pub type SpanIndex = u32; // A range of start..end eras for a slashing span. -#[derive(Encode, Decode)] +#[derive(Encode, Decode, TypeInfo)] #[cfg_attr(test, derive(Debug, PartialEq))] pub(crate) struct SlashingSpan { pub(crate) index: SpanIndex, @@ -87,7 +88,7 @@ impl SlashingSpan { } /// An encoding of all of a nominator's slashing spans. -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] pub struct SlashingSpans { // the index of the current slashing span of the nominator. different for // every stash, resets when the account hits free balance 0. @@ -180,7 +181,7 @@ impl SlashingSpans { } /// A slashing-span record for a particular stash. 
-#[derive(Encode, Decode, Default)] +#[derive(Encode, Decode, Default, TypeInfo)] pub(crate) struct SpanRecord { slashed: Balance, paid_out: Balance, diff --git a/frame/staking/src/tests.rs b/frame/staking/src/tests.rs index 931ffaa10386c..97dfaa39c84a9 100644 --- a/frame/staking/src/tests.rs +++ b/frame/staking/src/tests.rs @@ -3355,7 +3355,8 @@ fn payout_stakers_handles_weight_refund() { start_active_era(2); // Collect payouts when there are no nominators - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 1)); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 1 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3368,7 +3369,8 @@ fn payout_stakers_handles_weight_refund() { start_active_era(3); // Collect payouts for an era where the validator did not receive any points. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 2)); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 2 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3381,7 +3383,8 @@ fn payout_stakers_handles_weight_refund() { start_active_era(4); // Collect payouts when the validator has `half_max_nom_rewarded` nominators. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 3)); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 3 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3404,14 +3407,16 @@ fn payout_stakers_handles_weight_refund() { start_active_era(6); // Collect payouts when the validator had `half_max_nom_rewarded` nominators. 
- let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 5)); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), max_nom_rewarded_weight); // Try and collect payouts for an era that has already been collected. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 5)); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert!(result.is_err()); diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index f19afd2d61a0b..baacb66d5c751 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -27,6 +28,7 @@ sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/sudo/src/lib.rs b/frame/sudo/src/lib.rs index 6dcb3bf5e44ce..bab93ffcee162 100644 --- a/frame/sudo/src/lib.rs +++ b/frame/sudo/src/lib.rs @@ -249,7 +249,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { /// A sudo just took place. 
\[result\] Sudid(DispatchResult), diff --git a/frame/sudo/src/mock.rs b/frame/sudo/src/mock.rs index 7fd55a618a6b5..dad17384d5603 100644 --- a/frame/sudo/src/mock.rs +++ b/frame/sudo/src/mock.rs @@ -79,7 +79,6 @@ pub mod logger { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { AppendI32(i32, Weight), AppendI32AndAccount(T::AccountId, i32, Weight), diff --git a/frame/sudo/src/tests.rs b/frame/sudo/src/tests.rs index ebd7a11a70f1e..2eb558e9471c4 100644 --- a/frame/sudo/src/tests.rs +++ b/frame/sudo/src/tests.rs @@ -39,12 +39,12 @@ fn sudo_basics() { // Configure a default test environment and set the root `key` to 1. new_test_ext(1).execute_with(|| { // A privileged function should work when `sudo` is passed the root `key` as `origin`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_ok!(Sudo::sudo(Origin::signed(1), call)); assert_eq!(Logger::i32_log(), vec![42i32]); // A privileged function should not work when `sudo` is passed a non-root `key` as `origin`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_noop!(Sudo::sudo(Origin::signed(2), call), Error::::RequireSudo); }); } @@ -56,7 +56,7 @@ fn sudo_emits_events_correctly() { System::set_block_number(1); // Should emit event to indicate success when called with the root `key` and `call` is `Ok`. 
- let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo(Origin::signed(1), call)); System::assert_has_event(TestEvent::Sudo(Event::Sudid(Ok(())))); }) @@ -66,12 +66,12 @@ fn sudo_emits_events_correctly() { fn sudo_unchecked_weight_basics() { new_test_ext(1).execute_with(|| { // A privileged function should work when `sudo` is passed the root `key` as origin. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_ok!(Sudo::sudo_unchecked_weight(Origin::signed(1), call, 1_000)); assert_eq!(Logger::i32_log(), vec![42i32]); // A privileged function should not work when called with a non-root `key`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_noop!( Sudo::sudo_unchecked_weight(Origin::signed(2), call, 1_000), Error::::RequireSudo, @@ -80,8 +80,8 @@ fn sudo_unchecked_weight_basics() { assert_eq!(Logger::i32_log(), vec![42i32]); // Controls the dispatched weight. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1))); - let sudo_unchecked_weight_call = SudoCall::sudo_unchecked_weight(call, 1_000); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1 })); + let sudo_unchecked_weight_call = SudoCall::sudo_unchecked_weight { call, weight: 1_000 }; let info = sudo_unchecked_weight_call.get_dispatch_info(); assert_eq!(info.weight, 1_000); }); @@ -94,7 +94,7 @@ fn sudo_unchecked_weight_emits_events_correctly() { System::set_block_number(1); // Should emit event to indicate success when called with the root `key` and `call` is `Ok`. 
- let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo_unchecked_weight(Origin::signed(1), call, 1_000)); System::assert_has_event(TestEvent::Sudo(Event::Sudid(Ok(())))); }) @@ -134,17 +134,17 @@ fn set_key_emits_events_correctly() { fn sudo_as_basics() { new_test_ext(1).execute_with(|| { // A privileged function will not work when passed to `sudo_as`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_ok!(Sudo::sudo_as(Origin::signed(1), 2, call)); assert!(Logger::i32_log().is_empty()); assert!(Logger::account_log().is_empty()); // A non-privileged function should not work when called with a non-root `key`. - let call = Box::new(Call::Logger(LoggerCall::non_privileged_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::non_privileged_log { i: 42, weight: 1 })); assert_noop!(Sudo::sudo_as(Origin::signed(3), 2, call), Error::::RequireSudo); // A non-privileged function will work when passed to `sudo_as` with the root `key`. - let call = Box::new(Call::Logger(LoggerCall::non_privileged_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::non_privileged_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo_as(Origin::signed(1), 2, call)); assert_eq!(Logger::i32_log(), vec![42i32]); // The correct user makes the call within `sudo_as`. @@ -159,7 +159,7 @@ fn sudo_as_emits_events_correctly() { System::set_block_number(1); // A non-privileged function will work when passed to `sudo_as` with the root `key`. 
- let call = Box::new(Call::Logger(LoggerCall::non_privileged_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::non_privileged_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo_as(Origin::signed(1), 2, call)); System::assert_has_event(TestEvent::Sudo(Event::SudoAsDone(Ok(())))); }); diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 9b515c3aa84ab..b09ed65a114dc 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,8 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -frame-metadata = { version = "14.0.0-dev", default-features = false, path = "../metadata" } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } +frame-metadata = { version = "14.0.0", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -34,6 +35,7 @@ smallvec = "1.4.1" log = { version = "0.4.14", default-features = false } [dev-dependencies] +assert_matches = "1.3.0" pretty_assertions = "0.6.1" frame-system = { version = "4.0.0-dev", path = "../system" } parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } @@ -45,6 +47,7 @@ std = [ "serde", "sp-io/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "sp-tracing/std", diff --git a/frame/support/procedural/src/construct_runtime/expand/call.rs b/frame/support/procedural/src/construct_runtime/expand/call.rs index f847bc6dbfbdd..2532a680e21be 100644 --- a/frame/support/procedural/src/construct_runtime/expand/call.rs +++ 
b/frame/support/procedural/src/construct_runtime/expand/call.rs @@ -54,6 +54,7 @@ pub fn expand_outer_dispatch( Clone, PartialEq, Eq, #scrate::codec::Encode, #scrate::codec::Decode, + #scrate::scale_info::TypeInfo, #scrate::RuntimeDebug, )] pub enum Call { diff --git a/frame/support/procedural/src/construct_runtime/expand/event.rs b/frame/support/procedural/src/construct_runtime/expand/event.rs index a04759ec972b6..798646bf27334 100644 --- a/frame/support/procedural/src/construct_runtime/expand/event.rs +++ b/frame/support/procedural/src/construct_runtime/expand/event.rs @@ -75,6 +75,7 @@ pub fn expand_outer_event( Clone, PartialEq, Eq, #scrate::codec::Encode, #scrate::codec::Decode, + #scrate::scale_info::TypeInfo, #scrate::RuntimeDebug, )] #[allow(non_camel_case_types)] diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index fa12242f4fcd6..c8445e0bbc255 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -26,7 +26,7 @@ pub fn expand_runtime_metadata( scrate: &TokenStream, extrinsic: &TypePath, ) -> TokenStream { - let modules = pallet_declarations + let pallets = pallet_declarations .iter() .filter_map(|pallet_declaration| { pallet_declaration.find_part("Pallet").map(|_| { @@ -42,21 +42,21 @@ pub fn expand_runtime_metadata( .map(|(decl, filtered_names)| { let name = &decl.name; let index = &decl.index; - let storage = expand_pallet_metadata_storage(&filtered_names, runtime, scrate, decl); - let calls = expand_pallet_metadata_calls(&filtered_names, runtime, scrate, decl); + let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); + let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); - let constants = expand_pallet_metadata_constants(runtime, 
scrate, decl); - let errors = expand_pallet_metadata_errors(runtime, scrate, decl); + let constants = expand_pallet_metadata_constants(runtime, decl); + let errors = expand_pallet_metadata_errors(runtime, decl); quote! { - #scrate::metadata::ModuleMetadata { - name: #scrate::metadata::DecodeDifferent::Encode(stringify!(#name)), + #scrate::metadata::PalletMetadata { + name: stringify!(#name), index: #index, storage: #storage, calls: #calls, event: #event, constants: #constants, - errors: #errors, + error: #errors, } } }) @@ -65,20 +65,26 @@ pub fn expand_runtime_metadata( quote! { impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { - #scrate::metadata::RuntimeMetadataLastVersion { - modules: #scrate::metadata::DecodeDifferent::Encode(&[ #(#modules),* ]), - extrinsic: #scrate::metadata::ExtrinsicMetadata { + #scrate::metadata::RuntimeMetadataLastVersion::new( + #scrate::sp_std::vec![ #(#pallets),* ], + #scrate::metadata::ExtrinsicMetadata { + ty: #scrate::scale_info::meta_type::<#extrinsic>(), version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, signed_extensions: < < #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension - >::identifier() + >::metadata() .into_iter() - .map(#scrate::metadata::DecodeDifferent::Encode) + .map(|meta| #scrate::metadata::SignedExtensionMetadata { + identifier: meta.identifier, + ty: meta.ty, + additional_signed: meta.additional_signed, + }) .collect(), }, - }.into() + #scrate::scale_info::meta_type::<#runtime>() + ).into() } } } @@ -87,7 +93,6 @@ pub fn expand_runtime_metadata( fn expand_pallet_metadata_storage( filtered_names: &[&'static str], runtime: &Ident, - scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { if filtered_names.contains(&"Storage") { @@ -95,11 +100,7 @@ fn expand_pallet_metadata_storage( let path = &decl.path; quote! 
{ - Some(#scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - #path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata - ) - )) + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) } } else { quote!(None) @@ -109,7 +110,6 @@ fn expand_pallet_metadata_storage( fn expand_pallet_metadata_calls( filtered_names: &[&'static str], runtime: &Ident, - scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { if filtered_names.contains(&"Call") { @@ -117,11 +117,7 @@ fn expand_pallet_metadata_calls( let path = &decl.path; quote! { - Some(#scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - #path::Pallet::<#runtime #(, #path::#instance)*>::call_functions - ) - )) + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) } } else { quote!(None) @@ -150,45 +146,31 @@ fn expand_pallet_metadata_events( }; quote! { - Some(#scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode(#pallet_event::metadata) - )) + Some( + #scrate::metadata::PalletEventMetadata { + ty: #scrate::scale_info::meta_type::<#pallet_event>() + } + ) } } else { quote!(None) } } -fn expand_pallet_metadata_constants( - runtime: &Ident, - scrate: &TokenStream, - decl: &Pallet, -) -> TokenStream { +fn expand_pallet_metadata_constants(runtime: &Ident, decl: &Pallet) -> TokenStream { let path = &decl.path; let instance = decl.instance.as_ref().into_iter(); quote! { - #scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - #path::Pallet::<#runtime #(, #path::#instance)*>::module_constants_metadata - ) - ) + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() } } -fn expand_pallet_metadata_errors( - runtime: &Ident, - scrate: &TokenStream, - decl: &Pallet, -) -> TokenStream { +fn expand_pallet_metadata_errors(runtime: &Ident, decl: &Pallet) -> TokenStream { let path = &decl.path; let instance = decl.instance.as_ref().into_iter(); quote! 
{ - #scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - <#path::Pallet::<#runtime #(, #path::#instance)*> as #scrate::metadata::ModuleErrorMetadata>::metadata - ) - ) + #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() } } diff --git a/frame/support/procedural/src/construct_runtime/expand/origin.rs b/frame/support/procedural/src/construct_runtime/expand/origin.rs index 10ab9e9347eb5..a65ad78527ff7 100644 --- a/frame/support/procedural/src/construct_runtime/expand/origin.rs +++ b/frame/support/procedural/src/construct_runtime/expand/origin.rs @@ -171,7 +171,10 @@ pub fn expand_outer_origin( } } - #[derive(Clone, PartialEq, Eq, #scrate::RuntimeDebug, #scrate::codec::Encode, #scrate::codec::Decode)] + #[derive( + Clone, PartialEq, Eq, #scrate::RuntimeDebug, #scrate::codec::Encode, + #scrate::codec::Decode, #scrate::scale_info::TypeInfo, + )] #[allow(non_camel_case_types)] pub enum OriginCaller { #[codec(index = #system_index)] diff --git a/frame/support/procedural/src/construct_runtime/mod.rs b/frame/support/procedural/src/construct_runtime/mod.rs index 402cb5458851d..8aacd8f0aa810 100644 --- a/frame/support/procedural/src/construct_runtime/mod.rs +++ b/frame/support/procedural/src/construct_runtime/mod.rs @@ -151,7 +151,10 @@ fn construct_runtime_parsed(definition: RuntimeDefinition) -> Result Result),* > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { @@ -60,7 +60,7 @@ pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result),* > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 23ea9be9eac7f..8f7bcdccaf22d 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -16,7 +16,6 @@ // limitations under the License. 
use crate::{pallet::Def, COUNTER}; -use frame_support_procedural_tools::clean_type_string; use syn::spanned::Spanned; /// @@ -43,6 +42,15 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let pallet_ident = &def.pallet_struct.pallet; let fn_name = methods.iter().map(|method| &method.name).collect::>(); + let new_call_variant_fn_name = fn_name + .iter() + .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) + .collect::>(); + + let new_call_variant_doc = fn_name + .iter() + .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) + .collect::>(); let fn_weight = methods.iter().map(|method| &method.weight); @@ -53,6 +61,42 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .map(|method| method.args.iter().map(|(_, name, _)| name.clone()).collect::>()) .collect::>(); + let args_name_stripped = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| { + syn::Ident::new(&name.to_string().trim_start_matches('_'), name.span()) + }) + .collect::>() + }) + .collect::>(); + + let make_args_name_pattern = |ref_tok| { + args_name + .iter() + .zip(args_name_stripped.iter()) + .map(|(args_name, args_name_stripped)| { + args_name + .iter() + .zip(args_name_stripped) + .map(|(args_name, args_name_stripped)| { + if args_name == args_name_stripped { + quote::quote!( #ref_tok #args_name ) + } else { + quote::quote!( #args_name_stripped: #ref_tok #args_name ) + } + }) + .collect::>() + }) + .collect::>() + }; + + let args_name_pattern = make_args_name_pattern(None); + let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); + let args_type = methods .iter() .map(|method| method.args.iter().map(|(_, _, type_)| type_.clone()).collect::>()) @@ -72,21 +116,6 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .collect::>() }); - let args_metadata_type = methods.iter().map(|method| { - method - .args - .iter() - .map(|(is_compact, _, type_)| { - let final_type = 
if *is_compact { - quote::quote_spanned!(type_.span() => Compact<#type_>) - } else { - quote::quote!(#type_) - }; - clean_type_string(&final_type.to_string()) - }) - .collect::>() - }); - let default_docs = [syn::parse_quote!( r"Contains one variant per dispatchable that can be called by an extrinsic." )]; @@ -131,9 +160,11 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::PartialEqNoBound, #frame_support::codec::Encode, #frame_support::codec::Decode, + #frame_support::scale_info::TypeInfo, )] #[codec(encode_bound())] #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = "always")] #[allow(non_camel_case_types)] pub enum #call_ident<#type_decl_bounded_gen> #where_clause { #[doc(hidden)] @@ -142,7 +173,25 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::sp_std::marker::PhantomData<(#type_use_gen,)>, #frame_support::Never, ), - #( #( #[doc = #fn_doc] )* #fn_name( #( #args_compact_attr #args_type ),* ), )* + #( + #( #[doc = #fn_doc] )* + #fn_name { + #( #args_compact_attr #args_name_stripped: #args_type ),* + }, + )* + } + + impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { + #( + #[doc = #new_call_variant_doc] + pub fn #new_call_variant_fn_name( + #( #args_name_stripped: #args_type ),* + ) -> Self { + Self::#fn_name { + #( #args_name_stripped ),* + } + } + )* } impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo @@ -152,7 +201,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { match *self { #( - Self::#fn_name ( #( ref #args_name, )* ) => { + Self::#fn_name { #( #args_name_pattern_ref, )* } => { let __pallet_base_weight = #fn_weight; let __pallet_weight = < @@ -186,7 +235,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { { fn get_call_name(&self) -> &'static str { match *self { - #( Self::#fn_name(..) 
=> stringify!(#fn_name), )* + #( Self::#fn_name { .. } => stringify!(#fn_name), )* Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), } } @@ -207,7 +256,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { ) -> #frame_support::dispatch::DispatchResultWithPostInfo { match self { #( - Self::#fn_name( #( #args_name, )* ) => { + Self::#fn_name { #( #args_name_pattern, )* } => { #frame_support::sp_tracing::enter_span!( #frame_support::sp_tracing::trace_span!(stringify!(#fn_name)) ); @@ -231,30 +280,8 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { #[doc(hidden)] - #[allow(dead_code)] - pub fn call_functions() -> &'static [#frame_support::dispatch::FunctionMetadata] { - &[ #( - #frame_support::dispatch::FunctionMetadata { - name: #frame_support::dispatch::DecodeDifferent::Encode( - stringify!(#fn_name) - ), - arguments: #frame_support::dispatch::DecodeDifferent::Encode( - &[ #( - #frame_support::dispatch::FunctionArgumentMetadata { - name: #frame_support::dispatch::DecodeDifferent::Encode( - stringify!(#args_name) - ), - ty: #frame_support::dispatch::DecodeDifferent::Encode( - #args_metadata_type - ), - }, - )* ] - ), - documentation: #frame_support::dispatch::DecodeDifferent::Encode( - &[ #( #fn_doc ),* ] - ), - }, - )* ] + pub fn call_functions() -> #frame_support::metadata::PalletCallMetadata { + #frame_support::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() } } ) diff --git a/frame/support/procedural/src/pallet/expand/config.rs b/frame/support/procedural/src/pallet/expand/config.rs index 17101b0be8f53..dad26ccad6dc1 100644 --- a/frame/support/procedural/src/pallet/expand/config.rs +++ b/frame/support/procedural/src/pallet/expand/config.rs @@ -15,7 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::pallet::{parse::helper::get_doc_literals, Def}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; /// /// * Generate default rust doc diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index fcf77ae8e4b7f..7cc245e8089df 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -16,8 +16,6 @@ // limitations under the License. use crate::pallet::Def; -use frame_support_procedural_tools::clean_type_string; -use quote::ToTokens; struct ConstDef { /// Name of the associated type. @@ -35,7 +33,6 @@ struct ConstDef { pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_decl_gen = &def.type_decl_generics(proc_macro2::Span::call_site()); let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); let pallet_ident = &def.pallet_struct.pallet; @@ -74,50 +71,17 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { let consts = config_consts.chain(extra_consts).map(|const_| { let const_type = &const_.type_; - let const_type_str = clean_type_string(&const_type.to_token_stream().to_string()); let ident = &const_.ident; let ident_str = format!("{}", ident); let doc = const_.doc.clone().into_iter(); let default_byte_impl = &const_.default_byte_impl; - let default_byte_getter = - syn::Ident::new(&format!("{}DefaultByteGetter", ident), ident.span()); quote::quote!({ - #[allow(non_upper_case_types)] - #[allow(non_camel_case_types)] - struct #default_byte_getter<#type_decl_gen>( - #frame_support::sp_std::marker::PhantomData<(#type_use_gen)> - ); - - impl<#type_impl_gen> #frame_support::dispatch::DefaultByte for - #default_byte_getter<#type_use_gen> - #completed_where_clause - { - fn default_byte(&self) -> 
#frame_support::sp_std::vec::Vec { - #default_byte_impl - } - } - - unsafe impl<#type_impl_gen> Send for #default_byte_getter<#type_use_gen> - #completed_where_clause - {} - unsafe impl<#type_impl_gen> Sync for #default_byte_getter<#type_use_gen> - #completed_where_clause - {} - - #frame_support::dispatch::ModuleConstantMetadata { - name: #frame_support::dispatch::DecodeDifferent::Encode(#ident_str), - ty: #frame_support::dispatch::DecodeDifferent::Encode(#const_type_str), - value: #frame_support::dispatch::DecodeDifferent::Encode( - #frame_support::dispatch::DefaultByteGetter( - &#default_byte_getter::<#type_use_gen>( - #frame_support::sp_std::marker::PhantomData - ) - ) - ), - documentation: #frame_support::dispatch::DecodeDifferent::Encode( - &[ #( #doc ),* ] - ), + #frame_support::metadata::PalletConstantMetadata { + name: #ident_str, + ty: #frame_support::scale_info::meta_type::<#const_type>(), + value: { #default_byte_impl }, + docs: #frame_support::sp_std::vec![ #( #doc ),* ], } }) }); @@ -126,10 +90,10 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ #[doc(hidden)] - pub fn module_constants_metadata() - -> &'static [#frame_support::dispatch::ModuleConstantMetadata] + pub fn pallet_constants_metadata() + -> #frame_support::sp_std::vec::Vec<#frame_support::metadata::PalletConstantMetadata> { - &[ #( #consts ),* ] + #frame_support::sp_std::vec![ #( #consts ),* ] } } ) diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 19c4296ad02f5..7a058bb32c922 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -15,11 +15,11 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::pallet::{parse::helper::get_doc_literals, Def}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; /// /// * impl various trait on Error -/// * impl ModuleErrorMetadata for Error pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { let error = if let Some(error) = &def.error { error } else { return Default::default() }; @@ -32,6 +32,7 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { let phantom_variant: syn::Variant = syn::parse_quote!( #[doc(hidden)] + #[codec(skip)] __Ignore( #frame_support::sp_std::marker::PhantomData<(#type_use_gen)>, #frame_support::Never, @@ -47,16 +48,6 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { quote::quote_spanned!(error.attr_span => Self::#variant => #variant_str,) }); - let metadata = error.variants.iter().map(|(variant, doc)| { - let variant_str = format!("{}", variant); - quote::quote_spanned!(error.attr_span => - #frame_support::error::ErrorMetadata { - name: #frame_support::error::DecodeDifferent::Encode(#variant_str), - documentation: #frame_support::error::DecodeDifferent::Encode(&[ #( #doc, )* ]), - }, - ) - }); - let error_item = { let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; if let syn::Item::Enum(item) = item { @@ -67,6 +58,13 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { }; error_item.variants.insert(0, phantom_variant); + // derive TypeInfo for error metadata + error_item + .attrs + .push(syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] )); + error_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#type_use_gen), capture_docs = "always")] + )); if get_doc_literals(&error_item.attrs).is_empty() { error_item.attrs.push(syn::parse_quote!( @@ -130,14 +128,5 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { } } } - - impl<#type_impl_gen> #frame_support::error::ModuleErrorMetadata - for #error_ident<#type_use_gen> - 
#config_where_clause - { - fn metadata() -> &'static [#frame_support::error::ErrorMetadata] { - &[ #( #metadata )* ] - } - } ) } diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 2a2a3020a96b8..ebd2d7aeabaff 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -16,9 +16,10 @@ // limitations under the License. use crate::{ - pallet::{parse::helper::get_doc_literals, Def}, + pallet::{parse::event::PalletEventDepositAttr, Def}, COUNTER, }; +use frame_support_procedural_tools::get_doc_literals; use syn::{spanned::Spanned, Ident}; /// @@ -69,20 +70,6 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); - let metadata = event.metadata.iter().map(|(ident, args, docs)| { - let name = format!("{}", ident); - quote::quote_spanned!(event.attr_span => - #frame_support::event::EventMetadata { - name: #frame_support::event::DecodeDifferent::Encode(#name), - arguments: #frame_support::event::DecodeDifferent::Encode(&[ - #( #args, )* - ]), - documentation: #frame_support::event::DecodeDifferent::Encode(&[ - #( #docs, )* - ]), - }, - ) - }); let event_item = { let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; @@ -126,15 +113,23 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::RuntimeDebugNoBound, #frame_support::codec::Encode, #frame_support::codec::Decode, + #frame_support::scale_info::TypeInfo, )] )); - let deposit_event = if let Some((fn_vis, fn_span)) = &event.deposit_event { + // skip requirement for type params to implement `TypeInfo`, and require docs capture + event_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#event_use_gen), capture_docs = 
"always")] + )); + + let deposit_event = if let Some(deposit_event) = &event.deposit_event { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); let trait_use_gen = &def.trait_use_generics(event.attr_span); let type_impl_gen = &def.type_impl_generics(event.attr_span); let type_use_gen = &def.type_use_generics(event.attr_span); + let PalletEventDepositAttr { fn_vis, fn_span, .. } = deposit_event; + quote::quote_spanned!(*fn_span => impl<#type_impl_gen> Pallet<#type_use_gen> #completed_where_clause { #fn_vis fn deposit_event(event: Event<#event_use_gen>) { @@ -174,13 +169,5 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { fn from(_: #event_ident<#event_use_gen>) {} } - - impl<#event_impl_gen> #event_ident<#event_use_gen> #event_where_clause { - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata() -> &'static [#frame_support::event::EventMetadata] { - &[ #( #metadata )* ] - } - } ) } diff --git a/frame/support/procedural/src/pallet/expand/genesis_config.rs b/frame/support/procedural/src/pallet/expand/genesis_config.rs index 8c540209f40c2..4bbba2c05908e 100644 --- a/frame/support/procedural/src/pallet/expand/genesis_config.rs +++ b/frame/support/procedural/src/pallet/expand/genesis_config.rs @@ -15,10 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::{ - pallet::{parse::helper::get_doc_literals, Def}, - COUNTER, -}; +use crate::{pallet::Def, COUNTER}; +use frame_support_procedural_tools::get_doc_literals; use syn::{spanned::Spanned, Ident}; /// diff --git a/frame/support/procedural/src/pallet/expand/mod.rs b/frame/support/procedural/src/pallet/expand/mod.rs index cfb61e700ac29..1c8883977c765 100644 --- a/frame/support/procedural/src/pallet/expand/mod.rs +++ b/frame/support/procedural/src/pallet/expand/mod.rs @@ -32,7 +32,8 @@ mod store_trait; mod type_value; mod validate_unsigned; -use crate::pallet::{parse::helper::get_doc_literals, Def}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; /// Merge where clause together, `where` token span is taken from the first not none one. diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index 40fc39b161f15..a217742fec55d 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -15,13 +15,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::pallet::{expand::merge_where_clauses, parse::helper::get_doc_literals, Def}; +use crate::pallet::{expand::merge_where_clauses, Def}; +use frame_support_procedural_tools::get_doc_literals; /// /// * Add derive trait on Pallet /// * Implement GetStorageVersion on Pallet /// * Implement OnGenesis on Pallet -/// * Implement ModuleErrorMetadata on Pallet +/// * Implement `fn error_metadata` on Pallet /// * declare Module type alias for construct_runtime /// * replace the first field type of `struct Pallet` with `PhantomData` if it is `_` /// * implementation of `PalletInfoAccess` information @@ -76,28 +77,22 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { )] )); - let module_error_metadata = if let Some(error_def) = &def.error { + let pallet_error_metadata = if let Some(error_def) = &def.error { let error_ident = &error_def.error; quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #frame_support::error::ModuleErrorMetadata - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn metadata() -> &'static [#frame_support::error::ErrorMetadata] { - < - #error_ident<#type_use_gen> as #frame_support::error::ModuleErrorMetadata - >::metadata() + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + pub fn error_metadata() -> Option<#frame_support::metadata::PalletErrorMetadata> { + Some(#frame_support::metadata::PalletErrorMetadata { + ty: #frame_support::scale_info::meta_type::<#error_ident<#type_use_gen>>() + }) } } ) } else { quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #frame_support::error::ModuleErrorMetadata - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn metadata() -> &'static [#frame_support::error::ErrorMetadata] { - &[] + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + pub fn error_metadata() -> Option<#frame_support::metadata::PalletErrorMetadata> { + None } } ) @@ -159,7 +154,7 @@ pub fn 
expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { }; quote::quote_spanned!(def.pallet_struct.attr_span => - #module_error_metadata + #pallet_error_metadata /// Type alias to `Pallet`, to be used by `construct_runtime`. /// diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index ac03a41deb995..0f7133f10dd47 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -19,7 +19,6 @@ use crate::pallet::{ parse::storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, Def, }; -use frame_support_procedural_tools::clean_type_string; use std::collections::HashSet; /// Generate the prefix_ident related the the storage. @@ -176,89 +175,15 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { let cfg_attrs = &storage.cfg_attrs; - let metadata_trait = match &storage.metadata { - Metadata::Value { .. } => quote::quote_spanned!(storage.attr_span => - #frame_support::storage::types::StorageValueMetadata - ), - Metadata::Map { .. } => quote::quote_spanned!(storage.attr_span => - #frame_support::storage::types::StorageMapMetadata - ), - Metadata::DoubleMap { .. } => quote::quote_spanned!(storage.attr_span => - #frame_support::storage::types::StorageDoubleMapMetadata - ), - Metadata::NMap { .. 
} => quote::quote_spanned!(storage.attr_span => - #frame_support::storage::types::StorageNMapMetadata - ), - }; - - let ty = match &storage.metadata { - Metadata::Value { value } => { - let value = clean_type_string("e::quote!(#value).to_string()); - quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::Plain( - #frame_support::metadata::DecodeDifferent::Encode(#value) - ) - ) - }, - Metadata::Map { key, value } => { - let value = clean_type_string("e::quote!(#value).to_string()); - let key = clean_type_string("e::quote!(#key).to_string()); - quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::Map { - hasher: <#full_ident as #metadata_trait>::HASHER, - key: #frame_support::metadata::DecodeDifferent::Encode(#key), - value: #frame_support::metadata::DecodeDifferent::Encode(#value), - unused: false, - } - ) - }, - Metadata::DoubleMap { key1, key2, value } => { - let value = clean_type_string("e::quote!(#value).to_string()); - let key1 = clean_type_string("e::quote!(#key1).to_string()); - let key2 = clean_type_string("e::quote!(#key2).to_string()); - quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::DoubleMap { - hasher: <#full_ident as #metadata_trait>::HASHER1, - key2_hasher: <#full_ident as #metadata_trait>::HASHER2, - key1: #frame_support::metadata::DecodeDifferent::Encode(#key1), - key2: #frame_support::metadata::DecodeDifferent::Encode(#key2), - value: #frame_support::metadata::DecodeDifferent::Encode(#value), - } - ) - }, - Metadata::NMap { keys, value, .. 
} => { - let keys = keys - .iter() - .map(|key| clean_type_string("e::quote!(#key).to_string())) - .collect::>(); - let value = clean_type_string("e::quote!(#value).to_string()); - quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::NMap { - keys: #frame_support::metadata::DecodeDifferent::Encode(&[ - #( #keys, )* - ]), - hashers: #frame_support::metadata::DecodeDifferent::Encode( - <#full_ident as #metadata_trait>::HASHERS, - ), - value: #frame_support::metadata::DecodeDifferent::Encode(#value), - } - ) - }, - }; - quote::quote_spanned!(storage.attr_span => #(#cfg_attrs)* #frame_support::metadata::StorageEntryMetadata { - name: #frame_support::metadata::DecodeDifferent::Encode( - <#full_ident as #metadata_trait>::NAME - ), - modifier: <#full_ident as #metadata_trait>::MODIFIER, - ty: #ty, - default: #frame_support::metadata::DecodeDifferent::Encode( - <#full_ident as #metadata_trait>::DEFAULT - ), - documentation: #frame_support::metadata::DecodeDifferent::Encode(&[ + name: <#full_ident as #frame_support::storage::StorageEntryMetadata>::NAME, + modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, + ty: <#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), + default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), + docs: #frame_support::sp_std::vec![ #( #docs, )* - ]), + ], } ) }); @@ -419,18 +344,16 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #completed_where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #frame_support::metadata::StorageMetadata { - #frame_support::metadata::StorageMetadata { - prefix: #frame_support::metadata::DecodeDifferent::Encode( - < - ::PalletInfo as - #frame_support::traits::PalletInfo - >::name::<#pallet_ident<#type_use_gen>>() - .expect("Every active pallet has a name in the runtime; qed") - ), - entries: #frame_support::metadata::DecodeDifferent::Encode( - &[ #( #entries, )* ] - ), + pub fn 
storage_metadata() -> #frame_support::metadata::PalletStorageMetadata { + #frame_support::metadata::PalletStorageMetadata { + prefix: < + ::PalletInfo as + #frame_support::traits::PalletInfo + >::name::<#pallet_ident<#type_use_gen>>() + .expect("Every active pallet has a name in the runtime; qed"), + entries: #frame_support::sp_std::vec![ + #( #entries, )* + ], } } } diff --git a/frame/support/procedural/src/pallet/parse/call.rs b/frame/support/procedural/src/pallet/parse/call.rs index d022e8025aab2..0563568f33311 100644 --- a/frame/support/procedural/src/pallet/parse/call.rs +++ b/frame/support/procedural/src/pallet/parse/call.rs @@ -16,6 +16,7 @@ // limitations under the License. use super::helper; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; use syn::spanned::Spanned; @@ -219,7 +220,7 @@ impl CallDef { args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); } - let docs = helper::get_doc_literals(&method.attrs); + let docs = get_doc_literals(&method.attrs); methods.push(CallVariantDef { name: method.sig.ident.clone(), weight, args, docs }); } else { @@ -234,7 +235,7 @@ impl CallDef { instances, methods, where_clause: item.generics.where_clause.clone(), - docs: helper::get_doc_literals(&item.attrs), + docs: get_doc_literals(&item.attrs), }) } } diff --git a/frame/support/procedural/src/pallet/parse/config.rs b/frame/support/procedural/src/pallet/parse/config.rs index b006aadf51a07..712c20ffc7b4c 100644 --- a/frame/support/procedural/src/pallet/parse/config.rs +++ b/frame/support/procedural/src/pallet/parse/config.rs @@ -17,6 +17,7 @@ use super::helper; use core::convert::TryFrom; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; use syn::spanned::Spanned; @@ -69,7 +70,7 @@ impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { let err = |span, msg| { syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)) }; - let doc = helper::get_doc_literals(&trait_ty.attrs); + let doc 
= get_doc_literals(&trait_ty.attrs); let ident = trait_ty.ident.clone(); let bound = trait_ty .bounds diff --git a/frame/support/procedural/src/pallet/parse/error.rs b/frame/support/procedural/src/pallet/parse/error.rs index 9b96a1876917d..9c9a95105c53c 100644 --- a/frame/support/procedural/src/pallet/parse/error.rs +++ b/frame/support/procedural/src/pallet/parse/error.rs @@ -16,6 +16,7 @@ // limitations under the License. use super::helper; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; use syn::spanned::Spanned; @@ -80,7 +81,7 @@ impl ErrorDef { return Err(syn::Error::new(span, msg)) } - Ok((variant.ident.clone(), helper::get_doc_literals(&variant.attrs))) + Ok((variant.ident.clone(), get_doc_literals(&variant.attrs))) }) .collect::>()?; diff --git a/frame/support/procedural/src/pallet/parse/event.rs b/frame/support/procedural/src/pallet/parse/event.rs index d66e35e09025b..33de4aca8b599 100644 --- a/frame/support/procedural/src/pallet/parse/event.rs +++ b/frame/support/procedural/src/pallet/parse/event.rs @@ -16,13 +16,11 @@ // limitations under the License. use super::helper; -use frame_support_procedural_tools::clean_type_string; use quote::ToTokens; use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(metadata); syn::custom_keyword!(Event); syn::custom_keyword!(pallet); syn::custom_keyword!(generate_deposit); @@ -35,60 +33,31 @@ pub struct EventDef { pub index: usize, /// The keyword Event used (contains span). pub event: keyword::Event, - /// Event metadatas: `(name, args, docs)`. - pub metadata: Vec<(syn::Ident, Vec, Vec)>, /// A set of usage of instance, must be check for consistency with trait. pub instances: Vec, /// The kind of generic the type `Event` has. pub gen_kind: super::GenericKind, /// Whether the function `deposit_event` must be generated. 
- pub deposit_event: Option<(syn::Visibility, proc_macro2::Span)>, + pub deposit_event: Option, /// Where clause used in event definition. pub where_clause: Option, /// The span of the pallet::event attribute. pub attr_span: proc_macro2::Span, } -/// Attribute for Event: defines metadata name to use. +/// Attribute for a pallet's Event. /// /// Syntax is: -/// * `#[pallet::metadata(SomeType = MetadataName, ...)]` /// * `#[pallet::generate_deposit($vis fn deposit_event)]` -enum PalletEventAttr { - Metadata { - metadata: Vec<(syn::Type, String)>, - // Span of the attribute - span: proc_macro2::Span, - }, - DepositEvent { - fn_vis: syn::Visibility, - // Span for the keyword deposit_event - fn_span: proc_macro2::Span, - // Span of the attribute - span: proc_macro2::Span, - }, +pub struct PalletEventDepositAttr { + pub fn_vis: syn::Visibility, + // Span for the keyword deposit_event + pub fn_span: proc_macro2::Span, + // Span of the attribute + pub span: proc_macro2::Span, } -impl PalletEventAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::Metadata { span, .. } => *span, - Self::DepositEvent { span, .. } => *span, - } - } -} - -/// Parse for syntax `$Type = "$SomeString"`. 
-fn parse_event_metadata_element( - input: syn::parse::ParseStream, -) -> syn::Result<(syn::Type, String)> { - let typ = input.parse::()?; - input.parse::()?; - let ident = input.parse::()?; - Ok((typ, ident.value())) -} - -impl syn::parse::Parse for PalletEventAttr { +impl syn::parse::Parse for PalletEventDepositAttr { fn parse(input: syn::parse::ParseStream) -> syn::Result { input.parse::()?; let content; @@ -96,56 +65,33 @@ impl syn::parse::Parse for PalletEventAttr { content.parse::()?; content.parse::()?; - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::metadata) { - let span = content.parse::()?.span(); - let metadata_content; - syn::parenthesized!(metadata_content in content); - - let metadata = metadata_content - .parse_terminated::<_, syn::Token![,]>(parse_event_metadata_element)? - .into_pairs() - .map(syn::punctuated::Pair::into_value) - .collect(); - - Ok(PalletEventAttr::Metadata { metadata, span }) - } else if lookahead.peek(keyword::generate_deposit) { - let span = content.parse::()?.span(); - - let generate_content; - syn::parenthesized!(generate_content in content); - let fn_vis = generate_content.parse::()?; - generate_content.parse::()?; - let fn_span = generate_content.parse::()?.span(); - - Ok(PalletEventAttr::DepositEvent { fn_vis, span, fn_span }) - } else { - Err(lookahead.error()) - } + let span = content.parse::()?.span(); + let generate_content; + syn::parenthesized!(generate_content in content); + let fn_vis = generate_content.parse::()?; + generate_content.parse::()?; + let fn_span = generate_content.parse::()?.span(); + + Ok(PalletEventDepositAttr { fn_vis, span, fn_span }) } } struct PalletEventAttrInfo { - metadata: Option>, - deposit_event: Option<(syn::Visibility, proc_macro2::Span)>, + deposit_event: Option, } impl PalletEventAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { - let mut metadata = None; + fn from_attrs(attrs: Vec) -> syn::Result { let mut deposit_event = None; for attr in attrs { - match 
attr { - PalletEventAttr::Metadata { metadata: m, .. } if metadata.is_none() => - metadata = Some(m), - PalletEventAttr::DepositEvent { fn_vis, fn_span, .. } - if deposit_event.is_none() => - deposit_event = Some((fn_vis, fn_span)), - attr => return Err(syn::Error::new(attr.span(), "Duplicate attribute")), + if deposit_event.is_none() { + deposit_event = Some(attr) + } else { + return Err(syn::Error::new(attr.span, "Duplicate attribute")) } } - Ok(PalletEventAttrInfo { metadata, deposit_event }) + Ok(PalletEventAttrInfo { deposit_event }) } } @@ -161,9 +107,9 @@ impl EventDef { return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected item enum")) }; - let event_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + let event_attrs: Vec = + helper::take_item_pallet_attrs(&mut item.attrs)?; let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; - let metadata = attr_info.metadata.unwrap_or_else(Vec::new); let deposit_event = attr_info.deposit_event; if !matches!(item.vis, syn::Visibility::Public(_)) { @@ -190,39 +136,6 @@ impl EventDef { let event = syn::parse2::(item.ident.to_token_stream())?; - let metadata = item - .variants - .iter() - .map(|variant| { - let name = variant.ident.clone(); - let docs = helper::get_doc_literals(&variant.attrs); - let args = variant - .fields - .iter() - .map(|field| { - metadata - .iter() - .find(|m| m.0 == field.ty) - .map(|m| m.1.clone()) - .unwrap_or_else(|| { - clean_type_string(&field.ty.to_token_stream().to_string()) - }) - }) - .collect(); - - (name, args, docs) - }) - .collect(); - - Ok(EventDef { - attr_span, - index, - metadata, - instances, - deposit_event, - event, - gen_kind, - where_clause, - }) + Ok(EventDef { attr_span, index, instances, deposit_event, event, gen_kind, where_clause }) } } diff --git a/frame/support/procedural/src/pallet/parse/extra_constants.rs b/frame/support/procedural/src/pallet/parse/extra_constants.rs index 71208f3329a10..c1324df6c22f1 100644 --- 
a/frame/support/procedural/src/pallet/parse/extra_constants.rs +++ b/frame/support/procedural/src/pallet/parse/extra_constants.rs @@ -16,6 +16,7 @@ // limitations under the License. use super::helper; +use frame_support_procedural_tools::get_doc_literals; use syn::spanned::Spanned; /// List of additional token to be used for parsing. @@ -104,7 +105,7 @@ impl ExtraConstantsDef { extra_constants.push(ExtraConstantDef { ident: method.sig.ident.clone(), type_, - doc: helper::get_doc_literals(&method.attrs), + doc: get_doc_literals(&method.attrs), }); } diff --git a/frame/support/procedural/src/pallet/parse/helper.rs b/frame/support/procedural/src/pallet/parse/helper.rs index 211f1ed5ee428..2590e86b58b0e 100644 --- a/frame/support/procedural/src/pallet/parse/helper.rs +++ b/frame/support/procedural/src/pallet/parse/helper.rs @@ -139,24 +139,6 @@ impl MutItemAttrs for syn::ItemMod { } } -/// Return all doc attributes literals found. -pub fn get_doc_literals(attrs: &Vec) -> Vec { - attrs - .iter() - .filter_map(|attr| { - if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { - if meta.path.get_ident().map_or(false, |ident| ident == "doc") { - Some(meta.lit) - } else { - None - } - } else { - None - } - }) - .collect() -} - /// Parse for `()` struct Unit; impl syn::parse::Parse for Unit { diff --git a/frame/support/procedural/src/pallet/parse/storage.rs b/frame/support/procedural/src/pallet/parse/storage.rs index 5df7bc132dff4..e58b5d2048863 100644 --- a/frame/support/procedural/src/pallet/parse/storage.rs +++ b/frame/support/procedural/src/pallet/parse/storage.rs @@ -16,6 +16,7 @@ // limitations under the License. 
use super::helper; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; use std::collections::HashMap; use syn::spanned::Spanned; @@ -609,7 +610,7 @@ impl StorageDef { instances.push(helper::check_type_def_gen(&item.generics, item.ident.span())?); let where_clause = item.generics.where_clause.clone(); - let docs = helper::get_doc_literals(&item.attrs); + let docs = get_doc_literals(&item.attrs); let typ = if let syn::Type::Path(typ) = &*item.ty { typ diff --git a/frame/support/procedural/src/storage/instance_trait.rs b/frame/support/procedural/src/storage/instance_trait.rs index 4f55d38596666..00a73d6fbd6e7 100644 --- a/frame/support/procedural/src/storage/instance_trait.rs +++ b/frame/support/procedural/src/storage/instance_trait.rs @@ -126,6 +126,7 @@ fn create_and_impl_instance_struct( Clone, Eq, PartialEq, #scrate::codec::Encode, #scrate::codec::Decode, + #scrate::scale_info::TypeInfo, #scrate::RuntimeDebug, )] #doc diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index ca7dd97c155f3..a90e5051c5b2e 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -18,31 +18,28 @@ //! Implementation of `storage_metadata` on module structure, used by construct_runtime. use super::{DeclStorageDefExt, StorageLineDefExt, StorageLineTypeDef}; -use frame_support_procedural_tools::clean_type_string; +use frame_support_procedural_tools::get_doc_literals; use proc_macro2::TokenStream; use quote::quote; fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> TokenStream { let value_type = &line.value_type; - let value_type = clean_type_string("e!( #value_type ).to_string()); match &line.storage_type { StorageLineTypeDef::Simple(_) => { quote! 
{ #scrate::metadata::StorageEntryType::Plain( - #scrate::metadata::DecodeDifferent::Encode(#value_type), + #scrate::scale_info::meta_type::<#value_type>() ) } }, StorageLineTypeDef::Map(map) => { let hasher = map.hasher.into_metadata(); let key = &map.key; - let key = clean_type_string("e!(#key).to_string()); quote! { #scrate::metadata::StorageEntryType::Map { - hasher: #scrate::metadata::#hasher, - key: #scrate::metadata::DecodeDifferent::Encode(#key), - value: #scrate::metadata::DecodeDifferent::Encode(#value_type), - unused: false, + hashers: #scrate::sp_std::vec! [ #scrate::metadata::#hasher ], + key: #scrate::scale_info::meta_type::<#key>(), + value: #scrate::scale_info::meta_type::<#value_type>(), } } }, @@ -50,39 +47,32 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let hasher1 = map.hasher1.into_metadata(); let hasher2 = map.hasher2.into_metadata(); let key1 = &map.key1; - let key1 = clean_type_string("e!(#key1).to_string()); let key2 = &map.key2; - let key2 = clean_type_string("e!(#key2).to_string()); quote! { - #scrate::metadata::StorageEntryType::DoubleMap { - hasher: #scrate::metadata::#hasher1, - key1: #scrate::metadata::DecodeDifferent::Encode(#key1), - key2: #scrate::metadata::DecodeDifferent::Encode(#key2), - value: #scrate::metadata::DecodeDifferent::Encode(#value_type), - key2_hasher: #scrate::metadata::#hasher2, + #scrate::metadata::StorageEntryType::Map { + hashers: #scrate::sp_std::vec! [ + #scrate::metadata::#hasher1, + #scrate::metadata::#hasher2, + ], + key: #scrate::scale_info::meta_type::<(#key1, #key2)>(), + value: #scrate::scale_info::meta_type::<#value_type>(), } } }, StorageLineTypeDef::NMap(map) => { - let keys = map - .keys - .iter() - .map(|key| clean_type_string("e!(#key).to_string())) - .collect::>(); + let key_tuple = &map.to_key_tuple(); let hashers = map .hashers .iter() .map(|hasher| hasher.to_storage_hasher_struct()) .collect::>(); quote! 
{ - #scrate::metadata::StorageEntryType::NMap { - keys: #scrate::metadata::DecodeDifferent::Encode(&[ - #( #keys, )* - ]), - hashers: #scrate::metadata::DecodeDifferent::Encode(&[ + #scrate::metadata::StorageEntryType::Map { + hashers: #scrate::sp_std::vec! [ #( #scrate::metadata::StorageHasher::#hashers, )* - ]), - value: #scrate::metadata::DecodeDifferent::Encode(#value_type), + ], + key: #scrate::scale_info::meta_type::<#key_tuple>(), + value: #scrate::scale_info::meta_type::<#value_type>(), } } }, @@ -129,8 +119,7 @@ fn default_byte_getter( #[cfg(feature = "std")] impl<#runtime_generic: #runtime_trait, #optional_instance_bound> - #scrate::metadata::DefaultByte - for #struct_name<#runtime_generic, #optional_instance> + #struct_name<#runtime_generic, #optional_instance> #where_clause { fn default_byte(&self) -> #scrate::sp_std::vec::Vec { @@ -142,16 +131,9 @@ fn default_byte_getter( } } - unsafe impl<#runtime_generic: #runtime_trait, #optional_instance_bound> Send - for #struct_name<#runtime_generic, #optional_instance> #where_clause {} - - unsafe impl<#runtime_generic: #runtime_trait, #optional_instance_bound> Sync - for #struct_name<#runtime_generic, #optional_instance> #where_clause {} - #[cfg(not(feature = "std"))] impl<#runtime_generic: #runtime_trait, #optional_instance_bound> - #scrate::metadata::DefaultByte - for #struct_name<#runtime_generic, #optional_instance> + #struct_name<#runtime_generic, #optional_instance> #where_clause { fn default_byte(&self) -> #scrate::sp_std::vec::Vec { @@ -187,25 +169,15 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { let (default_byte_getter_struct_def, default_byte_getter_struct_instance) = default_byte_getter(scrate, line, def); - let mut docs = TokenStream::new(); - for attr in line.attrs.iter().filter_map(|v| v.parse_meta().ok()) { - if let syn::Meta::NameValue(meta) = attr { - if meta.path.is_ident("doc") { - let lit = meta.lit; - docs.extend(quote!(#lit,)); - } - } - } + let docs = 
get_doc_literals(&line.attrs); let entry = quote! { #scrate::metadata::StorageEntryMetadata { - name: #scrate::metadata::DecodeDifferent::Encode(#str_name), + name: #str_name, modifier: #modifier, ty: #ty, - default: #scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::DefaultByteGetter(&#default_byte_getter_struct_instance) - ), - documentation: #scrate::metadata::DecodeDifferent::Encode(&[ #docs ]), + default: #default_byte_getter_struct_instance.default_byte(), + docs: #scrate::sp_std::vec![ #( #docs ),* ], }, }; @@ -222,9 +194,9 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { }; let store_metadata = quote!( - #scrate::metadata::StorageMetadata { - prefix: #scrate::metadata::DecodeDifferent::Encode(#prefix), - entries: #scrate::metadata::DecodeDifferent::Encode(&[ #entries ][..]), + #scrate::metadata::PalletStorageMetadata { + prefix: #prefix, + entries: #scrate::sp_std::vec![ #entries ], } ); @@ -237,7 +209,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { impl#module_impl #module_struct #where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #scrate::metadata::StorageMetadata { + pub fn storage_metadata() -> #scrate::metadata::PalletStorageMetadata { #store_metadata } } diff --git a/frame/support/procedural/tools/src/lib.rs b/frame/support/procedural/tools/src/lib.rs index 19242db4594c0..d7aba4c7cbf1c 100644 --- a/frame/support/procedural/tools/src/lib.rs +++ b/frame/support/procedural/tools/src/lib.rs @@ -100,3 +100,21 @@ pub fn clean_type_string(input: &str) -> String { .replace("< ", "<") .replace(" >", ">") } + +/// Return all doc attributes literals found. 
+pub fn get_doc_literals(attrs: &[syn::Attribute]) -> Vec { + attrs + .iter() + .filter_map(|attr| { + if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + Some(meta.lit) + } else { + None + } + } else { + None + } + }) + .collect() +} diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index a4644cebeeb55..2e6777fee2af2 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -33,10 +33,6 @@ pub use crate::{ TransactionPriority, WeighData, Weight, WithPostDispatchInfo, }, }; -pub use frame_metadata::{ - DecodeDifferent, DecodeDifferentArray, DefaultByte, DefaultByteGetter, ErrorMetadata, - FunctionArgumentMetadata, FunctionMetadata, ModuleConstantMetadata, ModuleErrorMetadata, -}; pub use sp_runtime::{traits::Dispatchable, DispatchError}; /// The return type of a `Dispatchable` in frame. When returned explicitly from @@ -67,8 +63,8 @@ pub type CallableCallFor = >::Call; /// A type that can be used as a parameter in a dispatchable function. /// /// When using `decl_module` all arguments for call functions must implement this trait. -pub trait Parameter: Codec + EncodeLike + Clone + Eq + fmt::Debug {} -impl Parameter for T where T: Codec + EncodeLike + Clone + Eq + fmt::Debug {} +pub trait Parameter: Codec + EncodeLike + Clone + Eq + fmt::Debug + scale_info::TypeInfo {} +impl Parameter for T where T: Codec + EncodeLike + Clone + Eq + fmt::Debug + scale_info::TypeInfo {} /// Declares a `Module` struct and a `Call` enum, which implements the dispatch logic. /// @@ -1169,7 +1165,7 @@ macro_rules! decl_module { { $( $on_finalize )* } { $( $offchain )* } { $( $constants )* } - { &'static str } + { __NO_ERROR_DEFINED } { $( $integrity_test )* } { $( $storage_version )* } [ $($t)* ] @@ -1239,7 +1235,7 @@ macro_rules! 
decl_module { { $( $on_finalize:tt )* } { $( $offchain:tt )* } { $( $constants:tt )* } - { $error_type:ty } + { $( $error_type:tt )* } { $( $integrity_test:tt )* } { $( $storage_version:tt )* } [ $( $dispatchables:tt )* ] @@ -1265,8 +1261,8 @@ macro_rules! decl_module { { $( $on_finalize )* } { $( $offchain )* } { $( $constants )* } - { $error_type } - { $( $integrity_test )* } + { $( $error_type )* } + { $( $integrity_test)* } { $( $storage_version )* } [ $( $dispatchables )* @@ -1750,7 +1746,6 @@ macro_rules! decl_module { (@impl_function $module:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?>; $origin_ty:ty; - $error_type:ty; $ignore:ident; $(#[$fn_attr:meta])* $vis:vis fn $name:ident ( @@ -1772,7 +1767,6 @@ macro_rules! decl_module { (@impl_function $module:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?>; $origin_ty:ty; - $error_type:ty; $ignore:ident; $(#[$fn_attr:meta])* $vis:vis fn $name:ident ( @@ -1796,7 +1790,7 @@ macro_rules! decl_module { variant $fn_name:ident; $( #[doc = $doc_attr:tt] )* #[compact] - $type:ty; + $name:ident : $type:ty; $( $rest:tt )* ) => { $crate::decl_module! { @@ -1808,7 +1802,7 @@ macro_rules! decl_module { { $( $current_params )* #[codec(compact)] - $type, + $name: $type, } variant $fn_name; $( #[doc = $doc_attr] )* @@ -1825,7 +1819,7 @@ macro_rules! decl_module { { $( $current_params:tt )* } variant $fn_name:ident; $(#[doc = $doc_attr:tt])* - $type:ty; + $name:ident : $type:ty; $( $rest:tt )* ) => { $crate::decl_module! { @@ -1836,7 +1830,7 @@ macro_rules! decl_module { { $( $generated_variants )* } { $( $current_params )* - $type, + $name: $type, } variant $fn_name; $( #[doc = $doc_attr] )* @@ -1866,9 +1860,9 @@ macro_rules! decl_module { $( $generated_variants )* #[allow(non_camel_case_types)] $(#[doc = $doc_attr])* - $fn_name ( + $fn_name { $( $current_params )* - ), + }, } {} $( @@ -1888,7 +1882,8 @@ macro_rules! 
decl_module { /// Dispatchable calls. /// /// Each variant of this enum maps to a dispatchable function from the associated module. - #[derive($crate::codec::Encode, $crate::codec::Decode)] + #[derive($crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] + #[scale_info(skip_type_params($trait_instance $(, $instance)?), capture_docs = "always")] pub enum $call_type<$trait_instance: $trait_name$(, $instance: $instantiable $( = $module_default_instance)?)?> where $( $other_where_bounds )* { @@ -1965,7 +1960,7 @@ macro_rules! decl_module { { $( $on_finalize:tt )* } { $( $offchain:tt )* } { $( $constants:tt )* } - { $error_type:ty } + { $( $error_type:tt )* } { $( $integrity_test:tt )* } { $( $storage_version:tt )* } ) => { @@ -2051,7 +2046,6 @@ macro_rules! decl_module { @impl_function $mod_type<$trait_instance: $trait_name $(, $fn_instance: $fn_instantiable)?>; $origin_type; - $error_type; $from; $(#[doc = $doc_attr])* /// @@ -2076,11 +2070,28 @@ macro_rules! decl_module { $(#[doc = $doc_attr])* $( $(#[$codec_attr])* - $param; + $param_name : $param; )* )* } + $crate::paste::paste! { + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> + $call_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* + { + $( + #[doc = "Create a call with the variant `" $fn_name "`."] + pub fn [< new_call_variant_ $fn_name >]( + $( $param_name: $param ),* + ) -> Self { + Self::$fn_name { + $( $param_name ),* + } + } + )* + } + } + $crate::decl_module! { @impl_get_storage_version $mod_type<$trait_instance: $trait_name $(, $instance: $instantiable)?>; @@ -2095,7 +2106,7 @@ macro_rules! decl_module { fn get_dispatch_info(&self) -> $crate::dispatch::DispatchInfo { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => { + $call_type::$fn_name { $( ref $param_name ),* } => { let __pallet_base_weight = $weight; let __pallet_weight = >::weigh_data( &__pallet_base_weight, @@ -2149,7 +2160,7 @@ macro_rules! 
decl_module { fn get_call_name(&self) -> &'static str { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => { + $call_type::$fn_name { $( ref $param_name ),* } => { // Don't generate any warnings for unused variables let _ = ( $( $param_name ),* ); stringify!($fn_name) @@ -2186,8 +2197,8 @@ macro_rules! decl_module { fn clone(&self) -> Self { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => - $call_type::$fn_name( $( (*$param_name).clone() ),* ) + $call_type::$fn_name { $( ref $param_name ),* } => + $call_type::$fn_name { $( $param_name: (*$param_name).clone() ),* } ,)* _ => unreachable!(), } @@ -2200,9 +2211,9 @@ macro_rules! decl_module { fn eq(&self, _other: &Self) -> bool { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => { + $call_type::$fn_name { $( ref $param_name ),* } => { let self_params = ( $( $param_name, )* ); - if let $call_type::$fn_name( $( ref $param_name ),* ) = *_other { + if let $call_type::$fn_name { $( ref $param_name ),* } = *_other { self_params == ( $( $param_name, )* ) } else { match *_other { @@ -2230,7 +2241,7 @@ macro_rules! decl_module { ) -> $crate::dispatch::result::Result<(), $crate::dispatch::fmt::Error> { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => + $call_type::$fn_name { $( ref $param_name ),* } => write!(_f, "{}{:?}", stringify!($fn_name), ( $( $param_name.clone(), )* ) @@ -2248,7 +2259,7 @@ macro_rules! decl_module { fn dispatch_bypass_filter(self, _origin: Self::Origin) -> $crate::dispatch::DispatchResultWithPostInfo { match self { $( - $call_type::$fn_name( $( $param_name ),* ) => { + $call_type::$fn_name { $( $param_name ),* } => { $crate::decl_module!( @call $from @@ -2277,20 +2288,17 @@ macro_rules! decl_module { )* } } + $crate::__impl_error_metadata! { + $mod_type<$trait_instance: $trait_name $(, $instance: $instantiable)?> + { $( $other_where_bounds )* } + $( $error_type )* + } $crate::__impl_module_constants_metadata ! 
{ $mod_type<$trait_instance: $trait_name $(, $instance: $instantiable)?> { $( $other_where_bounds )* } $( $constants )* } - impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::dispatch::ModuleErrorMetadata - for $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* - { - fn metadata() -> &'static [$crate::dispatch::ErrorMetadata] { - <$error_type as $crate::dispatch::ModuleErrorMetadata>::metadata() - } - } - $crate::__generate_dummy_part_checker!(); } } @@ -2302,6 +2310,7 @@ macro_rules! __dispatch_impl_metadata { ( $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> { $( $other_where_bounds:tt )* } + $call_type:ident $($rest:tt)* ) => { impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> @@ -2309,13 +2318,51 @@ macro_rules! __dispatch_impl_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn call_functions() -> &'static [$crate::dispatch::FunctionMetadata] { - $crate::__call_to_functions!($($rest)*) + pub fn call_functions() -> $crate::metadata::PalletCallMetadata { + $crate::scale_info::meta_type::<$call_type<$trait_instance $(, $instance)?>>().into() } } } } +/// Implement metadata for pallet error. +#[macro_export] +#[doc(hidden)] +macro_rules! 
__impl_error_metadata { + ( + $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> + { $( $other_where_bounds:tt )* } + __NO_ERROR_DEFINED + ) => { + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> + where $( $other_where_bounds )* + { + #[doc(hidden)] + #[allow(dead_code)] + pub fn error_metadata() -> Option<$crate::metadata::PalletErrorMetadata> { + None + } + } + }; + ( + $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> + { $( $other_where_bounds:tt )* } + $( $error_type:tt )* + ) => { + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> + where $( $other_where_bounds )* + { + #[doc(hidden)] + #[allow(dead_code)] + pub fn error_metadata() -> Option<$crate::metadata::PalletErrorMetadata> { + Some($crate::metadata::PalletErrorMetadata { + ty: $crate::scale_info::meta_type::<$( $error_type )*>() + }) + } + } + }; +} + /// Implement metadata for module constants. #[macro_export] #[doc(hidden)] @@ -2383,7 +2430,7 @@ macro_rules! __impl_module_constants_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn module_constants_metadata() -> &'static [$crate::dispatch::ModuleConstantMetadata] { + pub fn pallet_constants_metadata() -> $crate::sp_std::vec::Vec<$crate::metadata::PalletConstantMetadata> { // Create the `ByteGetter`s $( #[allow(non_upper_case_types)] @@ -2397,40 +2444,23 @@ macro_rules! __impl_module_constants_metadata { >); impl<$const_trait_instance: 'static + $const_trait_name $( , $const_instance: $const_instantiable)? 
- > $crate::dispatch::DefaultByte - for $default_byte_name <$const_trait_instance $(, $const_instance)?> + > $default_byte_name <$const_trait_instance $(, $const_instance)?> { fn default_byte(&self) -> $crate::dispatch::Vec { let value: $type = $value; $crate::dispatch::Encode::encode(&value) } } - - unsafe impl<$const_trait_instance: 'static + $const_trait_name $( - , $const_instance: $const_instantiable)? - > Send for $default_byte_name <$const_trait_instance $(, $const_instance)?> {} - - unsafe impl<$const_trait_instance: 'static + $const_trait_name $( - , $const_instance: $const_instantiable)? - > Sync for $default_byte_name <$const_trait_instance $(, $const_instance)?> {} )* - &[ + $crate::sp_std::vec![ $( - $crate::dispatch::ModuleConstantMetadata { - name: $crate::dispatch::DecodeDifferent::Encode(stringify!($name)), - ty: $crate::dispatch::DecodeDifferent::Encode(stringify!($type)), - value: $crate::dispatch::DecodeDifferent::Encode( - $crate::dispatch::DefaultByteGetter( - &$default_byte_name::< - $const_trait_instance $(, $const_instance)? - >( - $crate::dispatch::marker::PhantomData - ) - ) - ), - documentation: $crate::dispatch::DecodeDifferent::Encode( - &[ $( $doc_attr ),* ] - ), + $crate::metadata::PalletConstantMetadata { + name: stringify!($name), + ty: $crate::scale_info::meta_type::<$type>(), + value: $default_byte_name::<$const_trait_instance $(, $const_instance)?>( + Default::default() + ).default_byte(), + docs: $crate::sp_std::vec![ $( $doc_attr ),* ], } ),* ] @@ -2439,106 +2469,6 @@ macro_rules! __impl_module_constants_metadata { } } -/// Convert the list of calls into their JSON representation, joined by ",". -#[macro_export] -#[doc(hidden)] -macro_rules! 
__call_to_functions { - ( - $call_type:ident $origin_type:ty - { - $( - $(#[doc = $doc_attr:tt])* - fn $fn_name:ident($from:ident - $( - , $(#[$codec_attr:ident])* $param_name:ident : $param:ty - )* - ); - )* - } - ) => { - $crate::__functions_to_metadata!(0; $origin_type;; $( - fn $fn_name( $($(#[$codec_attr])* $param_name: $param ),* ); - $( $doc_attr ),*; - )*) - }; -} - -/// Convert a list of functions into a list of `FunctionMetadata` items. -#[macro_export] -#[doc(hidden)] -macro_rules! __functions_to_metadata{ - ( - $fn_id:expr; - $origin_type:ty; - $( $function_metadata:expr ),*; - fn $fn_name:ident( - $( - $(#[$codec_attr:ident])* $param_name:ident : $param:ty - ),* - ); - $( $fn_doc:expr ),*; - $( $rest:tt )* - ) => { - $crate::__functions_to_metadata!( - $fn_id + 1; $origin_type; - $( $function_metadata, )* $crate::__function_to_metadata!( - fn $fn_name($( $(#[$codec_attr])* $param_name : $param ),*); $( $fn_doc ),*; $fn_id; - ); - $($rest)* - ) - }; - ( - $fn_id:expr; - $origin_type:ty; - $( $function_metadata:expr ),*; - ) => { - &[ $( $function_metadata ),* ] - } -} - -/// Convert a function into its metadata representation. -#[macro_export] -#[doc(hidden)] -macro_rules! 
__function_to_metadata { - ( - fn $fn_name:ident( - $( $(#[$codec_attr:ident])* $param_name:ident : $param:ty),* - ); - $( $fn_doc:expr ),*; - $fn_id:expr; - ) => { - $crate::dispatch::FunctionMetadata { - name: $crate::dispatch::DecodeDifferent::Encode(stringify!($fn_name)), - arguments: $crate::dispatch::DecodeDifferent::Encode(&[ - $( - $crate::dispatch::FunctionArgumentMetadata { - name: $crate::dispatch::DecodeDifferent::Encode(stringify!($param_name)), - ty: $crate::dispatch::DecodeDifferent::Encode( - $crate::__function_to_metadata!(@stringify_expand_attr - $(#[$codec_attr])* $param_name: $param - ) - ), - } - ),* - ]), - documentation: $crate::dispatch::DecodeDifferent::Encode(&[ $( $fn_doc ),* ]), - } - }; - - (@stringify_expand_attr #[compact] $param_name:ident : $param:ty) => { - concat!("Compact<", stringify!($param), ">") - }; - - (@stringify_expand_attr $param_name:ident : $param:ty) => { stringify!($param) }; - - (@stringify_expand_attr $(#[codec_attr:ident])* $param_name:ident : $param:ty) => { - compile_error!(concat!( - "Invalid attribute for parameter `", stringify!($param_name), - "`, the following attributes are supported: `#[compact]`" - )); - } -} - #[macro_export] #[doc(hidden)] macro_rules! __check_reserved_fn_name { @@ -2597,6 +2527,7 @@ macro_rules! 
__check_reserved_fn_name { mod tests { use super::*; use crate::{ + metadata::*, traits::{ Get, GetCallName, IntegrityTest, OnFinalize, OnIdle, OnInitialize, OnRuntimeUpgrade, PalletInfo, @@ -2623,7 +2554,7 @@ mod tests { type DbWeight: Get; } - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] pub enum RawOrigin { Root, Signed(AccountId), @@ -2679,68 +2610,7 @@ mod tests { } } - const EXPECTED_METADATA: &'static [FunctionMetadata] = &[ - FunctionMetadata { - name: DecodeDifferent::Encode("aux_0"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[" Hi, this is a comment."]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_1"), - arguments: DecodeDifferent::Encode(&[FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("Compact"), - }]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_2"), - arguments: DecodeDifferent::Encode(&[ - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("i32"), - }, - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data2"), - ty: DecodeDifferent::Encode("String"), - }, - ]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_3"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_4"), - arguments: DecodeDifferent::Encode(&[FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("i32"), - }]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_5"), - arguments: DecodeDifferent::Encode(&[ - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: 
DecodeDifferent::Encode("i32"), - }, - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data2"), - ty: DecodeDifferent::Encode("Compact"), - }, - ]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("operational"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - ]; - + #[derive(scale_info::TypeInfo)] pub struct TraitImpl {} impl Config for TraitImpl {} @@ -2823,17 +2693,19 @@ mod tests { #[test] fn module_json_metadata() { let metadata = Module::::call_functions(); - assert_eq!(EXPECTED_METADATA, metadata); + let expected_metadata = + PalletCallMetadata { ty: scale_info::meta_type::>() }; + assert_eq!(expected_metadata, metadata); } #[test] fn compact_attr() { - let call: Call = Call::aux_1(1); + let call: Call = Call::aux_1 { _data: 1 }; let encoded = call.encode(); assert_eq!(2, encoded.len()); assert_eq!(vec![1, 4], encoded); - let call: Call = Call::aux_5(1, 2); + let call: Call = Call::aux_5 { _data: 1, _data2: 2 }; let encoded = call.encode(); assert_eq!(6, encoded.len()); assert_eq!(vec![5, 1, 0, 0, 0, 8], encoded); @@ -2841,13 +2713,13 @@ mod tests { #[test] fn encode_is_correct_and_decode_works() { - let call: Call = Call::aux_0(); + let call: Call = Call::aux_0 {}; let encoded = call.encode(); assert_eq!(vec![0], encoded); let decoded = Call::::decode(&mut &encoded[..]).unwrap(); assert_eq!(decoded, call); - let call: Call = Call::aux_2(32, "hello".into()); + let call: Call = Call::aux_2 { _data: 32, _data2: "hello".into() }; let encoded = call.encode(); assert_eq!(vec![2, 32, 0, 0, 0, 20, 104, 101, 108, 108, 111], encoded); let decoded = Call::::decode(&mut &encoded[..]).unwrap(); @@ -2899,19 +2771,19 @@ mod tests { fn weight_should_attach_to_call_enum() { // operational. 
assert_eq!( - Call::::operational().get_dispatch_info(), + Call::::operational {}.get_dispatch_info(), DispatchInfo { weight: 5, class: DispatchClass::Operational, pays_fee: Pays::Yes }, ); // custom basic assert_eq!( - Call::::aux_3().get_dispatch_info(), + Call::::aux_3 {}.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes }, ); } #[test] fn call_name() { - let name = Call::::aux_3().get_call_name(); + let name = Call::::aux_3 {}.get_call_name(); assert_eq!("aux_3", name); } @@ -2929,4 +2801,9 @@ mod tests { fn integrity_test_should_work() { as IntegrityTest>::integrity_test(); } + + #[test] + fn test_new_call_variant() { + Call::::new_call_variant_aux_0(); + } } diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index f0c6ba0f3b1c7..836428c6bc7db 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -17,8 +17,6 @@ //! Macro for declaring a module error. -#[doc(hidden)] -pub use frame_metadata::{DecodeDifferent, ErrorMetadata, ModuleErrorMetadata}; #[doc(hidden)] pub use sp_runtime::traits::{BadOrigin, LookupError}; @@ -87,10 +85,13 @@ macro_rules! decl_error { } ) => { $(#[$attr])* + #[derive($crate::scale_info::TypeInfo)] + #[scale_info(skip_type_params($generic $(, $inst_generic)?), capture_docs = "always")] pub enum $error<$generic: $trait $(, $inst_generic: $instance)?> $( where $( $where_ty: $where_bound ),* )? { #[doc(hidden)] + #[codec(skip)] __Ignore( $crate::sp_std::marker::PhantomData<($generic, $( $inst_generic)?)>, $crate::Never, @@ -159,24 +160,6 @@ macro_rules! decl_error { } } } - - impl<$generic: $trait $(, $inst_generic: $instance)?> $crate::error::ModuleErrorMetadata - for $error<$generic $(, $inst_generic)?> - $( where $( $where_ty: $where_bound ),* )? 
- { - fn metadata() -> &'static [$crate::error::ErrorMetadata] { - &[ - $( - $crate::error::ErrorMetadata { - name: $crate::error::DecodeDifferent::Encode(stringify!($name)), - documentation: $crate::error::DecodeDifferent::Encode(&[ - $( $doc_attr ),* - ]), - } - ),* - ] - } - } }; (@GENERATE_AS_U8 $self:ident diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 6e0d4ba6b47bd..3d042a3122db8 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -21,8 +21,6 @@ // You should have received a copy of the GNU General Public License // along with Substrate. If not, see . -pub use frame_metadata::{DecodeDifferent, EventMetadata, FnEncode, OuterEventMetadata}; - /// Implement the `Event` for a module. /// /// # Simple Event Example: @@ -129,8 +127,10 @@ macro_rules! decl_event { Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] + #[scale_info(capture_docs = "always")] /// Events for this module. /// $(#[$attr])* @@ -142,13 +142,6 @@ macro_rules! decl_event { impl From for () { fn from(_: Event) -> () { () } } - impl Event { - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata() -> &'static [ $crate::event::EventMetadata ] { - $crate::__events_to_metadata!(; $( $events )* ) - } - } } } @@ -272,8 +265,10 @@ macro_rules! __decl_generic_event { Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] + #[scale_info(capture_docs = "always")] /// Events for this module. /// $(#[$attr])* @@ -290,263 +285,8 @@ macro_rules! __decl_generic_event { impl<$( $generic_param ),* $(, $instance)? 
> From> for () { fn from(_: RawEvent<$( $generic_param ),* $(, $instance)?>) -> () { () } } - impl<$( $generic_param ),* $(, $instance)?> RawEvent<$( $generic_param ),* $(, $instance)?> { - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata() -> &'static [$crate::event::EventMetadata] { - $crate::__events_to_metadata!(; $( $events )* ) - } - } }; (@cannot_parse $ty:ty) => { compile_error!(concat!("The type `", stringify!($ty), "` can't be parsed as an unnamed one, please name it `Name = ", stringify!($ty), "`")); } } - -#[macro_export] -#[doc(hidden)] -macro_rules! __events_to_metadata { - ( - $( $metadata:expr ),*; - $( #[doc = $doc_attr:tt] )* - $event:ident $( ( $( $param:path ),* $(,)? ) )*, - $( $rest:tt )* - ) => { - $crate::__events_to_metadata!( - $( $metadata, )* - $crate::event::EventMetadata { - name: $crate::event::DecodeDifferent::Encode(stringify!($event)), - arguments: $crate::event::DecodeDifferent::Encode(&[ - $( $( stringify!($param) ),* )* - ]), - documentation: $crate::event::DecodeDifferent::Encode(&[ - $( $doc_attr ),* - ]), - }; - $( $rest )* - ) - }; - ( - $( $metadata:expr ),*; - ) => { - &[ $( $metadata ),* ] - } -} - -#[cfg(test)] -#[allow(dead_code)] -mod tests { - use super::*; - use codec::{Decode, Encode}; - use serde::Serialize; - - mod system { - pub trait Config: 'static { - type Origin; - type BlockNumber; - type PalletInfo: crate::traits::PalletInfo; - type DbWeight: crate::traits::Get; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=self {} - } - - decl_event!( - pub enum Event { - SystemEvent, - } - ); - } - - mod system_renamed { - pub trait Config: 'static { - type Origin; - type BlockNumber; - type PalletInfo: crate::traits::PalletInfo; - type DbWeight: crate::traits::Get; - } - - decl_module! 
{ - pub struct Module for enum Call where origin: T::Origin, system=self {} - } - - decl_event!( - pub enum Event { - SystemEvent, - } - ); - } - - mod event_module { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event without renaming the generic parameter `Balance` and `Origin`. - pub enum Event where ::Balance, ::Origin - { - /// Hi, I am a comment. - TestEvent(Balance, Origin), - /// Dog - EventWithoutParams, - } - ); - } - - mod event_module2 { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event with renamed generic parameter - pub enum Event - where - BalanceRenamed = ::Balance, - OriginRenamed = ::Origin, - { - TestEvent(BalanceRenamed), - TestOrigin(OriginRenamed), - } - ); - } - - mod event_module3 { - decl_event!( - pub enum Event { - HiEvent, - } - ); - } - - mod event_module4 { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event finish formatting on an unnamed one with trailing comma - pub enum Event where - ::Balance, - ::Origin, - { - TestEvent(Balance, Origin), - } - ); - } - - mod event_module5 { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! 
{ - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event finish formatting on an named one with trailing comma - pub enum Event - where - BalanceRenamed = ::Balance, - OriginRenamed = ::Origin, - { - TestEvent(BalanceRenamed, OriginRenamed), - TrailingCommaInArgs(u32, u32), - } - ); - } - - #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode, Serialize)] - pub struct TestRuntime; - - #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode, Serialize)] - pub struct TestRuntime2; - - impl event_module::Config for TestRuntime { - type Balance = u32; - } - - impl event_module2::Config for TestRuntime { - type Balance = u32; - } - - impl system::Config for TestRuntime { - type Origin = u32; - type BlockNumber = u32; - type PalletInfo = crate::tests::PanicPalletInfo; - type DbWeight = (); - } - - #[test] - fn event_metadata() { - assert_eq!( - system_renamed::Event::metadata(), - &[EventMetadata { - name: DecodeDifferent::Encode("SystemEvent"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - },] - ); - assert_eq!( - event_module::Event::::metadata(), - &[ - EventMetadata { - name: DecodeDifferent::Encode("TestEvent"), - arguments: DecodeDifferent::Encode(&["Balance", "Origin"]), - documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]) - }, - EventMetadata { - name: DecodeDifferent::Encode("EventWithoutParams"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[" Dog"]), - }, - ] - ); - assert_eq!( - event_module2::Event::::metadata(), - &[ - EventMetadata { - name: DecodeDifferent::Encode("TestEvent"), - arguments: DecodeDifferent::Encode(&["BalanceRenamed"]), - documentation: DecodeDifferent::Encode(&[]) - }, - EventMetadata { - name: DecodeDifferent::Encode("TestOrigin"), - arguments: DecodeDifferent::Encode(&["OriginRenamed"]), - documentation: DecodeDifferent::Encode(&[]), - }, - ] - ); - assert_eq!( - 
event_module3::Event::metadata(), - &[EventMetadata { - name: DecodeDifferent::Encode("HiEvent"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]) - }], - ); - } -} diff --git a/frame/support/src/hash.rs b/frame/support/src/hash.rs index 4136bd518f4c2..f943bcf323090 100644 --- a/frame/support/src/hash.rs +++ b/frame/support/src/hash.rs @@ -17,6 +17,7 @@ //! Hash utilities. +use crate::metadata; use codec::{Codec, MaxEncodedLen}; use sp_io::hashing::{blake2_128, blake2_256, twox_128, twox_256, twox_64}; use sp_std::prelude::Vec; @@ -58,7 +59,7 @@ impl Hashable for T { /// Hasher to use to hash keys to insert to storage. pub trait StorageHasher: 'static { - const METADATA: frame_metadata::StorageHasher; + const METADATA: metadata::StorageHasher; type Output: AsRef<[u8]>; fn hash(x: &[u8]) -> Self::Output; @@ -79,7 +80,7 @@ pub trait ReversibleStorageHasher: StorageHasher { /// Store the key directly. pub struct Identity; impl StorageHasher for Identity { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Identity; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Identity; type Output = Vec; fn hash(x: &[u8]) -> Vec { x.to_vec() @@ -97,7 +98,7 @@ impl ReversibleStorageHasher for Identity { /// Hash storage keys with `concat(twox64(key), key)` pub struct Twox64Concat; impl StorageHasher for Twox64Concat { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox64Concat; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Twox64Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { twox_64(x).iter().chain(x.into_iter()).cloned().collect::>() @@ -119,7 +120,7 @@ impl ReversibleStorageHasher for Twox64Concat { /// Hash storage keys with `concat(blake2_128(key), key)` pub struct Blake2_128Concat; impl StorageHasher for Blake2_128Concat { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_128Concat; + 
const METADATA: metadata::StorageHasher = metadata::StorageHasher::Blake2_128Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { blake2_128(x).iter().chain(x.into_iter()).cloned().collect::>() @@ -141,7 +142,7 @@ impl ReversibleStorageHasher for Blake2_128Concat { /// Hash storage keys with blake2 128 pub struct Blake2_128; impl StorageHasher for Blake2_128 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_128; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Blake2_128; type Output = [u8; 16]; fn hash(x: &[u8]) -> [u8; 16] { blake2_128(x) @@ -154,7 +155,7 @@ impl StorageHasher for Blake2_128 { /// Hash storage keys with blake2 256 pub struct Blake2_256; impl StorageHasher for Blake2_256 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_256; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Blake2_256; type Output = [u8; 32]; fn hash(x: &[u8]) -> [u8; 32] { blake2_256(x) @@ -167,7 +168,7 @@ impl StorageHasher for Blake2_256 { /// Hash storage keys with twox 128 pub struct Twox128; impl StorageHasher for Twox128 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox128; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Twox128; type Output = [u8; 16]; fn hash(x: &[u8]) -> [u8; 16] { twox_128(x) @@ -180,7 +181,7 @@ impl StorageHasher for Twox128 { /// Hash storage keys with twox 256 pub struct Twox256; impl StorageHasher for Twox256 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox256; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Twox256; type Output = [u8; 32]; fn hash(x: &[u8]) -> [u8; 32] { twox_256(x) diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 9dee6da89b256..cce03f1e8ce6c 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -36,6 +36,8 @@ pub use log; pub use once_cell; #[doc(hidden)] 
pub use paste; +#[doc(hidden)] +pub use scale_info; #[cfg(feature = "std")] pub use serde; pub use sp_core::Void; @@ -90,17 +92,18 @@ pub use self::{ pub use sp_runtime::{self, print, traits::Printable, ConsensusEngineId}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::TypeId; /// A unified log target for support operations. pub const LOG_TARGET: &'static str = "runtime::frame-support"; /// A type that cannot be instantiated. -#[derive(Debug, PartialEq, Eq, Clone)] +#[derive(Debug, PartialEq, Eq, Clone, TypeInfo)] pub enum Never {} /// A pallet identifier. These are per pallet and should be stored in a registry somewhere. -#[derive(Clone, Copy, Eq, PartialEq, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, Encode, Decode, TypeInfo)] pub struct PalletId(pub [u8; 8]); impl TypeId for PalletId { @@ -811,13 +814,13 @@ pub use serde::{Deserialize, Serialize}; #[cfg(test)] pub mod tests { use super::*; - use codec::{Codec, EncodeLike}; - use frame_metadata::{ - DecodeDifferent, DefaultByteGetter, StorageEntryMetadata, StorageEntryModifier, - StorageEntryType, StorageHasher, StorageMetadata, + use crate::metadata::{ + PalletStorageMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, + StorageHasher, }; + use codec::{Codec, EncodeLike}; use sp_io::TestExternalities; - use sp_std::{marker::PhantomData, result}; + use sp_std::result; /// A PalletInfo implementation which just panics. 
pub struct PanicPalletInfo; @@ -832,7 +835,7 @@ pub mod tests { } pub trait Config: 'static { - type BlockNumber: Codec + EncodeLike + Default; + type BlockNumber: Codec + EncodeLike + Default + TypeInfo; type Origin; type PalletInfo: crate::traits::PalletInfo; type DbWeight: crate::traits::Get; @@ -1150,132 +1153,109 @@ pub mod tests { }); } - const EXPECTED_METADATA: StorageMetadata = StorageMetadata { - prefix: DecodeDifferent::Encode("Test"), - entries: DecodeDifferent::Encode(&[ - StorageEntryMetadata { - name: DecodeDifferent::Encode("Data"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - hasher: StorageHasher::Twox64Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("u64"), - unused: false, + fn expected_metadata() -> PalletStorageMetadata { + PalletStorageMetadata { + prefix: "Test", + entries: vec![ + StorageEntryMetadata { + name: "Data", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Twox64Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: vec![0, 0, 0, 0, 0, 0, 0, 0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructData( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("OptionLinkedMap"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("u32"), - unused: false, + StorageEntryMetadata { + name: "OptionLinkedMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter( - 
&__GetByteStructOptionLinkedMap(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericData"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - hasher: StorageHasher::Identity, - key: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), - unused: false, + StorageEntryMetadata { + name: "GenericData", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Identity], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: vec![0, 0, 0, 0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructGenericData( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericData2"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), - unused: false, + StorageEntryMetadata { + name: "GenericData2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructGenericData2( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("DataDM"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Twox64Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("u64"), - key2_hasher: StorageHasher::Blake2_128Concat, + StorageEntryMetadata { + name: 
"DataDM", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::<(u32, u32)>(), + value: scale_info::meta_type::(), + }, + default: vec![0, 0, 0, 0, 0, 0, 0, 0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructDataDM( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericDataDM"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("T::BlockNumber"), - key2: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), - key2_hasher: StorageHasher::Identity, + StorageEntryMetadata { + name: "GenericDataDM", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Identity], + key: scale_info::meta_type::<(u32, u32)>(), + value: scale_info::meta_type::(), + }, + default: vec![0, 0, 0, 0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructGenericDataDM( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericData2DM"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("T::BlockNumber"), - key2: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), - key2_hasher: StorageHasher::Twox64Concat, + StorageEntryMetadata { + name: "GenericData2DM", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + key: scale_info::meta_type::<(u32, u32)>(), + 
value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructGenericData2DM(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("AppendableDM"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("Vec"), - key2_hasher: StorageHasher::Blake2_128Concat, + StorageEntryMetadata { + name: "AppendableDM", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + ], + key: scale_info::meta_type::<(u32, u32)>(), + value: scale_info::meta_type::>(), + }, + default: vec![0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructGenericData2DM(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - ]), - }; + ], + } + } #[test] fn store_metadata() { let metadata = Module::::storage_metadata(); - pretty_assertions::assert_eq!(EXPECTED_METADATA, metadata); + pretty_assertions::assert_eq!(expected_metadata(), metadata); } parameter_types! { @@ -1470,7 +1450,7 @@ pub mod pallet_prelude { /// It implements on pallet: /// * [`traits::GetStorageVersion`] /// * [`traits::OnGenesis`]: contains some logic to write pallet version into storage. -/// * `ModuleErrorMetadata`: using error declared or no metadata. +/// * `PalletErrorTypeInfo`: provides the type information for the pallet error, if defined. /// /// It declare `type Module` type alias for `Pallet`, used by [`construct_runtime`]. /// @@ -1623,9 +1603,6 @@ pub mod pallet_prelude { /// The macro implements `From>` for `&'static str`. /// The macro implements `From>` for `DispatchError`. 
/// -/// The macro implements `ModuleErrorMetadata` on `Pallet` defining the `ErrorMetadata` of the -/// pallet. -/// /// # Event: `#[pallet::event]` optional /// /// Allow to define pallet events, pallet events are stored in the block when they deposited @@ -1634,7 +1611,6 @@ pub mod pallet_prelude { /// Item is defined as: /// ```ignore /// #[pallet::event] -/// #[pallet::metadata($SomeType = "$Metadata", $SomeOtherType = "$Metadata", ..)] // Optional /// #[pallet::generate_deposit($visibility fn deposit_event)] // Optional /// pub enum Event<$some_generic> $optional_where_clause { /// /// Some doc @@ -1649,24 +1625,6 @@ pub mod pallet_prelude { /// std only). /// For ease of use, bound the trait `Member` available in frame_support::pallet_prelude. /// -/// Variant documentations and field types are put into metadata. -/// The attribute `#[pallet::metadata(..)]` allows to specify the metadata to put for some -/// types. -/// -/// The metadata of a type is defined by: -/// * if matching a type in `#[pallet::metadata(..)]`, then the corresponding metadata. -/// * otherwise the type stringified. -/// -/// E.g.: -/// ```ignore -/// #[pallet::event] -/// #[pallet::metadata(u32 = "SpecialU32")] -/// pub enum Event { -/// Proposed(u32, T::AccountId), -/// } -/// ``` -/// will write in event variant metadata `"SpecialU32"` and `"T::AccountId"`. -/// /// The attribute `#[pallet::generate_deposit($visibility fn deposit_event)]` generate a helper /// function on `Pallet` to deposit event. /// @@ -2027,8 +1985,6 @@ pub mod pallet_prelude { /// // /// // The macro generates event metadata, and derive Clone, Debug, Eq, PartialEq and Codec /// #[pallet::event] -/// // Additional argument to specify the metadata to use for given type. -/// #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] /// // Generate a funciton on Pallet to deposit an event. 
/// #[pallet::generate_deposit(pub(super) fn deposit_event)] /// pub enum Event { @@ -2192,7 +2148,6 @@ pub mod pallet_prelude { /// } /// /// #[pallet::event] -/// #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] /// #[pallet::generate_deposit(pub(super) fn deposit_event)] /// pub enum Event, I: 'static = ()> { /// /// doc comment put in metadata @@ -2342,8 +2297,7 @@ pub mod pallet_prelude { /// 7. **migrate event**: /// rewrite as a simple enum under with the attribute `#[pallet::event]`, /// use `#[pallet::generate_deposit($vis fn deposit_event)]` to generate deposit_event, -/// use `#[pallet::metadata(...)]` to configure the metadata for types in order not to break -/// them. 8. **migrate error**: rewrite it with attribute `#[pallet::error]`. +/// 8. **migrate error**: rewrite it with attribute `#[pallet::error]`. /// 9. **migrate storage**: /// decl_storage provide an upgrade template (see 3.). All storages, genesis config, genesis /// build and default implementation of genesis config can be taken from it directly. diff --git a/frame/support/src/storage/bounded_btree_map.rs b/frame/support/src/storage/bounded_btree_map.rs index 737c8953d29eb..d0c0aa7c4f155 100644 --- a/frame/support/src/storage/bounded_btree_map.rs +++ b/frame/support/src/storage/bounded_btree_map.rs @@ -31,7 +31,8 @@ use sp_std::{ /// /// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the /// map. All internal operations ensure this bound is respected. 
-#[derive(Encode)] +#[derive(Encode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(S))] pub struct BoundedBTreeMap(BTreeMap, PhantomData); impl Decode for BoundedBTreeMap diff --git a/frame/support/src/storage/bounded_vec.rs b/frame/support/src/storage/bounded_vec.rs index 0f56511e6edd8..b45c294f8d4a4 100644 --- a/frame/support/src/storage/bounded_vec.rs +++ b/frame/support/src/storage/bounded_vec.rs @@ -37,7 +37,8 @@ use sp_std::{convert::TryFrom, fmt, marker::PhantomData, prelude::*}; /// /// As the name suggests, the length of the queue is always bounded. All internal operations ensure /// this bound is respected. -#[derive(Encode)] +#[derive(Encode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(S))] pub struct BoundedVec(Vec, PhantomData); /// A bounded slice. diff --git a/frame/support/src/storage/mod.rs b/frame/support/src/storage/mod.rs index 8cee9faf6e81d..e57a876bf9831 100644 --- a/frame/support/src/storage/mod.rs +++ b/frame/support/src/storage/mod.rs @@ -17,6 +17,7 @@ //! Stuff to do with the runtime's storage. +pub use self::types::StorageEntryMetadata; use crate::{ hash::{ReversibleStorageHasher, StorageHasher}, storage::types::{ diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index 2db8a845c568c..7750110050868 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -19,14 +19,14 @@ //! StoragePrefixedDoubleMap traits and their methods directly. 
use crate::{ + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ - types::{OnEmptyGetter, OptionQuery, QueryKindTrait}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, StorageAppend, StorageDecodeLength, StoragePrefixedMap, StorageTryAppend, }, traits::{Get, GetDefault, StorageInfo, StorageInstance}, }; use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; @@ -511,37 +511,34 @@ where } } -/// Part of storage metadata for a storage double map. -/// -/// NOTE: Generic hashers is supported. -pub trait StorageDoubleMapMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; - const HASHER1: frame_metadata::StorageHasher; - const HASHER2: frame_metadata::StorageHasher; -} - impl - StorageDoubleMapMetadata + StorageEntryMetadata for StorageDoubleMap where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, Hasher2: crate::hash::StorageHasher, - Key1: FullCodec, - Key2: FullCodec, - Value: FullCodec, + Key1: FullCodec + scale_info::StaticTypeInfo, + Key2: FullCodec + scale_info::StaticTypeInfo, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: Get + 'static, MaxValues: Get>, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER1: frame_metadata::StorageHasher = Hasher1::METADATA; - const HASHER2: frame_metadata::StorageHasher = Hasher2::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = - DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); + + fn ty() -> StorageEntryType { + StorageEntryType::Map { + hashers: vec![Hasher1::METADATA, Hasher2::METADATA], + key: scale_info::meta_type::<(Key1, Key2)>(), + value: scale_info::meta_type::(), + } + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } impl @@ -603,8 
+600,12 @@ where #[cfg(test)] mod test { use super::*; - use crate::{hash::*, storage::types::ValueQuery}; - use frame_metadata::StorageEntryModifier; + use crate::{ + hash::*, + metadata::{StorageEntryModifier, StorageEntryType, StorageHasher}, + storage::types::ValueQuery, + }; + use assert_matches::assert_matches; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -768,19 +769,27 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::HASHER1, frame_metadata::StorageHasher::Blake2_128Concat); - assert_eq!(A::HASHER2, frame_metadata::StorageHasher::Twox64Concat); - assert_eq!( - AValueQueryWithAnOnEmpty::HASHER1, - frame_metadata::StorageHasher::Blake2_128Concat + + let assert_map_hashers = |ty, expected_hashers| { + if let StorageEntryType::Map { hashers, .. } = ty { + assert_eq!(hashers, expected_hashers) + } else { + assert_matches!(ty, StorageEntryType::Map { .. 
}) + } + }; + + assert_map_hashers( + A::ty(), + vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], ); - assert_eq!( - AValueQueryWithAnOnEmpty::HASHER2, - frame_metadata::StorageHasher::Twox64Concat + assert_map_hashers( + AValueQueryWithAnOnEmpty::ty(), + vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], ); + assert_eq!(A::NAME, "foo"); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 97u32.encode()); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); assert_eq!(WithLen::decode_len(3, 30), None); diff --git a/frame/support/src/storage/types/key.rs b/frame/support/src/storage/types/key.rs index db66838e3ff1e..da265fd6e6c87 100755 --- a/frame/support/src/storage/types/key.rs +++ b/frame/support/src/storage/types/key.rs @@ -20,6 +20,7 @@ use crate::hash::{ReversibleStorageHasher, StorageHasher}; use codec::{Encode, EncodeLike, FullCodec, MaxEncodedLen}; use paste::paste; +use scale_info::StaticTypeInfo; use sp_std::prelude::*; /// A type used exclusively by storage maps as their key type. @@ -35,14 +36,14 @@ pub struct Key(core::marker::PhantomData<(Hasher, KeyType)>); /// A trait that contains the current key as an associated type. pub trait KeyGenerator { - type Key: EncodeLike; + type Key: EncodeLike + StaticTypeInfo; type KArg: Encode; type HashFn: FnOnce(&[u8]) -> Vec; type HArg; - const HASHER_METADATA: &'static [frame_metadata::StorageHasher]; + const HASHER_METADATA: &'static [crate::metadata::StorageHasher]; - /// Given a `key` tuple, calculate the final key by encoding each element individuallly and + /// Given a `key` tuple, calculate the final key by encoding each element individually and /// hashing them using the corresponding hasher in the `KeyGenerator`. 
fn final_key + TupleToEncodedIter>(key: KArg) -> Vec; /// Given a `key` tuple, migrate the keys from using the old hashers as given by `hash_fns` @@ -67,13 +68,13 @@ pub trait KeyGeneratorInner: KeyGenerator { fn final_hash(encoded: &[u8]) -> Vec; } -impl KeyGenerator for Key { +impl KeyGenerator for Key { type Key = K; type KArg = (K,); type HashFn = Box Vec>; type HArg = (Self::HashFn,); - const HASHER_METADATA: &'static [frame_metadata::StorageHasher] = &[H::METADATA]; + const HASHER_METADATA: &'static [crate::metadata::StorageHasher] = &[H::METADATA]; fn final_key + TupleToEncodedIter>(key: KArg) -> Vec { H::hash(&key.to_encoded_iter().next().expect("should have at least one element!")) @@ -89,13 +90,15 @@ impl KeyGenerator for Key { } } -impl KeyGeneratorMaxEncodedLen for Key { +impl KeyGeneratorMaxEncodedLen + for Key +{ fn key_max_encoded_len() -> usize { H::max_len::() } } -impl KeyGeneratorInner for Key { +impl KeyGeneratorInner for Key { type Hasher = H; fn final_hash(encoded: &[u8]) -> Vec { @@ -111,7 +114,7 @@ impl KeyGenerator for Tuple { for_tuples!( type HArg = ( #(Tuple::HashFn),* ); ); type HashFn = Box Vec>; - const HASHER_METADATA: &'static [frame_metadata::StorageHasher] = + const HASHER_METADATA: &'static [crate::metadata::StorageHasher] = &[for_tuples!( #(Tuple::Hasher::METADATA),* )]; fn final_key + TupleToEncodedIter>(key: KArg) -> Vec { @@ -218,7 +221,9 @@ pub trait ReversibleKeyGenerator: KeyGenerator { fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error>; } -impl ReversibleKeyGenerator for Key { +impl ReversibleKeyGenerator + for Key +{ type ReversibleHasher = H; fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error> { diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index 6b3cfe64eaec0..a31224f15c80f 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -19,14 +19,14 @@ //! 
methods directly. use crate::{ + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ - types::{OnEmptyGetter, OptionQuery, QueryKindTrait}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, StorageAppend, StorageDecodeLength, StoragePrefixedMap, StorageTryAppend, }, traits::{Get, GetDefault, StorageInfo, StorageInstance}, }; use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; @@ -336,32 +336,31 @@ where } } -/// Part of storage metadata for a storage map. -/// -/// NOTE: Generic hasher is supported. -pub trait StorageMapMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; - const HASHER: frame_metadata::StorageHasher; -} - -impl StorageMapMetadata +impl StorageEntryMetadata for StorageMap where Prefix: StorageInstance, Hasher: crate::hash::StorageHasher, - Key: FullCodec, - Value: FullCodec, + Key: FullCodec + scale_info::StaticTypeInfo, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: Get + 'static, MaxValues: Get>, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER: frame_metadata::StorageHasher = Hasher::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = - DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); + + fn ty() -> StorageEntryType { + StorageEntryType::Map { + hashers: vec![Hasher::METADATA], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + } + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } impl crate::traits::StorageInfoTrait @@ -417,8 +416,12 @@ where #[cfg(test)] mod test { use super::*; - use crate::{hash::*, storage::types::ValueQuery}; - use frame_metadata::StorageEntryModifier; + use crate::{ + hash::*, + metadata::{StorageEntryModifier, StorageEntryType, 
StorageHasher}, + storage::types::ValueQuery, + }; + use assert_matches::assert_matches; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -572,14 +575,23 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::HASHER, frame_metadata::StorageHasher::Blake2_128Concat); - assert_eq!( - AValueQueryWithAnOnEmpty::HASHER, - frame_metadata::StorageHasher::Blake2_128Concat + + let assert_map_hashers = |ty, expected_hashers| { + if let StorageEntryType::Map { hashers, .. } = ty { + assert_eq!(hashers, expected_hashers) + } else { + assert_matches!(ty, StorageEntryType::Map { .. }) + } + }; + + assert_map_hashers(A::ty(), vec![StorageHasher::Blake2_128Concat]); + assert_map_hashers( + AValueQueryWithAnOnEmpty::ty(), + vec![StorageHasher::Blake2_128Concat], ); assert_eq!(A::NAME, "foo"); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 97u32.encode()); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); assert_eq!(WithLen::decode_len(3), None); diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index d61ca6813c9dd..76fed0b8cb320 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -18,8 +18,9 @@ //! Storage types to build abstraction on storage, they implements storage traits such as //! StorageMap and others. 
+use crate::metadata::{StorageEntryModifier, StorageEntryType}; use codec::FullCodec; -use frame_metadata::{DefaultByte, StorageEntryModifier}; +use sp_std::prelude::*; mod double_map; mod key; @@ -27,14 +28,14 @@ mod map; mod nmap; mod value; -pub use double_map::{StorageDoubleMap, StorageDoubleMapMetadata}; +pub use double_map::StorageDoubleMap; pub use key::{ EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, Key, KeyGenerator, KeyGeneratorMaxEncodedLen, ReversibleKeyGenerator, TupleToEncodedIter, }; -pub use map::{StorageMap, StorageMapMetadata}; -pub use nmap::{StorageNMap, StorageNMapMetadata}; -pub use value::{StorageValue, StorageValueMetadata}; +pub use map::StorageMap; +pub use nmap::StorageNMap; +pub use value::StorageValue; /// Trait implementing how the storage optional value is converted into the queried type. /// @@ -102,14 +103,13 @@ where } } -/// A helper struct which implements DefaultByte using `Get` and encode it. -struct OnEmptyGetter(core::marker::PhantomData<(Value, OnEmpty)>); -impl> DefaultByte - for OnEmptyGetter -{ - fn default_byte(&self) -> sp_std::vec::Vec { - OnEmpty::get().encode() - } +/// Provide metadata for a storage entry. +/// +/// Implemented by each of the storage entry kinds: value, map, doublemap and nmap. +pub trait StorageEntryMetadata { + const MODIFIER: StorageEntryModifier; + const NAME: &'static str; + + fn ty() -> StorageEntryType; + fn default() -> Vec; } -unsafe impl> Send for OnEmptyGetter {} -unsafe impl> Sync for OnEmptyGetter {} diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 149872ccba9ab..7048a69d59c2c 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -19,17 +19,17 @@ //! StoragePrefixedDoubleMap traits and their methods directly. 
use crate::{ + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ types::{ - EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, OnEmptyGetter, OptionQuery, - QueryKindTrait, TupleToEncodedIter, + EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, OptionQuery, QueryKindTrait, + StorageEntryMetadata, TupleToEncodedIter, }, KeyGenerator, PrefixIterator, StorageAppend, StorageDecodeLength, StoragePrefixedMap, }, traits::{Get, GetDefault, StorageInfo, StorageInstance}, }; use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_runtime::SaturatedConversion; use sp_std::prelude::*; @@ -440,31 +440,30 @@ where } } -/// Part of storage metadata for a storage n map. -/// -/// NOTE: Generic hashers is supported. -pub trait StorageNMapMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; - const HASHERS: &'static [frame_metadata::StorageHasher]; -} - -impl StorageNMapMetadata +impl StorageEntryMetadata for StorageNMap where Prefix: StorageInstance, Key: super::key::KeyGenerator, - Value: FullCodec, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: Get + 'static, MaxValues: Get>, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = - DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); - const HASHERS: &'static [frame_metadata::StorageHasher] = Key::HASHER_METADATA; + + fn ty() -> StorageEntryType { + StorageEntryType::Map { + key: scale_info::meta_type::(), + hashers: Key::HASHER_METADATA.iter().cloned().collect(), + value: scale_info::meta_type::(), + } + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } impl crate::traits::StorageInfoTrait @@ -518,9 +517,9 @@ mod test { use super::*; use crate::{ hash::*, + metadata::StorageEntryModifier, storage::types::{Key, 
ValueQuery}, }; - use frame_metadata::StorageEntryModifier; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -688,8 +687,8 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "Foo"); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 98u32.encode()); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); assert_eq!(WithLen::decode_len((3,)), None); @@ -856,8 +855,8 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "Foo"); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 98u32.encode()); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); assert_eq!(WithLen::decode_len((3, 30)), None); @@ -1046,8 +1045,8 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "Foo"); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 98u32.encode()); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); assert_eq!(WithLen::decode_len((3, 30, 300)), None); diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index ad835e928bdd6..d7f15487592b1 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -18,15 +18,15 
@@ //! Storage value type. Implements StorageValue trait and its method directly. use crate::{ + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ generator::StorageValue as StorageValueT, - types::{OnEmptyGetter, OptionQuery, QueryKindTrait}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, StorageAppend, StorageDecodeLength, StorageTryAppend, }, traits::{GetDefault, StorageInfo, StorageInstance}, }; use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; @@ -201,25 +201,24 @@ where } } -/// Part of storage metadata for storage value. -pub trait StorageValueMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; -} - -impl StorageValueMetadata +impl StorageEntryMetadata for StorageValue where Prefix: StorageInstance, - Value: FullCodec, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: crate::traits::Get + 'static, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = - DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); + + fn ty() -> StorageEntryType { + StorageEntryType::Plain(scale_info::meta_type::()) + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } impl crate::traits::StorageInfoTrait @@ -264,8 +263,7 @@ where #[cfg(test)] mod test { use super::*; - use crate::storage::types::ValueQuery; - use frame_metadata::StorageEntryModifier; + use crate::{metadata::StorageEntryModifier, storage::types::ValueQuery}; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -347,8 +345,8 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "foo"); - 
assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 97u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); WithLen::kill(); assert_eq!(WithLen::decode_len(), None); diff --git a/frame/support/src/storage/weak_bounded_vec.rs b/frame/support/src/storage/weak_bounded_vec.rs index f60e4d87bde85..9c30c45c3e2e1 100644 --- a/frame/support/src/storage/weak_bounded_vec.rs +++ b/frame/support/src/storage/weak_bounded_vec.rs @@ -36,7 +36,8 @@ use sp_std::{convert::TryFrom, fmt, marker::PhantomData, prelude::*}; /// /// The length of the vec is not strictly bounded. Decoding a vec with more element that the bound /// is accepted, and some method allow to bypass the restriction with warnings. -#[derive(Encode)] +#[derive(Encode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(S))] pub struct WeakBoundedVec(Vec, PhantomData); impl> Decode for WeakBoundedVec { diff --git a/frame/support/src/traits/tokens/misc.rs b/frame/support/src/traits/tokens/misc.rs index bea6e664cf2cd..214c28708a196 100644 --- a/frame/support/src/traits/tokens/misc.rs +++ b/frame/support/src/traits/tokens/misc.rs @@ -116,7 +116,7 @@ pub enum ExistenceRequirement { } /// Status of funds. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum BalanceStatus { /// Funds are free, as corresponding to `free` item in Balances. Free, @@ -165,8 +165,14 @@ pub trait AssetId: FullCodec + Copy + Eq + PartialEq + Debug {} impl AssetId for T {} /// Simple amalgamation trait to collect together properties for a Balance under one roof. 
-pub trait Balance: AtLeast32BitUnsigned + FullCodec + Copy + Default + Debug {} -impl Balance for T {} +pub trait Balance: + AtLeast32BitUnsigned + FullCodec + Copy + Default + Debug + scale_info::TypeInfo +{ +} +impl Balance + for T +{ +} /// Converts a balance value into an asset balance. pub trait BalanceConversion { diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 7af6d440aa40c..115470a9bf034 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -129,6 +129,7 @@ use crate::dispatch::{DispatchError, DispatchErrorWithPostInfo, DispatchResultWithPostInfo}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use smallvec::{smallvec, SmallVec}; @@ -201,7 +202,7 @@ pub trait PaysFee { } /// Explicit enum to denote if a transaction pays fee or not. -#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode, TypeInfo)] pub enum Pays { /// Transactor will pay related fees. Yes, @@ -221,7 +222,7 @@ impl Default for Pays { /// [DispatchClass::all] and [DispatchClass::non_mandatory] helper functions. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum DispatchClass { /// A normal dispatch. Normal, @@ -311,7 +312,7 @@ pub mod priority { } /// A bundle of static information collected from the `#[weight = $x]` attributes. -#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct DispatchInfo { /// Weight of this transaction. 
pub weight: Weight, @@ -338,7 +339,7 @@ impl GetDispatchInfo for () { /// Weight information that is only available post dispatch. /// NOTE: This can only be used to reduce the weight or fee, not increase it. -#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct PostDispatchInfo { /// Actual weight consumed by a call or `None` which stands for the worst case static weight. pub actual_weight: Option, @@ -627,7 +628,7 @@ impl GetDispatchInfo for sp_runtime::testing::TestX } /// The weight of database operations that the runtime can invoke. -#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct RuntimeDbWeight { pub read: Weight, pub write: Weight, @@ -659,7 +660,7 @@ impl RuntimeDbWeight { /// /// The `negative` value encodes whether the term is added or substracted from the /// overall polynomial result. -#[derive(Clone, Encode, Decode)] +#[derive(Clone, Encode, Decode, TypeInfo)] pub struct WeightToFeeCoefficient { /// The integral part of the coefficient. pub coeff_integer: Balance, @@ -737,7 +738,7 @@ where } /// A struct holding value for each `DispatchClass`. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct PerDispatchClass { /// Value for `Normal` extrinsics. 
normal: T, @@ -882,49 +883,49 @@ mod tests { #[test] fn weights_are_correct() { // #[weight = 1000] - let info = Call::::f00().get_dispatch_info(); + let info = Call::::f00 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, DispatchClass::Mandatory)] - let info = Call::::f01().get_dispatch_info(); + let info = Call::::f01 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Mandatory); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, Pays::No)] - let info = Call::::f02().get_dispatch_info(); + let info = Call::::f02 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::No); // #[weight = (1000, DispatchClass::Operational, Pays::No)] - let info = Call::::f03().get_dispatch_info(); + let info = Call::::f03 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Operational); assert_eq!(info.pays_fee, Pays::No); // #[weight = ((_a * 10 + _eb * 1) as Weight, DispatchClass::Normal, Pays::Yes)] - let info = Call::::f11(13, 20).get_dispatch_info(); + let info = Call::::f11 { _a: 13, _eb: 20 }.get_dispatch_info(); assert_eq!(info.weight, 150); // 13*10 + 20 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (0, DispatchClass::Operational, Pays::Yes)] - let info = Call::::f12(10, 20).get_dispatch_info(); + let info = Call::::f12 { _a: 10, _eb: 20 }.get_dispatch_info(); assert_eq!(info.weight, 0); assert_eq!(info.class, DispatchClass::Operational); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads(3) + T::DbWeight::get().writes(2) + 10_000] - let info = Call::::f20().get_dispatch_info(); + let info = Call::::f20 {}.get_dispatch_info(); assert_eq!(info.weight, 12300); // 100*3 + 1000*2 + 10_1000 assert_eq!(info.class, 
DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads_writes(6, 5) + 40_000] - let info = Call::::f21().get_dispatch_info(); + let info = Call::::f21 {}.get_dispatch_info(); assert_eq!(info.weight, 45600); // 100*6 + 1000*5 + 40_1000 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index c8f746c7cb9d4..e12880871e5c2 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,16 +14,18 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } +sp-version = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/version" } trybuild = "1.0.43" pretty_assertions = "0.6.1" rustversion = "1.0.0" -frame-metadata = { version = "14.0.0-dev", default-features = false, path = "../../metadata" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../system" } # The "std" feature for this pallet is never 
activated on purpose, in order to test construct_runtime error message test-pallet = { package = "frame-support-test-pallet", default-features = false, path = "pallet" } @@ -33,6 +35,7 @@ default = ["std"] std = [ "serde/std", "codec/std", + "scale-info/std", "sp-io/std", "frame-support/std", "frame-system/std", diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index 3a421ecc461fe..35eb4f34acae1 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../system" } @@ -20,6 +21,7 @@ frame-system = { version = "4.0.0-dev", default-features = false, path = "../../ default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", ] diff --git a/frame/support/test/src/lib.rs b/frame/support/test/src/lib.rs index 78317a1a2f907..52c0a6270d47f 100644 --- a/frame/support/test/src/lib.rs +++ b/frame/support/test/src/lib.rs @@ -25,9 +25,9 @@ /// The configuration trait pub trait Config: 'static { /// The runtime origin type. - type Origin: codec::Codec + codec::EncodeLike + Default; + type Origin: codec::Codec + codec::EncodeLike + Default + scale_info::TypeInfo; /// The block number type. - type BlockNumber: codec::Codec + codec::EncodeLike + Default; + type BlockNumber: codec::Codec + codec::EncodeLike + Default + scale_info::TypeInfo; /// The information about the pallet setup in the runtime. type PalletInfo: frame_support::traits::PalletInfo; /// The db weights. 
diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 5ddcb89a7dca2..062993fe10fbb 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -22,6 +22,7 @@ #![recursion_limit = "128"] use frame_support::traits::PalletInfo as _; +use scale_info::TypeInfo; use sp_core::{sr25519, H256}; use sp_runtime::{ generic, @@ -54,7 +55,7 @@ mod module1 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin(pub core::marker::PhantomData<(T, I)>); frame_support::decl_event! { @@ -96,7 +97,7 @@ mod module2 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin; frame_support::decl_event! { @@ -139,7 +140,7 @@ mod nested { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin; frame_support::decl_event! { @@ -195,7 +196,7 @@ pub mod module3 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin(pub core::marker::PhantomData); frame_support::decl_event! 
{ @@ -309,8 +310,8 @@ mod origin_test { #[test] fn origin_default_filter() { - let accepted_call = nested::module3::Call::fail().into(); - let rejected_call = module3::Call::fail().into(); + let accepted_call = nested::module3::Call::fail {}.into(); + let rejected_call = module3::Call::fail {}.into(); assert_eq!(Origin::root().filter_call(&accepted_call), true); assert_eq!(Origin::root().filter_call(&rejected_call), true); @@ -472,28 +473,28 @@ fn event_codec() { #[test] fn call_codec() { use codec::Encode; - assert_eq!(Call::System(system::Call::noop()).encode()[0], 30); - assert_eq!(Call::Module1_1(module1::Call::fail()).encode()[0], 31); - assert_eq!(Call::Module2(module2::Call::fail()).encode()[0], 32); - assert_eq!(Call::Module1_2(module1::Call::fail()).encode()[0], 33); - assert_eq!(Call::NestedModule3(nested::module3::Call::fail()).encode()[0], 34); - assert_eq!(Call::Module3(module3::Call::fail()).encode()[0], 35); - assert_eq!(Call::Module1_4(module1::Call::fail()).encode()[0], 3); - assert_eq!(Call::Module1_6(module1::Call::fail()).encode()[0], 1); - assert_eq!(Call::Module1_7(module1::Call::fail()).encode()[0], 2); - assert_eq!(Call::Module1_8(module1::Call::fail()).encode()[0], 12); - assert_eq!(Call::Module1_9(module1::Call::fail()).encode()[0], 13); + assert_eq!(Call::System(system::Call::noop {}).encode()[0], 30); + assert_eq!(Call::Module1_1(module1::Call::fail {}).encode()[0], 31); + assert_eq!(Call::Module2(module2::Call::fail {}).encode()[0], 32); + assert_eq!(Call::Module1_2(module1::Call::fail {}).encode()[0], 33); + assert_eq!(Call::NestedModule3(nested::module3::Call::fail {}).encode()[0], 34); + assert_eq!(Call::Module3(module3::Call::fail {}).encode()[0], 35); + assert_eq!(Call::Module1_4(module1::Call::fail {}).encode()[0], 3); + assert_eq!(Call::Module1_6(module1::Call::fail {}).encode()[0], 1); + assert_eq!(Call::Module1_7(module1::Call::fail {}).encode()[0], 2); + assert_eq!(Call::Module1_8(module1::Call::fail {}).encode()[0], 12); + 
assert_eq!(Call::Module1_9(module1::Call::fail {}).encode()[0], 13); } #[test] fn call_compact_attr() { use codec::Encode; - let call: module3::Call = module3::Call::aux_1(1); + let call: module3::Call = module3::Call::aux_1 { _data: 1 }; let encoded = call.encode(); assert_eq!(2, encoded.len()); assert_eq!(vec![1, 4], encoded); - let call: module3::Call = module3::Call::aux_2(1, 2); + let call: module3::Call = module3::Call::aux_2 { _data: 1, _data2: 2 }; let encoded = call.encode(); assert_eq!(6, encoded.len()); assert_eq!(vec![2, 1, 0, 0, 0, 8], encoded); @@ -502,13 +503,13 @@ fn call_compact_attr() { #[test] fn call_encode_is_correct_and_decode_works() { use codec::{Decode, Encode}; - let call: module3::Call = module3::Call::fail(); + let call: module3::Call = module3::Call::fail {}; let encoded = call.encode(); assert_eq!(vec![0], encoded); let decoded = module3::Call::::decode(&mut &encoded[..]).unwrap(); assert_eq!(decoded, call); - let call: module3::Call = module3::Call::aux_3(32, "hello".into()); + let call: module3::Call = module3::Call::aux_3 { _data: 32, _data2: "hello".into() }; let encoded = call.encode(); assert_eq!(vec![3, 32, 0, 0, 0, 20, 104, 101, 108, 108, 111], encoded); let decoded = module3::Call::::decode(&mut &encoded[..]).unwrap(); @@ -523,12 +524,12 @@ fn call_weight_should_attach_to_call_enum() { }; // operational. 
assert_eq!( - module3::Call::::operational().get_dispatch_info(), + module3::Call::::operational {}.get_dispatch_info(), DispatchInfo { weight: 5, class: DispatchClass::Operational, pays_fee: Pays::Yes }, ); // custom basic assert_eq!( - module3::Call::::aux_4().get_dispatch_info(), + module3::Call::::aux_4 {}.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes }, ); } @@ -536,14 +537,14 @@ fn call_weight_should_attach_to_call_enum() { #[test] fn call_name() { use frame_support::dispatch::GetCallName; - let name = module3::Call::::aux_4().get_call_name(); + let name = module3::Call::::aux_4 {}.get_call_name(); assert_eq!("aux_4", name); } #[test] fn call_metadata() { use frame_support::dispatch::{CallMetadata, GetCallMetadata}; - let call = Call::Module3(module3::Call::::aux_4()); + let call = Call::Module3(module3::Call::::aux_4 {}); let metadata = call.get_call_metadata(); let expected = CallMetadata { function_name: "aux_4".into(), pallet_name: "Module3".into() }; assert_eq!(metadata, expected); @@ -581,10 +582,10 @@ fn get_module_names() { #[test] fn call_subtype_conversion() { use frame_support::{dispatch::CallableCallFor, traits::IsSubType}; - let call = Call::Module3(module3::Call::::fail()); + let call = Call::Module3(module3::Call::::fail {}); let subcall: Option<&CallableCallFor> = call.is_sub_type(); let subcall_none: Option<&CallableCallFor> = call.is_sub_type(); - assert_eq!(Some(&module3::Call::::fail()), subcall); + assert_eq!(Some(&module3::Call::::fail {}), subcall); assert_eq!(None, subcall_none); let from = Call::from(subcall.unwrap().clone()); @@ -593,355 +594,145 @@ fn call_subtype_conversion() { #[test] fn test_metadata() { - use frame_metadata::*; - let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { - modules: DecodeDifferent::Encode(&[ - ModuleMetadata { - name: DecodeDifferent::Encode("System"), - storage: None, - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - 
&[FunctionMetadata { - name: DecodeDifferent::Encode("noop"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[ - EventMetadata { - name: DecodeDifferent::Encode("ExtrinsicSuccess"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - EventMetadata { - name: DecodeDifferent::Encode("ExtrinsicFailed"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - EventMetadata { - name: DecodeDifferent::Encode("Ignore"), - arguments: DecodeDifferent::Encode(&["BlockNumber"]), - documentation: DecodeDifferent::Encode(&[]), - }, - ] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 30, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_1"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance1Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 31, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module2"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: 
DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 32, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_2"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance2Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 33, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("NestedModule3"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| 
&[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 34, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module3"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[ - FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_1"), - arguments: DecodeDifferent::Encode(&[FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("Compact"), - }]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_2"), - arguments: DecodeDifferent::Encode(&[ - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("i32"), - }, - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data2"), - ty: DecodeDifferent::Encode("Compact"), - }, - ]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_3"), - arguments: DecodeDifferent::Encode(&[ - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("i32"), - }, - FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data2"), - ty: DecodeDifferent::Encode("String"), - }, - ]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("aux_4"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - FunctionMetadata { - name: DecodeDifferent::Encode("operational"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - ] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: 
DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 35, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_3"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance3Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: None, - event: None, - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 6, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_4"), - storage: None, - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: None, - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 3, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_5"), - storage: None, - calls: None, - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 4, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_6"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance6Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), 
- event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 1, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_7"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance7Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 2, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_8"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance8Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 12, - }, - ModuleMetadata { - name: 
DecodeDifferent::Encode("Module1_9"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance9Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| { - &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - event: Some(DecodeDifferent::Encode(FnEncode(|| { - &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }] - }))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 13, - }, - ]), - extrinsic: ExtrinsicMetadata { - version: 4, - signed_extensions: vec![DecodeDifferent::Encode("UnitSignedExtension")], + use frame_support::metadata::*; + use scale_info::meta_type; + + let pallets = vec![ + PalletMetadata { + name: "System", + storage: None, + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![], + error: None, + index: 30, }, + PalletMetadata { + name: "Module1_1", + storage: Some(PalletStorageMetadata { prefix: "Instance1Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![], + error: None, + index: 31, + }, + PalletMetadata { + name: "Module2", + storage: Some(PalletStorageMetadata { prefix: "Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::().into()), + constants: vec![], + error: None, + index: 32, + }, + PalletMetadata { + name: "Module1_2", + storage: Some(PalletStorageMetadata { prefix: "Instance2Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![], + error: None, + index: 33, + }, + PalletMetadata { + name: "NestedModule3", + storage: 
Some(PalletStorageMetadata { prefix: "Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::().into()), + constants: vec![], + error: None, + index: 34, + }, + PalletMetadata { + name: "Module3", + storage: Some(PalletStorageMetadata { prefix: "Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::().into()), + constants: vec![], + error: None, + index: 35, + }, + PalletMetadata { + name: "Module1_3", + storage: Some(PalletStorageMetadata { prefix: "Instance3Module", entries: vec![] }), + calls: None, + event: None, + constants: vec![], + error: None, + index: 6, + }, + PalletMetadata { + name: "Module1_4", + storage: None, + calls: Some(meta_type::>().into()), + event: None, + constants: vec![], + error: None, + index: 3, + }, + PalletMetadata { + name: "Module1_5", + storage: None, + calls: None, + event: Some(meta_type::>().into()), + constants: vec![], + error: None, + index: 4, + }, + PalletMetadata { + name: "Module1_6", + storage: Some(PalletStorageMetadata { prefix: "Instance6Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![], + error: None, + index: 1, + }, + PalletMetadata { + name: "Module1_7", + storage: Some(PalletStorageMetadata { prefix: "Instance7Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(PalletEventMetadata { + ty: meta_type::>(), + }), + constants: vec![], + error: None, + index: 2, + }, + PalletMetadata { + name: "Module1_8", + storage: Some(PalletStorageMetadata { prefix: "Instance8Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![], + error: None, + index: 12, + }, + PalletMetadata { + name: "Module1_9", + storage: Some(PalletStorageMetadata { prefix: "Instance9Module", entries: vec![] }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![], + 
error: None, + index: 13, + }, + ]; + + let extrinsic = ExtrinsicMetadata { + ty: meta_type::(), + version: 4, + signed_extensions: vec![SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: meta_type::<()>(), + additional_signed: meta_type::<()>(), + }], }; - pretty_assertions::assert_eq!(Runtime::metadata().1, RuntimeMetadata::V13(expected_metadata)); + + let expected_metadata: RuntimeMetadataPrefixed = + RuntimeMetadataLastVersion::new(pallets, extrinsic, meta_type::()).into(); + let actual_metadata = Runtime::metadata(); + pretty_assertions::assert_eq!(actual_metadata, expected_metadata); } #[test] diff --git a/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr b/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr index 6ae37ccf9b92d..5bc831f58988b 100644 --- a/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr +++ b/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr @@ -36,6 +36,28 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/no_std_genesis_config.rs:13:1 + | +13 | / construct_runtime! { +14 | | pub enum Runtime where +15 | | Block = Block, +16 | | NodeBlock = Block, +... 
| +21 | | } +22 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0412]: cannot find type `GenesisConfig` in crate `test_pallet` --> $DIR/no_std_genesis_config.rs:13:1 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr index 201609b2abaf6..8781fe0df201a 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_call_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_call_part.rs:20:6 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr index b68beb2b3fc65..fa837698aa642 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr @@ -21,7 +21,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 28 | System: system::{Pallet, Call, Storage, Config, Event}, | ^^^^^^ use of undeclared crate or module `system` -error[E0433]: failed to resolve: could not find `Event` in `pallet` +error[E0412]: cannot find type `Event` in module `pallet` --> $DIR/undefined_event_part.rs:22:1 | 22 | / construct_runtime! { @@ -31,9 +31,13 @@ error[E0433]: failed to resolve: could not find `Event` in `pallet` ... 
| 30 | | } 31 | | } - | |_^ could not find `Event` in `pallet` + | |_^ not found in `pallet` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing this enum + | +1 | use frame_system::Event; + | error[E0412]: cannot find type `Event` in module `pallet` --> $DIR/undefined_event_part.rs:22:1 @@ -48,12 +52,14 @@ error[E0412]: cannot find type `Event` in module `pallet` | |_^ not found in `pallet` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) -help: consider importing this enum +help: consider importing one of these items + | +1 | use crate::Event; | 1 | use frame_system::Event; | -error[E0412]: cannot find type `Event` in module `pallet` +error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_event_part.rs:22:1 | 22 | / construct_runtime! { @@ -63,14 +69,12 @@ error[E0412]: cannot find type `Event` in module `pallet` ... 
| 30 | | } 31 | | } - | |_^ not found in `pallet` + | |_^ not found in `system` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) -help: consider importing one of these items - | -1 | use crate::Event; +help: consider importing this enum | -1 | use frame_system::Event; +1 | use frame_system::RawOrigin; | error[E0433]: failed to resolve: use of undeclared crate or module `system` @@ -86,9 +90,15 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` | |_^ not found in `system` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) -help: consider importing this enum +help: consider importing one of these items | -1 | use frame_system::RawOrigin; +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; | error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr index 686875d83a4f4..699f66a414ed2 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_genesis_config_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0412]: cannot find type `GenesisConfig` in module `pallet` --> $DIR/undefined_genesis_config_part.rs:22:1 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr index 303819b45dd7c..88ff9ee910937 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_inherent_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_inherent_part.rs:20:6 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr index f49dcf5783e74..3b3aa75c1ea08 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr @@ -77,6 +77,30 @@ help: consider importing one of these items 1 | use frame_system::Origin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_origin_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_origin_part.rs:20:6 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr index 41202c3b005b7..ac12c56d5c279 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_validate_unsigned_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_validate_unsigned_part.rs:20:6 | diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index 50c8387bca555..347a3130daa79 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -21,14 +21,17 @@ mod tests { use frame_support::metadata::*; use sp_io::TestExternalities; - use std::marker::PhantomData; frame_support::decl_module! { pub struct Module for enum Call where origin: T::Origin, system=frame_support_test {} } pub trait Config: frame_support_test::Config { - type Origin2: codec::Codec + codec::EncodeLike + Default + codec::MaxEncodedLen; + type Origin2: codec::Codec + + codec::EncodeLike + + Default + + codec::MaxEncodedLen + + scale_info::TypeInfo; } frame_support::decl_storage! 
{ @@ -101,329 +104,265 @@ mod tests { type Origin2 = u32; } - const EXPECTED_METADATA: StorageMetadata = StorageMetadata { - prefix: DecodeDifferent::Encode("TestStorage"), - entries: DecodeDifferent::Encode(&[ - StorageEntryMetadata { - name: DecodeDifferent::Encode("U32"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[" Hello, this is doc!"]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBU32"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructPUBU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("U32MYDEF"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructU32MYDEF( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBU32MYDEF"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructPUBU32MYDEF( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("T::Origin2")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructGETU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32"), - modifier: 
StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructPUBGETU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32WITHCONFIG"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructGETU32WITHCONFIG(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32WITHCONFIG"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructPUBGETU32WITHCONFIG(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32MYDEF"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructGETU32MYDEF( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32MYDEF"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructPUBGETU32MYDEF(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32WITHCONFIGMYDEF"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructGETU32WITHCONFIGMYDEF(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - 
StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32WITHCONFIGMYDEF"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructPUBGETU32WITHCONFIGMYDEF(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32WITHCONFIGMYDEFOPT"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructPUBGETU32WITHCONFIGMYDEFOPT(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GetU32WithBuilder"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructGetU32WithBuilder(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GetOptU32WithBuilderSome"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructGetOptU32WithBuilderSome(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GetOptU32WithBuilderNone"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructGetOptU32WithBuilderNone(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("MAPU32"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: 
DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - unused: false, - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructMAPU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBMAPU32"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - unused: false, - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructPUBMAPU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GETMAPU32"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - unused: false, - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructGETMAPU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETMAPU32"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - unused: false, - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructPUBGETMAPU32( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("GETMAPU32MYDEF"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - unused: false, - }, - default: DecodeDifferent::Encode(DefaultByteGetter( - 
&__GetByteStructGETMAPU32MYDEF(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETMAPU32MYDEF"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - unused: false, - }, - default: DecodeDifferent::Encode(DefaultByteGetter( - &__GetByteStructPUBGETMAPU32MYDEF(PhantomData::), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("DOUBLEMAP"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - key2_hasher: StorageHasher::Blake2_128Concat, - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructDOUBLEMAP( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("DOUBLEMAP2"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), - key2_hasher: StorageHasher::Blake2_128Concat, - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructDOUBLEMAP2( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("COMPLEXTYPE1"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode( - "(::std::option::Option,)", - )), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructCOMPLEXTYPE1( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - 
StorageEntryMetadata { - name: DecodeDifferent::Encode("COMPLEXTYPE2"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode( - "([[(u16, Option<()>); 32]; 12], u32)", - )), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructCOMPLEXTYPE2( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("COMPLEXTYPE3"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("[u32; 25]")), - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructCOMPLEXTYPE3( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("NMAP"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::NMap { - keys: DecodeDifferent::Encode(&["u32", "u16"]), - hashers: DecodeDifferent::Encode(&[ - StorageHasher::Blake2_128Concat, - StorageHasher::Twox64Concat, - ]), - value: DecodeDifferent::Encode("u8"), - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructNMAP( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("NMAP2"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::NMap { - keys: DecodeDifferent::Encode(&["u32"]), - hashers: DecodeDifferent::Encode(&[StorageHasher::Blake2_128Concat]), - value: DecodeDifferent::Encode("u8"), - }, - default: DecodeDifferent::Encode(DefaultByteGetter(&__GetByteStructNMAP( - PhantomData::, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - ]), - }; + fn expected_metadata() -> PalletStorageMetadata { + PalletStorageMetadata { + prefix: "TestStorage", + entries: vec![ + StorageEntryMetadata { + name: "U32", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![" Hello, this is 
doc!"], + }, + StorageEntryMetadata { + name: "PUBU32", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "U32MYDEF", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBU32MYDEF", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GETU32", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBGETU32", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GETU32WITHCONFIG", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBGETU32WITHCONFIG", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GETU32MYDEF", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBGETU32MYDEF", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![3, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GETU32WITHCONFIGMYDEF", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![2, 0, 0, 0], + docs: vec![], + }, + 
StorageEntryMetadata { + name: "PUBGETU32WITHCONFIGMYDEF", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![1, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBGETU32WITHCONFIGMYDEFOPT", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GetU32WithBuilder", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GetOptU32WithBuilderSome", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GetOptU32WithBuilderNone", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "MAPU32", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBMAPU32", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GETMAPU32", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![0, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBGETMAPU32", + modifier: 
StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![0, 0, 0, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "GETMAPU32MYDEF", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![109, 97, 112, 100], // "map" + docs: vec![], + }, + StorageEntryMetadata { + name: "PUBGETMAPU32MYDEF", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![112, 117, 98, 109], // "pubmap" + docs: vec![], + }, + StorageEntryMetadata { + name: "DOUBLEMAP", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + ], + key: scale_info::meta_type::<(u32, u32)>(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "DOUBLEMAP2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + ], + key: scale_info::meta_type::<(u32, u32)>(), + value: scale_info::meta_type::<[u8; 4]>(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "COMPLEXTYPE1", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::<(Option,)>()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "COMPLEXTYPE2", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::<( + [[(u16, Option<()>); 32]; 12], + u32, + )>()), + 
default: [0u8; 1156].to_vec(), + docs: vec![], + }, + StorageEntryMetadata { + name: "COMPLEXTYPE3", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::<[u32; 25]>()), + default: [0u8; 100].to_vec(), + docs: vec![], + }, + StorageEntryMetadata { + name: "NMAP", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u32, u16)>(), + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "NMAP2", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + ], + } + } #[test] fn storage_info() { @@ -645,7 +584,7 @@ mod tests { #[test] fn store_metadata() { let metadata = Module::::storage_metadata(); - pretty_assertions::assert_eq!(EXPECTED_METADATA, metadata); + pretty_assertions::assert_eq!(expected_metadata(), metadata); } #[test] @@ -790,7 +729,7 @@ mod test_append_and_len { pub struct Module for enum Call where origin: T::Origin, system=frame_support_test {} } - #[derive(PartialEq, Eq, Clone, Encode, Decode)] + #[derive(PartialEq, Eq, Clone, Encode, Decode, scale_info::TypeInfo)] struct NoDef(u32); frame_support::decl_storage! 
{ diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index a948853ff2a44..809edae14f80c 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -21,13 +21,14 @@ use codec::{Codec, Decode, Encode, EncodeLike}; use frame_support::{ inherent::{InherentData, InherentIdentifier, MakeFatalError, ProvideInherent}, metadata::{ - DecodeDifferent, DefaultByteGetter, StorageEntryMetadata, StorageEntryModifier, - StorageEntryType, StorageHasher, StorageMetadata, + PalletStorageMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, + StorageHasher, }, parameter_types, traits::Get, Parameter, StorageDoubleMap, StorageMap, StorageValue, }; +use scale_info::TypeInfo; use sp_core::{sr25519, H256}; use sp_runtime::{ generic, @@ -53,7 +54,7 @@ mod module1 { type Event: From> + Into<::Event>; type Origin: From>; type SomeParameter: Get; - type GenericType: Default + Clone + Codec + EncodeLike; + type GenericType: Default + Clone + Codec + EncodeLike + TypeInfo; } frame_support::decl_module! 
{ @@ -108,7 +109,7 @@ mod module1 { } } - #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode)] + #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] pub enum Origin, I> where T::BlockNumber: From, @@ -181,7 +182,7 @@ mod module2 { } } - #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode)] + #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] pub enum Origin, I = DefaultInstance> { Members(u32), _Phantom(std::marker::PhantomData<(T, I)>), @@ -410,54 +411,45 @@ fn storage_with_instance_basic_operation() { }); } -const EXPECTED_METADATA: StorageMetadata = StorageMetadata { - prefix: DecodeDifferent::Encode("Instance2Module2"), - entries: DecodeDifferent::Encode(&[ - StorageEntryMetadata { - name: DecodeDifferent::Encode("Value"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("T::Amount")), - default: DecodeDifferent::Encode(DefaultByteGetter(&module2::__GetByteStructValue( - std::marker::PhantomData::<(Runtime, module2::Instance2)>, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("Map"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - hasher: StorageHasher::Identity, - key: DecodeDifferent::Encode("u64"), - value: DecodeDifferent::Encode("u64"), - unused: false, +fn expected_metadata() -> PalletStorageMetadata { + PalletStorageMetadata { + prefix: "Instance2Module2", + entries: vec![ + StorageEntryMetadata { + name: "Value", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter(&module2::__GetByteStructMap( - std::marker::PhantomData::<(Runtime, module2::Instance2)>, - ))), - documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: 
DecodeDifferent::Encode("DoubleMap"), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Identity, - key2_hasher: StorageHasher::Identity, - key1: DecodeDifferent::Encode("u64"), - key2: DecodeDifferent::Encode("u64"), - value: DecodeDifferent::Encode("u64"), + StorageEntryMetadata { + name: "Map", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Identity], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: [0u8; 8].to_vec(), + docs: vec![], }, - default: DecodeDifferent::Encode(DefaultByteGetter( - &module2::__GetByteStructDoubleMap( - std::marker::PhantomData::<(Runtime, module2::Instance2)>, - ), - )), - documentation: DecodeDifferent::Encode(&[]), - }, - ]), -}; + StorageEntryMetadata { + name: "DoubleMap", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Identity, StorageHasher::Identity], + key: scale_info::meta_type::<(u64, u64)>(), + value: scale_info::meta_type::(), + }, + default: [0u8; 8].to_vec(), + docs: vec![], + }, + ], + } +} #[test] fn test_instance_storage_metadata() { let metadata = Module2_2::storage_metadata(); - pretty_assertions::assert_eq!(EXPECTED_METADATA, metadata); + pretty_assertions::assert_eq!(expected_metadata(), metadata); } diff --git a/frame/support/test/tests/issue2219.rs b/frame/support/test/tests/issue2219.rs index 17eebf2d1022e..68ad2a50a21bc 100644 --- a/frame/support/test/tests/issue2219.rs +++ b/frame/support/test/tests/issue2219.rs @@ -17,6 +17,7 @@ use frame_support::{ codec::{Decode, Encode}, + scale_info::TypeInfo, sp_runtime::{ generic, traits::{BlakeTwo256, Verify}, @@ -34,12 +35,12 @@ mod module { (::AccountId, Role, ::BlockNumber); pub type Requests = Vec>; - #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug)] + #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, TypeInfo)] pub enum Role { Storage, } - 
#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug)] + #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, TypeInfo)] pub struct RoleParameters { // minimum actors to maintain - if role is unstaking // and remaining actors would be less that this value - prevent or punish for unstaking @@ -82,7 +83,7 @@ mod module { } } - pub trait Config: system::Config {} + pub trait Config: system::Config + TypeInfo {} frame_support::decl_module! { pub struct Module for enum Call where origin: T::Origin, system=system {} diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 80bae000b6c77..2874ef6bd7685 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -24,6 +24,7 @@ use frame_support::{ }, weights::{DispatchClass, DispatchInfo, GetDispatchInfo, Pays, RuntimeDbWeight}, }; +use scale_info::{meta_type, TypeInfo}; use sp_io::{ hashing::{blake2_128, twox_128, twox_64}, TestExternalities, @@ -80,14 +81,14 @@ impl From for u64 { } pub trait SomeAssociation1 { - type _1: Parameter + codec::MaxEncodedLen; + type _1: Parameter + codec::MaxEncodedLen + TypeInfo; } impl SomeAssociation1 for u64 { type _1 = u64; } pub trait SomeAssociation2 { - type _2: Parameter + codec::MaxEncodedLen; + type _2: Parameter + codec::MaxEncodedLen + TypeInfo; } impl SomeAssociation2 for u64 { type _2 = u64; @@ -101,6 +102,7 @@ pub mod pallet { }; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; + use scale_info::TypeInfo; type BalanceOf = ::Balance; @@ -124,7 +126,7 @@ pub mod pallet { #[pallet::constant] type MyGetParam3: Get<::_1>; - type Balance: Parameter + Default; + type Balance: Parameter + Default + TypeInfo; type Event: From> + IsType<::Event>; } @@ -228,7 +230,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] #[pallet::generate_deposit(fn deposit_event)] pub enum Event where @@ -340,7 +341,9 @@ pub mod pallet { } #[pallet::origin] 
- #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode)] + #[derive( + EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, TypeInfo, + )] pub struct Origin(PhantomData); #[pallet::validate_unsigned] @@ -352,7 +355,7 @@ pub mod pallet { fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { T::AccountId::from(SomeType1); // Test for where clause T::AccountId::from(SomeType5); // Test for where clause - if matches!(call, Call::foo_transactional(_)) { + if matches!(call, Call::foo_transactional { .. }) { return Ok(ValidTransaction::default()) } Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) @@ -372,18 +375,18 @@ pub mod pallet { fn create_inherent(_data: &InherentData) -> Option { T::AccountId::from(SomeType1); // Test for where clause T::AccountId::from(SomeType6); // Test for where clause - Some(Call::foo_no_post_info()) + Some(Call::foo_no_post_info {}) } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::foo_no_post_info() | Call::foo(..)) + matches!(call, Call::foo_no_post_info {} | Call::foo { .. }) } fn check_inherent(call: &Self::Call, _: &InherentData) -> Result<(), Self::Error> { match call { - Call::foo_no_post_info() => Ok(()), - Call::foo(0, 0) => Err(InherentError::Fatal), - Call::foo(..) => Ok(()), + Call::foo_no_post_info {} => Ok(()), + Call::foo { foo: 0, bar: 0 } => Err(InherentError::Fatal), + Call::foo { .. 
} => Ok(()), _ => unreachable!("other calls are not inherents"), } } @@ -550,13 +553,13 @@ fn transactional_works() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo_transactional(0) + pallet::Call::::foo_transactional { foo: 0 } .dispatch_bypass_filter(None.into()) .err() .unwrap(); assert!(frame_system::Pallet::::events().is_empty()); - pallet::Call::::foo_transactional(1) + pallet::Call::::foo_transactional { foo: 1 } .dispatch_bypass_filter(None.into()) .unwrap(); assert_eq!( @@ -571,7 +574,7 @@ fn transactional_works() { #[test] fn call_expand() { - let call_foo = pallet::Call::::foo(3, 0); + let call_foo = pallet::Call::::foo { foo: 3, bar: 0 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } @@ -620,7 +623,7 @@ fn inherent_expand() { let inherents = InherentData::new().create_extrinsics(); let expected = vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }]; assert_eq!(expected, inherents); @@ -635,11 +638,11 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 0)), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 0 }), signature: None, }, ], @@ -657,11 +660,11 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(0, 0)), + function: Call::Example(pallet::Call::foo { foo: 0, bar: 0 }), signature: None, }, ], @@ -678,7 +681,7 @@ fn inherent_expand() { Digest::default(), ), 
vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_transactional(0)), + function: Call::Example(pallet::Call::foo_transactional { foo: 0 }), signature: None, }], ); @@ -696,7 +699,7 @@ fn inherent_expand() { Digest::default(), ), vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: Some((1, (), ())), }], ); @@ -715,11 +718,11 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 1)), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 1 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_transactional(0)), + function: Call::Example(pallet::Call::foo_transactional { foo: 0 }), signature: None, }, ], @@ -737,15 +740,15 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 1)), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 1 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_transactional(0)), + function: Call::Example(pallet::Call::foo_transactional { foo: 0 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, ], @@ -763,15 +766,15 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 1)), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 1 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 0)), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 0 }), signature: Some((1, (), ())), }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, ], @@ -786,12 +789,12 @@ fn validate_unsigned_expand() { 
InvalidTransaction, TransactionSource, TransactionValidityError, ValidTransaction, ValidateUnsigned, }; - let call = pallet::Call::::foo_no_post_info(); + let call = pallet::Call::::foo_no_post_info {}; let validity = pallet::Pallet::validate_unsigned(TransactionSource::Local, &call).unwrap_err(); assert_eq!(validity, TransactionValidityError::Invalid(InvalidTransaction::Call)); - let call = pallet::Call::::foo_transactional(0); + let call = pallet::Call::::foo_transactional { foo: 0 }; let validity = pallet::Pallet::validate_unsigned(TransactionSource::External, &call).unwrap(); assert_eq!(validity, ValidTransaction::default()); @@ -810,7 +813,9 @@ fn trait_store_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo(3, 0).dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo { foo: 3, bar: 0 } + .dispatch_bypass_filter(None.into()) + .unwrap(); assert_eq!( frame_system::Pallet::::events()[0].event, Event::Example(pallet::Event::Something(3)), @@ -818,6 +823,11 @@ fn pallet_expand_deposit_event() { }) } +#[test] +fn pallet_new_call_variant() { + Call::Example(pallet::Call::new_call_variant_foo(3, 4)); +} + #[test] fn storage_expand() { use frame_support::{pallet_prelude::*, storage::StoragePrefixedMap}; @@ -969,288 +979,478 @@ fn migrate_from_pallet_version_to_storage_version() { #[test] fn metadata() { - use codec::{Decode, Encode}; - use frame_metadata::*; - - let expected_pallet_metadata = ModuleMetadata { - index: 1, - name: DecodeDifferent::Decoded("Example".to_string()), - storage: Some(DecodeDifferent::Decoded(StorageMetadata { - prefix: DecodeDifferent::Decoded("Example".to_string()), - entries: DecodeDifferent::Decoded(vec![ - StorageEntryMetadata { - name: DecodeDifferent::Decoded("ValueWhereClause".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded( - "::_2".to_string(), - 
)), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + use frame_support::metadata::*; + + let pallets = vec![ + PalletMetadata { + index: 0, + name: "System", + storage: None, + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![ + PalletConstantMetadata { + name: "BlockWeights", + ty: meta_type::(), + value: vec![], + docs: vec![], }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("Value".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "BlockLength", + ty: meta_type::(), + value: vec![], + docs: vec![], }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("Value2".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u64".to_string())), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "BlockHashCount", + ty: meta_type::(), + value: vec![], + docs: vec![], }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map".to_string()), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u8".to_string()), - value: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - unused: false, - }, - default: DecodeDifferent::Decoded(vec![4, 0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "DbWeight", + ty: meta_type::(), + value: vec![], + docs: vec![], }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map2".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u16".to_string()), - value: 
DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - unused: false, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "Version", + ty: meta_type::(), + value: vec![], + docs: vec![], }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u32".to_string()), - key1: DecodeDifferent::Decoded("u8".to_string()), - key2: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "SS58Prefix", + ty: meta_type::(), + value: vec![], + docs: vec![], }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap2".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u64".to_string()), - key1: DecodeDifferent::Decoded("u16".to_string()), - key2: DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - key2_hasher: StorageHasher::Blake2_128Concat, + ], + error: Some(meta_type::>().into()), + }, + PalletMetadata { + index: 1, + name: "Example", + storage: Some(PalletStorageMetadata { + prefix: "Example", + entries: vec![ + StorageEntryMetadata { + name: "ValueWhereClause", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("NMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: 
DecodeDifferent::Decoded(vec!["u8".to_string()]), - hashers: DecodeDifferent::Decoded(vec![StorageHasher::Blake2_128Concat]), - value: DecodeDifferent::Decoded("u32".to_string()), + StorageEntryMetadata { + name: "Value", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("NMap2".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: DecodeDifferent::Decoded(vec!["u16".to_string(), "u32".to_string()]), - hashers: DecodeDifferent::Decoded(vec![ - StorageHasher::Twox64Concat, - StorageHasher::Blake2_128Concat, - ]), - value: DecodeDifferent::Decoded("u64".to_string()), + StorageEntryMetadata { + name: "Value2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Map", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + key: meta_type::(), + value: meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], + }, + default: vec![4, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Map2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::(), + value: meta_type::(), + hashers: vec![StorageHasher::Twox64Concat], + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + value: meta_type::(), + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], + key: meta_type::<(u8, u16)>(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + value: meta_type::(), + 
key: meta_type::<(u16, u32)>(), + hashers: vec![ + StorageHasher::Twox64Concat, + StorageHasher::Blake2_128Concat, + ], + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "NMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "NMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::<(u16, u32)>(), + hashers: vec![ + StorageHasher::Twox64Concat, + StorageHasher::Blake2_128Concat, + ], + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::(), + value: meta_type::(), + hashers: vec![StorageHasher::Twox64Concat], + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalDoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + value: meta_type::(), + key: meta_type::<(u8, u16)>(), + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalNMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::<(u8, u16)>(), + hashers: vec![ + StorageHasher::Blake2_128Concat, + 
StorageHasher::Twox64Concat, + ], + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + ], + }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![ + PalletConstantMetadata { + name: "MyGetParam", + ty: meta_type::(), + value: vec![10, 0, 0, 0], + docs: vec![" Some comment", " Some comment"], }, - #[cfg(feature = "conditional-storage")] - StorageEntryMetadata { - name: DecodeDifferent::Decoded("ConditionalValue".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "MyGetParam2", + ty: meta_type::(), + value: vec![11, 0, 0, 0], + docs: vec![" Some comment", " Some comment"], }, - #[cfg(feature = "conditional-storage")] - StorageEntryMetadata { - name: DecodeDifferent::Decoded("ConditionalMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u16".to_string()), - value: DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - unused: false, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "MyGetParam3", + ty: meta_type::(), + value: vec![12, 0, 0, 0, 0, 0, 0, 0], + docs: vec![], }, - #[cfg(feature = "conditional-storage")] - StorageEntryMetadata { - name: DecodeDifferent::Decoded("ConditionalDoubleMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u32".to_string()), - key1: DecodeDifferent::Decoded("u8".to_string()), - key2: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: 
DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "some_extra", + ty: meta_type::(), + value: vec![100, 0, 0, 0, 0, 0, 0, 0], + docs: vec![" Some doc", " Some doc"], }, - #[cfg(feature = "conditional-storage")] - StorageEntryMetadata { - name: DecodeDifferent::Decoded("ConditionalNMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: DecodeDifferent::Decoded(vec!["u8".to_string(), "u16".to_string()]), - hashers: DecodeDifferent::Decoded(vec![ - StorageHasher::Blake2_128Concat, - StorageHasher::Twox64Concat, - ]), - value: DecodeDifferent::Decoded("u32".to_string()), - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + PalletConstantMetadata { + name: "some_extra_extra", + ty: meta_type::(), + value: vec![0, 0, 0, 0, 0, 0, 0, 0], + docs: vec![" Some doc"], }, - ]), - })), - calls: Some(DecodeDifferent::Decoded(vec![ - FunctionMetadata { - name: DecodeDifferent::Decoded("foo".to_string()), - arguments: DecodeDifferent::Decoded(vec![ - FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_foo".to_string()), - ty: DecodeDifferent::Decoded("Compact".to_string()), + ], + error: Some(PalletErrorMetadata { ty: meta_type::>() }), + }, + PalletMetadata { + index: 1, + name: "Example", + storage: Some(PalletStorageMetadata { + prefix: "Example", + entries: vec![ + StorageEntryMetadata { + name: "ValueWhereClause", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], }, - FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_bar".to_string()), - ty: DecodeDifferent::Decoded("u32".to_string()), + StorageEntryMetadata { + name: "Value", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], }, - ]), - documentation: DecodeDifferent::Decoded(vec![ - " Doc comment put in metadata".to_string() - ]), - }, - 
FunctionMetadata { - name: DecodeDifferent::Decoded("foo_transactional".to_string()), - arguments: DecodeDifferent::Decoded(vec![FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("foo".to_string()), - ty: DecodeDifferent::Decoded("Compact".to_string()), - }]), - documentation: DecodeDifferent::Decoded(vec![ - " Doc comment put in metadata".to_string() - ]), - }, - FunctionMetadata { - name: DecodeDifferent::Decoded("foo_no_post_info".to_string()), - arguments: DecodeDifferent::Decoded(vec![]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ])), - event: Some(DecodeDifferent::Decoded(vec![ - EventMetadata { - name: DecodeDifferent::Decoded("Proposed".to_string()), - arguments: DecodeDifferent::Decoded(vec![ - "::AccountId".to_string() - ]), - documentation: DecodeDifferent::Decoded(vec![ - " doc comment put in metadata".to_string() - ]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("Spending".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Balance".to_string()]), - documentation: DecodeDifferent::Decoded(vec![" doc".to_string()]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("Something".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Other".to_string()]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("SomethingElse".to_string()), - arguments: DecodeDifferent::Decoded(vec![ - "::_1".to_string() - ]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ])), - constants: DecodeDifferent::Decoded(vec![ - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam".to_string()), - ty: DecodeDifferent::Decoded("u32".to_string()), - value: DecodeDifferent::Decoded(vec![10, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![ - " Some comment".to_string(), - " Some comment".to_string(), - ]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam2".to_string()), - ty: 
DecodeDifferent::Decoded("u32".to_string()), - value: DecodeDifferent::Decoded(vec![11, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![ - " Some comment".to_string(), - " Some comment".to_string(), - ]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam3".to_string()), - ty: DecodeDifferent::Decoded("::_1".to_string()), - value: DecodeDifferent::Decoded(vec![12, 0, 0, 0, 0, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("some_extra".to_string()), - ty: DecodeDifferent::Decoded("T::AccountId".to_string()), - value: DecodeDifferent::Decoded(vec![100, 0, 0, 0, 0, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![ - " Some doc".to_string(), - " Some doc".to_string(), - ]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("some_extra_extra".to_string()), - ty: DecodeDifferent::Decoded("T::AccountId".to_string()), - value: DecodeDifferent::Decoded(vec![0, 0, 0, 0, 0, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![" Some doc".to_string()]), - }, - ]), - errors: DecodeDifferent::Decoded(vec![ErrorMetadata { - name: DecodeDifferent::Decoded("InsufficientProposersBalance".to_string()), - documentation: DecodeDifferent::Decoded(vec![ - " doc comment put into metadata".to_string() - ]), - }]), + StorageEntryMetadata { + name: "Value2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Map", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + key: meta_type::(), + value: meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], + }, + default: vec![4, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Map2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::(), + value: meta_type::(), + hashers: vec![StorageHasher::Twox64Concat], + }, + 
default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + value: meta_type::(), + key: meta_type::<(u8, u16)>(), + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + value: meta_type::(), + key: meta_type::<(u16, u32)>(), + hashers: vec![ + StorageHasher::Twox64Concat, + StorageHasher::Blake2_128Concat, + ], + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "NMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "NMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::<(u16, u32)>(), + hashers: vec![ + StorageHasher::Twox64Concat, + StorageHasher::Blake2_128Concat, + ], + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::(), + value: meta_type::(), + hashers: vec![StorageHasher::Twox64Concat], + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalDoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + value: meta_type::(), + key: meta_type::<(u8, u16)>(), + hashers: 
vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { + name: "ConditionalNMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: meta_type::<(u8, u16)>(), + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + ], + }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), + constants: vec![ + PalletConstantMetadata { + name: "MyGetParam", + ty: meta_type::(), + value: vec![10, 0, 0, 0], + docs: vec![" Some comment", " Some comment"], + }, + PalletConstantMetadata { + name: "MyGetParam2", + ty: meta_type::(), + value: vec![11, 0, 0, 0], + docs: vec![" Some comment", " Some comment"], + }, + PalletConstantMetadata { + name: "MyGetParam3", + ty: meta_type::(), + value: vec![12, 0, 0, 0, 0, 0, 0, 0], + docs: vec![], + }, + PalletConstantMetadata { + name: "some_extra", + ty: meta_type::(), + value: vec![100, 0, 0, 0, 0, 0, 0, 0], + docs: vec![" Some doc", " Some doc"], + }, + PalletConstantMetadata { + name: "some_extra_extra", + ty: meta_type::(), + value: vec![0, 0, 0, 0, 0, 0, 0, 0], + docs: vec![" Some doc"], + }, + ], + error: Some(PalletErrorMetadata { ty: meta_type::>() }), + }, + PalletMetadata { + index: 2, + name: "Example2", + storage: Some(PalletStorageMetadata { + prefix: "Example2", + entries: vec![StorageEntryMetadata { + name: "SomeValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(meta_type::>()), + default: vec![0], + docs: vec![], + }], + }), + calls: Some(meta_type::>().into()), + event: Some(PalletEventMetadata { ty: meta_type::() }), + constants: vec![], + error: None, + }, + ]; + + let extrinsic = ExtrinsicMetadata { + ty: meta_type::(), + version: 4, + signed_extensions: vec![SignedExtensionMetadata { + identifier: 
"UnitSignedExtension", + ty: meta_type::<()>(), + additional_signed: meta_type::<()>(), + }], }; - let metadata = match Runtime::metadata().1 { - RuntimeMetadata::V13(metadata) => metadata, - _ => panic!("metadata has been bump, test needs to be updated"), + let expected_metadata: RuntimeMetadataPrefixed = + RuntimeMetadataLastVersion::new(pallets, extrinsic, meta_type::()).into(); + let expected_metadata = match expected_metadata.1 { + RuntimeMetadata::V14(metadata) => metadata, + _ => panic!("metadata has been bumped, test needs to be updated"), }; - let modules_metadata = match metadata.modules { - DecodeDifferent::Encode(modules_metadata) => modules_metadata, - _ => unreachable!(), + let actual_metadata = match Runtime::metadata().1 { + RuntimeMetadata::V14(metadata) => metadata, + _ => panic!("metadata has been bumped, test needs to be updated"), }; - let pallet_metadata = ModuleMetadata::decode(&mut &modules_metadata[1].encode()[..]).unwrap(); - - pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); + pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); } #[test] diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 9814fcb392b50..4523063252ab9 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -99,7 +99,7 @@ mod pallet_old { #[frame_support::pallet] pub mod pallet { use super::SomeAssociation; - use frame_support::pallet_prelude::*; + use frame_support::{pallet_prelude::*, scale_info}; use frame_system::{ensure_root, pallet_prelude::*}; #[pallet::config] @@ -110,7 +110,8 @@ pub mod pallet { + Into + Default + MaybeSerializeDeserialize - + SomeAssociation; + + SomeAssociation + + scale_info::StaticTypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; @@ -155,7 +156,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(fn 
deposit_event)] - #[pallet::metadata(T::Balance = "Balance")] pub enum Event { /// Dummy event, just here so there's a generic type that's used. Dummy(T::Balance), @@ -279,22 +279,71 @@ frame_support::construct_runtime!( mod test { use super::{pallet, pallet_old, Runtime}; use codec::{Decode, Encode}; + use scale_info::{form::PortableForm, Variant}; #[test] fn metadata() { let metadata = Runtime::metadata(); - let modules = match metadata.1 { - frame_metadata::RuntimeMetadata::V13(frame_metadata::RuntimeMetadataV13 { - modules: frame_metadata::DecodeDifferent::Encode(m), - .. - }) => m, + let (pallets, types) = match metadata.1 { + frame_support::metadata::RuntimeMetadata::V14(metadata) => + (metadata.pallets, metadata.types), _ => unreachable!(), }; - pretty_assertions::assert_eq!(modules[1].storage, modules[2].storage); - pretty_assertions::assert_eq!(modules[1].calls, modules[2].calls); - pretty_assertions::assert_eq!(modules[1].event, modules[2].event); - pretty_assertions::assert_eq!(modules[1].constants, modules[2].constants); - pretty_assertions::assert_eq!(modules[1].errors, modules[2].errors); + + let assert_meta_types = |ty_id1, ty_id2| { + let ty1 = types.resolve(ty_id1).map(|ty| ty.type_def()); + let ty2 = types.resolve(ty_id2).map(|ty| ty.type_def()); + pretty_assertions::assert_eq!(ty1, ty2); + }; + + let get_enum_variants = |ty_id| match types.resolve(ty_id).map(|ty| ty.type_def()) { + Some(ty) => match ty { + scale_info::TypeDef::Variant(var) => var.variants(), + _ => panic!("Expected variant type"), + }, + _ => panic!("No type found"), + }; + + let assert_enum_variants = |vs1: &[Variant], + vs2: &[Variant]| { + assert_eq!(vs1.len(), vs2.len()); + for i in 0..vs1.len() { + let v1 = &vs1[i]; + let v2 = &vs2[i]; + assert_eq!(v1.fields().len(), v2.fields().len()); + for f in 0..v1.fields().len() { + let f1 = &v1.fields()[f]; + let f2 = &v2.fields()[f]; + pretty_assertions::assert_eq!(f1.name(), f2.name()); + pretty_assertions::assert_eq!(f1.ty(), 
f2.ty()); + } + } + }; + + pretty_assertions::assert_eq!(pallets[1].storage, pallets[2].storage); + + let calls1 = pallets[1].calls.as_ref().unwrap(); + let calls2 = pallets[2].calls.as_ref().unwrap(); + assert_meta_types(calls1.ty.id(), calls2.ty.id()); + + // event: check variants and fields but ignore the type name which will be different + let event1_variants = get_enum_variants(pallets[1].event.as_ref().unwrap().ty.id()); + let event2_variants = get_enum_variants(pallets[2].event.as_ref().unwrap().ty.id()); + assert_enum_variants(event1_variants, event2_variants); + + let err1 = get_enum_variants(pallets[1].error.as_ref().unwrap().ty.id()) + .iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + let err2 = get_enum_variants(pallets[2].error.as_ref().unwrap().ty.id()) + .iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + assert_enum_variants(&err1, &err2); + + pretty_assertions::assert_eq!(pallets[1].constants, pallets[2].constants); } #[test] @@ -309,10 +358,10 @@ mod test { assert_eq!( pallet_old::Call::::decode( - &mut &pallet::Call::::set_dummy(10).encode()[..] + &mut &pallet::Call::::set_dummy { new_value: 10 }.encode()[..] 
) .unwrap(), - pallet_old::Call::::set_dummy(10), + pallet_old::Call::::set_dummy { new_value: 10 }, ); } } diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index b8d43b5e32bfb..768b9f28d35f3 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -85,7 +85,7 @@ mod pallet_old { #[frame_support::pallet] pub mod pallet { - use frame_support::pallet_prelude::*; + use frame_support::{pallet_prelude::*, scale_info}; use frame_system::{ensure_root, pallet_prelude::*}; #[pallet::config] @@ -95,7 +95,8 @@ pub mod pallet { + From + Into + Default - + MaybeSerializeDeserialize; + + MaybeSerializeDeserialize + + scale_info::StaticTypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; @@ -140,7 +141,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(fn deposit_event)] - #[pallet::metadata(T::Balance = "Balance")] pub enum Event, I: 'static = ()> { /// Dummy event, just here so there's a generic type that's used. Dummy(T::Balance), @@ -282,23 +282,66 @@ frame_support::construct_runtime!( mod test { use super::{pallet, pallet_old, Runtime}; use codec::{Decode, Encode}; + use scale_info::{form::PortableForm, Variant}; #[test] fn metadata() { let metadata = Runtime::metadata(); - let modules = match metadata.1 { - frame_metadata::RuntimeMetadata::V13(frame_metadata::RuntimeMetadataV13 { - modules: frame_metadata::DecodeDifferent::Encode(m), - .. 
- }) => m, + let (pallets, types) = match metadata.1 { + frame_support::metadata::RuntimeMetadata::V14(metadata) => + (metadata.pallets, metadata.types), _ => unreachable!(), }; + + let get_enum_variants = |ty_id| match types.resolve(ty_id).map(|ty| ty.type_def()) { + Some(ty) => match ty { + scale_info::TypeDef::Variant(var) => var.variants(), + _ => panic!("Expected variant type"), + }, + _ => panic!("No type found"), + }; + + let assert_enum_variants = |vs1: &[Variant], + vs2: &[Variant]| { + assert_eq!(vs1.len(), vs2.len()); + for i in 0..vs1.len() { + let v1 = &vs1[i]; + let v2 = &vs2[i]; + assert_eq!(v1.fields().len(), v2.fields().len()); + for f in 0..v1.fields().len() { + let f1 = &v1.fields()[f]; + let f2 = &v2.fields()[f]; + pretty_assertions::assert_eq!(f1.name(), f2.name()); + pretty_assertions::assert_eq!(f1.ty(), f2.ty()); + } + } + }; + for i in vec![1, 3, 5].into_iter() { - pretty_assertions::assert_eq!(modules[i].storage, modules[i + 1].storage); - pretty_assertions::assert_eq!(modules[i].calls, modules[i + 1].calls); - pretty_assertions::assert_eq!(modules[i].event, modules[i + 1].event); - pretty_assertions::assert_eq!(modules[i].constants, modules[i + 1].constants); - pretty_assertions::assert_eq!(modules[i].errors, modules[i + 1].errors); + pretty_assertions::assert_eq!(pallets[i].storage, pallets[i + 1].storage); + + let call1_variants = get_enum_variants(pallets[i].calls.as_ref().unwrap().ty.id()); + let call2_variants = get_enum_variants(pallets[i + 1].calls.as_ref().unwrap().ty.id()); + assert_enum_variants(call1_variants, call2_variants); + + // event: check variants and fields but ignore the type name which will be different + let event1_variants = get_enum_variants(pallets[i].event.as_ref().unwrap().ty.id()); + let event2_variants = get_enum_variants(pallets[i + 1].event.as_ref().unwrap().ty.id()); + assert_enum_variants(event1_variants, event2_variants); + + let err1 = get_enum_variants(pallets[i].error.as_ref().unwrap().ty.id()) + 
.iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + let err2 = get_enum_variants(pallets[i + 1].error.as_ref().unwrap().ty.id()) + .iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + assert_enum_variants(&err1, &err2); + + pretty_assertions::assert_eq!(pallets[i].constants, pallets[i + 1].constants); } } @@ -314,10 +357,10 @@ mod test { assert_eq!( pallet_old::Call::::decode( - &mut &pallet::Call::::set_dummy(10).encode()[..] + &mut &pallet::Call::::set_dummy { new_value: 10 }.encode()[..] ) .unwrap(), - pallet_old::Call::::set_dummy(10), + pallet_old::Call::::set_dummy { new_value: 10 }, ); } } diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index adfbc7a64f0ed..34586e8414216 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -29,7 +29,7 @@ use sp_runtime::DispatchError; #[frame_support::pallet] pub mod pallet { - use frame_support::pallet_prelude::*; + use frame_support::{pallet_prelude::*, scale_info}; use frame_system::pallet_prelude::*; use sp_std::any::TypeId; @@ -39,7 +39,7 @@ pub mod pallet { pub trait Config: frame_system::Config { #[pallet::constant] type MyGetParam: Get; - type Balance: Parameter + Default; + type Balance: Parameter + Default + scale_info::StaticTypeInfo; type Event: From> + IsType<::Event>; } @@ -109,7 +109,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] #[pallet::generate_deposit(fn deposit_event)] pub enum Event, I: 'static = ()> { /// doc comment put in metadata @@ -157,7 +156,16 @@ pub mod pallet { } #[pallet::origin] - #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode)] + #[derive( + EqNoBound, + RuntimeDebugNoBound, + CloneNoBound, + PartialEqNoBound, + Encode, + Decode, + scale_info::TypeInfo, + )] + #[scale_info(skip_type_params(T, I))] pub struct Origin(PhantomData<(T, I)>); 
#[pallet::validate_unsigned] @@ -306,7 +314,7 @@ frame_support::construct_runtime!( #[test] fn call_expand() { - let call_foo = pallet::Call::::foo(3); + let call_foo = pallet::Call::::foo { foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } @@ -314,7 +322,7 @@ fn call_expand() { assert_eq!(call_foo.get_call_name(), "foo"); assert_eq!(pallet::Call::::get_call_names(), &["foo", "foo_transactional"]); - let call_foo = pallet::Call::::foo(3); + let call_foo = pallet::Call::::foo { foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } @@ -369,7 +377,9 @@ fn instance_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo(3).dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo { foo: 3 } + .dispatch_bypass_filter(None.into()) + .unwrap(); assert_eq!( frame_system::Pallet::::events()[0].event, Event::Example(pallet::Event::Something(3)), @@ -378,7 +388,7 @@ fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo(3) + pallet::Call::::foo { foo: 3 } .dispatch_bypass_filter(None.into()) .unwrap(); assert_eq!( @@ -544,183 +554,207 @@ fn pallet_on_genesis() { #[test] fn metadata() { - use codec::{Decode, Encode}; - use frame_metadata::*; + use frame_support::metadata::*; + + let system_pallet_metadata = PalletMetadata { + index: 0, + name: "System", + storage: None, + calls: Some(scale_info::meta_type::>().into()), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), + constants: vec![ + PalletConstantMetadata { + name: "BlockWeights", + ty: scale_info::meta_type::(), + value: vec![], + docs: vec![], + }, + PalletConstantMetadata { + name: "BlockLength", + ty: scale_info::meta_type::(), + 
value: vec![], + docs: vec![], + }, + PalletConstantMetadata { + name: "BlockHashCount", + ty: scale_info::meta_type::(), + value: vec![], + docs: vec![], + }, + PalletConstantMetadata { + name: "DbWeight", + ty: scale_info::meta_type::(), + value: vec![], + docs: vec![], + }, + PalletConstantMetadata { + name: "Version", + ty: scale_info::meta_type::(), + value: vec![], + docs: vec![], + }, + PalletConstantMetadata { + name: "SS58Prefix", + ty: scale_info::meta_type::(), + value: vec![], + docs: vec![], + }, + ], + error: Some(PalletErrorMetadata { + ty: scale_info::meta_type::>(), + }), + }; - let expected_pallet_metadata = ModuleMetadata { + let example_pallet_metadata = PalletMetadata { index: 1, - name: DecodeDifferent::Decoded("Example".to_string()), - storage: Some(DecodeDifferent::Decoded(StorageMetadata { - prefix: DecodeDifferent::Decoded("Example".to_string()), - entries: DecodeDifferent::Decoded(vec![ + name: "Example", + storage: Some(PalletStorageMetadata { + prefix: "Example", + entries: vec![ StorageEntryMetadata { - name: DecodeDifferent::Decoded("Value".to_string()), + name: "Value", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map".to_string()), + name: "Map", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u8".to_string()), - value: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - unused: false, + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + 
docs: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map2".to_string()), + name: "Map2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u16".to_string()), - value: DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - unused: false, + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hashers: vec![StorageHasher::Twox64Concat], }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + docs: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap".to_string()), + name: "DoubleMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u32".to_string()), - key1: DecodeDifferent::Decoded("u8".to_string()), - key2: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, + ty: StorageEntryType::Map { + value: scale_info::meta_type::(), + key: scale_info::meta_type::<(u8, u16)>(), + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + docs: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap2".to_string()), + name: "DoubleMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u64".to_string()), - key1: DecodeDifferent::Decoded("u16".to_string()), - key2: DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - key2_hasher: StorageHasher::Blake2_128Concat, + ty: StorageEntryType::Map { + value: scale_info::meta_type::(), + key: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], }, - default: 
DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + docs: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("NMap".to_string()), + name: "NMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: DecodeDifferent::Decoded(vec!["u8".to_string()]), - hashers: DecodeDifferent::Decoded(vec![StorageHasher::Blake2_128Concat]), - value: DecodeDifferent::Decoded("u32".to_string()), + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + docs: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("NMap2".to_string()), + name: "NMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: DecodeDifferent::Decoded(vec!["u16".to_string(), "u32".to_string()]), - hashers: DecodeDifferent::Decoded(vec![ - StorageHasher::Twox64Concat, - StorageHasher::Blake2_128Concat, - ]), - value: DecodeDifferent::Decoded("u64".to_string()), + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + docs: vec![], }, - ]), - })), - calls: Some(DecodeDifferent::Decoded(vec![ - FunctionMetadata { - name: DecodeDifferent::Decoded("foo".to_string()), - arguments: DecodeDifferent::Decoded(vec![FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_foo".to_string()), - ty: DecodeDifferent::Decoded("Compact".to_string()), - }]), - documentation: DecodeDifferent::Decoded(vec![ - " Doc comment put in metadata".to_string() - ]), - }, - FunctionMetadata { - name: 
DecodeDifferent::Decoded("foo_transactional".to_string()), - arguments: DecodeDifferent::Decoded(vec![FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_foo".to_string()), - ty: DecodeDifferent::Decoded("Compact".to_string()), - }]), - documentation: DecodeDifferent::Decoded(vec![ - " Doc comment put in metadata".to_string() - ]), - }, - ])), - event: Some(DecodeDifferent::Decoded(vec![ - EventMetadata { - name: DecodeDifferent::Decoded("Proposed".to_string()), - arguments: DecodeDifferent::Decoded(vec![ - "::AccountId".to_string() - ]), - documentation: DecodeDifferent::Decoded(vec![ - " doc comment put in metadata".to_string() - ]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("Spending".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Balance".to_string()]), - documentation: DecodeDifferent::Decoded(vec![" doc".to_string()]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("Something".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Other".to_string()]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ])), - constants: DecodeDifferent::Decoded(vec![ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam".to_string()), - ty: DecodeDifferent::Decoded("u32".to_string()), - value: DecodeDifferent::Decoded(vec![10, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![]), - }]), - errors: DecodeDifferent::Decoded(vec![ErrorMetadata { - name: DecodeDifferent::Decoded("InsufficientProposersBalance".to_string()), - documentation: DecodeDifferent::Decoded(vec![ - " doc comment put into metadata".to_string() - ]), - }]), + ], + }), + calls: Some(scale_info::meta_type::>().into()), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + constants: vec![PalletConstantMetadata { + name: "MyGetParam", + ty: scale_info::meta_type::(), + value: vec![10, 0, 0, 0], + docs: vec![], + }], + error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), }; - let mut 
expected_pallet_instance1_metadata = expected_pallet_metadata.clone(); - expected_pallet_instance1_metadata.name = - DecodeDifferent::Decoded("Instance1Example".to_string()); - expected_pallet_instance1_metadata.index = 2; - match expected_pallet_instance1_metadata.storage { - Some(DecodeDifferent::Decoded(ref mut storage_meta)) => { - storage_meta.prefix = DecodeDifferent::Decoded("Instance1Example".to_string()); + let mut example_pallet_instance1_metadata = example_pallet_metadata.clone(); + example_pallet_instance1_metadata.name = "Instance1Example"; + example_pallet_instance1_metadata.index = 2; + match example_pallet_instance1_metadata.calls { + Some(ref mut calls_meta) => { + calls_meta.ty = scale_info::meta_type::>(); + }, + _ => unreachable!(), + } + match example_pallet_instance1_metadata.event { + Some(ref mut event_meta) => { + event_meta.ty = scale_info::meta_type::>(); + }, + _ => unreachable!(), + } + match example_pallet_instance1_metadata.error { + Some(ref mut error_meta) => { + error_meta.ty = scale_info::meta_type::>(); + }, + _ => unreachable!(), + } + match example_pallet_instance1_metadata.storage { + Some(ref mut storage_meta) => { + storage_meta.prefix = "Instance1Example"; }, _ => unreachable!(), } - let metadata = match Runtime::metadata().1 { - RuntimeMetadata::V13(metadata) => metadata, - _ => panic!("metadata has been bump, test needs to be updated"), + let pallets = + vec![system_pallet_metadata, example_pallet_metadata, example_pallet_instance1_metadata]; + + let extrinsic = ExtrinsicMetadata { + ty: scale_info::meta_type::(), + version: 4, + signed_extensions: vec![SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: scale_info::meta_type::<()>(), + additional_signed: scale_info::meta_type::<()>(), + }], }; - let modules_metadata = match metadata.modules { - DecodeDifferent::Encode(modules_metadata) => modules_metadata, - _ => unreachable!(), + let expected_metadata: RuntimeMetadataPrefixed = + 
RuntimeMetadataLastVersion::new(pallets, extrinsic, scale_info::meta_type::()) + .into(); + let expected_metadata = match expected_metadata.1 { + RuntimeMetadata::V14(metadata) => metadata, + _ => panic!("metadata has been bumped, test needs to be updated"), }; - let pallet_metadata = ModuleMetadata::decode(&mut &modules_metadata[1].encode()[..]).unwrap(); - let pallet_instance1_metadata = - ModuleMetadata::decode(&mut &modules_metadata[2].encode()[..]).unwrap(); + let actual_metadata = match Runtime::metadata().1 { + RuntimeMetadata::V14(metadata) => metadata, + _ => panic!("metadata has been bumped, test needs to be updated"), + }; - pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); - pretty_assertions::assert_eq!(pallet_instance1_metadata, expected_pallet_instance1_metadata); + pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); + pretty_assertions::assert_eq!(actual_metadata.pallets[2], expected_metadata.pallets[2]); } #[test] diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs index 0f58187f73ebe..ee9d692eba9b3 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs @@ -5,7 +5,7 @@ mod pallet { #[pallet::config] pub trait Config: frame_system::Config { - type Bar: codec::Codec; + type Bar: codec::Codec + scale_info::TypeInfo; } #[pallet::pallet] diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr index d32d8ada7a11a..d1b040c16091f 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr @@ -1,26 +1,26 @@ error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` - --> $DIR/call_argument_invalid_bound.rs:20:41 + 
--> $DIR/call_argument_invalid_bound.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` + | ^^^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` | = help: the trait `std::fmt::Debug` is not implemented for `::Bar` = note: required because of the requirements on the impl of `std::fmt::Debug` for `&::Bar` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `::Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound.rs:20:41 + --> $DIR/call_argument_invalid_bound.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `Clone` is not implemented for `::Bar` + | ^^^ the trait `Clone` is not implemented for `::Bar` | = note: required by `clone` error[E0369]: binary operation `==` cannot be applied to type `&::Bar` - --> $DIR/call_argument_invalid_bound.rs:20:41 + --> $DIR/call_argument_invalid_bound.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ + | ^^^ | help: consider further restricting this bound | diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs index da87046822eb7..d981b55c48620 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs @@ -5,7 +5,7 @@ mod pallet { #[pallet::config] pub trait Config: frame_system::Config { - type Bar; + type Bar: scale_info::TypeInfo; } #[pallet::pallet] diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr index 0e1ebbf525257..84d4863672957 100644 --- 
a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr @@ -1,26 +1,26 @@ error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` + | ^^^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` | = help: the trait `std::fmt::Debug` is not implemented for `::Bar` = note: required because of the requirements on the impl of `std::fmt::Debug` for `&::Bar` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `::Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `Clone` is not implemented for `::Bar` + | ^^^ the trait `Clone` is not implemented for `::Bar` | = note: required by `clone` error[E0369]: binary operation `==` cannot be applied to type `&::Bar` - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ + | ^^^ | help: consider further restricting this bound | @@ -28,23 +28,23 @@ help: consider further restricting this bound | ^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `::Bar: WrapperTypeEncode` is not satisfied - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `WrapperTypeEncode` is not implemented for `::Bar` + | ^^^ the trait `WrapperTypeEncode` is not 
implemented for `::Bar` | ::: $CARGO/parity-scale-codec-2.2.0/src/codec.rs | | fn encode_to(&self, dest: &mut T) { | ------ required by this bound in `encode_to` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `::Bar` + = note: required because of the requirements on the impl of `Encode` for `::Bar` error[E0277]: the trait bound `::Bar: WrapperTypeDecode` is not satisfied - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `WrapperTypeDecode` is not implemented for `::Bar` + | ^^^ the trait `WrapperTypeDecode` is not implemented for `::Bar` | ::: $CARGO/parity-scale-codec-2.2.0/src/codec.rs | diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs index 4a6a781ff44a7..e7f99d7ca4f2d 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs @@ -13,7 +13,7 @@ mod pallet { #[pallet::hooks] impl Hooks> for Pallet {} - #[derive(Encode, Decode)] + #[derive(Encode, Decode, scale_info::TypeInfo)] struct Bar; #[pallet::call] diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr index b6f4494033f7b..73513907e85f3 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr @@ -1,8 +1,8 @@ error[E0277]: `Bar` doesn't implement `std::fmt::Debug` - --> $DIR/call_argument_invalid_bound_3.rs:22:41 + --> $DIR/call_argument_invalid_bound_3.rs:22:36 | 22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { - | ^^^ `Bar` cannot be formatted using `{:?}` + | ^^^ `Bar` 
cannot be formatted using `{:?}` | = help: the trait `std::fmt::Debug` is not implemented for `Bar` = note: add `#[derive(Debug)]` or manually implement `std::fmt::Debug` @@ -10,17 +10,17 @@ error[E0277]: `Bar` doesn't implement `std::fmt::Debug` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound_3.rs:22:41 + --> $DIR/call_argument_invalid_bound_3.rs:22:36 | 22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { - | ^^^ the trait `Clone` is not implemented for `Bar` + | ^^^ the trait `Clone` is not implemented for `Bar` | = note: required by `clone` error[E0369]: binary operation `==` cannot be applied to type `&Bar` - --> $DIR/call_argument_invalid_bound_3.rs:22:41 + --> $DIR/call_argument_invalid_bound_3.rs:22:36 | 22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { - | ^^^ + | ^^^ | = note: an implementation of `std::cmp::PartialEq` might be missing for `&Bar` diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr index aff86e333457c..e78eb7ff9537b 100644 --- a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr @@ -1,3 +1,13 @@ +error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` + 
error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 | @@ -6,8 +16,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied | = note: required because of the requirements on the impl of `Decode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 @@ -17,8 +27,8 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 @@ -26,11 +36,11 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 20 | #[pallet::storage] | ^^^^^^^ the trait `WrapperTypeEncode` is 
not implemented for `Bar` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 @@ -60,7 +70,7 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 9 | #[pallet::pallet] | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr index 2f4876554aa54..d9a7ddbf3443e 100644 --- 
a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr @@ -1,3 +1,13 @@ +error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` + error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 | @@ -6,8 +16,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied | = note: required because of the requirements on the impl of `Decode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 @@ -17,8 +27,8 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - 
= note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 @@ -26,11 +36,11 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 20 | #[pallet::storage] | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 @@ -60,7 +70,7 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 9 | #[pallet::pallet] | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | - = note: required because of the 
requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs index 569e59ef6ec27..76e3566100640 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs @@ -16,7 +16,7 @@ mod pallet { #[pallet::call] impl Pallet {} - #[derive(codec::Encode, codec::Decode)] + #[derive(codec::Encode, codec::Decode, scale_info::TypeInfo)] struct Bar; #[pallet::storage] diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs index 3d03099c3c4b6..c5d773d716116 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs @@ -16,7 +16,7 @@ mod pallet { #[pallet::call] impl Pallet {} - #[derive(codec::Encode, codec::Decode)] + #[derive(codec::Encode, codec::Decode, scale_info::TypeInfo)] struct Bar; #[pallet::storage] diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr index 6c92423c6a7fe..545520124bfee 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr @@ -4,6 +4,6 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied 
10 | #[pallet::generate_storage_info] | ^^^^^^^^^^^^^^^^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `Key` - = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, Key, u32>` + = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `NMapKey` + = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, NMapKey, u32>` = note: required by `storage_info` diff --git a/frame/support/test/tests/system.rs b/frame/support/test/tests/system.rs index 0419326299264..4acc248d25f20 100644 --- a/frame/support/test/tests/system.rs +++ b/frame/support/test/tests/system.rs @@ -26,9 +26,9 @@ pub trait Config: 'static + Eq + Clone { + From>; type BaseCallFilter: frame_support::traits::Contains; - type BlockNumber: Decode + Encode + EncodeLike + Clone + Default; + type BlockNumber: Decode + Encode + EncodeLike + Clone + Default + scale_info::TypeInfo; type Hash; - type AccountId: Encode + EncodeLike + Decode; + type AccountId: Encode + EncodeLike + Decode + scale_info::TypeInfo; type Call; type Event: From>; type PalletInfo: frame_support::traits::PalletInfo; @@ -68,7 +68,7 @@ frame_support::decl_error! { } /// Origin for the system module. 
-#[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub enum RawOrigin { Root, Signed(AccountId), diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index d6e34de2a0824..389730107b439 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } @@ -33,6 +34,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index d7e4e2641d393..29bcccfd7d830 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } @@ -28,6 +29,7 @@ sp-io = { version = "4.0.0-dev", path = "../../../primitives/io" } default = ["std"] std = [ 
"codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", "frame-benchmarking/std", diff --git a/frame/system/src/extensions/check_genesis.rs b/frame/system/src/extensions/check_genesis.rs index 4f561f17c3564..6f409d5d3d4ad 100644 --- a/frame/system/src/extensions/check_genesis.rs +++ b/frame/system/src/extensions/check_genesis.rs @@ -17,13 +17,15 @@ use crate::{Config, Pallet}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{SignedExtension, Zero}, transaction_validity::TransactionValidityError, }; /// Genesis hash check to provide replay protection between different networks. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct CheckGenesis(sp_std::marker::PhantomData); impl sp_std::fmt::Debug for CheckGenesis { diff --git a/frame/system/src/extensions/check_mortality.rs b/frame/system/src/extensions/check_mortality.rs index 6596939eb9d68..69cca765efea9 100644 --- a/frame/system/src/extensions/check_mortality.rs +++ b/frame/system/src/extensions/check_mortality.rs @@ -17,6 +17,7 @@ use crate::{BlockHash, Config, Pallet}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::{ generic::Era, traits::{DispatchInfoOf, SaturatedConversion, SignedExtension}, @@ -26,7 +27,8 @@ use sp_runtime::{ }; /// Check for transaction mortality. 
-#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct CheckMortality(Era, sp_std::marker::PhantomData); impl CheckMortality { diff --git a/frame/system/src/extensions/check_nonce.rs b/frame/system/src/extensions/check_nonce.rs index 6eaa9f9e02a4b..081a0efa3db71 100644 --- a/frame/system/src/extensions/check_nonce.rs +++ b/frame/system/src/extensions/check_nonce.rs @@ -18,6 +18,7 @@ use crate::Config; use codec::{Decode, Encode}; use frame_support::weights::DispatchInfo; +use scale_info::TypeInfo; use sp_runtime::{ traits::{DispatchInfoOf, Dispatchable, One, SignedExtension}, transaction_validity::{ @@ -31,7 +32,8 @@ use sp_std::vec; /// /// Note that this does not set any priority by default. Make sure that AT LEAST one of the signed /// extension sets some kind of priority upon validating transactions. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct CheckNonce(#[codec(compact)] T::Index); impl CheckNonce { diff --git a/frame/system/src/extensions/check_spec_version.rs b/frame/system/src/extensions/check_spec_version.rs index 7f5629fefa924..0217aefae6b9d 100644 --- a/frame/system/src/extensions/check_spec_version.rs +++ b/frame/system/src/extensions/check_spec_version.rs @@ -17,10 +17,12 @@ use crate::{Config, Pallet}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::{traits::SignedExtension, transaction_validity::TransactionValidityError}; /// Ensure the runtime version registered in the transaction is the same as at present. 
-#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct CheckSpecVersion(sp_std::marker::PhantomData); impl sp_std::fmt::Debug for CheckSpecVersion { diff --git a/frame/system/src/extensions/check_tx_version.rs b/frame/system/src/extensions/check_tx_version.rs index badf0292601b6..9418d3ff5d937 100644 --- a/frame/system/src/extensions/check_tx_version.rs +++ b/frame/system/src/extensions/check_tx_version.rs @@ -17,10 +17,12 @@ use crate::{Config, Pallet}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::{traits::SignedExtension, transaction_validity::TransactionValidityError}; /// Ensure the transaction version registered in the transaction is the same as at present. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct CheckTxVersion(sp_std::marker::PhantomData); impl sp_std::fmt::Debug for CheckTxVersion { diff --git a/frame/system/src/extensions/check_weight.rs b/frame/system/src/extensions/check_weight.rs index 1e7ad9454b4c9..92dc7382fa2d5 100644 --- a/frame/system/src/extensions/check_weight.rs +++ b/frame/system/src/extensions/check_weight.rs @@ -21,6 +21,7 @@ use frame_support::{ traits::Get, weights::{priority::FrameTransactionPriority, DispatchClass, DispatchInfo, PostDispatchInfo}, }; +use scale_info::TypeInfo; use sp_runtime::{ traits::{DispatchInfoOf, Dispatchable, PostDispatchInfoOf, SignedExtension}, transaction_validity::{ @@ -31,7 +32,8 @@ use sp_runtime::{ }; /// Block resource (weight) limit check. 
-#[derive(Encode, Decode, Clone, Eq, PartialEq, Default)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct CheckWeight(sp_std::marker::PhantomData); impl CheckWeight diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 3d89e09a25a7e..2e7f26eef16f4 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -94,6 +94,7 @@ use frame_support::{ }, Parameter, }; +use scale_info::TypeInfo; use sp_core::{storage::well_known_keys, ChangesTrieConfiguration}; #[cfg(feature = "std")] @@ -204,7 +205,8 @@ pub mod pallet { + sp_std::hash::Hash + sp_std::str::FromStr + MaybeMallocSizeOf - + MaxEncodedLen; + + MaxEncodedLen + + TypeInfo; /// The output of the `Hashing` function. type Hash: Parameter @@ -224,7 +226,7 @@ pub mod pallet { + MaxEncodedLen; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). - type Hashing: Hash; + type Hashing: Hash + TypeInfo; /// The user account identifier type for the runtime. type AccountId: Parameter @@ -276,7 +278,7 @@ pub mod pallet { /// Data to be associated with an account (other than nonce/transaction counter, which this /// pallet does regardless). - type AccountData: Member + FullCodec + Clone + Default; + type AccountData: Member + FullCodec + Clone + Default + TypeInfo; /// Handler for when a new account has just been created. type OnNewAccount: OnNewAccount; @@ -522,7 +524,6 @@ pub mod pallet { /// Event for the System pallet. #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId", T::Hash = "Hash")] pub enum Event { /// An extrinsic completed successfully. \[info\] ExtrinsicSuccess(DispatchInfo), @@ -763,7 +764,7 @@ pub type Key = Vec; pub type KeyValue = (Vec, Vec); /// A phase of a block's execution. -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] pub enum Phase { /// Applying an extrinsic. 
@@ -781,7 +782,7 @@ impl Default for Phase { } /// Record of an event happening. -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] pub struct EventRecord { /// The phase of the block it happened in. @@ -793,7 +794,7 @@ pub struct EventRecord { } /// Origin for the System pallet. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, TypeInfo)] pub enum RawOrigin { /// The system itself ordained this dispatch to happen: this is the highest privilege level. Root, @@ -833,7 +834,7 @@ type EventIndex = u32; pub type RefCount = u32; /// Information of an account. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct AccountInfo { /// The number of transactions this account has sent. pub nonce: Index, @@ -853,7 +854,7 @@ pub struct AccountInfo { /// Stores the `spec_version` and `spec_name` of when the last runtime upgrade /// happened. -#[derive(sp_runtime::RuntimeDebug, Encode, Decode)] +#[derive(sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "std", derive(PartialEq))] pub struct LastRuntimeUpgradeInfo { pub spec_version: codec::Compact, diff --git a/frame/system/src/limits.rs b/frame/system/src/limits.rs index 74ffc828314b8..687fb6f3dd367 100644 --- a/frame/system/src/limits.rs +++ b/frame/system/src/limits.rs @@ -26,10 +26,11 @@ //! which should be passed to `frame_system` configuration when runtime is being set up. use frame_support::weights::{constants, DispatchClass, OneOrMany, PerDispatchClass, Weight}; +use scale_info::TypeInfo; use sp_runtime::{Perbill, RuntimeDebug}; /// Block length limit configuration. 
-#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, TypeInfo)] pub struct BlockLength { /// Maximal total length in bytes for each extrinsic class. /// @@ -91,7 +92,7 @@ pub type ValidationResult = Result; const DEFAULT_NORMAL_RATIO: Perbill = Perbill::from_percent(75); /// `DispatchClass`-specific weight configuration. -#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, TypeInfo)] pub struct WeightsPerClass { /// Base weight of single extrinsic of given class. pub base_extrinsic: Weight, @@ -191,7 +192,7 @@ pub struct WeightsPerClass { /// /// As a consequence of `reserved` space, total consumed block weight might exceed `max_block` /// value, so this parameter should rather be thought of as "target block weight" than a hard limit. -#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, TypeInfo)] pub struct BlockWeights { /// Base weight of block execution. pub base_block: Weight, diff --git a/frame/system/src/mock.rs b/frame/system/src/mock.rs index 8039b73445ae6..9dd35691cab84 100644 --- a/frame/system/src/mock.rs +++ b/frame/system/src/mock.rs @@ -116,7 +116,8 @@ impl Config for Test { pub type SysEvent = frame_system::Event; /// A simple call, which one doesn't matter. -pub const CALL: &::Call = &Call::System(frame_system::Call::set_heap_pages(0u64)); +pub const CALL: &::Call = + &Call::System(frame_system::Call::set_heap_pages { pages: 0u64 }); /// Create new externalities for `System` module tests. 
pub fn new_test_ext() -> sp_io::TestExternalities { diff --git a/frame/system/src/offchain.rs b/frame/system/src/offchain.rs index e9f3d82ea3c28..ed758a2556b77 100644 --- a/frame/system/src/offchain.rs +++ b/frame/system/src/offchain.rs @@ -453,10 +453,11 @@ pub trait SigningTypes: crate::Config { + IdentifyAccount + core::fmt::Debug + codec::Codec - + Ord; + + Ord + + scale_info::TypeInfo; /// A matching `Signature` type. - type Signature: Clone + PartialEq + core::fmt::Debug + codec::Codec; + type Signature: Clone + PartialEq + core::fmt::Debug + codec::Codec + scale_info::TypeInfo; } /// A definition of types required to submit transactions from within the runtime. diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index cdf31b1e7ae2d..1c95c4782b5c4 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } @@ -35,6 +36,7 @@ default = ["std"] std = [ "sp-inherents/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-benchmarking/std", diff --git a/frame/timestamp/src/lib.rs b/frame/timestamp/src/lib.rs index a1ff8d37ff881..153606bedbacf 100644 --- a/frame/timestamp/src/lib.rs +++ b/frame/timestamp/src/lib.rs @@ -120,7 +120,8 @@ pub mod pallet { + AtLeast32Bit + Scale + Copy - + MaxEncodedLen; + + MaxEncodedLen + + scale_info::StaticTypeInfo; /// Something which can be notified when the timestamp is set. 
Set this to `()` if not /// needed. @@ -221,7 +222,7 @@ pub mod pallet { let data = (*inherent_data).saturated_into::(); let next_time = cmp::max(data, Self::now() + T::MinimumPeriod::get()); - Some(Call::set(next_time.into())) + Some(Call::set { now: next_time.into() }) } fn check_inherent( @@ -232,7 +233,7 @@ pub mod pallet { sp_timestamp::Timestamp::new(30 * 1000); let t: u64 = match call { - Call::set(ref t) => t.clone().saturated_into::(), + Call::set { ref now } => now.clone().saturated_into::(), _ => return Ok(()), }; @@ -252,7 +253,7 @@ pub mod pallet { } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::set(_)) + matches!(call, Call::set { .. }) } } } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index d706552393e6e..8ca395e1c5416 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", features = ["derive"], optional = true } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } @@ -37,6 +38,7 @@ default = ["std"] std = [ "codec/std", "log/std", + "scale-info/std", "serde", "sp-core/std", diff --git a/frame/tips/src/lib.rs b/frame/tips/src/lib.rs index 50abe4684cded..f4a4edb7b3999 100644 --- a/frame/tips/src/lib.rs +++ b/frame/tips/src/lib.rs @@ -83,7 +83,7 @@ pub type NegativeImbalanceOf = pallet_treasury::NegativeImbalanceOf; /// An open tipping "motion". Retains all details of a tip including information on the finder /// and the members who have voted. 
-#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct OpenTip< AccountId: Parameter, Balance: Parameter, @@ -177,7 +177,6 @@ pub mod pallet { pub type Reasons = StorageMap<_, Identity, T::Hash, Vec, OptionQuery>; #[pallet::event] - #[pallet::metadata(T::Hash = "Hash", T::AccountId = "AccountId", BalanceOf = "Balance")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A new tip suggestion has been opened. \[tip_hash\] diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 0bcc422bb8476..546939692bbaf 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true } smallvec = "1.4.1" @@ -36,6 +37,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-core/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index 9e8dbf6cb5d1d..e3a3bccc3d39a 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -48,6 +48,7 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{ @@ -225,7 +226,7 @@ where } /// Storage releases of the pallet. -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, TypeInfo)] enum Releases { /// Original version of the pallet. 
V1Ancient, @@ -522,7 +523,8 @@ where /// Require the transactor pay for themselves and maybe include a tip to gain additional priority /// in the queue. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct ChargeTransactionPayment(#[codec(compact)] BalanceOf); impl ChargeTransactionPayment @@ -714,7 +716,7 @@ mod tests { ); const CALL: &::Call = - &Call::Balances(BalancesCall::transfer(2, 69)); + &Call::Balances(BalancesCall::transfer { dest: 2, value: 69 }); thread_local! { static EXTRINSIC_BASE_WEIGHT: RefCell = RefCell::new(0); @@ -1054,7 +1056,7 @@ mod tests { #[test] fn query_info_works() { - let call = Call::Balances(BalancesCall::transfer(2, 69)); + let call = Call::Balances(BalancesCall::transfer { dest: 2, value: 69 }); let origin = 111111; let extra = (); let xt = TestXt::new(call, Some((origin, extra))); diff --git a/frame/transaction-payment/src/payment.rs b/frame/transaction-payment/src/payment.rs index 832e4d5359a1c..58e6ef63109a3 100644 --- a/frame/transaction-payment/src/payment.rs +++ b/frame/transaction-payment/src/payment.rs @@ -27,7 +27,8 @@ pub trait OnChargeTransaction { + Copy + MaybeSerializeDeserialize + Debug - + Default; + + Default + + scale_info::TypeInfo; type LiquidityInfo: Default; /// Before the transaction is executed the payment of the transaction fees diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index 74f31ffed4b37..a4ebd5cfbc876 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", 
default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } @@ -36,6 +37,7 @@ runtime-benchmarks = ["frame-benchmarking", "hex-literal"] std = [ "serde", "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git a/frame/transaction-storage/src/lib.rs b/frame/transaction-storage/src/lib.rs index 1b751f3b214cd..2fe3c04e0229f 100644 --- a/frame/transaction-storage/src/lib.rs +++ b/frame/transaction-storage/src/lib.rs @@ -57,7 +57,7 @@ pub const DEFAULT_MAX_TRANSACTION_SIZE: u32 = 8 * 1024 * 1024; pub const DEFAULT_MAX_BLOCK_TRANSACTIONS: u32 = 512; /// State data for a stored transaction. -#[derive(Encode, Decode, Clone, sp_runtime::RuntimeDebug, PartialEq, Eq)] +#[derive(Encode, Decode, Clone, sp_runtime::RuntimeDebug, PartialEq, Eq, scale_info::TypeInfo)] pub struct TransactionInfo { /// Chunk trie root. chunk_root: ::Output, @@ -408,7 +408,7 @@ pub mod pallet { let proof = data .get_data::(&Self::INHERENT_IDENTIFIER) .unwrap_or(None); - proof.map(Call::check_proof) + proof.map(|proof| Call::check_proof { proof }) } fn check_inherent( @@ -419,7 +419,7 @@ pub mod pallet { } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::check_proof(_)) + matches!(call, Call::check_proof { .. 
}) } } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index c670cc1e439fc..b2991f3febcad 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -17,6 +17,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", features = ["derive"], optional = true } impl-trait-for-tuples = "0.2.1" @@ -37,6 +38,7 @@ sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } default = ["std"] std = [ "codec/std", + "scale-info/std", "serde", "sp-std/std", "sp-runtime/std", diff --git a/frame/treasury/src/lib.rs b/frame/treasury/src/lib.rs index 965f06731c945..646baa99b99b0 100644 --- a/frame/treasury/src/lib.rs +++ b/frame/treasury/src/lib.rs @@ -63,6 +63,7 @@ mod tests; pub mod weights; use codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{AccountIdConversion, Saturating, StaticLookup, Zero}, @@ -118,7 +119,7 @@ pub type ProposalIndex = u32; /// A spending proposal. #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] -#[derive(Encode, Decode, Clone, PartialEq, Eq, MaxEncodedLen, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, MaxEncodedLen, RuntimeDebug, TypeInfo)] pub struct Proposal { /// The account proposing it. proposer: AccountId, @@ -253,7 +254,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event, I: 'static = ()> { /// New proposal. 
\[proposal_index\] Proposed(ProposalIndex), diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index 32a283fc36d25..4f664ecc2b6a9 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } @@ -30,6 +31,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/uniques/src/benchmarking.rs b/frame/uniques/src/benchmarking.rs index 20ddbb15d5360..5c777dc961e9e 100644 --- a/frame/uniques/src/benchmarking.rs +++ b/frame/uniques/src/benchmarking.rs @@ -286,15 +286,15 @@ benchmarks_instance_pallet! { force_asset_status { let (class, caller, caller_lookup) = create_class::(); let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_asset_status( + let call = Call::::force_asset_status { class, - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - true, - false, - ); + owner: caller_lookup.clone(), + issuer: caller_lookup.clone(), + admin: caller_lookup.clone(), + freezer: caller_lookup.clone(), + free_holding: true, + is_frozen: false, + }; }: { call.dispatch_bypass_filter(origin)? 
} verify { assert_last_event::(Event::AssetStatusChanged(class).into()); diff --git a/frame/uniques/src/lib.rs b/frame/uniques/src/lib.rs index b4a0b9821683a..8c716694051b5 100644 --- a/frame/uniques/src/lib.rs +++ b/frame/uniques/src/lib.rs @@ -190,11 +190,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - T::ClassId = "ClassId", - T::InstanceId = "InstanceId" - )] pub enum Event, I: 'static = ()> { /// An asset class was created. \[ class, creator, owner \] Created(T::ClassId, T::AccountId, T::AccountId), diff --git a/frame/uniques/src/types.rs b/frame/uniques/src/types.rs index ae61b6b5e1fd3..1e4405aa09c84 100644 --- a/frame/uniques/src/types.rs +++ b/frame/uniques/src/types.rs @@ -19,6 +19,7 @@ use super::*; use frame_support::{traits::Get, BoundedVec}; +use scale_info::TypeInfo; pub(super) type DepositBalanceOf = <>::Currency as Currency<::AccountId>>::Balance; @@ -27,7 +28,7 @@ pub(super) type ClassDetailsFor = pub(super) type InstanceDetailsFor = InstanceDetails<::AccountId, DepositBalanceOf>; -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct ClassDetails { /// Can change `owner`, `issuer`, `freezer` and `admin` accounts. pub(super) owner: AccountId, @@ -53,7 +54,7 @@ pub struct ClassDetails { } /// Witness data for the destroy transactions. -#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct DestroyWitness { /// The total number of outstanding instances of this asset class. #[codec(compact)] @@ -77,7 +78,7 @@ impl ClassDetails { } /// Information concerning the ownership of a single unique asset. 
-#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] pub struct InstanceDetails { /// The owner of this asset. pub(super) owner: AccountId, @@ -90,7 +91,8 @@ pub struct InstanceDetails { pub(super) deposit: DepositBalance, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] +#[scale_info(skip_type_params(StringLimit))] pub struct ClassMetadata> { /// The balance deposited for this metadata. /// @@ -104,7 +106,8 @@ pub struct ClassMetadata> { pub(super) is_frozen: bool, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] +#[scale_info(skip_type_params(StringLimit))] pub struct InstanceMetadata> { /// The balance deposited for this metadata. /// diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index d3e2933faf9a9..b5b8eab9cdbf3 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } @@ -31,6 +32,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git a/frame/utility/src/benchmarking.rs b/frame/utility/src/benchmarking.rs index 9fd0184b8fa3d..210a6156499cf 100644 --- 
a/frame/utility/src/benchmarking.rs +++ b/frame/utility/src/benchmarking.rs @@ -34,7 +34,7 @@ benchmarks! { let c in 0 .. 1000; let mut calls: Vec<::Call> = Vec::new(); for i in 0 .. c { - let call = frame_system::Call::remark(vec![]).into(); + let call = frame_system::Call::remark { remark: vec![] }.into(); calls.push(call); } let caller = whitelisted_caller(); @@ -45,7 +45,7 @@ benchmarks! { as_derivative { let caller = account("caller", SEED, SEED); - let call = Box::new(frame_system::Call::remark(vec![]).into()); + let call = Box::new(frame_system::Call::remark { remark: vec![] }.into()); // Whitelist caller account from further DB operations. let caller_key = frame_system::Account::::hashed_key_for(&caller); frame_benchmarking::benchmarking::add_to_whitelist(caller_key.into()); @@ -55,7 +55,7 @@ benchmarks! { let c in 0 .. 1000; let mut calls: Vec<::Call> = Vec::new(); for i in 0 .. c { - let call = frame_system::Call::remark(vec![]).into(); + let call = frame_system::Call::remark { remark: vec![] }.into(); calls.push(call); } let caller = whitelisted_caller(); diff --git a/frame/utility/src/lib.rs b/frame/utility/src/lib.rs index 2e34502494c7d..54de87c4740c8 100644 --- a/frame/utility/src/lib.rs +++ b/frame/utility/src/lib.rs @@ -304,7 +304,7 @@ pub mod pallet { // Don't allow users to nest `batch_all` calls. filtered_origin.add_filter(move |c: &::Call| { let c = ::Call::from_ref(c); - !matches!(c.is_sub_type(), Some(Call::batch_all(_))) + !matches!(c.is_sub_type(), Some(Call::batch_all { .. })) }); call.dispatch(filtered_origin) }; diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index 0a780550f3556..bbfbb417e23d1 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -155,10 +155,10 @@ impl Contains for TestBaseCallFilter { fn contains(c: &Call) -> bool { match *c { // Transfer works. Use `transfer_keep_alive` for a call that doesn't pass the filter. 
- Call::Balances(pallet_balances::Call::transfer(..)) => true, + Call::Balances(pallet_balances::Call::transfer { .. }) => true, Call::Utility(_) => true, // For benchmarking, this acts as a noop call - Call::System(frame_system::Call::remark(..)) => true, + Call::System(frame_system::Call::remark { .. }) => true, // For tests Call::Example(_) => true, _ => false, @@ -189,24 +189,24 @@ pub fn new_test_ext() -> sp_io::TestExternalities { ext } +fn call_transfer(dest: u64, value: u64) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) +} + +fn call_foobar(err: bool, start_weight: u64, end_weight: Option) -> Call { + Call::Example(ExampleCall::foobar { err, start_weight, end_weight }) +} + #[test] fn as_derivative_works() { new_test_ext().execute_with(|| { let sub_1_0 = Utility::derivative_account_id(1, 0); assert_ok!(Balances::transfer(Origin::signed(1), sub_1_0, 5)); assert_err_ignore_postinfo!( - Utility::as_derivative( - Origin::signed(1), - 1, - Box::new(Call::Balances(BalancesCall::transfer(6, 3))), - ), + Utility::as_derivative(Origin::signed(1), 1, Box::new(call_transfer(6, 3)),), BalancesError::::InsufficientBalance ); - assert_ok!(Utility::as_derivative( - Origin::signed(1), - 0, - Box::new(Call::Balances(BalancesCall::transfer(2, 3))), - )); + assert_ok!(Utility::as_derivative(Origin::signed(1), 0, Box::new(call_transfer(2, 3)),)); assert_eq!(Balances::free_balance(sub_1_0), 2); assert_eq!(Balances::free_balance(2), 13); }); @@ -220,16 +220,18 @@ fn as_derivative_handles_weight_refund() { let diff = start_weight - end_weight; // Full weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(false, start_weight, None); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = 
call.dispatch(Origin::signed(1)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(false, start_weight, Some(end_weight)); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -237,8 +239,9 @@ fn as_derivative_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff); // Full weight when err - let inner_call = Call::Example(ExampleCall::foobar(true, start_weight, None)); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(true, start_weight, None); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_noop!( @@ -254,8 +257,9 @@ fn as_derivative_handles_weight_refund() { ); // Refund weight when err - let inner_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(true, start_weight, Some(end_weight)); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_noop!( @@ -279,7 +283,10 @@ fn as_derivative_filters() { Utility::as_derivative( Origin::signed(1), 1, - Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1))), + Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive { + dest: 2, + value: 1 + })), ), 
DispatchError::BadOrigin ); @@ -290,15 +297,16 @@ fn as_derivative_filters() { fn batch_with_root_works() { new_test_ext().execute_with(|| { let k = b"a".to_vec(); - let call = Call::System(frame_system::Call::set_storage(vec![(k.clone(), k.clone())])); + let call = + Call::System(frame_system::Call::set_storage { items: vec![(k.clone(), k.clone())] }); assert!(!TestBaseCallFilter::contains(&call)); assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); assert_ok!(Utility::batch( Origin::root(), vec![ - Call::Balances(BalancesCall::force_transfer(1, 2, 5)), - Call::Balances(BalancesCall::force_transfer(1, 2, 5)), + Call::Balances(BalancesCall::force_transfer { source: 1, dest: 2, value: 5 }), + Call::Balances(BalancesCall::force_transfer { source: 1, dest: 2, value: 5 }), call, // Check filters are correctly bypassed ] )); @@ -315,10 +323,7 @@ fn batch_with_signed_works() { assert_eq!(Balances::free_balance(2), 10); assert_ok!(Utility::batch( Origin::signed(1), - vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 5)) - ] + vec![call_transfer(2, 5), call_transfer(2, 5)] ),); assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::free_balance(2), 20); @@ -330,7 +335,7 @@ fn batch_with_signed_filters() { new_test_ext().execute_with(|| { assert_ok!(Utility::batch( Origin::signed(1), - vec![Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1))] + vec![Call::Balances(pallet_balances::Call::transfer_keep_alive { dest: 2, value: 1 })] ),); System::assert_last_event( utility::Event::BatchInterrupted(0, DispatchError::BadOrigin).into(), @@ -345,11 +350,7 @@ fn batch_early_exit_works() { assert_eq!(Balances::free_balance(2), 10); assert_ok!(Utility::batch( Origin::signed(1), - vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 10)), - Call::Balances(BalancesCall::transfer(2, 5)), - ] + vec![call_transfer(2, 5), call_transfer(2, 10), 
call_transfer(2, 5),] ),); assert_eq!(Balances::free_balance(1), 5); assert_eq!(Balances::free_balance(2), 15); @@ -360,15 +361,13 @@ fn batch_early_exit_works() { fn batch_weight_calculation_doesnt_overflow() { use sp_runtime::Perbill; new_test_ext().execute_with(|| { - let big_call = Call::System(SystemCall::fill_block(Perbill::from_percent(50))); + let big_call = Call::System(SystemCall::fill_block { ratio: Perbill::from_percent(50) }); assert_eq!(big_call.get_dispatch_info().weight, Weight::max_value() / 2); // 3 * 50% saturates to 100% - let batch_call = Call::Utility(crate::Call::batch(vec![ - big_call.clone(), - big_call.clone(), - big_call.clone(), - ])); + let batch_call = Call::Utility(crate::Call::batch { + calls: vec![big_call.clone(), big_call.clone(), big_call.clone()], + }); assert_eq!(batch_call.get_dispatch_info().weight, Weight::max_value()); }); @@ -383,18 +382,18 @@ fn batch_handles_weight_refund() { let batch_len: Weight = 4; // Full weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); + let inner_call = call_foobar(false, start_weight, None); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); + let inner_call = call_foobar(false, start_weight, Some(end_weight)); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -402,10 +401,10 @@ fn 
batch_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Full weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, None)); + let good_call = call_foobar(false, start_weight, None); + let bad_call = call_foobar(true, start_weight, None); let batch_calls = vec![good_call, bad_call]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -416,11 +415,11 @@ fn batch_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = vec![good_call, bad_call]; let batch_len = batch_calls.len() as Weight; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -430,10 +429,10 @@ fn batch_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Partial batch completion - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = 
vec![good_call, bad_call.clone(), bad_call]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -455,10 +454,7 @@ fn batch_all_works() { assert_eq!(Balances::free_balance(2), 10); assert_ok!(Utility::batch_all( Origin::signed(1), - vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 5)) - ] + vec![call_transfer(2, 5), call_transfer(2, 5)] ),); assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::free_balance(2), 20); @@ -468,7 +464,7 @@ fn batch_all_works() { #[test] fn batch_all_revert() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(2, 5)); + let call = call_transfer(2, 5); let info = call.get_dispatch_info(); assert_eq!(Balances::free_balance(1), 10); @@ -476,11 +472,7 @@ fn batch_all_revert() { assert_noop!( Utility::batch_all( Origin::signed(1), - vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 10)), - Call::Balances(BalancesCall::transfer(2, 5)), - ] + vec![call_transfer(2, 5), call_transfer(2, 10), call_transfer(2, 5),] ), DispatchErrorWithPostInfo { post_info: PostDispatchInfo { @@ -506,18 +498,18 @@ fn batch_all_handles_weight_refund() { let batch_len: Weight = 4; // Full weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); + let inner_call = call_foobar(false, start_weight, None); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when ok - let inner_call = 
Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); + let inner_call = call_foobar(false, start_weight, Some(end_weight)); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -525,10 +517,10 @@ fn batch_all_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Full weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, None)); + let good_call = call_foobar(false, start_weight, None); + let bad_call = call_foobar(true, start_weight, None); let batch_calls = vec![good_call, bad_call]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_err_ignore_postinfo!(result, "The cake is a lie."); @@ -536,21 +528,21 @@ fn batch_all_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = vec![good_call, bad_call]; let batch_len = batch_calls.len() as Weight; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = 
call.dispatch(Origin::signed(1)); assert_err_ignore_postinfo!(result, "The cake is a lie."); assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Partial batch completion - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = vec![good_call, bad_call.clone(), bad_call]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_err_ignore_postinfo!(result, "The cake is a lie."); @@ -565,11 +557,9 @@ fn batch_all_handles_weight_refund() { #[test] fn batch_all_does_not_nest() { new_test_ext().execute_with(|| { - let batch_all = Call::Utility(UtilityCall::batch_all(vec![ - Call::Balances(BalancesCall::transfer(2, 1)), - Call::Balances(BalancesCall::transfer(2, 1)), - Call::Balances(BalancesCall::transfer(2, 1)), - ])); + let batch_all = Call::Utility(UtilityCall::batch_all { + calls: vec![call_transfer(2, 1), call_transfer(2, 1), call_transfer(2, 1)], + }); let info = batch_all.get_dispatch_info(); @@ -590,7 +580,7 @@ fn batch_all_does_not_nest() { // And for those who want to get a little fancy, we check that the filter persists across // other kinds of dispatch wrapping functions... in this case // `batch_all(batch(batch_all(..)))` - let batch_nested = Call::Utility(UtilityCall::batch(vec![batch_all])); + let batch_nested = Call::Utility(UtilityCall::batch { calls: vec![batch_all] }); // Batch will end with `Ok`, but does not actually execute as we can see from the event // and balances. 
assert_ok!(Utility::batch_all(Origin::signed(1), vec![batch_nested])); @@ -605,7 +595,7 @@ fn batch_all_does_not_nest() { #[test] fn batch_limit() { new_test_ext().execute_with(|| { - let calls = vec![Call::System(SystemCall::remark(vec![])); 40_000]; + let calls = vec![Call::System(SystemCall::remark { remark: vec![] }); 40_000]; assert_noop!(Utility::batch(Origin::signed(1), calls.clone()), Error::::TooManyCalls); assert_noop!(Utility::batch_all(Origin::signed(1), calls), Error::::TooManyCalls); }); diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index 96af259959c3e..806e0e6036862 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } @@ -32,6 +33,7 @@ pallet-balances = { version = "4.0.0-dev", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/vesting/src/lib.rs b/frame/vesting/src/lib.rs index 7e4a11fbd5c36..27862a5ca4b72 100644 --- a/frame/vesting/src/lib.rs +++ b/frame/vesting/src/lib.rs @@ -65,6 +65,7 @@ use frame_support::{ }; use frame_system::{ensure_root, ensure_signed, pallet_prelude::*}; pub use pallet::*; +use scale_info::TypeInfo; use sp_runtime::{ traits::{ AtLeast32BitUnsigned, Bounded, Convert, MaybeSerializeDeserialize, One, Saturating, @@ -85,7 +86,7 @@ const VESTING_ID: LockIdentifier = *b"vesting "; // A value placed in storage that represents the current version of the Vesting storage. 
// This value is used by `on_runtime_upgrade` to determine whether we run storage migration logic. -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] enum Releases { V0, V1, @@ -271,9 +272,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", BalanceOf = "Balance", T::BlockNumber = "BlockNumber" - )] pub enum Event { /// The amount vested has been updated. This could indicate a change in funds available. /// The balance given is the amount which is left unvested (and thus locked). diff --git a/frame/vesting/src/vesting_info.rs b/frame/vesting/src/vesting_info.rs index 72171910086cd..81bffa199fd72 100644 --- a/frame/vesting/src/vesting_info.rs +++ b/frame/vesting/src/vesting_info.rs @@ -20,7 +20,7 @@ use super::*; /// Struct to encode the vesting schedule of an individual account. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct VestingInfo { /// Locked amount at genesis. 
locked: Balance, diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index dd9d7f22d242f..6849dc25f8561 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,6 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../io" } @@ -27,6 +28,7 @@ std = [ "full_crypto", "sp-core/std", "codec/std", + "scale-info/std", "serde", "sp-std/std", "sp-io/std", diff --git a/primitives/application-crypto/src/lib.rs b/primitives/application-crypto/src/lib.rs index 43a14e29f4ee5..baa6560667059 100644 --- a/primitives/application-crypto/src/lib.rs +++ b/primitives/application-crypto/src/lib.rs @@ -34,6 +34,8 @@ pub use sp_core::{ #[doc(hidden)] pub use codec; #[doc(hidden)] +pub use scale_info; +#[doc(hidden)] #[cfg(feature = "std")] pub use serde; #[doc(hidden)] @@ -224,6 +226,7 @@ macro_rules! app_crypto_public_full_crypto { $crate::codec::Decode, $crate::RuntimeDebug, $crate::codec::MaxEncodedLen, + $crate::scale_info::TypeInfo, )] #[codec(crate = $crate::codec)] pub struct Public($public); @@ -260,6 +263,7 @@ macro_rules! app_crypto_public_not_full_crypto { $crate::codec::Decode, $crate::RuntimeDebug, $crate::codec::MaxEncodedLen, + $crate::scale_info::TypeInfo, )] pub struct Public($public); } @@ -435,6 +439,7 @@ macro_rules! 
app_crypto_signature_full_crypto { $crate::codec::Encode, $crate::codec::Decode, $crate::RuntimeDebug, + $crate::scale_info::TypeInfo, )] #[derive(Hash)] pub struct Signature($sig); @@ -468,6 +473,7 @@ macro_rules! app_crypto_signature_not_full_crypto { #[derive(Clone, Default, Eq, PartialEq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] pub struct Signature($sig); diff --git a/primitives/application-crypto/src/traits.rs b/primitives/application-crypto/src/traits.rs index 2f7fd139c0187..376d12f0c7a3e 100644 --- a/primitives/application-crypto/src/traits.rs +++ b/primitives/application-crypto/src/traits.rs @@ -130,7 +130,7 @@ pub trait RuntimeAppPublic: Sized { const CRYPTO_ID: CryptoTypeId; /// The signature that will be generated when signing with the corresponding private key. - type Signature: Codec + Debug + MaybeHash + Eq + PartialEq + Clone; + type Signature: Codec + Debug + MaybeHash + Eq + PartialEq + Clone + scale_info::TypeInfo; /// Returns all public keys for this application in the keystore. 
fn all() -> crate::Vec; diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 8a97f7ce5042b..abdbd4e60d041 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,6 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } @@ -34,6 +35,7 @@ primitive-types = "0.10.1" default = ["std"] std = [ "codec/std", + "scale-info/std", "num-traits/std", "sp-std/std", "serde", diff --git a/primitives/arithmetic/src/fixed_point.rs b/primitives/arithmetic/src/fixed_point.rs index 1515573b46742..7a81f222c4926 100644 --- a/primitives/arithmetic/src/fixed_point.rs +++ b/primitives/arithmetic/src/fixed_point.rs @@ -363,7 +363,17 @@ macro_rules! implement_fixed { /// A fixed point number representation in the range. #[doc = $title] #[derive( - Encode, Decode, CompactAs, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, + Encode, + Decode, + CompactAs, + Default, + Copy, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + scale_info::TypeInfo, )] pub struct $name($inner_type); diff --git a/primitives/arithmetic/src/per_things.rs b/primitives/arithmetic/src/per_things.rs index f9c048e55b6f7..f388c19de6b43 100644 --- a/primitives/arithmetic/src/per_things.rs +++ b/primitives/arithmetic/src/per_things.rs @@ -425,7 +425,7 @@ macro_rules! 
implement_per_thing { /// #[doc = $title] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] - #[derive(Encode, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug)] + #[derive(Encode, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug, scale_info::TypeInfo)] pub struct $name($type); /// Implementation makes any compact encoding of `PerThing::Inner` valid, diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index c900324d85510..6638e478b4cd7 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } @@ -24,6 +25,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-api/std", "sp-runtime/std" diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 1feb04b5bc574..c228b88fd6570 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path 
= "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } @@ -29,6 +30,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-api/std", "sp-runtime/std", diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 0428d8e222881..5f6bfec219739 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } @@ -34,6 +35,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "merlin/std", "sp-std/std", "sp-api/std", diff --git a/primitives/consensus/babe/src/digests.rs b/primitives/consensus/babe/src/digests.rs index 682894f5837b1..470a028021ca1 100644 --- a/primitives/consensus/babe/src/digests.rs +++ b/primitives/consensus/babe/src/digests.rs @@ -134,7 +134,7 @@ pub struct NextEpochDescriptor { /// Information about the next epoch config, if changed. This is broadcast in the first /// block of the epoch, and applies using the same rules as `NextEpochDescriptor`. -#[derive(Decode, Encode, PartialEq, Eq, Clone, RuntimeDebug)] +#[derive(Decode, Encode, PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] pub enum NextConfigDescriptor { /// Version 1. 
#[codec(index = 1)] diff --git a/primitives/consensus/babe/src/lib.rs b/primitives/consensus/babe/src/lib.rs index 3f2fc7e1f5e6a..4417670f4144b 100644 --- a/primitives/consensus/babe/src/lib.rs +++ b/primitives/consensus/babe/src/lib.rs @@ -29,6 +29,7 @@ pub use sp_consensus_vrf::schnorrkel::{ }; use codec::{Decode, Encode}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; #[cfg(feature = "std")] @@ -213,7 +214,7 @@ pub struct BabeGenesisConfiguration { } /// Types of allowed slots. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug)] +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum AllowedSlots { /// Only allow primary slots. @@ -246,7 +247,7 @@ impl sp_consensus::SlotData for BabeGenesisConfiguration { } /// Configuration data used by the BABE consensus engine. -#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct BabeEpochConfiguration { /// A constant value that is used in the threshold calculation formula. 
diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 2718158cfb7d6..3ad204f973961 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../arithmetic" } @@ -21,6 +22,7 @@ sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../.. default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "sp-arithmetic/std", ] diff --git a/primitives/consensus/slots/src/lib.rs b/primitives/consensus/slots/src/lib.rs index 0b66ac8c9cb6f..89b57dca83082 100644 --- a/primitives/consensus/slots/src/lib.rs +++ b/primitives/consensus/slots/src/lib.rs @@ -20,9 +20,10 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; /// Unit type wrapper that represents a slot. -#[derive(Debug, Encode, MaxEncodedLen, Decode, Eq, Clone, Copy, Default, Ord)] +#[derive(Debug, Encode, MaxEncodedLen, Decode, Eq, Clone, Copy, Default, Ord, TypeInfo)] pub struct Slot(u64); impl core::ops::Deref for Slot { @@ -96,7 +97,7 @@ impl From for u64 { /// produces more than one block on the same slot. The proof of equivocation /// are the given distinct headers that were signed by the validator and which /// include the slot number. -#[derive(Clone, Debug, Decode, Encode, PartialEq)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, TypeInfo)] pub struct EquivocationProof { /// Returns the authority id of the equivocator. 
pub offender: Id, diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index b2c5230975ec4..73c3d454ed584 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -18,11 +18,13 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.126", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } primitive-types = { version = "0.10.1", default-features = false, features = [ "codec", + "scale-info" ] } impl-serde = { version = "0.3.0", optional = true } wasmi = { version = "0.9.0", optional = true } @@ -96,6 +98,7 @@ std = [ "primitive-types/rustc-hex", "impl-serde", "codec/std", + "scale-info/std", "hash256-std-hasher/std", "hash-db/std", "sp-std/std", diff --git a/primitives/core/src/changes_trie.rs b/primitives/core/src/changes_trie.rs index dd99a5f769ce9..f4ce83dc2c877 100644 --- a/primitives/core/src/changes_trie.rs +++ b/primitives/core/src/changes_trie.rs @@ -27,7 +27,7 @@ use serde::{Deserialize, Serialize}; any(feature = "std", test), derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf) )] -#[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode)] +#[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode, scale_info::TypeInfo)] pub struct ChangesTrieConfiguration { /// Interval (in blocks) at which level1-digests are created. Digests are not /// created when this is less or equal to 1. 
diff --git a/primitives/core/src/crypto.rs b/primitives/core/src/crypto.rs index b866639565491..4764a0cac1b14 100644 --- a/primitives/core/src/crypto.rs +++ b/primitives/core/src/crypto.rs @@ -31,6 +31,7 @@ use parking_lot::Mutex; use rand::{rngs::OsRng, RngCore}; #[cfg(feature = "std")] use regex::Regex; +use scale_info::TypeInfo; /// Trait for accessing reference to `SecretString`. pub use secrecy::ExposeSecret; /// A store for sensitive data. @@ -715,7 +716,9 @@ pub trait Public: } /// An opaque 32-byte cryptographic identifier. -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, MaxEncodedLen)] +#[derive( + Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, MaxEncodedLen, TypeInfo, +)] #[cfg_attr(feature = "std", derive(Hash))] pub struct AccountId32([u8; 32]); @@ -1175,6 +1178,7 @@ pub trait CryptoType { Decode, PassByInner, crate::RuntimeDebug, + TypeInfo, )] #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] pub struct KeyTypeId(pub [u8; 4]); diff --git a/primitives/core/src/ecdsa.rs b/primitives/core/src/ecdsa.rs index 147569d52b89f..11e9b9d71d80e 100644 --- a/primitives/core/src/ecdsa.rs +++ b/primitives/core/src/ecdsa.rs @@ -20,6 +20,7 @@ // end::description[] use codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; use sp_runtime_interface::pass_by::PassByInner; use sp_std::cmp::Ordering; @@ -54,7 +55,7 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"ecds"); type Seed = [u8; 32]; /// The ECDSA compressed public key. -#[derive(Clone, Encode, Decode, PassByInner, MaxEncodedLen)] +#[derive(Clone, Encode, Decode, PassByInner, MaxEncodedLen, TypeInfo)] pub struct Public(pub [u8; 33]); impl PartialOrd for Public { @@ -234,7 +235,7 @@ impl sp_std::hash::Hash for Public { } /// A signature (a 512-bit value, plus 8 bits for recovery ID). 
-#[derive(Encode, Decode, PassByInner)] +#[derive(Encode, Decode, PassByInner, TypeInfo)] pub struct Signature(pub [u8; 65]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/core/src/ed25519.rs b/primitives/core/src/ed25519.rs index be70da31e641d..d786ee9d255ff 100644 --- a/primitives/core/src/ed25519.rs +++ b/primitives/core/src/ed25519.rs @@ -24,6 +24,7 @@ use sp_std::vec::Vec; use crate::hash::{H256, H512}; use codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use crate::crypto::Ss58Codec; @@ -57,7 +58,18 @@ type Seed = [u8; 32]; /// A public key. #[cfg_attr(feature = "full_crypto", derive(Hash))] #[derive( - PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, MaxEncodedLen, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Encode, + Decode, + Default, + PassByInner, + MaxEncodedLen, + TypeInfo, )] pub struct Public(pub [u8; 32]); @@ -198,7 +210,7 @@ impl<'de> Deserialize<'de> for Public { } /// A signature (a 512-bit value). -#[derive(Encode, Decode, PassByInner)] +#[derive(Encode, Decode, PassByInner, TypeInfo)] pub struct Signature(pub [u8; 64]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/core/src/lib.rs b/primitives/core/src/lib.rs index 0a61c90d71357..a6229fe43a1a5 100644 --- a/primitives/core/src/lib.rs +++ b/primitives/core/src/lib.rs @@ -33,6 +33,7 @@ macro_rules! map { #[doc(hidden)] pub use codec::{Decode, Encode}; +use scale_info::TypeInfo; #[cfg(feature = "std")] pub use serde; #[cfg(feature = "std")] @@ -191,7 +192,17 @@ impl sp_std::ops::Deref for OpaqueMetadata { /// Simple blob to hold a `PeerId` without committing to its format. 
#[derive( - Default, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, PassByInner, + Default, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Encode, + Decode, + RuntimeDebug, + PassByInner, + TypeInfo, )] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct OpaquePeerId(pub Vec); @@ -414,7 +425,7 @@ pub fn to_substrate_wasm_fn_return_value(value: &impl Encode) -> u64 { /// The void type - it cannot exist. // Oh rust, you crack me up... -#[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub enum Void {} /// Macro for creating `Maybe*` marker traits. diff --git a/primitives/core/src/offchain/mod.rs b/primitives/core/src/offchain/mod.rs index 59c92f540badf..640f4d2583b79 100644 --- a/primitives/core/src/offchain/mod.rs +++ b/primitives/core/src/offchain/mod.rs @@ -19,6 +19,7 @@ use crate::{OpaquePeerId, RuntimeDebug}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; use sp_runtime_interface::pass_by::{PassByCodec, PassByEnum, PassByInner}; use sp_std::{ convert::TryFrom, @@ -186,7 +187,7 @@ impl TryFrom for HttpRequestStatus { /// A blob to hold information about the local node's network state /// without committing to its format. -#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByCodec)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByCodec, TypeInfo)] #[cfg_attr(feature = "std", derive(Default))] pub struct OpaqueNetworkState { /// PeerId of the local node in SCALE encoded. @@ -196,7 +197,7 @@ pub struct OpaqueNetworkState { } /// Simple blob to hold a `Multiaddr` without committing to its format. 
-#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByInner)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByInner, TypeInfo)] pub struct OpaqueMultiaddr(pub Vec); impl OpaqueMultiaddr { diff --git a/primitives/core/src/sr25519.rs b/primitives/core/src/sr25519.rs index 4c5122162d65d..4787c2d9d13ee 100644 --- a/primitives/core/src/sr25519.rs +++ b/primitives/core/src/sr25519.rs @@ -47,6 +47,7 @@ use crate::{ hash::{H256, H512}, }; use codec::{Decode, Encode, MaxEncodedLen}; +use scale_info::TypeInfo; use sp_std::ops::Deref; #[cfg(feature = "full_crypto")] @@ -65,7 +66,18 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"sr25"); /// An Schnorrkel/Ristretto x25519 ("sr25519") public key. #[cfg_attr(feature = "full_crypto", derive(Hash))] #[derive( - PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, MaxEncodedLen, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Encode, + Decode, + Default, + PassByInner, + MaxEncodedLen, + TypeInfo, )] pub struct Public(pub [u8; 32]); @@ -201,7 +213,7 @@ impl<'de> Deserialize<'de> for Public { /// An Schnorrkel/Ristretto x25519 ("sr25519") signature. 
/// /// Instead of importing it for the local module, alias it to be available as a public type -#[derive(Encode, Decode, PassByInner)] +#[derive(Encode, Decode, PassByInner, TypeInfo)] pub struct Signature(pub [u8; 64]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 895270d012192..c0c2a654270f7 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } @@ -32,6 +33,7 @@ std = [ "log", "serde", "codec/std", + "scale-info/std", "grandpa/std", "sp-api/std", "sp-application-crypto/std", diff --git a/primitives/finality-grandpa/src/lib.rs b/primitives/finality-grandpa/src/lib.rs index 353a3cd07822b..d99a4c1882222 100644 --- a/primitives/finality-grandpa/src/lib.rs +++ b/primitives/finality-grandpa/src/lib.rs @@ -26,6 +26,7 @@ extern crate alloc; use serde::Serialize; use codec::{Codec, Decode, Encode, Input}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; use sp_runtime::{traits::NumberFor, ConsensusEngineId, RuntimeDebug}; @@ -170,7 +171,7 @@ impl ConsensusLog { /// GRANDPA happens when a voter votes on the same round (either at prevote or /// precommit stage) for different blocks. Proving is achieved by collecting the /// signed messages of conflicting votes. 
-#[derive(Clone, Debug, Decode, Encode, PartialEq)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, TypeInfo)] pub struct EquivocationProof { set_id: SetId, equivocation: Equivocation, @@ -204,7 +205,7 @@ impl EquivocationProof { /// Wrapper object for GRANDPA equivocation proofs, useful for unifying prevote /// and precommit equivocations under a common type. -#[derive(Clone, Debug, Decode, Encode, PartialEq)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, TypeInfo)] pub enum Equivocation { /// Proof of equivocation at prevote stage. Prevote(grandpa::Equivocation, AuthoritySignature>), diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 5c6e5c1b13d53..b277df8f58f12 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-npos-elections-solution-type = { version = "4.0.0-dev", path = "./solution-type" } @@ -30,6 +31,7 @@ default = ["std"] bench = [] std = [ "codec/std", + "scale-info/std", "serde", "sp-std/std", "sp-arithmetic/std", diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index a8d0524fb8717..d6fcc09c8b586 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } honggfuzz = "0.5" rand = { version = 
"0.7.3", features = ["std", "small_rng"] } sp-npos-elections = { version = "4.0.0-dev", path = ".." } diff --git a/primitives/npos-elections/solution-type/Cargo.toml b/primitives/npos-elections/solution-type/Cargo.toml index a061cedc92318..cbe6750266f01 100644 --- a/primitives/npos-elections/solution-type/Cargo.toml +++ b/primitives/npos-elections/solution-type/Cargo.toml @@ -22,6 +22,7 @@ proc-macro-crate = "1.0.0" [dev-dependencies] parity-scale-codec = "2.0.1" +scale-info = "1.0" sp-arithmetic = { path = "../../arithmetic", version = "4.0.0-dev" } # used by generate_solution_type: sp-npos-elections = { path = "..", version = "4.0.0-dev" } diff --git a/primitives/npos-elections/solution-type/src/codec.rs b/primitives/npos-elections/solution-type/src/codec.rs index 21688c03ace53..2dac076fcde42 100644 --- a/primitives/npos-elections/solution-type/src/codec.rs +++ b/primitives/npos-elections/solution-type/src/codec.rs @@ -15,33 +15,35 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Code generation for the ratio assignment type' encode/decode impl. +//! Code generation for the ratio assignment type' encode/decode/info impl. use crate::vote_field; use proc_macro2::TokenStream as TokenStream2; use quote::quote; -pub(crate) fn codec_impl( +pub(crate) fn codec_and_info_impl( ident: syn::Ident, voter_type: syn::Type, target_type: syn::Type, weight_type: syn::Type, count: usize, ) -> TokenStream2 { - let encode = encode_impl(ident.clone(), count); - let decode = decode_impl(ident, voter_type, target_type, weight_type, count); + let encode = encode_impl(&ident, count); + let decode = decode_impl(&ident, &voter_type, &target_type, &weight_type, count); + let scale_info = scale_info_impl(&ident, &voter_type, &target_type, &weight_type, count); quote! 
{ #encode #decode + #scale_info } } fn decode_impl( - ident: syn::Ident, - voter_type: syn::Type, - target_type: syn::Type, - weight_type: syn::Type, + ident: &syn::Ident, + voter_type: &syn::Type, + target_type: &syn::Type, + weight_type: &syn::Type, count: usize, ) -> TokenStream2 { let decode_impl_single = { @@ -114,7 +116,7 @@ fn decode_impl( // General attitude is that we will convert inner values to `Compact` and then use the normal // `Encode` implementation. -fn encode_impl(ident: syn::Ident, count: usize) -> TokenStream2 { +fn encode_impl(ident: &syn::Ident, count: usize) -> TokenStream2 { let encode_impl_single = { let name = vote_field(1); quote! { @@ -168,3 +170,73 @@ fn encode_impl(ident: syn::Ident, count: usize) -> TokenStream2 { } ) } + +fn scale_info_impl( + ident: &syn::Ident, + voter_type: &syn::Type, + target_type: &syn::Type, + weight_type: &syn::Type, + count: usize, +) -> TokenStream2 { + let scale_info_impl_single = { + let name = format!("{}", vote_field(1)); + quote! { + .field(|f| + f.ty::<_npos::sp_std::prelude::Vec< + (_npos::codec::Compact<#voter_type>, _npos::codec::Compact<#target_type>) + >>() + .name(#name) + ) + } + }; + + let scale_info_impl_double = { + let name = format!("{}", vote_field(2)); + quote! { + .field(|f| + f.ty::<_npos::sp_std::prelude::Vec<( + _npos::codec::Compact<#voter_type>, + (_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>), + _npos::codec::Compact<#target_type> + )>>() + .name(#name) + ) + } + }; + + let scale_info_impl_rest = (3..=count) + .map(|c| { + let name = format!("{}", vote_field(c)); + quote! 
{ + .field(|f| + f.ty::<_npos::sp_std::prelude::Vec<( + _npos::codec::Compact<#voter_type>, + [ + (_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>); + #c - 1 + ], + _npos::codec::Compact<#target_type> + )>>() + .name(#name) + ) + } + }) + .collect::(); + + quote!( + impl _npos::scale_info::TypeInfo for #ident { + type Identity = Self; + + fn type_info() -> _npos::scale_info::Type<_npos::scale_info::form::MetaForm> { + _npos::scale_info::Type::builder() + .path(_npos::scale_info::Path::new(stringify!(#ident), module_path!())) + .composite( + _npos::scale_info::build::Fields::named() + #scale_info_impl_single + #scale_info_impl_double + #scale_info_impl_rest + ) + } + } + ) +} diff --git a/primitives/npos-elections/solution-type/src/single_page.rs b/primitives/npos-elections/solution-type/src/single_page.rs index 7dfd0e56618f9..33017d558331a 100644 --- a/primitives/npos-elections/solution-type/src/single_page.rs +++ b/primitives/npos-elections/solution-type/src/single_page.rs @@ -64,7 +64,7 @@ pub(crate) fn generate(def: crate::SolutionDef) -> Result { let derives_and_maybe_compact_encoding = if compact_encoding { // custom compact encoding. - let compact_impl = crate::codec::codec_impl( + let compact_impl = crate::codec::codec_and_info_impl( ident.clone(), voter_type.clone(), target_type.clone(), @@ -77,7 +77,16 @@ pub(crate) fn generate(def: crate::SolutionDef) -> Result { } } else { // automatically derived. 
- quote!(#[derive(Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode)]) + quote!(#[derive( + Default, + PartialEq, + Eq, + Clone, + Debug, + _npos::codec::Encode, + _npos::codec::Decode, + _npos::scale_info::TypeInfo, + )]) }; let struct_name = syn::Ident::new("solution", proc_macro2::Span::call_site()); diff --git a/primitives/npos-elections/src/lib.rs b/primitives/npos-elections/src/lib.rs index 84b5d480bef01..afe85ef53b3a7 100644 --- a/primitives/npos-elections/src/lib.rs +++ b/primitives/npos-elections/src/lib.rs @@ -110,6 +110,8 @@ pub use traits::{IdentifierT, NposSolution, PerThing128, __OrInvalidIndex}; #[doc(hidden)] pub use codec; #[doc(hidden)] +pub use scale_info; +#[doc(hidden)] pub use sp_arithmetic; #[doc(hidden)] pub use sp_std; @@ -337,7 +339,7 @@ pub struct ElectionResult { /// /// This, at the current version, resembles the `Exposure` defined in the Staking pallet, yet they /// do not necessarily have to be the same. -#[derive(Default, RuntimeDebug, Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Default, RuntimeDebug, Encode, Decode, Clone, Eq, PartialEq, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Support { /// Total support. 
diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 017c6a75efd99..5ac5bcf1963e0 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,6 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../arithmetic" } @@ -46,6 +47,7 @@ std = [ "sp-application-crypto/std", "sp-arithmetic/std", "codec/std", + "scale-info/std", "log/std", "sp-core/std", "rand", diff --git a/primitives/runtime/src/curve.rs b/primitives/runtime/src/curve.rs index 72d64cf4b8e17..d6bd94c2bff70 100644 --- a/primitives/runtime/src/curve.rs +++ b/primitives/runtime/src/curve.rs @@ -24,7 +24,7 @@ use crate::{ use core::ops::Sub; /// Piecewise Linear function in [0, 1] -> [0, 1]. -#[derive(PartialEq, Eq, sp_core::RuntimeDebug)] +#[derive(PartialEq, Eq, sp_core::RuntimeDebug, scale_info::TypeInfo)] pub struct PiecewiseLinear<'a> { /// Array of points. Must be in order from the lowest abscissas to the highest. 
pub points: &'a [(Perbill, Perbill)], diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 99d27ad5826c4..87af9bc77a5fa 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -24,12 +24,16 @@ use sp_std::prelude::*; use crate::{ codec::{Decode, Encode, Error, Input}, + scale_info::{ + build::{Fields, Variants}, + meta_type, Path, Type, TypeInfo, TypeParameter, + }, ConsensusEngineId, }; use sp_core::{ChangesTrieConfiguration, RuntimeDebug}; /// Generic header digest. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf))] pub struct Digest { /// A list of logs in the digest. @@ -129,7 +133,7 @@ pub enum DigestItem { } /// Available changes trie signals. -#[derive(PartialEq, Eq, Clone, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "std", derive(Debug, parity_util_mem::MallocSizeOf))] pub enum ChangesTrieSignal { /// New changes trie configuration is enacted, starting from **next block**. 
@@ -167,6 +171,69 @@ impl<'a, Hash: Decode> serde::Deserialize<'a> for DigestItem { } } +impl TypeInfo for DigestItem +where + Hash: TypeInfo + 'static, +{ + type Identity = Self; + + fn type_info() -> Type { + Type::builder() + .path(Path::new("DigestItem", module_path!())) + .type_params(vec![TypeParameter::new("Hash", Some(meta_type::()))]) + .variant( + Variants::new() + .variant("ChangesTrieRoot", |v| { + v.index(DigestItemType::ChangesTrieRoot as u8) + .fields(Fields::unnamed().field(|f| f.ty::().type_name("Hash"))) + }) + .variant("PreRuntime", |v| { + v.index(DigestItemType::PreRuntime as u8).fields( + Fields::unnamed() + .field(|f| { + f.ty::().type_name("ConsensusEngineId") + }) + .field(|f| f.ty::>().type_name("Vec")), + ) + }) + .variant("Consensus", |v| { + v.index(DigestItemType::Consensus as u8).fields( + Fields::unnamed() + .field(|f| { + f.ty::().type_name("ConsensusEngineId") + }) + .field(|f| f.ty::>().type_name("Vec")), + ) + }) + .variant("Seal", |v| { + v.index(DigestItemType::Seal as u8).fields( + Fields::unnamed() + .field(|f| { + f.ty::().type_name("ConsensusEngineId") + }) + .field(|f| f.ty::>().type_name("Vec")), + ) + }) + .variant("ChangesTrieSignal", |v| { + v.index(DigestItemType::ChangesTrieSignal as u8).fields( + Fields::unnamed().field(|f| { + f.ty::().type_name("ChangesTrieSignal") + }), + ) + }) + .variant("Other", |v| { + v.index(DigestItemType::Other as u8).fields( + Fields::unnamed().field(|f| f.ty::>().type_name("Vec")), + ) + }) + .variant("RuntimeEnvironmentUpdated", |v| { + v.index(DigestItemType::RuntimeEnvironmentUpdated as u8) + .fields(Fields::unit()) + }), + ) + } +} + /// A 'referencing view' for digest item. Does not own its contents. Used by /// final runtime implementations for encoding/decoding its log items. 
#[derive(PartialEq, Eq, Clone, RuntimeDebug)] @@ -509,4 +576,52 @@ mod tests { r#"{"logs":["0x0204000000","0x000c010203","0x05746573740c010203"]}"# ); } + + #[test] + fn digest_item_type_info() { + let type_info = DigestItem::::type_info(); + let variants = if let scale_info::TypeDef::Variant(variant) = type_info.type_def() { + variant.variants() + } else { + panic!("Should be a TypeDef::TypeDefVariant") + }; + + // ensure that all variants are covered by manual TypeInfo impl + let check = |digest_item_type: DigestItemType| { + let (variant_name, digest_item) = match digest_item_type { + DigestItemType::Other => ("Other", DigestItem::::Other(Default::default())), + DigestItemType::ChangesTrieRoot => + ("ChangesTrieRoot", DigestItem::ChangesTrieRoot(Default::default())), + DigestItemType::Consensus => + ("Consensus", DigestItem::Consensus(Default::default(), Default::default())), + DigestItemType::Seal => + ("Seal", DigestItem::Seal(Default::default(), Default::default())), + DigestItemType::PreRuntime => + ("PreRuntime", DigestItem::PreRuntime(Default::default(), Default::default())), + DigestItemType::ChangesTrieSignal => ( + "ChangesTrieSignal", + DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration( + Default::default(), + )), + ), + DigestItemType::RuntimeEnvironmentUpdated => + ("RuntimeEnvironmentUpdated", DigestItem::RuntimeEnvironmentUpdated), + }; + let encoded = digest_item.encode(); + let variant = variants + .iter() + .find(|v| v.name() == &variant_name) + .expect(&format!("Variant {} not found", variant_name)); + + assert_eq!(encoded[0], variant.index()) + }; + + check(DigestItemType::Other); + check(DigestItemType::ChangesTrieRoot); + check(DigestItemType::Consensus); + check(DigestItemType::Seal); + check(DigestItemType::PreRuntime); + check(DigestItemType::ChangesTrieSignal); + check(DigestItemType::RuntimeEnvironmentUpdated); + } } diff --git a/primitives/runtime/src/generic/era.rs b/primitives/runtime/src/generic/era.rs index 
1a7239ab6e3eb..9d831b679c5e4 100644 --- a/primitives/runtime/src/generic/era.rs +++ b/primitives/runtime/src/generic/era.rs @@ -134,6 +134,50 @@ impl Decode for Era { } } +/// Add Mortal{N}(u8) variants with the given indices, to describe custom encoding. +macro_rules! mortal_variants { + ($variants:ident, $($index:literal),* ) => { + $variants + $( + .variant(concat!(stringify!(Mortal), stringify!($index)), |v| v + .index($index) + .fields(scale_info::build::Fields::unnamed().field(|f| f.ty::())) + ) + )* + } +} + +impl scale_info::TypeInfo for Era { + type Identity = Self; + + fn type_info() -> scale_info::Type { + let variants = scale_info::build::Variants::new().variant("Immortal", |v| v.index(0)); + + // this is necessary since the size of the encoded Mortal variant is `u16`, conditional on + // the value of the first byte being > 0. + let variants = mortal_variants!( + variants, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, + 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, + 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, + 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, + 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, + 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, + 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 
238, 239, 240, 241, 242, 243, + 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255 + ); + + scale_info::Type::builder() + .path(scale_info::Path::new("Era", module_path!())) + .variant(variants) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/primitives/runtime/src/generic/header.rs b/primitives/runtime/src/generic/header.rs index d28f663db003d..82f081c0d70b0 100644 --- a/primitives/runtime/src/generic/header.rs +++ b/primitives/runtime/src/generic/header.rs @@ -20,6 +20,7 @@ use crate::{ codec::{Codec, Decode, Encode}, generic::Digest, + scale_info::TypeInfo, traits::{ self, AtLeast32BitUnsigned, Hash as HashT, MaybeDisplay, MaybeMallocSizeOf, MaybeSerialize, MaybeSerializeDeserialize, Member, SimpleBitOps, @@ -31,7 +32,7 @@ use sp_core::U256; use sp_std::{convert::TryFrom, fmt::Debug}; /// Abstraction over a block header for a substrate chain. -#[derive(Encode, Decode, PartialEq, Eq, Clone, sp_core::RuntimeDebug)] +#[derive(Encode, Decode, PartialEq, Eq, Clone, sp_core::RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] #[cfg_attr(feature = "std", serde(deny_unknown_fields))] diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index f0e00b7b82971..95f4f2f3584d9 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -27,6 +27,7 @@ use crate::{ OpaqueExtrinsic, }; use codec::{Compact, Decode, Encode, EncodeLike, Error, Input}; +use scale_info::{build::Fields, meta_type, Path, StaticTypeInfo, Type, TypeInfo, TypeParameter}; use sp_io::hashing::blake2_256; use sp_std::{fmt, prelude::*}; @@ -48,6 +49,40 @@ where pub function: Call, } +/// Manual [`TypeInfo`] implementation because of custom encoding. The data is a valid encoded +/// `Vec`, but requires some logic to extract the signature and payload. 
+/// +/// See [`UncheckedExtrinsic::encode`] and [`UncheckedExtrinsic::decode`]. +impl TypeInfo + for UncheckedExtrinsic +where + Address: StaticTypeInfo, + Call: StaticTypeInfo, + Signature: StaticTypeInfo, + Extra: SignedExtension + StaticTypeInfo, +{ + type Identity = UncheckedExtrinsic; + + fn type_info() -> Type { + Type::builder() + .path(Path::new("UncheckedExtrinsic", module_path!())) + // Include the type parameter types, even though they are not used directly in any of + // the described fields. These type definitions can be used by downstream consumers + // to help construct the custom decoding from the opaque bytes (see below). + .type_params(vec![ + TypeParameter::new("Address", Some(meta_type::
())), + TypeParameter::new("Call", Some(meta_type::())), + TypeParameter::new("Signature", Some(meta_type::())), + TypeParameter::new("Extra", Some(meta_type::())), + ]) + .docs(&["UncheckedExtrinsic raw bytes, requires custom decoding routine"]) + // Because of the custom encoding, we can only accurately describe the encoding as an + // opaque `Vec`. Downstream consumers will need to manually implement the codec to + // encode/decode the `signature` and `function` fields. + .composite(Fields::unnamed().field(|f| f.ty::>())) + } +} + #[cfg(feature = "std")] impl parity_util_mem::MallocSizeOf for UncheckedExtrinsic @@ -340,7 +375,7 @@ mod tests { const TEST_ACCOUNT: TestAccountId = 0; // NOTE: this is demonstration. One can simply use `()` for testing. - #[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd)] + #[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, TypeInfo)] struct TestExtra; impl SignedExtension for TestExtra { const IDENTIFIER: &'static str = "TestExtra"; diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 4a9c6087fa5cc..80293fe734844 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -26,6 +26,8 @@ extern crate test; #[doc(hidden)] pub use codec; +#[doc(hidden)] +pub use scale_info; #[cfg(feature = "std")] #[doc(hidden)] pub use serde; @@ -50,6 +52,7 @@ use sp_core::{ use sp_std::{convert::TryFrom, prelude::*}; use codec::{Decode, Encode}; +use scale_info::TypeInfo; pub mod curve; pub mod generic; @@ -220,7 +223,7 @@ pub type ConsensusEngineId = [u8; 4]; /// Signature verify that can work with any known signature types.. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Eq, PartialEq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(Eq, PartialEq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum MultiSignature { /// An Ed25519 signature. 
Ed25519(ed25519::Signature), @@ -288,7 +291,7 @@ impl Default for MultiSignature { } /// Public key for any known crypto algorithm. -#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Encode, Decode, RuntimeDebug)] +#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum MultiSigner { /// An Ed25519 identity. @@ -463,7 +466,7 @@ pub type DispatchResult = sp_std::result::Result<(), DispatchError>; pub type DispatchResultWithInfo = sp_std::result::Result>; /// Reason why a dispatch call failed. -#[derive(Eq, Clone, Copy, Encode, Decode, Debug)] +#[derive(Eq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum DispatchError { /// Some error occurred. @@ -544,7 +547,7 @@ impl From for DispatchError { } /// Description of what went wrong when trying to complete an operation on a token. -#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum TokenError { /// Funds are unavailable. @@ -584,7 +587,7 @@ impl From for DispatchError { } /// Arithmetic errors. -#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum ArithmeticError { /// Underflow. diff --git a/primitives/runtime/src/multiaddress.rs b/primitives/runtime/src/multiaddress.rs index 28031461cf323..46d80608352dc 100644 --- a/primitives/runtime/src/multiaddress.rs +++ b/primitives/runtime/src/multiaddress.rs @@ -21,7 +21,7 @@ use codec::{Decode, Encode}; use sp_std::vec::Vec; /// A multi-format address wrapper for on-chain accounts. 
-#[derive(Encode, Decode, PartialEq, Eq, Clone, crate::RuntimeDebug)] +#[derive(Encode, Decode, PartialEq, Eq, Clone, crate::RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Hash))] pub enum MultiAddress { /// It's an account ID (pubkey). diff --git a/primitives/runtime/src/runtime_string.rs b/primitives/runtime/src/runtime_string.rs index 273a22e98f33d..179e881451813 100644 --- a/primitives/runtime/src/runtime_string.rs +++ b/primitives/runtime/src/runtime_string.rs @@ -32,6 +32,14 @@ pub enum RuntimeString { Owned(Vec), } +impl scale_info::TypeInfo for RuntimeString { + type Identity = str; + + fn type_info() -> scale_info::Type { + Self::Identity::type_info() + } +} + /// Convenience macro to use the format! interface to get a `RuntimeString::Owned` #[macro_export] macro_rules! format_runtime_string { diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index 60dc54e09534f..781f342d43c1e 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -20,6 +20,7 @@ use crate::{ codec::{Codec, Decode, Encode}, generic, + scale_info::TypeInfo, traits::{ self, Applyable, BlakeTwo256, Checkable, DispatchInfoOf, Dispatchable, OpaqueKeys, PostDispatchInfoOf, SignedExtension, ValidateUnsigned, @@ -58,6 +59,7 @@ use std::{ Deserialize, PartialOrd, Ord, + TypeInfo, )] pub struct UintAuthorityId(pub u64); @@ -167,7 +169,7 @@ impl traits::IdentifyAccount for UintAuthorityId { } /// A dummy signature type, to match `UintAuthorityId`. -#[derive(Eq, PartialEq, Clone, Debug, Hash, Serialize, Deserialize, Encode, Decode)] +#[derive(Eq, PartialEq, Clone, Debug, Hash, Serialize, Deserialize, Encode, Decode, TypeInfo)] pub struct TestSignature(pub u64, pub Vec); impl traits::Verify for TestSignature { @@ -288,7 +290,7 @@ where /// with index only used if sender is some. /// /// If sender is some then the transaction is signed otherwise it is unsigned. 
-#[derive(PartialEq, Eq, Clone, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] pub struct TestXt { /// Signature of the extrinsic. pub signature: Option<(u64, Extra)>, diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 312a9f6331bf1..65c063fde1696 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -20,6 +20,7 @@ use crate::{ codec::{Codec, Decode, Encode, MaxEncodedLen}, generic::{Digest, DigestItem}, + scale_info::{MetaType, StaticTypeInfo, TypeInfo}, transaction_validity::{ TransactionSource, TransactionValidity, TransactionValidityError, UnknownTransaction, ValidTransaction, @@ -210,7 +211,7 @@ pub trait Lookup { /// context. pub trait StaticLookup { /// Type to lookup from. - type Source: Codec + Clone + PartialEq + Debug; + type Source: Codec + Clone + PartialEq + Debug + TypeInfo; /// Type to lookup into. type Target; /// Attempt a lookup. @@ -222,7 +223,7 @@ pub trait StaticLookup { /// A lookup implementation returning the input value. #[derive(Default)] pub struct IdentityLookup(PhantomData); -impl StaticLookup for IdentityLookup { +impl StaticLookup for IdentityLookup { type Source = T; type Target = T; fn lookup(x: T) -> Result { @@ -247,7 +248,7 @@ impl StaticLookup for AccountIdLookup: Codec, + crate::MultiAddress: Codec + StaticTypeInfo, { type Source = crate::MultiAddress; type Target = AccountId; @@ -444,7 +445,8 @@ pub trait Hash: + Default + Encode + Decode - + MaxEncodedLen; + + MaxEncodedLen + + TypeInfo; /// Produce the hash of some byte-slice. fn hash(s: &[u8]) -> Self::Output { @@ -464,7 +466,7 @@ pub trait Hash: } /// Blake2-256 Hash implementation. -#[derive(PartialEq, Eq, Clone, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct BlakeTwo256; @@ -491,7 +493,7 @@ impl Hash for BlakeTwo256 { } /// Keccak-256 Hash implementation. 
-#[derive(PartialEq, Eq, Clone, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Keccak256; @@ -629,7 +631,8 @@ pub trait Header: + Codec + AsRef<[u8]> + AsMut<[u8]> - + MaybeMallocSizeOf; + + MaybeMallocSizeOf + + TypeInfo; /// Hashing algorithm type Hashing: Hash; @@ -697,7 +700,8 @@ pub trait Block: + Codec + AsRef<[u8]> + AsMut<[u8]> - + MaybeMallocSizeOf; + + MaybeMallocSizeOf + + TypeInfo; /// Returns a reference to the header. fn header(&self) -> &Self::Header; @@ -833,7 +837,9 @@ impl Dispatchable for () { /// Means by which a transaction may be extended. This type embodies both the data and the logic /// that should be additionally associated with the transaction. It should be plain old data. -pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq { +pub trait SignedExtension: + Codec + Debug + Sync + Send + Clone + Eq + PartialEq + StaticTypeInfo +{ /// Unique identifier of this signed extension. /// /// This will be exposed in the metadata to identify the signed extension used @@ -848,7 +854,7 @@ pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq /// Any additional data that will go into the signed payload. This may be created dynamically /// from the transaction using the `additional_signed` function. - type AdditionalSigned: Encode; + type AdditionalSigned: Encode + TypeInfo; /// The type that encodes information that can be passed from pre_dispatch to post-dispatch. type Pre: Default; @@ -953,18 +959,33 @@ pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq Ok(()) } - /// Returns the list of unique identifier for this signed extension. + /// Returns the metadata for this signed extension. /// /// As a [`SignedExtension`] can be a tuple of [`SignedExtension`]s we need to return a `Vec` - /// that holds all the unique identifiers. 
Each individual `SignedExtension` must return - /// *exactly* one identifier. + /// that holds the metadata of each one. Each individual `SignedExtension` must return + /// *exactly* one [`SignedExtensionMetadata`]. /// - /// This method provides a default implementation that returns `vec![SELF::IDENTIFIER]`. - fn identifier() -> Vec<&'static str> { - sp_std::vec![Self::IDENTIFIER] + /// This method provides a default implementation that returns a vec containing a single + /// [`SignedExtensionMetadata`]. + fn metadata() -> Vec { + sp_std::vec![SignedExtensionMetadata { + identifier: Self::IDENTIFIER, + ty: scale_info::meta_type::(), + additional_signed: scale_info::meta_type::() + }] } } +/// Information about a [`SignedExtension`] for the runtime metadata. +pub struct SignedExtensionMetadata { + /// The unique identifier of the [`SignedExtension`]. + pub identifier: &'static str, + /// The type of the [`SignedExtension`]. + pub ty: MetaType, + /// The type of the [`SignedExtension`] additional signed data for the payload. + pub additional_signed: MetaType, +} + #[impl_for_tuples(1, 12)] impl SignedExtension for Tuple { for_tuples!( where #( Tuple: SignedExtension )* ); @@ -1029,9 +1050,9 @@ impl SignedExtension for Tuple { Ok(()) } - fn identifier() -> Vec<&'static str> { + fn metadata() -> Vec { let mut ids = Vec::new(); - for_tuples!( #( ids.extend(Tuple::identifier()); )* ); + for_tuples!( #( ids.extend(Tuple::metadata()); )* ); ids } } @@ -1305,6 +1326,7 @@ macro_rules! 
impl_opaque_keys_inner { Default, Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] pub struct $name { diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index 91677b474d954..8e1e2464e49ec 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } @@ -24,6 +25,7 @@ sp-runtime = { version = "4.0.0-dev", optional = true, path = "../runtime" } default = [ "std" ] std = [ "codec/std", + "scale-info/std", "sp-api/std", "sp-core/std", "sp-std/std", diff --git a/primitives/session/src/lib.rs b/primitives/session/src/lib.rs index 22d6b0b4a5920..d85b6af4349e4 100644 --- a/primitives/session/src/lib.rs +++ b/primitives/session/src/lib.rs @@ -53,7 +53,7 @@ sp_api::decl_runtime_apis! { pub type ValidatorCount = u32; /// Proof of membership of a specific key in a given session. -#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct MembershipProof { /// The session index on which the specific key is a member. 
pub session: SessionIndex, diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index 85f5487da884c..9e852319ede42 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } @@ -21,6 +22,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", ] diff --git a/primitives/staking/src/offence.rs b/primitives/staking/src/offence.rs index b9afda41c5e77..a91cb47c117b6 100644 --- a/primitives/staking/src/offence.rs +++ b/primitives/staking/src/offence.rs @@ -170,7 +170,7 @@ impl OnOffenceHandler } /// A details about an offending authority for a particular kind of offence. 
-#[derive(Clone, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] pub struct OffenceDetails { /// The offending authority id pub offender: Offender, diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index 0b5065be8219f..8a41105b20b74 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,6 +19,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-trie = { version = "4.0.0-dev", optional = true, path = "../trie" } sp-core = { version = "4.0.0-dev", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.50", optional = true } @@ -26,6 +27,7 @@ async-trait = { version = "0.1.50", optional = true } default = [ "std" ] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-inherents/std", "sp-runtime/std", diff --git a/primitives/transaction-storage-proof/src/lib.rs b/primitives/transaction-storage-proof/src/lib.rs index d159aa735c266..4b01a8d45d454 100644 --- a/primitives/transaction-storage-proof/src/lib.rs +++ b/primitives/transaction-storage-proof/src/lib.rs @@ -51,7 +51,7 @@ impl IsFatalError for InherentError { /// Holds a chunk of data retrieved from storage along with /// a proof that the data was stored at that location in the trie. -#[derive(Encode, Decode, Clone, PartialEq, Debug)] +#[derive(Encode, Decode, Clone, PartialEq, Debug, scale_info::TypeInfo)] pub struct TransactionStorageProof { /// Data chunk that is proved to exist. 
pub chunk: Vec, diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index 60356e0a8d6d3..5a2de4f16f9a4 100644 --- a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -19,6 +19,7 @@ harness = false [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.6", default-features = false } @@ -38,6 +39,7 @@ default = ["std"] std = [ "sp-std/std", "codec/std", + "scale-info/std", "hash-db/std", "memory-db/std", "trie-db/std", diff --git a/primitives/trie/src/storage_proof.rs b/primitives/trie/src/storage_proof.rs index 410ad44e75a63..cfdb8566ea75f 100644 --- a/primitives/trie/src/storage_proof.rs +++ b/primitives/trie/src/storage_proof.rs @@ -17,6 +17,7 @@ use codec::{Decode, Encode}; use hash_db::{HashDB, Hasher}; +use scale_info::TypeInfo; use sp_std::vec::Vec; /// A proof that some set of key-value pairs are included in the storage trie. The proof contains @@ -26,13 +27,13 @@ use sp_std::vec::Vec; /// The proof consists of the set of serialized nodes in the storage trie accessed when looking up /// the keys covered by the proof. Verifying the proof requires constructing the partial trie from /// the serialized nodes and performing the key lookups. -#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode)] +#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] pub struct StorageProof { trie_nodes: Vec>, } /// Storage proof in compact form. 
-#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode)] +#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] pub struct CompactProof { pub encoded_nodes: Vec>, } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index 1cd3e7c724750..fcab1eeabcaf4 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,6 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-version-proc-macro = { version = "4.0.0-dev", default-features = false, path = "proc-macro" } @@ -30,6 +31,7 @@ std = [ "impl-serde", "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "parity-wasm", diff --git a/primitives/version/src/lib.rs b/primitives/version/src/lib.rs index 65b22436a5ba1..58216bc494dd7 100644 --- a/primitives/version/src/lib.rs +++ b/primitives/version/src/lib.rs @@ -27,6 +27,7 @@ use std::collections::HashSet; use std::fmt; use codec::{Decode, Encode}; +use scale_info::TypeInfo; pub use sp_runtime::create_runtime_str; use sp_runtime::RuntimeString; #[doc(hidden)] @@ -123,7 +124,7 @@ macro_rules! create_apis_vec { /// In particular: bug fixes should result in an increment of `spec_version` and possibly /// `authoring_version`, absolutely not `impl_version` since they change the semantics of the /// runtime. 
-#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, sp_runtime::RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, sp_runtime::RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] pub struct RuntimeVersion { diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index cc57f12ea31a3..24f4d404c18bd 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,6 +18,7 @@ sp-consensus-aura = { version = "0.10.0-dev", default-features = false, path = " sp-consensus-babe = { version = "0.10.0-dev", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "4.0.0-dev", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-inherents = { version = "4.0.0-dev", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.27.0", default-features = false } @@ -69,6 +70,7 @@ std = [ "sp-consensus-babe/std", "sp-block-builder/std", "codec/std", + "scale-info/std", "sp-inherents/std", "sp-keyring", "log/std", diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index bdb8724120813..0d880d508ef38 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -24,6 +24,7 @@ pub mod genesismap; pub mod system; use codec::{Decode, Encode, Error, Input}; +use scale_info::TypeInfo; use sp_std::{marker::PhantomData, prelude::*}; use sp_application_crypto::{ecdsa, ed25519, sr25519, RuntimeAppPublic}; @@ -415,7 +416,7 @@ cfg_if! 
{ } } -#[derive(Clone, Eq, PartialEq)] +#[derive(Clone, Eq, PartialEq, TypeInfo)] pub struct Runtime; impl GetNodeBlockType for Runtime { @@ -483,7 +484,7 @@ impl frame_support::traits::OriginTrait for Origin { } } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct Event; impl From> for Event { diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index aa9f1bbef8024..a94f18d0e8925 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -25,4 +25,5 @@ sc-rpc-api = { version = "0.10.0-dev", path = "../../../../client/rpc-api" } [dev-dependencies] frame-system = { version = "4.0.0-dev", path = "../../../../frame/system" } +scale-info = "1.0" tokio = "1.10" From 435f56edc14a3a7e895ff5370f6e5179dc547cc4 Mon Sep 17 00:00:00 2001 From: Guillaume Thiolliere Date: Thu, 16 Sep 2021 15:20:29 +0200 Subject: [PATCH 05/33] Implement a `CountedStorageMap` (#9125) * initial impl * expose in pallet_prelude * temp test * Apply suggestions from code review Co-authored-by: Peter Goodspeed-Niklaus Co-authored-by: Xiliang Chen * implement with macro help. 
* test for macro generation * add iterable functions, some test and fixes * fix merge * doc * Update frame/support/src/storage/types/counted_map.rs Co-authored-by: Zeke Mostov <32168567+emostov@users.noreply.github.com> * fix merge * fmt * fix spelling * improve on removal * fix partial storage info * fmt * add license * suggested renames * fix typo * fix test * fmt * fix ui tests * clearer doc * better doc * add metadata test Co-authored-by: Shawn Tabrizi Co-authored-by: Peter Goodspeed-Niklaus Co-authored-by: Xiliang Chen Co-authored-by: Zeke Mostov <32168567+emostov@users.noreply.github.com> --- frame/example/src/lib.rs | 3 + frame/example/src/tests.rs | 9 + .../procedural/src/pallet/expand/storage.rs | 172 ++- .../procedural/src/pallet/parse/storage.rs | 45 + frame/support/src/lib.rs | 21 +- .../src/storage/generator/double_map.rs | 3 + frame/support/src/storage/generator/map.rs | 1 + frame/support/src/storage/generator/nmap.rs | 3 + frame/support/src/storage/migration.rs | 7 +- frame/support/src/storage/mod.rs | 40 +- .../support/src/storage/types/counted_map.rs | 1040 +++++++++++++++++ frame/support/src/storage/types/double_map.rs | 93 +- frame/support/src/storage/types/map.rs | 83 +- frame/support/src/storage/types/mod.rs | 17 +- frame/support/src/storage/types/nmap.rs | 156 ++- frame/support/src/storage/types/value.rs | 52 +- frame/support/test/tests/pallet.rs | 98 +- .../pallet_ui/duplicate_storage_prefix.rs | 14 +- .../pallet_ui/duplicate_storage_prefix.stderr | 38 +- ...age_ensure_span_are_ok_on_wrong_gen.stderr | 16 +- ...re_span_are_ok_on_wrong_gen_unnamed.stderr | 16 +- .../storage_info_unsatisfied_nmap.stderr | 4 +- 22 files changed, 1724 insertions(+), 207 deletions(-) create mode 100644 frame/support/src/storage/types/counted_map.rs diff --git a/frame/example/src/lib.rs b/frame/example/src/lib.rs index 3f56b57dac8ca..23c4951c1a603 100644 --- a/frame/example/src/lib.rs +++ b/frame/example/src/lib.rs @@ -605,6 +605,9 @@ pub mod pallet { 
#[pallet::getter(fn foo)] pub(super) type Foo = StorageValue<_, T::Balance, ValueQuery>; + #[pallet::storage] + pub type CountedMap = CountedStorageMap<_, Blake2_128Concat, u8, u16>; + // The genesis config type. #[pallet::genesis_config] pub struct GenesisConfig { diff --git a/frame/example/src/tests.rs b/frame/example/src/tests.rs index 87c2404f5b100..4c2274572db81 100644 --- a/frame/example/src/tests.rs +++ b/frame/example/src/tests.rs @@ -180,6 +180,15 @@ fn signed_ext_watch_dummy_works() { }) } +#[test] +fn counted_map_works() { + new_test_ext().execute_with(|| { + assert_eq!(CountedMap::::count(), 0); + CountedMap::::insert(3, 3); + assert_eq!(CountedMap::::count(), 1); + }) +} + #[test] fn weights_work() { // must have a defined weight. diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index 0f7133f10dd47..a4f030722f1c1 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -19,27 +19,68 @@ use crate::pallet::{ parse::storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, Def, }; -use std::collections::HashSet; +use std::collections::HashMap; -/// Generate the prefix_ident related the the storage. +/// Generate the prefix_ident related to the storage. /// prefix_ident is used for the prefix struct to be given to storage as first generic param. fn prefix_ident(storage: &StorageDef) -> syn::Ident { let storage_ident = &storage.ident; syn::Ident::new(&format!("_GeneratedPrefixForStorage{}", storage_ident), storage_ident.span()) } +/// Generate the counter_prefix_ident related to the storage. +/// counter_prefix_ident is used for the prefix struct to be given to counted storage map. 
+fn counter_prefix_ident(storage_ident: &syn::Ident) -> syn::Ident { + syn::Ident::new( + &format!("_GeneratedCounterPrefixForStorage{}", storage_ident), + storage_ident.span(), + ) +} + +/// Generate the counter_prefix related to the storage. +/// counter_prefix is used by counted storage map. +fn counter_prefix(prefix: &str) -> String { + format!("CounterFor{}", prefix) +} + /// Check for duplicated storage prefixes. This step is necessary since users can specify an /// alternative storage prefix using the #[pallet::storage_prefix] syntax, and we need to ensure /// that the prefix specified by the user is not a duplicate of an existing one. -fn check_prefix_duplicates(storage_def: &StorageDef, set: &mut HashSet) -> syn::Result<()> { +fn check_prefix_duplicates( + storage_def: &StorageDef, + // A hashmap of all already used prefix and their associated error if duplication + used_prefixes: &mut HashMap, +) -> syn::Result<()> { let prefix = storage_def.prefix(); + let dup_err = syn::Error::new( + storage_def.prefix_span(), + format!("Duplicate storage prefixes found for `{}`", prefix), + ); + + if let Some(other_dup_err) = used_prefixes.insert(prefix.clone(), dup_err.clone()) { + let mut err = dup_err; + err.combine(other_dup_err); + return Err(err) + } - if !set.insert(prefix.clone()) { - let err = syn::Error::new( + if let Metadata::CountedMap { .. 
} = storage_def.metadata { + let counter_prefix = counter_prefix(&prefix); + let counter_dup_err = syn::Error::new( storage_def.prefix_span(), - format!("Duplicate storage prefixes found for `{}`", prefix), + format!( + "Duplicate storage prefixes found for `{}`, used for counter associated to \ + counted storage map", + counter_prefix, + ), ); - return Err(err) + + if let Some(other_dup_err) = + used_prefixes.insert(counter_prefix.clone(), counter_dup_err.clone()) + { + let mut err = counter_dup_err; + err.combine(other_dup_err); + return Err(err) + } } Ok(()) @@ -51,11 +92,8 @@ fn check_prefix_duplicates(storage_def: &StorageDef, set: &mut HashSet) /// * Add `#[allow(type_alias_bounds)]` pub fn process_generics(def: &mut Def) -> syn::Result<()> { let frame_support = &def.frame_support; - let mut prefix_set = HashSet::new(); for storage_def in def.storages.iter_mut() { - check_prefix_duplicates(storage_def, &mut prefix_set)?; - let item = &mut def.item.content.as_mut().expect("Checked by def").1[storage_def.index]; let typ_item = match item { @@ -109,6 +147,24 @@ pub fn process_generics(def: &mut Def) -> syn::Result<()> { let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); args.args.push(syn::GenericArgument::Type(max_values)); }, + StorageGenerics::CountedMap { + hasher, + key, + value, + query_kind, + on_empty, + max_values, + } => { + args.args.push(syn::GenericArgument::Type(hasher)); + args.args.push(syn::GenericArgument::Type(key)); + args.args.push(syn::GenericArgument::Type(value)); + let query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); + args.args.push(syn::GenericArgument::Type(query_kind)); + let on_empty = on_empty.unwrap_or_else(|| default_on_empty.clone()); + args.args.push(syn::GenericArgument::Type(on_empty)); + let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); + args.args.push(syn::GenericArgument::Type(max_values)); + }, StorageGenerics::DoubleMap { hasher1, key1, @@ -162,11 
+218,22 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { return e.into_compile_error().into() } + // Check for duplicate prefixes + let mut prefix_set = HashMap::new(); + let mut errors = def + .storages + .iter() + .filter_map(|storage_def| check_prefix_duplicates(storage_def, &mut prefix_set).err()); + if let Some(mut final_error) = errors.next() { + errors.for_each(|error| final_error.combine(error)); + return final_error.into_compile_error() + } + let frame_support = &def.frame_support; let frame_system = &def.frame_system; let pallet_ident = &def.pallet_struct.pallet; - let entries = def.storages.iter().map(|storage| { + let entries_builder = def.storages.iter().map(|storage| { let docs = &storage.docs; let ident = &storage.ident; @@ -176,14 +243,14 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { let cfg_attrs = &storage.cfg_attrs; quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* #frame_support::metadata::StorageEntryMetadata { - name: <#full_ident as #frame_support::storage::StorageEntryMetadata>::NAME, - modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, - ty: <#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), - default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), - docs: #frame_support::sp_std::vec![ - #( #docs, )* - ], + #(#cfg_attrs)* + { + <#full_ident as #frame_support::storage::StorageEntryMetadataBuilder>::build_metadata( + #frame_support::sp_std::vec![ + #( #docs, )* + ], + &mut entries, + ); } ) }); @@ -246,6 +313,27 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { } ) }, + Metadata::CountedMap { key, value } => { + let query = match storage.query_kind.as_ref().expect("Checked by def") { + QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => + Option<#value> + ), + QueryKind::ValueQuery => quote::quote!(#value), + }; + quote::quote_spanned!(storage.attr_span => + 
#(#cfg_attrs)* + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause { + #( #docs )* + pub fn #getter(k: KArg) -> #query where + KArg: #frame_support::codec::EncodeLike<#key>, + { + // NOTE: we can't use any trait here because CountedStorageMap + // doesn't implement any. + <#full_ident>::get(k) + } + } + ) + }, Metadata::DoubleMap { key1, key2, value } => { let query = match storage.query_kind.as_ref().expect("Checked by def") { QueryKind::OptionQuery => quote::quote_spanned!(storage.attr_span => @@ -311,7 +399,44 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { let cfg_attrs = &storage_def.cfg_attrs; + let maybe_counter = if let Metadata::CountedMap { .. } = storage_def.metadata { + let counter_prefix_struct_ident = counter_prefix_ident(&storage_def.ident); + let counter_prefix_struct_const = counter_prefix(&prefix_struct_const); + + quote::quote_spanned!(storage_def.attr_span => + #(#cfg_attrs)* + #prefix_struct_vis struct #counter_prefix_struct_ident<#type_use_gen>( + core::marker::PhantomData<(#type_use_gen,)> + ); + #(#cfg_attrs)* + impl<#type_impl_gen> #frame_support::traits::StorageInstance + for #counter_prefix_struct_ident<#type_use_gen> + #config_where_clause + { + fn pallet_prefix() -> &'static str { + < + ::PalletInfo + as #frame_support::traits::PalletInfo + >::name::>() + .expect("Every active pallet has a name in the runtime; qed") + } + const STORAGE_PREFIX: &'static str = #counter_prefix_struct_const; + } + #(#cfg_attrs)* + impl<#type_impl_gen> #frame_support::storage::types::CountedStorageMapInstance + for #prefix_struct_ident<#type_use_gen> + #config_where_clause + { + type CounterPrefix = #counter_prefix_struct_ident<#type_use_gen>; + } + ) + } else { + proc_macro2::TokenStream::default() + }; + quote::quote_spanned!(storage_def.attr_span => + #maybe_counter + #(#cfg_attrs)* #prefix_struct_vis struct #prefix_struct_ident<#type_use_gen>( core::marker::PhantomData<(#type_use_gen,)> @@ -351,9 +476,12 @@ 
pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::traits::PalletInfo >::name::<#pallet_ident<#type_use_gen>>() .expect("Every active pallet has a name in the runtime; qed"), - entries: #frame_support::sp_std::vec![ - #( #entries, )* - ], + entries: { + #[allow(unused_mut)] + let mut entries = #frame_support::sp_std::vec![]; + #( #entries_builder )* + entries + }, } } } diff --git a/frame/support/procedural/src/pallet/parse/storage.rs b/frame/support/procedural/src/pallet/parse/storage.rs index e58b5d2048863..8075daacb6f44 100644 --- a/frame/support/procedural/src/pallet/parse/storage.rs +++ b/frame/support/procedural/src/pallet/parse/storage.rs @@ -86,6 +86,7 @@ impl syn::parse::Parse for PalletStorageAttr { pub enum Metadata { Value { value: syn::Type }, Map { value: syn::Type, key: syn::Type }, + CountedMap { value: syn::Type, key: syn::Type }, DoubleMap { value: syn::Type, key1: syn::Type, key2: syn::Type }, NMap { keys: Vec, keygen: syn::Type, value: syn::Type }, } @@ -153,6 +154,14 @@ pub enum StorageGenerics { on_empty: Option, max_values: Option, }, + CountedMap { + hasher: syn::Type, + key: syn::Type, + value: syn::Type, + query_kind: Option, + on_empty: Option, + max_values: Option, + }, Value { value: syn::Type, query_kind: Option, @@ -173,6 +182,7 @@ impl StorageGenerics { let res = match self.clone() { Self::DoubleMap { value, key1, key2, .. } => Metadata::DoubleMap { value, key1, key2 }, Self::Map { value, key, .. } => Metadata::Map { value, key }, + Self::CountedMap { value, key, .. } => Metadata::CountedMap { value, key }, Self::Value { value, .. } => Metadata::Value { value }, Self::NMap { keygen, value, .. } => Metadata::NMap { keys: collect_keys(&keygen)?, keygen, value }, @@ -186,6 +196,7 @@ impl StorageGenerics { match &self { Self::DoubleMap { query_kind, .. } | Self::Map { query_kind, .. } | + Self::CountedMap { query_kind, .. } | Self::Value { query_kind, .. } | Self::NMap { query_kind, .. 
} => query_kind.clone(), } @@ -195,6 +206,7 @@ impl StorageGenerics { enum StorageKind { Value, Map, + CountedMap, DoubleMap, NMap, } @@ -324,6 +336,33 @@ fn process_named_generics( max_values: parsed.remove("MaxValues").map(|binding| binding.ty), } }, + StorageKind::CountedMap => { + check_generics( + &parsed, + &["Hasher", "Key", "Value"], + &["QueryKind", "OnEmpty", "MaxValues"], + "CountedStorageMap", + args_span, + )?; + + StorageGenerics::CountedMap { + hasher: parsed + .remove("Hasher") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + key: parsed + .remove("Key") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + value: parsed + .remove("Value") + .map(|binding| binding.ty) + .expect("checked above as mandatory generic"), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), + max_values: parsed.remove("MaxValues").map(|binding| binding.ty), + } + }, StorageKind::DoubleMap => { check_generics( &parsed, @@ -425,6 +464,11 @@ fn process_unnamed_generics( Metadata::Map { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, retrieve_arg(4).ok(), ), + StorageKind::CountedMap => ( + None, + Metadata::CountedMap { key: retrieve_arg(2)?, value: retrieve_arg(3)? 
}, + retrieve_arg(4).ok(), + ), StorageKind::DoubleMap => ( None, Metadata::DoubleMap { @@ -451,6 +495,7 @@ fn process_generics( let storage_kind = match &*segment.ident.to_string() { "StorageValue" => StorageKind::Value, "StorageMap" => StorageKind::Map, + "CountedStorageMap" => StorageKind::CountedMap, "StorageDoubleMap" => StorageKind::DoubleMap, "StorageNMap" => StorageKind::NMap, found => { diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index cce03f1e8ce6c..459698707366d 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1293,8 +1293,8 @@ pub mod pallet_prelude { storage::{ bounded_vec::BoundedVec, types::{ - Key as NMapKey, OptionQuery, StorageDoubleMap, StorageMap, StorageNMap, - StorageValue, ValueQuery, + CountedStorageMap, Key as NMapKey, OptionQuery, StorageDoubleMap, StorageMap, + StorageNMap, StorageValue, ValueQuery, }, }, traits::{ @@ -1673,6 +1673,8 @@ pub mod pallet_prelude { /// * [`pallet_prelude::StorageValue`] expect `Value` and optionally `QueryKind` and `OnEmpty`, /// * [`pallet_prelude::StorageMap`] expect `Hasher`, `Key`, `Value` and optionally `QueryKind` /// and `OnEmpty`, +/// * [`pallet_prelude::CountedStorageMap`] expect `Hasher`, `Key`, `Value` and optionally +/// `QueryKind` and `OnEmpty`, /// * [`pallet_prelude::StorageDoubleMap`] expect `Hasher1`, `Key1`, `Hasher2`, `Key2`, `Value` /// and optionally `QueryKind` and `OnEmpty`. /// @@ -1684,13 +1686,16 @@ pub mod pallet_prelude { /// E.g. if runtime names the pallet "MyExample" then the storage `type Foo = ...` use the /// prefix: `Twox128(b"MyExample") ++ Twox128(b"Foo")`. /// -/// The optional attribute `#[pallet::storage_prefix = "$custom_name"]` allows to define a -/// specific name to use for the prefix. +/// For the `CountedStorageMap` variant, the Prefix also implements +/// `CountedStorageMapInstance`. It associate a `CounterPrefix`, which is implemented same as +/// above, but the storage prefix is prepend with `"CounterFor"`. 
+/// E.g. if runtime names the pallet "MyExample" then the storage +/// `type Foo = CountedStorageaMap<...>` will store its counter at the prefix: +/// `Twox128(b"MyExample") ++ Twox128(b"CounterForFoo")`. /// /// E.g: /// ```ignore /// #[pallet::storage] -/// #[pallet::storage_prefix = "OtherName"] /// pub(super) type MyStorage = StorageMap; /// ``` /// In this case the final prefix used by the map is @@ -1699,9 +1704,13 @@ pub mod pallet_prelude { /// The optional attribute `#[pallet::getter(fn $my_getter_fn_name)]` allows to define a /// getter function on `Pallet`. /// +/// The optional attribute `#[pallet::storage_prefix = "SomeName"]` allow to define the storage +/// prefix to use, see how `Prefix` generic is implemented above. +/// /// E.g: /// ```ignore /// #[pallet::storage] +/// #[pallet::storage_prefix = "foo"] /// #[pallet::getter(fn my_storage)] /// pub(super) type MyStorage = StorageMap; /// ``` @@ -1738,6 +1747,8 @@ pub mod pallet_prelude { /// `_GeneratedPrefixForStorage$NameOfStorage`, and implements /// [`StorageInstance`](traits::StorageInstance) on it using the pallet and storage name. It /// then uses it as the first generic of the aliased type. +/// For `CountedStorageMap`, `CountedStorageMapInstance` is implemented, and another similar +/// struct is generated. /// /// For named generic, the macro will reorder the generics, and remove the names. 
/// diff --git a/frame/support/src/storage/generator/double_map.rs b/frame/support/src/storage/generator/double_map.rs index d28e42028de53..636a10feb1ab3 100644 --- a/frame/support/src/storage/generator/double_map.rs +++ b/frame/support/src/storage/generator/double_map.rs @@ -219,6 +219,7 @@ where previous_key: prefix, drain: false, closure: |_raw_key, mut raw_value| V::decode(&mut raw_value), + phantom: Default::default(), } } @@ -345,6 +346,7 @@ where let mut key_material = G::Hasher2::reverse(raw_key_without_prefix); Ok((K2::decode(&mut key_material)?, V::decode(&mut raw_value)?)) }, + phantom: Default::default(), } } @@ -398,6 +400,7 @@ where let k2 = K2::decode(&mut k2_material)?; Ok((k1, k2, V::decode(&mut raw_value)?)) }, + phantom: Default::default(), } } diff --git a/frame/support/src/storage/generator/map.rs b/frame/support/src/storage/generator/map.rs index 3fd3b9a0ea7b8..1a4225173c4ae 100644 --- a/frame/support/src/storage/generator/map.rs +++ b/frame/support/src/storage/generator/map.rs @@ -138,6 +138,7 @@ where let mut key_material = G::Hasher::reverse(raw_key_without_prefix); Ok((K::decode(&mut key_material)?, V::decode(&mut raw_value)?)) }, + phantom: Default::default(), } } diff --git a/frame/support/src/storage/generator/nmap.rs b/frame/support/src/storage/generator/nmap.rs index 592bcc81341bf..4845673d3d8c2 100755 --- a/frame/support/src/storage/generator/nmap.rs +++ b/frame/support/src/storage/generator/nmap.rs @@ -196,6 +196,7 @@ where previous_key: prefix, drain: false, closure: |_raw_key, mut raw_value| V::decode(&mut raw_value), + phantom: Default::default(), } } @@ -305,6 +306,7 @@ impl> let partial_key = K::decode_partial_key(raw_key_without_prefix)?; Ok((partial_key, V::decode(&mut raw_value)?)) }, + phantom: Default::default(), } } @@ -368,6 +370,7 @@ impl> let (final_key, _) = K::decode_final_key(raw_key_without_prefix)?; Ok((final_key, V::decode(&mut raw_value)?)) }, + phantom: Default::default(), } } diff --git 
a/frame/support/src/storage/migration.rs b/frame/support/src/storage/migration.rs index eae45b1e96ad0..59422a282aab5 100644 --- a/frame/support/src/storage/migration.rs +++ b/frame/support/src/storage/migration.rs @@ -186,7 +186,7 @@ pub fn storage_iter_with_suffix( Ok((raw_key_without_prefix.to_vec(), value)) }; - PrefixIterator { prefix, previous_key, drain: false, closure } + PrefixIterator { prefix, previous_key, drain: false, closure, phantom: Default::default() } } /// Construct iterator to iterate over map items in `module` for the map called `item`. @@ -219,7 +219,7 @@ pub fn storage_key_iter_with_suffix< let value = T::decode(&mut &raw_value[..])?; Ok((key, value)) }; - PrefixIterator { prefix, previous_key, drain: false, closure } + PrefixIterator { prefix, previous_key, drain: false, closure, phantom: Default::default() } } /// Get a particular value in storage by the `module`, the map's `item` name and the key `hash`. @@ -344,11 +344,12 @@ pub fn move_prefix(from_prefix: &[u8], to_prefix: &[u8]) { return } - let iter = PrefixIterator { + let iter = PrefixIterator::<_> { prefix: from_prefix.to_vec(), previous_key: from_prefix.to_vec(), drain: true, closure: |key, value| Ok((key.to_vec(), value.to_vec())), + phantom: Default::default(), }; for (key, value) in iter { diff --git a/frame/support/src/storage/mod.rs b/frame/support/src/storage/mod.rs index e57a876bf9831..35552e08fef1e 100644 --- a/frame/support/src/storage/mod.rs +++ b/frame/support/src/storage/mod.rs @@ -17,7 +17,7 @@ //! Stuff to do with the runtime's storage. -pub use self::types::StorageEntryMetadata; +pub use self::types::StorageEntryMetadataBuilder; use crate::{ hash::{ReversibleStorageHasher, StorageHasher}, storage::types::{ @@ -786,10 +786,12 @@ pub trait StorageNMap { KArg: EncodeLikeTuple + TupleToEncodedIter; } -/// Iterate over a prefix and decode raw_key and raw_value into `T`. +/// Iterate or drain over a prefix and decode raw_key and raw_value into `T`. 
/// /// If any decoding fails it skips it and continues to the next key. -pub struct PrefixIterator { +/// +/// If draining, then the hook `OnRemoval::on_removal` is called after each removal. +pub struct PrefixIterator { prefix: Vec, previous_key: Vec, /// If true then value are removed while iterating @@ -797,9 +799,21 @@ pub struct PrefixIterator { /// Function that take `(raw_key_without_prefix, raw_value)` and decode `T`. /// `raw_key_without_prefix` is the raw storage key without the prefix iterated on. closure: fn(&[u8], &[u8]) -> Result, + phantom: core::marker::PhantomData, +} + +/// Trait for specialising on removal logic of [`PrefixIterator`]. +pub trait PrefixIteratorOnRemoval { + /// This function is called whenever a key/value is removed. + fn on_removal(key: &[u8], value: &[u8]); +} + +/// No-op implementation. +impl PrefixIteratorOnRemoval for () { + fn on_removal(_key: &[u8], _value: &[u8]) {} } -impl PrefixIterator { +impl PrefixIterator { /// Creates a new `PrefixIterator`, iterating after `previous_key` and filtering out keys that /// are not prefixed with `prefix`. /// @@ -813,7 +827,13 @@ impl PrefixIterator { previous_key: Vec, decode_fn: fn(&[u8], &[u8]) -> Result, ) -> Self { - PrefixIterator { prefix, previous_key, drain: false, closure: decode_fn } + PrefixIterator { + prefix, + previous_key, + drain: false, + closure: decode_fn, + phantom: Default::default(), + } } /// Get the last key that has been iterated upon and return it. 
@@ -838,7 +858,7 @@ impl PrefixIterator { } } -impl Iterator for PrefixIterator { +impl Iterator for PrefixIterator { type Item = T; fn next(&mut self) -> Option { @@ -859,7 +879,8 @@ impl Iterator for PrefixIterator { }, }; if self.drain { - unhashed::kill(&self.previous_key) + unhashed::kill(&self.previous_key); + OnRemoval::on_removal(&self.previous_key, &raw_value); } let raw_key_without_prefix = &self.previous_key[self.prefix.len()..]; let item = match (self.closure)(raw_key_without_prefix, &raw_value[..]) { @@ -1119,7 +1140,7 @@ pub trait StoragePrefixedMap { /// Iter over all value of the storage. /// - /// NOTE: If a value failed to decode becaues storage is corrupted then it is skipped. + /// NOTE: If a value failed to decode because storage is corrupted then it is skipped. fn iter_values() -> PrefixIterator { let prefix = Self::final_prefix(); PrefixIterator { @@ -1127,6 +1148,7 @@ pub trait StoragePrefixedMap { previous_key: prefix.to_vec(), drain: false, closure: |_raw_key, mut raw_value| Value::decode(&mut raw_value), + phantom: Default::default(), } } @@ -1613,7 +1635,7 @@ mod test { assert_eq!(final_vec, vec![1, 2, 3, 4, 5]); - let mut iter = PrefixIterator::new( + let mut iter = PrefixIterator::<_>::new( iter.prefix().to_vec(), stored_key, |mut raw_key_without_prefix, mut raw_value| { diff --git a/frame/support/src/storage/types/counted_map.rs b/frame/support/src/storage/types/counted_map.rs new file mode 100644 index 0000000000000..0860a4ed541c6 --- /dev/null +++ b/frame/support/src/storage/types/counted_map.rs @@ -0,0 +1,1040 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Storage counted map type. + +use crate::{ + metadata::StorageEntryMetadata, + storage::{ + generator::StorageMap as _, + types::{ + OptionQuery, QueryKindTrait, StorageEntryMetadataBuilder, StorageMap, StorageValue, + ValueQuery, + }, + StorageAppend, StorageDecodeLength, StorageTryAppend, + }, + traits::{Get, GetDefault, StorageInfo, StorageInfoTrait, StorageInstance}, + Never, +}; +use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen, Ref}; +use sp_runtime::traits::Saturating; +use sp_std::prelude::*; + +/// A wrapper around a `StorageMap` and a `StorageValue` to keep track of how many items +/// are in a map, without needing to iterate all the values. +/// +/// This storage item has additional storage read and write overhead when manipulating values +/// compared to a regular storage map. +/// +/// For functions where we only add or remove a value, a single storage read is needed to check if +/// that value already exists. For mutate functions, two storage reads are used to check if the +/// value existed before and after the mutation. +/// +/// Whenever the counter needs to be updated, an additional read and write occurs to update that +/// counter. +pub struct CountedStorageMap< + Prefix, + Hasher, + Key, + Value, + QueryKind = OptionQuery, + OnEmpty = GetDefault, + MaxValues = GetDefault, +>(core::marker::PhantomData<(Prefix, Hasher, Key, Value, QueryKind, OnEmpty, MaxValues)>); + +/// The requirement for an instance of [`CountedStorageMap`]. 
+pub trait CountedStorageMapInstance: StorageInstance { + /// The prefix to use for the counter storage value. + type CounterPrefix: StorageInstance; +} + +// Private helper trait to access map from counted storage map. +trait MapWrapper { + type Map; +} + +impl MapWrapper + for CountedStorageMap +{ + type Map = StorageMap; +} + +type CounterFor
<P> =
+	StorageValue<<P as CountedStorageMapInstance>
::CounterPrefix, u32, ValueQuery>; + +/// On removal logic for updating counter while draining upon some prefix with +/// [`crate::storage::PrefixIterator`]. +pub struct OnRemovalCounterUpdate(core::marker::PhantomData); + +impl crate::storage::PrefixIteratorOnRemoval + for OnRemovalCounterUpdate +{ + fn on_removal(_key: &[u8], _value: &[u8]) { + CounterFor::::mutate(|value| value.saturating_dec()); + } +} + +impl + CountedStorageMap +where + Prefix: CountedStorageMapInstance, + Hasher: crate::hash::StorageHasher, + Key: FullCodec, + Value: FullCodec, + QueryKind: QueryKindTrait, + OnEmpty: Get + 'static, + MaxValues: Get>, +{ + /// Get the storage key used to fetch a value corresponding to a specific key. + pub fn hashed_key_for>(key: KeyArg) -> Vec { + ::Map::hashed_key_for(key) + } + + /// Does the value (explicitly) exist in storage? + pub fn contains_key>(key: KeyArg) -> bool { + ::Map::contains_key(key) + } + + /// Load the value associated with the given key from the map. + pub fn get>(key: KeyArg) -> QueryKind::Query { + ::Map::get(key) + } + + /// Try to get the value for the given key from the map. + /// + /// Returns `Ok` if it exists, `Err` if not. + pub fn try_get>(key: KeyArg) -> Result { + ::Map::try_get(key) + } + + /// Swap the values of two keys. + pub fn swap, KeyArg2: EncodeLike>(key1: KeyArg1, key2: KeyArg2) { + ::Map::swap(key1, key2) + } + + /// Store a value to be associated with the given key from the map. + pub fn insert + Clone, ValArg: EncodeLike>( + key: KeyArg, + val: ValArg, + ) { + if !::Map::contains_key(Ref::from(&key)) { + CounterFor::::mutate(|value| value.saturating_inc()); + } + ::Map::insert(key, val) + } + + /// Remove the value under a key. + pub fn remove + Clone>(key: KeyArg) { + if ::Map::contains_key(Ref::from(&key)) { + CounterFor::::mutate(|value| value.saturating_dec()); + } + ::Map::remove(key) + } + + /// Mutate the value under a key. 
+ pub fn mutate + Clone, R, F: FnOnce(&mut QueryKind::Query) -> R>( + key: KeyArg, + f: F, + ) -> R { + Self::try_mutate(key, |v| Ok::(f(v))) + .expect("`Never` can not be constructed; qed") + } + + /// Mutate the item, only if an `Ok` value is returned. + pub fn try_mutate(key: KeyArg, f: F) -> Result + where + KeyArg: EncodeLike + Clone, + F: FnOnce(&mut QueryKind::Query) -> Result, + { + Self::try_mutate_exists(key, |option_value_ref| { + let option_value = core::mem::replace(option_value_ref, None); + let mut query = ::Map::from_optional_value_to_query(option_value); + let res = f(&mut query); + let option_value = ::Map::from_query_to_optional_value(query); + let _ = core::mem::replace(option_value_ref, option_value); + res + }) + } + + /// Mutate the value under a key. Deletes the item if mutated to a `None`. + pub fn mutate_exists + Clone, R, F: FnOnce(&mut Option) -> R>( + key: KeyArg, + f: F, + ) -> R { + Self::try_mutate_exists(key, |v| Ok::(f(v))) + .expect("`Never` can not be constructed; qed") + } + + /// Mutate the item, only if an `Ok` value is returned. Deletes the item if mutated to a `None`. + pub fn try_mutate_exists(key: KeyArg, f: F) -> Result + where + KeyArg: EncodeLike + Clone, + F: FnOnce(&mut Option) -> Result, + { + ::Map::try_mutate_exists(key, |option_value| { + let existed = option_value.is_some(); + let res = f(option_value); + let exist = option_value.is_some(); + + if res.is_ok() { + if existed && !exist { + // Value was deleted + CounterFor::::mutate(|value| value.saturating_dec()); + } else if !existed && exist { + // Value was added + CounterFor::::mutate(|value| value.saturating_inc()); + } + } + res + }) + } + + /// Take the value under a key. 
+ pub fn take + Clone>(key: KeyArg) -> QueryKind::Query { + let removed_value = + ::Map::mutate_exists(key, |value| core::mem::replace(value, None)); + if removed_value.is_some() { + CounterFor::::mutate(|value| value.saturating_dec()); + } + ::Map::from_optional_value_to_query(removed_value) + } + + /// Append the given items to the value in the storage. + /// + /// `Value` is required to implement `codec::EncodeAppend`. + /// + /// # Warning + /// + /// If the storage item is not encoded properly, the storage will be overwritten and set to + /// `[item]`. Any default value set for the storage item will be ignored on overwrite. + pub fn append(key: EncodeLikeKey, item: EncodeLikeItem) + where + EncodeLikeKey: EncodeLike + Clone, + Item: Encode, + EncodeLikeItem: EncodeLike, + Value: StorageAppend, + { + if !::Map::contains_key(Ref::from(&key)) { + CounterFor::::mutate(|value| value.saturating_inc()); + } + ::Map::append(key, item) + } + + /// Read the length of the storage value without decoding the entire value under the given + /// `key`. + /// + /// `Value` is required to implement [`StorageDecodeLength`]. + /// + /// If the value does not exists or it fails to decode the length, `None` is returned. Otherwise + /// `Some(len)` is returned. + /// + /// # Warning + /// + /// `None` does not mean that `get()` does not return a value. The default value is completly + /// ignored by this function. + pub fn decode_len>(key: KeyArg) -> Option + where + Value: StorageDecodeLength, + { + ::Map::decode_len(key) + } + + /// Migrate an item with the given `key` from a defunct `OldHasher` to the current hasher. + /// + /// If the key doesn't exist, then it's a no-op. If it does, then it returns its value. + pub fn migrate_key>( + key: KeyArg, + ) -> Option { + ::Map::migrate_key::(key) + } + + /// Remove all value of the storage. + pub fn remove_all() { + CounterFor::::set(0u32); + ::Map::remove_all(None); + } + + /// Iter over all value of the storage. 
+ /// + /// NOTE: If a value failed to decode because storage is corrupted then it is skipped. + pub fn iter_values() -> crate::storage::PrefixIterator> { + let map_iterator = ::Map::iter_values(); + crate::storage::PrefixIterator { + prefix: map_iterator.prefix, + previous_key: map_iterator.previous_key, + drain: map_iterator.drain, + closure: map_iterator.closure, + phantom: Default::default(), + } + } + + /// Translate the values of all elements by a function `f`, in the map in no particular order. + /// + /// By returning `None` from `f` for an element, you'll remove it from the map. + /// + /// NOTE: If a value fail to decode because storage is corrupted then it is skipped. + /// + /// # Warning + /// + /// This function must be used with care, before being updated the storage still contains the + /// old type, thus other calls (such as `get`) will fail at decoding it. + /// + /// # Usage + /// + /// This would typically be called inside the module implementation of on_runtime_upgrade. + pub fn translate_values Option>(mut f: F) { + ::Map::translate_values(|old_value| { + let res = f(old_value); + if res.is_none() { + CounterFor::::mutate(|value| value.saturating_dec()); + } + res + }) + } + + /// Try and append the given item to the value in the storage. + /// + /// Is only available if `Value` of the storage implements [`StorageTryAppend`]. + pub fn try_append(key: KArg, item: EncodeLikeItem) -> Result<(), ()> + where + KArg: EncodeLike + Clone, + Item: Encode, + EncodeLikeItem: EncodeLike, + Value: StorageTryAppend, + { + let bound = Value::bound(); + let current = ::Map::decode_len(Ref::from(&key)).unwrap_or_default(); + if current < bound { + CounterFor::::mutate(|value| value.saturating_inc()); + let key = ::Map::hashed_key_for(key); + sp_io::storage::append(&key, item.encode()); + Ok(()) + } else { + Err(()) + } + } + + /// Initialize the counter with the actual number of items in the map. 
+ /// + /// This function iterates through all the items in the map and sets the counter. This operation + /// can be very heavy, so use with caution. + /// + /// Returns the number of items in the map which is used to set the counter. + pub fn initialize_counter() -> u32 { + let count = Self::iter_values().count() as u32; + CounterFor::::set(count); + count + } + + /// Return the count. + pub fn count() -> u32 { + CounterFor::::get() + } +} + +impl + CountedStorageMap +where + Prefix: CountedStorageMapInstance, + Hasher: crate::hash::StorageHasher + crate::ReversibleStorageHasher, + Key: FullCodec, + Value: FullCodec, + QueryKind: QueryKindTrait, + OnEmpty: Get + 'static, + MaxValues: Get>, +{ + /// Enumerate all elements in the map in no particular order. + /// + /// If you alter the map while doing this, you'll get undefined results. + pub fn iter() -> crate::storage::PrefixIterator<(Key, Value), OnRemovalCounterUpdate> { + let map_iterator = ::Map::iter(); + crate::storage::PrefixIterator { + prefix: map_iterator.prefix, + previous_key: map_iterator.previous_key, + drain: map_iterator.drain, + closure: map_iterator.closure, + phantom: Default::default(), + } + } + + /// Remove all elements from the map and iterate through them in no particular order. + /// + /// If you add elements to the map while doing this, you'll get undefined results. + pub fn drain() -> crate::storage::PrefixIterator<(Key, Value), OnRemovalCounterUpdate> { + let map_iterator = ::Map::drain(); + crate::storage::PrefixIterator { + prefix: map_iterator.prefix, + previous_key: map_iterator.previous_key, + drain: map_iterator.drain, + closure: map_iterator.closure, + phantom: Default::default(), + } + } + + /// Translate the values of all elements by a function `f`, in the map in no particular order. + /// + /// By returning `None` from `f` for an element, you'll remove it from the map. + /// + /// NOTE: If a value fail to decode because storage is corrupted then it is skipped. 
+ pub fn translate Option>(mut f: F) { + ::Map::translate(|key, old_value| { + let res = f(key, old_value); + if res.is_none() { + CounterFor::::mutate(|value| value.saturating_dec()); + } + res + }) + } +} + +impl StorageEntryMetadataBuilder + for CountedStorageMap +where + Prefix: CountedStorageMapInstance, + Hasher: crate::hash::StorageHasher, + Key: FullCodec + scale_info::StaticTypeInfo, + Value: FullCodec + scale_info::StaticTypeInfo, + QueryKind: QueryKindTrait, + OnEmpty: Get + 'static, + MaxValues: Get>, +{ + fn build_metadata(docs: Vec<&'static str>, entries: &mut Vec) { + ::Map::build_metadata(docs, entries); + CounterFor::::build_metadata( + vec![&"Counter for the related counted storage map"], + entries, + ); + } +} + +impl crate::traits::StorageInfoTrait + for CountedStorageMap +where + Prefix: CountedStorageMapInstance, + Hasher: crate::hash::StorageHasher, + Key: FullCodec + MaxEncodedLen, + Value: FullCodec + MaxEncodedLen, + QueryKind: QueryKindTrait, + OnEmpty: Get + 'static, + MaxValues: Get>, +{ + fn storage_info() -> Vec { + [::Map::storage_info(), CounterFor::::storage_info()].concat() + } +} + +/// It doesn't require to implement `MaxEncodedLen` and give no information for `max_size`. 
+impl + crate::traits::PartialStorageInfoTrait + for CountedStorageMap +where + Prefix: CountedStorageMapInstance, + Hasher: crate::hash::StorageHasher, + Key: FullCodec, + Value: FullCodec, + QueryKind: QueryKindTrait, + OnEmpty: Get + 'static, + MaxValues: Get>, +{ + fn partial_storage_info() -> Vec { + [::Map::partial_storage_info(), CounterFor::::storage_info()] + .concat() + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::{ + hash::*, + metadata::{StorageEntryModifier, StorageEntryType, StorageHasher}, + storage::{bounded_vec::BoundedVec, types::ValueQuery}, + traits::ConstU32, + }; + use sp_io::{hashing::twox_128, TestExternalities}; + + struct Prefix; + impl StorageInstance for Prefix { + fn pallet_prefix() -> &'static str { + "test" + } + const STORAGE_PREFIX: &'static str = "foo"; + } + + struct CounterPrefix; + impl StorageInstance for CounterPrefix { + fn pallet_prefix() -> &'static str { + "test" + } + const STORAGE_PREFIX: &'static str = "counter_for_foo"; + } + impl CountedStorageMapInstance for Prefix { + type CounterPrefix = CounterPrefix; + } + + struct ADefault; + impl crate::traits::Get for ADefault { + fn get() -> u32 { + 97 + } + } + + #[test] + fn test_value_query() { + type A = CountedStorageMap; + + TestExternalities::default().execute_with(|| { + let mut k: Vec = vec![]; + k.extend(&twox_128(b"test")); + k.extend(&twox_128(b"foo")); + k.extend(&3u16.twox_64_concat()); + assert_eq!(A::hashed_key_for(3).to_vec(), k); + + assert_eq!(A::contains_key(3), false); + assert_eq!(A::get(3), ADefault::get()); + assert_eq!(A::try_get(3), Err(())); + assert_eq!(A::count(), 0); + + // Insert non-existing. + A::insert(3, 10); + + assert_eq!(A::contains_key(3), true); + assert_eq!(A::get(3), 10); + assert_eq!(A::try_get(3), Ok(10)); + assert_eq!(A::count(), 1); + + // Swap non-existing with existing. 
+ A::swap(4, 3); + + assert_eq!(A::contains_key(3), false); + assert_eq!(A::get(3), ADefault::get()); + assert_eq!(A::try_get(3), Err(())); + assert_eq!(A::contains_key(4), true); + assert_eq!(A::get(4), 10); + assert_eq!(A::try_get(4), Ok(10)); + assert_eq!(A::count(), 1); + + // Swap existing with non-existing. + A::swap(4, 3); + + assert_eq!(A::try_get(3), Ok(10)); + assert_eq!(A::contains_key(4), false); + assert_eq!(A::get(4), ADefault::get()); + assert_eq!(A::try_get(4), Err(())); + assert_eq!(A::count(), 1); + + A::insert(4, 11); + + assert_eq!(A::try_get(3), Ok(10)); + assert_eq!(A::try_get(4), Ok(11)); + assert_eq!(A::count(), 2); + + // Swap 2 existing. + A::swap(3, 4); + + assert_eq!(A::try_get(3), Ok(11)); + assert_eq!(A::try_get(4), Ok(10)); + assert_eq!(A::count(), 2); + + // Insert an existing key, shouldn't increment counted values. + A::insert(3, 11); + + assert_eq!(A::count(), 2); + + // Remove non-existing. + A::remove(2); + + assert_eq!(A::contains_key(2), false); + assert_eq!(A::count(), 2); + + // Remove existing. + A::remove(3); + + assert_eq!(A::try_get(3), Err(())); + assert_eq!(A::count(), 1); + + // Mutate non-existing to existing. + A::mutate(3, |query| { + assert_eq!(*query, ADefault::get()); + *query = 40; + }); + + assert_eq!(A::try_get(3), Ok(40)); + assert_eq!(A::count(), 2); + + // Mutate existing to existing. + A::mutate(3, |query| { + assert_eq!(*query, 40); + *query = 40; + }); + + assert_eq!(A::try_get(3), Ok(40)); + assert_eq!(A::count(), 2); + + // Try fail mutate non-existing to existing. + A::try_mutate(2, |query| { + assert_eq!(*query, ADefault::get()); + *query = 4; + Result::<(), ()>::Err(()) + }) + .err() + .unwrap(); + + assert_eq!(A::try_get(2), Err(())); + assert_eq!(A::count(), 2); + + // Try succeed mutate non-existing to existing. 
+ A::try_mutate(2, |query| { + assert_eq!(*query, ADefault::get()); + *query = 41; + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(A::try_get(2), Ok(41)); + assert_eq!(A::count(), 3); + + // Try succeed mutate existing to existing. + A::try_mutate(2, |query| { + assert_eq!(*query, 41); + *query = 41; + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(A::try_get(2), Ok(41)); + assert_eq!(A::count(), 3); + + // Try fail mutate non-existing to existing. + A::try_mutate_exists(1, |query| { + assert_eq!(*query, None); + *query = Some(4); + Result::<(), ()>::Err(()) + }) + .err() + .unwrap(); + + assert_eq!(A::try_get(1), Err(())); + assert_eq!(A::count(), 3); + + // Try succeed mutate non-existing to existing. + A::try_mutate_exists(1, |query| { + assert_eq!(*query, None); + *query = Some(43); + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(A::try_get(1), Ok(43)); + assert_eq!(A::count(), 4); + + // Try succeed mutate existing to existing. + A::try_mutate_exists(1, |query| { + assert_eq!(*query, Some(43)); + *query = Some(43); + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(A::try_get(1), Ok(43)); + assert_eq!(A::count(), 4); + + // Try succeed mutate existing to non-existing. + A::try_mutate_exists(1, |query| { + assert_eq!(*query, Some(43)); + *query = None; + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(A::try_get(1), Err(())); + assert_eq!(A::count(), 3); + + // Take exsisting. + assert_eq!(A::take(4), 10); + + assert_eq!(A::try_get(4), Err(())); + assert_eq!(A::count(), 2); + + // Take non-exsisting. + assert_eq!(A::take(4), ADefault::get()); + + assert_eq!(A::try_get(4), Err(())); + assert_eq!(A::count(), 2); + + // Remove all. + A::remove_all(); + + assert_eq!(A::count(), 0); + assert_eq!(A::initialize_counter(), 0); + + A::insert(1, 1); + A::insert(2, 2); + + // Iter values. + assert_eq!(A::iter_values().collect::>(), vec![2, 1]); + + // Iter drain values. 
+ assert_eq!(A::iter_values().drain().collect::>(), vec![2, 1]); + assert_eq!(A::count(), 0); + + A::insert(1, 1); + A::insert(2, 2); + + // Test initialize_counter. + assert_eq!(A::initialize_counter(), 2); + }) + } + + #[test] + fn test_option_query() { + type B = CountedStorageMap; + + TestExternalities::default().execute_with(|| { + let mut k: Vec = vec![]; + k.extend(&twox_128(b"test")); + k.extend(&twox_128(b"foo")); + k.extend(&3u16.twox_64_concat()); + assert_eq!(B::hashed_key_for(3).to_vec(), k); + + assert_eq!(B::contains_key(3), false); + assert_eq!(B::get(3), None); + assert_eq!(B::try_get(3), Err(())); + assert_eq!(B::count(), 0); + + // Insert non-existing. + B::insert(3, 10); + + assert_eq!(B::contains_key(3), true); + assert_eq!(B::get(3), Some(10)); + assert_eq!(B::try_get(3), Ok(10)); + assert_eq!(B::count(), 1); + + // Swap non-existing with existing. + B::swap(4, 3); + + assert_eq!(B::contains_key(3), false); + assert_eq!(B::get(3), None); + assert_eq!(B::try_get(3), Err(())); + assert_eq!(B::contains_key(4), true); + assert_eq!(B::get(4), Some(10)); + assert_eq!(B::try_get(4), Ok(10)); + assert_eq!(B::count(), 1); + + // Swap existing with non-existing. + B::swap(4, 3); + + assert_eq!(B::try_get(3), Ok(10)); + assert_eq!(B::contains_key(4), false); + assert_eq!(B::get(4), None); + assert_eq!(B::try_get(4), Err(())); + assert_eq!(B::count(), 1); + + B::insert(4, 11); + + assert_eq!(B::try_get(3), Ok(10)); + assert_eq!(B::try_get(4), Ok(11)); + assert_eq!(B::count(), 2); + + // Swap 2 existing. + B::swap(3, 4); + + assert_eq!(B::try_get(3), Ok(11)); + assert_eq!(B::try_get(4), Ok(10)); + assert_eq!(B::count(), 2); + + // Insert an existing key, shouldn't increment counted values. + B::insert(3, 11); + + assert_eq!(B::count(), 2); + + // Remove non-existing. + B::remove(2); + + assert_eq!(B::contains_key(2), false); + assert_eq!(B::count(), 2); + + // Remove existing. 
+ B::remove(3); + + assert_eq!(B::try_get(3), Err(())); + assert_eq!(B::count(), 1); + + // Mutate non-existing to existing. + B::mutate(3, |query| { + assert_eq!(*query, None); + *query = Some(40) + }); + + assert_eq!(B::try_get(3), Ok(40)); + assert_eq!(B::count(), 2); + + // Mutate existing to existing. + B::mutate(3, |query| { + assert_eq!(*query, Some(40)); + *query = Some(40) + }); + + assert_eq!(B::try_get(3), Ok(40)); + assert_eq!(B::count(), 2); + + // Mutate existing to non-existing. + B::mutate(3, |query| { + assert_eq!(*query, Some(40)); + *query = None + }); + + assert_eq!(B::try_get(3), Err(())); + assert_eq!(B::count(), 1); + + B::insert(3, 40); + + // Try fail mutate non-existing to existing. + B::try_mutate(2, |query| { + assert_eq!(*query, None); + *query = Some(4); + Result::<(), ()>::Err(()) + }) + .err() + .unwrap(); + + assert_eq!(B::try_get(2), Err(())); + assert_eq!(B::count(), 2); + + // Try succeed mutate non-existing to existing. + B::try_mutate(2, |query| { + assert_eq!(*query, None); + *query = Some(41); + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(B::try_get(2), Ok(41)); + assert_eq!(B::count(), 3); + + // Try succeed mutate existing to existing. + B::try_mutate(2, |query| { + assert_eq!(*query, Some(41)); + *query = Some(41); + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(B::try_get(2), Ok(41)); + assert_eq!(B::count(), 3); + + // Try succeed mutate existing to non-existing. + B::try_mutate(2, |query| { + assert_eq!(*query, Some(41)); + *query = None; + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(B::try_get(2), Err(())); + assert_eq!(B::count(), 2); + + B::insert(2, 41); + + // Try fail mutate non-existing to existing. + B::try_mutate_exists(1, |query| { + assert_eq!(*query, None); + *query = Some(4); + Result::<(), ()>::Err(()) + }) + .err() + .unwrap(); + + assert_eq!(B::try_get(1), Err(())); + assert_eq!(B::count(), 3); + + // Try succeed mutate non-existing to existing. 
+ B::try_mutate_exists(1, |query| { + assert_eq!(*query, None); + *query = Some(43); + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(B::try_get(1), Ok(43)); + assert_eq!(B::count(), 4); + + // Try succeed mutate existing to existing. + B::try_mutate_exists(1, |query| { + assert_eq!(*query, Some(43)); + *query = Some(43); + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(B::try_get(1), Ok(43)); + assert_eq!(B::count(), 4); + + // Try succeed mutate existing to non-existing. + B::try_mutate_exists(1, |query| { + assert_eq!(*query, Some(43)); + *query = None; + Result::<(), ()>::Ok(()) + }) + .unwrap(); + + assert_eq!(B::try_get(1), Err(())); + assert_eq!(B::count(), 3); + + // Take exsisting. + assert_eq!(B::take(4), Some(10)); + + assert_eq!(B::try_get(4), Err(())); + assert_eq!(B::count(), 2); + + // Take non-exsisting. + assert_eq!(B::take(4), None); + + assert_eq!(B::try_get(4), Err(())); + assert_eq!(B::count(), 2); + + // Remove all. + B::remove_all(); + + assert_eq!(B::count(), 0); + assert_eq!(B::initialize_counter(), 0); + + B::insert(1, 1); + B::insert(2, 2); + + // Iter values. + assert_eq!(B::iter_values().collect::>(), vec![2, 1]); + + // Iter drain values. + assert_eq!(B::iter_values().drain().collect::>(), vec![2, 1]); + assert_eq!(B::count(), 0); + + B::insert(1, 1); + B::insert(2, 2); + + // Test initialize_counter. 
+ assert_eq!(B::initialize_counter(), 2); + }) + } + + #[test] + fn append_decode_len_works() { + type B = CountedStorageMap>; + + TestExternalities::default().execute_with(|| { + assert_eq!(B::decode_len(0), None); + B::append(0, 3); + assert_eq!(B::decode_len(0), Some(1)); + B::append(0, 3); + assert_eq!(B::decode_len(0), Some(2)); + B::append(0, 3); + assert_eq!(B::decode_len(0), Some(3)); + }) + } + + #[test] + fn try_append_decode_len_works() { + type B = CountedStorageMap>>; + + TestExternalities::default().execute_with(|| { + assert_eq!(B::decode_len(0), None); + B::try_append(0, 3).unwrap(); + assert_eq!(B::decode_len(0), Some(1)); + B::try_append(0, 3).unwrap(); + assert_eq!(B::decode_len(0), Some(2)); + B::try_append(0, 3).unwrap(); + assert_eq!(B::decode_len(0), Some(3)); + B::try_append(0, 3).err().unwrap(); + assert_eq!(B::decode_len(0), Some(3)); + }) + } + + #[test] + fn migrate_keys_works() { + type A = CountedStorageMap; + type B = CountedStorageMap; + TestExternalities::default().execute_with(|| { + A::insert(1, 1); + assert_eq!(B::migrate_key::(1), Some(1)); + assert_eq!(B::get(1), Some(1)); + }) + } + + #[test] + fn translate_values() { + type A = CountedStorageMap; + TestExternalities::default().execute_with(|| { + A::insert(1, 1); + A::insert(2, 2); + A::translate_values::(|old_value| if old_value == 1 { None } else { Some(1) }); + assert_eq!(A::count(), 1); + assert_eq!(A::get(2), Some(1)); + }) + } + + #[test] + fn test_iter_drain_translate() { + type A = CountedStorageMap; + TestExternalities::default().execute_with(|| { + A::insert(1, 1); + A::insert(2, 2); + + assert_eq!(A::iter().collect::>(), vec![(2, 2), (1, 1)]); + + assert_eq!(A::count(), 2); + + A::translate::( + |key, value| if key == 1 { None } else { Some(key as u32 * value) }, + ); + + assert_eq!(A::count(), 1); + + assert_eq!(A::drain().collect::>(), vec![(2, 4)]); + + assert_eq!(A::count(), 0); + }) + } + + #[test] + fn test_metadata() { + type A = CountedStorageMap; + let mut 
entries = vec![]; + A::build_metadata(vec![], &mut entries); + assert_eq!( + entries, + vec![ + StorageEntryMetadata { + name: "foo", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Twox64Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: 97u32.encode(), + docs: vec![], + }, + StorageEntryMetadata { + name: "counter_for_foo", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec!["Counter for the related counted storage map"], + }, + ] + ); + } +} diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index 7750110050868..b9af4a621b92a 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -19,9 +19,9 @@ //! StoragePrefixedDoubleMap traits and their methods directly. use crate::{ - metadata::{StorageEntryModifier, StorageEntryType}, + metadata::{StorageEntryMetadata, StorageEntryType}, storage::{ - types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadataBuilder}, StorageAppend, StorageDecodeLength, StoragePrefixedMap, StorageTryAppend, }, traits::{Get, GetDefault, StorageInfo, StorageInstance}, @@ -342,7 +342,7 @@ where /// Iter over all value of the storage. /// - /// NOTE: If a value failed to decode becaues storage is corrupted then it is skipped. + /// NOTE: If a value failed to decode because storage is corrupted then it is skipped. 
pub fn iter_values() -> crate::storage::PrefixIterator { >::iter_values() } @@ -512,7 +512,7 @@ where } impl - StorageEntryMetadata + StorageEntryMetadataBuilder for StorageDoubleMap where Prefix: StorageInstance, @@ -525,19 +525,20 @@ where OnEmpty: Get + 'static, MaxValues: Get>, { - const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const NAME: &'static str = Prefix::STORAGE_PREFIX; - - fn ty() -> StorageEntryType { - StorageEntryType::Map { - hashers: vec![Hasher1::METADATA, Hasher2::METADATA], - key: scale_info::meta_type::<(Key1, Key2)>(), - value: scale_info::meta_type::(), - } - } - - fn default() -> Vec { - OnEmpty::get().encode() + fn build_metadata(docs: Vec<&'static str>, entries: &mut Vec) { + let entry = StorageEntryMetadata { + name: Prefix::STORAGE_PREFIX, + modifier: QueryKind::METADATA, + ty: StorageEntryType::Map { + hashers: vec![Hasher1::METADATA, Hasher2::METADATA], + key: scale_info::meta_type::<(Key1, Key2)>(), + value: scale_info::meta_type::(), + }, + default: OnEmpty::get().encode(), + docs, + }; + + entries.push(entry); } } @@ -605,7 +606,6 @@ mod test { metadata::{StorageEntryModifier, StorageEntryType, StorageHasher}, storage::types::ValueQuery, }; - use assert_matches::assert_matches; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -767,30 +767,43 @@ mod test { A::translate::(|k1, k2, v| Some((k1 * k2 as u16 * v as u16).into())); assert_eq!(A::iter().collect::>(), vec![(4, 40, 1600), (3, 30, 900)]); - assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - - let assert_map_hashers = |ty, expected_hashers| { - if let StorageEntryType::Map { hashers, .. } = ty { - assert_eq!(hashers, expected_hashers) - } else { - assert_matches!(ty, StorageEntryType::Map { .. 
}) - } - }; - - assert_map_hashers( - A::ty(), - vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], - ); - assert_map_hashers( - AValueQueryWithAnOnEmpty::ty(), - vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + let mut entries = vec![]; + A::build_metadata(vec![], &mut entries); + AValueQueryWithAnOnEmpty::build_metadata(vec![], &mut entries); + assert_eq!( + entries, + vec![ + StorageEntryMetadata { + name: "foo", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat + ], + key: scale_info::meta_type::<(u16, u8)>(), + value: scale_info::meta_type::(), + }, + default: Option::::None.encode(), + docs: vec![], + }, + StorageEntryMetadata { + name: "foo", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat + ], + key: scale_info::meta_type::<(u16, u8)>(), + value: scale_info::meta_type::(), + }, + default: 97u32.encode(), + docs: vec![], + } + ] ); - assert_eq!(A::NAME, "foo"); - assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); - assert_eq!(A::default(), Option::::None.encode()); - WithLen::remove_all(None); assert_eq!(WithLen::decode_len(3, 30), None); WithLen::append(0, 100, 10); diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index a31224f15c80f..45340f9015eaa 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -19,9 +19,9 @@ //! methods directly. 
use crate::{ - metadata::{StorageEntryModifier, StorageEntryType}, + metadata::{StorageEntryMetadata, StorageEntryType}, storage::{ - types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadataBuilder}, StorageAppend, StorageDecodeLength, StoragePrefixedMap, StorageTryAppend, }, traits::{Get, GetDefault, StorageInfo, StorageInstance}, @@ -241,7 +241,7 @@ where /// Iter over all value of the storage. /// - /// NOTE: If a value failed to decode becaues storage is corrupted then it is skipped. + /// NOTE: If a value failed to decode because storage is corrupted then it is skipped. pub fn iter_values() -> crate::storage::PrefixIterator { >::iter_values() } @@ -336,7 +336,7 @@ where } } -impl StorageEntryMetadata +impl StorageEntryMetadataBuilder for StorageMap where Prefix: StorageInstance, @@ -347,19 +347,20 @@ where OnEmpty: Get + 'static, MaxValues: Get>, { - const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const NAME: &'static str = Prefix::STORAGE_PREFIX; - - fn ty() -> StorageEntryType { - StorageEntryType::Map { - hashers: vec![Hasher::METADATA], - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), - } - } - - fn default() -> Vec { - OnEmpty::get().encode() + fn build_metadata(docs: Vec<&'static str>, entries: &mut Vec) { + let entry = StorageEntryMetadata { + name: Prefix::STORAGE_PREFIX, + modifier: QueryKind::METADATA, + ty: StorageEntryType::Map { + hashers: vec![Hasher::METADATA], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: OnEmpty::get().encode(), + docs, + }; + + entries.push(entry); } } @@ -421,7 +422,6 @@ mod test { metadata::{StorageEntryModifier, StorageEntryType, StorageHasher}, storage::types::ValueQuery, }; - use assert_matches::assert_matches; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -573,25 +573,36 @@ mod test { A::translate::(|k, v| Some((k * v as u16).into())); 
assert_eq!(A::iter().collect::>(), vec![(4, 40), (3, 30)]); - assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - - let assert_map_hashers = |ty, expected_hashers| { - if let StorageEntryType::Map { hashers, .. } = ty { - assert_eq!(hashers, expected_hashers) - } else { - assert_matches!(ty, StorageEntryType::Map { .. }) - } - }; - - assert_map_hashers(A::ty(), vec![StorageHasher::Blake2_128Concat]); - assert_map_hashers( - AValueQueryWithAnOnEmpty::ty(), - vec![StorageHasher::Blake2_128Concat], + let mut entries = vec![]; + A::build_metadata(vec![], &mut entries); + AValueQueryWithAnOnEmpty::build_metadata(vec![], &mut entries); + assert_eq!( + entries, + vec![ + StorageEntryMetadata { + name: "foo", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: Option::::None.encode(), + docs: vec![], + }, + StorageEntryMetadata { + name: "foo", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: 97u32.encode(), + docs: vec![], + } + ] ); - assert_eq!(A::NAME, "foo"); - assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); - assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); assert_eq!(WithLen::decode_len(3), None); diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 76fed0b8cb320..bcab996f68323 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -18,16 +18,18 @@ //! Storage types to build abstraction on storage, they implements storage traits such as //! StorageMap and others. 
-use crate::metadata::{StorageEntryModifier, StorageEntryType}; +use crate::metadata::{StorageEntryMetadata, StorageEntryModifier}; use codec::FullCodec; use sp_std::prelude::*; +mod counted_map; mod double_map; mod key; mod map; mod nmap; mod value; +pub use counted_map::{CountedStorageMap, CountedStorageMapInstance}; pub use double_map::StorageDoubleMap; pub use key::{ EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, Key, KeyGenerator, @@ -103,13 +105,10 @@ where } } -/// Provide metadata for a storage entry. +/// Build the metadata of a storage. /// -/// Implemented by each of the storage entry kinds: value, map, doublemap and nmap. -pub trait StorageEntryMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - - fn ty() -> StorageEntryType; - fn default() -> Vec; +/// Implemented by each of the storage types: value, map, countedmap, doublemap and nmap. +pub trait StorageEntryMetadataBuilder { + /// Build into `entries` the storage metadata entries of a storage given some `docs`. + fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); } diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 7048a69d59c2c..96d6f383ae117 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -19,11 +19,11 @@ //! StoragePrefixedDoubleMap traits and their methods directly. 
use crate::{ - metadata::{StorageEntryModifier, StorageEntryType}, + metadata::{StorageEntryMetadata, StorageEntryType}, storage::{ types::{ EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, OptionQuery, QueryKindTrait, - StorageEntryMetadata, TupleToEncodedIter, + StorageEntryMetadataBuilder, TupleToEncodedIter, }, KeyGenerator, PrefixIterator, StorageAppend, StorageDecodeLength, StoragePrefixedMap, }, @@ -440,7 +440,7 @@ where } } -impl StorageEntryMetadata +impl StorageEntryMetadataBuilder for StorageNMap where Prefix: StorageInstance, @@ -450,19 +450,20 @@ where OnEmpty: Get + 'static, MaxValues: Get>, { - const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const NAME: &'static str = Prefix::STORAGE_PREFIX; - - fn ty() -> StorageEntryType { - StorageEntryType::Map { - key: scale_info::meta_type::(), - hashers: Key::HASHER_METADATA.iter().cloned().collect(), - value: scale_info::meta_type::(), - } - } - - fn default() -> Vec { - OnEmpty::get().encode() + fn build_metadata(docs: Vec<&'static str>, entries: &mut Vec) { + let entry = StorageEntryMetadata { + name: Prefix::STORAGE_PREFIX, + modifier: QueryKind::METADATA, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + hashers: Key::HASHER_METADATA.iter().cloned().collect(), + value: scale_info::meta_type::(), + }, + default: OnEmpty::get().encode(), + docs, + }; + + entries.push(entry); } } @@ -516,8 +517,8 @@ where mod test { use super::*; use crate::{ - hash::*, - metadata::StorageEntryModifier, + hash::{StorageHasher as _, *}, + metadata::{StorageEntryModifier, StorageHasher}, storage::types::{Key, ValueQuery}, }; use sp_io::{hashing::twox_128, TestExternalities}; @@ -684,11 +685,36 @@ mod test { A::translate::(|k1, v| Some((k1 as u16 * v as u16).into())); assert_eq!(A::iter().collect::>(), vec![(4, 40), (3, 30)]); - assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::NAME, 
"Foo"); - assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); - assert_eq!(A::default(), Option::::None.encode()); + let mut entries = vec![]; + A::build_metadata(vec![], &mut entries); + AValueQueryWithAnOnEmpty::build_metadata(vec![], &mut entries); + assert_eq!( + entries, + vec![ + StorageEntryMetadata { + name: "Foo", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: Option::::None.encode(), + docs: vec![], + }, + StorageEntryMetadata { + name: "Foo", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Blake2_128Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: 98u32.encode(), + docs: vec![], + } + ] + ); WithLen::remove_all(None); assert_eq!(WithLen::decode_len((3,)), None); @@ -852,11 +878,42 @@ mod test { A::translate::(|(k1, k2), v| Some((k1 * k2 as u16 * v as u16).into())); assert_eq!(A::iter().collect::>(), vec![((4, 40), 1600), ((3, 30), 900)]); - assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::NAME, "Foo"); - assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); - assert_eq!(A::default(), Option::::None.encode()); + let mut entries = vec![]; + A::build_metadata(vec![], &mut entries); + AValueQueryWithAnOnEmpty::build_metadata(vec![], &mut entries); + assert_eq!( + entries, + vec![ + StorageEntryMetadata { + name: "Foo", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat + ], + key: scale_info::meta_type::<(u16, u8)>(), + value: scale_info::meta_type::(), + }, + default: Option::::None.encode(), + docs: vec![], + }, + StorageEntryMetadata { + name: "Foo", + modifier: 
StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat + ], + key: scale_info::meta_type::<(u16, u8)>(), + value: scale_info::meta_type::(), + }, + default: 98u32.encode(), + docs: vec![], + } + ] + ); WithLen::remove_all(None); assert_eq!(WithLen::decode_len((3, 30)), None); @@ -1042,11 +1099,44 @@ mod test { }); assert_eq!(A::iter().collect::>(), vec![((4, 40, 400), 4), ((3, 30, 300), 3)]); - assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::NAME, "Foo"); - assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); - assert_eq!(A::default(), Option::::None.encode()); + let mut entries = vec![]; + A::build_metadata(vec![], &mut entries); + AValueQueryWithAnOnEmpty::build_metadata(vec![], &mut entries); + assert_eq!( + entries, + vec![ + StorageEntryMetadata { + name: "Foo", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat + ], + key: scale_info::meta_type::<(u16, u16, u16)>(), + value: scale_info::meta_type::(), + }, + default: Option::::None.encode(), + docs: vec![], + }, + StorageEntryMetadata { + name: "Foo", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat + ], + key: scale_info::meta_type::<(u16, u16, u16)>(), + value: scale_info::meta_type::(), + }, + default: 98u32.encode(), + docs: vec![], + } + ] + ); WithLen::remove_all(None); assert_eq!(WithLen::decode_len((3, 30, 300)), None); diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index d7f15487592b1..c5e7173bd0af7 100644 --- a/frame/support/src/storage/types/value.rs +++ 
b/frame/support/src/storage/types/value.rs @@ -18,10 +18,10 @@ //! Storage value type. Implements StorageValue trait and its method directly. use crate::{ - metadata::{StorageEntryModifier, StorageEntryType}, + metadata::{StorageEntryMetadata, StorageEntryType}, storage::{ generator::StorageValue as StorageValueT, - types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadataBuilder}, StorageAppend, StorageDecodeLength, StorageTryAppend, }, traits::{GetDefault, StorageInfo, StorageInstance}, @@ -201,7 +201,7 @@ where } } -impl StorageEntryMetadata +impl StorageEntryMetadataBuilder for StorageValue where Prefix: StorageInstance, @@ -209,15 +209,16 @@ where QueryKind: QueryKindTrait, OnEmpty: crate::traits::Get + 'static, { - const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const NAME: &'static str = Prefix::STORAGE_PREFIX; - - fn ty() -> StorageEntryType { - StorageEntryType::Plain(scale_info::meta_type::()) - } - - fn default() -> Vec { - OnEmpty::get().encode() + fn build_metadata(docs: Vec<&'static str>, entries: &mut Vec) { + let entry = StorageEntryMetadata { + name: Prefix::STORAGE_PREFIX, + modifier: QueryKind::METADATA, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: OnEmpty::get().encode(), + docs, + }; + + entries.push(entry); } } @@ -342,11 +343,28 @@ mod test { A::kill(); assert_eq!(A::try_get(), Err(())); - assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::NAME, "foo"); - assert_eq!(A::default(), Option::::None.encode()); - assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); + let mut entries = vec![]; + A::build_metadata(vec![], &mut entries); + AValueQueryWithAnOnEmpty::build_metadata(vec![], &mut entries); + assert_eq!( + entries, + vec![ + StorageEntryMetadata { + name: "foo", + modifier: StorageEntryModifier::Optional, + ty: 
StorageEntryType::Plain(scale_info::meta_type::()), + default: Option::::None.encode(), + docs: vec![], + }, + StorageEntryMetadata { + name: "foo", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: 97u32.encode(), + docs: vec![], + } + ] + ); WithLen::kill(); assert_eq!(WithLen::decode_len(), None); diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 2874ef6bd7685..6a9a18ea48d4b 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -323,6 +323,12 @@ pub mod pallet { pub type ConditionalNMap = StorageNMap<_, (storage::Key, storage::Key), u32>; + #[pallet::storage] + #[pallet::storage_prefix = "RenamedCountedMap"] + #[pallet::getter(fn counted_storage_map)] + pub type SomeCountedStorageMap = + CountedStorageMap; + #[pallet::genesis_config] #[derive(Default)] pub struct GenesisConfig { @@ -416,6 +422,7 @@ pub mod pallet { } // Test that a pallet with non generic event and generic genesis_config is correctly handled +// and that a pallet without the attribute generate_storage_info is correctly handled. 
#[frame_support::pallet] pub mod pallet2 { use super::{SomeAssociation1, SomeType1}; @@ -446,6 +453,10 @@ pub mod pallet2 { #[pallet::storage] pub type SomeValue = StorageValue<_, Vec>; + #[pallet::storage] + pub type SomeCountedStorageMap = + CountedStorageMap; + #[pallet::event] pub enum Event { /// Something @@ -899,6 +910,13 @@ fn storage_expand() { pallet::ConditionalDoubleMap::::insert(1, 2, 3); pallet::ConditionalNMap::::insert((1, 2), 3); } + + pallet::SomeCountedStorageMap::::insert(1, 2); + let mut k = [twox_128(b"Example"), twox_128(b"RenamedCountedMap")].concat(); + k.extend(1u8.using_encoded(twox_64_concat)); + assert_eq!(unhashed::get::(&k), Some(2u32)); + let k = [twox_128(b"Example"), twox_128(b"CounterForRenamedCountedMap")].concat(); + assert_eq!(unhashed::get::(&k), Some(1u32)); }) } @@ -1180,6 +1198,24 @@ fn metadata() { default: vec![0], docs: vec![], }, + StorageEntryMetadata { + name: "RenamedCountedMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Twox64Concat], + key: meta_type::(), + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "CounterForRenamedCountedMap", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec!["Counter for the related counted storage map"], + }, ], }), calls: Some(meta_type::>().into()), @@ -1370,6 +1406,24 @@ fn metadata() { default: vec![0], docs: vec![], }, + StorageEntryMetadata { + name: "RenamedCountedMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + hashers: vec![StorageHasher::Twox64Concat], + key: meta_type::(), + value: meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "CounterForRenamedCountedMap", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Plain(meta_type::()), + default: vec![0, 0, 0, 0], + docs: vec!["Counter for the 
related counted storage map"], + }, ], }), calls: Some(meta_type::>().into()), @@ -1577,17 +1631,47 @@ fn test_storage_info() { max_size: Some(7 + 16 + 8), } }, + StorageInfo { + pallet_name: b"Example".to_vec(), + storage_name: b"RenamedCountedMap".to_vec(), + prefix: prefix(b"Example", b"RenamedCountedMap").to_vec(), + max_values: None, + max_size: Some(1 + 4 + 8), + }, + StorageInfo { + pallet_name: b"Example".to_vec(), + storage_name: b"CounterForRenamedCountedMap".to_vec(), + prefix: prefix(b"Example", b"CounterForRenamedCountedMap").to_vec(), + max_values: Some(1), + max_size: Some(4), + }, ], ); assert_eq!( Example2::storage_info(), - vec![StorageInfo { - pallet_name: b"Example2".to_vec(), - storage_name: b"SomeValue".to_vec(), - prefix: prefix(b"Example2", b"SomeValue").to_vec(), - max_values: Some(1), - max_size: None, - },], + vec![ + StorageInfo { + pallet_name: b"Example2".to_vec(), + storage_name: b"SomeValue".to_vec(), + prefix: prefix(b"Example2", b"SomeValue").to_vec(), + max_values: Some(1), + max_size: None, + }, + StorageInfo { + pallet_name: b"Example2".to_vec(), + storage_name: b"SomeCountedStorageMap".to_vec(), + prefix: prefix(b"Example2", b"SomeCountedStorageMap").to_vec(), + max_values: None, + max_size: None, + }, + StorageInfo { + pallet_name: b"Example2".to_vec(), + storage_name: b"CounterForSomeCountedStorageMap".to_vec(), + prefix: prefix(b"Example2", b"CounterForSomeCountedStorageMap").to_vec(), + max_values: Some(1), + max_size: Some(4), + }, + ], ); } diff --git a/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.rs b/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.rs index d103fa09d991b..5e99c84050c95 100644 --- a/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.rs +++ b/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.rs @@ -1,6 +1,6 @@ #[frame_support::pallet] mod pallet { - use frame_support::pallet_prelude::StorageValue; + use frame_support::pallet_prelude::*; #[pallet::config] pub 
trait Config: frame_system::Config {} @@ -12,9 +12,15 @@ mod pallet { #[pallet::storage] type Foo = StorageValue<_, u8>; - #[pallet::storage] - #[pallet::storage_prefix = "Foo"] - type NotFoo = StorageValue<_, u16>; + #[pallet::storage] + #[pallet::storage_prefix = "Foo"] + type NotFoo = StorageValue<_, u16>; + + #[pallet::storage] + type CounterForBar = StorageValue<_, u16>; + + #[pallet::storage] + type Bar = CountedStorageMap<_, Twox64Concat, u16, u16>; } fn main() { diff --git a/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.stderr b/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.stderr index 63a6e71e44045..716888c9d8b65 100644 --- a/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.stderr +++ b/frame/support/test/tests/pallet_ui/duplicate_storage_prefix.stderr @@ -1,17 +1,47 @@ error: Duplicate storage prefixes found for `Foo` - --> $DIR/duplicate_storage_prefix.rs:16:32 + --> $DIR/duplicate_storage_prefix.rs:16:29 | 16 | #[pallet::storage_prefix = "Foo"] | ^^^^^ +error: Duplicate storage prefixes found for `Foo` + --> $DIR/duplicate_storage_prefix.rs:13:7 + | +13 | type Foo = StorageValue<_, u8>; + | ^^^ + +error: Duplicate storage prefixes found for `CounterForBar`, used for counter associated to counted storage map + --> $DIR/duplicate_storage_prefix.rs:23:7 + | +23 | type Bar = CountedStorageMap<_, Twox64Concat, u16, u16>; + | ^^^ + +error: Duplicate storage prefixes found for `CounterForBar` + --> $DIR/duplicate_storage_prefix.rs:20:7 + | +20 | type CounterForBar = StorageValue<_, u16>; + | ^^^^^^^^^^^^^ + error[E0412]: cannot find type `_GeneratedPrefixForStorageFoo` in this scope --> $DIR/duplicate_storage_prefix.rs:13:7 | 13 | type Foo = StorageValue<_, u8>; | ^^^ not found in this scope -error[E0121]: the type placeholder `_` is not allowed within types on item signatures - --> $DIR/duplicate_storage_prefix.rs:17:35 +error[E0412]: cannot find type `_GeneratedPrefixForStorageNotFoo` in this scope + --> 
$DIR/duplicate_storage_prefix.rs:17:7 | 17 | type NotFoo = StorageValue<_, u16>; - | ^ not allowed in type signatures + | ^^^^^^ not found in this scope + +error[E0412]: cannot find type `_GeneratedPrefixForStorageCounterForBar` in this scope + --> $DIR/duplicate_storage_prefix.rs:20:7 + | +20 | type CounterForBar = StorageValue<_, u16>; + | ^^^^^^^^^^^^^ not found in this scope + +error[E0412]: cannot find type `_GeneratedPrefixForStorageBar` in this scope + --> $DIR/duplicate_storage_prefix.rs:23:7 + | +23 | type Bar = CountedStorageMap<_, Twox64Concat, u16, u16>; + | ^^^ not found in this scope diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr index e78eb7ff9537b..239de4dba949b 100644 --- a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr @@ -5,8 +5,8 @@ error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` | = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 @@ -16,8 +16,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied | = note: required because of the requirements on the impl of `Decode` for `Bar` = note: required because of 
the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 @@ -27,8 +27,8 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 @@ -39,8 +39,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + 
= note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr index d9a7ddbf3443e..a5bf32a0ef2d2 100644 --- a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr @@ -5,8 +5,8 @@ error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` | = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 @@ -16,8 +16,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied | = note: required because of the requirements on the impl of `Decode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for 
`frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 @@ -27,8 +27,8 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 @@ -39,8 +39,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `NAME` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for 
`frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `build_metadata` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr index 545520124bfee..6c92423c6a7fe 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr @@ -4,6 +4,6 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied 10 | #[pallet::generate_storage_info] | ^^^^^^^^^^^^^^^^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `NMapKey` - = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, NMapKey, u32>` + = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `Key` + = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, Key, u32>` = note: required by `storage_info` From d67e5f4a8d27206c7b473fa6a9218c1a34ffa29b Mon Sep 17 00:00:00 2001 From: Shawn Tabrizi Date: Thu, 16 Sep 2021 09:21:33 -0400 Subject: [PATCH 06/33] Fix Spellcheck for Template (#9795) --- utils/frame/benchmarking-cli/src/template.hbs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/frame/benchmarking-cli/src/template.hbs b/utils/frame/benchmarking-cli/src/template.hbs index 4acb8c7baa23b..36abf27f59a6e 100644 --- a/utils/frame/benchmarking-cli/src/template.hbs +++ b/utils/frame/benchmarking-cli/src/template.hbs @@ -17,7 +17,7 @@ use frame_support::{traits::Get, 
weights::Weight}; use sp_std::marker::PhantomData; -/// Weight functions for {{pallet}}. +/// Weight functions for `{{pallet}}`. pub struct WeightInfo(PhantomData); impl {{pallet}}::WeightInfo for WeightInfo { {{~#each benchmarks as |benchmark|}} From 25eb7ac459211d8f06a98713d9ef60a4f7dd6b69 Mon Sep 17 00:00:00 2001 From: Roman Date: Thu, 16 Sep 2021 17:31:44 +0300 Subject: [PATCH 07/33] Reduce the number of types in build_transport for transport (#9793) --- client/network/src/transport.rs | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/client/network/src/transport.rs b/client/network/src/transport.rs index 04223c6d6846f..3f977a21b1165 100644 --- a/client/network/src/transport.rs +++ b/client/network/src/transport.rs @@ -58,20 +58,16 @@ pub fn build_transport( let desktop_trans = websocket::WsConfig::new(desktop_trans.clone()).or_transport(desktop_trans); let dns_init = futures::executor::block_on(dns::DnsConfig::system(desktop_trans.clone())); - OptionalTransport::some(if let Ok(dns) = dns_init { + EitherTransport::Left(if let Ok(dns) = dns_init { EitherTransport::Left(dns) } else { EitherTransport::Right(desktop_trans.map_err(dns::DnsErr::Transport)) }) } else { - // For the in-memory case we set up the transport with an `.or_transport` below. 
- OptionalTransport::none() + EitherTransport::Right(OptionalTransport::some( + libp2p::core::transport::MemoryTransport::default(), + )) }; - let transport = transport.or_transport(if memory_only { - OptionalTransport::some(libp2p::core::transport::MemoryTransport::default()) - } else { - OptionalTransport::none() - }); let (transport, bandwidth) = bandwidth::BandwidthLogging::new(transport); From 95a5337c33ff6123918720ef3c11493628bca5b9 Mon Sep 17 00:00:00 2001 From: Shawn Tabrizi Date: Thu, 16 Sep 2021 14:36:26 -0400 Subject: [PATCH 08/33] Add Force Unreserve to Balances (#9764) * force unreserve * add benchmark * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_balances --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/balances/src/weights.rs --template=./.maintain/frame-weight-template.hbs Co-authored-by: Parity Bot --- frame/balances/src/benchmarking.rs | 20 +++++++++++++++ frame/balances/src/lib.rs | 22 ++++++++++++++--- frame/balances/src/weights.rs | 39 ++++++++++++++++++++---------- 3 files changed, 65 insertions(+), 16 deletions(-) diff --git a/frame/balances/src/benchmarking.rs b/frame/balances/src/benchmarking.rs index 97c3c4309a80d..06d202ea37002 100644 --- a/frame/balances/src/benchmarking.rs +++ b/frame/balances/src/benchmarking.rs @@ -195,6 +195,26 @@ benchmarks_instance_pallet! 
{ assert!(Balances::::free_balance(&caller).is_zero()); assert_eq!(Balances::::free_balance(&recipient), balance); } + + force_unreserve { + let user: T::AccountId = account("user", 0, SEED); + let user_lookup: ::Source = T::Lookup::unlookup(user.clone()); + + // Give some multiple of the existential deposit + let existential_deposit = T::ExistentialDeposit::get(); + let balance = existential_deposit.saturating_mul(ED_MULTIPLIER.into()); + let _ = as Currency<_>>::make_free_balance_be(&user, balance); + + // Reserve the balance + as ReservableCurrency<_>>::reserve(&user, balance)?; + assert_eq!(Balances::::reserved_balance(&user), balance); + assert!(Balances::::free_balance(&user).is_zero()); + + }: _(RawOrigin::Root, user_lookup, balance) + verify { + assert!(Balances::::reserved_balance(&user).is_zero()); + assert_eq!(Balances::::free_balance(&user), balance); + } } impl_benchmark_test_suite!( diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index f7102ad4895f9..afd2331c8e3cf 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -167,6 +167,7 @@ use codec::{Codec, Decode, Encode, MaxEncodedLen}; use frame_support::traits::GenesisBuild; use frame_support::{ ensure, + pallet_prelude::DispatchResult, traits::{ tokens::{fungible, BalanceStatus as Status, DepositConsequence, WithdrawConsequence}, Currency, ExistenceRequirement, @@ -183,7 +184,7 @@ use sp_runtime::{ AtLeast32BitUnsigned, Bounded, CheckedAdd, CheckedSub, MaybeSerializeDeserialize, Saturating, StaticLookup, Zero, }, - ArithmeticError, DispatchError, DispatchResult, RuntimeDebug, + ArithmeticError, DispatchError, RuntimeDebug, }; use sp_std::{cmp, fmt::Debug, mem, ops::BitOr, prelude::*, result}; pub use weights::WeightInfo; @@ -419,7 +420,7 @@ pub mod pallet { origin: OriginFor, dest: ::Source, keep_alive: bool, - ) -> DispatchResultWithPostInfo { + ) -> DispatchResult { use fungible::Inspect; let transactor = ensure_signed(origin)?; let reducible_balance = 
Self::reducible_balance(&transactor, keep_alive); @@ -431,7 +432,22 @@ pub mod pallet { reducible_balance, keep_alive.into(), )?; - Ok(().into()) + Ok(()) + } + + /// Unreserve some balance from a user by force. + /// + /// Can only be called by ROOT. + #[pallet::weight(T::WeightInfo::force_unreserve())] + pub fn force_unreserve( + origin: OriginFor, + who: ::Source, + amount: T::Balance, + ) -> DispatchResult { + ensure_root(origin)?; + let who = T::Lookup::lookup(who)?; + let _leftover = >::unreserve(&who, amount); + Ok(()) } } diff --git a/frame/balances/src/weights.rs b/frame/balances/src/weights.rs index 9fce8d4fde266..6f333bfc0500f 100644 --- a/frame/balances/src/weights.rs +++ b/frame/balances/src/weights.rs @@ -18,7 +18,7 @@ //! Autogenerated weights for pallet_balances //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2021-08-07, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2021-09-13, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` //! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 128 // Executed Command: @@ -51,6 +51,7 @@ pub trait WeightInfo { fn set_balance_killing() -> Weight; fn force_transfer() -> Weight; fn transfer_all() -> Weight; + fn force_unreserve() -> Weight; } /// Weights for pallet_balances using the Substrate node and recommended hardware. 
@@ -58,37 +59,43 @@ pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { // Storage: System Account (r:1 w:1) fn transfer() -> Weight { - (72_229_000 as Weight) + (70_952_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) fn transfer_keep_alive() -> Weight { - (55_013_000 as Weight) + (54_410_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) fn set_balance_creating() -> Weight { - (29_404_000 as Weight) + (29_176_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) fn set_balance_killing() -> Weight { - (36_311_000 as Weight) + (35_214_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:2 w:2) fn force_transfer() -> Weight { - (73_125_000 as Weight) + (71_780_000 as Weight) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } // Storage: System Account (r:1 w:1) fn transfer_all() -> Weight { - (67_749_000 as Weight) + (66_475_000 as Weight) + .saturating_add(T::DbWeight::get().reads(1 as Weight)) + .saturating_add(T::DbWeight::get().writes(1 as Weight)) + } + // Storage: System Account (r:1 w:1) + fn force_unreserve() -> Weight { + (27_766_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } @@ -98,37 +105,43 @@ impl WeightInfo for SubstrateWeight { impl WeightInfo for () { // Storage: System Account (r:1 w:1) fn transfer() -> Weight { - (72_229_000 as Weight) + (70_952_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) 
.saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) fn transfer_keep_alive() -> Weight { - (55_013_000 as Weight) + (54_410_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) fn set_balance_creating() -> Weight { - (29_404_000 as Weight) + (29_176_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) fn set_balance_killing() -> Weight { - (36_311_000 as Weight) + (35_214_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:2 w:2) fn force_transfer() -> Weight { - (73_125_000 as Weight) + (71_780_000 as Weight) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } // Storage: System Account (r:1 w:1) fn transfer_all() -> Weight { - (67_749_000 as Weight) + (66_475_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(1 as Weight)) + .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + } + // Storage: System Account (r:1 w:1) + fn force_unreserve() -> Weight { + (27_766_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } From 88b4fc861129b63b445492e3088d7f12382f0128 Mon Sep 17 00:00:00 2001 From: Sergejs Kostjucenko <85877331+sergejparity@users.noreply.github.com> Date: Thu, 16 Sep 2021 23:50:43 +0300 Subject: [PATCH 09/33] Fix buildah login (#9786) --- .gitlab-ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index bfdb5bb3d092a..ecafc9338a587 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -662,8 +662,8 @@ build-rustdoc: --tag "$IMAGE_NAME:$VERSION" --tag 
"$IMAGE_NAME:latest" --file "$DOCKERFILE" . - - echo "$DOCKER_HUB_USER" | - buildah login --username "$DOCKER_HUB_PASS" --password-stdin docker.io + - echo "$DOCKER_HUB_PASS" | + buildah login --username "$DOCKER_HUB_USER" --password-stdin docker.io - buildah info - buildah push --format=v2s2 "$IMAGE_NAME:$VERSION" - buildah push --format=v2s2 "$IMAGE_NAME:latest" From c000780dba99a611fadbf83873073e024be1be0b Mon Sep 17 00:00:00 2001 From: Zeke Mostov <32168567+emostov@users.noreply.github.com> Date: Thu, 16 Sep 2021 19:25:21 -0700 Subject: [PATCH 10/33] Implement `pallet-bags-list` and its interfaces with `pallet-staking` (#9507) * remove extra whitespace Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> * only emit rebag event on success * add doc explaining the term voter * revamp/simplify rebag test * ensure genesis accounts are placed into the correct nodes/bags * bond_extra implicitly rebags * types at top; doc public type * start sketching out adjustable thresholds * add integrity test for voter bag threshold requirements * get rid of BagIdx This reorganizes bag storage such that bags are always referred to by their upper threshold. This in turn means that adding and removing bags is cheaper; you only need to migrate certain voters, not all of them. * implement migration logic for when the threshold list changes * start sketching out threshold proc macros * further refine macro signatures * WIP: implement make_ratio macro * start rethinking the process of producing threshold lists The macro approach seems to be a non-starter; that only really works if we're throwing around numeric literals everywhere, and that's just not nice in this case. Instead, let's write helper functions and make it really easy to generate the tables in separate, permanent files, which humans can then edit. 
* write helper functions to emit voter bags module * WIP: demo generating voter bags for a realistic runtime This isn't yet done, becuase it seems to take a Very Long Time to run, and it really shouldn't. Need to look into that. Still, it's a lot closer than it was this morning. * rm unnecessary arg_enum * fix voter bags math Turns out that when you're working in exponential space, you need to divide, not subtract, in order to keep the math working properly. Also neaten up the output a little bit to make it easier to read. * add computed voter bags thresholds to node * fixup some docs * iter from large bags to small, fulfuilling the contract * make tests compile * add VoterBagThresholds to some configs * ensure that iteration covers all voters even with implied final bag * use sp_std::boxed::Box; * fix unused import * add some more voter bags tests * file_header.txt * integrity test to ensure min bag exceeds existential weight * add more debug assertions about node list length * rm unused imports * Kian enters * Update frame/election-provider-support/src/onchain.rs Co-authored-by: Zeke Mostov <32168567+emostov@users.noreply.github.com> * Suggestions for #9081 (Store voters in unsorted bags) (#9328) * Add some debug asserts to node::get and remove_node * Improve the debug asserts in remove_node * improve debug asserts * Space * Remove bad assertions * Tests: WIP take_works * Take test * Doc comment * Apply suggestions from code review Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> * Test storage is cleaned up; * formatting * Switch to simpler thresholds * Update the storage cleanup test * Remove hardcoded values from benchmark to make it more robust * Fix tests to acces bags properly * Sanity check WIP; tests failing * Update sanity checks to be more correct * Improve storage cleanup tests * WIP remote_ext_tests * Some notes on next steps * Remove some stuff that was for remote-ext tests * Some more cleanup to reduce diff * More :clean: * 
Mo cleanin * small fix * A lot of changes from kian Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> Co-authored-by: kianenigma * merge fallout * Run cargo +nightly fmt * Fix a bunch of stuff, remove not needed runtime arg of make-bags * add logs * Glue the new staking bags to the election snapshot (#9415) * Glue the new staking bags to the election snapshot * add CheckedRem (#9412) * add CheckedRem * fix * Run fmt * Test comment Co-authored-by: Xiliang Chen Co-authored-by: emostov <32168567+emostov@users.noreply.github.com> * Update node runtime with VoterSnapshotPerBlock * Unit test for pallet-staking unsorted bags feature (targets #9081) (#9422) * impl notional_bag_for_works * Add tests: insert_as_works & insert_works * Impl test: remove_works * Trivial cleaning * Add test: update_position_for_works * Write out edge case; probably can delete later * Add test: bags::get_works * Add test: remove_node_happy_path_works * Add test: remove_node_bad_paths_documented * WIP: voting_data_works * done * Improve test voting_data_works * Add comment * Fill out test basic_setup_works * Update: iteration_is_semi_sorted * Improve remove_works * Update update_position_for_works; create set_ledger_and_free_balance * Improve get_works * Improve storage clean up checks in remove test * Test: impl rebag_works + insert_and_remove_works * forgot file - Test: impl rebag_works + insert_and_remove_works * Small tweak * Update voter_bags test to reflect unused bags are removed * Unbond & Rebond: do_rebag * Prevent infinite loops with duplicate tail insert * Check iter.count on voter list in pre-migrate * undo strang fmt comment stuff * Add in todo Co-authored-by: kianenigma * Try prepare for master merge * Reduce diff * Add comment for test to add * Add in code TODO for update_position efficiency updates * Initial impl compiles * impl StakingVoterListStub * Sample impl VoterListProvider for VoterList * impl VoterListProvider for voter-bags * WIP integrate 
pallet-voter-bags to staking mock * the trait `pallet_staking::pallet::pallet::Config` is not implemented for `mock::Test` * random * pushing my stuff * Mock working * WIP voter list tests * Add bag insert, remove tests * Add test for bag insert and remove * Add remaining tests for VoterList * Add tests for node * Add rebag works * Add rebag extrinsic tests * Rename to bags-list and name the list .. list! * Rename VoterBagThresholds => BagThresholds * Add test count_works * Test on_update_works * test sanity check * a round of test fixes * push a lot of changes * my last changes * all bags-list test work; fmt * Beautify some tests * Doc comment for bags-list * Add insert warnings * Setup initial benchmark * Wire up WeightInfo * is_terminal wip; everything broken! * Is terminal working * add TODOs for remove_node * clean up remoe_node * Fix all staking tests * retire VoterBagFor * commit * bring in stashed changes * save * bench pipeline works now, but I can't run stuff * sabe * benchmarks now run, but we have a failure * WIP: Wire up make_bags * bags-thresholds compiles * Fix most build issues * This will fix all the tests * move bag thresholds to bags-list * Move bag-thresholds bin to within pallet-bags * Remove some unnescary TODOs * Impl tets wrong_rebag_is_noop * assert remove is a noop with bad data * Assert integrity test panics * Return an error when inserting duplicates * Update to handle error in staking pallet when inserting to list * Test contains and on_insert error * Test re-nominate does not mess up list or count * Everything builds and works, only the benchmark... * fuck yeah benchmarks * more cleanup, more hardening. 
* use the bags list again * fix benhc * Some questions and changs for List::migration * Fix migration removed_bags and new_bags usage * Some trivial aesthetic changes * Some more trivial changes * tiny changes/ * mega rename * fix all tests and ci build * nit * Test and fix migration * nit * fmt * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_staking --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/staking/src/weights.rs --template=./.maintain/frame-weight-template.hbs * fmt * remove unused * make a few things pub * make node also pub.. for remote-ext test * Fix all tests again * Force bag changes in relevant benchmarks (targets #9507) (#9529) * force rebag for unbond, rebond, and bond_extra * nit * Improve utils * fmt * nits * Move generate_bags to its own pallet * Get runtime-benchmarks feature setup with prepare_on_update_benchmark * Withdraw unbonded kill working * Nominate bench working * some cleanup * WIP * update to check head pre & post conditions * Add some post condition verification stuff for on_remove * Update nominate * fmt * Improvements * Fix build * fix build with polkadot companion * Update frame/bags-list/src/list/tests.rs Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> * move generate-bag from frame to utils * wip * refactor WIP * WIP save * Refactor working * some variable renaming * WIP: prepare to remove head checks * Finish MvP refactor * Some cleanup * Soem more cleanup * save * fix a lot of stuff * Update client/db/src/bench.rs Co-authored-by: Shawn Tabrizi * Apply suggestions from code review * Apply suggestions from code review * Fix some issues that came from trying to merge comments on github * some small changes * simplify it Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> Co-authored-by: kianenigma Co-authored-by: Shawn 
Tabrizi * Build works * Apply suggestions from code review Co-authored-by: Guillaume Thiolliere * Apply suggestions from code review Co-authored-by: Guillaume Thiolliere * Remove commented out debug assert * Remove some unused deps and some unused benchmarking stuff * Fix stakings ElectionDataProvider clear * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_bags_list --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/bags-list/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_bags_list --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/bags-list/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_bags_list --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/bags-list/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_bags_list --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/bags-list/src/weights.rs --template=./.maintain/frame-weight-template.hbs * Improving staking pallet-bags-list migration * fix build and some comments; * comment * Reduce visibility in bags list components * make node.bag_upper only accesible to benchmarks * Address some feedback; comments updates * use nominator map comment * fix vec capacity debug assert * Apply suggestions from code review 
Co-authored-by: Guillaume Thiolliere * clarify VoterSnapshotPerBlock * Reduce diff on create_validators by wrapping with_seed * Some small improvements to staking benches * Soem comment updates * fix vec capacity debug assert ... for real this time * Reduce ListBags viz * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_staking --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/staking/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_staking --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/staking/src/weights.rs --template=./.maintain/frame-weight-template.hbs * Remove supports_eq_unordered & Support eq_unordered * Update utils/frame/generate-bags/src/lib.rs Co-authored-by: Guillaume Thiolliere * Make total-issuance & minimium-balance CLI args; Dont use emptry ext * Improve docs for generate bags CLI args * Apply suggestions from code review Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> * Don't use default bags weight in node runtime * Feature gating sanity_check not working * Feature gate sanity check by creating duplicate fns * Fix line wrapping * Document VoteWeightProvider * Make bags ext-builder not a module * Apply suggestions from code review Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> * use pallet_bags_list instead of crate in mock * Make get_bags test helper fn live in List * use remove_from_storage_unchecked for node removal * Remove count of ids removed in remove_many * Add node sanity check, improve list sanity 
check * Do a list sanity check after on_update * List::migrate: clean up debug assert, exit early when no change in thresholds * Improve public doc comments for pallet_bags_list::list::List * Improve public doc comments for pallet_bags_list::list::List * Update generate bags docs * Fix grammar in bags-list benchmark * Add benchmark case for `rebag` extrinsic * Add count parameter to List::clear; WIP adding MaxEncodedLen to list' * MaxEncodeLen + generate_storage_info not working for Bag or Node * Get MaxEncodeLen derive to work * Try to correctly feature gate SortedListProvider::clear * Use u32::MAX, not u32::max_value * Get up to nominators_quota noms * SortedListProvider::clear takes an Option * Eplicitly ignore SortedListProvider return value * Fix doc comment * Update node-runtime voter snapshot per block * Add test get_max_len_voters_even_if_some_nominators_are_slashed * Add test only_iterates_max_2_times_nominators_quota * Fix generate bags cargo.toml * use sp_std vec * Remove v8 migration hooks from pallet-staking * Update npos trait * Try respect line width * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_bags_list --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/bags-list/src/weights.rs --template=./.maintain/frame-weight-template.hbs * Update frame/bags-list/src/benchmarks.rs * Unwrap try-runtime error; remove sortedlistprovider pre upgrade len check * trigger ci * restore * trigger ci * restore * trigger ci * revert * trigger ci * revert Co-authored-by: Peter Goodspeed-Niklaus Co-authored-by: Kian Paimani <5588131+kianenigma@users.noreply.github.com> Co-authored-by: Peter Goodspeed-Niklaus Co-authored-by: kianenigma Co-authored-by: Xiliang Chen Co-authored-by: Parity Benchmarking Bot Co-authored-by: Shawn Tabrizi Co-authored-by: Guillaume Thiolliere --- Cargo.lock | 82 ++ Cargo.toml | 3 + 
bin/node/runtime/Cargo.toml | 3 + bin/node/runtime/src/lib.rs | 26 + bin/node/runtime/src/voter_bags.rs | 235 ++++++ frame/babe/src/mock.rs | 1 + frame/bags-list/Cargo.toml | 66 ++ frame/bags-list/src/benchmarks.rs | 144 ++++ frame/bags-list/src/lib.rs | 306 +++++++ frame/bags-list/src/list/mod.rs | 786 ++++++++++++++++++ frame/bags-list/src/list/tests.rs | 735 ++++++++++++++++ frame/bags-list/src/mock.rs | 154 ++++ frame/bags-list/src/tests.rs | 389 +++++++++ frame/bags-list/src/weights.rs | 95 +++ .../election-provider-multi-phase/src/lib.rs | 58 +- .../election-provider-multi-phase/src/mock.rs | 8 +- frame/election-provider-support/src/lib.rs | 69 ++ frame/executive/src/lib.rs | 4 +- frame/grandpa/src/mock.rs | 1 + frame/offences/benchmarking/src/mock.rs | 1 + frame/session/benchmarking/src/mock.rs | 1 + frame/staking/Cargo.toml | 2 + frame/staking/src/benchmarking.rs | 327 +++++++- frame/staking/src/lib.rs | 17 +- frame/staking/src/migrations.rs | 50 ++ frame/staking/src/mock.rs | 44 +- frame/staking/src/pallet/impls.rs | 255 ++++-- frame/staking/src/pallet/mod.rs | 45 +- frame/staking/src/testing_utils.rs | 50 +- frame/staking/src/tests.rs | 173 +++- frame/staking/src/weights.rs | 468 ++++++----- primitives/npos-elections/src/traits.rs | 9 +- utils/frame/generate-bags/Cargo.toml | 26 + .../generate-bags/node-runtime/Cargo.toml | 17 + .../generate-bags/node-runtime/src/main.rs | 46 + utils/frame/generate-bags/src/lib.rs | 246 ++++++ 36 files changed, 4602 insertions(+), 340 deletions(-) create mode 100644 bin/node/runtime/src/voter_bags.rs create mode 100644 frame/bags-list/Cargo.toml create mode 100644 frame/bags-list/src/benchmarks.rs create mode 100644 frame/bags-list/src/lib.rs create mode 100644 frame/bags-list/src/list/mod.rs create mode 100644 frame/bags-list/src/list/tests.rs create mode 100644 frame/bags-list/src/mock.rs create mode 100644 frame/bags-list/src/tests.rs create mode 100644 frame/bags-list/src/weights.rs create mode 100644 
utils/frame/generate-bags/Cargo.toml create mode 100644 utils/frame/generate-bags/node-runtime/Cargo.toml create mode 100644 utils/frame/generate-bags/node-runtime/src/main.rs create mode 100644 utils/frame/generate-bags/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 7754e0ae6b62f..37773a4ae3a69 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2314,6 +2314,21 @@ version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" +[[package]] +name = "generate-bags" +version = "3.0.0" +dependencies = [ + "chrono", + "frame-election-provider-support", + "frame-support", + "frame-system", + "git2", + "num-format", + "pallet-staking", + "sp-io", + "structopt", +] + [[package]] name = "generic-array" version = "0.12.4" @@ -2395,6 +2410,19 @@ dependencies = [ "stable_deref_trait", ] +[[package]] +name = "git2" +version = "0.13.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "659cd14835e75b64d9dba5b660463506763cf0aa6cb640aeeb0e98d841093490" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log 0.4.14", + "url 2.2.1", +] + [[package]] name = "glob" version = "0.3.0" @@ -3203,6 +3231,18 @@ version = "0.2.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "789da6d93f1b866ffe175afc5322a4d76c038605a1c3319bb57b06967ca98a36" +[[package]] +name = "libgit2-sys" +version = "0.12.22+1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89c53ac117c44f7042ad8d8f5681378dfbc6010e49ec2c0d1f11dfedc7a4a1c3" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + [[package]] name = "libloading" version = "0.5.2" @@ -3775,6 +3815,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" dependencies = [ "cc", + "libc", "pkg-config", "vcpkg", ] @@ -4530,6 +4571,7 @@ dependencies = [ 
"pallet-authority-discovery", "pallet-authorship", "pallet-babe", + "pallet-bags-list", "pallet-balances", "pallet-bounties", "pallet-collective", @@ -4591,6 +4633,15 @@ dependencies = [ "substrate-wasm-builder", ] +[[package]] +name = "node-runtime-generate-bags" +version = "3.0.0" +dependencies = [ + "generate-bags", + "node-runtime", + "structopt", +] + [[package]] name = "node-template" version = "3.0.0" @@ -4750,6 +4801,16 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-format" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bafe4179722c2894288ee77a9f044f02811c86af699344c498b0840c698a2465" +dependencies = [ + "arrayvec 0.4.12", + "itoa", +] + [[package]] name = "num-integer" version = "0.1.44" @@ -5017,6 +5078,25 @@ dependencies = [ "sp-std", ] +[[package]] +name = "pallet-bags-list" +version = "4.0.0-dev" +dependencies = [ + "frame-benchmarking", + "frame-election-provider-support", + "frame-support", + "frame-system", + "log 0.4.14", + "pallet-balances", + "parity-scale-codec", + "scale-info", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", + "sp-tracing", +] + [[package]] name = "pallet-balances" version = "4.0.0-dev" @@ -5707,6 +5787,7 @@ dependencies = [ "frame-system", "log 0.4.14", "pallet-authorship", + "pallet-bags-list", "pallet-balances", "pallet-session", "pallet-staking-reward-curve", @@ -5718,6 +5799,7 @@ dependencies = [ "sp-application-crypto", "sp-core", "sp-io", + "sp-npos-elections", "sp-runtime", "sp-staking", "sp-std", diff --git a/Cargo.toml b/Cargo.toml index bca0c816217ee..e110c27b20d77 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -128,6 +128,7 @@ members = [ "frame/uniques", "frame/utility", "frame/vesting", + "frame/bags-list", "primitives/api", "primitives/api/proc-macro", "primitives/api/test", @@ -198,6 +199,8 @@ members = [ "utils/frame/try-runtime/cli", "utils/frame/rpc/support", "utils/frame/rpc/system", + "utils/frame/generate-bags", + 
"utils/frame/generate-bags/node-runtime", "utils/prometheus", "utils/wasm-builder", ] diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index dafd9db8bab96..d434be8f3c609 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -55,6 +55,7 @@ pallet-assets = { version = "4.0.0-dev", default-features = false, path = "../.. pallet-authority-discovery = { version = "4.0.0-dev", default-features = false, path = "../../../frame/authority-discovery" } pallet-authorship = { version = "4.0.0-dev", default-features = false, path = "../../../frame/authorship" } pallet-babe = { version = "4.0.0-dev", default-features = false, path = "../../../frame/babe" } +pallet-bags-list = { version = "4.0.0-dev", default-features = false, path = "../../../frame/bags-list" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../../../frame/balances" } pallet-bounties = { version = "4.0.0-dev", default-features = false, path = "../../../frame/bounties" } pallet-collective = { version = "4.0.0-dev", default-features = false, path = "../../../frame/collective" } @@ -110,6 +111,7 @@ std = [ "pallet-authorship/std", "sp-consensus-babe/std", "pallet-babe/std", + "pallet-bags-list/std", "pallet-balances/std", "pallet-bounties/std", "sp-block-builder/std", @@ -179,6 +181,7 @@ runtime-benchmarks = [ "sp-runtime/runtime-benchmarks", "pallet-assets/runtime-benchmarks", "pallet-babe/runtime-benchmarks", + "pallet-bags-list/runtime-benchmarks", "pallet-balances/runtime-benchmarks", "pallet-bounties/runtime-benchmarks", "pallet-collective/runtime-benchmarks", diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 7c6475bd18d6a..9e8adfcd0910e 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -92,6 +92,9 @@ pub mod constants; use constants::{currency::*, time::*}; use sp_runtime::generic::Era; +/// Generated voter bag information. +mod voter_bags; + // Make the WASM binary available. 
#[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); @@ -525,6 +528,9 @@ impl pallet_staking::Config for Runtime { type MaxNominatorRewardedPerValidator = MaxNominatorRewardedPerValidator; type ElectionProvider = ElectionProviderMultiPhase; type GenesisElectionProvider = onchain::OnChainSequentialPhragmen; + // Alternatively, use pallet_staking::UseNominatorsMap to just use the nominators map. + // Note that the aforementioned does not scale to a very large number of nominators. + type SortedListProvider = BagsList; type WeightInfo = pallet_staking::weights::SubstrateWeight; } @@ -552,6 +558,11 @@ parameter_types! { *RuntimeBlockLength::get() .max .get(DispatchClass::Normal); + + // BagsList allows a practically unbounded count of nominators to participate in NPoS elections. + // To ensure we respect memory limits when using the BagsList this must be set to a number of + // voters we know can fit into a single vec allocation. + pub const VoterSnapshotPerBlock: u32 = 10_000; } sp_npos_elections::generate_solution_type!( @@ -634,6 +645,18 @@ impl pallet_election_provider_multi_phase::Config for Runtime { type WeightInfo = pallet_election_provider_multi_phase::weights::SubstrateWeight; type ForceOrigin = EnsureRootOrHalfCouncil; type BenchmarkingConfig = BenchmarkConfig; + type VoterSnapshotPerBlock = VoterSnapshotPerBlock; +} + +parameter_types! { + pub const BagThresholds: &'static [u64] = &voter_bags::THRESHOLDS; +} + +impl pallet_bags_list::Config for Runtime { + type Event = Event; + type VoteWeightProvider = Staking; + type WeightInfo = pallet_bags_list::weights::SubstrateWeight; + type BagThresholds = BagThresholds; } parameter_types! 
{ @@ -1254,6 +1277,7 @@ construct_runtime!( Gilt: pallet_gilt::{Pallet, Call, Storage, Event, Config}, Uniques: pallet_uniques::{Pallet, Call, Storage, Event}, TransactionStorage: pallet_transaction_storage::{Pallet, Call, Storage, Inherent, Config, Event}, + BagsList: pallet_bags_list::{Pallet, Call, Storage, Event}, } ); @@ -1581,6 +1605,7 @@ impl_runtime_apis! { list_benchmark!(list, extra, pallet_assets, Assets); list_benchmark!(list, extra, pallet_babe, Babe); + list_benchmark!(list, extra, pallet_bags_list, BagsList); list_benchmark!(list, extra, pallet_balances, Balances); list_benchmark!(list, extra, pallet_bounties, Bounties); list_benchmark!(list, extra, pallet_collective, Council); @@ -1655,6 +1680,7 @@ impl_runtime_apis! { add_benchmark!(params, batches, pallet_assets, Assets); add_benchmark!(params, batches, pallet_babe, Babe); add_benchmark!(params, batches, pallet_balances, Balances); + add_benchmark!(params, batches, pallet_bags_list, BagsList); add_benchmark!(params, batches, pallet_bounties, Bounties); add_benchmark!(params, batches, pallet_collective, Council); add_benchmark!(params, batches, pallet_contracts, Contracts); diff --git a/bin/node/runtime/src/voter_bags.rs b/bin/node/runtime/src/voter_bags.rs new file mode 100644 index 0000000000000..c4c731a58badc --- /dev/null +++ b/bin/node/runtime/src/voter_bags.rs @@ -0,0 +1,235 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +//! Autogenerated voter bag thresholds. +//! +//! Generated on 2021-07-05T09:17:40.469754927+00:00 +//! for the node runtime. + +/// Existential weight for this runtime. +#[cfg(any(test, feature = "std"))] +#[allow(unused)] +pub const EXISTENTIAL_WEIGHT: u64 = 100_000_000_000_000; + +/// Constant ratio between bags for this runtime. +#[cfg(any(test, feature = "std"))] +#[allow(unused)] +pub const CONSTANT_RATIO: f64 = 1.0628253590743408; + +/// Upper thresholds delimiting the bag list. +pub const THRESHOLDS: [u64; 200] = [ + 100_000_000_000_000, + 106_282_535_907_434, + 112_959_774_389_150, + 120_056_512_776_105, + 127_599_106_300_477, + 135_615_565_971_369, + 144_135_662_599_590, + 153_191_037_357_827, + 162_815_319_286_803, + 173_044_250_183_800, + 183_915_817_337_347, + 195_470_394_601_017, + 207_750_892_330_229, + 220_802_916_738_890, + 234_674_939_267_673, + 249_418_476_592_914, + 265_088_281_944_639, + 281_742_548_444_211, + 299_443_125_216_738, + 318_255_747_080_822, + 338_250_278_668_647, + 359_500_973_883_001, + 382_086_751_654_776, + 406_091_489_025_036, + 431_604_332_640_068, + 458_720_029_816_222, + 487_539_280_404_019, + 518_169_110_758_247, + 550_723_271_202_866, + 585_322_658_466_782, + 622_095_764_659_305, + 661_179_154_452_653, + 702_717_972_243_610, + 746_866_481_177_808, + 793_788_636_038_393, + 843_658_692_126_636, + 896_661_852_395_681, + 952_994_955_240_703, + 1_012_867_205_499_736, + 1_076_500_951_379_881, + 1_144_132_510_194_192, + 1_216_013_045_975_769, + 1_292_409_502_228_280, + 1_373_605_593_276_862, + 1_459_902_857_901_004, + 1_551_621_779_162_291, + 1_649_102_974_585_730, + 1_752_708_461_114_642, + 1_862_822_999_536_805, + 1_979_855_523_374_646, + 2_104_240_657_545_975, + 2_236_440_332_435_128, + 2_376_945_499_368_703, + 2_526_277_953_866_680, + 2_684_992_273_439_945, + 2_853_677_877_130_641, + 3_032_961_214_443_876, + 
3_223_508_091_799_862, + 3_426_026_145_146_232, + 3_641_267_467_913_124, + 3_870_031_404_070_482, + 4_113_167_516_660_186, + 4_371_578_742_827_277, + 4_646_224_747_067_156, + 4_938_125_485_141_739, + 5_248_364_991_899_922, + 5_578_095_407_069_235, + 5_928_541_253_969_291, + 6_301_003_987_036_955, + 6_696_866_825_051_405, + 7_117_599_888_008_300, + 7_564_765_656_719_910, + 8_040_024_775_416_580, + 8_545_142_218_898_723, + 9_081_993_847_142_344, + 9_652_573_371_700_016, + 10_258_999_759_768_490, + 10_903_525_103_419_522, + 11_588_542_983_217_942, + 12_316_597_357_287_042, + 13_090_392_008_832_678, + 13_912_800_587_211_472, + 14_786_877_279_832_732, + 15_715_868_154_526_436, + 16_703_223_214_499_558, + 17_752_609_210_649_358, + 18_867_923_258_814_856, + 20_053_307_312_537_008, + 21_313_163_545_075_252, + 22_652_170_697_804_756, + 24_075_301_455_707_600, + 25_587_840_914_485_432, + 27_195_406_207_875_088, + 28_903_967_368_057_400, + 30_719_869_496_628_636, + 32_649_856_328_471_220, + 34_701_095_276_033_064, + 36_881_204_047_022_752, + 39_198_278_934_370_992, + 41_660_924_883_519_016, + 44_278_287_448_695_240, + 47_060_086_756_856_400, + 50_016_653_605_425_536, + 53_158_967_827_883_320, + 56_498_699_069_691_424, + 60_048_250_125_977_912, + 63_820_803_001_928_304, + 67_830_367_866_937_216, + 72_091_835_084_322_176, + 76_621_030_509_822_880, + 81_434_774_264_248_528, + 86_550_943_198_537_824, + 91_988_537_283_208_848, + 97_767_750_168_749_840, + 103_910_044_178_992_000, + 110_438_230_015_967_792, + 117_376_551_472_255_616, + 124_750_775_465_407_920, + 132_588_287_728_824_640, + 140_918_194_514_440_064, + 149_771_430_684_917_568, + 159_180_874_596_775_264, + 169_181_470_201_085_280, + 179_810_356_815_193_344, + 191_107_007_047_393_216, + 203_113_373_386_768_288, + 215_874_044_002_592_672, + 229_436_408_331_885_600, + 243_850_833_070_063_392, + 259_170_849_218_267_264, + 275_453_350_882_006_752, + 292_758_806_559_399_232, + 311_151_483_703_668_992, + 
330_699_687_393_865_920, + 351_476_014_000_157_824, + 373_557_620_785_735_808, + 397_026_512_446_556_096, + 421_969_845_653_044_224, + 448_480_252_724_740_928, + 476_656_185_639_923_904, + 506_602_281_657_757_760, + 538_429_751_910_786_752, + 572_256_794_410_890_176, + 608_209_033_002_485_632, + 646_419_983_893_124_352, + 687_031_551_494_039_552, + 730_194_555_412_054_016, + 776_069_290_549_944_960, + 824_826_122_395_314_176, + 876_646_119_708_695_936, + 931_721_726_960_522_368, + 990_257_479_014_182_144, + 1_052_470_760_709_299_712, + 1_118_592_614_166_106_112, + 1_188_868_596_808_997_376, + 1_263_559_693_295_730_432, + 1_342_943_284_738_898_688, + 1_427_314_178_819_094_784, + 1_516_985_704_615_302_400, + 1_612_290_876_218_400_768, + 1_713_583_629_449_105_408, + 1_821_240_136_273_157_632, + 1_935_660_201_795_120_128, + 2_057_268_749_018_809_600, + 2_186_517_396_888_336_384, + 2_323_886_137_470_138_880, + 2_469_885_118_504_583_168, + 2_625_056_537_947_004_416, + 2_789_976_657_533_970_944, + 2_965_257_942_852_572_160, + 3_151_551_337_860_326_400, + 3_349_548_682_302_620_672, + 3_559_985_281_005_267_968, + 3_783_642_634_583_792_128, + 4_021_351_341_710_503_936, + 4_273_994_183_717_548_544, + 4_542_509_402_991_247_872, + 4_827_894_187_332_742_144, + 5_131_208_373_224_844_288, + 5_453_578_381_757_959_168, + 5_796_201_401_831_965_696, + 6_160_349_836_169_256_960, + 6_547_376_026_650_146_816, + 6_958_717_276_519_173_120, + 7_395_901_188_113_309_696, + 7_860_551_335_934_872_576, + 8_354_393_296_137_270_272, + 8_879_261_054_815_360_000, + 9_437_103_818_898_946_048, + 10_029_993_254_943_105_024, + 10_660_131_182_698_121_216, + 11_329_857_752_030_707_712, + 12_041_660_133_563_240_448, + 12_798_181_755_305_525_248, + 13_602_232_119_581_272_064, + 14_456_797_236_706_498_560, + 15_365_050_714_167_523_328, + 16_330_365_542_480_556_032, + 17_356_326_621_502_140_416, + 18_446_744_073_709_551_615, +]; diff --git a/frame/babe/src/mock.rs b/frame/babe/src/mock.rs index 
bc0be32624cba..833a68fbddb6c 100644 --- a/frame/babe/src/mock.rs +++ b/frame/babe/src/mock.rs @@ -216,6 +216,7 @@ impl pallet_staking::Config for Test { type ElectionProvider = onchain::OnChainSequentialPhragmen; type GenesisElectionProvider = Self::ElectionProvider; type WeightInfo = (); + type SortedListProvider = pallet_staking::UseNominatorsMap; } impl pallet_offences::Config for Test { diff --git a/frame/bags-list/Cargo.toml b/frame/bags-list/Cargo.toml new file mode 100644 index 0000000000000..860a6edc42143 --- /dev/null +++ b/frame/bags-list/Cargo.toml @@ -0,0 +1,66 @@ +[package] +name = "pallet-bags-list" +version = "4.0.0-dev" +authors = ["Parity Technologies "] +edition = "2018" +license = "Apache-2.0" +homepage = "https://substrate.dev" +repository = "https://github.com/paritytech/substrate/" +description = "FRAME pallet bags list" +readme = "README.md" + +[package.metadata.docs.rs] +targets = ["x86_64-unknown-linux-gnu"] + +[dependencies] +# parity +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } + +# primitives +sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } +sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } + +# FRAME +frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } +frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } +frame-election-provider-support = { version = "4.0.0-dev", default-features = false, path = "../election-provider-support" } + +# third party +log = { version = "0.4.14", default-features = false } + +# Optional imports for benchmarking +frame-benchmarking = { version = "4.0.0-dev", path = "../benchmarking", optional = true, default-features = false } +pallet-balances = { version = "4.0.0-dev", path = "../balances", optional 
= true, default-features = false } +sp-core = { version = "4.0.0-dev", path = "../../primitives/core", optional = true, default-features = false } +sp-io = { version = "4.0.0-dev", path = "../../primitives/io", optional = true, default-features = false } +sp-tracing = { version = "4.0.0-dev", path = "../../primitives/tracing", optional = true, default-features = false } + +[dev-dependencies] +sp-core = { version = "4.0.0-dev", path = "../../primitives/core"} +sp-io = { version = "4.0.0-dev", path = "../../primitives/io"} +sp-tracing = { version = "4.0.0-dev", path = "../../primitives/tracing" } +pallet-balances = { version = "4.0.0-dev", path = "../balances" } +frame-election-provider-support = { version = "4.0.0-dev", path = "../election-provider-support", features = ["runtime-benchmarks"] } +frame-benchmarking = { version = "4.0.0-dev", path = "../benchmarking" } + +[features] +default = ["std"] +std = [ + "codec/std", + "sp-runtime/std", + "sp-std/std", + "frame-support/std", + "frame-system/std", + "frame-election-provider-support/std", + "log/std", +] +runtime-benchmarks = [ + "frame-benchmarking", + "sp-core", + "sp-io", + "pallet-balances", + "sp-tracing", + "frame-election-provider-support/runtime-benchmarks", +] + diff --git a/frame/bags-list/src/benchmarks.rs b/frame/bags-list/src/benchmarks.rs new file mode 100644 index 0000000000000..a820eeba13b12 --- /dev/null +++ b/frame/bags-list/src/benchmarks.rs @@ -0,0 +1,144 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Benchmarks for the bags list pallet. + +use super::*; +use crate::list::List; +use frame_benchmarking::{account, whitelisted_caller}; +use frame_election_provider_support::VoteWeightProvider; +use frame_support::{assert_ok, traits::Get}; +use frame_system::RawOrigin as SystemOrigin; + +frame_benchmarking::benchmarks! { + rebag_non_terminal { + // An expensive case for rebag-ing (rebag a non-terminal node): + // + // - The node to be rebagged, _R_, should exist as a non-terminal node in a bag with at + // least 2 other nodes. Thus _R_ will have both its `prev` and `next` nodes updated when + // it is removed. (3 W/R) + // - The destination bag is not empty, thus we need to update the `next` pointer of the last + // node in the destination in addition to the work we do otherwise. (2 W/R) + + // clear any pre-existing storage. + List::::clear(None); + + // define our origin and destination thresholds. + let origin_bag_thresh = T::BagThresholds::get()[0]; + let dest_bag_thresh = T::BagThresholds::get()[1]; + + // seed items in the origin bag. + let origin_head: T::AccountId = account("origin_head", 0, 0); + assert_ok!(List::::insert(origin_head.clone(), origin_bag_thresh)); + + let origin_middle: T::AccountId = account("origin_middle", 0, 0); // the node we rebag (_R_) + assert_ok!(List::::insert(origin_middle.clone(), origin_bag_thresh)); + + let origin_tail: T::AccountId = account("origin_tail", 0, 0); + assert_ok!(List::::insert(origin_tail.clone(), origin_bag_thresh)); + + // seed items in the destination bag. 
+ let dest_head: T::AccountId = account("dest_head", 0, 0); + assert_ok!(List::::insert(dest_head.clone(), dest_bag_thresh)); + + // the bags are in the expected state after initial setup. + assert_eq!( + List::::get_bags(), + vec![ + (origin_bag_thresh, vec![origin_head.clone(), origin_middle.clone(), origin_tail.clone()]), + (dest_bag_thresh, vec![dest_head.clone()]) + ] + ); + + let caller = whitelisted_caller(); + // update the weight of `origin_middle` to guarantee it will be rebagged into the destination. + T::VoteWeightProvider::set_vote_weight_of(&origin_middle, dest_bag_thresh); + }: rebag(SystemOrigin::Signed(caller), origin_middle.clone()) + verify { + // check the bags have updated as expected. + assert_eq!( + List::::get_bags(), + vec![ + ( + origin_bag_thresh, + vec![origin_head, origin_tail], + ), + ( + dest_bag_thresh, + vec![dest_head, origin_middle], + ) + ] + ); + } + + rebag_terminal { + // An expensive case for rebag-ing (rebag a terminal node): + // + // - The node to be rebagged, _R_, is a terminal node; so _R_, the node pointing to _R_ and + // the origin bag itself will need to be updated. (3 W/R) + // - The destination bag is not empty, thus we need to update the `next` pointer of the last + // node in the destination in addition to the work we do otherwise. (2 W/R) + + // clear any pre-existing storage. + List::::clear(None); + + // define our origin and destination thresholds. + let origin_bag_thresh = T::BagThresholds::get()[0]; + let dest_bag_thresh = T::BagThresholds::get()[1]; + + // seed items in the origin bag. + let origin_head: T::AccountId = account("origin_head", 0, 0); + assert_ok!(List::::insert(origin_head.clone(), origin_bag_thresh)); + + let origin_tail: T::AccountId = account("origin_tail", 0, 0); // the node we rebag (_R_) + assert_ok!(List::::insert(origin_tail.clone(), origin_bag_thresh)); + + // seed items in the destination bag. 
+ let dest_head: T::AccountId = account("dest_head", 0, 0); + assert_ok!(List::::insert(dest_head.clone(), dest_bag_thresh)); + + // the bags are in the expected state after initial setup. + assert_eq!( + List::::get_bags(), + vec![ + (origin_bag_thresh, vec![origin_head.clone(), origin_tail.clone()]), + (dest_bag_thresh, vec![dest_head.clone()]) + ] + ); + + let caller = whitelisted_caller(); + // update the weight of `origin_tail` to guarantee it will be rebagged into the destination. + T::VoteWeightProvider::set_vote_weight_of(&origin_tail, dest_bag_thresh); + }: rebag(SystemOrigin::Signed(caller), origin_tail.clone()) + verify { + // check the bags have updated as expected. + assert_eq!( + List::::get_bags(), + vec![ + (origin_bag_thresh, vec![origin_head.clone()]), + (dest_bag_thresh, vec![dest_head.clone(), origin_tail.clone()]) + ] + ); + } +} + +use frame_benchmarking::impl_benchmark_test_suite; +impl_benchmark_test_suite!( + Pallet, + crate::mock::ExtBuilder::default().build(), + crate::mock::Runtime, +); diff --git a/frame/bags-list/src/lib.rs b/frame/bags-list/src/lib.rs new file mode 100644 index 0000000000000..4202a4d499895 --- /dev/null +++ b/frame/bags-list/src/lib.rs @@ -0,0 +1,306 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! # Bags-List Pallet +//! +//! 
A semi-sorted list, where items hold an `AccountId` based on some `VoteWeight`. The `AccountId` +//! (`id` for short) might be a synonym for a `voter` or `nominator` in some context, and `VoteWeight` +//! signifies the chance of each id being included in the final [`SortedListProvider::iter`]. +//! +//! It implements [`frame_election_provider_support::SortedListProvider`] to provide a semi-sorted list +//! of accounts to another pallet. It needs some other pallet to give it some information about the +//! weights of accounts via [`frame_election_provider_support::VoteWeightProvider`]. +//! +//! This pallet is not configurable at genesis. Whoever uses it should call appropriate functions of +//! the `SortedListProvider` (e.g. `on_insert`, or `regenerate`) at their genesis. +//! +//! # Goals +//! +//! The data structure exposed by this pallet aims to be optimized for: +//! +//! - insertions and removals. +//! - iteration over the top* N items by weight, where the precise ordering of items doesn't +//! particularly matter. +//! +//! # Details +//! +//! - items are kept in bags, which are delineated by their range of weight (See [`BagThresholds`]). +//! - for iteration, bags are chained together from highest to lowest and elements within the bag +//! are iterated from head to tail. +//! - items within a bag are iterated in order of insertion. Thus removing an item and re-inserting +//! it will worsen its position in list iteration; this reduces incentives for some types of spam +//! that involve consistently removing and inserting for better position. Further, ordering +//! granularity is thus dictated by range between each bag threshold. +//! - if an item's weight changes to a value no longer within the range of its current bag the +//! item's position will need to be updated by an external actor with rebag (update), or removal +//! and insertion. 
+ +#![cfg_attr(not(feature = "std"), no_std)] + +use frame_election_provider_support::{SortedListProvider, VoteWeight, VoteWeightProvider}; +use frame_system::ensure_signed; +use sp_std::prelude::*; + +#[cfg(any(feature = "runtime-benchmarks", test))] +mod benchmarks; + +mod list; +#[cfg(test)] +mod mock; +#[cfg(test)] +mod tests; +pub mod weights; + +pub use pallet::*; +pub use weights::WeightInfo; + +pub use list::Error; +use list::List; + +pub(crate) const LOG_TARGET: &'static str = "runtime::bags_list"; + +// syntactic sugar for logging. +#[macro_export] +macro_rules! log { + ($level:tt, $patter:expr $(, $values:expr)* $(,)?) => { + log::$level!( + target: crate::LOG_TARGET, + concat!("[{:?}] 👜", $patter), >::block_number() $(, $values)* + ) + }; +} + +#[frame_support::pallet] +pub mod pallet { + use super::*; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + + #[pallet::pallet] + #[pallet::generate_store(pub(crate) trait Store)] + #[pallet::generate_storage_info] + pub struct Pallet(_); + + #[pallet::config] + pub trait Config: frame_system::Config { + /// The overarching event type. + type Event: From> + IsType<::Event>; + + /// Weight information for extrinsics in this pallet. + type WeightInfo: weights::WeightInfo; + + /// Something that provides the weights of ids. + type VoteWeightProvider: VoteWeightProvider; + + /// The list of thresholds separating the various bags. + /// + /// Ids are separated into unsorted bags according to their vote weight. This specifies the + /// thresholds separating the bags. An id's bag is the largest bag for which the id's weight + /// is less than or equal to its upper threshold. + /// + /// When ids are iterated, higher bags are iterated completely before lower bags. This means + /// that iteration is _semi-sorted_: ids of higher weight tend to come before ids of lower + /// weight, but peer ids within a particular bag are sorted in insertion order. 
+ /// + /// # Expressing the constant + /// + /// This constant must be sorted in strictly increasing order. Duplicate items are not + /// permitted. + /// + /// There is an implied upper limit of `VoteWeight::MAX`; that value does not need to be + /// specified within the bag. For any two threshold lists, if one ends with + /// `VoteWeight::MAX`, the other one does not, and they are otherwise equal, the two lists + /// will behave identically. + /// + /// # Calculation + /// + /// It is recommended to generate the set of thresholds in a geometric series, such that + /// there exists some constant ratio such that `threshold[k + 1] == (threshold[k] * + /// constant_ratio).max(threshold[k] + 1)` for all `k`. + /// + /// The helpers in the `/utils/frame/generate-bags` module can simplify this calculation. + /// + /// # Examples + /// + /// - If `BagThresholds::get().is_empty()`, then all ids are put into the same bag, and + /// iteration is strictly in insertion order. + /// - If `BagThresholds::get().len() == 64`, and the thresholds are determined according to + /// the procedure given above, then the constant ratio is equal to 2. + /// - If `BagThresholds::get().len() == 200`, and the thresholds are determined according to + /// the procedure given above, then the constant ratio is approximately equal to 1.248. + /// - If the threshold list begins `[1, 2, 3, ...]`, then an id with weight 0 or 1 will fall + /// into bag 0, an id with weight 2 will fall into bag 1, etc. + /// + /// # Migration + /// + /// In the event that this list ever changes, a copy of the old bags list must be retained. + /// With that `List::migrate` can be called, which will perform the appropriate migration. + #[pallet::constant] + type BagThresholds: Get<&'static [VoteWeight]>; + } + + /// How many ids are registered. + // NOTE: This is merely a counter for `ListNodes`. It should someday be replaced by the + // `CountedMap` storage. 
+ #[pallet::storage] + pub(crate) type CounterForListNodes = StorageValue<_, u32, ValueQuery>; + + /// A single node, within some bag. + /// + /// Nodes store links forward and back within their respective bags. + #[pallet::storage] + pub(crate) type ListNodes = StorageMap<_, Twox64Concat, T::AccountId, list::Node>; + + /// A bag stored in storage. + /// + /// Stores a `Bag` struct, which stores head and tail pointers to itself. + #[pallet::storage] + pub(crate) type ListBags = StorageMap<_, Twox64Concat, VoteWeight, list::Bag>; + + #[pallet::event] + #[pallet::generate_deposit(pub(crate) fn deposit_event)] + pub enum Event { + /// Moved an account from one bag to another. \[who, from, to\]. + Rebagged(T::AccountId, VoteWeight, VoteWeight), + } + + #[pallet::call] + impl Pallet { + /// Declare that some `dislocated` account has, through rewards or penalties, sufficiently + /// changed its weight that it should properly fall into a different bag than its current + /// one. + /// + /// Anyone can call this function about any potentially dislocated account. + /// + /// Will never return an error; if `dislocated` does not exist or doesn't need a rebag, then + /// it is a noop and fees are still collected from `origin`. + #[pallet::weight(T::WeightInfo::rebag_non_terminal().max(T::WeightInfo::rebag_terminal()))] + pub fn rebag(origin: OriginFor, dislocated: T::AccountId) -> DispatchResult { + ensure_signed(origin)?; + let current_weight = T::VoteWeightProvider::vote_weight(&dislocated); + let _ = Pallet::::do_rebag(&dislocated, current_weight); + Ok(()) + } + } + + #[pallet::hooks] + impl Hooks> for Pallet { + fn integrity_test() { + // ensure they are strictly increasing, this also implies that duplicates are detected. 
+ assert!( + T::BagThresholds::get().windows(2).all(|window| window[1] > window[0]), + "thresholds must strictly increase, and have no duplicates", + ); + } + } +} + +impl Pallet { + /// Move an account from one bag to another, depositing an event on success. + /// + /// If the account changed bags, returns `Some((from, to))`. + pub fn do_rebag( + account: &T::AccountId, + new_weight: VoteWeight, + ) -> Option<(VoteWeight, VoteWeight)> { + // if no voter at that node, don't do anything. + // the caller just wasted the fee to call this. + let maybe_movement = list::Node::::get(&account) + .and_then(|node| List::update_position_for(node, new_weight)); + if let Some((from, to)) = maybe_movement { + Self::deposit_event(Event::::Rebagged(account.clone(), from, to)); + }; + maybe_movement + } + + /// Equivalent to `ListBags::get`, but public. Useful for tests in outside of this crate. + #[cfg(feature = "std")] + pub fn list_bags_get(weight: VoteWeight) -> Option> { + ListBags::get(weight) + } +} + +impl SortedListProvider for Pallet { + type Error = Error; + + fn iter() -> Box> { + Box::new(List::::iter().map(|n| n.id().clone())) + } + + fn count() -> u32 { + CounterForListNodes::::get() + } + + fn contains(id: &T::AccountId) -> bool { + List::::contains(id) + } + + fn on_insert(id: T::AccountId, weight: VoteWeight) -> Result<(), Error> { + List::::insert(id, weight) + } + + fn on_update(id: &T::AccountId, new_weight: VoteWeight) { + Pallet::::do_rebag(id, new_weight); + } + + fn on_remove(id: &T::AccountId) { + List::::remove(id) + } + + fn regenerate( + all: impl IntoIterator, + weight_of: Box VoteWeight>, + ) -> u32 { + List::::regenerate(all, weight_of) + } + + #[cfg(feature = "std")] + fn sanity_check() -> Result<(), &'static str> { + List::::sanity_check() + } + + #[cfg(not(feature = "std"))] + fn sanity_check() -> Result<(), &'static str> { + Ok(()) + } + + fn clear(maybe_count: Option) -> u32 { + List::::clear(maybe_count) + } + + #[cfg(feature = 
"runtime-benchmarks")] + fn weight_update_worst_case(who: &T::AccountId, is_increase: bool) -> VoteWeight { + use frame_support::traits::Get as _; + let thresholds = T::BagThresholds::get(); + let node = list::Node::::get(who).unwrap(); + let current_bag_idx = thresholds + .iter() + .chain(sp_std::iter::once(&VoteWeight::MAX)) + .position(|w| w == &node.bag_upper()) + .unwrap(); + + if is_increase { + let next_threshold_idx = current_bag_idx + 1; + assert!(thresholds.len() > next_threshold_idx); + thresholds[next_threshold_idx] + } else { + assert!(current_bag_idx != 0); + let prev_threshold_idx = current_bag_idx - 1; + thresholds[prev_threshold_idx] + } + } +} diff --git a/frame/bags-list/src/list/mod.rs b/frame/bags-list/src/list/mod.rs new file mode 100644 index 0000000000000..3f55f22271910 --- /dev/null +++ b/frame/bags-list/src/list/mod.rs @@ -0,0 +1,786 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Implementation of a "bags list": a semi-sorted list where ordering granularity is dictated by +//! configurable thresholds that delineate the boundaries of bags. It uses a pattern of composite +//! data structures, where multiple storage items are masked by one outer API. See [`ListNodes`], +//! [`CounterForListNodes`] and [`ListBags`] for more information. +//! +//! The outer API of this module is the [`List`] struct. 
It wraps all acceptable operations on top +//! of the aggregate linked list. All operations with the bags list should happen through this +//! interface. + +use crate::Config; +use codec::{Decode, Encode, MaxEncodedLen}; +use frame_election_provider_support::{VoteWeight, VoteWeightProvider}; +use frame_support::{traits::Get, DefaultNoBound}; +use scale_info::TypeInfo; +use sp_std::{ + boxed::Box, + collections::{btree_map::BTreeMap, btree_set::BTreeSet}, + iter, + marker::PhantomData, + vec::Vec, +}; + +#[derive(Debug, PartialEq, Eq)] +pub enum Error { + /// A duplicate id has been detected. + Duplicate, +} + +#[cfg(test)] +mod tests; + +/// Given a certain vote weight, to which bag does it belong to? +/// +/// Bags are identified by their upper threshold; the value returned by this function is guaranteed +/// to be a member of `T::BagThresholds`. +/// +/// Note that even if the thresholds list does not have `VoteWeight::MAX` as its final member, this +/// function behaves as if it does. +pub(crate) fn notional_bag_for(weight: VoteWeight) -> VoteWeight { + let thresholds = T::BagThresholds::get(); + let idx = thresholds.partition_point(|&threshold| weight > threshold); + thresholds.get(idx).copied().unwrap_or(VoteWeight::MAX) +} + +/// The **ONLY** entry point of this module. All operations to the bags-list should happen through +/// this interface. It is forbidden to access other module members directly. +// +// Data structure providing efficient mostly-accurate selection of the top N id by `VoteWeight`. +// +// It's implemented as a set of linked lists. Each linked list comprises a bag of ids of +// arbitrary and unbounded length, all having a vote weight within a particular constant range. +// This structure means that ids can be added and removed in `O(1)` time. +// +// Iteration is accomplished by chaining the iteration of each bag, from greatest to least. 
While +// the users within any particular bag are sorted in an entirely arbitrary order, the overall vote +// weight decreases as successive bags are reached. This means that it is valid to truncate +// iteration at any desired point; only those ids in the lowest bag can be excluded. This +// satisfies both the desire for fairness and the requirement for efficiency. +pub struct List(PhantomData); + +impl List { + /// Remove all data associated with the list from storage. Parameter `items` is the number of + /// items to clear from the list. WARNING: `None` will clear all items and should generally not + /// be used in production as it could lead to an infinite number of storage accesses. + pub(crate) fn clear(maybe_count: Option) -> u32 { + crate::ListBags::::remove_all(maybe_count); + crate::ListNodes::::remove_all(maybe_count); + if let Some(count) = maybe_count { + crate::CounterForListNodes::::mutate(|items| *items - count); + count + } else { + crate::CounterForListNodes::::take() + } + } + + /// Regenerate all of the data from the given ids. + /// + /// WARNING: this is expensive and should only ever be performed when the list needs to be + /// generated from scratch. Care needs to be taken to ensure + /// + /// This may or may not need to be called at genesis as well, based on the configuration of the + /// pallet using this `List`. + /// + /// Returns the number of ids migrated. + pub fn regenerate( + all: impl IntoIterator, + weight_of: Box VoteWeight>, + ) -> u32 { + Self::clear(None); + Self::insert_many(all, weight_of) + } + + /// Migrate the list from one set of thresholds to another. + /// + /// This should only be called as part of an intentional migration; it's fairly expensive. + /// + /// Returns the number of accounts affected. + /// + /// Preconditions: + /// + /// - `old_thresholds` is the previous list of thresholds. + /// - All `bag_upper` currently in storage are members of `old_thresholds`. 
+ /// - `T::BagThresholds` has already been updated and is the new set of thresholds. + /// + /// Postconditions: + /// + /// - All `bag_upper` currently in storage are members of `T::BagThresholds`. + /// - No id is changed unless required to by the difference between the old threshold list and + /// the new. + /// - ids whose bags change at all are implicitly rebagged into the appropriate bag in the new + /// threshold set. + #[allow(dead_code)] + pub fn migrate(old_thresholds: &[VoteWeight]) -> u32 { + let new_thresholds = T::BagThresholds::get(); + if new_thresholds == old_thresholds { + return 0 + } + + // we can't check all preconditions, but we can check one + debug_assert!( + crate::ListBags::::iter().all(|(threshold, _)| old_thresholds.contains(&threshold)), + "not all `bag_upper` currently in storage are members of `old_thresholds`", + ); + debug_assert!( + crate::ListNodes::::iter().all(|(_, node)| old_thresholds.contains(&node.bag_upper)), + "not all `node.bag_upper` currently in storage are members of `old_thresholds`", + ); + + let old_set: BTreeSet<_> = old_thresholds.iter().copied().collect(); + let new_set: BTreeSet<_> = new_thresholds.iter().copied().collect(); + + // accounts that need to be rebagged + let mut affected_accounts = BTreeSet::new(); + // track affected old bags to make sure we only iterate them once + let mut affected_old_bags = BTreeSet::new(); + + let new_bags = new_set.difference(&old_set).copied(); + // a new bag means that all accounts previously using the old bag's threshold must now + // be rebagged + for inserted_bag in new_bags { + let affected_bag = { + // this recreates `notional_bag_for` logic, but with the old thresholds. 
+ let idx = old_thresholds.partition_point(|&threshold| inserted_bag > threshold); + old_thresholds.get(idx).copied().unwrap_or(VoteWeight::MAX) + }; + if !affected_old_bags.insert(affected_bag) { + // If the previous threshold list was [10, 20], and we insert [3, 5], then there's + // no point iterating through bag 10 twice. + continue + } + + if let Some(bag) = Bag::::get(affected_bag) { + affected_accounts.extend(bag.iter().map(|node| node.id)); + } + } + + let removed_bags = old_set.difference(&new_set).copied(); + // a removed bag means that all members of that bag must be rebagged + for removed_bag in removed_bags.clone() { + if !affected_old_bags.insert(removed_bag) { + continue + } + + if let Some(bag) = Bag::::get(removed_bag) { + affected_accounts.extend(bag.iter().map(|node| node.id)); + } + } + + // migrate the voters whose bag has changed + let num_affected = affected_accounts.len() as u32; + let weight_of = T::VoteWeightProvider::vote_weight; + let _removed = Self::remove_many(&affected_accounts); + debug_assert_eq!(_removed, num_affected); + let _inserted = Self::insert_many(affected_accounts.into_iter(), weight_of); + debug_assert_eq!(_inserted, num_affected); + + // we couldn't previously remove the old bags because both insertion and removal assume that + // it's always safe to add a bag if it's not present. Now that that's sorted, we can get rid + // of them. + // + // it's pretty cheap to iterate this again, because both sets are in-memory and require no + // lookups. + for removed_bag in removed_bags { + debug_assert!( + !crate::ListNodes::::iter().any(|(_, node)| node.bag_upper == removed_bag), + "no id should be present in a removed bag", + ); + crate::ListBags::::remove(removed_bag); + } + + debug_assert_eq!(Self::sanity_check(), Ok(())); + + num_affected + } + + /// Returns `true` if the list contains `id`, otherwise returns `false`. 
+ pub(crate) fn contains(id: &T::AccountId) -> bool { + crate::ListNodes::::contains_key(id) + } + + /// Iterate over all nodes in all bags in the list. + /// + /// Full iteration can be expensive; it's recommended to limit the number of items with + /// `.take(n)`. + pub(crate) fn iter() -> impl Iterator> { + // We need a touch of special handling here: because we permit `T::BagThresholds` to + // omit the final bound, we need to ensure that we explicitly include that threshold in the + // list. + // + // It's important to retain the ability to omit the final bound because it makes tests much + // easier; they can just configure `type BagThresholds = ()`. + let thresholds = T::BagThresholds::get(); + let iter = thresholds.iter().copied(); + let iter: Box> = if thresholds.last() == Some(&VoteWeight::MAX) { + // in the event that they included it, we can just pass the iterator through unchanged. + Box::new(iter.rev()) + } else { + // otherwise, insert it here. + Box::new(iter.chain(iter::once(VoteWeight::MAX)).rev()) + }; + + iter.filter_map(Bag::get).flat_map(|bag| bag.iter()) + } + + /// Insert several ids into the appropriate bags in the list. Continues with insertions + /// if duplicates are detected. + /// + /// Returns the final count of number of ids inserted. + fn insert_many( + ids: impl IntoIterator, + weight_of: impl Fn(&T::AccountId) -> VoteWeight, + ) -> u32 { + let mut count = 0; + ids.into_iter().for_each(|v| { + let weight = weight_of(&v); + if Self::insert(v, weight).is_ok() { + count += 1; + } + }); + + count + } + + /// Insert a new id into the appropriate bag in the list. + /// + /// Returns an error if the list already contains `id`. 
+ pub(crate) fn insert(id: T::AccountId, weight: VoteWeight) -> Result<(), Error> { + if Self::contains(&id) { + return Err(Error::Duplicate) + } + + let bag_weight = notional_bag_for::(weight); + let mut bag = Bag::::get_or_make(bag_weight); + // unchecked insertion is okay; we just got the correct `notional_bag_for`. + bag.insert_unchecked(id.clone()); + + // new inserts are always the tail, so we must write the bag. + bag.put(); + + crate::CounterForListNodes::::mutate(|prev_count| { + *prev_count = prev_count.saturating_add(1) + }); + + crate::log!( + debug, + "inserted {:?} with weight {} into bag {:?}, new count is {}", + id, + weight, + bag_weight, + crate::CounterForListNodes::::get(), + ); + + Ok(()) + } + + /// Remove an id from the list. + pub(crate) fn remove(id: &T::AccountId) { + Self::remove_many(sp_std::iter::once(id)); + } + + /// Remove many ids from the list. + /// + /// This is more efficient than repeated calls to `Self::remove`. + /// + /// Returns the final count of number of ids removed. + fn remove_many<'a>(ids: impl IntoIterator) -> u32 { + let mut bags = BTreeMap::new(); + let mut count = 0; + + for id in ids.into_iter() { + let node = match Node::::get(id) { + Some(node) => node, + None => continue, + }; + count += 1; + + if !node.is_terminal() { + // this node is not a head or a tail and thus the bag does not need to be updated + node.excise() + } else { + // this node is a head or tail, so the bag needs to be updated + let bag = bags + .entry(node.bag_upper) + .or_insert_with(|| Bag::::get_or_make(node.bag_upper)); + // node.bag_upper must be correct, therefore this bag will contain this node. + bag.remove_node_unchecked(&node); + } + + // now get rid of the node itself + node.remove_from_storage_unchecked() + } + + for (_, bag) in bags { + bag.put(); + } + + crate::CounterForListNodes::::mutate(|prev_count| { + *prev_count = prev_count.saturating_sub(count) + }); + + count + } + + /// Update a node's position in the list. 
+ /// + /// If the node was in the correct bag, no effect. If the node was in the incorrect bag, they + /// are moved into the correct bag. + /// + /// Returns `Some((old_idx, new_idx))` if the node moved, otherwise `None`. + /// + /// This operation is somewhat more efficient than simply calling [`self.remove`] followed by + /// [`self.insert`]. However, given large quantities of nodes to move, it may be more efficient + /// to call [`self.remove_many`] followed by [`self.insert_many`]. + pub(crate) fn update_position_for( + node: Node, + new_weight: VoteWeight, + ) -> Option<(VoteWeight, VoteWeight)> { + node.is_misplaced(new_weight).then(move || { + let old_bag_upper = node.bag_upper; + + if !node.is_terminal() { + // this node is not a head or a tail, so we can just cut it out of the list. update + // and put the prev and next of this node, we do `node.put` inside `insert_note`. + node.excise(); + } else if let Some(mut bag) = Bag::::get(node.bag_upper) { + // this is a head or tail, so the bag must be updated. + bag.remove_node_unchecked(&node); + bag.put(); + } else { + crate::log!( + error, + "Node {:?} did not have a bag; ListBags is in an inconsistent state", + node.id, + ); + debug_assert!(false, "every node must have an extant bag associated with it"); + } + + // put the node into the appropriate new bag. + let new_bag_upper = notional_bag_for::(new_weight); + let mut bag = Bag::::get_or_make(new_bag_upper); + // prev, next, and bag_upper of the node are updated inside `insert_node`, also + // `node.put` is in there. + bag.insert_node_unchecked(node); + bag.put(); + + (old_bag_upper, new_bag_upper) + }) + } + + /// Sanity check the list. + /// + /// This should be called from the call-site, whenever one of the mutating apis (e.g. `insert`) + /// is being used, after all other staking data (such as counter) has been updated. 
It checks: + /// + /// * there are no duplicate ids, + /// * length of this list is in sync with `CounterForListNodes`, + /// * and sanity-checks all bags. This will cascade down all the checks and makes sure all bags + /// are checked per *any* update to `List`. + #[cfg(feature = "std")] + pub(crate) fn sanity_check() -> Result<(), &'static str> { + use frame_support::ensure; + let mut seen_in_list = BTreeSet::new(); + ensure!( + Self::iter().map(|node| node.id).all(|id| seen_in_list.insert(id)), + "duplicate identified", + ); + + let iter_count = Self::iter().count() as u32; + let stored_count = crate::CounterForListNodes::::get(); + let nodes_count = crate::ListNodes::::iter().count() as u32; + ensure!(iter_count == stored_count, "iter_count != stored_count"); + ensure!(stored_count == nodes_count, "stored_count != nodes_count"); + + crate::log!(debug, "count of nodes: {}", stored_count); + + let active_bags = { + let thresholds = T::BagThresholds::get().iter().copied(); + let thresholds: Vec = if thresholds.clone().last() == Some(VoteWeight::MAX) { + // in the event that they included it, we don't need to make any changes + // Box::new(thresholds.collect() + thresholds.collect() + } else { + // otherwise, insert it here. + thresholds.chain(iter::once(VoteWeight::MAX)).collect() + }; + thresholds.into_iter().filter_map(|t| Bag::::get(t)) + }; + + let _ = active_bags.clone().map(|b| b.sanity_check()).collect::>()?; + + let nodes_in_bags_count = + active_bags.clone().fold(0u32, |acc, cur| acc + cur.iter().count() as u32); + ensure!(nodes_count == nodes_in_bags_count, "stored_count != nodes_in_bags_count"); + + crate::log!(debug, "count of active bags {}", active_bags.count()); + + // check that all nodes are sane. We check the `ListNodes` storage item directly in case we + // have some "stale" nodes that are not in a bag. + for (_id, node) in crate::ListNodes::::iter() { + node.sanity_check()? 
+ } + + Ok(()) + } + + #[cfg(not(feature = "std"))] + pub(crate) fn sanity_check() -> Result<(), &'static str> { + Ok(()) + } + + /// Returns the nodes of all non-empty bags. For testing and benchmarks. + #[cfg(any(feature = "std", feature = "runtime-benchmarks"))] + #[allow(dead_code)] + pub(crate) fn get_bags() -> Vec<(VoteWeight, Vec)> { + use frame_support::traits::Get as _; + + let thresholds = T::BagThresholds::get(); + let iter = thresholds.iter().copied(); + let iter: Box> = if thresholds.last() == Some(&VoteWeight::MAX) { + // in the event that they included it, we can just pass the iterator through unchanged. + Box::new(iter) + } else { + // otherwise, insert it here. + Box::new(iter.chain(sp_std::iter::once(VoteWeight::MAX))) + }; + + iter.filter_map(|t| { + Bag::::get(t).map(|bag| (t, bag.iter().map(|n| n.id().clone()).collect::>())) + }) + .collect::>() + } +} + +/// A Bag is a doubly-linked list of ids, where each id is mapped to a [`ListNode`]. +/// +/// Note that we maintain both head and tail pointers. While it would be possible to get away with +/// maintaining only a head pointer and cons-ing elements onto the front of the list, it's more +/// desirable to ensure that there is some element of first-come, first-serve to the list's +/// iteration so that there's no incentive to churn ids positioning to improve the chances of +/// appearing within the ids set. +#[derive(DefaultNoBound, Encode, Decode, MaxEncodedLen, TypeInfo)] +#[codec(mel_bound(T: Config))] +#[scale_info(skip_type_params(T))] +#[cfg_attr(feature = "std", derive(frame_support::DebugNoBound, Clone, PartialEq))] +pub struct Bag { + head: Option, + tail: Option, + + #[codec(skip)] + bag_upper: VoteWeight, +} + +impl Bag { + #[cfg(test)] + pub(crate) fn new( + head: Option, + tail: Option, + bag_upper: VoteWeight, + ) -> Self { + Self { head, tail, bag_upper } + } + + /// Get a bag by its upper vote weight. 
+ pub(crate) fn get(bag_upper: VoteWeight) -> Option> { + crate::ListBags::::try_get(bag_upper).ok().map(|mut bag| { + bag.bag_upper = bag_upper; + bag + }) + } + + /// Get a bag by its upper vote weight or make it, appropriately initialized. Does not check if + /// if `bag_upper` is a valid threshold. + fn get_or_make(bag_upper: VoteWeight) -> Bag { + Self::get(bag_upper).unwrap_or(Bag { bag_upper, ..Default::default() }) + } + + /// `True` if self is empty. + fn is_empty(&self) -> bool { + self.head.is_none() && self.tail.is_none() + } + + /// Put the bag back into storage. + fn put(self) { + if self.is_empty() { + crate::ListBags::::remove(self.bag_upper); + } else { + crate::ListBags::::insert(self.bag_upper, self); + } + } + + /// Get the head node in this bag. + fn head(&self) -> Option> { + self.head.as_ref().and_then(|id| Node::get(id)) + } + + /// Get the tail node in this bag. + fn tail(&self) -> Option> { + self.tail.as_ref().and_then(|id| Node::get(id)) + } + + /// Iterate over the nodes in this bag. + pub(crate) fn iter(&self) -> impl Iterator> { + sp_std::iter::successors(self.head(), |prev| prev.next()) + } + + /// Insert a new id into this bag. + /// + /// This is private on purpose because it's naive: it doesn't check whether this is the + /// appropriate bag for this id at all. Generally, use [`List::insert`] instead. + /// + /// Storage note: this modifies storage, but only for the nodes. You still need to call + /// `self.put()` after use. + fn insert_unchecked(&mut self, id: T::AccountId) { + // insert_node will overwrite `prev`, `next` and `bag_upper` to the proper values. As long + // as this bag is the correct one, we're good. All calls to this must come after getting the + // correct [`notional_bag_for`]. + self.insert_node_unchecked(Node:: { id, prev: None, next: None, bag_upper: 0 }); + } + + /// Insert a node into this bag. 
+ /// + /// This is private on purpose because it's naive; it doesn't check whether this is the + /// appropriate bag for this node at all. Generally, use [`List::insert`] instead. + /// + /// Storage note: this modifies storage, but only for the node. You still need to call + /// `self.put()` after use. + fn insert_node_unchecked(&mut self, mut node: Node) { + if let Some(tail) = &self.tail { + if *tail == node.id { + // this should never happen, but this check prevents one path to a worst case + // infinite loop. + debug_assert!(false, "system logic error: inserting a node who has the id of tail"); + crate::log!(warn, "system logic error: inserting a node who has the id of tail"); + return + }; + } + + // re-set the `bag_upper`. Regardless of whatever the node had previously, now it is going + // to be `self.bag_upper`. + node.bag_upper = self.bag_upper; + + let id = node.id.clone(); + // update this node now, treating it as the new tail. + node.prev = self.tail.clone(); + node.next = None; + node.put(); + + // update the previous tail. + if let Some(mut old_tail) = self.tail() { + old_tail.next = Some(id.clone()); + old_tail.put(); + } + self.tail = Some(id.clone()); + + // ensure head exist. This is only set when the length of the bag is just 1, i.e. if this is + // the first insertion into the bag. In this case, both head and tail should point to the + // same node. + if self.head.is_none() { + self.head = Some(id.clone()); + debug_assert!(self.iter().count() == 1); + } + } + + /// Remove a node from this bag. + /// + /// This is private on purpose because it doesn't check whether this bag contains the node in + /// the first place. Generally, use [`List::remove`] instead, similar to `insert_unchecked`. + /// + /// Storage note: this modifies storage, but only for adjacent nodes. You still need to call + /// `self.put()` and `ListNodes::remove(id)` to update storage for the bag and `node`. 
+ fn remove_node_unchecked(&mut self, node: &Node) { + // reassign neighboring nodes. + node.excise(); + + // clear the bag head/tail pointers as necessary. + if self.tail.as_ref() == Some(&node.id) { + self.tail = node.prev.clone(); + } + if self.head.as_ref() == Some(&node.id) { + self.head = node.next.clone(); + } + } + + /// Sanity check this bag. + /// + /// Should be called by the call-site, after any mutating operation on a bag. The call site of + /// this struct is always `List`. + /// + /// * Ensures head has no prev. + /// * Ensures tail has no next. + /// * Ensures there are no loops, traversal from head to tail is correct. + #[cfg(feature = "std")] + fn sanity_check(&self) -> Result<(), &'static str> { + frame_support::ensure!( + self.head() + .map(|head| head.prev().is_none()) + // if there is no head, then there must not be a tail, meaning that the bag is + // empty. + .unwrap_or_else(|| self.tail.is_none()), + "head has a prev" + ); + + frame_support::ensure!( + self.tail() + .map(|tail| tail.next().is_none()) + // if there is no tail, then there must not be a head, meaning that the bag is + // empty. + .unwrap_or_else(|| self.head.is_none()), + "tail has a next" + ); + + let mut seen_in_bag = BTreeSet::new(); + frame_support::ensure!( + self.iter() + .map(|node| node.id) + // each voter is only seen once, thus there is no cycle within a bag + .all(|voter| seen_in_bag.insert(voter)), + "duplicate found in bag" + ); + + Ok(()) + } + + #[cfg(not(feature = "std"))] + fn sanity_check(&self) -> Result<(), &'static str> { + Ok(()) + } + + /// Iterate over the nodes in this bag (public for tests). + #[cfg(feature = "std")] + #[allow(dead_code)] + pub fn std_iter(&self) -> impl Iterator> { + sp_std::iter::successors(self.head(), |prev| prev.next()) + } + + /// Check if the bag contains a node with `id`. 
+ #[cfg(feature = "std")] + fn contains(&self, id: &T::AccountId) -> bool { + self.iter().find(|n| n.id() == id).is_some() + } +} + +/// A Node is the fundamental element comprising the doubly-linked list described by `Bag`. +#[derive(Encode, Decode, MaxEncodedLen, TypeInfo)] +#[codec(mel_bound(T: Config))] +#[scale_info(skip_type_params(T))] +#[cfg_attr(feature = "std", derive(frame_support::DebugNoBound, Clone, PartialEq))] +pub struct Node { + id: T::AccountId, + prev: Option, + next: Option, + bag_upper: VoteWeight, +} + +impl Node { + /// Get a node by id. + pub(crate) fn get(id: &T::AccountId) -> Option> { + crate::ListNodes::::try_get(id).ok() + } + + /// Put the node back into storage. + fn put(self) { + crate::ListNodes::::insert(self.id.clone(), self); + } + + /// Update neighboring nodes to point to reach other. + /// + /// Only updates storage for adjacent nodes, but not `self`; so the user may need to call + /// `self.put`. + fn excise(&self) { + // Update previous node. + if let Some(mut prev) = self.prev() { + prev.next = self.next.clone(); + prev.put(); + } + // Update next self. + if let Some(mut next) = self.next() { + next.prev = self.prev.clone(); + next.put(); + } + } + + /// This is a naive function that removes a node from the `ListNodes` storage item. + /// + /// It is naive because it does not check if the node has first been removed from its bag. + fn remove_from_storage_unchecked(&self) { + crate::ListNodes::::remove(&self.id) + } + + /// Get the previous node in the bag. + fn prev(&self) -> Option> { + self.prev.as_ref().and_then(|id| Node::get(id)) + } + + /// Get the next node in the bag. + fn next(&self) -> Option> { + self.next.as_ref().and_then(|id| Node::get(id)) + } + + /// `true` when this voter is in the wrong bag. + pub(crate) fn is_misplaced(&self, current_weight: VoteWeight) -> bool { + notional_bag_for::(current_weight) != self.bag_upper + } + + /// `true` when this voter is a bag head or tail. 
+ fn is_terminal(&self) -> bool { + self.prev.is_none() || self.next.is_none() + } + + /// Get the underlying voter. + pub(crate) fn id(&self) -> &T::AccountId { + &self.id + } + + /// Get the underlying voter (public fo tests). + #[cfg(feature = "std")] + #[allow(dead_code)] + pub fn std_id(&self) -> &T::AccountId { + &self.id + } + + /// The bag this nodes belongs to (public for benchmarks). + #[cfg(feature = "runtime-benchmarks")] + #[allow(dead_code)] + pub fn bag_upper(&self) -> VoteWeight { + self.bag_upper + } + + #[cfg(feature = "std")] + fn sanity_check(&self) -> Result<(), &'static str> { + let expected_bag = Bag::::get(self.bag_upper).ok_or("bag not found for node")?; + + let id = self.id(); + + frame_support::ensure!( + expected_bag.contains(id), + "node does not exist in the expected bag" + ); + + frame_support::ensure!( + !self.is_terminal() || + expected_bag.head.as_ref() == Some(id) || + expected_bag.tail.as_ref() == Some(id), + "a terminal node is neither its bag head or tail" + ); + + Ok(()) + } +} diff --git a/frame/bags-list/src/list/tests.rs b/frame/bags-list/src/list/tests.rs new file mode 100644 index 0000000000000..e2730cbf4e33d --- /dev/null +++ b/frame/bags-list/src/list/tests.rs @@ -0,0 +1,735 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use super::*; +use crate::{ + mock::{test_utils::*, *}, + CounterForListNodes, ListBags, ListNodes, +}; +use frame_election_provider_support::SortedListProvider; +use frame_support::{assert_ok, assert_storage_noop}; + +#[test] +fn basic_setup_works() { + ExtBuilder::default().build_and_execute(|| { + // syntactic sugar to create a raw node + let node = |id, prev, next, bag_upper| Node:: { id, prev, next, bag_upper }; + + assert_eq!(CounterForListNodes::::get(), 4); + assert_eq!(ListNodes::::iter().count(), 4); + assert_eq!(ListBags::::iter().count(), 2); + + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4])]); + + // the state of the bags is as expected + assert_eq!( + ListBags::::get(10).unwrap(), + Bag:: { head: Some(1), tail: Some(1), bag_upper: 0 } + ); + assert_eq!( + ListBags::::get(1_000).unwrap(), + Bag:: { head: Some(2), tail: Some(4), bag_upper: 0 } + ); + + assert_eq!(ListNodes::::get(2).unwrap(), node(2, None, Some(3), 1_000)); + assert_eq!(ListNodes::::get(3).unwrap(), node(3, Some(2), Some(4), 1_000)); + assert_eq!(ListNodes::::get(4).unwrap(), node(4, Some(3), None, 1_000)); + assert_eq!(ListNodes::::get(1).unwrap(), node(1, None, None, 10)); + + // non-existent id does not have a storage footprint + assert_eq!(ListNodes::::get(42), None); + + // iteration of the bags would yield: + assert_eq!( + List::::iter().map(|n| *n.id()).collect::>(), + vec![2, 3, 4, 1], + // ^^ note the order of insertion in genesis! + ); + }); +} + +#[test] +fn notional_bag_for_works() { + // under a threshold gives the next threshold. + assert_eq!(notional_bag_for::(0), 10); + assert_eq!(notional_bag_for::(9), 10); + + // at a threshold gives that threshold. + assert_eq!(notional_bag_for::(10), 10); + + // above the threshold, gives the next threshold. 
+ assert_eq!(notional_bag_for::(11), 20); + + let max_explicit_threshold = *::BagThresholds::get().last().unwrap(); + assert_eq!(max_explicit_threshold, 10_000); + + // if the max explicit threshold is less than VoteWeight::MAX, + assert!(VoteWeight::MAX > max_explicit_threshold); + + // then anything above it will belong to the VoteWeight::MAX bag. + assert_eq!(notional_bag_for::(max_explicit_threshold), max_explicit_threshold); + assert_eq!(notional_bag_for::(max_explicit_threshold + 1), VoteWeight::MAX); +} + +#[test] +fn remove_last_node_in_bags_cleans_bag() { + ExtBuilder::default().build_and_execute(|| { + // given + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4])]); + + // bump 1 to a bigger bag + List::::remove(&1); + assert_ok!(List::::insert(1, 10_000)); + + // then the bag with bound 10 is wiped from storage. + assert_eq!(List::::get_bags(), vec![(1_000, vec![2, 3, 4]), (10_000, vec![1])]); + + // and can be recreated again as needed. + assert_ok!(List::::insert(77, 10)); + assert_eq!( + List::::get_bags(), + vec![(10, vec![77]), (1_000, vec![2, 3, 4]), (10_000, vec![1])] + ); + }); +} + +#[test] +fn migrate_works() { + ExtBuilder::default() + .add_ids(vec![(710, 15), (711, 16), (712, 2_000)]) + .build_and_execute(|| { + // given + assert_eq!( + List::::get_bags(), + vec![ + (10, vec![1]), + (20, vec![710, 711]), + (1_000, vec![2, 3, 4]), + (2_000, vec![712]) + ] + ); + let old_thresholds = ::BagThresholds::get(); + assert_eq!(old_thresholds, vec![10, 20, 30, 40, 50, 60, 1_000, 2_000, 10_000]); + + // when the new thresholds adds `15` and removes `2_000` + const NEW_THRESHOLDS: &'static [VoteWeight] = + &[10, 15, 20, 30, 40, 50, 60, 1_000, 10_000]; + BagThresholds::set(NEW_THRESHOLDS); + // and we call + List::::migrate(old_thresholds); + + // then + assert_eq!( + List::::get_bags(), + vec![ + (10, vec![1]), + (15, vec![710]), // nodes in range 11 ..= 15 move from bag 20 to bag 15 + (20, vec![711]), + (1_000, vec![2, 3, 4]), + 
// nodes in range 1_001 ..= 2_000 move from bag 2_000 to bag 10_000 + (10_000, vec![712]), + ] + ); + }); +} + +mod list { + use super::*; + + #[test] + fn iteration_is_semi_sorted() { + ExtBuilder::default() + .add_ids(vec![(5, 2_000), (6, 2_000)]) + .build_and_execute(|| { + // given + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4]), (2_000, vec![5, 6])] + ); + assert_eq!( + get_list_as_ids(), + vec![ + 5, 6, // best bag + 2, 3, 4, // middle bag + 1, // last bag. + ] + ); + + // when adding an id that has a higher weight than pre-existing ids in the bag + assert_ok!(List::::insert(7, 10)); + + // then + assert_eq!( + get_list_as_ids(), + vec![ + 5, 6, // best bag + 2, 3, 4, // middle bag + 1, 7, // last bag; new id is last. + ] + ); + }) + } + + /// we can `take` x ids, even if that quantity ends midway through a list. + #[test] + fn take_works() { + ExtBuilder::default() + .add_ids(vec![(5, 2_000), (6, 2_000)]) + .build_and_execute(|| { + // given + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4]), (2_000, vec![5, 6])] + ); + + // when + let iteration = + List::::iter().map(|node| *node.id()).take(4).collect::>(); + + // then + assert_eq!( + iteration, + vec![ + 5, 6, // best bag, fully iterated + 2, 3, // middle bag, partially iterated + ] + ); + }) + } + + #[test] + fn insert_works() { + ExtBuilder::default().build_and_execute(|| { + // when inserting into an existing bag + assert_ok!(List::::insert(5, 1_000)); + + // then + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4, 5])]); + assert_eq!(get_list_as_ids(), vec![2, 3, 4, 5, 1]); + + // when inserting into a non-existent bag + assert_ok!(List::::insert(6, 1_001)); + + // then + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4, 5]), (2_000, vec![6])] + ); + assert_eq!(get_list_as_ids(), vec![6, 2, 3, 4, 5, 1]); + }); + } + + #[test] + fn insert_errors_with_duplicate_id() { + 
ExtBuilder::default().build_and_execute(|| { + // given + assert!(get_list_as_ids().contains(&3)); + + // then + assert_storage_noop!(assert_eq!( + List::::insert(3, 20).unwrap_err(), + Error::Duplicate + )); + }); + } + + #[test] + fn remove_works() { + use crate::{CounterForListNodes, ListBags, ListNodes}; + let ensure_left = |id, counter| { + assert!(!ListNodes::::contains_key(id)); + assert_eq!(CounterForListNodes::::get(), counter); + assert_eq!(ListNodes::::iter().count() as u32, counter); + }; + + ExtBuilder::default().build_and_execute(|| { + // removing a non-existent id is a noop + assert!(!ListNodes::::contains_key(42)); + assert_storage_noop!(List::::remove(&42)); + + // when removing a node from a bag with multiple nodes: + List::::remove(&2); + + // then + assert_eq!(get_list_as_ids(), vec![3, 4, 1]); + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![3, 4])]); + ensure_left(2, 3); + + // when removing a node from a bag with only one node: + List::::remove(&1); + + // then + assert_eq!(get_list_as_ids(), vec![3, 4]); + assert_eq!(List::::get_bags(), vec![(1_000, vec![3, 4])]); + ensure_left(1, 2); + // bag 10 is removed + assert!(!ListBags::::contains_key(10)); + + // remove remaining ids to make sure storage cleans up as expected + List::::remove(&3); + ensure_left(3, 1); + assert_eq!(get_list_as_ids(), vec![4]); + + List::::remove(&4); + ensure_left(4, 0); + assert_eq!(get_list_as_ids(), Vec::::new()); + + // bags are deleted via removals + assert_eq!(ListBags::::iter().count(), 0); + }); + } + + #[test] + fn remove_many_is_noop_with_non_existent_ids() { + ExtBuilder::default().build_and_execute(|| { + let non_existent_ids = vec![&42, &666, &13]; + + // when account ids don' exist in the list + assert!(non_existent_ids.iter().all(|id| !BagsList::contains(id))); + + // then removing them is a noop + assert_storage_noop!(List::::remove_many(non_existent_ids)); + }); + } + + #[test] + fn update_position_for_works() { + 
ExtBuilder::default().build_and_execute(|| { + // given a correctly placed account 1 at bag 10. + let node = Node::::get(&1).unwrap(); + assert!(!node.is_misplaced(10)); + + // .. it is invalid with weight 20 + assert!(node.is_misplaced(20)); + + // move it to bag 20. + assert_eq!(List::::update_position_for(node, 20), Some((10, 20))); + + assert_eq!(List::::get_bags(), vec![(20, vec![1]), (1_000, vec![2, 3, 4])]); + + // get the new updated node; try and update the position with no change in weight. + let node = Node::::get(&1).unwrap(); + assert_storage_noop!(assert_eq!( + List::::update_position_for(node.clone(), 20), + None + )); + + // then move it to bag 1_000 by giving it weight 500. + assert_eq!(List::::update_position_for(node.clone(), 500), Some((20, 1_000))); + assert_eq!(List::::get_bags(), vec![(1_000, vec![2, 3, 4, 1])]); + + // moving within that bag again is a noop + let node = Node::::get(&1).unwrap(); + assert_storage_noop!(assert_eq!( + List::::update_position_for(node.clone(), 750), + None, + )); + assert_storage_noop!(assert_eq!( + List::::update_position_for(node, 1_000), + None, + )); + }); + } + + #[test] + fn sanity_check_works() { + ExtBuilder::default().build_and_execute_no_post_check(|| { + assert_ok!(List::::sanity_check()); + }); + + // make sure there are no duplicates. + ExtBuilder::default().build_and_execute_no_post_check(|| { + Bag::::get(10).unwrap().insert_unchecked(2); + assert_eq!(List::::sanity_check(), Err("duplicate identified")); + }); + + // ensure count is in sync with `CounterForListNodes`. 
+ ExtBuilder::default().build_and_execute_no_post_check(|| { + crate::CounterForListNodes::::mutate(|counter| *counter += 1); + assert_eq!(crate::CounterForListNodes::::get(), 5); + assert_eq!(List::::sanity_check(), Err("iter_count != stored_count")); + }); + } + + #[test] + fn contains_works() { + ExtBuilder::default().build_and_execute(|| { + assert!(GENESIS_IDS.iter().all(|(id, _)| List::::contains(id))); + + let non_existent_ids = vec![&42, &666, &13]; + assert!(non_existent_ids.iter().all(|id| !List::::contains(id))); + }) + } +} + +mod bags { + use super::*; + + #[test] + fn get_works() { + ExtBuilder::default().build_and_execute(|| { + let check_bag = |bag_upper, head, tail, ids| { + let bag = Bag::::get(bag_upper).unwrap(); + let bag_ids = bag.iter().map(|n| *n.id()).collect::>(); + + assert_eq!(bag, Bag:: { head, tail, bag_upper }); + assert_eq!(bag_ids, ids); + }; + + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4])]); + + // we can fetch them + check_bag(10, Some(1), Some(1), vec![1]); + check_bag(1_000, Some(2), Some(4), vec![2, 3, 4]); + + // and all other bag thresholds don't get bags. 
+ ::BagThresholds::get() + .iter() + .chain(iter::once(&VoteWeight::MAX)) + .filter(|bag_upper| !vec![10, 1_000].contains(bag_upper)) + .for_each(|bag_upper| { + assert_storage_noop!(assert_eq!(Bag::::get(*bag_upper), None)); + assert!(!ListBags::::contains_key(*bag_upper)); + }); + + // when we make a pre-existing bag empty + List::::remove(&1); + + // then + assert_eq!(Bag::::get(10), None) + }); + } + + #[test] + fn insert_node_sets_proper_bag() { + ExtBuilder::default().build_and_execute_no_post_check(|| { + let node = |id, bag_upper| Node:: { id, prev: None, next: None, bag_upper }; + + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4])]); + + let mut bag_10 = Bag::::get(10).unwrap(); + bag_10.insert_node_unchecked(node(42, 5)); + + assert_eq!( + ListNodes::::get(&42).unwrap(), + Node { bag_upper: 10, prev: Some(1), next: None, id: 42 } + ); + }); + } + + #[test] + fn insert_node_happy_paths_works() { + ExtBuilder::default().build_and_execute_no_post_check(|| { + let node = |id, bag_upper| Node:: { id, prev: None, next: None, bag_upper }; + + // when inserting into a bag with 1 node + let mut bag_10 = Bag::::get(10).unwrap(); + bag_10.insert_node_unchecked(node(42, bag_10.bag_upper)); + // then + assert_eq!(bag_as_ids(&bag_10), vec![1, 42]); + + // when inserting into a bag with 3 nodes + let mut bag_1000 = Bag::::get(1_000).unwrap(); + bag_1000.insert_node_unchecked(node(52, bag_1000.bag_upper)); + // then + assert_eq!(bag_as_ids(&bag_1000), vec![2, 3, 4, 52]); + + // when inserting into a new bag + let mut bag_20 = Bag::::get_or_make(20); + bag_20.insert_node_unchecked(node(62, bag_20.bag_upper)); + // then + assert_eq!(bag_as_ids(&bag_20), vec![62]); + + // when inserting a node pointing to the accounts not in the bag + let node_61 = + Node:: { id: 61, prev: Some(21), next: Some(101), bag_upper: 20 }; + bag_20.insert_node_unchecked(node_61); + // then ids are in order + assert_eq!(bag_as_ids(&bag_20), vec![62, 61]); + // and when the 
node is re-fetched all the info is correct + assert_eq!( + Node::::get(&61).unwrap(), + Node:: { id: 61, prev: Some(62), next: None, bag_upper: 20 } + ); + + // state of all bags is as expected + bag_20.put(); // need to put this newly created bag so its in the storage map + assert_eq!( + List::::get_bags(), + vec![(10, vec![1, 42]), (20, vec![62, 61]), (1_000, vec![2, 3, 4, 52])] + ); + }); + } + + // Document improper ways `insert_node` may be getting used. + #[test] + fn insert_node_bad_paths_documented() { + let node = |id, prev, next, bag_upper| Node:: { id, prev, next, bag_upper }; + ExtBuilder::default().build_and_execute_no_post_check(|| { + // when inserting a node with both prev & next pointing at an account in an incorrect + // bag. + let mut bag_1000 = Bag::::get(1_000).unwrap(); + bag_1000.insert_node_unchecked(node(42, Some(1), Some(1), 500)); + + // then the proper prev and next is set. + assert_eq!(bag_as_ids(&bag_1000), vec![2, 3, 4, 42]); + + // and when the node is re-fetched all the info is correct + assert_eq!( + Node::::get(&42).unwrap(), + node(42, Some(4), None, bag_1000.bag_upper) + ); + }); + + ExtBuilder::default().build_and_execute_no_post_check(|| { + // given 3 is in bag_1000 (and not a tail node) + let mut bag_1000 = Bag::::get(1_000).unwrap(); + assert_eq!(bag_as_ids(&bag_1000), vec![2, 3, 4]); + + // when inserting a node with duplicate id 3 + bag_1000.insert_node_unchecked(node(3, None, None, bag_1000.bag_upper)); + + // then all the nodes after the duplicate are lost (because it is set as the tail) + assert_eq!(bag_as_ids(&bag_1000), vec![2, 3]); + // also in the full iteration, 2 and 3 are from bag_1000 and 1 is from bag_10. + assert_eq!(get_list_as_ids(), vec![2, 3, 1]); + + // and the last accessible node has an **incorrect** prev pointer. 
+ assert_eq!( + Node::::get(&3).unwrap(), + node(3, Some(4), None, bag_1000.bag_upper) + ); + }); + + ExtBuilder::default().build_and_execute_no_post_check(|| { + // when inserting a duplicate id of the head + let mut bag_1000 = Bag::::get(1_000).unwrap(); + assert_eq!(bag_as_ids(&bag_1000), vec![2, 3, 4]); + bag_1000.insert_node_unchecked(node(2, None, None, 0)); + + // then all nodes after the head are lost + assert_eq!(bag_as_ids(&bag_1000), vec![2]); + + // and the re-fetched node has bad pointers + assert_eq!( + Node::::get(&2).unwrap(), + node(2, Some(4), None, bag_1000.bag_upper) + ); + // ^^^ despite being the bags head, it has a prev + + assert_eq!(bag_1000, Bag { head: Some(2), tail: Some(2), bag_upper: 1_000 }) + }); + } + + // Panics in case of duplicate tail insert (which would result in an infinite loop). + #[test] + #[should_panic = "system logic error: inserting a node who has the id of tail"] + fn insert_node_duplicate_tail_panics_with_debug_assert() { + ExtBuilder::default().build_and_execute(|| { + let node = |id, prev, next, bag_upper| Node:: { id, prev, next, bag_upper }; + + // given + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4])],); + let mut bag_1000 = Bag::::get(1_000).unwrap(); + + // when inserting a duplicate id that is already the tail + assert_eq!(bag_1000.tail, Some(4)); + bag_1000.insert_node_unchecked(node(4, None, None, bag_1000.bag_upper)); // panics + }); + } + + #[test] + fn remove_node_happy_paths_works() { + ExtBuilder::default() + .add_ids(vec![ + (11, 10), + (12, 10), + (13, 1_000), + (14, 1_000), + (15, 2_000), + (16, 2_000), + (17, 2_000), + (18, 2_000), + (19, 2_000), + ]) + .build_and_execute_no_post_check(|| { + let mut bag_10 = Bag::::get(10).unwrap(); + let mut bag_1000 = Bag::::get(1_000).unwrap(); + let mut bag_2000 = Bag::::get(2_000).unwrap(); + + // given + assert_eq!(bag_as_ids(&bag_10), vec![1, 11, 12]); + assert_eq!(bag_as_ids(&bag_1000), vec![2, 3, 4, 13, 14]); + 
assert_eq!(bag_as_ids(&bag_2000), vec![15, 16, 17, 18, 19]); + + // when removing a node that is not pointing at the head or tail + let node_4 = Node::::get(&4).unwrap(); + let node_4_pre_remove = node_4.clone(); + bag_1000.remove_node_unchecked(&node_4); + + // then + assert_eq!(bag_as_ids(&bag_1000), vec![2, 3, 13, 14]); + assert_ok!(bag_1000.sanity_check()); + // and the node isn't mutated when its removed + assert_eq!(node_4, node_4_pre_remove); + + // when removing a head that is not pointing at the tail + let node_2 = Node::::get(&2).unwrap(); + bag_1000.remove_node_unchecked(&node_2); + + // then + assert_eq!(bag_as_ids(&bag_1000), vec![3, 13, 14]); + assert_ok!(bag_1000.sanity_check()); + + // when removing a tail that is not pointing at the head + let node_14 = Node::::get(&14).unwrap(); + bag_1000.remove_node_unchecked(&node_14); + + // then + assert_eq!(bag_as_ids(&bag_1000), vec![3, 13]); + assert_ok!(bag_1000.sanity_check()); + + // when removing a tail that is pointing at the head + let node_13 = Node::::get(&13).unwrap(); + bag_1000.remove_node_unchecked(&node_13); + + // then + assert_eq!(bag_as_ids(&bag_1000), vec![3]); + assert_ok!(bag_1000.sanity_check()); + + // when removing a node that is both the head & tail + let node_3 = Node::::get(&3).unwrap(); + bag_1000.remove_node_unchecked(&node_3); + bag_1000.put(); // put into storage so `get` returns the updated bag + + // then + assert_eq!(Bag::::get(1_000), None); + + // when removing a node that is pointing at both the head & tail + let node_11 = Node::::get(&11).unwrap(); + bag_10.remove_node_unchecked(&node_11); + + // then + assert_eq!(bag_as_ids(&bag_10), vec![1, 12]); + assert_ok!(bag_10.sanity_check()); + + // when removing a head that is pointing at the tail + let node_1 = Node::::get(&1).unwrap(); + bag_10.remove_node_unchecked(&node_1); + + // then + assert_eq!(bag_as_ids(&bag_10), vec![12]); + assert_ok!(bag_10.sanity_check()); + // and since we updated the bag's head/tail, we need to 
write this storage so we + // can correctly `get` it again in later checks + bag_10.put(); + + // when removing a node that is pointing at the head but not the tail + let node_16 = Node::::get(&16).unwrap(); + bag_2000.remove_node_unchecked(&node_16); + + // then + assert_eq!(bag_as_ids(&bag_2000), vec![15, 17, 18, 19]); + assert_ok!(bag_2000.sanity_check()); + + // when removing a node that is pointing at tail, but not head + let node_18 = Node::::get(&18).unwrap(); + bag_2000.remove_node_unchecked(&node_18); + + // then + assert_eq!(bag_as_ids(&bag_2000), vec![15, 17, 19]); + assert_ok!(bag_2000.sanity_check()); + + // finally, when reading from storage, the state of all bags is as expected + assert_eq!( + List::::get_bags(), + vec![(10, vec![12]), (2_000, vec![15, 17, 19])] + ); + }); + } + + #[test] + fn remove_node_bad_paths_documented() { + ExtBuilder::default().build_and_execute_no_post_check(|| { + let bad_upper_node_2 = Node:: { + id: 2, + prev: None, + next: Some(3), + bag_upper: 10, // should be 1_000 + }; + let mut bag_1000 = Bag::::get(1_000).unwrap(); + + // when removing a node that is in the bag but has the wrong upper + bag_1000.remove_node_unchecked(&bad_upper_node_2); + bag_1000.put(); + + // then the node is no longer in any bags + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![3, 4])]); + // .. and the bag it was removed from + let bag_1000 = Bag::::get(1_000).unwrap(); + // is sane + assert_ok!(bag_1000.sanity_check()); + // and has the correct head and tail. + assert_eq!(bag_1000.head, Some(3)); + assert_eq!(bag_1000.tail, Some(4)); + }); + + // Removing a node that is in another bag, will mess up that other bag. 
+ ExtBuilder::default().build_and_execute_no_post_check(|| { + // given a tail node is in bag 1_000 + let node_4 = Node::::get(&4).unwrap(); + + // when we remove it from bag 10 + let mut bag_10 = Bag::::get(10).unwrap(); + bag_10.remove_node_unchecked(&node_4); + bag_10.put(); + + // then bag remove was called on is ok, + let bag_10 = Bag::::get(10).unwrap(); + assert_eq!(bag_10.tail, Some(1)); + assert_eq!(bag_10.head, Some(1)); + + // but the bag that the node belonged to is in an invalid state + let bag_1000 = Bag::::get(1_000).unwrap(); + // because it still has the removed node as its tail. + assert_eq!(bag_1000.tail, Some(4)); + assert_eq!(bag_1000.head, Some(2)); + }); + } +} + +mod node { + use super::*; + + #[test] + fn is_misplaced_works() { + ExtBuilder::default().build_and_execute(|| { + let node = Node::::get(&1).unwrap(); + + // given + assert_eq!(node.bag_upper, 10); + + // then within bag 10 its not misplaced, + assert!(!node.is_misplaced(0)); + assert!(!node.is_misplaced(9)); + assert!(!node.is_misplaced(10)); + + // and out of bag 10 it is misplaced + assert!(node.is_misplaced(11)); + }); + } +} diff --git a/frame/bags-list/src/mock.rs b/frame/bags-list/src/mock.rs new file mode 100644 index 0000000000000..a6ab35896b1e7 --- /dev/null +++ b/frame/bags-list/src/mock.rs @@ -0,0 +1,154 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +//! Mock runtime for pallet-bags-lists tests. + +use super::*; +use crate::{self as bags_list}; +use frame_election_provider_support::VoteWeight; +use frame_support::parameter_types; + +pub type AccountId = u32; +pub type Balance = u32; + +parameter_types! { + pub static NextVoteWeight: VoteWeight = 0; +} + +pub struct StakingMock; +impl frame_election_provider_support::VoteWeightProvider for StakingMock { + fn vote_weight(id: &AccountId) -> VoteWeight { + match id { + 710 => 15, + 711 => 16, + 712 => 2_000, // special cases used for migrate test + _ => NextVoteWeight::get(), + } + } + #[cfg(any(feature = "runtime-benchmarks", test))] + fn set_vote_weight_of(_: &AccountId, weight: VoteWeight) { + // we don't really keep a mapping, just set weight for everyone. + NextVoteWeight::set(weight) + } +} + +impl frame_system::Config for Runtime { + type SS58Prefix = (); + type BaseCallFilter = frame_support::traits::Everything; + type Origin = Origin; + type Index = u64; + type BlockNumber = u64; + type Call = Call; + type Hash = sp_core::H256; + type Hashing = sp_runtime::traits::BlakeTwo256; + type AccountId = AccountId; + type Lookup = sp_runtime::traits::IdentityLookup; + type Header = sp_runtime::testing::Header; + type Event = Event; + type BlockHashCount = (); + type DbWeight = (); + type BlockLength = (); + type BlockWeights = (); + type Version = (); + type PalletInfo = PalletInfo; + type AccountData = pallet_balances::AccountData; + type OnNewAccount = (); + type OnKilledAccount = (); + type SystemWeightInfo = (); + type OnSetCode = (); +} + +parameter_types! 
{ + pub static BagThresholds: &'static [VoteWeight] = &[10, 20, 30, 40, 50, 60, 1_000, 2_000, 10_000]; +} + +impl bags_list::Config for Runtime { + type Event = Event; + type WeightInfo = (); + type BagThresholds = BagThresholds; + type VoteWeightProvider = StakingMock; +} + +type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; +type Block = frame_system::mocking::MockBlock; +frame_support::construct_runtime!( + pub enum Runtime where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic, + { + System: frame_system::{Pallet, Call, Storage, Event, Config}, + BagsList: bags_list::{Pallet, Call, Storage, Event}, + } +); + +/// Default AccountIds and their weights. +pub(crate) const GENESIS_IDS: [(AccountId, VoteWeight); 4] = + [(1, 10), (2, 1_000), (3, 1_000), (4, 1_000)]; + +#[derive(Default)] +pub(crate) struct ExtBuilder { + ids: Vec<(AccountId, VoteWeight)>, +} + +impl ExtBuilder { + /// Add some AccountIds to insert into `List`. + pub(crate) fn add_ids(mut self, ids: Vec<(AccountId, VoteWeight)>) -> Self { + self.ids = ids; + self + } + + pub(crate) fn build(self) -> sp_io::TestExternalities { + sp_tracing::try_init_simple(); + let storage = frame_system::GenesisConfig::default().build_storage::().unwrap(); + + let mut ext = sp_io::TestExternalities::from(storage); + ext.execute_with(|| { + for (id, weight) in GENESIS_IDS.iter().chain(self.ids.iter()) { + frame_support::assert_ok!(List::::insert(*id, *weight)); + } + }); + + ext + } + + pub(crate) fn build_and_execute(self, test: impl FnOnce() -> ()) { + self.build().execute_with(|| { + test(); + List::::sanity_check().expect("Sanity check post condition failed") + }) + } + + pub(crate) fn build_and_execute_no_post_check(self, test: impl FnOnce() -> ()) { + self.build().execute_with(test) + } +} + +pub(crate) mod test_utils { + use super::*; + use list::Bag; + + /// Returns the ordered ids within the given bag. 
+ pub(crate) fn bag_as_ids(bag: &Bag) -> Vec { + bag.iter().map(|n| *n.id()).collect::>() + } + + /// Returns the ordered ids from the list. + pub(crate) fn get_list_as_ids() -> Vec { + List::::iter().map(|n| *n.id()).collect::>() + } +} diff --git a/frame/bags-list/src/tests.rs b/frame/bags-list/src/tests.rs new file mode 100644 index 0000000000000..e94017730668b --- /dev/null +++ b/frame/bags-list/src/tests.rs @@ -0,0 +1,389 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use frame_support::{assert_ok, assert_storage_noop, traits::IntegrityTest}; + +use super::*; +use frame_election_provider_support::SortedListProvider; +use list::Bag; +use mock::{test_utils::*, *}; + +mod pallet { + use super::*; + + #[test] + fn rebag_works() { + ExtBuilder::default().add_ids(vec![(42, 20)]).build_and_execute(|| { + // given + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (20, vec![42]), (1_000, vec![2, 3, 4])] + ); + + // when increasing vote weight to the level of non-existent bag + NextVoteWeight::set(2_000); + assert_ok!(BagsList::rebag(Origin::signed(0), 42)); + + // then a new bag is created and the id moves into it + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4]), (2_000, vec![42])] + ); + + // when decreasing weight within the range of the current bag + NextVoteWeight::set(1001); + assert_ok!(BagsList::rebag(Origin::signed(0), 42)); + + // then the id does not move + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4]), (2_000, vec![42])] + ); + + // when reducing weight to the level of a non-existent bag + NextVoteWeight::set(30); + assert_ok!(BagsList::rebag(Origin::signed(0), 42)); + + // then a new bag is created and the id moves into it + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (30, vec![42]), (1_000, vec![2, 3, 4])] + ); + + // when increasing weight to the level of a pre-existing bag + NextVoteWeight::set(500); + assert_ok!(BagsList::rebag(Origin::signed(0), 42)); + + // then the id moves into that bag + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4, 42])] + ); + }); + } + + // Rebagging the tail of a bag results in the old bag having a new tail and an overall correct + // state. 
+ #[test] + fn rebag_tail_works() { + ExtBuilder::default().build_and_execute(|| { + // given + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4])]); + + // when + NextVoteWeight::set(10); + assert_ok!(BagsList::rebag(Origin::signed(0), 4)); + + // then + assert_eq!(List::::get_bags(), vec![(10, vec![1, 4]), (1_000, vec![2, 3])]); + assert_eq!(Bag::::get(1_000).unwrap(), Bag::new(Some(2), Some(3), 1_000)); + + // when + assert_ok!(BagsList::rebag(Origin::signed(0), 3)); + + // then + assert_eq!(List::::get_bags(), vec![(10, vec![1, 4, 3]), (1_000, vec![2])]); + + assert_eq!(Bag::::get(10).unwrap(), Bag::new(Some(1), Some(3), 10)); + assert_eq!(Bag::::get(1_000).unwrap(), Bag::new(Some(2), Some(2), 1_000)); + assert_eq!(get_list_as_ids(), vec![2u32, 1, 4, 3]); + + // when + assert_ok!(BagsList::rebag(Origin::signed(0), 2)); + + // then + assert_eq!(List::::get_bags(), vec![(10, vec![1, 4, 3, 2])]); + assert_eq!(Bag::::get(1_000), None); + }); + } + + // Rebagging the head of a bag results in the old bag having a new head and an overall correct + // state. 
+ #[test] + fn rebag_head_works() { + ExtBuilder::default().build_and_execute(|| { + // when + NextVoteWeight::set(10); + assert_ok!(BagsList::rebag(Origin::signed(0), 2)); + + // then + assert_eq!(List::::get_bags(), vec![(10, vec![1, 2]), (1_000, vec![3, 4])]); + assert_eq!(Bag::::get(1_000).unwrap(), Bag::new(Some(3), Some(4), 1_000)); + + // when + assert_ok!(BagsList::rebag(Origin::signed(0), 3)); + + // then + assert_eq!(List::::get_bags(), vec![(10, vec![1, 2, 3]), (1_000, vec![4])]); + assert_eq!(Bag::::get(1_000).unwrap(), Bag::new(Some(4), Some(4), 1_000)); + + // when + assert_ok!(BagsList::rebag(Origin::signed(0), 4)); + + // then + assert_eq!(List::::get_bags(), vec![(10, vec![1, 2, 3, 4])]); + assert_eq!(Bag::::get(1_000), None); + }); + } + + #[test] + fn wrong_rebag_is_noop() { + ExtBuilder::default().build_and_execute(|| { + let node_3 = list::Node::::get(&3).unwrap(); + // when account 3 is _not_ misplaced with weight 500 + NextVoteWeight::set(500); + assert!(!node_3.is_misplaced(500)); + + // then calling rebag on account 3 with weight 500 is a noop + assert_storage_noop!(assert_eq!(BagsList::rebag(Origin::signed(0), 3), Ok(()))); + + // when account 42 is not in the list + assert!(!BagsList::contains(&42)); + + // then rebag-ing account 42 is a noop + assert_storage_noop!(assert_eq!(BagsList::rebag(Origin::signed(0), 42), Ok(()))); + }); + } + + #[test] + #[should_panic = "thresholds must strictly increase, and have no duplicates"] + fn duplicate_in_bags_threshold_panics() { + const DUPE_THRESH: &[VoteWeight; 4] = &[10, 20, 30, 30]; + BagThresholds::set(DUPE_THRESH); + BagsList::integrity_test(); + } + + #[test] + #[should_panic = "thresholds must strictly increase, and have no duplicates"] + fn decreasing_in_bags_threshold_panics() { + const DECREASING_THRESH: &[VoteWeight; 4] = &[10, 30, 20, 40]; + BagThresholds::set(DECREASING_THRESH); + BagsList::integrity_test(); + } + + #[test] + fn empty_threshold_works() { + 
BagThresholds::set(Default::default()); // which is the same as passing `()` to `Get<_>`. + + ExtBuilder::default().build_and_execute(|| { + // everyone in the same bag. + assert_eq!(List::::get_bags(), vec![(VoteWeight::MAX, vec![1, 2, 3, 4])]); + + // any insertion goes there as well. + assert_ok!(List::::insert(5, 999)); + assert_ok!(List::::insert(6, 0)); + assert_eq!( + List::::get_bags(), + vec![(VoteWeight::MAX, vec![1, 2, 3, 4, 5, 6])] + ); + + // any rebag is noop. + assert_storage_noop!(assert!(BagsList::rebag(Origin::signed(0), 1).is_ok())); + assert_storage_noop!(assert!(BagsList::rebag(Origin::signed(0), 10).is_ok())); + }) + } +} + +mod sorted_list_provider { + use super::*; + + #[test] + fn iter_works() { + ExtBuilder::default().build_and_execute(|| { + let expected = vec![2, 3, 4, 1]; + for (i, id) in BagsList::iter().enumerate() { + assert_eq!(id, expected[i]) + } + }); + } + + #[test] + fn count_works() { + ExtBuilder::default().build_and_execute(|| { + // given + assert_eq!(BagsList::count(), 4); + + // when inserting + assert_ok!(BagsList::on_insert(201, 0)); + // then the count goes up + assert_eq!(BagsList::count(), 5); + + // when removing + BagsList::on_remove(&201); + // then the count goes down + assert_eq!(BagsList::count(), 4); + + // when updating + BagsList::on_update(&201, VoteWeight::MAX); + // then the count stays the same + assert_eq!(BagsList::count(), 4); + }); + } + + #[test] + fn on_insert_works() { + ExtBuilder::default().build_and_execute(|| { + // when + assert_ok!(BagsList::on_insert(6, 1_000)); + + // then the bags + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![2, 3, 4, 6])]); + // and list correctly include the new id, + assert_eq!(BagsList::iter().collect::>(), vec![2, 3, 4, 6, 1]); + // and the count is incremented. 
+ assert_eq!(BagsList::count(), 5); + + // when + assert_ok!(BagsList::on_insert(7, 1_001)); + + // then the bags + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4, 6]), (2_000, vec![7])] + ); + // and list correctly include the new id, + assert_eq!(BagsList::iter().collect::>(), vec![7, 2, 3, 4, 6, 1]); + // and the count is incremented. + assert_eq!(BagsList::count(), 6); + }) + } + + #[test] + fn on_insert_errors_with_duplicate_id() { + ExtBuilder::default().build_and_execute(|| { + // given + assert!(get_list_as_ids().contains(&3)); + + // then + assert_storage_noop!(assert_eq!( + BagsList::on_insert(3, 20).unwrap_err(), + Error::Duplicate + )); + }); + } + + #[test] + fn on_update_works() { + ExtBuilder::default().add_ids(vec![(42, 20)]).build_and_execute(|| { + // given + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (20, vec![42]), (1_000, vec![2, 3, 4])] + ); + assert_eq!(BagsList::count(), 5); + + // when increasing weight to the level of non-existent bag + BagsList::on_update(&42, 2_000); + + // then the bag is created with the id in it, + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4]), (2000, vec![42])] + ); + // and the id position is updated in the list. + assert_eq!(BagsList::iter().collect::>(), vec![42, 2, 3, 4, 1]); + + // when decreasing weight within the range of the current bag + BagsList::on_update(&42, 1_001); + + // then the id does not change bags, + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4]), (2000, vec![42])] + ); + // or change position in the list. 
+ assert_eq!(BagsList::iter().collect::>(), vec![42, 2, 3, 4, 1]); + + // when increasing weight to the level of a non-existent bag with the max threshold + BagsList::on_update(&42, VoteWeight::MAX); + + // the the new bag is created with the id in it, + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4]), (VoteWeight::MAX, vec![42])] + ); + // and the id position is updated in the list. + assert_eq!(BagsList::iter().collect::>(), vec![42, 2, 3, 4, 1]); + + // when decreasing the weight to a pre-existing bag + BagsList::on_update(&42, 1_000); + + // then id is moved to the correct bag (as the last member), + assert_eq!( + List::::get_bags(), + vec![(10, vec![1]), (1_000, vec![2, 3, 4, 42])] + ); + // and the id position is updated in the list. + assert_eq!(BagsList::iter().collect::>(), vec![2, 3, 4, 42, 1]); + + // since we have only called on_update, the `count` has not changed. + assert_eq!(BagsList::count(), 5); + }); + } + + #[test] + fn on_remove_works() { + let ensure_left = |id, counter| { + assert!(!ListNodes::::contains_key(id)); + assert_eq!(BagsList::count(), counter); + assert_eq!(CounterForListNodes::::get(), counter); + assert_eq!(ListNodes::::iter().count() as u32, counter); + }; + + ExtBuilder::default().build_and_execute(|| { + // it is a noop removing a non-existent id + assert!(!ListNodes::::contains_key(42)); + assert_storage_noop!(BagsList::on_remove(&42)); + + // when removing a node from a bag with multiple nodes + BagsList::on_remove(&2); + + // then + assert_eq!(get_list_as_ids(), vec![3, 4, 1]); + assert_eq!(List::::get_bags(), vec![(10, vec![1]), (1_000, vec![3, 4])]); + ensure_left(2, 3); + + // when removing a node from a bag with only one node + BagsList::on_remove(&1); + + // then + assert_eq!(get_list_as_ids(), vec![3, 4]); + assert_eq!(List::::get_bags(), vec![(1_000, vec![3, 4])]); + ensure_left(1, 2); + + // when removing all remaining ids + BagsList::on_remove(&4); + assert_eq!(get_list_as_ids(), 
vec![3]); + ensure_left(4, 1); + BagsList::on_remove(&3); + + // then the storage is completely cleaned up + assert_eq!(get_list_as_ids(), Vec::::new()); + ensure_left(3, 0); + }); + } + + #[test] + fn contains_works() { + ExtBuilder::default().build_and_execute(|| { + assert!(GENESIS_IDS.iter().all(|(id, _)| BagsList::contains(id))); + + let non_existent_ids = vec![&42, &666, &13]; + assert!(non_existent_ids.iter().all(|id| !BagsList::contains(id))); + }) + } +} diff --git a/frame/bags-list/src/weights.rs b/frame/bags-list/src/weights.rs new file mode 100644 index 0000000000000..95d3dfa6eb989 --- /dev/null +++ b/frame/bags-list/src/weights.rs @@ -0,0 +1,95 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Autogenerated weights for pallet_bags_list +//! +//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev +//! DATE: 2021-09-15, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! 
EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 128 + +// Executed Command: +// target/release/substrate +// benchmark +// --chain=dev +// --steps=50 +// --repeat=20 +// --pallet=pallet_bags_list +// --extrinsic=* +// --execution=wasm +// --wasm-execution=compiled +// --heap-pages=4096 +// --output=./frame/bags-list/src/weights.rs +// --template=./.maintain/frame-weight-template.hbs + + +#![cfg_attr(rustfmt, rustfmt_skip)] +#![allow(unused_parens)] +#![allow(unused_imports)] + +use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use sp_std::marker::PhantomData; + +/// Weight functions needed for pallet_bags_list. +pub trait WeightInfo { + fn rebag_non_terminal() -> Weight; + fn rebag_terminal() -> Weight; +} + +/// Weights for pallet_bags_list using the Substrate node and recommended hardware. +pub struct SubstrateWeight(PhantomData); +impl WeightInfo for SubstrateWeight { + // Storage: Staking Bonded (r:1 w:0) + // Storage: Staking Ledger (r:1 w:0) + // Storage: BagsList ListNodes (r:4 w:4) + // Storage: BagsList ListBags (r:1 w:1) + fn rebag_non_terminal() -> Weight { + (74_175_000 as Weight) + .saturating_add(T::DbWeight::get().reads(7 as Weight)) + .saturating_add(T::DbWeight::get().writes(5 as Weight)) + } + // Storage: Staking Bonded (r:1 w:0) + // Storage: Staking Ledger (r:1 w:0) + // Storage: BagsList ListNodes (r:3 w:3) + // Storage: BagsList ListBags (r:2 w:2) + fn rebag_terminal() -> Weight { + (73_305_000 as Weight) + .saturating_add(T::DbWeight::get().reads(7 as Weight)) + .saturating_add(T::DbWeight::get().writes(5 as Weight)) + } +} + +// For backwards compatibility and tests +impl WeightInfo for () { + // Storage: Staking Bonded (r:1 w:0) + // Storage: Staking Ledger (r:1 w:0) + // Storage: BagsList ListNodes (r:4 w:4) + // Storage: BagsList ListBags (r:1 w:1) + fn rebag_non_terminal() -> Weight { + (74_175_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(7 as Weight)) + 
.saturating_add(RocksDbWeight::get().writes(5 as Weight)) + } + // Storage: Staking Bonded (r:1 w:0) + // Storage: Staking Ledger (r:1 w:0) + // Storage: BagsList ListNodes (r:3 w:3) + // Storage: BagsList ListBags (r:2 w:2) + fn rebag_terminal() -> Weight { + (73_305_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(7 as Weight)) + .saturating_add(RocksDbWeight::get().writes(5 as Weight)) + } +} diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 269057b55b094..e83c49433e2bb 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -620,6 +620,15 @@ pub mod pallet { #[pallet::constant] type SignedDepositWeight: Get>; + /// The maximum number of voters to put in the snapshot. At the moment, snapshots are only + /// over a single block, but once multi-block elections are introduced they will take place + /// over multiple blocks. + /// + /// Also, note the data type: If the voters are represented by a `u32` in `type + /// CompactSolution`, the same `u32` is used here to ensure bounds are respected. + #[pallet::constant] + type VoterSnapshotPerBlock: Get>; + /// Handler for the slashed deposits. type SlashHandler: OnUnbalanced>; @@ -1274,7 +1283,8 @@ impl Pallet { fn create_snapshot_external( ) -> Result<(Vec, Vec>, u32), ElectionError> { let target_limit = >::max_value().saturated_into::(); - let voter_limit = >::max_value().saturated_into::(); + // for now we have just a single block snapshot. + let voter_limit = T::VoterSnapshotPerBlock::get().saturated_into::(); let targets = T::DataProvider::targets(Some(target_limit)).map_err(ElectionError::DataProvider)?; @@ -1933,7 +1943,8 @@ mod tests { } #[test] - fn snapshot_creation_fails_if_too_big() { + fn snapshot_too_big_failure_onchain_fallback() { + // the `MockStaking` is designed such that if it has too many targets, it simply fails. 
ExtBuilder::default().build_and_execute(|| { Targets::set((0..(TargetIndex::max_value() as AccountId) + 1).collect::>()); @@ -1949,6 +1960,49 @@ mod tests { roll_to(29); let supports = MultiPhase::elect().unwrap(); assert!(supports.len() > 0); + }); + } + + #[test] + fn snapshot_too_big_failure_no_fallback() { + // and if the backup mode is nothing, we go into the emergency mode.. + ExtBuilder::default().onchain_fallback(false).build_and_execute(|| { + crate::mock::Targets::set( + (0..(TargetIndex::max_value() as AccountId) + 1).collect::>(), + ); + + // Signed phase failed to open. + roll_to(15); + assert_eq!(MultiPhase::current_phase(), Phase::Off); + + // Unsigned phase failed to open. + roll_to(25); + assert_eq!(MultiPhase::current_phase(), Phase::Off); + + roll_to(29); + let err = MultiPhase::elect().unwrap_err(); + assert_eq!(err, ElectionError::Fallback("NoFallback.")); + assert_eq!(MultiPhase::current_phase(), Phase::Emergency); + }); + } + + #[test] + fn snapshot_too_big_truncate() { + // but if there are too many voters, we simply truncate them. + ExtBuilder::default().build_and_execute(|| { + // we have 8 voters in total. + assert_eq!(crate::mock::Voters::get().len(), 8); + // but we want to take 2. + crate::mock::VoterSnapshotPerBlock::set(2); + + // Signed phase opens just fine. + roll_to(15); + assert_eq!(MultiPhase::current_phase(), Phase::Signed); + + assert_eq!( + MultiPhase::snapshot_metadata().unwrap(), + SolutionOrSnapshotSize { voters: 2, targets: 4 } + ); }) } diff --git a/frame/election-provider-multi-phase/src/mock.rs b/frame/election-provider-multi-phase/src/mock.rs index 28a15291e6520..0d563955595a8 100644 --- a/frame/election-provider-multi-phase/src/mock.rs +++ b/frame/election-provider-multi-phase/src/mock.rs @@ -268,6 +268,7 @@ parameter_types! 
{ pub static MinerMaxWeight: Weight = BlockWeights::get().max_block; pub static MinerMaxLength: u32 = 256; pub static MockWeightInfo: bool = false; + pub static VoterSnapshotPerBlock: VoterIndex = u32::max_value(); pub static EpochLength: u64 = 30; pub static OnChianFallback: bool = true; @@ -401,6 +402,7 @@ impl crate::Config for Runtime { type Fallback = MockFallback; type ForceOrigin = frame_system::EnsureRoot; type Solution = TestNposSolution; + type VoterSnapshotPerBlock = VoterSnapshotPerBlock; type Solver = SequentialPhragmen, Balancing>; } @@ -433,9 +435,9 @@ impl ElectionDataProvider for StakingMock { fn voters( maybe_max_len: Option, ) -> data_provider::Result)>> { - let voters = Voters::get(); - if maybe_max_len.map_or(false, |max_len| voters.len() > max_len) { - return Err("Voters too big") + let mut voters = Voters::get(); + if let Some(max_len) = maybe_max_len { + voters.truncate(max_len) } Ok(voters) diff --git a/frame/election-provider-support/src/lib.rs b/frame/election-provider-support/src/lib.rs index d2c4b1053cc6d..cb36e025c3bee 100644 --- a/frame/election-provider-support/src/lib.rs +++ b/frame/election-provider-support/src/lib.rs @@ -297,6 +297,75 @@ impl ElectionProvider for () { } } +/// A utility trait for something to implement `ElectionDataProvider` in a sensible way. +/// +/// This is generic over `AccountId` and it can represent a validator, a nominator, or any other +/// entity. +/// +/// To simplify the trait, the `VoteWeight` is hardcoded as the weight of each entity. The weights +/// are ascending, the higher, the better. In the long term, if this trait ends up having use cases +/// outside of the election context, it is easy enough to make it generic over the `VoteWeight`. +/// +/// Something that implements this trait will do a best-effort sort over ids, and thus can be +/// used on the implementing side of [`ElectionDataProvider`]. +pub trait SortedListProvider { + /// The list's error type. 
+ type Error; + + /// An iterator over the list, which can have `take` called on it. + fn iter() -> Box>; + + /// The current count of ids in the list. + fn count() -> u32; + + /// Return true if the list already contains `id`. + fn contains(id: &AccountId) -> bool; + + /// Hook for inserting a new id. + fn on_insert(id: AccountId, weight: VoteWeight) -> Result<(), Self::Error>; + + /// Hook for updating a single id. + fn on_update(id: &AccountId, weight: VoteWeight); + + /// Hook for removing am id from the list. + fn on_remove(id: &AccountId); + + /// Regenerate this list from scratch. Returns the count of items inserted. + /// + /// This should typically only be used at a runtime upgrade. + fn regenerate( + all: impl IntoIterator, + weight_of: Box VoteWeight>, + ) -> u32; + + /// Remove `maybe_count` number of items from the list. Returns the number of items actually + /// removed. WARNING: removes all items if `maybe_count` is `None`, which should never be done + /// in production settings because it can lead to an unbounded amount of storage accesses. + fn clear(maybe_count: Option) -> u32; + + /// Sanity check internal state of list. Only meant for debug compilation. + fn sanity_check() -> Result<(), &'static str>; + + /// If `who` changes by the returned amount they are guaranteed to have a worst case change + /// in their list position. + #[cfg(feature = "runtime-benchmarks")] + fn weight_update_worst_case(_who: &AccountId, _is_increase: bool) -> VoteWeight { + VoteWeight::MAX + } +} + +/// Something that can provide the `VoteWeight` of an account. Similar to [`ElectionProvider`] and +/// [`ElectionDataProvider`], this should typically be implementing by whoever is supposed to *use* +/// `SortedListProvider`. +pub trait VoteWeightProvider { + /// Get the current `VoteWeight` of `who`. + fn vote_weight(who: &AccountId) -> VoteWeight; + + /// For tests and benchmarks, set the `VoteWeight`. 
+ #[cfg(any(feature = "runtime-benchmarks", test))] + fn set_vote_weight_of(_: &AccountId, _: VoteWeight) {} +} + /// Something that can compute the result to an NPoS solution. pub trait NposSolver { /// The account identifier type of this solver. diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index 655a38fe1b540..9a0fce4d6b5b4 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -229,7 +229,7 @@ where (frame_system::Pallet::, COnRuntimeUpgrade, AllPallets) as OnRuntimeUpgrade - >::pre_upgrade()?; + >::pre_upgrade().unwrap(); let weight = Self::execute_on_runtime_upgrade(); @@ -237,7 +237,7 @@ where (frame_system::Pallet::, COnRuntimeUpgrade, AllPallets) as OnRuntimeUpgrade - >::post_upgrade()?; + >::post_upgrade().unwrap(); Ok(weight) } diff --git a/frame/grandpa/src/mock.rs b/frame/grandpa/src/mock.rs index 26dda514516a3..2f1b2630b2241 100644 --- a/frame/grandpa/src/mock.rs +++ b/frame/grandpa/src/mock.rs @@ -217,6 +217,7 @@ impl pallet_staking::Config for Test { type NextNewSession = Session; type ElectionProvider = onchain::OnChainSequentialPhragmen; type GenesisElectionProvider = Self::ElectionProvider; + type SortedListProvider = pallet_staking::UseNominatorsMap; type WeightInfo = (); } diff --git a/frame/offences/benchmarking/src/mock.rs b/frame/offences/benchmarking/src/mock.rs index c4fd88def0e33..82662295dea84 100644 --- a/frame/offences/benchmarking/src/mock.rs +++ b/frame/offences/benchmarking/src/mock.rs @@ -174,6 +174,7 @@ impl pallet_staking::Config for Test { type MaxNominatorRewardedPerValidator = MaxNominatorRewardedPerValidator; type ElectionProvider = onchain::OnChainSequentialPhragmen; type GenesisElectionProvider = Self::ElectionProvider; + type SortedListProvider = pallet_staking::UseNominatorsMap; type WeightInfo = (); } diff --git a/frame/session/benchmarking/src/mock.rs b/frame/session/benchmarking/src/mock.rs index c685db2bb2524..4d3a1a2d8689d 100644 --- a/frame/session/benchmarking/src/mock.rs 
+++ b/frame/session/benchmarking/src/mock.rs @@ -182,6 +182,7 @@ impl pallet_staking::Config for Test { type MaxNominatorRewardedPerValidator = MaxNominatorRewardedPerValidator; type ElectionProvider = onchain::OnChainSequentialPhragmen; type GenesisElectionProvider = Self::ElectionProvider; + type SortedListProvider = pallet_staking::UseNominatorsMap; type WeightInfo = (); } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index aba19ba56357a..70637bcd7726f 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -39,9 +39,11 @@ rand_chacha = { version = "0.2", default-features = false, optional = true } [dev-dependencies] sp-tracing = { version = "4.0.0-dev", path = "../../primitives/tracing" } sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } +sp-npos-elections = { version = "4.0.0-dev", path = "../../primitives/npos-elections" } pallet-balances = { version = "4.0.0-dev", path = "../balances" } pallet-timestamp = { version = "4.0.0-dev", path = "../timestamp" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../staking/reward-curve" } +pallet-bags-list = { version = "4.0.0-dev", features = ["runtime-benchmarks"], path = "../bags-list" } substrate-test-utils = { version = "4.0.0-dev", path = "../../test-utils" } frame-benchmarking = { version = "4.0.0-dev", path = "../benchmarking" } frame-election-provider-support = { version = "4.0.0-dev", path = "../election-provider-support" } diff --git a/frame/staking/src/benchmarking.rs b/frame/staking/src/benchmarking.rs index bdc3d81f3c29b..f3def7206320c 100644 --- a/frame/staking/src/benchmarking.rs +++ b/frame/staking/src/benchmarking.rs @@ -21,9 +21,10 @@ use super::*; use crate::Pallet as Staking; use testing_utils::*; +use frame_election_provider_support::SortedListProvider; use frame_support::{ pallet_prelude::*, - traits::{Currency, Get, Imbalance}, + traits::{Currency, CurrencyToVote, Get, Imbalance}, }; use sp_runtime::{ traits::{StaticLookup, 
Zero}, @@ -110,6 +111,8 @@ pub fn create_validator_with_nominators( assert_eq!(new_validators.len(), 1); assert_eq!(new_validators[0], v_stash, "Our validator was not selected!"); + assert_ne!(CounterForValidators::::get(), 0); + assert_ne!(CounterForNominators::::get(), 0); // Give Era Points let reward = EraRewardPoints:: { @@ -129,13 +132,91 @@ pub fn create_validator_with_nominators( Ok((v_stash, nominators)) } +struct ListScenario { + /// Stash that is expected to be moved. + origin_stash1: T::AccountId, + /// Controller of the Stash that is expected to be moved. + origin_controller1: T::AccountId, + dest_weight: BalanceOf, +} + +impl ListScenario { + /// An expensive scenario for bags-list implementation: + /// + /// - the node to be updated (r) is the head of a bag that has at least one other node. The bag + /// itself will need to be read and written to update its head. The node pointed to by r.next + /// will need to be read and written as it will need to have its prev pointer updated. Note + /// that there are two other worst case scenarios for bag removal: 1) the node is a tail and + /// 2) the node is a middle node with prev and next; all scenarios end up with the same number + /// of storage reads and writes. + /// + /// - the destination bag has at least one node, which will need its next pointer updated. + /// + /// NOTE: while this scenario specifically targets a worst case for the bags-list, it should + /// also elicit a worst case for other known `SortedListProvider` implementations; although + /// this may not be true against unknown `SortedListProvider` implementations. + fn new(origin_weight: BalanceOf, is_increase: bool) -> Result { + ensure!(!origin_weight.is_zero(), "origin weight must be greater than 0"); + + // burn the entire issuance. 
+ let i = T::Currency::burn(T::Currency::total_issuance()); + sp_std::mem::forget(i); + + // create accounts with the origin weight + + let (origin_stash1, origin_controller1) = create_stash_controller_with_balance::( + USER_SEED + 2, + origin_weight, + Default::default(), + )?; + Staking::::nominate( + RawOrigin::Signed(origin_controller1.clone()).into(), + // NOTE: these don't really need to be validators. + vec![T::Lookup::unlookup(account("random_validator", 0, SEED))], + )?; + + let (_origin_stash2, origin_controller2) = create_stash_controller_with_balance::( + USER_SEED + 3, + origin_weight, + Default::default(), + )?; + Staking::::nominate( + RawOrigin::Signed(origin_controller2.clone()).into(), + vec![T::Lookup::unlookup(account("random_validator", 0, SEED))].clone(), + )?; + + // find a destination weight that will trigger the worst case scenario + let dest_weight_as_vote = + T::SortedListProvider::weight_update_worst_case(&origin_stash1, is_increase); + + let total_issuance = T::Currency::total_issuance(); + + let dest_weight = + T::CurrencyToVote::to_currency(dest_weight_as_vote as u128, total_issuance); + + // create an account with the worst case destination weight + let (_dest_stash1, dest_controller1) = create_stash_controller_with_balance::( + USER_SEED + 1, + dest_weight, + Default::default(), + )?; + Staking::::nominate( + RawOrigin::Signed(dest_controller1).into(), + vec![T::Lookup::unlookup(account("random_validator", 0, SEED))], + )?; + + Ok(ListScenario { origin_stash1, origin_controller1, dest_weight }) + } +} + const USER_SEED: u32 = 999666; benchmarks! 
{ bond { let stash = create_funded_user::("stash", USER_SEED, 100); let controller = create_funded_user::("controller", USER_SEED, 100); - let controller_lookup: ::Source = T::Lookup::unlookup(controller.clone()); + let controller_lookup: ::Source + = T::Lookup::unlookup(controller.clone()); let reward_destination = RewardDestination::Staked; let amount = T::Currency::minimum_balance() * 10u32.into(); whitelist_account!(stash); @@ -146,10 +227,25 @@ benchmarks! { } bond_extra { - let (stash, controller) = create_stash_controller::(USER_SEED, 100, Default::default())?; - let max_additional = T::Currency::minimum_balance() * 10u32.into(); - let ledger = Ledger::::get(&controller).ok_or("ledger not created before")?; - let original_bonded: BalanceOf = ledger.active; + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup the worst case list scenario. + + // the weight the nominator will start at. + let scenario = ListScenario::::new(origin_weight, true)?; + + let max_additional = scenario.dest_weight.clone() - origin_weight; + + let stash = scenario.origin_stash1.clone(); + let controller = scenario.origin_controller1.clone(); + let original_bonded: BalanceOf + = Ledger::::get(&controller).map(|l| l.active).ok_or("ledger not created after")?; + + T::Currency::deposit_into_existing(&stash, max_additional).unwrap(); + whitelist_account!(stash); }: _(RawOrigin::Signed(stash), max_additional) verify { @@ -159,10 +255,25 @@ benchmarks! { } unbond { - let (_, controller) = create_stash_controller::(USER_SEED, 100, Default::default())?; - let amount = T::Currency::minimum_balance() * 10u32.into(); + use sp_std::convert::TryFrom; + // clean up any existing state. + clear_validators_and_nominators::(); + + // setup the worst case list scenario. + let total_issuance = T::Currency::total_issuance(); + // the weight the nominator will start at. 
The value used here is expected to be + // significantly higher than the first position in a list (e.g. the first bag threshold). + let origin_weight = BalanceOf::::try_from(952_994_955_240_703u128) + .map_err(|_| "balance expected to be a u128") + .unwrap(); + let scenario = ListScenario::::new(origin_weight, false)?; + + let stash = scenario.origin_stash1.clone(); + let controller = scenario.origin_controller1.clone(); + let amount = origin_weight - scenario.dest_weight.clone(); let ledger = Ledger::::get(&controller).ok_or("ledger not created before")?; let original_bonded: BalanceOf = ledger.active; + whitelist_account!(controller); }: _(RawOrigin::Signed(controller.clone()), amount) verify { @@ -194,26 +305,50 @@ benchmarks! { withdraw_unbonded_kill { // Slashing Spans let s in 0 .. MAX_SPANS; - let (stash, controller) = create_stash_controller::(0, 100, Default::default())?; - add_slashing_spans::(&stash, s); - let amount = T::Currency::minimum_balance() * 10u32.into(); - Staking::::unbond(RawOrigin::Signed(controller.clone()).into(), amount)?; + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup a worst case list scenario. Note that we don't care about the setup of the + // destination position because we are doing a removal from the list but no insert. 
+ let scenario = ListScenario::::new(origin_weight, true)?; + let controller = scenario.origin_controller1.clone(); + let stash = scenario.origin_stash1.clone(); + assert!(T::SortedListProvider::contains(&stash)); + + let ed = T::Currency::minimum_balance(); + let mut ledger = Ledger::::get(&controller).unwrap(); + ledger.active = ed - One::one(); + Ledger::::insert(&controller, ledger); CurrentEra::::put(EraIndex::max_value()); - let ledger = Ledger::::get(&controller).ok_or("ledger not created before")?; - let original_total: BalanceOf = ledger.total; + whitelist_account!(controller); }: withdraw_unbonded(RawOrigin::Signed(controller.clone()), s) verify { assert!(!Ledger::::contains_key(controller)); + assert!(!T::SortedListProvider::contains(&stash)); } validate { - let (stash, controller) = create_stash_controller::(USER_SEED, 100, Default::default())?; + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup a worst case scenario where the user calling validate was formerly a nominator so + // they must be removed from the list. + let scenario = ListScenario::::new(origin_weight, true)?; + let controller = scenario.origin_controller1.clone(); + let stash = scenario.origin_stash1.clone(); + assert!(T::SortedListProvider::contains(&stash)); + let prefs = ValidatorPrefs::default(); whitelist_account!(controller); }: _(RawOrigin::Signed(controller), prefs) verify { - assert!(Validators::::contains_key(stash)); + assert!(Validators::::contains_key(&stash)); + assert!(!T::SortedListProvider::contains(&stash)); } kick { @@ -225,7 +360,7 @@ benchmarks! { // these are the other validators; there are `T::MAX_NOMINATIONS - 1` of them, so // there are a total of `T::MAX_NOMINATIONS` validators in the system. 
- let rest_of_validators = create_validators::(T::MAX_NOMINATIONS - 1, 100)?; + let rest_of_validators = create_validators_with_seed::(T::MAX_NOMINATIONS - 1, 100, 415)?; // this is the validator that will be kicking. let (stash, controller) = create_stash_controller::( @@ -282,18 +417,50 @@ benchmarks! { // Worst case scenario, T::MAX_NOMINATIONS nominate { let n in 1 .. T::MAX_NOMINATIONS; - let (stash, controller) = create_stash_controller::(n + 1, 100, Default::default())?; - let validators = create_validators::(n, 100)?; + + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup a worst case list scenario. Note we don't care about the destination position, because + // we are just doing an insert into the origin position. + let scenario = ListScenario::::new(origin_weight, true)?; + let (stash, controller) = create_stash_controller_with_balance::( + SEED + T::MAX_NOMINATIONS + 1, // make sure the account does not conflict with others + origin_weight, + Default::default(), + ).unwrap(); + + assert!(!Nominators::::contains_key(&stash)); + assert!(!T::SortedListProvider::contains(&stash)); + + let validators = create_validators::(n, 100).unwrap(); whitelist_account!(controller); }: _(RawOrigin::Signed(controller), validators) verify { - assert!(Nominators::::contains_key(stash)); + assert!(Nominators::::contains_key(&stash)); + assert!(T::SortedListProvider::contains(&stash)) } chill { - let (_, controller) = create_stash_controller::(USER_SEED, 100, Default::default())?; + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup a worst case list scenario. Note that we don't care about the setup of the + // destination position because we are doing a removal from the list but no insert. 
+ let scenario = ListScenario::::new(origin_weight, true)?; + let controller = scenario.origin_controller1.clone(); + let stash = scenario.origin_stash1.clone(); + assert!(T::SortedListProvider::contains(&stash)); + whitelist_account!(controller); }: _(RawOrigin::Signed(controller)) + verify { + assert!(!T::SortedListProvider::contains(&stash)); + } set_payee { let (stash, controller) = create_stash_controller::(USER_SEED, 100, Default::default())?; @@ -345,11 +512,23 @@ benchmarks! { force_unstake { // Slashing Spans let s in 0 .. MAX_SPANS; - let (stash, controller) = create_stash_controller::(0, 100, Default::default())?; + // Clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup a worst case list scenario. Note that we don't care about the setup of the + // destination position because we are doing a removal from the list but no insert. + let scenario = ListScenario::::new(origin_weight, true)?; + let controller = scenario.origin_controller1.clone(); + let stash = scenario.origin_stash1.clone(); + assert!(T::SortedListProvider::contains(&stash)); add_slashing_spans::(&stash, s); - }: _(RawOrigin::Root, stash, s) + + }: _(RawOrigin::Root, stash.clone(), s) verify { assert!(!Ledger::::contains_key(&controller)); + assert!(!T::SortedListProvider::contains(&stash)); } cancel_deferred_slash { @@ -438,19 +617,46 @@ benchmarks! { rebond { let l in 1 .. MAX_UNLOCKING_CHUNKS as u32; - let (_, controller) = create_stash_controller::(USER_SEED, 100, Default::default())?; - let mut staking_ledger = Ledger::::get(controller.clone()).unwrap(); + + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get() + .max(T::Currency::minimum_balance()) + // we use 100 to play friendly with the list threshold values in the mock + .max(100u32.into()); + + // setup a worst case list scenario. 
+ let scenario = ListScenario::::new(origin_weight, true)?; + let dest_weight = scenario.dest_weight.clone(); + + // rebond an amount that will give the user dest_weight + let rebond_amount = dest_weight - origin_weight; + + // spread that amount to rebond across `l` unlocking chunks, + let value = rebond_amount / l.into(); + // if `value` is zero, we need a greater delta between dest <=> origin weight + assert_ne!(value, Zero::zero()); + // so the sum of unlocking chunks puts voter into the dest bag. + assert!(value * l.into() + origin_weight > origin_weight); + assert!(value * l.into() + origin_weight <= dest_weight); let unlock_chunk = UnlockChunk::> { - value: 1u32.into(), + value, era: EraIndex::zero(), }; + + let stash = scenario.origin_stash1.clone(); + let controller = scenario.origin_controller1.clone(); + let mut staking_ledger = Ledger::::get(controller.clone()).unwrap(); + for _ in 0 .. l { staking_ledger.unlocking.push(unlock_chunk.clone()) } Ledger::::insert(controller.clone(), staking_ledger.clone()); let original_bonded: BalanceOf = staking_ledger.active; + whitelist_account!(controller); - }: _(RawOrigin::Signed(controller.clone()), (l + 100).into()) + }: _(RawOrigin::Signed(controller.clone()), rebond_amount) verify { let ledger = Ledger::::get(&controller).ok_or("ledger not created after")?; let new_bonded: BalanceOf = ledger.active; @@ -477,19 +683,28 @@ benchmarks! { reap_stash { let s in 1 .. MAX_SPANS; - let (stash, controller) = create_stash_controller::(0, 100, Default::default())?; - Staking::::validate(RawOrigin::Signed(controller.clone()).into(), ValidatorPrefs::default())?; + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup a worst case list scenario. Note that we don't care about the setup of the + // destination position because we are doing a removal from the list but no insert. 
+ let scenario = ListScenario::::new(origin_weight, true)?; + let controller = scenario.origin_controller1.clone(); + let stash = scenario.origin_stash1.clone(); + add_slashing_spans::(&stash, s); T::Currency::make_free_balance_be(&stash, T::Currency::minimum_balance()); - whitelist_account!(controller); assert!(Bonded::::contains_key(&stash)); - assert!(Validators::::contains_key(&stash)); + assert!(T::SortedListProvider::contains(&stash)); + whitelist_account!(controller); }: _(RawOrigin::Signed(controller), stash.clone(), s) verify { assert!(!Bonded::::contains_key(&stash)); - assert!(!Validators::::contains_key(&stash)); + assert!(!T::SortedListProvider::contains(&stash)); } new_era { @@ -590,17 +805,21 @@ benchmarks! { // total number of slashing spans. Assigned to validators randomly. let s in 1 .. 20; - let validators = create_validators_with_nominators_for_era::(v, n, T::MAX_NOMINATIONS as usize, false, None)? - .into_iter() - .map(|v| T::Lookup::lookup(v).unwrap()) - .collect::>(); + let validators = create_validators_with_nominators_for_era::( + v, n, T::MAX_NOMINATIONS as usize, false, None + )? + .into_iter() + .map(|v| T::Lookup::lookup(v).unwrap()) + .collect::>(); (0..s).for_each(|index| { add_slashing_spans::(&validators[index as usize], 10); }); + + let num_voters = (v + n) as usize; }: { - let voters = >::get_npos_voters(); - assert_eq!(voters.len() as u32, v + n); + let voters = >::get_npos_voters(None); + assert_eq!(voters.len(), num_voters); } get_npos_targets { @@ -609,7 +828,9 @@ benchmarks! { // number of nominator intention. let n = MAX_NOMINATORS; - let _ = create_validators_with_nominators_for_era::(v, n, T::MAX_NOMINATIONS as usize, false, None)?; + let _ = create_validators_with_nominators_for_era::( + v, n, T::MAX_NOMINATIONS as usize, false, None + )?; }: { let targets = >::get_npos_targets(); assert_eq!(targets.len() as u32, v); @@ -633,8 +854,18 @@ benchmarks! 
{ } chill_other { - let (_, controller) = create_stash_controller::(USER_SEED, 100, Default::default())?; - Staking::::validate(RawOrigin::Signed(controller.clone()).into(), ValidatorPrefs::default())?; + // clean up any existing state. + clear_validators_and_nominators::(); + + let origin_weight = MinNominatorBond::::get().max(T::Currency::minimum_balance()); + + // setup a worst case list scenario. Note that we don't care about the setup of the + // destination position because we are doing a removal from the list but no insert. + let scenario = ListScenario::::new(origin_weight, true)?; + let controller = scenario.origin_controller1.clone(); + let stash = scenario.origin_stash1.clone(); + assert!(T::SortedListProvider::contains(&stash)); + Staking::::set_staking_limits( RawOrigin::Root.into(), BalanceOf::::max_value(), @@ -643,10 +874,11 @@ benchmarks! { Some(0), Some(Percent::from_percent(0)) )?; + let caller = whitelisted_caller(); }: _(RawOrigin::Signed(caller), controller.clone()) verify { - assert!(!Validators::::contains_key(controller)); + assert!(!T::SortedListProvider::contains(&stash)); } } @@ -658,7 +890,7 @@ mod tests { #[test] fn create_validators_with_nominators_for_era_works() { - ExtBuilder::default().has_stakers(true).build_and_execute(|| { + ExtBuilder::default().build_and_execute(|| { let v = 10; let n = 100; @@ -674,6 +906,9 @@ mod tests { let count_validators = Validators::::iter().count(); let count_nominators = Nominators::::iter().count(); + assert_eq!(count_validators, CounterForValidators::::get() as usize); + assert_eq!(count_nominators, CounterForNominators::::get() as usize); + assert_eq!(count_validators, v as usize); assert_eq!(count_nominators, n as usize); }); @@ -681,7 +916,7 @@ mod tests { #[test] fn create_validator_with_nominators_works() { - ExtBuilder::default().has_stakers(true).build_and_execute(|| { + ExtBuilder::default().build_and_execute(|| { let n = 10; let (validator_stash, nominators) = 
create_validator_with_nominators::( @@ -706,7 +941,7 @@ mod tests { #[test] fn add_slashing_spans_works() { - ExtBuilder::default().has_stakers(true).build_and_execute(|| { + ExtBuilder::default().build_and_execute(|| { let n = 10; let (validator_stash, _nominators) = create_validator_with_nominators::( @@ -738,7 +973,7 @@ mod tests { #[test] fn test_payout_all() { - ExtBuilder::default().has_stakers(true).build_and_execute(|| { + ExtBuilder::default().build_and_execute(|| { let v = 10; let n = 100; diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index 31b35acdd99aa..136515a5d6168 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -100,6 +100,13 @@ //! //! An account can become a nominator via the [`nominate`](Call::nominate) call. //! +//! #### Voting +//! +//! Staking is closely related to elections; actual validators are chosen from among all potential +//! validators via election by the potential validators and nominators. To reduce use of the phrase +//! "potential validators and nominators", we often use the term **voters**, who are simply +//! the union of potential validators and nominators. +//! //! #### Rewards and Slash //! //! The **reward and slashing** procedure is the core of the Staking pallet, attempting to _embrace @@ -264,15 +271,15 @@ //! - [Session](../pallet_session/index.html): Used to manage sessions. Also, a list of new //! validators is stored in the Session pallet's `Validators` at the end of each era. -#![recursion_limit = "128"] #![cfg_attr(not(feature = "std"), no_std)] #[cfg(feature = "runtime-benchmarks")] pub mod benchmarking; -#[cfg(test)] -mod mock; #[cfg(any(feature = "runtime-benchmarks", test))] pub mod testing_utils; + +#[cfg(test)] +pub(crate) mod mock; #[cfg(test)] mod tests; @@ -420,6 +427,7 @@ pub struct UnlockChunk { } /// The ledger of a (bonded) stash. 
+#[cfg_attr(feature = "runtime-benchmarks", derive(Default))] #[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct StakingLedger { /// The stash account whose balance is actually locked and at stake. @@ -727,11 +735,12 @@ enum Releases { V5_0_0, // blockable validators. V6_0_0, // removal of all storage associated with offchain phragmen. V7_0_0, // keep track of number of nominators / validators in map + V8_0_0, // populate `SortedListProvider`. } impl Default for Releases { fn default() -> Self { - Releases::V7_0_0 + Releases::V8_0_0 } } diff --git a/frame/staking/src/migrations.rs b/frame/staking/src/migrations.rs index d7fa2afc63082..7064f06dd12c7 100644 --- a/frame/staking/src/migrations.rs +++ b/frame/staking/src/migrations.rs @@ -18,6 +18,56 @@ use super::*; +pub mod v8 { + use frame_election_provider_support::SortedListProvider; + use frame_support::traits::Get; + + use crate::{Config, Nominators, Pallet, StorageVersion, Weight}; + + #[cfg(feature = "try-runtime")] + pub fn pre_migrate() -> Result<(), &'static str> { + frame_support::ensure!( + StorageVersion::::get() == crate::Releases::V7_0_0, + "must upgrade linearly" + ); + + crate::log!(info, "👜 staking bags-list migration passes PRE migrate checks ✅",); + Ok(()) + } + + /// Migration to sorted [`SortedListProvider`]. 
+ pub fn migrate() -> Weight { + if StorageVersion::::get() == crate::Releases::V7_0_0 { + crate::log!(info, "migrating staking to Releases::V8_0_0"); + + let migrated = T::SortedListProvider::regenerate( + Nominators::::iter().map(|(id, _)| id), + Pallet::::weight_of_fn(), + ); + debug_assert_eq!(T::SortedListProvider::sanity_check(), Ok(())); + + StorageVersion::::put(crate::Releases::V8_0_0); + crate::log!( + info, + "👜 completed staking migration to Releases::V8_0_0 with {} voters migrated", + migrated, + ); + + T::BlockWeights::get().max_block + } else { + T::DbWeight::get().reads(1) + } + } + + #[cfg(feature = "try-runtime")] + pub fn post_migrate() -> Result<(), &'static str> { + T::SortedListProvider::sanity_check() + .map_err(|_| "SortedListProvider is not in a sane state.")?; + crate::log!(info, "👜 staking bags-list migration passes POST migrate checks ✅",); + Ok(()) + } +} + pub mod v7 { use super::*; diff --git a/frame/staking/src/mock.rs b/frame/staking/src/mock.rs index 0357fa05cb1dd..b3ce8e063cb61 100644 --- a/frame/staking/src/mock.rs +++ b/frame/staking/src/mock.rs @@ -17,9 +17,8 @@ //! 
Test utilities -use crate as staking; -use crate::*; -use frame_election_provider_support::onchain; +use crate::{self as pallet_staking, *}; +use frame_election_provider_support::{onchain, SortedListProvider}; use frame_support::{ assert_ok, parameter_types, traits::{ @@ -104,8 +103,9 @@ frame_support::construct_runtime!( Authorship: pallet_authorship::{Pallet, Call, Storage, Inherent}, Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent}, Balances: pallet_balances::{Pallet, Call, Storage, Config, Event}, - Staking: staking::{Pallet, Call, Config, Storage, Event}, + Staking: pallet_staking::{Pallet, Call, Config, Storage, Event}, Session: pallet_session::{Pallet, Call, Storage, Event, Config}, + BagsList: pallet_bags_list::{Pallet, Call, Storage, Event}, } ); @@ -242,12 +242,26 @@ impl OnUnbalanced> for RewardRemainderMock { } } +const THRESHOLDS: [sp_npos_elections::VoteWeight; 9] = + [10, 20, 30, 40, 50, 60, 1_000, 2_000, 10_000]; + +parameter_types! { + pub static BagThresholds: &'static [sp_npos_elections::VoteWeight] = &THRESHOLDS; +} + +impl pallet_bags_list::Config for Test { + type Event = Event; + type WeightInfo = (); + type VoteWeightProvider = Staking; + type BagThresholds = BagThresholds; +} + impl onchain::Config for Test { type Accuracy = Perbill; type DataProvider = Staking; } -impl Config for Test { +impl crate::pallet::pallet::Config for Test { const MAX_NOMINATIONS: u32 = 16; type Currency = Balances; type UnixTime = Timestamp; @@ -267,6 +281,8 @@ impl Config for Test { type ElectionProvider = onchain::OnChainSequentialPhragmen; type GenesisElectionProvider = Self::ElectionProvider; type WeightInfo = (); + // NOTE: consider a macro and use `UseNominatorsMap` as well. 
+ type SortedListProvider = BagsList; } impl frame_system::offchain::SendTransactionTypes for Test @@ -469,7 +485,7 @@ impl ExtBuilder { stakers.extend(self.stakers) } - let _ = staking::GenesisConfig:: { + let _ = pallet_staking::GenesisConfig:: { stakers, validator_count: self.validator_count, minimum_validator_count: self.minimum_validator_count, @@ -533,6 +549,10 @@ fn check_count() { let validator_count = Validators::::iter().count() as u32; assert_eq!(nominator_count, CounterForNominators::::get()); assert_eq!(validator_count, CounterForValidators::::get()); + + // the voters that the `SortedListProvider` list is storing for us. + let external_voters = ::SortedListProvider::count(); + assert_eq!(external_voters, nominator_count); } fn check_ledgers() { @@ -625,10 +645,14 @@ pub(crate) fn current_era() -> EraIndex { Staking::current_era().unwrap() } -pub(crate) fn bond_validator(stash: AccountId, ctrl: AccountId, val: Balance) { +pub(crate) fn bond(stash: AccountId, ctrl: AccountId, val: Balance) { let _ = Balances::make_free_balance_be(&stash, val); let _ = Balances::make_free_balance_be(&ctrl, val); assert_ok!(Staking::bond(Origin::signed(stash), ctrl, val, RewardDestination::Controller)); +} + +pub(crate) fn bond_validator(stash: AccountId, ctrl: AccountId, val: Balance) { + bond(stash, ctrl, val); assert_ok!(Staking::validate(Origin::signed(ctrl), ValidatorPrefs::default())); } @@ -638,9 +662,7 @@ pub(crate) fn bond_nominator( val: Balance, target: Vec, ) { - let _ = Balances::make_free_balance_be(&stash, val); - let _ = Balances::make_free_balance_be(&ctrl, val); - assert_ok!(Staking::bond(Origin::signed(stash), ctrl, val, RewardDestination::Controller)); + bond(stash, ctrl, val); assert_ok!(Staking::nominate(Origin::signed(ctrl), target)); } @@ -833,7 +855,7 @@ macro_rules! 
assert_session_era { }; } -pub(crate) fn staking_events() -> Vec> { +pub(crate) fn staking_events() -> Vec> { System::events() .into_iter() .map(|r| r.event) diff --git a/frame/staking/src/pallet/impls.rs b/frame/staking/src/pallet/impls.rs index fecd493eea022..3ae520872f278 100644 --- a/frame/staking/src/pallet/impls.rs +++ b/frame/staking/src/pallet/impls.rs @@ -17,7 +17,10 @@ //! Implementations for the Staking FRAME Pallet. -use frame_election_provider_support::{data_provider, ElectionProvider, Supports, VoteWeight}; +use frame_election_provider_support::{ + data_provider, ElectionDataProvider, ElectionProvider, SortedListProvider, Supports, + VoteWeight, VoteWeightProvider, +}; use frame_support::{ pallet_prelude::*, traits::{ @@ -26,6 +29,7 @@ use frame_support::{ }, weights::{Weight, WithPostDispatchInfo}, }; +use frame_system::pallet_prelude::BlockNumberFor; use pallet_session::historical; use sp_runtime::{ traits::{Bounded, Convert, SaturatedConversion, Saturating, Zero}, @@ -64,7 +68,7 @@ impl Pallet { /// /// This prevents call sites from repeatedly requesting `total_issuance` from backend. But it is /// important to be only used while the total issuance is not changing. - pub fn slashable_balance_of_fn() -> Box VoteWeight> { + pub fn weight_of_fn() -> Box VoteWeight> { // NOTE: changing this to unboxed `impl Fn(..)` return type and the pallet will still // compile, while some types in mock fail to resolve. let issuance = T::Currency::total_issuance(); @@ -73,6 +77,12 @@ impl Pallet { }) } + /// Same as `weight_of_fn`, but made for one time use. + pub fn weight_of(who: &T::AccountId) -> VoteWeight { + let issuance = T::Currency::total_issuance(); + Self::slashable_balance_of_vote_weight(who, issuance) + } + pub(super) fn do_payout_stakers( validator_stash: T::AccountId, era: EraIndex, @@ -629,54 +639,92 @@ impl Pallet { /// Get all of the voters that are eligible for the npos election. 
/// - /// This will use all on-chain nominators, and all the validators will inject a self vote. + /// `maybe_max_len` can impose a cap on the number of voters returned; first all the validators + /// are included in no particular order, then the remainder is taken from the nominators, as + /// returned by [`Config::SortedListProvider`]. + /// + /// This will use nominators, and all the validators will inject a self vote. /// /// This function is self-weighing as [`DispatchClass::Mandatory`]. /// /// ### Slashing /// /// All nominations that have been submitted before the last non-zero slash of the validator are - /// auto-chilled. - pub fn get_npos_voters() -> Vec<(T::AccountId, VoteWeight, Vec)> { - let weight_of = Self::slashable_balance_of_fn(); - let mut all_voters = Vec::new(); + /// auto-chilled, but still count towards the limit imposed by `maybe_max_len`. + pub fn get_npos_voters( + maybe_max_len: Option, + ) -> Vec<(T::AccountId, VoteWeight, Vec)> { + let max_allowed_len = { + let nominator_count = CounterForNominators::::get() as usize; + let validator_count = CounterForValidators::::get() as usize; + let all_voter_count = validator_count.saturating_add(nominator_count); + maybe_max_len.unwrap_or(all_voter_count).min(all_voter_count) + }; - let mut validator_count = 0u32; - for (validator, _) in >::iter() { + let mut all_voters = Vec::<_>::with_capacity(max_allowed_len); + + // first, grab all validators in no particular order, capped by the maximum allowed length. + let mut validators_taken = 0u32; + for (validator, _) in >::iter().take(max_allowed_len) { // Append self vote. - let self_vote = (validator.clone(), weight_of(&validator), vec![validator.clone()]); + let self_vote = + (validator.clone(), Self::weight_of(&validator), vec![validator.clone()]); all_voters.push(self_vote); - validator_count.saturating_inc(); + validators_taken.saturating_inc(); } - // Collect all slashing spans into a BTreeMap for further queries. + // ..
and grab whatever we have left from nominators. + let nominators_quota = (max_allowed_len as u32).saturating_sub(validators_taken); let slashing_spans = >::iter().collect::>(); - let mut nominator_count = 0u32; - for (nominator, nominations) in Nominators::::iter() { - let Nominations { submitted_in, mut targets, suppressed: _ } = nominations; - - // Filter out nomination targets which were nominated before the most recent - // slashing span. - targets.retain(|stash| { - slashing_spans - .get(stash) - .map_or(true, |spans| submitted_in >= spans.last_nonzero_slash()) - }); + // track the count of nominators added to `all_voters + let mut nominators_taken = 0u32; + // track every nominator iterated over, but not necessarily added to `all_voters` + let mut nominators_seen = 0u32; + + let mut nominators_iter = T::SortedListProvider::iter(); + while nominators_taken < nominators_quota && nominators_seen < nominators_quota * 2 { + let nominator = match nominators_iter.next() { + Some(nominator) => { + nominators_seen.saturating_inc(); + nominator + }, + None => break, + }; - if !targets.is_empty() { - let vote_weight = weight_of(&nominator); - all_voters.push((nominator, vote_weight, targets)); - nominator_count.saturating_inc(); + if let Some(Nominations { submitted_in, mut targets, suppressed: _ }) = + >::get(&nominator) + { + targets.retain(|stash| { + slashing_spans + .get(stash) + .map_or(true, |spans| submitted_in >= spans.last_nonzero_slash()) + }); + if !targets.len().is_zero() { + all_voters.push((nominator.clone(), Self::weight_of(&nominator), targets)); + nominators_taken.saturating_inc(); + } + } else { + log!(error, "invalid item in `SortedListProvider`: {:?}", nominator) } } + // all_voters should have not re-allocated. 
+ debug_assert!(all_voters.capacity() == max_allowed_len); + Self::register_weight(T::WeightInfo::get_npos_voters( - validator_count, - nominator_count, + validators_taken, + nominators_taken, slashing_spans.len() as u32, )); + log!( + info, + "generated {} npos voters, {} from validators and {} nominators", + all_voters.len(), + validators_taken, + nominators_taken + ); all_voters } @@ -698,34 +746,59 @@ impl Pallet { } /// This function will add a nominator to the `Nominators` storage map, - /// and keep track of the `CounterForNominators`. + /// [`SortedListProvider`] and keep track of the `CounterForNominators`. /// /// If the nominator already exists, their nominations will be updated. + /// + /// NOTE: you must ALWAYS use this function to add nominator or update their targets. Any access + /// to `Nominators`, its counter, or `VoterList` outside of this function is almost certainly + /// wrong. pub fn do_add_nominator(who: &T::AccountId, nominations: Nominations) { if !Nominators::::contains_key(who) { - CounterForNominators::::mutate(|x| x.saturating_inc()) + // maybe update the counter. + CounterForNominators::::mutate(|x| x.saturating_inc()); + + // maybe update sorted list. Error checking is defensive-only - this should never fail. + if T::SortedListProvider::on_insert(who.clone(), Self::weight_of(who)).is_err() { + log!(warn, "attempt to insert duplicate nominator ({:#?})", who); + debug_assert!(false, "attempt to insert duplicate nominator"); + }; + + debug_assert_eq!(T::SortedListProvider::sanity_check(), Ok(())); } + Nominators::::insert(who, nominations); } /// This function will remove a nominator from the `Nominators` storage map, - /// and keep track of the `CounterForNominators`. + /// [`SortedListProvider`] and keep track of the `CounterForNominators`. /// /// Returns true if `who` was removed from `Nominators`, otherwise false. + /// + /// NOTE: you must ALWAYS use this function to remove a nominator from the system. 
Any access to + /// `Nominators`, its counter, or `VoterList` outside of this function is almost certainly + /// wrong. pub fn do_remove_nominator(who: &T::AccountId) -> bool { if Nominators::::contains_key(who) { Nominators::::remove(who); CounterForNominators::::mutate(|x| x.saturating_dec()); + T::SortedListProvider::on_remove(who); + debug_assert_eq!(T::SortedListProvider::sanity_check(), Ok(())); + debug_assert_eq!(CounterForNominators::::get(), T::SortedListProvider::count()); true } else { false } } - /// This function will add a validator to the `Validators` storage map, - /// and keep track of the `CounterForValidators`. + /// This function will add a validator to the `Validators` storage map, and keep track of the + /// `CounterForValidators`. /// /// If the validator already exists, their preferences will be updated. + /// + /// NOTE: you must ALWAYS use this function to add a validator to the system. Any access to + /// `Validators`, its counter, or `VoterList` outside of this function is almost certainly + /// wrong. pub fn do_add_validator(who: &T::AccountId, prefs: ValidatorPrefs) { if !Validators::::contains_key(who) { CounterForValidators::::mutate(|x| x.saturating_inc()) @@ -737,6 +810,10 @@ impl Pallet { /// and keep track of the `CounterForValidators`. /// /// Returns true if `who` was removed from `Validators`, otherwise false. + /// + /// NOTE: you must ALWAYS use this function to remove a validator from the system. Any access to + /// `Validators`, its counter, or `VoterList` outside of this function is almost certainly + /// wrong. 
pub fn do_remove_validator(who: &T::AccountId) -> bool { if Validators::::contains_key(who) { Validators::::remove(who); @@ -758,10 +835,9 @@ impl Pallet { } } -impl frame_election_provider_support::ElectionDataProvider - for Pallet -{ +impl ElectionDataProvider> for Pallet { const MAXIMUM_VOTES_PER_VOTER: u32 = T::MAX_NOMINATIONS; + fn desired_targets() -> data_provider::Result { Self::register_weight(T::DbWeight::get().reads(1)); Ok(Self::validator_count()) @@ -770,30 +846,26 @@ impl frame_election_provider_support::ElectionDataProvider, ) -> data_provider::Result)>> { - let nominator_count = CounterForNominators::::get(); - let validator_count = CounterForValidators::::get(); - - let voter_count = nominator_count.saturating_add(validator_count) as usize; debug_assert!(>::iter().count() as u32 == CounterForNominators::::get()); debug_assert!(>::iter().count() as u32 == CounterForValidators::::get()); + debug_assert_eq!( + CounterForNominators::::get(), + T::SortedListProvider::count(), + "voter_count must be accurate", + ); - // register the extra 2 reads - Self::register_weight(T::DbWeight::get().reads(2)); - - if maybe_max_len.map_or(false, |max_len| voter_count > max_len) { - return Err("Voter snapshot too big") - } + // This can never fail -- if `maybe_max_len` is `Some(_)` we handle it. + let voters = Self::get_npos_voters(maybe_max_len); + debug_assert!(maybe_max_len.map_or(true, |max| voters.len() <= max)); - Ok(Self::get_npos_voters()) + Ok(voters) } fn targets(maybe_max_len: Option) -> data_provider::Result> { - let target_count = CounterForValidators::::get() as usize; + let target_count = CounterForValidators::::get(); - // register the extra 1 read - Self::register_weight(T::DbWeight::get().reads(1)); - - if maybe_max_len.map_or(false, |max_len| target_count > max_len) { + // We can't handle this case yet -- return an error. 
+ if maybe_max_len.map_or(false, |max_len| target_count > max_len as u32) { return Err("Target snapshot too big") } @@ -879,6 +951,9 @@ impl frame_election_provider_support::ElectionDataProvider>::remove_all(None); >::remove_all(None); >::remove_all(None); + >::kill(); + >::kill(); + let _ = T::SortedListProvider::clear(None); } #[cfg(feature = "runtime-benchmarks")] @@ -1152,3 +1227,77 @@ where consumed_weight } } + +impl VoteWeightProvider for Pallet { + fn vote_weight(who: &T::AccountId) -> VoteWeight { + Self::weight_of(who) + } + + #[cfg(feature = "runtime-benchmarks")] + fn set_vote_weight_of(who: &T::AccountId, weight: VoteWeight) { + // this will clearly result in an inconsistent state, but it should not matter for a + // benchmark. + use sp_std::convert::TryInto; + let active: BalanceOf = weight.try_into().map_err(|_| ()).unwrap(); + let mut ledger = Self::ledger(who).unwrap_or_default(); + ledger.active = active; + >::insert(who, ledger); + >::insert(who, who); + + // also, we play a trick to make sure that an issuance-based `CurrencyToVote` behaves well: + // This will make sure that total issuance is zero, thus the currency to vote will be a 1-1 + // conversion. + let imbalance = T::Currency::burn(T::Currency::total_issuance()); + // kinda ugly, but gets the job done. The fact that this works here is a HUGE exception. + // Don't try this pattern in other places. + sp_std::mem::forget(imbalance); + } +} + +/// A simple voter list implementation that does not require any additional pallets. Note, this +/// does not provide nominators in sorted order. If you desire nominators in a sorted order take +/// a look at [`pallet-bags-list`]. +pub struct UseNominatorsMap(sp_std::marker::PhantomData); +impl SortedListProvider for UseNominatorsMap { + type Error = (); + + /// Returns an iterator over the voter list, which can have `take` called on it.
+ fn iter() -> Box> { + Box::new(Nominators::::iter().map(|(n, _)| n)) + } + fn count() -> u32 { + CounterForNominators::::get() + } + fn contains(id: &T::AccountId) -> bool { + Nominators::::contains_key(id) + } + fn on_insert(_: T::AccountId, _weight: VoteWeight) -> Result<(), Self::Error> { + // nothing to do on insert. + Ok(()) + } + fn on_update(_: &T::AccountId, _weight: VoteWeight) { + // nothing to do on update. + } + fn on_remove(_: &T::AccountId) { + // nothing to do on remove. + } + fn regenerate( + _: impl IntoIterator, + _: Box VoteWeight>, + ) -> u32 { + // nothing to do upon regenerate. + 0 + } + fn sanity_check() -> Result<(), &'static str> { + Ok(()) + } + fn clear(maybe_count: Option) -> u32 { + Nominators::::remove_all(maybe_count); + if let Some(count) = maybe_count { + CounterForNominators::::mutate(|noms| *noms - count); + count + } else { + CounterForNominators::::take() + } + } +} diff --git a/frame/staking/src/pallet/mod.rs b/frame/staking/src/pallet/mod.rs index d99cd89f3b06c..c71130a3492b1 100644 --- a/frame/staking/src/pallet/mod.rs +++ b/frame/staking/src/pallet/mod.rs @@ -17,6 +17,7 @@ //! Staking FRAME Pallet. +use frame_election_provider_support::SortedListProvider; use frame_support::{ pallet_prelude::*, traits::{ @@ -38,7 +39,7 @@ mod impls; pub use impls::*; use crate::{ - migrations, slashing, weights::WeightInfo, ActiveEraInfo, BalanceOf, EraIndex, EraPayout, + log, migrations, slashing, weights::WeightInfo, ActiveEraInfo, BalanceOf, EraIndex, EraPayout, EraRewardPoints, Exposure, Forcing, NegativeImbalanceOf, Nominations, PositiveImbalanceOf, Releases, RewardDestination, SessionInterface, StakingLedger, UnappliedSlash, UnlockChunk, ValidatorPrefs, @@ -140,6 +141,11 @@ pub mod pallet { #[pallet::constant] type MaxNominatorRewardedPerValidator: Get; + /// Something that can provide a sorted list of voters in a somewhat sorted way. The + /// original use case for this was designed with [`pallet_bags_list::Pallet`] in mind. 
If + /// the bags-list is not desired, [`impls::UseNominatorsMap`] is likely the desired option. + type SortedListProvider: SortedListProvider; + /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; } @@ -492,6 +498,13 @@ pub mod pallet { MinValidatorBond::::put(self.min_validator_bond); for &(ref stash, ref controller, balance, ref status) in &self.stakers { + log!( + trace, + "inserting genesis staker: {:?} => {:?} => {:?}", + stash, + balance, + status + ); assert!( T::Currency::free_balance(&stash) >= balance, "Stash does not have enough balance to bond." @@ -514,6 +527,13 @@ pub mod pallet { _ => Ok(()), }); } + + // all voters are reported to the `SortedListProvider`. + assert_eq!( + T::SortedListProvider::count(), + CounterForNominators::::get(), + "not all genesis stakers were inserted into sorted list provider, something is wrong." + ); } } @@ -763,8 +783,15 @@ pub mod pallet { Error::::InsufficientBond ); - Self::deposit_event(Event::::Bonded(stash, extra)); + // NOTE: ledger must be updated prior to calling `Self::weight_of`. Self::update_ledger(&controller, &ledger); + // update this staker in the sorted list, if they exist in it. + if T::SortedListProvider::contains(&stash) { + T::SortedListProvider::on_update(&stash, Self::weight_of(&ledger.stash)); + debug_assert_eq!(T::SortedListProvider::sanity_check(), Ok(())); + } + + Self::deposit_event(Event::::Bonded(stash.clone(), extra)); } Ok(()) } @@ -823,7 +850,14 @@ pub mod pallet { // Note: in case there is no current era it is fine to bond one era more. let era = Self::current_era().unwrap_or(0) + T::BondingDuration::get(); ledger.unlocking.push(UnlockChunk { value, era }); + // NOTE: ledger must be updated prior to calling `Self::weight_of`. Self::update_ledger(&controller, &ledger); + + // update this staker in the sorted list, if they exist in it. 
+ if T::SortedListProvider::contains(&ledger.stash) { + T::SortedListProvider::on_update(&ledger.stash, Self::weight_of(&ledger.stash)); + } + Self::deposit_event(Event::::Unbonded(ledger.stash, value)); } Ok(()) @@ -1319,7 +1353,12 @@ pub mod pallet { ensure!(ledger.active >= T::Currency::minimum_balance(), Error::::InsufficientBond); Self::deposit_event(Event::::Bonded(ledger.stash.clone(), value)); + + // NOTE: ledger must be updated prior to calling `Self::weight_of`. Self::update_ledger(&controller, &ledger); + if T::SortedListProvider::contains(&ledger.stash) { + T::SortedListProvider::on_update(&ledger.stash, Self::weight_of(&ledger.stash)); + } let removed_chunks = 1u32 // for the case where the last iterated chunk is not removed .saturating_add(initial_unlocking) @@ -1492,8 +1531,6 @@ pub mod pallet { /// /// This can be helpful if bond requirements are updated, and we need to remove old users /// who do not satisfy these requirements. - // TODO: Maybe we can deprecate `chill` in the future. - // https://github.com/paritytech/substrate/issues/9111 #[pallet::weight(T::WeightInfo::chill_other())] pub fn chill_other(origin: OriginFor, controller: T::AccountId) -> DispatchResult { // Anyone can call this function. diff --git a/frame/staking/src/testing_utils.rs b/frame/staking/src/testing_utils.rs index 795c066d09bb3..13762cf5886db 100644 --- a/frame/staking/src/testing_utils.rs +++ b/frame/staking/src/testing_utils.rs @@ -27,6 +27,7 @@ use rand_chacha::{ }; use sp_io::hashing::blake2_256; +use frame_election_provider_support::SortedListProvider; use frame_support::{pallet_prelude::*, traits::Currency}; use sp_runtime::{traits::StaticLookup, Perbill}; use sp_std::prelude::*; @@ -37,8 +38,11 @@ const SEED: u32 = 0; pub fn clear_validators_and_nominators() { Validators::::remove_all(None); CounterForValidators::::kill(); + + // whenever we touch nominators counter we should update `T::SortedListProvider` as well. 
Nominators::::remove_all(None); CounterForNominators::::kill(); + let _ = T::SortedListProvider::clear(None); } /// Grab a funded user. @@ -49,9 +53,18 @@ pub fn create_funded_user( ) -> T::AccountId { let user = account(string, n, SEED); let balance = T::Currency::minimum_balance() * balance_factor.into(); - T::Currency::make_free_balance_be(&user, balance); - // ensure T::CurrencyToVote will work correctly. - T::Currency::issue(balance); + let _ = T::Currency::make_free_balance_be(&user, balance); + user +} + +/// Grab a funded user with max Balance. +pub fn create_funded_user_with_balance( + string: &'static str, + n: u32, + balance: BalanceOf, +) -> T::AccountId { + let user = account(string, n, SEED); + let _ = T::Currency::make_free_balance_be(&user, balance); user } @@ -75,6 +88,26 @@ pub fn create_stash_controller( return Ok((stash, controller)) } +/// Create a stash and controller pair with fixed balance. +pub fn create_stash_controller_with_balance( + n: u32, + balance: crate::BalanceOf, + destination: RewardDestination, +) -> Result<(T::AccountId, T::AccountId), &'static str> { + let stash = create_funded_user_with_balance::("stash", n, balance); + let controller = create_funded_user_with_balance::("controller", n, balance); + let controller_lookup: ::Source = + T::Lookup::unlookup(controller.clone()); + + Staking::::bond( + RawOrigin::Signed(stash.clone()).into(), + controller_lookup, + balance, + destination, + )?; + Ok((stash, controller)) +} + /// Create a stash and controller pair, where the controller is dead, and payouts go to controller. /// This is used to test worst case payout scenarios. pub fn create_stash_and_dead_controller( @@ -101,11 +134,20 @@ pub fn create_stash_and_dead_controller( pub fn create_validators( max: u32, balance_factor: u32, +) -> Result::Source>, &'static str> { + create_validators_with_seed::(max, balance_factor, 0) +} + +/// create `max` validators, with a seed to help unintentional prevent account collisions. 
+pub fn create_validators_with_seed( + max: u32, + balance_factor: u32, + seed: u32, ) -> Result::Source>, &'static str> { let mut validators: Vec<::Source> = Vec::with_capacity(max as usize); for i in 0..max { let (stash, controller) = - create_stash_controller::(i, balance_factor, RewardDestination::Staked)?; + create_stash_controller::(i + seed, balance_factor, RewardDestination::Staked)?; let validator_prefs = ValidatorPrefs { commission: Perbill::from_percent(50), ..Default::default() }; Staking::::validate(RawOrigin::Signed(controller).into(), validator_prefs)?; diff --git a/frame/staking/src/tests.rs b/frame/staking/src/tests.rs index 97dfaa39c84a9..5e7fe3d6266aa 100644 --- a/frame/staking/src/tests.rs +++ b/frame/staking/src/tests.rs @@ -18,7 +18,7 @@ //! Tests for the module. use super::{Event, *}; -use frame_election_provider_support::{ElectionProvider, Support}; +use frame_election_provider_support::{ElectionProvider, SortedListProvider, Support}; use frame_support::{ assert_noop, assert_ok, dispatch::WithPostDispatchInfo, @@ -542,8 +542,8 @@ fn nominating_and_rewards_should_work() { total: 1000 + 800, own: 1000, others: vec![ - IndividualExposure { who: 3, value: 400 }, IndividualExposure { who: 1, value: 400 }, + IndividualExposure { who: 3, value: 400 }, ] }, ); @@ -553,8 +553,8 @@ fn nominating_and_rewards_should_work() { total: 1000 + 1200, own: 1000, others: vec![ - IndividualExposure { who: 3, value: 600 }, IndividualExposure { who: 1, value: 600 }, + IndividualExposure { who: 3, value: 600 }, ] }, ); @@ -1907,8 +1907,8 @@ fn bond_with_duplicate_vote_should_be_ignored_by_election_provider() { assert_eq!( supports, vec![ - (21, Support { total: 1800, voters: vec![(21, 1000), (3, 400), (1, 400)] }), - (31, Support { total: 2200, voters: vec![(31, 1000), (3, 600), (1, 600)] }) + (21, Support { total: 1800, voters: vec![(21, 1000), (1, 400), (3, 400)] }), + (31, Support { total: 2200, voters: vec![(31, 1000), (1, 600), (3, 600)] }) ], ); }); @@ 
-1952,7 +1952,7 @@ fn bond_with_duplicate_vote_should_be_ignored_by_election_provider_elected() { supports, vec![ (11, Support { total: 1500, voters: vec![(11, 1000), (1, 500)] }), - (21, Support { total: 2500, voters: vec![(21, 1000), (3, 1000), (1, 500)] }) + (21, Support { total: 2500, voters: vec![(21, 1000), (1, 500), (3, 1000)] }) ], ); }); @@ -3881,11 +3881,137 @@ mod election_data_provider { } #[test] - fn respects_len_limits() { - ExtBuilder::default().build_and_execute(|| { - assert_eq!(Staking::voters(Some(1)).unwrap_err(), "Voter snapshot too big"); - assert_eq!(Staking::targets(Some(1)).unwrap_err(), "Target snapshot too big"); - }); + fn respects_snapshot_len_limits() { + ExtBuilder::default() + .set_status(41, StakerStatus::Validator) + .build_and_execute(|| { + // sum of all nominators who'd be voters (1), plus the self-votes (4). + assert_eq!( + ::SortedListProvider::count() + + >::iter().count() as u32, + 5 + ); + + // if limits is less.. + assert_eq!(Staking::voters(Some(1)).unwrap().len(), 1); + + // if limit is equal.. + assert_eq!(Staking::voters(Some(5)).unwrap().len(), 5); + + // if limit is more. + assert_eq!(Staking::voters(Some(55)).unwrap().len(), 5); + + // if target limit is more.. + assert_eq!(Staking::targets(Some(6)).unwrap().len(), 4); + assert_eq!(Staking::targets(Some(4)).unwrap().len(), 4); + + // if target limit is less, then we return an error. 
+ assert_eq!(Staking::targets(Some(1)).unwrap_err(), "Target snapshot too big"); + }); + } + + #[test] + fn only_iterates_max_2_times_nominators_quota() { + ExtBuilder::default() + .nominate(true) // add nominator 101, who nominates [11, 21] + // the other nominators only nominate 21 + .add_staker(61, 60, 2_000, StakerStatus::::Nominator(vec![21])) + .add_staker(71, 70, 2_000, StakerStatus::::Nominator(vec![21])) + .add_staker(81, 80, 2_000, StakerStatus::::Nominator(vec![21])) + .build_and_execute(|| { + // given our nominators ordered by stake, + assert_eq!( + ::SortedListProvider::iter().collect::>(), + vec![61, 71, 81, 101] + ); + + // and total voters + assert_eq!( + ::SortedListProvider::count() + + >::iter().count() as u32, + 7 + ); + + // roll to session 5 + run_to_block(25); + + // slash 21, the only validator nominated by our first 3 nominators + add_slash(&21); + + // we take 4 voters: 2 validators and 2 nominators (so nominators quota = 2) + assert_eq!( + Staking::voters(Some(3)) + .unwrap() + .iter() + .map(|(stash, _, _)| stash) + .copied() + .collect::>(), + vec![31, 11], // 2 validators, but no nominators because we hit the quota + ); + }); + } + + // Even if some of the higher staked nominators are slashed, we still get up to max len voters + // by adding more lower staked nominators. In other words, we assert that we keep on adding + // valid nominators until we reach max len voters; which is opposed to simply stopping after we + // have iterated max len voters, but not adding all of them to voters due to some nominators not + // having valid targets. 
+ #[test] + fn get_max_len_voters_even_if_some_nominators_are_slashed() { + ExtBuilder::default() + .nominate(true) // add nominator 101, who nominates [11, 21] + .add_staker(61, 60, 20, StakerStatus::::Nominator(vec![21])) + // 61 only nominates validator 21 ^^ + .add_staker(71, 70, 10, StakerStatus::::Nominator(vec![11, 21])) + .build_and_execute(|| { + // given our nominators ordered by stake, + assert_eq!( + ::SortedListProvider::iter().collect::>(), + vec![101, 61, 71] + ); + + // and total voters + assert_eq!( + ::SortedListProvider::count() + + >::iter().count() as u32, + 6 + ); + + // we take 5 voters + assert_eq!( + Staking::voters(Some(5)) + .unwrap() + .iter() + .map(|(stash, _, _)| stash) + .copied() + .collect::>(), + // then + vec![ + 31, 21, 11, // 3 validators + 101, 61 // 2 nominators, and 71 is excluded + ], + ); + + // roll to session 5 + run_to_block(25); + + // slash 21, the only validator nominated by 61 + add_slash(&21); + + // we take 4 voters + assert_eq!( + Staking::voters(Some(4)) + .unwrap() + .iter() + .map(|(stash, _, _)| stash) + .copied() + .collect::>(), + vec![ + 31, 11, // 2 validators (21 was slashed) + 101, 71 // 2 nominators, excluding 61 + ], + ); + }); } @@ -4232,3 +4358,28 @@ fn capped_stakers_works() { assert_ok!(Staking::validate(Origin::signed(last_validator), ValidatorPrefs::default())); }) } + +mod sorted_list_provider { + use super::*; + use frame_election_provider_support::SortedListProvider; + + #[test] + fn re_nominate_does_not_change_counters_or_list() { + ExtBuilder::default().nominate(true).build_and_execute(|| { + // given + let pre_insert_nominator_count = Nominators::::iter().count() as u32; + assert_eq!(::SortedListProvider::count(), pre_insert_nominator_count); + assert!(Nominators::::contains_key(101)); + assert_eq!(::SortedListProvider::iter().collect::>(), vec![101]); + + // when account 101 renominates + assert_ok!(Staking::nominate(Origin::signed(100), vec![41])); + + // then counts don't change
+ assert_eq!(::SortedListProvider::count(), pre_insert_nominator_count); + assert_eq!(Nominators::::iter().count() as u32, pre_insert_nominator_count); + // and the list is the same + assert_eq!(::SortedListProvider::iter().collect::>(), vec![101]); + }); + } +} diff --git a/frame/staking/src/weights.rs b/frame/staking/src/weights.rs index 0bcf179e29339..32c8dc80da158 100644 --- a/frame/staking/src/weights.rs +++ b/frame/staking/src/weights.rs @@ -18,7 +18,7 @@ //! Autogenerated weights for pallet_staking //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2021-08-18, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2021-09-04, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` //! EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 128 // Executed Command: @@ -85,37 +85,42 @@ impl WeightInfo for SubstrateWeight { // Storage: Balances Locks (r:1 w:1) // Storage: Staking Payee (r:0 w:1) fn bond() -> Weight { - (73_523_000 as Weight) + (73_865_000 as Weight) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(4 as Weight)) } // Storage: Staking Bonded (r:1 w:0) // Storage: Staking Ledger (r:1 w:1) // Storage: Balances Locks (r:1 w:1) + // Storage: BagsList ListNodes (r:3 w:3) + // Storage: BagsList ListBags (r:2 w:2) fn bond_extra() -> Weight { - (58_129_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + (114_296_000 as Weight) + .saturating_add(T::DbWeight::get().reads(8 as Weight)) + .saturating_add(T::DbWeight::get().writes(7 as Weight)) } // Storage: Staking Ledger (r:1 w:1) // Storage: Staking Nominators (r:1 w:0) - // Storage: Staking Validators (r:1 w:0) + // Storage: Staking MinNominatorBond (r:1 w:0) // Storage: Staking CurrentEra (r:1 w:0) // Storage: Balances Locks (r:1 w:1) // Storage: System Account (r:1 w:1) + // Storage: 
BagsList ListNodes (r:3 w:3) + // Storage: Staking Bonded (r:1 w:0) + // Storage: BagsList ListBags (r:2 w:2) fn unbond() -> Weight { - (61_542_000 as Weight) - .saturating_add(T::DbWeight::get().reads(6 as Weight)) - .saturating_add(T::DbWeight::get().writes(3 as Weight)) + (121_737_000 as Weight) + .saturating_add(T::DbWeight::get().reads(12 as Weight)) + .saturating_add(T::DbWeight::get().writes(8 as Weight)) } // Storage: Staking Ledger (r:1 w:1) // Storage: Staking CurrentEra (r:1 w:0) // Storage: Balances Locks (r:1 w:1) // Storage: System Account (r:1 w:1) fn withdraw_unbonded_update(s: u32, ) -> Weight { - (53_160_000 as Weight) + (51_631_000 as Weight) // Standard Error: 0 - .saturating_add((53_000 as Weight).saturating_mul(s as Weight)) + .saturating_add((55_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } @@ -124,36 +129,40 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking Bonded (r:1 w:1) // Storage: Staking SlashingSpans (r:1 w:0) // Storage: Staking Validators (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: System Account (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: Staking Payee (r:0 w:1) - // Storage: Staking SpanSlash (r:0 w:2) - fn withdraw_unbonded_kill(s: u32, ) -> Weight { - (85_826_000 as Weight) - // Standard Error: 2_000 - .saturating_add((2_453_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(8 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) - .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + fn withdraw_unbonded_kill(_s: u32, ) -> Weight { + (101_870_000 as Weight) + 
.saturating_add(T::DbWeight::get().reads(13 as Weight)) + .saturating_add(T::DbWeight::get().writes(11 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking MinValidatorBond (r:1 w:0) // Storage: Staking Validators (r:1 w:1) // Storage: Staking MaxValidatorsCount (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: Staking CounterForValidators (r:1 w:1) fn validate() -> Weight { - (34_936_000 as Weight) - .saturating_add(T::DbWeight::get().reads(6 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + (69_092_000 as Weight) + .saturating_add(T::DbWeight::get().reads(11 as Weight)) + .saturating_add(T::DbWeight::get().writes(8 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking Nominators (r:1 w:1) fn kick(k: u32, ) -> Weight { - (23_493_000 as Weight) - // Standard Error: 17_000 - .saturating_add((16_632_000 as Weight).saturating_mul(k as Weight)) + (21_468_000 as Weight) + // Standard Error: 19_000 + .saturating_add((16_415_000 as Weight).saturating_mul(k as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(k as Weight))) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(k as Weight))) @@ -165,84 +174,97 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking Validators (r:2 w:0) // Storage: Staking CurrentEra (r:1 w:0) // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: Staking Bonded (r:1 w:0) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) fn nominate(n: u32, ) -> Weight { - (41_733_000 as Weight) - // Standard Error: 11_000 - .saturating_add((5_840_000 as 
Weight).saturating_mul(n as Weight)) - .saturating_add(T::DbWeight::get().reads(7 as Weight)) + (82_389_000 as Weight) + // Standard Error: 14_000 + .saturating_add((5_597_000 as Weight).saturating_mul(n as Weight)) + .saturating_add(T::DbWeight::get().reads(12 as Weight)) .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(n as Weight))) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + .saturating_add(T::DbWeight::get().writes(6 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking Validators (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) fn chill() -> Weight { - (17_901_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) + (69_655_000 as Weight) + .saturating_add(T::DbWeight::get().reads(8 as Weight)) + .saturating_add(T::DbWeight::get().writes(6 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking Payee (r:0 w:1) fn set_payee() -> Weight { - (13_760_000 as Weight) + (12_770_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: Staking Bonded (r:1 w:1) // Storage: Staking Ledger (r:2 w:2) fn set_controller() -> Weight { - (28_388_000 as Weight) + (27_756_000 as Weight) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } // Storage: Staking ValidatorCount (r:0 w:1) fn set_validator_count() -> Weight { - (2_537_000 as Weight) + (2_446_000 as Weight) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: Staking ForceEra (r:0 w:1) fn force_no_eras() -> Weight { - (2_749_000 as Weight) + (2_720_000 as Weight) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: Staking 
ForceEra (r:0 w:1) fn force_new_era() -> Weight { - (2_834_000 as Weight) + (2_711_000 as Weight) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: Staking ForceEra (r:0 w:1) fn force_new_era_always() -> Weight { - (2_800_000 as Weight) + (2_796_000 as Weight) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: Staking Invulnerables (r:0 w:1) fn set_invulnerables(v: u32, ) -> Weight { - (3_429_000 as Weight) + (3_141_000 as Weight) // Standard Error: 0 - .saturating_add((56_000 as Weight).saturating_mul(v as Weight)) + .saturating_add((53_000 as Weight).saturating_mul(v as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: Staking Bonded (r:1 w:1) // Storage: Staking SlashingSpans (r:1 w:0) // Storage: Staking Validators (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: System Account (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: Staking Ledger (r:0 w:1) // Storage: Staking Payee (r:0 w:1) // Storage: Staking SpanSlash (r:0 w:2) fn force_unstake(s: u32, ) -> Weight { - (61_799_000 as Weight) + (97_394_000 as Weight) // Standard Error: 2_000 - .saturating_add((2_451_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(6 as Weight)) - .saturating_add(T::DbWeight::get().writes(6 as Weight)) + .saturating_add((2_370_000 as Weight).saturating_mul(s as Weight)) + .saturating_add(T::DbWeight::get().reads(11 as Weight)) + .saturating_add(T::DbWeight::get().writes(12 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } // Storage: Staking UnappliedSlashes (r:1 w:1) fn cancel_deferred_slash(s: u32, ) -> Weight { - (3_383_988_000 as Weight) - // Standard Error: 223_000 - 
.saturating_add((19_981_000 as Weight).saturating_mul(s as Weight)) + (2_783_746_000 as Weight) + // Standard Error: 182_000 + .saturating_add((16_223_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } @@ -257,9 +279,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking Payee (r:2 w:0) // Storage: System Account (r:2 w:2) fn payout_stakers_dead_controller(n: u32, ) -> Weight { - (124_714_000 as Weight) - // Standard Error: 23_000 - .saturating_add((47_575_000 as Weight).saturating_mul(n as Weight)) + (109_233_000 as Weight) + // Standard Error: 17_000 + .saturating_add((47_612_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(10 as Weight)) .saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) .saturating_add(T::DbWeight::get().writes(2 as Weight)) @@ -277,9 +299,9 @@ impl WeightInfo for SubstrateWeight { // Storage: System Account (r:2 w:2) // Storage: Balances Locks (r:2 w:2) fn payout_stakers_alive_staked(n: u32, ) -> Weight { - (160_203_000 as Weight) - // Standard Error: 24_000 - .saturating_add((61_321_000 as Weight).saturating_mul(n as Weight)) + (177_392_000 as Weight) + // Standard Error: 20_000 + .saturating_add((60_771_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(11 as Weight)) .saturating_add(T::DbWeight::get().reads((5 as Weight).saturating_mul(n as Weight))) .saturating_add(T::DbWeight::get().writes(3 as Weight)) @@ -288,12 +310,15 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking Ledger (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: System Account (r:1 w:1) + // Storage: BagsList ListNodes (r:3 w:3) + // Storage: Staking Bonded (r:1 w:0) + // Storage: BagsList ListBags (r:2 w:2) fn rebond(l: u32, ) -> Weight { - (49_593_000 as Weight) - // Standard Error: 3_000 - .saturating_add((78_000 as 
Weight).saturating_mul(l as Weight)) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) - .saturating_add(T::DbWeight::get().writes(3 as Weight)) + (111_858_000 as Weight) + // Standard Error: 4_000 + .saturating_add((36_000 as Weight).saturating_mul(l as Weight)) + .saturating_add(T::DbWeight::get().reads(9 as Weight)) + .saturating_add(T::DbWeight::get().writes(8 as Weight)) } // Storage: Staking CurrentEra (r:1 w:0) // Storage: Staking HistoryDepth (r:1 w:1) @@ -306,8 +331,8 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking ErasStartSessionIndex (r:0 w:1) fn set_history_depth(e: u32, ) -> Weight { (0 as Weight) - // Standard Error: 71_000 - .saturating_add((35_237_000 as Weight).saturating_mul(e as Weight)) + // Standard Error: 68_000 + .saturating_add((33_495_000 as Weight).saturating_mul(e as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(4 as Weight)) .saturating_add(T::DbWeight::get().writes((7 as Weight).saturating_mul(e as Weight))) @@ -315,19 +340,22 @@ impl WeightInfo for SubstrateWeight { // Storage: System Account (r:1 w:1) // Storage: Staking Bonded (r:1 w:1) // Storage: Staking SlashingSpans (r:1 w:1) - // Storage: Staking Validators (r:1 w:1) - // Storage: Staking CounterForValidators (r:1 w:1) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Validators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: Staking Ledger (r:0 w:1) // Storage: Staking Payee (r:0 w:1) // Storage: Staking SpanSlash (r:0 w:1) fn reap_stash(s: u32, ) -> Weight { - (72_484_000 as Weight) - // Standard Error: 2_000 - .saturating_add((2_452_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(7 as Weight)) - 
.saturating_add(T::DbWeight::get().writes(8 as Weight)) + (100_178_000 as Weight) + // Standard Error: 1_000 + .saturating_add((2_358_000 as Weight).saturating_mul(s as Weight)) + .saturating_add(T::DbWeight::get().reads(11 as Weight)) + .saturating_add(T::DbWeight::get().writes(12 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } // Storage: Staking CounterForNominators (r:1 w:0) @@ -336,7 +364,9 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking Bonded (r:101 w:0) // Storage: Staking Ledger (r:101 w:0) // Storage: Staking SlashingSpans (r:1 w:0) - // Storage: Staking Nominators (r:101 w:0) + // Storage: BagsList ListBags (r:200 w:0) + // Storage: BagsList ListNodes (r:100 w:0) + // Storage: Staking Nominators (r:100 w:0) // Storage: Staking ValidatorCount (r:1 w:0) // Storage: Staking MinimumValidatorCount (r:1 w:0) // Storage: Staking CurrentEra (r:1 w:1) @@ -348,39 +378,43 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking ErasStartSessionIndex (r:0 w:1) fn new_era(v: u32, n: u32, ) -> Weight { (0 as Weight) - // Standard Error: 856_000 - .saturating_add((305_057_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 860_000 + .saturating_add((298_721_000 as Weight).saturating_mul(v as Weight)) // Standard Error: 43_000 - .saturating_add((47_890_000 as Weight).saturating_mul(n as Weight)) - .saturating_add(T::DbWeight::get().reads(9 as Weight)) + .saturating_add((49_427_000 as Weight).saturating_mul(n as Weight)) + .saturating_add(T::DbWeight::get().reads(208 as Weight)) .saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(v as Weight))) - .saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) + .saturating_add(T::DbWeight::get().reads((4 as Weight).saturating_mul(n as Weight))) .saturating_add(T::DbWeight::get().writes(3 as Weight)) .saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(v as Weight))) } + // Storage: 
Staking CounterForNominators (r:1 w:0) + // Storage: Staking CounterForValidators (r:1 w:0) // Storage: Staking Validators (r:501 w:0) // Storage: Staking Bonded (r:1500 w:0) // Storage: Staking Ledger (r:1500 w:0) // Storage: Staking SlashingSpans (r:21 w:0) - // Storage: Staking Nominators (r:1001 w:0) + // Storage: BagsList ListBags (r:200 w:0) + // Storage: BagsList ListNodes (r:1000 w:0) + // Storage: Staking Nominators (r:1000 w:0) fn get_npos_voters(v: u32, n: u32, s: u32, ) -> Weight { (0 as Weight) - // Standard Error: 98_000 - .saturating_add((25_610_000 as Weight).saturating_mul(v as Weight)) - // Standard Error: 98_000 - .saturating_add((28_064_000 as Weight).saturating_mul(n as Weight)) - // Standard Error: 3_346_000 - .saturating_add((18_123_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) + // Standard Error: 91_000 + .saturating_add((26_605_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 91_000 + .saturating_add((31_481_000 as Weight).saturating_mul(n as Weight)) + // Standard Error: 3_122_000 + .saturating_add((16_672_000 as Weight).saturating_mul(s as Weight)) + .saturating_add(T::DbWeight::get().reads(204 as Weight)) .saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(v as Weight))) - .saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) + .saturating_add(T::DbWeight::get().reads((4 as Weight).saturating_mul(n as Weight))) .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) } // Storage: Staking Validators (r:501 w:0) fn get_npos_targets(v: u32, ) -> Weight { - (30_422_000 as Weight) - // Standard Error: 33_000 - .saturating_add((11_252_000 as Weight).saturating_mul(v as Weight)) + (0 as Weight) + // Standard Error: 34_000 + .saturating_add((10_558_000 as Weight).saturating_mul(v as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().reads((1 as 
Weight).saturating_mul(v as Weight))) } @@ -390,20 +424,23 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking MaxNominatorsCount (r:0 w:1) // Storage: Staking MinNominatorBond (r:0 w:1) fn set_staking_limits() -> Weight { - (6_486_000 as Weight) + (6_353_000 as Weight) .saturating_add(T::DbWeight::get().writes(5 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking ChillThreshold (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) - // Storage: Staking Validators (r:1 w:1) - // Storage: Staking MaxValidatorsCount (r:1 w:0) - // Storage: Staking CounterForValidators (r:1 w:1) - // Storage: Staking MinValidatorBond (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking MaxNominatorsCount (r:1 w:0) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: Staking MinNominatorBond (r:1 w:0) + // Storage: Staking Validators (r:1 w:0) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) fn chill_other() -> Weight { - (58_222_000 as Weight) - .saturating_add(T::DbWeight::get().reads(7 as Weight)) - .saturating_add(T::DbWeight::get().writes(2 as Weight)) + (83_389_000 as Weight) + .saturating_add(T::DbWeight::get().reads(11 as Weight)) + .saturating_add(T::DbWeight::get().writes(6 as Weight)) } } @@ -416,37 +453,42 @@ impl WeightInfo for () { // Storage: Balances Locks (r:1 w:1) // Storage: Staking Payee (r:0 w:1) fn bond() -> Weight { - (73_523_000 as Weight) + (73_865_000 as Weight) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(4 as Weight)) } // Storage: Staking Bonded (r:1 w:0) // Storage: Staking Ledger (r:1 w:1) // Storage: Balances Locks (r:1 w:1) + // Storage: BagsList ListNodes (r:3 w:3) + // Storage: BagsList ListBags (r:2 w:2) fn bond_extra() -> Weight { - (58_129_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - 
.saturating_add(RocksDbWeight::get().writes(2 as Weight)) + (114_296_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(8 as Weight)) + .saturating_add(RocksDbWeight::get().writes(7 as Weight)) } // Storage: Staking Ledger (r:1 w:1) // Storage: Staking Nominators (r:1 w:0) - // Storage: Staking Validators (r:1 w:0) + // Storage: Staking MinNominatorBond (r:1 w:0) // Storage: Staking CurrentEra (r:1 w:0) // Storage: Balances Locks (r:1 w:1) // Storage: System Account (r:1 w:1) + // Storage: BagsList ListNodes (r:3 w:3) + // Storage: Staking Bonded (r:1 w:0) + // Storage: BagsList ListBags (r:2 w:2) fn unbond() -> Weight { - (61_542_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(6 as Weight)) - .saturating_add(RocksDbWeight::get().writes(3 as Weight)) + (121_737_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(12 as Weight)) + .saturating_add(RocksDbWeight::get().writes(8 as Weight)) } // Storage: Staking Ledger (r:1 w:1) // Storage: Staking CurrentEra (r:1 w:0) // Storage: Balances Locks (r:1 w:1) // Storage: System Account (r:1 w:1) fn withdraw_unbonded_update(s: u32, ) -> Weight { - (53_160_000 as Weight) + (51_631_000 as Weight) // Standard Error: 0 - .saturating_add((53_000 as Weight).saturating_mul(s as Weight)) + .saturating_add((55_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } @@ -455,36 +497,40 @@ impl WeightInfo for () { // Storage: Staking Bonded (r:1 w:1) // Storage: Staking SlashingSpans (r:1 w:0) // Storage: Staking Validators (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: System Account (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: Staking Payee 
(r:0 w:1) - // Storage: Staking SpanSlash (r:0 w:2) - fn withdraw_unbonded_kill(s: u32, ) -> Weight { - (85_826_000 as Weight) - // Standard Error: 2_000 - .saturating_add((2_453_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(8 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) - .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) + fn withdraw_unbonded_kill(_s: u32, ) -> Weight { + (101_870_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(13 as Weight)) + .saturating_add(RocksDbWeight::get().writes(11 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking MinValidatorBond (r:1 w:0) // Storage: Staking Validators (r:1 w:1) // Storage: Staking MaxValidatorsCount (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: Staking CounterForValidators (r:1 w:1) fn validate() -> Weight { - (34_936_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(6 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + (69_092_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(11 as Weight)) + .saturating_add(RocksDbWeight::get().writes(8 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking Nominators (r:1 w:1) fn kick(k: u32, ) -> Weight { - (23_493_000 as Weight) - // Standard Error: 17_000 - .saturating_add((16_632_000 as Weight).saturating_mul(k as Weight)) + (21_468_000 as Weight) + // Standard Error: 19_000 + .saturating_add((16_415_000 as Weight).saturating_mul(k as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(k as Weight))) .saturating_add(RocksDbWeight::get().writes((1 as 
Weight).saturating_mul(k as Weight))) @@ -496,84 +542,97 @@ impl WeightInfo for () { // Storage: Staking Validators (r:2 w:0) // Storage: Staking CurrentEra (r:1 w:0) // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: Staking Bonded (r:1 w:0) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) fn nominate(n: u32, ) -> Weight { - (41_733_000 as Weight) - // Standard Error: 11_000 - .saturating_add((5_840_000 as Weight).saturating_mul(n as Weight)) - .saturating_add(RocksDbWeight::get().reads(7 as Weight)) + (82_389_000 as Weight) + // Standard Error: 14_000 + .saturating_add((5_597_000 as Weight).saturating_mul(n as Weight)) + .saturating_add(RocksDbWeight::get().reads(12 as Weight)) .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(n as Weight))) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + .saturating_add(RocksDbWeight::get().writes(6 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking Validators (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) fn chill() -> Weight { - (17_901_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) + (69_655_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(8 as Weight)) + .saturating_add(RocksDbWeight::get().writes(6 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking Payee (r:0 w:1) fn set_payee() -> Weight { - (13_760_000 as Weight) + (12_770_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: Staking Bonded (r:1 w:1) // Storage: Staking Ledger (r:2 w:2) fn set_controller() -> Weight { - (28_388_000 as 
Weight) + (27_756_000 as Weight) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } // Storage: Staking ValidatorCount (r:0 w:1) fn set_validator_count() -> Weight { - (2_537_000 as Weight) + (2_446_000 as Weight) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: Staking ForceEra (r:0 w:1) fn force_no_eras() -> Weight { - (2_749_000 as Weight) + (2_720_000 as Weight) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: Staking ForceEra (r:0 w:1) fn force_new_era() -> Weight { - (2_834_000 as Weight) + (2_711_000 as Weight) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: Staking ForceEra (r:0 w:1) fn force_new_era_always() -> Weight { - (2_800_000 as Weight) + (2_796_000 as Weight) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: Staking Invulnerables (r:0 w:1) fn set_invulnerables(v: u32, ) -> Weight { - (3_429_000 as Weight) + (3_141_000 as Weight) // Standard Error: 0 - .saturating_add((56_000 as Weight).saturating_mul(v as Weight)) + .saturating_add((53_000 as Weight).saturating_mul(v as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: Staking Bonded (r:1 w:1) // Storage: Staking SlashingSpans (r:1 w:0) // Storage: Staking Validators (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: System Account (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: Staking Ledger (r:0 w:1) // Storage: Staking Payee (r:0 w:1) // Storage: Staking SpanSlash (r:0 w:2) fn force_unstake(s: u32, ) -> Weight { - (61_799_000 as Weight) + (97_394_000 as Weight) // Standard Error: 2_000 - .saturating_add((2_451_000 as Weight).saturating_mul(s as Weight)) - 
.saturating_add(RocksDbWeight::get().reads(6 as Weight)) - .saturating_add(RocksDbWeight::get().writes(6 as Weight)) + .saturating_add((2_370_000 as Weight).saturating_mul(s as Weight)) + .saturating_add(RocksDbWeight::get().reads(11 as Weight)) + .saturating_add(RocksDbWeight::get().writes(12 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } // Storage: Staking UnappliedSlashes (r:1 w:1) fn cancel_deferred_slash(s: u32, ) -> Weight { - (3_383_988_000 as Weight) - // Standard Error: 223_000 - .saturating_add((19_981_000 as Weight).saturating_mul(s as Weight)) + (2_783_746_000 as Weight) + // Standard Error: 182_000 + .saturating_add((16_223_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } @@ -588,9 +647,9 @@ impl WeightInfo for () { // Storage: Staking Payee (r:2 w:0) // Storage: System Account (r:2 w:2) fn payout_stakers_dead_controller(n: u32, ) -> Weight { - (124_714_000 as Weight) - // Standard Error: 23_000 - .saturating_add((47_575_000 as Weight).saturating_mul(n as Weight)) + (109_233_000 as Weight) + // Standard Error: 17_000 + .saturating_add((47_612_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(10 as Weight)) .saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) @@ -608,9 +667,9 @@ impl WeightInfo for () { // Storage: System Account (r:2 w:2) // Storage: Balances Locks (r:2 w:2) fn payout_stakers_alive_staked(n: u32, ) -> Weight { - (160_203_000 as Weight) - // Standard Error: 24_000 - .saturating_add((61_321_000 as Weight).saturating_mul(n as Weight)) + (177_392_000 as Weight) + // Standard Error: 20_000 + .saturating_add((60_771_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(11 as Weight)) 
.saturating_add(RocksDbWeight::get().reads((5 as Weight).saturating_mul(n as Weight))) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) @@ -619,12 +678,15 @@ impl WeightInfo for () { // Storage: Staking Ledger (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: System Account (r:1 w:1) + // Storage: BagsList ListNodes (r:3 w:3) + // Storage: Staking Bonded (r:1 w:0) + // Storage: BagsList ListBags (r:2 w:2) fn rebond(l: u32, ) -> Weight { - (49_593_000 as Weight) - // Standard Error: 3_000 - .saturating_add((78_000 as Weight).saturating_mul(l as Weight)) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes(3 as Weight)) + (111_858_000 as Weight) + // Standard Error: 4_000 + .saturating_add((36_000 as Weight).saturating_mul(l as Weight)) + .saturating_add(RocksDbWeight::get().reads(9 as Weight)) + .saturating_add(RocksDbWeight::get().writes(8 as Weight)) } // Storage: Staking CurrentEra (r:1 w:0) // Storage: Staking HistoryDepth (r:1 w:1) @@ -637,8 +699,8 @@ impl WeightInfo for () { // Storage: Staking ErasStartSessionIndex (r:0 w:1) fn set_history_depth(e: u32, ) -> Weight { (0 as Weight) - // Standard Error: 71_000 - .saturating_add((35_237_000 as Weight).saturating_mul(e as Weight)) + // Standard Error: 68_000 + .saturating_add((33_495_000 as Weight).saturating_mul(e as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(4 as Weight)) .saturating_add(RocksDbWeight::get().writes((7 as Weight).saturating_mul(e as Weight))) @@ -646,19 +708,22 @@ impl WeightInfo for () { // Storage: System Account (r:1 w:1) // Storage: Staking Bonded (r:1 w:1) // Storage: Staking SlashingSpans (r:1 w:1) - // Storage: Staking Validators (r:1 w:1) - // Storage: Staking CounterForValidators (r:1 w:1) - // Storage: Staking Nominators (r:1 w:0) + // Storage: Staking Validators (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking 
CounterForNominators (r:1 w:1) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) // Storage: Balances Locks (r:1 w:1) // Storage: Staking Ledger (r:0 w:1) // Storage: Staking Payee (r:0 w:1) // Storage: Staking SpanSlash (r:0 w:1) fn reap_stash(s: u32, ) -> Weight { - (72_484_000 as Weight) - // Standard Error: 2_000 - .saturating_add((2_452_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(7 as Weight)) - .saturating_add(RocksDbWeight::get().writes(8 as Weight)) + (100_178_000 as Weight) + // Standard Error: 1_000 + .saturating_add((2_358_000 as Weight).saturating_mul(s as Weight)) + .saturating_add(RocksDbWeight::get().reads(11 as Weight)) + .saturating_add(RocksDbWeight::get().writes(12 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } // Storage: Staking CounterForNominators (r:1 w:0) @@ -667,7 +732,9 @@ impl WeightInfo for () { // Storage: Staking Bonded (r:101 w:0) // Storage: Staking Ledger (r:101 w:0) // Storage: Staking SlashingSpans (r:1 w:0) - // Storage: Staking Nominators (r:101 w:0) + // Storage: BagsList ListBags (r:200 w:0) + // Storage: BagsList ListNodes (r:100 w:0) + // Storage: Staking Nominators (r:100 w:0) // Storage: Staking ValidatorCount (r:1 w:0) // Storage: Staking MinimumValidatorCount (r:1 w:0) // Storage: Staking CurrentEra (r:1 w:1) @@ -679,39 +746,43 @@ impl WeightInfo for () { // Storage: Staking ErasStartSessionIndex (r:0 w:1) fn new_era(v: u32, n: u32, ) -> Weight { (0 as Weight) - // Standard Error: 856_000 - .saturating_add((305_057_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 860_000 + .saturating_add((298_721_000 as Weight).saturating_mul(v as Weight)) // Standard Error: 43_000 - .saturating_add((47_890_000 as Weight).saturating_mul(n as Weight)) - .saturating_add(RocksDbWeight::get().reads(9 as Weight)) + 
.saturating_add((49_427_000 as Weight).saturating_mul(n as Weight)) + .saturating_add(RocksDbWeight::get().reads(208 as Weight)) .saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(v as Weight))) - .saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) + .saturating_add(RocksDbWeight::get().reads((4 as Weight).saturating_mul(n as Weight))) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) .saturating_add(RocksDbWeight::get().writes((3 as Weight).saturating_mul(v as Weight))) } + // Storage: Staking CounterForNominators (r:1 w:0) + // Storage: Staking CounterForValidators (r:1 w:0) // Storage: Staking Validators (r:501 w:0) // Storage: Staking Bonded (r:1500 w:0) // Storage: Staking Ledger (r:1500 w:0) // Storage: Staking SlashingSpans (r:21 w:0) - // Storage: Staking Nominators (r:1001 w:0) + // Storage: BagsList ListBags (r:200 w:0) + // Storage: BagsList ListNodes (r:1000 w:0) + // Storage: Staking Nominators (r:1000 w:0) fn get_npos_voters(v: u32, n: u32, s: u32, ) -> Weight { (0 as Weight) - // Standard Error: 98_000 - .saturating_add((25_610_000 as Weight).saturating_mul(v as Weight)) - // Standard Error: 98_000 - .saturating_add((28_064_000 as Weight).saturating_mul(n as Weight)) - // Standard Error: 3_346_000 - .saturating_add((18_123_000 as Weight).saturating_mul(s as Weight)) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) + // Standard Error: 91_000 + .saturating_add((26_605_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 91_000 + .saturating_add((31_481_000 as Weight).saturating_mul(n as Weight)) + // Standard Error: 3_122_000 + .saturating_add((16_672_000 as Weight).saturating_mul(s as Weight)) + .saturating_add(RocksDbWeight::get().reads(204 as Weight)) .saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(v as Weight))) - .saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) + 
.saturating_add(RocksDbWeight::get().reads((4 as Weight).saturating_mul(n as Weight))) .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) } // Storage: Staking Validators (r:501 w:0) fn get_npos_targets(v: u32, ) -> Weight { - (30_422_000 as Weight) - // Standard Error: 33_000 - .saturating_add((11_252_000 as Weight).saturating_mul(v as Weight)) + (0 as Weight) + // Standard Error: 34_000 + .saturating_add((10_558_000 as Weight).saturating_mul(v as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(v as Weight))) } @@ -721,19 +792,22 @@ impl WeightInfo for () { // Storage: Staking MaxNominatorsCount (r:0 w:1) // Storage: Staking MinNominatorBond (r:0 w:1) fn set_staking_limits() -> Weight { - (6_486_000 as Weight) + (6_353_000 as Weight) .saturating_add(RocksDbWeight::get().writes(5 as Weight)) } // Storage: Staking Ledger (r:1 w:0) // Storage: Staking ChillThreshold (r:1 w:0) - // Storage: Staking Nominators (r:1 w:0) - // Storage: Staking Validators (r:1 w:1) - // Storage: Staking MaxValidatorsCount (r:1 w:0) - // Storage: Staking CounterForValidators (r:1 w:1) - // Storage: Staking MinValidatorBond (r:1 w:0) + // Storage: Staking Nominators (r:1 w:1) + // Storage: Staking MaxNominatorsCount (r:1 w:0) + // Storage: Staking CounterForNominators (r:1 w:1) + // Storage: Staking MinNominatorBond (r:1 w:0) + // Storage: Staking Validators (r:1 w:0) + // Storage: BagsList ListNodes (r:2 w:2) + // Storage: BagsList ListBags (r:1 w:1) + // Storage: BagsList CounterForListNodes (r:1 w:1) fn chill_other() -> Weight { - (58_222_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(7 as Weight)) - .saturating_add(RocksDbWeight::get().writes(2 as Weight)) + (83_389_000 as Weight) + .saturating_add(RocksDbWeight::get().reads(11 as Weight)) + .saturating_add(RocksDbWeight::get().writes(6 as Weight)) } } diff --git 
a/primitives/npos-elections/src/traits.rs b/primitives/npos-elections/src/traits.rs index 45b6fa368ae2a..597d7e648fd9b 100644 --- a/primitives/npos-elections/src/traits.rs +++ b/primitives/npos-elections/src/traits.rs @@ -10,8 +10,8 @@ // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. @@ -22,6 +22,7 @@ use crate::{ VoteWeight, }; use codec::Encode; +use scale_info::TypeInfo; use sp_arithmetic::{ traits::{Bounded, UniqueSaturatedInto}, PerThing, @@ -72,7 +73,8 @@ where + Copy + Clone + Bounded - + Encode; + + Encode + + TypeInfo; /// The target type. Needs to be an index (convert to usize). type TargetIndex: UniqueSaturatedInto @@ -82,7 +84,8 @@ where + Copy + Clone + Bounded - + Encode; + + Encode + + TypeInfo; /// The weight/accuracy type of each vote. 
type Accuracy: PerThing128; diff --git a/utils/frame/generate-bags/Cargo.toml b/utils/frame/generate-bags/Cargo.toml new file mode 100644 index 0000000000000..384307fbec9e5 --- /dev/null +++ b/utils/frame/generate-bags/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "generate-bags" +version = "3.0.0" +authors = ["Parity Technologies "] +edition = "2018" +license = "Apache-2.0" +homepage = "https://substrate.dev" +repository = "https://github.com/paritytech/substrate/" +description = "Bag threshold generation script for pallet-bag-list" +readme = "README.md" + +[dependencies] +# FRAME +frame-support = { version = "4.0.0-dev", default-features = false, path = "../../../frame/support" } +frame-election-provider-support = { version = "4.0.0-dev", path = "../../../frame/election-provider-support", features = ["runtime-benchmarks"] } +frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../frame/system" } +pallet-staking = { version = "4.0.0-dev", default-features = false, path = "../../../frame/staking" } + +# primitives +sp-io = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/io" } + +# third party +chrono = { version = "0.4.19" } +git2 = { version = "0.13.20", default-features = false } +num-format = { version = "0.4.0" } +structopt = "0.3.21" diff --git a/utils/frame/generate-bags/node-runtime/Cargo.toml b/utils/frame/generate-bags/node-runtime/Cargo.toml new file mode 100644 index 0000000000000..7fcd981a6bbd6 --- /dev/null +++ b/utils/frame/generate-bags/node-runtime/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "node-runtime-generate-bags" +version = "3.0.0" +authors = ["Parity Technologies "] +edition = "2018" +license = "Apache-2.0" +homepage = "https://substrate.dev" +repository = "https://github.com/paritytech/substrate/" +description = "Bag threshold generation script for pallet-bag-list and node-runtime." 
+readme = "README.md" + +[dependencies] +node-runtime = { version = "3.0.0-dev", path = "../../../../bin/node/runtime" } +generate-bags = { version = "3.0.0", path = "../" } + +# third-party +structopt = "0.3.21" diff --git a/utils/frame/generate-bags/node-runtime/src/main.rs b/utils/frame/generate-bags/node-runtime/src/main.rs new file mode 100644 index 0000000000000..5d36b381a7d0c --- /dev/null +++ b/utils/frame/generate-bags/node-runtime/src/main.rs @@ -0,0 +1,46 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Make the set of bag thresholds to be used with pallet-bags-list. + +use generate_bags::generate_thresholds; +use std::path::PathBuf; +use structopt::StructOpt; + +#[derive(Debug, StructOpt)] +struct Opt { + /// How many bags to generate. + #[structopt(long, default_value = "200")] + n_bags: usize, + + /// Where to write the output. + output: PathBuf, + + /// The total issuance of the currency used to create `VoteWeight`. + #[structopt(short, long)] + total_issuance: u128, + + /// The minimum account balance (i.e. existential deposit) for the currency used to create + /// `VoteWeight`. 
+ #[structopt(short, long)] + minimum_balance: u128, +} + +fn main() -> Result<(), std::io::Error> { + let Opt { n_bags, output, total_issuance, minimum_balance } = Opt::from_args(); + generate_thresholds::(n_bags, &output, total_issuance, minimum_balance) +} diff --git a/utils/frame/generate-bags/src/lib.rs b/utils/frame/generate-bags/src/lib.rs new file mode 100644 index 0000000000000..af9df4435bcab --- /dev/null +++ b/utils/frame/generate-bags/src/lib.rs @@ -0,0 +1,246 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Support code to ease the process of generating bag thresholds. +//! +//! NOTE: this assume the runtime implements [`pallet_staking::Config`], as it requires an +//! implementation of the traits [`frame_support::traits::Currency`] and +//! [`frame_support::traits::CurrencyToVote`]. +//! +//! The process of adding bags to a runtime requires only four steps. +//! +//! 1. Update the runtime definition. +//! +//! ```ignore +//! parameter_types!{ +//! pub const BagThresholds: &'static [u64] = &[]; +//! } +//! +//! impl pallet_bags_list::Config for Runtime { +//! // +//! type BagThresholds = BagThresholds; +//! } +//! ``` +//! +//! 2. Write a little program to generate the definitions. This program exists only to hook together +//! the runtime definitions with the various calculations here. Take a look at +//! 
_utils/frame/generate_bags/node-runtime_ for an example. +//! +//! 3. Run that program: +//! +//! ```sh,notrust +//! $ cargo run -p node-runtime-generate-bags -- --total-issuance 1234 --minimum-balance 1 +//! output.rs ``` +//! +//! 4. Update the runtime definition. +//! +//! ```diff,notrust +//! + mod output; +//! - pub const BagThresholds: &'static [u64] = &[]; +//! + pub const BagThresholds: &'static [u64] = &output::THRESHOLDS; +//! ``` + +use frame_election_provider_support::VoteWeight; +use frame_support::traits::Get; +use std::{ + io::Write, + path::{Path, PathBuf}, +}; + +/// Compute the existential weight for the specified configuration. +/// +/// Note that this value depends on the current issuance, a quantity known to change over time. +/// This makes the project of computing a static value suitable for inclusion in a static, +/// generated file _excitingly unstable_. +fn existential_weight( + total_issuance: u128, + minimum_balance: u128, +) -> VoteWeight { + use frame_support::traits::CurrencyToVote; + use std::convert::TryInto; + + T::CurrencyToVote::to_vote( + minimum_balance + .try_into() + .map_err(|_| "failed to convert minimum_balance to type Balance") + .unwrap(), + total_issuance + .try_into() + .map_err(|_| "failed to convert total_issuance to type Balance") + .unwrap(), + ) +} + +/// Return the path to a header file used in this repository if is exists. +/// +/// Just searches the git working directory root for files matching certain patterns; it's +/// pretty naive. +fn path_to_header_file() -> Option { + let repo = git2::Repository::open_from_env().ok()?; + let workdir = repo.workdir()?; + for file_name in &["HEADER-APACHE2", "HEADER-GPL3", "HEADER", "file_header.txt"] { + let path = workdir.join(file_name); + if path.exists() { + return Some(path) + } + } + None +} + +/// Create an underscore formatter: a formatter which inserts `_` every 3 digits of a number. 
+fn underscore_formatter() -> num_format::CustomFormat { + num_format::CustomFormat::builder() + .grouping(num_format::Grouping::Standard) + .separator("_") + .build() + .expect("format described here meets all constraints") +} + +/// Compute the constant ratio for the thresholds. +/// +/// This ratio ensures that each bag, with the possible exceptions of certain small ones and the +/// final one, is a constant multiple of the previous, while fully occupying the `VoteWeight` +/// space. +pub fn constant_ratio(existential_weight: VoteWeight, n_bags: usize) -> f64 { + ((VoteWeight::MAX as f64 / existential_weight as f64).ln() / ((n_bags - 1) as f64)).exp() +} + +/// Compute the list of bag thresholds. +/// +/// Returns a list of exactly `n_bags` elements, except in the case of overflow. +/// The first element is always `existential_weight`. +/// The last element is always `VoteWeight::MAX`. +/// +/// All other elements are computed from the previous according to the formula +/// `threshold[k + 1] = (threshold[k] * ratio).max(threshold[k] + 1); +pub fn thresholds( + existential_weight: VoteWeight, + constant_ratio: f64, + n_bags: usize, +) -> Vec { + const WEIGHT_LIMIT: f64 = VoteWeight::MAX as f64; + + let mut thresholds = Vec::with_capacity(n_bags); + + if n_bags > 1 { + thresholds.push(existential_weight); + } + + while n_bags > 0 && thresholds.len() < n_bags - 1 { + let last = thresholds.last().copied().unwrap_or(existential_weight); + let successor = (last as f64 * constant_ratio).round().max(last as f64 + 1.0); + if successor < WEIGHT_LIMIT { + thresholds.push(successor as VoteWeight); + } else { + eprintln!("unexpectedly exceeded weight limit; breaking threshold generation loop"); + break + } + } + + thresholds.push(VoteWeight::MAX); + + debug_assert_eq!(thresholds.len(), n_bags); + debug_assert!(n_bags == 0 || thresholds[0] == existential_weight); + debug_assert!(n_bags == 0 || thresholds[thresholds.len() - 1] == VoteWeight::MAX); + + thresholds +} + +/// 
Write a thresholds module to the path specified. +/// +/// Parameters: +/// - `n_bags` the number of bags to generate. +/// - `output` the path to write to; should terminate with a Rust module name, i.e. +/// `foo/bar/thresholds.rs`. +/// - `total_issuance` the total amount of the currency in the network. +/// - `minimum_balance` the minimum balance of the currency required for an account to exist (i.e. +/// existential deposit). +/// +/// This generated module contains, in order: +/// +/// - The contents of the header file in this repository's root, if found. +/// - Module documentation noting that this is autogenerated and when. +/// - Some associated constants. +/// - The constant array of thresholds. +pub fn generate_thresholds( + n_bags: usize, + output: &Path, + total_issuance: u128, + minimum_balance: u128, +) -> Result<(), std::io::Error> { + // ensure the file is accessable + if let Some(parent) = output.parent() { + if !parent.exists() { + std::fs::create_dir_all(parent)?; + } + } + + // copy the header file + if let Some(header_path) = path_to_header_file() { + std::fs::copy(header_path, output)?; + } + + // open an append buffer + let file = std::fs::OpenOptions::new().create(true).append(true).open(output)?; + let mut buf = std::io::BufWriter::new(file); + + // create underscore formatter and format buffer + let mut num_buf = num_format::Buffer::new(); + let format = underscore_formatter(); + + // module docs + let now = chrono::Utc::now(); + writeln!(buf)?; + writeln!(buf, "//! Autogenerated bag thresholds.")?; + writeln!(buf, "//!")?; + writeln!(buf, "//! Generated on {}", now.to_rfc3339())?; + writeln!( + buf, + "//! 
for the {} runtime.", + ::Version::get().spec_name, + )?; + + let existential_weight = existential_weight::(total_issuance, minimum_balance); + num_buf.write_formatted(&existential_weight, &format); + writeln!(buf)?; + writeln!(buf, "/// Existential weight for this runtime.")?; + writeln!(buf, "#[cfg(any(test, feature = \"std\"))]")?; + writeln!(buf, "#[allow(unused)]")?; + writeln!(buf, "pub const EXISTENTIAL_WEIGHT: u64 = {};", num_buf.as_str())?; + + // constant ratio + let constant_ratio = constant_ratio(existential_weight, n_bags); + writeln!(buf)?; + writeln!(buf, "/// Constant ratio between bags for this runtime.")?; + writeln!(buf, "#[cfg(any(test, feature = \"std\"))]")?; + writeln!(buf, "#[allow(unused)]")?; + writeln!(buf, "pub const CONSTANT_RATIO: f64 = {:.16};", constant_ratio)?; + + // thresholds + let thresholds = thresholds(existential_weight, constant_ratio, n_bags); + writeln!(buf)?; + writeln!(buf, "/// Upper thresholds delimiting the bag list.")?; + writeln!(buf, "pub const THRESHOLDS: [u64; {}] = [", thresholds.len())?; + for threshold in thresholds { + num_buf.write_formatted(&threshold, &format); + // u64::MAX, with spacers every 3 digits, is 26 characters wide + writeln!(buf, " {:>26},", num_buf.as_str())?; + } + writeln!(buf, "];")?; + + Ok(()) +} From 91b386ff07a85b3dd50ff3ed29c97e6b29d15f05 Mon Sep 17 00:00:00 2001 From: Xiliang Chen Date: Sun, 19 Sep 2021 05:25:30 +1200 Subject: [PATCH 11/33] make nonce public (#9810) --- frame/system/src/extensions/check_nonce.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/system/src/extensions/check_nonce.rs b/frame/system/src/extensions/check_nonce.rs index 081a0efa3db71..74be83398421e 100644 --- a/frame/system/src/extensions/check_nonce.rs +++ b/frame/system/src/extensions/check_nonce.rs @@ -34,7 +34,7 @@ use sp_std::vec; /// extension sets some kind of priority upon validating transactions. 
#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] #[scale_info(skip_type_params(T))] -pub struct CheckNonce(#[codec(compact)] T::Index); +pub struct CheckNonce(#[codec(compact)] pub T::Index); impl CheckNonce { /// utility constructor. Used only in client/factory code. From 58256613b50e6b3dc4dd6e20b463df0435104bb9 Mon Sep 17 00:00:00 2001 From: Jimmy Chu Date: Mon, 20 Sep 2021 18:50:15 +0800 Subject: [PATCH 12/33] Added template and scripts for generating rustdocs (#9785) Co-authored-by: Denis Pisarev --- .gitlab-ci.yml | 46 +++++++++++++++++++++++++----- .maintain/docs-index-tpl.ejs | 55 ++++++++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 7 deletions(-) create mode 100644 .maintain/docs-index-tpl.ejs diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ecafc9338a587..70a8fd9bbbe93 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -618,6 +618,7 @@ build-rustdoc: variables: <<: *default-vars SKIP_WASM_BUILD: 1 + DOC_INDEX_PAGE: "sc_service/index.html" # default redirected page artifacts: name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}-doc" when: on_success @@ -632,7 +633,7 @@ build-rustdoc: - mv ./target/doc ./crate-docs # FIXME: remove me after CI image gets nonroot - chown -R nonroot:nonroot ./crate-docs - - echo "" > ./crate-docs/index.html + - echo "" > ./crate-docs/index.html - sccache -s #### stage: publish @@ -728,42 +729,73 @@ publish-rustdoc: stage: publish <<: *kubernetes-env <<: *vault-secrets - image: paritytech/tools:latest + image: node:16 variables: GIT_DEPTH: 100 + # --- Following variables are for rustdocs deployment --- + # Space separated values of branches and tags to generate rustdocs + RUSTDOCS_DEPLOY_REFS: "master monthly-2021-09+1 monthly-2021-08 v3.0.0" + # Location of the docs index template + INDEX_TPL: ".maintain/docs-index-tpl.ejs" + # Where the `/latest` symbolic link links to. One of the $RUSTDOCS_DEPLOY_REFS value. 
+ LATEST: "monthly-2021-09+1" rules: - if: $CI_PIPELINE_SOURCE == "pipeline" when: never - if: $CI_PIPELINE_SOURCE == "web" && $CI_COMMIT_REF_NAME == "master" - if: $CI_COMMIT_REF_NAME == "master" + - if: $CI_COMMIT_REF_NAME =~ /^monthly-20[0-9]{2}-[0-9]{2}.*$/ # to support: monthly-2021-09+1 + - if: $CI_COMMIT_REF_NAME =~ /^v[0-9]+\.[0-9]+.*$/ # i.e. v1.0, v2.1rc1 # `needs:` can be removed after CI image gets nonroot. In this case `needs:` stops other # artifacts from being dowloaded by this job. needs: - job: build-rustdoc artifacts: true script: + # If $CI_COMMIT_REF_NAME doesn't match one of $RUSTDOCS_DEPLOY_REFS space-separated values, we + # exit immediately. + # Putting spaces at the front and back to ensure we are not matching just any substring, but the + # whole space-separated value. + - '[[ " ${RUSTDOCS_DEPLOY_REFS} " =~ " ${CI_COMMIT_REF_NAME} " ]] || exit 0' - rm -rf /tmp/* # Set git config - rm -rf .git/config - git config user.email "devops-team@parity.io" - git config user.name "${GITHUB_USER}" - - git config remote.origin.url "https://${GITHUB_TOKEN}@github.com/paritytech/substrate.git" + - git config remote.origin.url "https://${GITHUB_TOKEN}@github.com/paritytech/${CI_PROJECT_NAME}.git" - git config remote.origin.fetch "+refs/heads/*:refs/remotes/origin/*" - git fetch origin gh-pages + # Install `ejs` and generate index.html based on RUSTDOCS_DEPLOY_REFS + - yarn global add ejs + - 'ejs ${INDEX_TPL} -i "{\"deploy_refs\":\"${RUSTDOCS_DEPLOY_REFS}\",\"repo_name\":\"${CI_PROJECT_NAME}\",\"latest\":\"${LATEST}\"}" > /tmp/index.html' # Save README and docs - cp -r ./crate-docs/ /tmp/doc/ - cp README.md /tmp/doc/ - git checkout gh-pages - # Remove everything and restore generated docs and README - - rm -rf ./* - - mv /tmp/doc/* . + # Remove directories no longer necessary, as specified in $RUSTDOCS_DEPLOY_REFS. + # Also ensure $RUSTDOCS_DEPLOY_REFS is non-space + - if [[ ! -z ${RUSTDOCS_DEPLOY_REFS// } ]]; then + for FILE in *; do + if [[ ! 
" $RUSTDOCS_DEPLOY_REFS " =~ " $FILE " ]]; then + echo "Removing ${FILE}..." + rm -rf $FILE + fi + done + fi + # Move the index page & built back + - mv -f /tmp/index.html . + # Ensure the destination dir doesn't exist. + - rm -rf ${CI_COMMIT_REF_NAME} + - mv -f /tmp/doc ${CI_COMMIT_REF_NAME} + # Add the symlink + - '[[ -e "$LATEST" ]] && ln -sf "${LATEST}" latest' # Upload files - git add --all --force # `git commit` has an exit code of > 0 if there is nothing to commit. # This causes GitLab to exit immediately and marks this job failed. # We don't want to mark the entire job failed if there's nothing to # publish though, hence the `|| true`. - - git commit -m "Updated docs for ${CI_COMMIT_REF_NAME}" || + - git commit -m "___Updated docs for ${CI_COMMIT_REF_NAME}___" || echo "___Nothing to commit___" - git push origin gh-pages --force after_script: diff --git a/.maintain/docs-index-tpl.ejs b/.maintain/docs-index-tpl.ejs new file mode 100644 index 0000000000000..81c619a926b2b --- /dev/null +++ b/.maintain/docs-index-tpl.ejs @@ -0,0 +1,55 @@ +<% + const capFirst = s => (s && s[0].toUpperCase() + s.slice(1)) || ""; +%> + + + + + + + + <%= capFirst(repo_name) %> Rustdocs + + + + + + +

+
+

<%= capFirst(repo_name) %> Rustdocs

+
+ +
+
+
+ + From e30db04a7ec3b259e66d7b0334e42e538ed69b96 Mon Sep 17 00:00:00 2001 From: Georges Date: Mon, 20 Sep 2021 11:56:43 +0100 Subject: [PATCH 13/33] Generate storage info for pallet im_online (#9654) * Integrating WrapperOpaque from PR #9738 * Adding storage_info to pallet im-online Changing some `Vec` to `WeakBoundedVec` Adding the following bounds: * `MaxKeys * `MaxPeerInHeartbeats` * `MaxPeerDataEncodingSize` to limit the size of `WeakBoundedVec` * Fix syntax * Need to clone keys * Changes in formatting --- bin/node/runtime/src/lib.rs | 6 ++ frame/im-online/src/benchmarking.rs | 6 +- frame/im-online/src/lib.rs | 133 +++++++++++++++++++++--- frame/im-online/src/mock.rs | 6 ++ frame/offences/benchmarking/src/mock.rs | 6 ++ frame/session/src/lib.rs | 4 +- frame/support/src/traits.rs | 2 +- frame/support/src/traits/misc.rs | 49 ++++++++- frame/support/src/traits/validation.rs | 4 +- primitives/runtime/src/testing.rs | 3 +- 10 files changed, 196 insertions(+), 23 deletions(-) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 9e8adfcd0910e..28f8e5dc3fd6a 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -932,6 +932,9 @@ parameter_types! { /// We prioritize im-online heartbeats over election solution submission. 
pub const StakingUnsignedPriority: TransactionPriority = TransactionPriority::max_value() / 2; pub const MaxAuthorities: u32 = 100; + pub const MaxKeys: u32 = 10_000; + pub const MaxPeerInHeartbeats: u32 = 10_000; + pub const MaxPeerDataEncodingSize: u32 = 1_000; } impl frame_system::offchain::CreateSignedTransaction for Runtime @@ -996,6 +999,9 @@ impl pallet_im_online::Config for Runtime { type ReportUnresponsiveness = Offences; type UnsignedPriority = ImOnlineUnsignedPriority; type WeightInfo = pallet_im_online::weights::SubstrateWeight; + type MaxKeys = MaxKeys; + type MaxPeerInHeartbeats = MaxPeerInHeartbeats; + type MaxPeerDataEncodingSize = MaxPeerDataEncodingSize; } impl pallet_offences::Config for Runtime { diff --git a/frame/im-online/src/benchmarking.rs b/frame/im-online/src/benchmarking.rs index 1043a97f67def..20812f03d28dd 100644 --- a/frame/im-online/src/benchmarking.rs +++ b/frame/im-online/src/benchmarking.rs @@ -22,7 +22,7 @@ use super::*; use frame_benchmarking::{benchmarks, impl_benchmark_test_suite}; -use frame_support::traits::UnfilteredDispatchable; +use frame_support::{traits::UnfilteredDispatchable, WeakBoundedVec}; use frame_system::RawOrigin; use sp_core::{offchain::OpaqueMultiaddr, OpaquePeerId}; use sp_runtime::{ @@ -46,7 +46,9 @@ pub fn create_heartbeat( for _ in 0..k { keys.push(T::AuthorityId::generate_pair(None)); } - Keys::::put(keys.clone()); + let bounded_keys = WeakBoundedVec::<_, T::MaxKeys>::try_from(keys.clone()) + .map_err(|()| "More than the maximum number of keys provided")?; + Keys::::put(bounded_keys); let network_state = OpaqueNetworkState { peer_id: OpaquePeerId::default(), diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index ab4f7001574e5..2fcaed1820ff9 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -74,9 +74,14 @@ mod mock; mod tests; pub mod weights; -use codec::{Decode, Encode}; -use frame_support::traits::{ - EstimateNextSessionRotation, OneSessionHandler, 
ValidatorSet, ValidatorSetWithIdentification, +use codec::{Decode, Encode, MaxEncodedLen}; +use core::convert::TryFrom; +use frame_support::{ + traits::{ + EstimateNextSessionRotation, Get, OneSessionHandler, ValidatorSet, + ValidatorSetWithIdentification, WrapperOpaque, + }, + BoundedSlice, WeakBoundedVec, }; use frame_system::offchain::{SendTransactionTypes, SubmitTransaction}; pub use pallet::*; @@ -220,6 +225,65 @@ where pub validators_len: u32, } +/// A type that is the same as [`OpaqueNetworkState`] but with [`Vec`] replaced with +/// [`WeakBoundedVec`] where Limit is the respective size limit +/// `PeerIdEncodingLimit` represents the size limit of the encoding of `PeerId` +/// `MultiAddrEncodingLimit` represents the size limit of the encoding of `MultiAddr` +/// `AddressesLimit` represents the size limit of the vector of peers connected +#[derive(Clone, Eq, PartialEq, Encode, Decode, MaxEncodedLen, TypeInfo)] +#[codec(mel_bound(PeerIdEncodingLimit: Get, + MultiAddrEncodingLimit: Get, AddressesLimit: Get))] +#[scale_info(skip_type_params(PeerIdEncodingLimit, MultiAddrEncodingLimit, AddressesLimit))] +pub struct BoundedOpaqueNetworkState +where + PeerIdEncodingLimit: Get, + MultiAddrEncodingLimit: Get, + AddressesLimit: Get, +{ + /// PeerId of the local node in SCALE encoded. + pub peer_id: WeakBoundedVec, + /// List of addresses the node knows it can be reached as. + pub external_addresses: + WeakBoundedVec, AddressesLimit>, +} + +impl, MultiAddrEncodingLimit: Get, AddressesLimit: Get> + BoundedOpaqueNetworkState +{ + fn force_from(ons: &OpaqueNetworkState) -> Self { + let peer_id = WeakBoundedVec::<_, PeerIdEncodingLimit>::force_from( + ons.peer_id.0.clone(), + Some( + "Warning: The size of the encoding of PeerId \ + is bigger than expected. 
A runtime configuration \ + adjustment may be needed.", + ), + ); + + let external_addresses = WeakBoundedVec::<_, AddressesLimit>::force_from( + ons.external_addresses + .iter() + .map(|x| { + WeakBoundedVec::<_, MultiAddrEncodingLimit>::force_from( + x.0.clone(), + Some( + "Warning: The size of the encoding of MultiAddr \ + is bigger than expected. A runtime configuration \ + adjustment may be needed.", + ), + ) + }) + .collect(), + Some( + "Warning: The network has more peers than expected \ + A runtime configuration adjustment may be needed.", + ), + ); + + Self { peer_id, external_addresses } + } +} + /// A type for representing the validator id in a session. pub type ValidatorId = <::ValidatorSet as ValidatorSet< ::AccountId, @@ -251,6 +315,7 @@ pub mod pallet { #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] + #[pallet::generate_storage_info] pub struct Pallet(_); #[pallet::config] @@ -261,7 +326,18 @@ pub mod pallet { + RuntimeAppPublic + Default + Ord - + MaybeSerializeDeserialize; + + MaybeSerializeDeserialize + + MaxEncodedLen; + + /// The maximum number of keys that can be added. + type MaxKeys: Get; + + /// The maximum number of peers to be stored in `ReceivedHeartbeats` + type MaxPeerInHeartbeats: Get; + + /// The maximum size of the encoding of `PeerId` and `MultiAddr` that are coming + /// from the hearbeat + type MaxPeerDataEncodingSize: Get; /// The overarching event type. type Event: From> + IsType<::Event>; @@ -333,14 +409,27 @@ pub mod pallet { /// The current set of keys that may issue a heartbeat. #[pallet::storage] #[pallet::getter(fn keys)] - pub(crate) type Keys = StorageValue<_, Vec, ValueQuery>; + pub(crate) type Keys = + StorageValue<_, WeakBoundedVec, ValueQuery>; - /// For each session index, we keep a mapping of `AuthIndex` to - /// `offchain::OpaqueNetworkState`. + /// For each session index, we keep a mapping of 'SessionIndex` and `AuthIndex` to + /// `WrapperOpaque`. 
#[pallet::storage] #[pallet::getter(fn received_heartbeats)] - pub(crate) type ReceivedHeartbeats = - StorageDoubleMap<_, Twox64Concat, SessionIndex, Twox64Concat, AuthIndex, Vec>; + pub(crate) type ReceivedHeartbeats = StorageDoubleMap< + _, + Twox64Concat, + SessionIndex, + Twox64Concat, + AuthIndex, + WrapperOpaque< + BoundedOpaqueNetworkState< + T::MaxPeerDataEncodingSize, + T::MaxPeerDataEncodingSize, + T::MaxPeerInHeartbeats, + >, + >, + >; /// For each session index, we keep a mapping of `ValidatorId` to the /// number of blocks authored by the given authority. @@ -409,11 +498,15 @@ pub mod pallet { if let (false, Some(public)) = (exists, public) { Self::deposit_event(Event::::HeartbeatReceived(public.clone())); - let network_state = heartbeat.network_state.encode(); + let network_state_bounded = BoundedOpaqueNetworkState::< + T::MaxPeerDataEncodingSize, + T::MaxPeerDataEncodingSize, + T::MaxPeerInHeartbeats, + >::force_from(&heartbeat.network_state); ReceivedHeartbeats::::insert( ¤t_session, &heartbeat.authority_index, - &network_state, + WrapperOpaque::from(network_state_bounded), ); Ok(()) @@ -739,13 +832,17 @@ impl Pallet { fn initialize_keys(keys: &[T::AuthorityId]) { if !keys.is_empty() { assert!(Keys::::get().is_empty(), "Keys are already initialized!"); - Keys::::put(keys); + let bounded_keys = >::try_from(keys) + .expect("More than the maximum number of keys provided"); + Keys::::put(bounded_keys); } } #[cfg(test)] fn set_keys(keys: Vec) { - Keys::::put(&keys) + let bounded_keys = WeakBoundedVec::<_, T::MaxKeys>::try_from(keys) + .expect("More than the maximum number of keys provided"); + Keys::::put(bounded_keys); } } @@ -776,7 +873,15 @@ impl OneSessionHandler for Pallet { >::put(block_number + half_session); // Remember who the authorities are for the new session. 
- Keys::::put(validators.map(|x| x.1).collect::>()); + let keys = validators.map(|x| x.1).collect::>(); + let bounded_keys = WeakBoundedVec::<_, T::MaxKeys>::force_from( + keys, + Some( + "Warning: The session has more keys than expected. \ + A runtime configuration adjustment may be needed.", + ), + ); + Keys::::put(bounded_keys); } fn on_before_session_ending() { diff --git a/frame/im-online/src/mock.rs b/frame/im-online/src/mock.rs index e4031b04271b9..92d1fe8e3f8b9 100644 --- a/frame/im-online/src/mock.rs +++ b/frame/im-online/src/mock.rs @@ -217,6 +217,9 @@ impl frame_support::traits::EstimateNextSessionRotation for TestNextSession parameter_types! { pub const UnsignedPriority: u64 = 1 << 20; + pub const MaxKeys: u32 = 10_000; + pub const MaxPeerInHeartbeats: u32 = 10_000; + pub const MaxPeerDataEncodingSize: u32 = 1_000; } impl Config for Runtime { @@ -227,6 +230,9 @@ impl Config for Runtime { type ReportUnresponsiveness = OffenceHandler; type UnsignedPriority = UnsignedPriority; type WeightInfo = (); + type MaxKeys = MaxKeys; + type MaxPeerInHeartbeats = MaxPeerInHeartbeats; + type MaxPeerDataEncodingSize = MaxPeerDataEncodingSize; } impl frame_system::offchain::SendTransactionTypes for Runtime diff --git a/frame/offences/benchmarking/src/mock.rs b/frame/offences/benchmarking/src/mock.rs index 82662295dea84..6973e25371d4f 100644 --- a/frame/offences/benchmarking/src/mock.rs +++ b/frame/offences/benchmarking/src/mock.rs @@ -146,6 +146,9 @@ pallet_staking_reward_curve::build! { parameter_types! 
{ pub const RewardCurve: &'static sp_runtime::curve::PiecewiseLinear<'static> = &I_NPOS; pub const MaxNominatorRewardedPerValidator: u32 = 64; + pub const MaxKeys: u32 = 10_000; + pub const MaxPeerInHeartbeats: u32 = 10_000; + pub const MaxPeerDataEncodingSize: u32 = 1_000; } pub type Extrinsic = sp_runtime::testing::TestXt; @@ -186,6 +189,9 @@ impl pallet_im_online::Config for Test { type ReportUnresponsiveness = Offences; type UnsignedPriority = (); type WeightInfo = (); + type MaxKeys = MaxKeys; + type MaxPeerInHeartbeats = MaxPeerInHeartbeats; + type MaxPeerDataEncodingSize = MaxPeerDataEncodingSize; } impl pallet_offences::Config for Test { diff --git a/frame/session/src/lib.rs b/frame/session/src/lib.rs index 3f5d853d4fa21..e57decec8c651 100644 --- a/frame/session/src/lib.rs +++ b/frame/session/src/lib.rs @@ -114,7 +114,7 @@ mod mock; mod tests; pub mod weights; -use codec::Decode; +use codec::{Decode, MaxEncodedLen}; use frame_support::{ decl_error, decl_event, decl_module, decl_storage, dispatch::{self, DispatchError, DispatchResult}, @@ -367,7 +367,7 @@ pub trait Config: frame_system::Config { type Event: From + Into<::Event>; /// A stable ID for a validator. - type ValidatorId: Member + Parameter; + type ValidatorId: Member + Parameter + MaxEncodedLen; /// A conversion from account ID to validator ID. 
/// diff --git a/frame/support/src/traits.rs b/frame/support/src/traits.rs index efb5559ed0622..d5d0decd117eb 100644 --- a/frame/support/src/traits.rs +++ b/frame/support/src/traits.rs @@ -52,7 +52,7 @@ mod misc; pub use misc::{ Backing, ConstU32, EnsureInherentsAreFirst, EstimateCallFee, ExecuteBlock, ExtrinsicCall, Get, GetBacking, GetDefault, HandleLifetime, IsSubType, IsType, Len, OffchainWorker, - OnKilledAccount, OnNewAccount, SameOrOther, Time, TryDrop, UnixTime, + OnKilledAccount, OnNewAccount, SameOrOther, Time, TryDrop, UnixTime, WrapperOpaque, }; mod stored_map; diff --git a/frame/support/src/traits/misc.rs b/frame/support/src/traits/misc.rs index 1776e1ba320ea..75f2f8ac3fef1 100644 --- a/frame/support/src/traits/misc.rs +++ b/frame/support/src/traits/misc.rs @@ -17,8 +17,10 @@ //! Smaller traits used in FRAME which don't need their own file. -use crate::dispatch::Parameter; +use crate::{dispatch::Parameter, TypeInfo}; +use codec::{Decode, Encode, EncodeLike, Input, MaxEncodedLen}; use sp_runtime::{traits::Block as BlockT, DispatchError}; +use sp_std::vec::Vec; /// Anything that can have a `::len()` method. pub trait Len { @@ -377,3 +379,48 @@ impl, const T: u32> EstimateCallFee for T.into() } } + +/// A wrapper for any type `T` which implement encode/decode in a way compatible with `Vec`. +/// +/// The encoding is the encoding of `T` prepended with the compact encoding of its size in bytes. +/// Thus the encoded value can be decoded as a `Vec`. 
+#[derive(Debug, Eq, PartialEq, Default, Clone, MaxEncodedLen, TypeInfo)] +#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] +pub struct WrapperOpaque(pub T); + +impl EncodeLike for WrapperOpaque {} + +impl Encode for WrapperOpaque { + fn size_hint(&self) -> usize { + // Compact usually takes at most 4 bytes + self.0.size_hint().saturating_add(4) + } + + fn encode_to(&self, dest: &mut O) { + self.0.encode().encode_to(dest); + } + + fn encode(&self) -> Vec { + self.0.encode().encode() + } + + fn using_encoded R>(&self, f: F) -> R { + self.0.encode().using_encoded(f) + } +} + +impl Decode for WrapperOpaque { + fn decode(input: &mut I) -> Result { + Ok(Self(T::decode(&mut &>::decode(input)?[..])?)) + } + + fn skip(input: &mut I) -> Result<(), codec::Error> { + >::skip(input) + } +} + +impl From for WrapperOpaque { + fn from(t: T) -> Self { + Self(t) + } +} diff --git a/frame/support/src/traits/validation.rs b/frame/support/src/traits/validation.rs index f4107ef6e2b02..11ea5a79f67ba 100644 --- a/frame/support/src/traits/validation.rs +++ b/frame/support/src/traits/validation.rs @@ -18,7 +18,7 @@ //! Traits for dealing with validation and validators. use crate::{dispatch::Parameter, weights::Weight}; -use codec::{Codec, Decode}; +use codec::{Codec, Decode, MaxEncodedLen}; use sp_runtime::{ traits::{Convert, Zero}, BoundToRuntimeAppPublic, ConsensusEngineId, Permill, RuntimeAppPublic, @@ -31,7 +31,7 @@ use sp_std::prelude::*; /// Something that can give information about the current validator set. pub trait ValidatorSet { /// Type for representing validator id in a session. - type ValidatorId: Parameter; + type ValidatorId: Parameter + MaxEncodedLen; /// A type for converting `AccountId` to `ValidatorId`. 
type ValidatorIdOf: Convert>; diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index 781f342d43c1e..fe9ba588adb87 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -18,7 +18,7 @@ //! Testing utilities. use crate::{ - codec::{Codec, Decode, Encode}, + codec::{Codec, Decode, Encode, MaxEncodedLen}, generic, scale_info::TypeInfo, traits::{ @@ -59,6 +59,7 @@ use std::{ Deserialize, PartialOrd, Ord, + MaxEncodedLen, TypeInfo, )] pub struct UintAuthorityId(pub u64); From 50c84cb8f92b6446b4a7b3043684b116aaea6866 Mon Sep 17 00:00:00 2001 From: Guillaume Thiolliere Date: Mon, 20 Sep 2021 16:49:04 +0200 Subject: [PATCH 14/33] add feature and fix ci (#9800) --- .gitlab-ci.yml | 2 +- frame/support/test/Cargo.toml | 2 ++ frame/support/test/tests/construct_runtime_ui.rs | 1 + frame/support/test/tests/decl_module_ui.rs | 1 + frame/support/test/tests/decl_storage_ui.rs | 1 + frame/support/test/tests/derive_no_bound_ui.rs | 1 + frame/support/test/tests/pallet_ui.rs | 1 + 7 files changed, 8 insertions(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 70a8fd9bbbe93..fd7bfe7155918 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -516,7 +516,7 @@ test-wasmer-sandbox: variables: <<: *default-vars script: - - time cargo test --release --features runtime-benchmarks,wasmer-sandbox + - time cargo test --release --features runtime-benchmarks,wasmer-sandbox,disable-ui-tests - sccache -s cargo-check-macos: diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index e12880871e5c2..863afceac4a98 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -48,3 +48,5 @@ try-runtime = ["frame-support/try-runtime"] # WARNING: CI only execute pallet test with this feature, # if the feature intended to be used outside, CI and this message need to be updated. 
conditional-storage = [] +# Disable ui tests +disable-ui-tests = [] diff --git a/frame/support/test/tests/construct_runtime_ui.rs b/frame/support/test/tests/construct_runtime_ui.rs index a55e800628582..ee475e37605ef 100644 --- a/frame/support/test/tests/construct_runtime_ui.rs +++ b/frame/support/test/tests/construct_runtime_ui.rs @@ -18,6 +18,7 @@ use std::env; #[rustversion::attr(not(stable), ignore)] +#[cfg(not(feature = "disable-ui-tests"))] #[test] fn ui() { // As trybuild is using `cargo check`, we don't need the real WASM binaries. diff --git a/frame/support/test/tests/decl_module_ui.rs b/frame/support/test/tests/decl_module_ui.rs index 2c097bb6e1332..e84025b9f2564 100644 --- a/frame/support/test/tests/decl_module_ui.rs +++ b/frame/support/test/tests/decl_module_ui.rs @@ -16,6 +16,7 @@ // limitations under the License. #[rustversion::attr(not(stable), ignore)] +#[cfg(not(feature = "disable-ui-tests"))] #[test] fn decl_module_ui() { // As trybuild is using `cargo check`, we don't need the real WASM binaries. diff --git a/frame/support/test/tests/decl_storage_ui.rs b/frame/support/test/tests/decl_storage_ui.rs index 99d2da87aca28..400ddfc0f94f4 100644 --- a/frame/support/test/tests/decl_storage_ui.rs +++ b/frame/support/test/tests/decl_storage_ui.rs @@ -16,6 +16,7 @@ // limitations under the License. #[rustversion::attr(not(stable), ignore)] +#[cfg(not(feature = "disable-ui-tests"))] #[test] fn decl_storage_ui() { // As trybuild is using `cargo check`, we don't need the real WASM binaries. diff --git a/frame/support/test/tests/derive_no_bound_ui.rs b/frame/support/test/tests/derive_no_bound_ui.rs index 434671e19b105..22c116931a47e 100644 --- a/frame/support/test/tests/derive_no_bound_ui.rs +++ b/frame/support/test/tests/derive_no_bound_ui.rs @@ -16,6 +16,7 @@ // limitations under the License. 
#[rustversion::attr(not(stable), ignore)] +#[cfg(not(feature = "disable-ui-tests"))] #[test] fn derive_no_bound_ui() { // As trybuild is using `cargo check`, we don't need the real WASM binaries. diff --git a/frame/support/test/tests/pallet_ui.rs b/frame/support/test/tests/pallet_ui.rs index e5f4a54dfb000..6f56c1efd6d73 100644 --- a/frame/support/test/tests/pallet_ui.rs +++ b/frame/support/test/tests/pallet_ui.rs @@ -16,6 +16,7 @@ // limitations under the License. #[rustversion::attr(not(stable), ignore)] +#[cfg(not(feature = "disable-ui-tests"))] #[test] fn pallet_ui() { // As trybuild is using `cargo check`, we don't need the real WASM binaries. From 0997854ccfd977783591f40f93cd0294e51c3631 Mon Sep 17 00:00:00 2001 From: Roman Date: Tue, 21 Sep 2021 14:32:13 +0300 Subject: [PATCH 15/33] Don't answer peers with a low reputation (#9008) * Init architecture for not answering peers with a low reputation * Get reputation inside of RequestResponsesBehaviour::poll * Filter reputation in RequestResponsesBehaviour * Pass PeersetHandle to RequestResponsesBehaviour * Add more docs * Fix tests compilation * Fix compiler warnings (still FIXME) * Fix tests * Fmt code --- client/network/src/behaviour.rs | 3 + client/network/src/request_responses.rs | 215 +++++++++++++++++++----- client/network/src/service.rs | 1 + client/peerset/src/lib.rs | 22 ++- 4 files changed, 193 insertions(+), 48 deletions(-) diff --git a/client/network/src/behaviour.rs b/client/network/src/behaviour.rs index 08d061ee26b23..7b334175a2805 100644 --- a/client/network/src/behaviour.rs +++ b/client/network/src/behaviour.rs @@ -38,6 +38,7 @@ use libp2p::{ use log::debug; use prost::Message; use sc_consensus::import_queue::{IncomingBlock, Origin}; +use sc_peerset::PeersetHandle; use sp_consensus::BlockOrigin; use sp_runtime::{ traits::{Block as BlockT, NumberFor}, @@ -206,6 +207,7 @@ impl Behaviour { light_client_request_protocol_config: request_responses::ProtocolConfig, // All remaining request protocol 
configs. mut request_response_protocols: Vec, + peerset: PeersetHandle, ) -> Result { // Extract protocol name and add to `request_response_protocols`. let block_request_protocol_name = block_request_protocol_config.name.to_string(); @@ -229,6 +231,7 @@ impl Behaviour { bitswap: bitswap.into(), request_responses: request_responses::RequestResponsesBehaviour::new( request_response_protocols.into_iter(), + peerset, )?, light_client_request_sender, events: VecDeque::new(), diff --git a/client/network/src/request_responses.rs b/client/network/src/request_responses.rs index 6ebc7416c2a35..0908d7510e359 100644 --- a/client/network/src/request_responses.rs +++ b/client/network/src/request_responses.rs @@ -64,6 +64,7 @@ use std::{ }; pub use libp2p::request_response::{InboundFailure, OutboundFailure, RequestId}; +use sc_peerset::{PeersetHandle, BANNED_THRESHOLD}; /// Configuration for a single request-response protocol. #[derive(Debug, Clone)] @@ -256,6 +257,27 @@ pub struct RequestResponsesBehaviour { /// Whenever a response is received on `pending_responses`, insert a channel to be notified /// when the request has been sent out. send_feedback: HashMap>, + + /// Primarily used to get a reputation of a node. + peerset: PeersetHandle, + + /// Pending message request, holds `MessageRequest` as a Future state to poll it + /// until we get a response from `Peerset` + message_request: Option, +} + +// This is a state of processing incoming request Message. +// The main reason of this struct is to hold `get_peer_reputation` as a Future state. +struct MessageRequest { + peer: PeerId, + request_id: RequestId, + request: Vec, + channel: ResponseChannel, ()>>, + protocol: String, + resp_builder: Option>, + // Once we get incoming request we save all params, create an async call to Peerset + // to get the reputation of the peer. + get_peer_reputation: Pin> + Send>>, } /// Generated by the response builder and waiting to be processed. 
@@ -270,7 +292,10 @@ struct RequestProcessingOutcome { impl RequestResponsesBehaviour { /// Creates a new behaviour. Must be passed a list of supported protocols. Returns an error if /// the same protocol is passed twice. - pub fn new(list: impl Iterator) -> Result { + pub fn new( + list: impl Iterator, + peerset: PeersetHandle, + ) -> Result { let mut protocols = HashMap::new(); for protocol in list { let mut cfg = RequestResponseConfig::default(); @@ -304,6 +329,8 @@ impl RequestResponsesBehaviour { pending_responses: Default::default(), pending_responses_arrival_time: Default::default(), send_feedback: Default::default(), + peerset, + message_request: None, }) } @@ -492,6 +519,93 @@ impl NetworkBehaviour for RequestResponsesBehaviour { >, > { 'poll_all: loop { + if let Some(message_request) = self.message_request.take() { + // Now we can can poll `MessageRequest` until we get the reputation + + let MessageRequest { + peer, + request_id, + request, + channel, + protocol, + resp_builder, + mut get_peer_reputation, + } = message_request; + + let reputation = Future::poll(Pin::new(&mut get_peer_reputation), cx); + match reputation { + Poll::Pending => { + // Save the state to poll it again next time. + + self.message_request = Some(MessageRequest { + peer, + request_id, + request, + channel, + protocol, + resp_builder, + get_peer_reputation, + }); + return Poll::Pending + }, + Poll::Ready(reputation) => { + // Once we get the reputation we can continue processing the request. + + let reputation = reputation.expect( + "The channel can only be closed if the peerset no longer exists; qed", + ); + + if reputation < BANNED_THRESHOLD { + log::debug!( + target: "sub-libp2p", + "Cannot handle requests from a node with a low reputation {}: {}", + peer, + reputation, + ); + continue 'poll_all + } + + let (tx, rx) = oneshot::channel(); + + // Submit the request to the "response builder" passed by the user at + // initialization. 
+ if let Some(mut resp_builder) = resp_builder { + // If the response builder is too busy, silently drop `tx`. This + // will be reported by the corresponding `RequestResponse` through + // an `InboundFailure::Omission` event. + let _ = resp_builder.try_send(IncomingRequest { + peer: peer.clone(), + payload: request, + pending_response: tx, + }); + } else { + debug_assert!(false, "Received message on outbound-only protocol."); + } + + let protocol = Cow::from(protocol); + self.pending_responses.push(Box::pin(async move { + // The `tx` created above can be dropped if we are not capable of + // processing this request, which is reflected as a + // `InboundFailure::Omission` event. + if let Ok(response) = rx.await { + Some(RequestProcessingOutcome { + peer, + request_id, + protocol, + inner_channel: channel, + response, + }) + } else { + None + } + })); + + // This `continue` makes sure that `pending_responses` gets polled + // after we have added the new element. + continue 'poll_all + }, + } + } // Poll to see if any response is ready to be sent back. while let Poll::Ready(Some(outcome)) = self.pending_responses.poll_next_unpin(cx) { let RequestProcessingOutcome { @@ -585,42 +699,24 @@ impl NetworkBehaviour for RequestResponsesBehaviour { Instant::now(), ); - let (tx, rx) = oneshot::channel(); - - // Submit the request to the "response builder" passed by the user at - // initialization. - if let Some(resp_builder) = resp_builder { - // If the response builder is too busy, silently drop `tx`. This - // will be reported by the corresponding `RequestResponse` through - // an `InboundFailure::Omission` event. 
- let _ = resp_builder.try_send(IncomingRequest { - peer: peer.clone(), - payload: request, - pending_response: tx, - }); - } else { - debug_assert!(false, "Received message on outbound-only protocol."); - } + let get_peer_reputation = + self.peerset.clone().peer_reputation(peer.clone()); + let get_peer_reputation = Box::pin(get_peer_reputation); - let protocol = protocol.clone(); - self.pending_responses.push(Box::pin(async move { - // The `tx` created above can be dropped if we are not capable of - // processing this request, which is reflected as a - // `InboundFailure::Omission` event. - if let Ok(response) = rx.await { - Some(RequestProcessingOutcome { - peer, - request_id, - protocol, - inner_channel: channel, - response, - }) - } else { - None - } - })); - - // This `continue` makes sure that `pending_responses` gets polled + // Save the Future-like state with params to poll `get_peer_reputation` + // and to continue processing the request once we get the reputation of + // the peer. + self.message_request = Some(MessageRequest { + peer, + request_id, + request, + channel, + protocol: protocol.to_string(), + resp_builder: resp_builder.clone(), + get_peer_reputation, + }); + + // This `continue` makes sure that `message_request` gets polled // after we have added the new element. 
continue 'poll_all }, @@ -934,11 +1030,12 @@ mod tests { swarm::{Swarm, SwarmEvent}, Multiaddr, }; + use sc_peerset::{Peerset, PeersetConfig, SetConfig}; use std::{iter, time::Duration}; fn build_swarm( list: impl Iterator, - ) -> (Swarm, Multiaddr) { + ) -> (Swarm, Multiaddr, Peerset) { let keypair = Keypair::generate_ed25519(); let noise_keys = @@ -950,13 +1047,29 @@ mod tests { .multiplex(libp2p::yamux::YamuxConfig::default()) .boxed(); - let behaviour = RequestResponsesBehaviour::new(list).unwrap(); + let config = PeersetConfig { + sets: vec![SetConfig { + in_peers: u32::max_value(), + out_peers: u32::max_value(), + bootnodes: vec![], + reserved_nodes: Default::default(), + reserved_only: false, + }], + }; + + let (peerset, handle) = Peerset::from_config(config); + + let behaviour = RequestResponsesBehaviour::new(list, handle).unwrap(); let mut swarm = Swarm::new(transport, behaviour, keypair.public().into_peer_id()); let listen_addr: Multiaddr = format!("/memory/{}", rand::random::()).parse().unwrap(); swarm.listen_on(listen_addr.clone()).unwrap(); - (swarm, listen_addr) + (swarm, listen_addr, peerset) + } + + async fn loop_peerset(peerset: Peerset) { + let _: Vec<_> = peerset.collect().await; } #[test] @@ -1007,10 +1120,12 @@ mod tests { Swarm::dial_addr(&mut swarms[0].0, dial_addr).unwrap(); } + let (mut swarm, _, peerset) = swarms.remove(0); + // Process every peerset event in the background. + pool.spawner().spawn_obj(loop_peerset(peerset).boxed().into()).unwrap(); // Running `swarm[0]` in the background. pool.spawner() .spawn_obj({ - let (mut swarm, _) = swarms.remove(0); async move { loop { match swarm.select_next_some().await { @@ -1027,7 +1142,9 @@ mod tests { .unwrap(); // Remove and run the remaining swarm. - let (mut swarm, _) = swarms.remove(0); + let (mut swarm, _, peerset) = swarms.remove(0); + // Process every peerset event in the background. 
+ pool.spawner().spawn_obj(loop_peerset(peerset).boxed().into()).unwrap(); pool.run_until(async move { let mut response_receiver = None; @@ -1105,9 +1222,11 @@ mod tests { // Running `swarm[0]` in the background until a `InboundRequest` event happens, // which is a hint about the test having ended. + let (mut swarm, _, peerset) = swarms.remove(0); + // Process every peerset event in the background. + pool.spawner().spawn_obj(loop_peerset(peerset).boxed().into()).unwrap(); pool.spawner() .spawn_obj({ - let (mut swarm, _) = swarms.remove(0); async move { loop { match swarm.select_next_some().await { @@ -1125,7 +1244,9 @@ mod tests { .unwrap(); // Remove and run the remaining swarm. - let (mut swarm, _) = swarms.remove(0); + let (mut swarm, _, peerset) = swarms.remove(0); + // Process every peerset event in the background. + pool.spawner().spawn_obj(loop_peerset(peerset).boxed().into()).unwrap(); pool.run_until(async move { let mut response_receiver = None; @@ -1195,7 +1316,7 @@ mod tests { build_swarm(protocol_configs.into_iter()).0 }; - let (mut swarm_2, mut swarm_2_handler_1, mut swarm_2_handler_2, listen_add_2) = { + let (mut swarm_2, mut swarm_2_handler_1, mut swarm_2_handler_2, listen_add_2, peerset) = { let (tx_1, rx_1) = mpsc::channel(64); let (tx_2, rx_2) = mpsc::channel(64); @@ -1216,10 +1337,12 @@ mod tests { }, ]; - let (swarm, listen_addr) = build_swarm(protocol_configs.into_iter()); + let (swarm, listen_addr, peerset) = build_swarm(protocol_configs.into_iter()); - (swarm, rx_1, rx_2, listen_addr) + (swarm, rx_1, rx_2, listen_addr, peerset) }; + // Process every peerset event in the background. + pool.spawner().spawn_obj(loop_peerset(peerset).boxed().into()).unwrap(); // Ask swarm 1 to dial swarm 2. There isn't any discovery mechanism in place in this test, // so they wouldn't connect to each other. 
diff --git a/client/network/src/service.rs b/client/network/src/service.rs index 525470145b78c..23f9c614d9069 100644 --- a/client/network/src/service.rs +++ b/client/network/src/service.rs @@ -355,6 +355,7 @@ impl NetworkWorker { bitswap, params.light_client_request_protocol_config, params.network_config.request_response_protocols, + peerset_handle.clone(), ); match result { diff --git a/client/peerset/src/lib.rs b/client/peerset/src/lib.rs index 9c6c5617c34b1..0775354befee4 100644 --- a/client/peerset/src/lib.rs +++ b/client/peerset/src/lib.rs @@ -34,7 +34,7 @@ mod peersstate; -use futures::prelude::*; +use futures::{channel::oneshot, prelude::*}; use log::{debug, error, trace}; use sc_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; use serde_json::json; @@ -49,7 +49,7 @@ use wasm_timer::Delay; pub use libp2p::PeerId; /// We don't accept nodes whose reputation is under this value. -const BANNED_THRESHOLD: i32 = 82 * (i32::MIN / 100); +pub const BANNED_THRESHOLD: i32 = 82 * (i32::MIN / 100); /// Reputation change for a node when we get disconnected from it. const DISCONNECT_REPUTATION_CHANGE: i32 = -256; /// Amount of time between the moment we disconnect from a node and the moment we remove it from @@ -65,6 +65,7 @@ enum Action { ReportPeer(PeerId, ReputationChange), AddToPeersSet(SetId, PeerId), RemoveFromPeersSet(SetId, PeerId), + PeerReputation(PeerId, oneshot::Sender), } /// Identifier of a set in the peerset. @@ -165,6 +166,16 @@ impl PeersetHandle { pub fn remove_from_peers_set(&self, set_id: SetId, peer_id: PeerId) { let _ = self.tx.unbounded_send(Action::RemoveFromPeersSet(set_id, peer_id)); } + + /// Returns the reputation value of the peer. + pub async fn peer_reputation(self, peer_id: PeerId) -> Result { + let (tx, rx) = oneshot::channel(); + + let _ = self.tx.unbounded_send(Action::PeerReputation(peer_id, tx)); + + // The channel can only be closed if the peerset no longer exists. 
+ rx.await.map_err(|_| ()) + } } /// Message that can be sent by the peer set manager (PSM). @@ -454,6 +465,11 @@ impl Peerset { } } + fn on_peer_reputation(&mut self, peer_id: PeerId, pending_response: oneshot::Sender) { + let reputation = self.data.peer_reputation(peer_id); + let _ = pending_response.send(reputation.reputation()); + } + /// Updates the value of `self.latest_time_update` and performs all the updates that happen /// over time, such as reputation increases for staying connected. fn update_time(&mut self) { @@ -744,6 +760,8 @@ impl Stream for Peerset { self.add_to_peers_set(sets_name, peer_id), Action::RemoveFromPeersSet(sets_name, peer_id) => self.on_remove_from_peers_set(sets_name, peer_id), + Action::PeerReputation(peer_id, pending_response) => + self.on_peer_reputation(peer_id, pending_response), } } } From 3486a134e826f115907dcbb1b65d4eb422505b6f Mon Sep 17 00:00:00 2001 From: Squirrel Date: Tue, 21 Sep 2021 12:37:57 +0100 Subject: [PATCH 16/33] check line width not needed (#9820) * check line width not needed (cargo fmt checks this) * Fixing test to work in release mode. 
--- .gitlab-ci.yml | 10 ----- .maintain/gitlab/check_line_width.sh | 55 ---------------------------- frame/bags-list/src/list/tests.rs | 9 ++++- 3 files changed, 7 insertions(+), 67 deletions(-) delete mode 100755 .maintain/gitlab/check_line_width.sh diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index fd7bfe7155918..c5cb6c571b475 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -269,16 +269,6 @@ check-signed-tag: script: - ./.maintain/gitlab/check_signed.sh -check-line-width: - stage: check - image: paritytech/tools:latest - <<: *kubernetes-env - rules: - - if: $CI_COMMIT_REF_NAME =~ /^[0-9]+$/ # PRs - script: - - ./.maintain/gitlab/check_line_width.sh - allow_failure: true - test-dependency-rules: stage: check image: paritytech/tools:latest diff --git a/.maintain/gitlab/check_line_width.sh b/.maintain/gitlab/check_line_width.sh deleted file mode 100755 index ebab3013e4b48..0000000000000 --- a/.maintain/gitlab/check_line_width.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/sh -# -# check if line width of rust source files is not beyond x characters -# -set -e -set -o pipefail - -BASE_ORIGIN="origin" -BASE_BRANCH_NAME="master" -LINE_WIDTH="120" -GOOD_LINE_WIDTH="100" -BASE_BRANCH="${BASE_ORIGIN}/${BASE_BRANCH_NAME}" -git fetch ${BASE_ORIGIN} ${BASE_BRANCH_NAME} --depth 100 -BASE_HASH=$(git merge-base ${BASE_BRANCH} HEAD) - -git diff --name-only ${BASE_HASH} -- \*.rs | ( while read file -do - if [ ! -f ${file} ]; - then - echo "Skipping removed file." - elif git diff ${BASE_HASH} -- ${file} | grep -q "^+.\{$(( $LINE_WIDTH + 1 ))\}" - then - if [ -z "${FAIL}" ] - then - echo "| error!" - echo "| Lines must not be longer than ${LINE_WIDTH} characters." 
- echo "| " - echo "| see more https://github.com/paritytech/substrate/blob/master/docs/STYLE_GUIDE.md" - echo "|" - FAIL="true" - fi - echo "| file: ${file}" - git diff ${BASE_HASH} -- ${file} \ - | grep -n "^+.\{$(( $LINE_WIDTH + 1))\}" - echo "|" - else - if git diff ${BASE_HASH} -- ${file} | grep -q "^+.\{$(( $GOOD_LINE_WIDTH + 1 ))\}" - then - if [ -z "${FAIL}" ] - then - echo "| warning!" - echo "| Lines should be longer than ${GOOD_LINE_WIDTH} characters only in exceptional circumstances!" - echo "| " - echo "| see more https://github.com/paritytech/substrate/blob/master/docs/STYLE_GUIDE.md" - echo "|" - fi - echo "| file: ${file}" - git diff ${BASE_HASH} -- ${file} | grep -n "^+.\{$(( $GOOD_LINE_WIDTH + 1 ))\}" - echo "|" - fi - fi -done - -test -z "${FAIL}" -) diff --git a/frame/bags-list/src/list/tests.rs b/frame/bags-list/src/list/tests.rs index e2730cbf4e33d..14802bac9d1d8 100644 --- a/frame/bags-list/src/list/tests.rs +++ b/frame/bags-list/src/list/tests.rs @@ -537,7 +537,10 @@ mod bags { // Panics in case of duplicate tail insert (which would result in an infinite loop). #[test] - #[should_panic = "system logic error: inserting a node who has the id of tail"] + #[cfg_attr( + debug_assertions, + should_panic = "system logic error: inserting a node who has the id of tail" + )] fn insert_node_duplicate_tail_panics_with_debug_assert() { ExtBuilder::default().build_and_execute(|| { let node = |id, prev, next, bag_upper| Node:: { id, prev, next, bag_upper }; @@ -548,7 +551,9 @@ mod bags { // when inserting a duplicate id that is already the tail assert_eq!(bag_1000.tail, Some(4)); - bag_1000.insert_node_unchecked(node(4, None, None, bag_1000.bag_upper)); // panics + assert_eq!(bag_1000.iter().count(), 3); + bag_1000.insert_node_unchecked(node(4, None, None, bag_1000.bag_upper)); // panics in debug + assert_eq!(bag_1000.iter().count(), 3); // in release we expect it to silently ignore the request. 
}); } From ce3c31f2bfa7e10817a8a0833faddeaee818910d Mon Sep 17 00:00:00 2001 From: Guillaume Thiolliere Date: Tue, 21 Sep 2021 14:36:53 +0200 Subject: [PATCH 17/33] Improve post and pre migration checks for pallet-membership (#9746) * improve post and pre migration checks * prevent some more false positive * prevent another false positive * fix unused import * Apply suggestions from code review --- frame/membership/src/lib.rs | 11 ++++- frame/membership/src/migrations/v4.rs | 70 ++++++++++++--------------- 2 files changed, 41 insertions(+), 40 deletions(-) diff --git a/frame/membership/src/lib.rs b/frame/membership/src/lib.rs index 7922d9efaf569..57a12c7c8a453 100644 --- a/frame/membership/src/lib.rs +++ b/frame/membership/src/lib.rs @@ -790,9 +790,16 @@ mod tests { fn migration_v4() { new_test_ext().execute_with(|| { use frame_support::traits::PalletInfo; - let old_pallet_name = + let old_pallet_name = "OldMembership"; + let new_pallet_name = ::PalletInfo::name::().unwrap(); - let new_pallet_name = "NewMembership"; + + frame_support::storage::migration::move_pallet( + new_pallet_name.as_bytes(), + old_pallet_name.as_bytes(), + ); + + StorageVersion::new(0).put::(); crate::migrations::v4::pre_migrate::(old_pallet_name, new_pallet_name); crate::migrations::v4::migrate::(old_pallet_name, new_pallet_name); diff --git a/frame/membership/src/migrations/v4.rs b/frame/membership/src/migrations/v4.rs index 9f4b15e468b38..c1c944be1fd4f 100644 --- a/frame/membership/src/migrations/v4.rs +++ b/frame/membership/src/migrations/v4.rs @@ -15,8 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use sp_core::hexdisplay::HexDisplay; -use sp_io::{hashing::twox_128, storage}; +use sp_io::hashing::twox_128; use frame_support::{ traits::{ @@ -85,28 +84,22 @@ pub fn pre_migrate>(old_pallet_name: N, new_ let new_pallet_name = new_pallet_name.as_ref(); log_migration("pre-migration", old_pallet_name, new_pallet_name); - let old_pallet_prefix = twox_128(old_pallet_name.as_bytes()); - assert!(storage::next_key(&old_pallet_prefix) - .map_or(true, |next_key| next_key.starts_with(&old_pallet_prefix))); + if new_pallet_name == old_pallet_name { + return + } let new_pallet_prefix = twox_128(new_pallet_name.as_bytes()); - let storage_version_key = - [&new_pallet_prefix, &twox_128(STORAGE_VERSION_STORAGE_KEY_POSTFIX)[..]].concat(); - // ensure nothing is stored in the new prefix. - assert!( - storage::next_key(&new_pallet_prefix).map_or( - // either nothing is there - true, - // or we ensure that it has no common prefix with twox_128(new), - // or isn't the storage version that is already stored using the pallet name - |next_key| { - !next_key.starts_with(&new_pallet_prefix) || next_key == storage_version_key - }, - ), - "unexpected next_key({}) = {:?}", - new_pallet_name, - HexDisplay::from(&storage::next_key(&new_pallet_prefix).unwrap()), + let storage_version_key = twox_128(STORAGE_VERSION_STORAGE_KEY_POSTFIX); + + let mut new_pallet_prefix_iter = frame_support::storage::KeyPrefixIterator::new( + new_pallet_prefix.to_vec(), + new_pallet_prefix.to_vec(), + |key| Ok(key.to_vec()), ); + + // Ensure nothing except maybe the storage_version_key is stored in the new prefix. + assert!(new_pallet_prefix_iter.all(|key| key == storage_version_key)); + assert!(

::on_chain_storage_version() < 4); } @@ -119,26 +112,27 @@ pub fn post_migrate>(old_pallet_name: N, new let new_pallet_name = new_pallet_name.as_ref(); log_migration("post-migration", old_pallet_name, new_pallet_name); - let old_pallet_prefix = twox_128(old_pallet_name.as_bytes()); - #[cfg(test)] - { - let storage_version_key = - [&old_pallet_prefix, &twox_128(STORAGE_VERSION_STORAGE_KEY_POSTFIX)[..]].concat(); - assert!(storage::next_key(&old_pallet_prefix) - .map_or(true, |next_key| !next_key.starts_with(&old_pallet_prefix) || - next_key == storage_version_key)); - } - #[cfg(not(test))] - { - // Assert that nothing remains at the old prefix - assert!(storage::next_key(&old_pallet_prefix) - .map_or(true, |next_key| !next_key.starts_with(&old_pallet_prefix))); + if new_pallet_name == old_pallet_name { + return } + // Assert that nothing remains at the old prefix. + let old_pallet_prefix = twox_128(old_pallet_name.as_bytes()); + let old_pallet_prefix_iter = frame_support::storage::KeyPrefixIterator::new( + old_pallet_prefix.to_vec(), + old_pallet_prefix.to_vec(), + |_| Ok(()), + ); + assert_eq!(old_pallet_prefix_iter.count(), 0); + + // NOTE: storage_version_key is already in the new prefix. let new_pallet_prefix = twox_128(new_pallet_name.as_bytes()); - // Assert that the storages have been moved to the new prefix - assert!(storage::next_key(&new_pallet_prefix) - .map_or(true, |next_key| next_key.starts_with(&new_pallet_prefix))); + let new_pallet_prefix_iter = frame_support::storage::KeyPrefixIterator::new( + new_pallet_prefix.to_vec(), + new_pallet_prefix.to_vec(), + |_| Ok(()), + ); + assert!(new_pallet_prefix_iter.count() >= 1); assert_eq!(

::on_chain_storage_version(), 4); } From 78ce06139243df5fbbf85356a754e6c97ec7cb9c Mon Sep 17 00:00:00 2001 From: ferrell-code Date: Tue, 21 Sep 2021 09:07:36 -0400 Subject: [PATCH 18/33] Bounties Pallet to FrameV2 (#9566) * migrate bounties pallet * events in tests * test import event * Update frame/bounties/src/lib.rs Co-authored-by: Keith Yeung * cargo fmt * line width * benchmarks compile * add migrations * fmt * comments * mod migrations * fix Cargo.toml * never remember cargo fmt * fix migration * migrations and test * change checks in migration * remove unused values * Update frame/bounties/src/migrations/v4.rs * cargo fmt * fix benchmarking * trigger ci Co-authored-by: Keith Yeung Co-authored-by: Guillaume Thiolliere --- Cargo.lock | 1 + frame/bounties/Cargo.toml | 9 +- frame/bounties/src/benchmarking.rs | 34 +- frame/bounties/src/lib.rs | 454 ++++++++++++++------------- frame/bounties/src/migrations/mod.rs | 19 ++ frame/bounties/src/migrations/v4.rs | 230 ++++++++++++++ frame/bounties/src/tests.rs | 58 +++- 7 files changed, 566 insertions(+), 239 deletions(-) create mode 100644 frame/bounties/src/migrations/mod.rs create mode 100644 frame/bounties/src/migrations/v4.rs diff --git a/Cargo.lock b/Cargo.lock index 37773a4ae3a69..839c68b71fbf1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5121,6 +5121,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", + "log 0.4.14", "pallet-balances", "pallet-treasury", "parity-scale-codec", diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 3bb184d5b3393..93a7ababb2ebd 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -22,24 +22,27 @@ sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../pr frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-treasury = { version = "4.0.0-dev", default-features = false, 
path = "../treasury" } - +sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } +sp-core = { version = "4.0.0-dev", path = "../../primitives/core", default-features = false } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } +log = { version = "0.4.14", default-features = false } [dev-dependencies] -sp-io = { version = "4.0.0-dev", path = "../../primitives/io" } -sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } pallet-balances = { version = "4.0.0-dev", path = "../balances" } [features] default = ["std"] std = [ "codec/std", + "sp-core/std", + "sp-io/std", "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", "frame-system/std", "pallet-treasury/std", + "log/std", ] runtime-benchmarks = [ "frame-benchmarking", diff --git a/frame/bounties/src/benchmarking.rs b/frame/bounties/src/benchmarking.rs index 798d929d241f7..1aa1eabdb5177 100644 --- a/frame/bounties/src/benchmarking.rs +++ b/frame/bounties/src/benchmarking.rs @@ -22,11 +22,10 @@ use super::*; use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; -use frame_support::traits::OnInitialize; use frame_system::RawOrigin; use sp_runtime::traits::Bounded; -use crate::Module as Bounties; +use crate::Pallet as Bounties; use pallet_treasury::Pallet as Treasury; const SEED: u32 = 0; @@ -36,10 +35,10 @@ fn create_approved_bounties(n: u32) -> Result<(), &'static str> { for i in 0..n { let (caller, _curator, _fee, value, reason) = setup_bounty::(i, MAX_BYTES); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; Bounties::::approve_bounty(RawOrigin::Root.into(), bounty_id)?; } - ensure!(BountyApprovals::get().len() == n as usize, "Not all bounty approved"); + ensure!(BountyApprovals::::get().len() == n as usize, "Not all bounty approved"); 
Ok(()) } @@ -64,7 +63,7 @@ fn create_bounty( let (caller, curator, fee, value, reason) = setup_bounty::(0, MAX_BYTES); let curator_lookup = T::Lookup::unlookup(curator.clone()); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; Bounties::::approve_bounty(RawOrigin::Root.into(), bounty_id)?; Treasury::::on_initialize(T::BlockNumber::zero()); Bounties::::propose_curator(RawOrigin::Root.into(), bounty_id, curator_lookup.clone(), fee)?; @@ -94,7 +93,7 @@ benchmarks! { approve_bounty { let (caller, curator, fee, value, reason) = setup_bounty::(0, MAX_BYTES); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; }: _(RawOrigin::Root, bounty_id) propose_curator { @@ -102,7 +101,7 @@ benchmarks! { let (caller, curator, fee, value, reason) = setup_bounty::(0, MAX_BYTES); let curator_lookup = T::Lookup::unlookup(curator.clone()); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; Bounties::::approve_bounty(RawOrigin::Root.into(), bounty_id)?; Bounties::::on_initialize(T::BlockNumber::zero()); }: _(RawOrigin::Root, bounty_id, curator_lookup, fee) @@ -112,7 +111,7 @@ benchmarks! { setup_pot_account::(); let (curator_lookup, bounty_id) = create_bounty::()?; Bounties::::on_initialize(T::BlockNumber::zero()); - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; frame_system::Pallet::::set_block_number(T::BountyUpdatePeriod::get() + 1u32.into()); let caller = whitelisted_caller(); }: _(RawOrigin::Signed(caller), bounty_id) @@ -122,7 +121,7 @@ benchmarks! 
{ let (caller, curator, fee, value, reason) = setup_bounty::(0, MAX_BYTES); let curator_lookup = T::Lookup::unlookup(curator.clone()); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; Bounties::::approve_bounty(RawOrigin::Root.into(), bounty_id)?; Bounties::::on_initialize(T::BlockNumber::zero()); Bounties::::propose_curator(RawOrigin::Root.into(), bounty_id, curator_lookup, fee)?; @@ -133,7 +132,7 @@ benchmarks! { let (curator_lookup, bounty_id) = create_bounty::()?; Bounties::::on_initialize(T::BlockNumber::zero()); - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; let curator = T::Lookup::lookup(curator_lookup).map_err(<&str>::from)?; let beneficiary = T::Lookup::unlookup(account("beneficiary", 0, SEED)); @@ -144,10 +143,9 @@ benchmarks! { let (curator_lookup, bounty_id) = create_bounty::()?; Bounties::::on_initialize(T::BlockNumber::zero()); - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; let curator = T::Lookup::lookup(curator_lookup).map_err(<&str>::from)?; - let beneficiary_account: T::AccountId = account("beneficiary", 0, SEED); let beneficiary = T::Lookup::unlookup(beneficiary_account.clone()); Bounties::::award_bounty(RawOrigin::Signed(curator.clone()).into(), bounty_id, beneficiary)?; @@ -164,17 +162,17 @@ benchmarks! 
{ setup_pot_account::(); let (caller, curator, fee, value, reason) = setup_bounty::(0, 0); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; }: close_bounty(RawOrigin::Root, bounty_id) close_bounty_active { setup_pot_account::(); let (curator_lookup, bounty_id) = create_bounty::()?; Bounties::::on_initialize(T::BlockNumber::zero()); - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; }: close_bounty(RawOrigin::Root, bounty_id) verify { - assert_last_event::(RawEvent::BountyCanceled(bounty_id).into()) + assert_last_event::(Event::BountyCanceled(bounty_id).into()) } extend_bounty_expiry { @@ -182,11 +180,11 @@ benchmarks! { let (curator_lookup, bounty_id) = create_bounty::()?; Bounties::::on_initialize(T::BlockNumber::zero()); - let bounty_id = BountyCount::get() - 1; + let bounty_id = BountyCount::::get() - 1; let curator = T::Lookup::lookup(curator_lookup).map_err(<&str>::from)?; }: _(RawOrigin::Signed(curator), bounty_id, Vec::new()) verify { - assert_last_event::(RawEvent::BountyExtended(bounty_id).into()) + assert_last_event::(Event::BountyExtended(bounty_id).into()) } spend_funds { @@ -209,7 +207,7 @@ benchmarks! 
{ verify { ensure!(budget_remaining < BalanceOf::::max_value(), "Budget not used"); ensure!(missed_any == false, "Missed some"); - assert_last_event::(RawEvent::BountyBecameActive(b - 1).into()) + assert_last_event::(Event::BountyBecameActive(b - 1).into()) } } diff --git a/frame/bounties/src/lib.rs b/frame/bounties/src/lib.rs index 77a8e47174019..69380502bad3f 100644 --- a/frame/bounties/src/lib.rs +++ b/frame/bounties/src/lib.rs @@ -75,13 +75,12 @@ #![cfg_attr(not(feature = "std"), no_std)] mod benchmarking; +pub mod migrations; mod tests; pub mod weights; use sp_std::prelude::*; -use frame_support::{decl_error, decl_event, decl_module, decl_storage, ensure}; - use frame_support::traits::{ Currency, ExistenceRequirement::AllowDeath, Get, Imbalance, OnUnbalanced, ReservableCurrency, }; @@ -93,46 +92,17 @@ use sp_runtime::{ use frame_support::{dispatch::DispatchResultWithPostInfo, traits::EnsureOrigin}; -use frame_support::weights::Weight; - -use codec::{Decode, Encode}; -use frame_system::{self as system, ensure_signed}; +use frame_support::pallet_prelude::*; +use frame_system::pallet_prelude::*; use scale_info::TypeInfo; pub use weights::WeightInfo; +pub use pallet::*; + type BalanceOf = pallet_treasury::BalanceOf; type PositiveImbalanceOf = pallet_treasury::PositiveImbalanceOf; -pub trait Config: frame_system::Config + pallet_treasury::Config { - /// The amount held on deposit for placing a bounty proposal. - type BountyDepositBase: Get>; - - /// The delay period for which a bounty beneficiary need to wait before claim the payout. - type BountyDepositPayoutDelay: Get; - - /// Bounty duration in blocks. - type BountyUpdatePeriod: Get; - - /// Percentage of the curator fee that will be reserved upfront as deposit for bounty curator. - type BountyCuratorDeposit: Get; - - /// Minimum value for a bounty. - type BountyValueMinimum: Get>; - - /// The amount held on deposit per byte within the tip report reason or bounty description. 
- type DataDepositPerByte: Get>; - - /// The overarching event type. - type Event: From> + Into<::Event>; - - /// Maximum acceptable reason length. - type MaximumReasonLength: Get; - - /// Weight information for extrinsics in this pallet. - type WeightInfo: WeightInfo; -} - /// An index of a bounty. Just a `u32`. pub type BountyIndex = u32; @@ -186,55 +156,54 @@ pub enum BountyStatus { }, } -// Note :: For backward compatibility reasons, -// pallet-bounties uses Treasury for storage. -// This is temporary solution, soon will get replaced with -// Own storage identifier. -decl_storage! { - trait Store for Module as Treasury { +#[frame_support::pallet] +pub mod pallet { + use super::*; - /// Number of bounty proposals that have been made. - pub BountyCount get(fn bounty_count): BountyIndex; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); - /// Bounties that have been made. - pub Bounties get(fn bounties): - map hasher(twox_64_concat) BountyIndex - => Option, T::BlockNumber>>; + #[pallet::config] + pub trait Config: frame_system::Config + pallet_treasury::Config { + /// The amount held on deposit for placing a bounty proposal. + #[pallet::constant] + type BountyDepositBase: Get>; - /// The description of each bounty. - pub BountyDescriptions get(fn bounty_descriptions): map hasher(twox_64_concat) BountyIndex => Option>; + /// The delay period for which a bounty beneficiary need to wait before claim the payout. + #[pallet::constant] + type BountyDepositPayoutDelay: Get; - /// Bounty indices that have been approved but not yet funded. - pub BountyApprovals get(fn bounty_approvals): Vec; - } -} + /// Bounty duration in blocks. + #[pallet::constant] + type BountyUpdatePeriod: Get; -decl_event!( - pub enum Event - where - Balance = BalanceOf, - ::AccountId, - { - /// New bounty proposal. \[index\] - BountyProposed(BountyIndex), - /// A bounty proposal was rejected; funds were slashed. 
\[index, bond\] - BountyRejected(BountyIndex, Balance), - /// A bounty proposal is funded and became active. \[index\] - BountyBecameActive(BountyIndex), - /// A bounty is awarded to a beneficiary. \[index, beneficiary\] - BountyAwarded(BountyIndex, AccountId), - /// A bounty is claimed by beneficiary. \[index, payout, beneficiary\] - BountyClaimed(BountyIndex, Balance, AccountId), - /// A bounty is cancelled. \[index\] - BountyCanceled(BountyIndex), - /// A bounty expiry is extended. \[index\] - BountyExtended(BountyIndex), + /// Percentage of the curator fee that will be reserved upfront as deposit for bounty + /// curator. + #[pallet::constant] + type BountyCuratorDeposit: Get; + + /// Minimum value for a bounty. + #[pallet::constant] + type BountyValueMinimum: Get>; + + /// The amount held on deposit per byte within the tip report reason or bounty description. + #[pallet::constant] + type DataDepositPerByte: Get>; + + /// The overarching event type. + type Event: From> + IsType<::Event>; + + /// Maximum acceptable reason length. + #[pallet::constant] + type MaximumReasonLength: Get; + + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; } -); -decl_error! { - /// Error for the treasury module. - pub enum Error for Module { + #[pallet::error] + pub enum Error { /// Proposer's balance is too low. InsufficientProposersBalance, /// No proposal or bounty at that index. @@ -255,38 +224,53 @@ decl_error! { /// The bounties cannot be claimed/closed because it's still in the countdown period. Premature, } -} - -decl_module! { - pub struct Module - for enum Call - where origin: T::Origin - { - /// The amount held on deposit per byte within bounty description. - const DataDepositPerByte: BalanceOf = T::DataDepositPerByte::get(); - - /// The amount held on deposit for placing a bounty proposal. 
- const BountyDepositBase: BalanceOf = T::BountyDepositBase::get(); - - /// The delay period for which a bounty beneficiary need to wait before claim the payout. - const BountyDepositPayoutDelay: T::BlockNumber = T::BountyDepositPayoutDelay::get(); - /// Bounty duration in blocks. - const BountyUpdatePeriod: T::BlockNumber = T::BountyUpdatePeriod::get(); - - /// Percentage of the curator fee that will be reserved upfront as deposit for bounty curator. - const BountyCuratorDeposit: Permill = T::BountyCuratorDeposit::get(); - - /// Minimum value for a bounty. - const BountyValueMinimum: BalanceOf = T::BountyValueMinimum::get(); - - /// Maximum acceptable reason length. - const MaximumReasonLength: u32 = T::MaximumReasonLength::get(); - - type Error = Error; - - fn deposit_event() = default; + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + pub enum Event { + /// New bounty proposal. \[index\] + BountyProposed(BountyIndex), + /// A bounty proposal was rejected; funds were slashed. \[index, bond\] + BountyRejected(BountyIndex, BalanceOf), + /// A bounty proposal is funded and became active. \[index\] + BountyBecameActive(BountyIndex), + /// A bounty is awarded to a beneficiary. \[index, beneficiary\] + BountyAwarded(BountyIndex, T::AccountId), + /// A bounty is claimed by beneficiary. \[index, payout, beneficiary\] + BountyClaimed(BountyIndex, BalanceOf, T::AccountId), + /// A bounty is cancelled. \[index\] + BountyCanceled(BountyIndex), + /// A bounty expiry is extended. \[index\] + BountyExtended(BountyIndex), + } + /// Number of bounty proposals that have been made. + #[pallet::storage] + #[pallet::getter(fn bounty_count)] + pub type BountyCount = StorageValue<_, BountyIndex, ValueQuery>; + + /// Bounties that have been made. + #[pallet::storage] + #[pallet::getter(fn bounties)] + pub type Bounties = StorageMap< + _, + Twox64Concat, + BountyIndex, + Bounty, T::BlockNumber>, + >; + + /// The description of each bounty. 
+ #[pallet::storage] + #[pallet::getter(fn bounty_descriptions)] + pub type BountyDescriptions = StorageMap<_, Twox64Concat, BountyIndex, Vec>; + + /// Bounty indices that have been approved but not yet funded. + #[pallet::storage] + #[pallet::getter(fn bounty_approvals)] + pub type BountyApprovals = StorageValue<_, Vec, ValueQuery>; + + #[pallet::call] + impl Pallet { /// Propose a new bounty. /// /// The dispatch origin for this call must be _Signed_. @@ -299,14 +283,15 @@ decl_module! { /// - `fee`: The curator fee. /// - `value`: The total payment amount of this bounty, curator fee included. /// - `description`: The description of this bounty. - #[weight = ::WeightInfo::propose_bounty(description.len() as u32)] - fn propose_bounty( - origin, - #[compact] value: BalanceOf, + #[pallet::weight(::WeightInfo::propose_bounty(description.len() as u32))] + pub fn propose_bounty( + origin: OriginFor, + #[pallet::compact] value: BalanceOf, description: Vec, - ) { + ) -> DispatchResult { let proposer = ensure_signed(origin)?; Self::create_bounty(proposer, description, value)?; + Ok(()) } /// Approve a bounty proposal. At a later time, the bounty will be funded and become active @@ -317,8 +302,11 @@ decl_module! { /// # /// - O(1). /// # - #[weight = ::WeightInfo::approve_bounty()] - fn approve_bounty(origin, #[compact] bounty_id: BountyIndex) { + #[pallet::weight(::WeightInfo::approve_bounty())] + pub fn approve_bounty( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, + ) -> DispatchResult { T::ApproveOrigin::ensure_origin(origin)?; Bounties::::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult { @@ -327,10 +315,11 @@ decl_module! { bounty.status = BountyStatus::Approved; - BountyApprovals::append(bounty_id); + BountyApprovals::::append(bounty_id); Ok(()) })?; + Ok(()) } /// Assign a curator to a funded bounty. @@ -340,18 +329,17 @@ decl_module! { /// # /// - O(1). 
/// # - #[weight = ::WeightInfo::propose_curator()] - fn propose_curator( - origin, - #[compact] bounty_id: BountyIndex, + #[pallet::weight(::WeightInfo::propose_curator())] + pub fn propose_curator( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, curator: ::Source, - #[compact] fee: BalanceOf, - ) { + #[pallet::compact] fee: BalanceOf, + ) -> DispatchResult { T::ApproveOrigin::ensure_origin(origin)?; let curator = T::Lookup::lookup(curator)?; Bounties::::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult { - let mut bounty = maybe_bounty.as_mut().ok_or(Error::::InvalidIndex)?; match bounty.status { BountyStatus::Proposed | BountyStatus::Approved | BountyStatus::Funded => {}, @@ -365,14 +353,15 @@ decl_module! { Ok(()) })?; + Ok(()) } /// Unassign curator from a bounty. /// /// This function can only be called by the `RejectOrigin` a signed origin. /// - /// If this function is called by the `RejectOrigin`, we assume that the curator is malicious - /// or inactive. As a result, we will slash the curator when possible. + /// If this function is called by the `RejectOrigin`, we assume that the curator is + /// malicious or inactive. As a result, we will slash the curator when possible. /// /// If the origin is the curator, we take this as a sign they are unable to do their job and /// they willingly give up. We could slash them, but for now we allow them to recover their @@ -385,11 +374,11 @@ decl_module! { /// # /// - O(1). /// # - #[weight = ::WeightInfo::unassign_curator()] - fn unassign_curator( - origin, - #[compact] bounty_id: BountyIndex, - ) { + #[pallet::weight(::WeightInfo::unassign_curator())] + pub fn unassign_curator( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, + ) -> DispatchResult { let maybe_sender = ensure_signed(origin.clone()) .map(Some) .or_else(|_| T::RejectOrigin::ensure_origin(origin).map(|_| None))?; @@ -407,7 +396,7 @@ decl_module! 
{ BountyStatus::Proposed | BountyStatus::Approved | BountyStatus::Funded => { // No curator to unassign at this point. return Err(Error::::UnexpectedStatus.into()) - } + }, BountyStatus::CuratorProposed { ref curator } => { // A curator has been proposed, but not accepted yet. // Either `RejectOrigin` or the proposed curator can unassign the curator. @@ -425,10 +414,10 @@ decl_module! { // If the sender is not the curator, and the curator is inactive, // slash the curator. if sender != *curator { - let block_number = system::Pallet::::block_number(); + let block_number = frame_system::Pallet::::block_number(); if *update_due < block_number { slash_curator(curator, &mut bounty.curator_deposit); - // Continue to change bounty status below... + // Continue to change bounty status below... } else { // Curator has more time to give an update. return Err(Error::::Premature.into()) @@ -436,7 +425,8 @@ decl_module! { } else { // Else this is the curator, willingly giving up their role. // Give back their deposit. - let err_amount = T::Currency::unreserve(&curator, bounty.curator_deposit); + let err_amount = + T::Currency::unreserve(&curator, bounty.curator_deposit); debug_assert!(err_amount.is_zero()); // Continue to change bounty status below... } @@ -450,12 +440,13 @@ decl_module! { ensure!(maybe_sender.is_none(), BadOrigin); slash_curator(curator, &mut bounty.curator_deposit); // Continue to change bounty status below... - } + }, }; bounty.status = BountyStatus::Funded; Ok(()) })?; + Ok(()) } /// Accept the curator role for a bounty. @@ -466,8 +457,11 @@ decl_module! { /// # /// - O(1). 
/// # - #[weight = ::WeightInfo::accept_curator()] - fn accept_curator(origin, #[compact] bounty_id: BountyIndex) { + #[pallet::weight(::WeightInfo::accept_curator())] + pub fn accept_curator( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, + ) -> DispatchResult { let signer = ensure_signed(origin)?; Bounties::::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult { @@ -481,17 +475,21 @@ decl_module! { T::Currency::reserve(curator, deposit)?; bounty.curator_deposit = deposit; - let update_due = system::Pallet::::block_number() + T::BountyUpdatePeriod::get(); - bounty.status = BountyStatus::Active { curator: curator.clone(), update_due }; + let update_due = frame_system::Pallet::::block_number() + + T::BountyUpdatePeriod::get(); + bounty.status = + BountyStatus::Active { curator: curator.clone(), update_due }; Ok(()) }, _ => Err(Error::::UnexpectedStatus.into()), } })?; + Ok(()) } - /// Award bounty to a beneficiary account. The beneficiary will be able to claim the funds after a delay. + /// Award bounty to a beneficiary account. The beneficiary will be able to claim the funds + /// after a delay. /// /// The dispatch origin for this call must be the curator of this bounty. /// @@ -501,18 +499,19 @@ decl_module! { /// # /// - O(1). /// # - #[weight = ::WeightInfo::award_bounty()] - fn award_bounty(origin, #[compact] bounty_id: BountyIndex, beneficiary: ::Source) { + #[pallet::weight(::WeightInfo::award_bounty())] + pub fn award_bounty( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, + beneficiary: ::Source, + ) -> DispatchResult { let signer = ensure_signed(origin)?; let beneficiary = T::Lookup::lookup(beneficiary)?; Bounties::::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult { let mut bounty = maybe_bounty.as_mut().ok_or(Error::::InvalidIndex)?; match &bounty.status { - BountyStatus::Active { - curator, - .. - } => { + BountyStatus::Active { curator, .. 
} => { ensure!(signer == *curator, Error::::RequireCurator); }, _ => return Err(Error::::UnexpectedStatus.into()), @@ -520,13 +519,15 @@ decl_module! { bounty.status = BountyStatus::PendingPayout { curator: signer, beneficiary: beneficiary.clone(), - unlock_at: system::Pallet::::block_number() + T::BountyDepositPayoutDelay::get(), + unlock_at: frame_system::Pallet::::block_number() + + T::BountyDepositPayoutDelay::get(), }; Ok(()) })?; Self::deposit_event(Event::::BountyAwarded(bounty_id, beneficiary)); + Ok(()) } /// Claim the payout from an awarded bounty after payout delay. @@ -538,14 +539,22 @@ decl_module! { /// # /// - O(1). /// # - #[weight = ::WeightInfo::claim_bounty()] - fn claim_bounty(origin, #[compact] bounty_id: BountyIndex) { + #[pallet::weight(::WeightInfo::claim_bounty())] + pub fn claim_bounty( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, + ) -> DispatchResult { let _ = ensure_signed(origin)?; // anyone can trigger claim Bounties::::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult { let bounty = maybe_bounty.take().ok_or(Error::::InvalidIndex)?; - if let BountyStatus::PendingPayout { curator, beneficiary, unlock_at } = bounty.status { - ensure!(system::Pallet::::block_number() >= unlock_at, Error::::Premature); + if let BountyStatus::PendingPayout { curator, beneficiary, unlock_at } = + bounty.status + { + ensure!( + frame_system::Pallet::::block_number() >= unlock_at, + Error::::Premature + ); let bounty_account = Self::bounty_account_id(bounty_id); let balance = T::Currency::free_balance(&bounty_account); let fee = bounty.fee.min(balance); // just to be safe @@ -554,12 +563,13 @@ decl_module! 
{ debug_assert!(err_amount.is_zero()); let res = T::Currency::transfer(&bounty_account, &curator, fee, AllowDeath); // should not fail debug_assert!(res.is_ok()); - let res = T::Currency::transfer(&bounty_account, &beneficiary, payout, AllowDeath); // should not fail + let res = + T::Currency::transfer(&bounty_account, &beneficiary, payout, AllowDeath); // should not fail debug_assert!(res.is_ok()); *maybe_bounty = None; - BountyDescriptions::remove(bounty_id); + BountyDescriptions::::remove(bounty_id); Self::deposit_event(Event::::BountyClaimed(bounty_id, payout, beneficiary)); Ok(()) @@ -567,6 +577,7 @@ decl_module! { Err(Error::::UnexpectedStatus.into()) } })?; + Ok(()) } /// Cancel a proposed or active bounty. All the funds will be sent to treasury and @@ -579,62 +590,76 @@ decl_module! { /// # /// - O(1). /// # - #[weight = ::WeightInfo::close_bounty_proposed().max(::WeightInfo::close_bounty_active())] - fn close_bounty(origin, #[compact] bounty_id: BountyIndex) -> DispatchResultWithPostInfo { + #[pallet::weight(::WeightInfo::close_bounty_proposed() + .max(::WeightInfo::close_bounty_active()))] + pub fn close_bounty( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, + ) -> DispatchResultWithPostInfo { T::RejectOrigin::ensure_origin(origin)?; - Bounties::::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResultWithPostInfo { - let bounty = maybe_bounty.as_ref().ok_or(Error::::InvalidIndex)?; - - match &bounty.status { - BountyStatus::Proposed => { - // The reject origin would like to cancel a proposed bounty. - BountyDescriptions::remove(bounty_id); - let value = bounty.bond; - let imbalance = T::Currency::slash_reserved(&bounty.proposer, value).0; - T::OnSlash::on_unbalanced(imbalance); - *maybe_bounty = None; - - Self::deposit_event(Event::::BountyRejected(bounty_id, value)); - // Return early, nothing else to do. 
- return Ok(Some(::WeightInfo::close_bounty_proposed()).into()) - }, - BountyStatus::Approved => { - // For weight reasons, we don't allow a council to cancel in this phase. - // We ask for them to wait until it is funded before they can cancel. - return Err(Error::::UnexpectedStatus.into()) - }, - BountyStatus::Funded | - BountyStatus::CuratorProposed { .. } => { - // Nothing extra to do besides the removal of the bounty below. - }, - BountyStatus::Active { curator, .. } => { - // Cancelled by council, refund deposit of the working curator. - let err_amount = T::Currency::unreserve(&curator, bounty.curator_deposit); - debug_assert!(err_amount.is_zero()); - // Then execute removal of the bounty below. - }, - BountyStatus::PendingPayout { .. } => { - // Bounty is already pending payout. If council wants to cancel - // this bounty, it should mean the curator was acting maliciously. - // So the council should first unassign the curator, slashing their - // deposit. - return Err(Error::::PendingPayout.into()) + Bounties::::try_mutate_exists( + bounty_id, + |maybe_bounty| -> DispatchResultWithPostInfo { + let bounty = maybe_bounty.as_ref().ok_or(Error::::InvalidIndex)?; + + match &bounty.status { + BountyStatus::Proposed => { + // The reject origin would like to cancel a proposed bounty. + BountyDescriptions::::remove(bounty_id); + let value = bounty.bond; + let imbalance = T::Currency::slash_reserved(&bounty.proposer, value).0; + T::OnSlash::on_unbalanced(imbalance); + *maybe_bounty = None; + + Self::deposit_event(Event::::BountyRejected(bounty_id, value)); + // Return early, nothing else to do. + return Ok( + Some(::WeightInfo::close_bounty_proposed()).into() + ) + }, + BountyStatus::Approved => { + // For weight reasons, we don't allow a council to cancel in this phase. + // We ask for them to wait until it is funded before they can cancel. + return Err(Error::::UnexpectedStatus.into()) + }, + BountyStatus::Funded | BountyStatus::CuratorProposed { .. 
} => { + // Nothing extra to do besides the removal of the bounty below. + }, + BountyStatus::Active { curator, .. } => { + // Cancelled by council, refund deposit of the working curator. + let err_amount = + T::Currency::unreserve(&curator, bounty.curator_deposit); + debug_assert!(err_amount.is_zero()); + // Then execute removal of the bounty below. + }, + BountyStatus::PendingPayout { .. } => { + // Bounty is already pending payout. If council wants to cancel + // this bounty, it should mean the curator was acting maliciously. + // So the council should first unassign the curator, slashing their + // deposit. + return Err(Error::::PendingPayout.into()) + }, } - } - let bounty_account = Self::bounty_account_id(bounty_id); + let bounty_account = Self::bounty_account_id(bounty_id); - BountyDescriptions::remove(bounty_id); + BountyDescriptions::::remove(bounty_id); - let balance = T::Currency::free_balance(&bounty_account); - let res = T::Currency::transfer(&bounty_account, &Self::account_id(), balance, AllowDeath); // should not fail - debug_assert!(res.is_ok()); - *maybe_bounty = None; + let balance = T::Currency::free_balance(&bounty_account); + let res = T::Currency::transfer( + &bounty_account, + &Self::account_id(), + balance, + AllowDeath, + ); // should not fail + debug_assert!(res.is_ok()); + *maybe_bounty = None; - Self::deposit_event(Event::::BountyCanceled(bounty_id)); - Ok(Some(::WeightInfo::close_bounty_active()).into()) - }) + Self::deposit_event(Event::::BountyCanceled(bounty_id)); + Ok(Some(::WeightInfo::close_bounty_active()).into()) + }, + ) } /// Extend the expiry time of an active bounty. @@ -647,8 +672,12 @@ decl_module! { /// # /// - O(1). 
/// # - #[weight = ::WeightInfo::extend_bounty_expiry()] - fn extend_bounty_expiry(origin, #[compact] bounty_id: BountyIndex, _remark: Vec) { + #[pallet::weight(::WeightInfo::extend_bounty_expiry())] + pub fn extend_bounty_expiry( + origin: OriginFor, + #[pallet::compact] bounty_id: BountyIndex, + _remark: Vec, + ) -> DispatchResult { let signer = ensure_signed(origin)?; Bounties::::try_mutate_exists(bounty_id, |maybe_bounty| -> DispatchResult { @@ -657,7 +686,9 @@ decl_module! { match bounty.status { BountyStatus::Active { ref curator, ref mut update_due } => { ensure!(*curator == signer, Error::::RequireCurator); - *update_due = (system::Pallet::::block_number() + T::BountyUpdatePeriod::get()).max(*update_due); + *update_due = (frame_system::Pallet::::block_number() + + T::BountyUpdatePeriod::get()) + .max(*update_due); }, _ => return Err(Error::::UnexpectedStatus.into()), } @@ -666,11 +697,12 @@ decl_module! { })?; Self::deposit_event(Event::::BountyExtended(bounty_id)); + Ok(()) } } } -impl Module { +impl Pallet { // Add public immutables and private mutables. /// The account ID of the treasury pot. 
@@ -707,7 +739,7 @@ impl Module { T::Currency::reserve(&proposer, bond) .map_err(|_| Error::::InsufficientProposersBalance)?; - BountyCount::put(index + 1); + BountyCount::::put(index + 1); let bounty = Bounty { proposer, @@ -719,22 +751,22 @@ impl Module { }; Bounties::::insert(index, &bounty); - BountyDescriptions::insert(index, description); + BountyDescriptions::::insert(index, description); - Self::deposit_event(RawEvent::BountyProposed(index)); + Self::deposit_event(Event::::BountyProposed(index)); Ok(()) } } -impl pallet_treasury::SpendFunds for Module { +impl pallet_treasury::SpendFunds for Pallet { fn spend_funds( budget_remaining: &mut BalanceOf, imbalance: &mut PositiveImbalanceOf, total_weight: &mut Weight, missed_any: &mut bool, ) { - let bounties_len = BountyApprovals::mutate(|v| { + let bounties_len = BountyApprovals::::mutate(|v| { let bounties_approval_len = v.len() as u32; v.retain(|&index| { Bounties::::mutate(index, |bounty| { @@ -755,7 +787,7 @@ impl pallet_treasury::SpendFunds for Module { bounty.value, )); - Self::deposit_event(RawEvent::BountyBecameActive(index)); + Self::deposit_event(Event::::BountyBecameActive(index)); false } else { *missed_any = true; diff --git a/frame/bounties/src/migrations/mod.rs b/frame/bounties/src/migrations/mod.rs new file mode 100644 index 0000000000000..26d07a0cd5ac8 --- /dev/null +++ b/frame/bounties/src/migrations/mod.rs @@ -0,0 +1,19 @@ +// This file is part of Substrate. + +// Copyright (C) 2019-2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/// Version 4. +pub mod v4; diff --git a/frame/bounties/src/migrations/v4.rs b/frame/bounties/src/migrations/v4.rs new file mode 100644 index 0000000000000..a1ca0e47680b0 --- /dev/null +++ b/frame/bounties/src/migrations/v4.rs @@ -0,0 +1,230 @@ +// This file is part of Substrate. + +// Copyright (C) 2019-2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use frame_support::{ + storage::{generator::StorageValue, StoragePrefixedMap}, + traits::{ + Get, GetStorageVersion, PalletInfoAccess, StorageVersion, + STORAGE_VERSION_STORAGE_KEY_POSTFIX, + }, + weights::Weight, +}; +use sp_core::hexdisplay::HexDisplay; +use sp_io::{hashing::twox_128, storage}; +use sp_std::str; + +use crate as pallet_bounties; + +/// Migrate the storage of the bounties pallet to a new prefix, leaving all other storage untouched +/// +/// This new prefix must be the same as the one set in construct_runtime. For safety, use +/// `PalletInfo` to get it, as: +/// `::PalletInfo::name::`. 
+/// +/// The migration will look into the storage version in order not to trigger a migration on an up +/// to date storage. Thus the on chain storage version must be less than 4 in order to trigger the +/// migration. +pub fn migrate< + T: pallet_bounties::Config, + P: GetStorageVersion + PalletInfoAccess, + N: AsRef, +>( + old_pallet_name: N, + new_pallet_name: N, +) -> Weight { + let old_pallet_name = old_pallet_name.as_ref(); + let new_pallet_name = new_pallet_name.as_ref(); + + if new_pallet_name == old_pallet_name { + log::info!( + target: "runtime::bounties", + "New pallet name is equal to the old prefix. No migration needs to be done.", + ); + return 0 + } + + let on_chain_storage_version =

::on_chain_storage_version(); + log::info!( + target: "runtime::bounties", + "Running migration to v4 for bounties with storage version {:?}", + on_chain_storage_version, + ); + + if on_chain_storage_version < 4 { + let storage_prefix = pallet_bounties::BountyCount::::storage_prefix(); + frame_support::storage::migration::move_storage_from_pallet( + storage_prefix, + old_pallet_name.as_bytes(), + new_pallet_name.as_bytes(), + ); + log_migration("migration", storage_prefix, old_pallet_name, new_pallet_name); + + let storage_prefix = pallet_bounties::Bounties::::storage_prefix(); + frame_support::storage::migration::move_storage_from_pallet( + storage_prefix, + old_pallet_name.as_bytes(), + new_pallet_name.as_bytes(), + ); + log_migration("migration", storage_prefix, old_pallet_name, new_pallet_name); + + let storage_prefix = pallet_bounties::BountyDescriptions::::storage_prefix(); + frame_support::storage::migration::move_storage_from_pallet( + storage_prefix, + old_pallet_name.as_bytes(), + new_pallet_name.as_bytes(), + ); + log_migration("migration", storage_prefix, old_pallet_name, new_pallet_name); + + let storage_prefix = pallet_bounties::BountyApprovals::::storage_prefix(); + frame_support::storage::migration::move_storage_from_pallet( + storage_prefix, + old_pallet_name.as_bytes(), + new_pallet_name.as_bytes(), + ); + log_migration("migration", storage_prefix, old_pallet_name, new_pallet_name); + + StorageVersion::new(4).put::

(); + ::BlockWeights::get().max_block + } else { + log::warn!( + target: "runtime::bounties", + "Attempted to apply migration to v4 but failed because storage version is {:?}", + on_chain_storage_version, + ); + 0 + } +} + +/// Some checks prior to migration. This can be linked to +/// [`frame_support::traits::OnRuntimeUpgrade::pre_upgrade`] for further testing. +/// +/// Panics if anything goes wrong. +pub fn pre_migration>( + old_pallet_name: N, + new_pallet_name: N, +) { + let old_pallet_name = old_pallet_name.as_ref(); + let new_pallet_name = new_pallet_name.as_ref(); + let storage_prefix_bounties_count = pallet_bounties::BountyCount::::storage_prefix(); + let storage_prefix_bounties = pallet_bounties::Bounties::::storage_prefix(); + let storage_prefix_bounties_description = + pallet_bounties::BountyDescriptions::::storage_prefix(); + let storage_prefix_bounties_approvals = pallet_bounties::BountyApprovals::::storage_prefix(); + log_migration("pre-migration", storage_prefix_bounties_count, old_pallet_name, new_pallet_name); + log_migration("pre-migration", storage_prefix_bounties, old_pallet_name, new_pallet_name); + log_migration( + "pre-migration", + storage_prefix_bounties_description, + old_pallet_name, + new_pallet_name, + ); + log_migration( + "pre-migration", + storage_prefix_bounties_approvals, + old_pallet_name, + new_pallet_name, + ); + + let new_pallet_prefix = twox_128(new_pallet_name.as_bytes()); + let storage_version_key = + [&new_pallet_prefix, &twox_128(STORAGE_VERSION_STORAGE_KEY_POSTFIX)[..]].concat(); + + // ensure nothing is stored in the new prefix. 
+ assert!( + storage::next_key(&new_pallet_prefix).map_or( + // either nothing is there + true, + // or we ensure that the next key has no common prefix with twox_128(new), + // or is the pallet version that is already stored using the pallet name + |next_key| { + storage::next_key(&next_key).map_or(true, |next_key| { + !next_key.starts_with(&new_pallet_prefix) || next_key == storage_version_key + }) + }, + ), + "unexpected next_key({}) = {:?}", + new_pallet_name, + HexDisplay::from(&sp_io::storage::next_key(&new_pallet_prefix).unwrap()), + ); + assert!(

::on_chain_storage_version() < 4); +} + +/// Some checks for after migration. This can be linked to +/// [`frame_support::traits::OnRuntimeUpgrade::post_upgrade`] for further testing. +/// +/// Panics if anything goes wrong. +pub fn post_migration>( + old_pallet_name: N, + new_pallet_name: N, +) { + let old_pallet_name = old_pallet_name.as_ref(); + let new_pallet_name = new_pallet_name.as_ref(); + let storage_prefix_bounties_count = pallet_bounties::BountyCount::::storage_prefix(); + let storage_prefix_bounties = pallet_bounties::Bounties::::storage_prefix(); + let storage_prefix_bounties_description = + pallet_bounties::BountyDescriptions::::storage_prefix(); + let storage_prefix_bounties_approvals = pallet_bounties::BountyApprovals::::storage_prefix(); + log_migration( + "post-migration", + storage_prefix_bounties_count, + old_pallet_name, + new_pallet_name, + ); + log_migration("post-migration", storage_prefix_bounties, old_pallet_name, new_pallet_name); + log_migration( + "post-migration", + storage_prefix_bounties_description, + old_pallet_name, + new_pallet_name, + ); + log_migration( + "post-migration", + storage_prefix_bounties_approvals, + old_pallet_name, + new_pallet_name, + ); + + let old_pallet_prefix = twox_128(old_pallet_name.as_bytes()); + let old_bounties_count_key = + [&old_pallet_prefix, &twox_128(storage_prefix_bounties_count)[..]].concat(); + let old_bounties_key = [&old_pallet_prefix, &twox_128(storage_prefix_bounties)[..]].concat(); + let old_bounties_description_key = + [&old_pallet_prefix, &twox_128(storage_prefix_bounties_description)[..]].concat(); + let old_bounties_approvals_key = + [&old_pallet_prefix, &twox_128(storage_prefix_bounties_approvals)[..]].concat(); + assert!(storage::next_key(&old_bounties_count_key) + .map_or(true, |next_key| !next_key.starts_with(&old_bounties_count_key))); + assert!(storage::next_key(&old_bounties_key) + .map_or(true, |next_key| !next_key.starts_with(&old_bounties_key))); + 
assert!(storage::next_key(&old_bounties_description_key) + .map_or(true, |next_key| !next_key.starts_with(&old_bounties_description_key))); + assert!(storage::next_key(&old_bounties_approvals_key) + .map_or(true, |next_key| !next_key.starts_with(&old_bounties_approvals_key))); + + assert_eq!(

::on_chain_storage_version(), 4); +} + +fn log_migration(stage: &str, storage_prefix: &[u8], old_pallet_name: &str, new_pallet_name: &str) { + log::info!( + target: "runtime::bounties", + "{} prefix of storage '{}': '{}' ==> '{}'", + stage, + str::from_utf8(storage_prefix).unwrap_or(""), + old_pallet_name, + new_pallet_name, + ); +} diff --git a/frame/bounties/src/tests.rs b/frame/bounties/src/tests.rs index ff058a3601e07..96c09581fdd1e 100644 --- a/frame/bounties/src/tests.rs +++ b/frame/bounties/src/tests.rs @@ -32,9 +32,11 @@ use sp_core::H256; use sp_runtime::{ testing::Header, traits::{BadOrigin, BlakeTwo256, IdentityLookup}, - Perbill, + Perbill, Storage, }; +use super::Event as BountiesEvent; + type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -160,7 +162,7 @@ pub fn new_test_ext() -> sp_io::TestExternalities { t.into() } -fn last_event() -> RawEvent { +fn last_event() -> BountiesEvent { System::events() .into_iter() .map(|r| r.event) @@ -396,7 +398,7 @@ fn propose_bounty_works() { assert_ok!(Bounties::propose_bounty(Origin::signed(0), 10, b"1234567890".to_vec())); - assert_eq!(last_event(), RawEvent::BountyProposed(0)); + assert_eq!(last_event(), BountiesEvent::BountyProposed(0)); let deposit: u64 = 85 + 5; assert_eq!(Balances::reserved_balance(0), deposit); @@ -458,7 +460,7 @@ fn close_bounty_works() { let deposit: u64 = 80 + 5; - assert_eq!(last_event(), RawEvent::BountyRejected(0, deposit)); + assert_eq!(last_event(), BountiesEvent::BountyRejected(0, deposit)); assert_eq!(Balances::reserved_balance(0), 0); assert_eq!(Balances::free_balance(0), 100 - deposit); @@ -690,7 +692,7 @@ fn award_and_claim_bounty_works() { assert_ok!(Bounties::claim_bounty(Origin::signed(1), 0)); - assert_eq!(last_event(), RawEvent::BountyClaimed(0, 56, 3)); + assert_eq!(last_event(), BountiesEvent::BountyClaimed(0, 56, 3)); assert_eq!(Balances::free_balance(4), 14); // initial 10 + fee 4 @@ -729,7 +731,7 @@ 
fn claim_handles_high_fee() { assert_ok!(Bounties::claim_bounty(Origin::signed(1), 0)); - assert_eq!(last_event(), RawEvent::BountyClaimed(0, 0, 3)); + assert_eq!(last_event(), BountiesEvent::BountyClaimed(0, 0, 3)); assert_eq!(Balances::free_balance(4), 70); // 30 + 50 - 10 assert_eq!(Balances::free_balance(3), 0); @@ -806,7 +808,7 @@ fn award_and_cancel() { assert_ok!(Bounties::unassign_curator(Origin::root(), 0)); assert_ok!(Bounties::close_bounty(Origin::root(), 0)); - assert_eq!(last_event(), RawEvent::BountyCanceled(0)); + assert_eq!(last_event(), BountiesEvent::BountyCanceled(0)); assert_eq!(Balances::free_balance(Bounties::bounty_account_id(0)), 0); @@ -934,6 +936,48 @@ fn extend_expiry() { }); } +#[test] +fn test_migration_v4() { + let mut s = Storage::default(); + + let index: u32 = 10; + + let bounty = Bounty:: { + proposer: 0, + value: 20, + fee: 20, + curator_deposit: 20, + bond: 50, + status: BountyStatus::::Proposed, + }; + + let data = vec![ + (pallet_bounties::BountyCount::::hashed_key().to_vec(), 10.encode().to_vec()), + (pallet_bounties::Bounties::::hashed_key_for(index), bounty.encode().to_vec()), + (pallet_bounties::BountyDescriptions::::hashed_key_for(index), vec![0, 0]), + ( + pallet_bounties::BountyApprovals::::hashed_key().to_vec(), + vec![10 as u32].encode().to_vec(), + ), + ]; + + s.top = data.into_iter().collect(); + + sp_io::TestExternalities::new(s).execute_with(|| { + use frame_support::traits::PalletInfo; + let old_pallet_name = ::PalletInfo::name::() + .expect("Bounties is part of runtime, so it has a name; qed"); + let new_pallet_name = "NewBounties"; + + crate::migrations::v4::pre_migration::(old_pallet_name, new_pallet_name); + crate::migrations::v4::migrate::(old_pallet_name, new_pallet_name); + crate::migrations::v4::post_migration::( + old_pallet_name, + new_pallet_name, + ); + }); +} + #[test] fn genesis_funding_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); From 
700c9e2a476f94438f653423c23b9f0332dbd708 Mon Sep 17 00:00:00 2001 From: Pierre Krieger Date: Tue, 21 Sep 2021 16:59:57 +0200 Subject: [PATCH 19/33] Silence Kademlia InboundRequestServed event (#9797) Co-authored-by: Roman Proskuryakov --- client/network/src/discovery.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/client/network/src/discovery.rs b/client/network/src/discovery.rs index 71e46f73234c7..431de50c0f192 100644 --- a/client/network/src/discovery.rs +++ b/client/network/src/discovery.rs @@ -733,7 +733,8 @@ impl NetworkBehaviour for DiscoveryBehaviour { let ev = DiscoveryOut::Discovered(peer); return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) }, - KademliaEvent::PendingRoutablePeer { .. } => { + KademliaEvent::PendingRoutablePeer { .. } | + KademliaEvent::InboundRequestServed { .. } => { // We are not interested in this event at the moment. }, KademliaEvent::OutboundQueryCompleted { @@ -844,8 +845,8 @@ impl NetworkBehaviour for DiscoveryBehaviour { ), }, // We never start any other type of query. - e => { - debug!(target: "sub-libp2p", "Libp2p => Unhandled Kademlia event: {:?}", e) + KademliaEvent::OutboundQueryCompleted { result: e, .. } => { + warn!(target: "sub-libp2p", "Libp2p => Unhandled Kademlia event: {:?}", e) }, }, NetworkBehaviourAction::DialAddress { address } => From 04bb4e02137f7aba9c20be071bcf9ba6bceaf8da Mon Sep 17 00:00:00 2001 From: Kian Paimani <5588131+kianenigma@users.noreply.github.com> Date: Tue, 21 Sep 2021 16:04:28 +0100 Subject: [PATCH 20/33] `follow-chain` testing mode for try-runtime (and revamp CLI configs). (#9788) * deadlock, need to ask someone to help now * Finally it seems to be working.. 
at least for a few blocks * self-review * major mega revamp * some small fixes * another mega refactor * add license * Apply suggestions from code review * hack around signature verification * Some fixes * Update utils/frame/try-runtime/cli/src/lib.rs Co-authored-by: Zeke Mostov <32168567+emostov@users.noreply.github.com> * Update utils/frame/try-runtime/cli/src/lib.rs Co-authored-by: Zeke Mostov <32168567+emostov@users.noreply.github.com> * Update utils/frame/try-runtime/cli/src/lib.rs Co-authored-by: Zeke Mostov <32168567+emostov@users.noreply.github.com> * final tweaks, hopefully. * a little self-review * Add the ext root check Co-authored-by: Zeke Mostov <32168567+emostov@users.noreply.github.com> --- Cargo.lock | 4 + bin/node/runtime/src/lib.rs | 13 +- frame/executive/src/lib.rs | 31 + frame/try-runtime/src/lib.rs | 8 +- primitives/state-machine/src/testing.rs | 2 +- utils/frame/remote-externalities/Cargo.toml | 2 +- utils/frame/remote-externalities/src/lib.rs | 33 +- utils/frame/try-runtime/cli/Cargo.toml | 6 + .../cli/src/commands/execute_block.rs | 182 ++++ .../cli/src/commands/follow_chain.rs | 176 ++++ .../frame/try-runtime/cli/src/commands/mod.rs | 21 + .../cli/src/commands/offchain_worker.rs | 165 ++++ .../cli/src/commands/on_runtime_upgrade.rs | 92 ++ utils/frame/try-runtime/cli/src/lib.rs | 883 +++++++++++------- 14 files changed, 1238 insertions(+), 380 deletions(-) create mode 100644 utils/frame/try-runtime/cli/src/commands/execute_block.rs create mode 100644 utils/frame/try-runtime/cli/src/commands/follow_chain.rs create mode 100644 utils/frame/try-runtime/cli/src/commands/mod.rs create mode 100644 utils/frame/try-runtime/cli/src/commands/offchain_worker.rs create mode 100644 utils/frame/try-runtime/cli/src/commands/on_runtime_upgrade.rs diff --git a/Cargo.lock b/Cargo.lock index 839c68b71fbf1..e386b600f79ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10687,6 +10687,7 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" 
name = "try-runtime-cli" version = "0.10.0-dev" dependencies = [ + "jsonrpsee-ws-client", "log 0.4.14", "parity-scale-codec", "remote-externalities", @@ -10696,9 +10697,12 @@ dependencies = [ "sc-service", "serde", "sp-core", + "sp-externalities", + "sp-io", "sp-keystore", "sp-runtime", "sp-state-machine", + "sp-version", "structopt", ] diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 28f8e5dc3fd6a..4cf313f8d26f0 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -1585,9 +1585,16 @@ impl_runtime_apis! { #[cfg(feature = "try-runtime")] impl frame_try_runtime::TryRuntime for Runtime { - fn on_runtime_upgrade() -> Result<(Weight, Weight), sp_runtime::RuntimeString> { - let weight = Executive::try_runtime_upgrade()?; - Ok((weight, RuntimeBlockWeights::get().max_block)) + fn on_runtime_upgrade() -> (Weight, Weight) { + // NOTE: intentional unwrap: we don't want to propagate the error backwards, and want to + // have a backtrace here. If any of the pre/post migration checks fail, we shall stop + // right here and right now. + let weight = Executive::try_runtime_upgrade().unwrap(); + (weight, RuntimeBlockWeights::get().max_block) + } + + fn execute_block_no_check(block: Block) -> Weight { + Executive::execute_block_no_check(block) } } diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index 9a0fce4d6b5b4..244253185d238 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -220,6 +220,37 @@ where weight } + /// Execute given block, but don't do any of the [`final_checks`]. + /// + /// Should only be used for testing. 
+ #[cfg(feature = "try-runtime")] + pub fn execute_block_no_check(block: Block) -> frame_support::weights::Weight { + Self::initialize_block(block.header()); + Self::initial_checks(&block); + + let (header, extrinsics) = block.deconstruct(); + + Self::execute_extrinsics_with_book_keeping(extrinsics, *header.number()); + + // do some of the checks that would normally happen in `final_checks`, but definitely skip + // the state root check. + { + let new_header = >::finalize(); + let items_zip = header.digest().logs().iter().zip(new_header.digest().logs().iter()); + for (header_item, computed_item) in items_zip { + header_item.check_equal(&computed_item); + assert!(header_item == computed_item, "Digest item must match that calculated."); + } + + assert!( + header.extrinsics_root() == new_header.extrinsics_root(), + "Transaction trie root must be valid.", + ); + } + + frame_system::Pallet::::block_weight().total() + } + /// Execute all `OnRuntimeUpgrade` of this runtime, including the pre and post migration checks. /// /// This should only be used for testing. diff --git a/frame/try-runtime/src/lib.rs b/frame/try-runtime/src/lib.rs index b2dfdfac6429e..754fc1d2a3303 100644 --- a/frame/try-runtime/src/lib.rs +++ b/frame/try-runtime/src/lib.rs @@ -32,6 +32,12 @@ sp_api::decl_runtime_apis! { /// /// Returns the consumed weight of the migration in case of a successful one, combined with /// the total allowed block weight of the runtime. - fn on_runtime_upgrade() -> Result<(Weight, Weight), sp_runtime::RuntimeString>; + fn on_runtime_upgrade() -> (Weight, Weight); + + /// Execute the given block, but don't check that its state root matches that of yours. + /// + /// This is only sensible where the incoming block is from a different network, yet it has + /// the same block format as the runtime implementing this API. 
+ fn execute_block_no_check(block: Block) -> Weight; } } diff --git a/primitives/state-machine/src/testing.rs b/primitives/state-machine/src/testing.rs index ec1772ba8666f..23f66ee14d87e 100644 --- a/primitives/state-machine/src/testing.rs +++ b/primitives/state-machine/src/testing.rs @@ -159,7 +159,7 @@ where /// /// In contrast to [`commit_all`](Self::commit_all) this will not panic if there are open /// transactions. - fn as_backend(&self) -> InMemoryBackend { + pub fn as_backend(&self) -> InMemoryBackend { let top: Vec<_> = self.overlay.changes().map(|(k, v)| (k.clone(), v.value().cloned())).collect(); let mut transaction = vec![(None, top)]; diff --git a/utils/frame/remote-externalities/Cargo.toml b/utils/frame/remote-externalities/Cargo.toml index d255499d6c3ad..ce774679f94c2 100644 --- a/utils/frame/remote-externalities/Cargo.toml +++ b/utils/frame/remote-externalities/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] jsonrpsee-ws-client = { version = "0.3.0", default-features = false, features = [ "tokio1", -] } +]} jsonrpsee-proc-macros = "0.3.0" env_logger = "0.9" diff --git a/utils/frame/remote-externalities/src/lib.rs b/utils/frame/remote-externalities/src/lib.rs index addb3d1dd3c17..2052780286c66 100644 --- a/utils/frame/remote-externalities/src/lib.rs +++ b/utils/frame/remote-externalities/src/lib.rs @@ -112,8 +112,8 @@ pub struct OnlineConfig { pub at: Option, /// An optional state snapshot file to WRITE to, not for reading. Not written if set to `None`. pub state_snapshot: Option, - /// The modules to scrape. If empty, entire chain state will be scraped. - pub modules: Vec, + /// The pallets to scrape. If empty, entire chain state will be scraped. + pub pallets: Vec, /// Transport config. 
pub transport: Transport, } @@ -134,7 +134,7 @@ impl Default for OnlineConfig { transport: Transport { uri: DEFAULT_TARGET.to_owned(), client: None }, at: None, state_snapshot: None, - modules: vec![], + pallets: vec![], } } } @@ -360,9 +360,9 @@ impl Builder { .clone(); info!(target: LOG_TARGET, "scraping key-pairs from remote @ {:?}", at); - let mut keys_and_values = if config.modules.len() > 0 { + let mut keys_and_values = if config.pallets.len() > 0 { let mut filtered_kv = vec![]; - for f in config.modules.iter() { + for f in config.pallets.iter() { let hashed_prefix = StorageKey(twox_128(f.as_bytes()).to_vec()); let module_kv = self.rpc_get_pairs_paged(hashed_prefix.clone(), at).await?; info!( @@ -376,7 +376,7 @@ impl Builder { } filtered_kv } else { - info!(target: LOG_TARGET, "downloading data for all modules."); + info!(target: LOG_TARGET, "downloading data for all pallets."); self.rpc_get_pairs_paged(StorageKey(vec![]), at).await? }; @@ -482,12 +482,23 @@ impl Builder { self } + /// overwrite the `at` value, if `mode` is set to [`Mode::Online`]. + /// + /// noop if `mode` is [`Mode::Offline`] + pub fn overwrite_online_at(mut self, at: B::Hash) -> Self { + if let Mode::Online(mut online) = self.mode.clone() { + online.at = Some(at); + self.mode = Mode::Online(online); + } + self + } + /// Build the test externalities. 
pub async fn build(self) -> Result { let kv = self.pre_build().await?; let mut ext = TestExternalities::new_empty(); - debug!(target: LOG_TARGET, "injecting a total of {} keys", kv.len()); + info!(target: LOG_TARGET, "injecting a total of {} keys", kv.len()); for (k, v) in kv { let (k, v) = (k.0, v.0); // Insert the key,value pair into the test trie backend @@ -541,7 +552,7 @@ mod remote_tests { init_logger(); Builder::::new() .mode(Mode::Online(OnlineConfig { - modules: vec!["System".to_owned()], + pallets: vec!["System".to_owned()], ..Default::default() })) .build() @@ -555,7 +566,7 @@ mod remote_tests { init_logger(); Builder::::new() .mode(Mode::Online(OnlineConfig { - modules: vec![ + pallets: vec![ "Proxy".to_owned(), "Multisig".to_owned(), "PhragmenElection".to_owned(), @@ -583,7 +594,7 @@ mod remote_tests { init_logger(); Builder::::new() .mode(Mode::Online(OnlineConfig { - modules: vec!["PhragmenElection".to_owned()], + pallets: vec!["PhragmenElection".to_owned()], ..Default::default() })) .build() @@ -609,7 +620,7 @@ mod remote_tests { Builder::::new() .mode(Mode::Online(OnlineConfig { state_snapshot: Some(SnapshotConfig::new("test_snapshot_to_remove.bin")), - modules: vec!["Balances".to_owned()], + pallets: vec!["Balances".to_owned()], ..Default::default() })) .build() diff --git a/utils/frame/try-runtime/cli/Cargo.toml b/utils/frame/try-runtime/cli/Cargo.toml index 5cc5ae6ee58bb..11b899db4ca47 100644 --- a/utils/frame/try-runtime/cli/Cargo.toml +++ b/utils/frame/try-runtime/cli/Cargo.toml @@ -25,6 +25,12 @@ sc-chain-spec = { version = "4.0.0-dev", path = "../../../../client/chain-spec" sp-state-machine = { version = "0.10.0-dev", path = "../../../../primitives/state-machine" } sp-runtime = { version = "4.0.0-dev", path = "../../../../primitives/runtime" } sp-core = { version = "4.0.0-dev", path = "../../../../primitives/core" } +sp-io = { version = "4.0.0-dev", path = "../../../../primitives/io" } sp-keystore = { version = "0.10.0-dev", path = 
"../../../../primitives/keystore" } +sp-externalities = { version = "0.10.0-dev", path = "../../../../primitives/externalities" } +sp-version = { version = "4.0.0-dev", path = "../../../../primitives/version" } remote-externalities = { version = "0.10.0-dev", path = "../../remote-externalities" } +jsonrpsee-ws-client = { version = "0.3.0", default-features = false, features = [ + "tokio1", +]} diff --git a/utils/frame/try-runtime/cli/src/commands/execute_block.rs b/utils/frame/try-runtime/cli/src/commands/execute_block.rs new file mode 100644 index 0000000000000..19422db90119f --- /dev/null +++ b/utils/frame/try-runtime/cli/src/commands/execute_block.rs @@ -0,0 +1,182 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + build_executor, ensure_matching_spec, extract_code, full_extensions, hash_of, local_spec, + state_machine_call, SharedParams, State, LOG_TARGET, +}; +use remote_externalities::rpc_api; +use sc_service::{Configuration, NativeExecutionDispatch}; +use sp_core::storage::well_known_keys; +use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; +use std::{fmt::Debug, str::FromStr}; + +/// Configurations of the [`Command::ExecuteBlock`]. +#[derive(Debug, Clone, structopt::StructOpt)] +pub struct ExecuteBlockCmd { + /// Overwrite the wasm code in state or not. 
+ #[structopt(long)] + overwrite_wasm_code: bool, + + /// If set, then the state root check is disabled by the virtue of calling into + /// `TryRuntime_execute_block_no_check` instead of + /// `Core_execute_block`. + #[structopt(long)] + no_check: bool, + + /// The block hash at which to fetch the block. + /// + /// If the `live` state type is being used, then this can be omitted, and is equal to whatever + /// the `state::at` is. Only use this (with care) when combined with a snapshot. + #[structopt( + long, + multiple = false, + parse(try_from_str = crate::parse::hash) + )] + block_at: Option, + + /// The ws uri from which to fetch the block. + /// + /// If the `live` state type is being used, then this can be omitted, and is equal to whatever + /// the `state::uri` is. Only use this (with care) when combined with a snapshot. + #[structopt( + long, + multiple = false, + parse(try_from_str = crate::parse::url) + )] + block_ws_uri: Option, + + /// The state type to use. + /// + /// For this command only, if the `live` is used, then state of the parent block is fetched. + /// + /// If `block_at` is provided, then the [`State::Live::at`] is being ignored. + #[structopt(subcommand)] + state: State, +} + +impl ExecuteBlockCmd { + fn block_at(&self) -> sc_cli::Result + where + Block::Hash: FromStr, + ::Err: Debug, + { + match (&self.block_at, &self.state) { + (Some(block_at), State::Snap { .. }) => hash_of::(&block_at), + (Some(block_at), State::Live { .. }) => { + log::warn!(target: LOG_TARGET, "--block-at is provided while state type is live. the `Live::at` will be ignored"); + hash_of::(&block_at) + }, + (None, State::Live { at: Some(at), .. }) => hash_of::(&at), + _ => { + panic!("either `--block-at` must be provided, or state must be `live with a proper `--at``"); + }, + } + } + + fn block_ws_uri(&self) -> String + where + Block::Hash: FromStr, + ::Err: Debug, + { + match (&self.block_ws_uri, &self.state) { + (Some(block_ws_uri), State::Snap { .. 
}) => block_ws_uri.to_owned(), + (Some(block_ws_uri), State::Live { .. }) => { + log::error!(target: LOG_TARGET, "--block-uri is provided while state type is live, Are you sure you know what you are doing?"); + block_ws_uri.to_owned() + }, + (None, State::Live { uri, .. }) => uri.clone(), + (None, State::Snap { .. }) => { + panic!("either `--block-uri` must be provided, or state must be `live`"); + }, + } + } +} + +pub(crate) async fn execute_block( + shared: SharedParams, + command: ExecuteBlockCmd, + config: Configuration, +) -> sc_cli::Result<()> +where + Block: BlockT + serde::de::DeserializeOwned, + Block::Hash: FromStr, + ::Err: Debug, + NumberFor: FromStr, + as FromStr>::Err: Debug, + ExecDispatch: NativeExecutionDispatch + 'static, +{ + let executor = build_executor::(&shared, &config); + let execution = shared.execution; + + let block_at = command.block_at::()?; + let block_ws_uri = command.block_ws_uri::(); + let block: Block = rpc_api::get_block::(block_ws_uri.clone(), block_at).await?; + let parent_hash = block.header().parent_hash(); + log::info!( + target: LOG_TARGET, + "fetched block from {:?}, parent_hash to fetch the state {:?}", + block_ws_uri, + parent_hash + ); + + let ext = { + let builder = command + .state + .builder::()? + // make sure the state is being build with the parent hash, if it is online. + .overwrite_online_at(parent_hash.to_owned()); + + let builder = if command.overwrite_wasm_code { + let (code_key, code) = extract_code(&config.chain_spec)?; + builder.inject_key_value(&[(code_key, code)]) + } else { + builder.inject_hashed_key(well_known_keys::CODE) + }; + + builder.build().await? + }; + + // A digest item gets added when the runtime is processing the block, so we need to pop + // the last one to be consistent with what a gossiped block would contain. 
+ let (mut header, extrinsics) = block.deconstruct(); + header.digest_mut().pop(); + let block = Block::new(header, extrinsics); + + let (expected_spec_name, expected_spec_version) = + local_spec::(&ext, &executor); + ensure_matching_spec::( + block_ws_uri.clone(), + expected_spec_name, + expected_spec_version, + shared.no_spec_name_check, + ) + .await; + + let _ = state_machine_call::( + &ext, + &executor, + execution, + if command.no_check { "TryRuntime_execute_block_no_check" } else { "Core_execute_block" }, + block.encode().as_ref(), + full_extensions(), + )?; + + log::info!(target: LOG_TARGET, "Core_execute_block executed without errors."); + + Ok(()) +} diff --git a/utils/frame/try-runtime/cli/src/commands/follow_chain.rs b/utils/frame/try-runtime/cli/src/commands/follow_chain.rs new file mode 100644 index 0000000000000..0526f5d327fb2 --- /dev/null +++ b/utils/frame/try-runtime/cli/src/commands/follow_chain.rs @@ -0,0 +1,176 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use crate::{ + build_executor, ensure_matching_spec, extract_code, full_extensions, local_spec, parse, + state_machine_call, SharedParams, LOG_TARGET, +}; +use jsonrpsee_ws_client::{ + types::{traits::SubscriptionClient, v2::params::JsonRpcParams, Subscription}, + WsClientBuilder, +}; +use parity_scale_codec::Decode; +use remote_externalities::{rpc_api, Builder, Mode, OnlineConfig}; +use sc_executor::NativeExecutionDispatch; +use sc_service::Configuration; +use sp_core::H256; +use sp_runtime::traits::{Block as BlockT, Header, NumberFor}; +use std::{fmt::Debug, str::FromStr}; + +const SUB: &'static str = "chain_subscribeFinalizedHeads"; +const UN_SUB: &'static str = "chain_unsubscribeFinalizedHeads"; + +/// Configurations of the [`Command::FollowChain`]. +#[derive(Debug, Clone, structopt::StructOpt)] +pub struct FollowChainCmd { + /// The url to connect to. + #[structopt( + short, + long, + parse(try_from_str = parse::url), + )] + uri: String, +} + +pub(crate) async fn follow_chain( + shared: SharedParams, + command: FollowChainCmd, + config: Configuration, +) -> sc_cli::Result<()> +where + Block: BlockT + serde::de::DeserializeOwned, + Block::Hash: FromStr, + Block::Header: serde::de::DeserializeOwned, + ::Err: Debug, + NumberFor: FromStr, + as FromStr>::Err: Debug, + ExecDispatch: NativeExecutionDispatch + 'static, +{ + let mut maybe_state_ext = None; + + let client = WsClientBuilder::default() + .connection_timeout(std::time::Duration::new(20, 0)) + .max_notifs_per_subscription(1024) + .max_request_body_size(u32::MAX) + .build(&command.uri) + .await + .unwrap(); + + log::info!(target: LOG_TARGET, "subscribing to {:?} / {:?}", SUB, UN_SUB); + let mut subscription: Subscription = + client.subscribe(&SUB, JsonRpcParams::NoParams, &UN_SUB).await.unwrap(); + + let (code_key, code) = extract_code(&config.chain_spec)?; + let executor = build_executor::(&shared, &config); + let execution = shared.execution; + + loop { + let header = match subscription.next().await { + 
Ok(Some(header)) => header, + Ok(None) => { + log::warn!("subscription returned `None`. Probably decoding has failed."); + break + }, + Err(why) => { + log::warn!("subscription returned error: {:?}.", why); + continue + }, + }; + + let hash = header.hash(); + let number = header.number(); + + let block = rpc_api::get_block::(&command.uri, hash).await.unwrap(); + + log::debug!( + target: LOG_TARGET, + "new block event: {:?} => {:?}, extrinsics: {}", + hash, + number, + block.extrinsics().len() + ); + + // create an ext at the state of this block, whatever is the first subscription event. + if maybe_state_ext.is_none() { + let builder = Builder::::new().mode(Mode::Online(OnlineConfig { + transport: command.uri.clone().into(), + at: Some(header.parent_hash().clone()), + ..Default::default() + })); + + let new_ext = + builder.inject_key_value(&[(code_key.clone(), code.clone())]).build().await?; + log::info!( + target: LOG_TARGET, + "initialized state externalities at {:?}, storage root {:?}", + number, + new_ext.as_backend().root() + ); + + let (expected_spec_name, expected_spec_version) = + local_spec::(&new_ext, &executor); + ensure_matching_spec::( + command.uri.clone(), + expected_spec_name, + expected_spec_version, + shared.no_spec_name_check, + ) + .await; + + maybe_state_ext = Some(new_ext); + } + + let state_ext = + maybe_state_ext.as_mut().expect("state_ext either existed or was just created"); + + let (mut changes, encoded_result) = state_machine_call::( + &state_ext, + &executor, + execution, + "TryRuntime_execute_block_no_check", + block.encode().as_ref(), + full_extensions(), + )?; + + let consumed_weight = ::decode(&mut &*encoded_result) + .map_err(|e| format!("failed to decode output: {:?}", e))?; + + let storage_changes = changes + .drain_storage_changes::<_, _, NumberFor>( + &state_ext.backend, + None, + Default::default(), + &mut Default::default(), + ) + .unwrap(); + state_ext.backend.apply_transaction( + storage_changes.transaction_storage_root, + 
storage_changes.transaction, + ); + + log::info!( + target: LOG_TARGET, + "executed block {}, consumed weight {}, new storage root {:?}", + number, + consumed_weight, + state_ext.as_backend().root(), + ); + } + + log::error!(target: LOG_TARGET, "ws subscription must have terminated."); + Ok(()) +} diff --git a/utils/frame/try-runtime/cli/src/commands/mod.rs b/utils/frame/try-runtime/cli/src/commands/mod.rs new file mode 100644 index 0000000000000..bfd8290fb31c1 --- /dev/null +++ b/utils/frame/try-runtime/cli/src/commands/mod.rs @@ -0,0 +1,21 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +pub(crate) mod execute_block; +pub(crate) mod follow_chain; +pub(crate) mod offchain_worker; +pub(crate) mod on_runtime_upgrade; diff --git a/utils/frame/try-runtime/cli/src/commands/offchain_worker.rs b/utils/frame/try-runtime/cli/src/commands/offchain_worker.rs new file mode 100644 index 0000000000000..6f37e4b3849fa --- /dev/null +++ b/utils/frame/try-runtime/cli/src/commands/offchain_worker.rs @@ -0,0 +1,165 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + build_executor, ensure_matching_spec, extract_code, full_extensions, hash_of, local_spec, + parse, state_machine_call, SharedParams, State, LOG_TARGET, +}; +use parity_scale_codec::Encode; +use remote_externalities::rpc_api; +use sc_executor::NativeExecutionDispatch; +use sc_service::Configuration; +use sp_core::storage::well_known_keys; +use sp_runtime::traits::{Block as BlockT, Header, NumberFor}; +use std::{fmt::Debug, str::FromStr}; + +/// Configurations of the [`Command::OffchainWorker`]. +#[derive(Debug, Clone, structopt::StructOpt)] +pub struct OffchainWorkerCmd { + /// Overwrite the wasm code in state or not. + #[structopt(long)] + overwrite_wasm_code: bool, + + /// The block hash at which to fetch the header. + /// + /// If the `live` state type is being used, then this can be omitted, and is equal to whatever + /// the `state::at` is. Only use this (with care) when combined with a snapshot. + #[structopt( + long, + multiple = false, + parse(try_from_str = parse::hash) + )] + header_at: Option, + + /// The ws uri from which to fetch the header. + /// + /// If the `live` state type is being used, then this can be omitted, and is equal to whatever + /// the `state::uri` is. Only use this (with care) when combined with a snapshot. + #[structopt( + long, + multiple = false, + parse(try_from_str = parse::url) + )] + header_ws_uri: Option, + + /// The state type to use. 
+ #[structopt(subcommand)] + pub state: State, +} + +impl OffchainWorkerCmd { + fn header_at(&self) -> sc_cli::Result + where + Block::Hash: FromStr, + ::Err: Debug, + { + match (&self.header_at, &self.state) { + (Some(header_at), State::Snap { .. }) => hash_of::(&header_at), + (Some(header_at), State::Live { .. }) => { + log::error!(target: LOG_TARGET, "--header-at is provided while state type is live, this will most likely lead to a nonsensical result."); + hash_of::(&header_at) + }, + (None, State::Live { at: Some(at), .. }) => hash_of::(&at), + _ => { + panic!("either `--header-at` must be provided, or state must be `live` with a proper `--at`"); + }, + } + } + + fn header_ws_uri(&self) -> String + where + Block::Hash: FromStr, + ::Err: Debug, + { + match (&self.header_ws_uri, &self.state) { + (Some(header_ws_uri), State::Snap { .. }) => header_ws_uri.to_owned(), + (Some(header_ws_uri), State::Live { .. }) => { + log::error!(target: LOG_TARGET, "--header-uri is provided while state type is live, this will most likely lead to a nonsensical result."); + header_ws_uri.to_owned() + }, + (None, State::Live { uri, .. }) => uri.clone(), + (None, State::Snap { .. 
}) => { + panic!("either `--header-uri` must be provided, or state must be `live`"); + }, + } + } +} + +pub(crate) async fn offchain_worker( + shared: SharedParams, + command: OffchainWorkerCmd, + config: Configuration, +) -> sc_cli::Result<()> +where + Block: BlockT + serde::de::DeserializeOwned, + Block::Hash: FromStr, + Block::Header: serde::de::DeserializeOwned, + ::Err: Debug, + NumberFor: FromStr, + as FromStr>::Err: Debug, + ExecDispatch: NativeExecutionDispatch + 'static, +{ + let executor = build_executor(&shared, &config); + let execution = shared.execution; + + let header_at = command.header_at::()?; + let header_ws_uri = command.header_ws_uri::(); + + let header = rpc_api::get_header::(header_ws_uri.clone(), header_at).await?; + log::info!( + target: LOG_TARGET, + "fetched header from {:?}, block number: {:?}", + header_ws_uri, + header.number() + ); + + let ext = { + let builder = command.state.builder::()?; + + let builder = if command.overwrite_wasm_code { + let (code_key, code) = extract_code(&config.chain_spec)?; + builder.inject_key_value(&[(code_key, code)]) + } else { + builder.inject_hashed_key(well_known_keys::CODE) + }; + + builder.build().await? 
+ }; + + let (expected_spec_name, expected_spec_version) = + local_spec::(&ext, &executor); + ensure_matching_spec::( + header_ws_uri, + expected_spec_name, + expected_spec_version, + shared.no_spec_name_check, + ) + .await; + + let _ = state_machine_call::( + &ext, + &executor, + execution, + "OffchainWorkerApi_offchain_worker", + header.encode().as_ref(), + full_extensions(), + )?; + + log::info!(target: LOG_TARGET, "OffchainWorkerApi_offchain_worker executed without errors."); + + Ok(()) +} diff --git a/utils/frame/try-runtime/cli/src/commands/on_runtime_upgrade.rs b/utils/frame/try-runtime/cli/src/commands/on_runtime_upgrade.rs new file mode 100644 index 0000000000000..86f5548b8aafa --- /dev/null +++ b/utils/frame/try-runtime/cli/src/commands/on_runtime_upgrade.rs @@ -0,0 +1,92 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use std::{fmt::Debug, str::FromStr}; + +use parity_scale_codec::Decode; +use sc_executor::NativeExecutionDispatch; +use sc_service::Configuration; +use sp_runtime::traits::{Block as BlockT, NumberFor}; + +use crate::{ + build_executor, ensure_matching_spec, extract_code, local_spec, state_machine_call, + SharedParams, State, LOG_TARGET, +}; + +/// Configurations of the [`Command::OnRuntimeUpgrade`]. 
+#[derive(Debug, Clone, structopt::StructOpt)] +pub struct OnRuntimeUpgradeCmd { + /// The state type to use. + #[structopt(subcommand)] + pub state: State, +} + +pub(crate) async fn on_runtime_upgrade( + shared: SharedParams, + command: OnRuntimeUpgradeCmd, + config: Configuration, +) -> sc_cli::Result<()> +where + Block: BlockT + serde::de::DeserializeOwned, + Block::Hash: FromStr, + ::Err: Debug, + NumberFor: FromStr, + as FromStr>::Err: Debug, + ExecDispatch: NativeExecutionDispatch + 'static, +{ + let executor = build_executor(&shared, &config); + let execution = shared.execution; + + let ext = { + let builder = command.state.builder::()?; + let (code_key, code) = extract_code(&config.chain_spec)?; + builder.inject_key_value(&[(code_key, code)]).build().await? + }; + + if let Some(uri) = command.state.live_uri() { + let (expected_spec_name, expected_spec_version) = + local_spec::(&ext, &executor); + ensure_matching_spec::( + uri, + expected_spec_name, + expected_spec_version, + shared.no_spec_name_check, + ) + .await; + } + + let (_, encoded_result) = state_machine_call::( + &ext, + &executor, + execution, + "TryRuntime_on_runtime_upgrade", + &[], + Default::default(), // we don't really need any extensions here. + )?; + + let (weight, total_weight) = <(u64, u64) as Decode>::decode(&mut &*encoded_result) + .map_err(|e| format!("failed to decode output: {:?}", e))?; + log::info!( + target: LOG_TARGET, + "TryRuntime_on_runtime_upgrade executed without errors. Consumed weight = {}, total weight = {} ({})", + weight, + total_weight, + weight as f64 / total_weight.max(1) as f64 + ); + + Ok(()) +} diff --git a/utils/frame/try-runtime/cli/src/lib.rs b/utils/frame/try-runtime/cli/src/lib.rs index c92c3959535e9..e7407f2f408fa 100644 --- a/utils/frame/try-runtime/cli/src/lib.rs +++ b/utils/frame/try-runtime/cli/src/lib.rs @@ -15,67 +15,331 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! 
`Structopt`-ready structs for `try-runtime`. - -use parity_scale_codec::{Decode, Encode}; -use remote_externalities::{rpc_api, Builder, Mode, OfflineConfig, OnlineConfig, SnapshotConfig}; +//! # Try-runtime +//! +//! Substrate's ultimate testing framework for the power users. +//! +//! > As the name suggests, `try-runtime` is a detailed testing framework that gives you a lot of +//! control over what is being executed in which environment. It is recommended that users first +//! familiarize themselves with substrate in depth, particularly the execution model. It is critical +//! to deeply understand how the wasm/native interactions, and the runtime apis work in the +//! substrate runtime, before commencing to work with `try-runtime`. +//! +//! #### Resources +//! +//! Some resources about the above: +//! +//! 1. https://substrate.dev/docs/en/knowledgebase/integrate/try-runtime +//! 2. https://www.crowdcast.io/e/substrate-seminar/41 +//! 3. https://substrate.dev/docs/en/knowledgebase/advanced/executor +//! +//! --- +//! +//! ## Overview +//! +//! The basis of all try-runtime commands is the same: connect to a live node, scrape its *state* +//! and put it inside a `TestExternalities`, then call into a *specific runtime-api* using the given +//! state and some *runtime*. +//! +//! All of the variables in the above statement are made *italic*. Let's look at each of them: +//! +//! 1. **State** is the key-value pairs of data that comprise the canonical information that any +//! blockchain is keeping. A state can be full (all key-value pairs), or be partial (only pairs +//! related to some pallets). Moreover, some keys are special and are not related to specific +//! pallets, known as [`well_known_keys`] in substrate. The most important of these is the +//! `:CODE:` key, which contains the code used for execution, when wasm execution is chosen. +//! +//! 2. *A runtime-api* call is a call into a function defined in the runtime, *on top of a given +//! state*. 
Each subcommand of `try-runtime` utilizes a specific *runtime-api*. +//! +//! 3. Finally, the **runtime** is the actual code that is used to execute the aforementioned +//! runtime-api. All substrate based chains always have two runtimes: native and wasm. The +//! decision of which one is chosen is non-trivial. First, let's look at the options: +//! +//! 1. Native: this means that the runtime that is **in your codebase**, aka whatever you see in +//! your editor, is being used. This runtime is easier for diagnostics. We refer to this as +//! the "local runtime". +//! +//! 2. Wasm: this means that whatever is stored in the `:CODE:` key of the state that your +//! scrape is being used. In plain sight, since the entire state (including `:CODE:`) is +//! scraped from a remote chain, you could conclude that the wasm runtime, if used, is always +//! equal to the canonical runtime of the live chain (i.e. NOT the "local runtime"). That's +//! factually true, but then the testing would be quite lame. Typically, with try-runtime, +//! you don't want to execute whatever code is already on the live chain. Instead, you want +//! your local runtime (which typically includes a non-released feature) to be used. This is +//! why try-runtime overwrites the wasm runtime (at `:CODE:`) with the local runtime as well. +//! That being said, this behavior can be controlled in certain subcommands with a special +//! flag (`--overwrite-wasm-code`). +//! +//! The decision of which runtime is eventually used is based on two facts: +//! +//! 1. `--execution` flag. If you specify `wasm`, then it is *always* wasm. If it is `native`, then +//! if and ONLY IF the spec versions match, then the native runtime is used. Else, wasm runtime +//! is used again. +//! 2. `--chain` flag (if present in your cli), which determines *which local runtime*, is selected. +//! This will specify: +//! 1. which native runtime is used, if you select `--execution Native` +//! 2. 
which wasm runtime is used to replace the `:CODE:`, if try-runtime is instructed to do +//! so. +//! +//! All in all, if the term "local runtime" is used in the rest of this crate's documentation, it +//! means either the native runtime, or the wasm runtime when overwritten inside `:CODE:`. In other +//! words, it means your... well, "local runtime", regardless of wasm or native. +//! +//! See [`Command`] for more information about each command's specific customization flags, and +//! assumptions regarding the runtime being used. +//! +//! Finally, to make sure there are no errors regarding this, always run any `try-runtime` command +//! with `executor=trace` logging targets, which will specify which runtime is being used per api +//! call. +//! +//! Furthermore, other relevant log targets are: `try-runtime::cli`, `remote-ext`, and `runtime`. +//! +//! ## Spec name check +//! +//! A common pitfall is that you might be running some test on top of the state of chain `x`, with +//! the runtime of chain `y`. To avoid this, all commands do a spec-name check before executing +//! anything by default. This will check the spec name of the remote node you are connected to, +//! with the spec name of your local runtime and ensure that they match. +//! +//! Should you need to disable this on certain occasions, a top level flag of `--no-spec-name-check` +//! can be used. +//! +//! The spec version is also always inspected, but if it is a mismatch, it will only emit a warning. +//! +//! ## Note on nodes that operate with `try-runtime` +//! +//! There are a number of flags that need to be preferably set on a running node in order to work +//! well with try-runtime's expensive RPC queries: +//! +//! - set `--rpc-max-payload 1000` to ensure large RPC queries can work. +//! - set `--rpc-cors all` to ensure ws connections can come through. +//! +//! Note that *none* of the try-runtime operations need unsafe RPCs. +//! +//! ## Examples +//! +//! 
Run the migrations of the local runtime on the state of polkadot, from the polkadot repo where +//! we have `--chain polkadot-dev`, on the latest finalized block's state +//! +//! ```ignore +//! RUST_LOG=runtime=trace,try-runtime::cli=trace,executor=trace \ +//! cargo run try-runtime \ +//! --execution Native \ +//! --chain polkadot-dev \ +//! on-runtime-upgrade \ +//! live \ +//! --uri wss://rpc.polkadot.io +//! # note that we don't pass any --at, nothing means latest block. +//! ``` +//! +//! Same as previous one, but let's say we want to run this command from the substrate repo, where +//! we don't have a matching spec name/version. +//! +//! ```ignore +//! RUST_LOG=runtime=trace,try-runtime::cli=trace,executor=trace \ +//! cargo run try-runtime \ +//! --execution Native \ +//! --chain dev \ +//! --no-spec-name-check \ # mind this one! +//! on-runtime-upgrade \ +//! live \ +//! --uri wss://rpc.polkadot.io +//! ``` +//! +//! Same as the previous one, but run it at specific block number's state. This means that this +//! block hash's state shall not yet have been pruned in `rpc.polkadot.io`. +//! +//! ```ignore +//! RUST_LOG=runtime=trace,try-runtime::cli=trace,executor=trace \ +//! cargo run try-runtime \ +//! --execution Native \ +//! --chain dev \ +//! --no-spec-name-check \ # mind this one! +//! on-runtime-upgrade \ +//! live \ +//! --uri wss://rpc.polkadot.io \ +//! --at <block-hash> +//! ``` +//! +//! Moving to `execute-block` and `offchain-workers`. For these commands, you always need to +//! specify a block hash. For the rest of these examples, we assume we're in the polkadot repo. +//! +//! First, let's assume you are in a branch that has the same spec name/version as the live polkadot +//! network. +//! +//! ```ignore +//! RUST_LOG=runtime=trace,try-runtime::cli=trace,executor=trace \ +//! cargo run try-runtime \ +//! --execution Wasm \ +//! --chain polkadot-dev \ +//! --uri wss://rpc.polkadot.io \ +//! execute-block \ +//! live \ +//! --at <block-hash> +//! 
``` +//! +//! This is wasm, so it will technically execute the code that lives on the live network. Let's say +//! you want to execute your local runtime. Since you have matching spec versions, you can simply +//! change `--execution Wasm` to `--execution Native` to achieve this. Your logs of `executor=trace` +//! should show something along the lines of: +//! +//! ```ignore +//! Request for native execution succeeded (native: polkadot-9900 (parity-polkadot-0.tx7.au0), chain: polkadot-9900 (parity-polkadot-0.tx7.au0)) +//! ``` +//! +//! If you don't have matching spec versions, then you are doomed to execute wasm. In this case, you can +//! manually overwrite the wasm code with your local runtime: +//! +//! ```ignore +//! RUST_LOG=runtime=trace,try-runtime::cli=trace,executor=trace \ +//! cargo run try-runtime \ +//! --execution Wasm \ +//! --chain polkadot-dev \ +//! execute-block \ +//! live \ +//! --uri wss://rpc.polkadot.io \ +//! --at <block-hash> \ +//! --overwrite-wasm-code +//! ``` +//! +//! For all of these blocks, the block with hash `<block-hash>` is being used, and the initial state +//! is the state of the parent hash. This is because by omitting [`ExecuteBlockCmd::block_at`], the +//! `--at` is used for both. This should be good enough for 99% of the cases. The only case where +//! you need to specify `block-at` and `block-ws-uri` is with snapshots. Let's say you have a file +//! `snap` and you know it corresponds to the state of the parent block of `X`. Then you'd do: +//! +//! ```ignore +//! RUST_LOG=runtime=trace,try-runtime::cli=trace,executor=trace \ +//! cargo run try-runtime \ +//! --execution Wasm \ +//! --chain polkadot-dev \ +//! --uri wss://rpc.polkadot.io \ +//! execute-block \ +//! --block-at <x> \ +//! --block-ws-uri wss://rpc.polkadot.io \ +//! --overwrite-wasm-code \ +//! snap \ +//! -s snap \ +//! 
``` + +use parity_scale_codec::Decode; +use remote_externalities::{ + Builder, Mode, OfflineConfig, OnlineConfig, SnapshotConfig, TestExternalities, +}; use sc_chain_spec::ChainSpec; use sc_cli::{CliConfiguration, ExecutionStrategy, WasmExecutionMethod}; use sc_executor::NativeElseWasmExecutor; use sc_service::{Configuration, NativeExecutionDispatch}; use sp_core::{ - hashing::twox_128, offchain::{ testing::{TestOffchainExt, TestTransactionPoolExt}, OffchainDbExt, OffchainWorkerExt, TransactionPoolExt, }, storage::{well_known_keys, StorageData, StorageKey}, + testing::TaskExecutor, + traits::TaskExecutorExt, + twox_128, H256, }; +use sp_externalities::Extensions; use sp_keystore::{testing::KeyStore, KeystoreExt}; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; -use sp_state_machine::StateMachine; -use std::{fmt::Debug, path::PathBuf, str::FromStr, sync::Arc}; +use sp_runtime::traits::{Block as BlockT, NumberFor}; +use sp_state_machine::{OverlayedChanges, StateMachine}; +use std::{fmt::Debug, path::PathBuf, str::FromStr}; -mod parse; +mod commands; +pub(crate) mod parse; +pub(crate) const LOG_TARGET: &'static str = "try-runtime::cli"; -/// Possible subcommands of `try-runtime`. +/// Possible commands of `try-runtime`. #[derive(Debug, Clone, structopt::StructOpt)] pub enum Command { - /// Execute "TryRuntime_on_runtime_upgrade" against the given runtime state. - OnRuntimeUpgrade(OnRuntimeUpgradeCmd), - /// Execute "OffchainWorkerApi_offchain_worker" against the given runtime state. - OffchainWorker(OffchainWorkerCmd), - /// Execute "Core_execute_block" using the given block and the runtime state of the parent - /// block. 
- ExecuteBlock(ExecuteBlockCmd), -} - -#[derive(Debug, Clone, structopt::StructOpt)] -pub struct OnRuntimeUpgradeCmd { - #[structopt(subcommand)] - pub state: State, -} - -#[derive(Debug, Clone, structopt::StructOpt)] -pub struct OffchainWorkerCmd { - #[structopt(subcommand)] - pub state: State, -} - -#[derive(Debug, Clone, structopt::StructOpt)] -pub struct ExecuteBlockCmd { - #[structopt(subcommand)] - pub state: State, + /// Execute the migrations of the "local runtime". + /// + /// This uses a custom runtime api call, namely "TryRuntime_on_runtime_upgrade". + /// + /// This always overwrites the wasm code with the local runtime (specified by `--chain`), to + /// ensure the new migrations are being executed. Re-executing already existing migrations is + /// evidently not very exciting. + OnRuntimeUpgrade(commands::on_runtime_upgrade::OnRuntimeUpgradeCmd), + + /// Executes the given block against some state. + /// + /// Unlike [`Command::OnRuntimeUpgrade`], this command needs two inputs: the state, and the + /// block data. Since the state could be cached (see [`State::Snap`]), different flags are + /// provided for both. `--block-at` and `--block-uri`, if provided, are only used for fetching + /// the block. For convenience, these flags can both be omitted, if the [`State::Live`] is + /// being used. + /// + /// Note that by default, this command does not overwrite the code, so in wasm execution, the + /// live chain's code is used. This can be disabled if desired, see + /// [`ExecuteBlockCmd::overwrite_wasm_code`]. + /// + /// Note that if you do overwrite the wasm code, or generally use the local runtime for this, + /// you might + /// - not be able to decode the block, if the block format has changed. 
+ /// - quite possibly will get a signature verification failure, since the spec and + /// transaction version are part of the signature's payload, and if they differ between + /// your local runtime and the remote counterparts, the signatures cannot be verified. + /// - almost certainly will get a state root mismatch, since, well, you are executing a + /// different state transition function. + /// + /// To make testing slightly more dynamic, you can disable the state root check by enabling + /// [`ExecuteBlockCmd::no_check`]. If you get signature verification errors, you should + /// manually tweak your local runtime's spec version to fix this. + /// + /// A subtle detail of execute block is that if you want to execute block 100 of a live chain + /// again, you need to scrape the state of block 99. This is already done automatically if you + /// use [`State::Live`], and the parent hash of the target block is used to scrape the state. + /// If [`State::Snap`] is being used, then this needs to be manually taken into consideration. + /// + /// This executes the same runtime api as normal block import, namely `Core_execute_block`. If + /// [`ExecuteBlockCmd::no_check`] is set, it uses a custom, try-runtime-only runtime + /// api called `TryRuntime_execute_block_no_check`. + ExecuteBlock(commands::execute_block::ExecuteBlockCmd), + + /// Executes *the offchain worker hooks* of a given block against some state. + /// + /// Similar to [`Command::ExecuteBlock`], this command needs two inputs: the state, and the + /// header data. Likewise, `--header-at` and `--header-uri` can be filled, or omitted if + /// [`State::Live`] is used. + /// + /// Similar to [`Command::ExecuteBlock`], this command does not overwrite the code, so in wasm + /// execution, the live chain's code is used. This can be disabled if desired, see + /// [`OffchainWorkerCmd::overwrite_wasm_code`]. 
+ /// + /// This executes the same runtime api as normal block import, namely + /// `OffchainWorkerApi_offchain_worker`. + OffchainWorker(commands::offchain_worker::OffchainWorkerCmd), + + /// Follow the given chain's finalized blocks and apply all of its extrinsics. + /// + /// This is essentially repeated calls to [`Command::ExecuteBlock`], whilst the local runtime + /// is always at use, the state root check is disabled, and the state is persisted between + /// executions. + /// + /// This allows the behavior of a new runtime to be inspected over a long period of time, with + /// realistic transactions coming as input. + /// + /// NOTE: this does NOT execute the offchain worker hooks of mirrored blocks. This might be + /// added in the future. + /// + /// This does not support snapshot states, and can only work with a remote chain. Upon first + /// connections, starts listening for finalized block events. Upon first block notification, it + /// initializes the state from the remote node, and starts applying that block, plus all the + /// blocks that follow, to the same growing state. + FollowChain(commands::follow_chain::FollowChainCmd), } +/// Shared parameters of the `try-runtime` commands #[derive(Debug, Clone, structopt::StructOpt)] pub struct SharedParams { - /// The shared parameters + /// Shared parameters of substrate cli. #[allow(missing_docs)] #[structopt(flatten)] pub shared_params: sc_cli::SharedParams, - /// The execution strategy that should be used for benchmarks + /// The execution strategy that should be used. #[structopt( long = "execution", value_name = "STRATEGY", @@ -85,7 +349,7 @@ pub struct SharedParams { )] pub execution: ExecutionStrategy, - /// Method for executing Wasm runtime code. + /// Type of wasm execution used. #[structopt( long = "wasm-execution", value_name = "METHOD", @@ -96,52 +360,18 @@ pub struct SharedParams { pub wasm_method: WasmExecutionMethod, /// The number of 64KB pages to allocate for Wasm execution. 
Defaults to - /// sc_service::Configuration.default_heap_pages. + /// [`sc_service::Configuration.default_heap_pages`]. #[structopt(long)] pub heap_pages: Option, - /// The block hash at which to read state. This is required for execute-block, offchain-worker, - /// or any command that used the live subcommand. - #[structopt( - short, - long, - multiple = false, - parse(try_from_str = parse::hash), - required_ifs( - &[("command", "offchain-worker"), ("command", "execute-block"), ("subcommand", "live")] - ) - )] - block_at: String, - - /// Whether or not to overwrite the code from state with the code from - /// the specified chain spec. + /// When enabled, the spec name check will not panic, and instead only show a warning. #[structopt(long)] - pub overwrite_code: bool, - - /// The url to connect to. - // TODO having this a shared parm is a temporary hack; the url is used just - // to get the header/block. We should try and get that out of state, OR allow - // the user to feed in a header/block via file. - // https://github.com/paritytech/substrate/issues/9027 - #[structopt(short, long, default_value = "ws://localhost:9944", parse(try_from_str = parse::url))] - url: String, + pub no_spec_name_check: bool, } -impl SharedParams { - /// Get the configured value of `block_at`, interpreted as the hash type of `Block`. - pub fn block_at(&self) -> sc_cli::Result - where - Block: BlockT, - ::Hash: FromStr, - <::Hash as FromStr>::Err: Debug, - { - self.block_at - .parse::<::Hash>() - .map_err(|e| format!("Could not parse block hash: {:?}", e).into()) - } -} - -/// Various commands to try out against runtime state at a specific block. +/// Our `try-runtime` command. +/// +/// See [`Command`] for more info. #[derive(Debug, Clone, structopt::StructOpt)] pub struct TryRuntimeCmd { #[structopt(flatten)] @@ -151,11 +381,12 @@ pub struct TryRuntimeCmd { pub command: Command, } -/// The source of runtime state to try operations against. +/// The source of runtime *state* to use. 
#[derive(Debug, Clone, structopt::StructOpt)] pub enum State { - /// Use a state snapshot as the source of runtime state. NOTE: for the offchain-worker and - /// execute-block command this is only partially supported and requires a archive node url. + /// Use a state snapshot as the source of runtime state. + /// + /// This can be created by passing a value to [`State::Live::snapshot_path`]. Snap { #[structopt(short, long)] snapshot_path: PathBuf, @@ -163,285 +394,80 @@ pub enum State { /// Use a live chain as the source of runtime state. Live { + /// The url to connect to. + #[structopt( + short, + long, + parse(try_from_str = parse::url), + )] + uri: String, + + /// The block hash at which to fetch the state. + /// + /// If none is provided, then the latest finalized head is used. This is particularly useful + /// for [`Command::OnRuntimeUpgrade`]. + #[structopt( + short, + long, + multiple = false, + parse(try_from_str = parse::hash), + )] + at: Option, + /// An optional state snapshot file to WRITE to. Not written if set to `None`. #[structopt(short, long)] snapshot_path: Option, - /// The modules to scrape. If empty, entire chain state will be scraped. + /// The pallets to scrape. If empty, entire chain state will be scraped. 
#[structopt(short, long, require_delimiter = true)] - modules: Option>, + pallets: Option>, }, } -async fn on_runtime_upgrade( - shared: SharedParams, - command: OnRuntimeUpgradeCmd, - config: Configuration, -) -> sc_cli::Result<()> -where - Block: BlockT + serde::de::DeserializeOwned, - Block::Hash: FromStr, - ::Err: Debug, - NumberFor: FromStr, - as FromStr>::Err: Debug, - ExecDispatch: NativeExecutionDispatch + 'static, -{ - let wasm_method = shared.wasm_method; - let execution = shared.execution; - let heap_pages = shared.heap_pages.or(config.default_heap_pages); - - let mut changes = Default::default(); - let max_runtime_instances = config.max_runtime_instances; - let executor = NativeElseWasmExecutor::::new( - wasm_method.into(), - heap_pages, - max_runtime_instances, - ); - - check_spec_name::(shared.url.clone(), config.chain_spec.name().to_string()).await; - - let ext = { - let builder = match command.state { +impl State { + /// Create the [`remote_externalities::Builder`] from self. + pub(crate) fn builder(&self) -> sc_cli::Result> + where + Block::Hash: FromStr, + ::Err: Debug, + { + Ok(match self { State::Snap { snapshot_path } => Builder::::new().mode(Mode::Offline(OfflineConfig { state_snapshot: SnapshotConfig::new(snapshot_path), })), - State::Live { snapshot_path, modules } => - Builder::::new().mode(Mode::Online(OnlineConfig { - transport: shared.url.to_owned().into(), - state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), - modules: modules.to_owned().unwrap_or_default(), - at: Some(shared.block_at::()?), - ..Default::default() - })), - }; - - let (code_key, code) = extract_code(config.chain_spec)?; - builder - .inject_key_value(&[(code_key, code)]) - .inject_hashed_key(&[twox_128(b"System"), twox_128(b"LastRuntimeUpgrade")].concat()) - .build() - .await? 
- }; - - let encoded_result = StateMachine::<_, _, NumberFor, _>::new( - &ext.backend, - None, - &mut changes, - &executor, - "TryRuntime_on_runtime_upgrade", - &[], - ext.extensions, - &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend).runtime_code()?, - sp_core::testing::TaskExecutor::new(), - ) - .execute(execution.into()) - .map_err(|e| format!("failed to execute 'TryRuntime_on_runtime_upgrade': {:?}", e))?; - - let (weight, total_weight) = <(u64, u64) as Decode>::decode(&mut &*encoded_result) - .map_err(|e| format!("failed to decode output: {:?}", e))?; - log::info!( - "TryRuntime_on_runtime_upgrade executed without errors. Consumed weight = {}, total weight = {} ({})", - weight, - total_weight, - weight as f64 / total_weight as f64 - ); - - Ok(()) -} - -async fn offchain_worker( - shared: SharedParams, - command: OffchainWorkerCmd, - config: Configuration, -) -> sc_cli::Result<()> -where - Block: BlockT + serde::de::DeserializeOwned, - Block::Hash: FromStr, - Block::Header: serde::de::DeserializeOwned, - ::Err: Debug, - NumberFor: FromStr, - as FromStr>::Err: Debug, - ExecDispatch: NativeExecutionDispatch + 'static, -{ - let wasm_method = shared.wasm_method; - let execution = shared.execution; - let heap_pages = shared.heap_pages.or(config.default_heap_pages); - - let mut changes = Default::default(); - let max_runtime_instances = config.max_runtime_instances; - let executor = NativeElseWasmExecutor::::new( - wasm_method.into(), - heap_pages, - max_runtime_instances, - ); - - check_spec_name::(shared.url.clone(), config.chain_spec.name().to_string()).await; - - let mode = match command.state { - State::Live { snapshot_path, modules } => { - let at = shared.block_at::()?; - let online_config = OnlineConfig { - transport: shared.url.to_owned().into(), - state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), - modules: modules.to_owned().unwrap_or_default(), - at: Some(at), - ..Default::default() - }; - - Mode::Online(online_config) - }, 
- State::Snap { snapshot_path } => { - let mode = - Mode::Offline(OfflineConfig { state_snapshot: SnapshotConfig::new(snapshot_path) }); - - mode - }, - }; - let builder = Builder::::new() - .mode(mode) - .inject_hashed_key(&[twox_128(b"System"), twox_128(b"LastRuntimeUpgrade")].concat()); - let mut ext = if shared.overwrite_code { - let (code_key, code) = extract_code(config.chain_spec)?; - builder.inject_key_value(&[(code_key, code)]).build().await? - } else { - builder.inject_hashed_key(well_known_keys::CODE).build().await? - }; - - let (offchain, _offchain_state) = TestOffchainExt::new(); - let (pool, _pool_state) = TestTransactionPoolExt::new(); - ext.register_extension(OffchainDbExt::new(offchain.clone())); - ext.register_extension(OffchainWorkerExt::new(offchain)); - ext.register_extension(KeystoreExt(Arc::new(KeyStore::new()))); - ext.register_extension(TransactionPoolExt::new(pool)); - - let header_hash = shared.block_at::()?; - let header = rpc_api::get_header::(shared.url, header_hash).await?; - - let _ = StateMachine::<_, _, NumberFor, _>::new( - &ext.backend, - None, - &mut changes, - &executor, - "OffchainWorkerApi_offchain_worker", - header.encode().as_ref(), - ext.extensions, - &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend).runtime_code()?, - sp_core::testing::TaskExecutor::new(), - ) - .execute(execution.into()) - .map_err(|e| format!("failed to execute 'OffchainWorkerApi_offchain_worker': {:?}", e))?; - - log::info!("OffchainWorkerApi_offchain_worker executed without errors."); - - Ok(()) -} - -async fn execute_block( - shared: SharedParams, - command: ExecuteBlockCmd, - config: Configuration, -) -> sc_cli::Result<()> -where - Block: BlockT + serde::de::DeserializeOwned, - Block::Hash: FromStr, - ::Err: Debug, - NumberFor: FromStr, - as FromStr>::Err: Debug, - ExecDispatch: NativeExecutionDispatch + 'static, -{ - let wasm_method = shared.wasm_method; - let execution = shared.execution; - let heap_pages = 
shared.heap_pages.or(config.default_heap_pages); - - let mut changes = Default::default(); - let max_runtime_instances = config.max_runtime_instances; - let executor = NativeElseWasmExecutor::::new( - wasm_method.into(), - heap_pages, - max_runtime_instances, - ); - - let block_hash = shared.block_at::()?; - let block: Block = rpc_api::get_block::(shared.url.clone(), block_hash).await?; - - check_spec_name::(shared.url.clone(), config.chain_spec.name().to_string()).await; - - let mode = match command.state { - State::Snap { snapshot_path } => { - let mode = - Mode::Offline(OfflineConfig { state_snapshot: SnapshotConfig::new(snapshot_path) }); - - mode - }, - State::Live { snapshot_path, modules } => { - let parent_hash = block.header().parent_hash(); - - let mode = Mode::Online(OnlineConfig { - transport: shared.url.to_owned().into(), - state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), - modules: modules.to_owned().unwrap_or_default(), - at: Some(parent_hash.to_owned()), - ..Default::default() - }); - - mode - }, - }; - - let ext = { - let builder = Builder::::new() - .mode(mode) - .inject_hashed_key(&[twox_128(b"System"), twox_128(b"LastRuntimeUpgrade")].concat()); - let mut ext = if shared.overwrite_code { - let (code_key, code) = extract_code(config.chain_spec)?; - builder.inject_key_value(&[(code_key, code)]).build().await? - } else { - builder.inject_hashed_key(well_known_keys::CODE).build().await? - }; - - // register externality extensions in order to provide host interface for OCW to the - // runtime. 
- let (offchain, _offchain_state) = TestOffchainExt::new(); - let (pool, _pool_state) = TestTransactionPoolExt::new(); - ext.register_extension(OffchainDbExt::new(offchain.clone())); - ext.register_extension(OffchainWorkerExt::new(offchain)); - ext.register_extension(KeystoreExt(Arc::new(KeyStore::new()))); - ext.register_extension(TransactionPoolExt::new(pool)); - - ext - }; - - // A digest item gets added when the runtime is processing the block, so we need to pop - // the last one to be consistent with what a gossiped block would contain. - let (mut header, extrinsics) = block.deconstruct(); - header.digest_mut().pop(); - let block = Block::new(header, extrinsics); - - let _encoded_result = StateMachine::<_, _, NumberFor, _>::new( - &ext.backend, - None, - &mut changes, - &executor, - "Core_execute_block", - block.encode().as_ref(), - ext.extensions, - &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend).runtime_code()?, - sp_core::testing::TaskExecutor::new(), - ) - .execute(execution.into()) - .map_err(|e| format!("failed to execute 'Core_execute_block': {:?}", e))?; - debug_assert!(_encoded_result == vec![1]); - - log::info!("Core_execute_block executed without errors."); + State::Live { snapshot_path, pallets, uri, at } => { + let at = match at { + Some(at_str) => Some(hash_of::(at_str)?), + None => None, + }; + Builder::::new() + .mode(Mode::Online(OnlineConfig { + transport: uri.to_owned().into(), + state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), + pallets: pallets.to_owned().unwrap_or_default(), + at, + })) + .inject_hashed_key( + &[twox_128(b"System"), twox_128(b"LastRuntimeUpgrade")].concat(), + ) + }, + }) + } - Ok(()) + /// Get the uri, if self is `Live`. + pub(crate) fn live_uri(&self) -> Option { + match self { + State::Live { uri, .. 
} => Some(uri.clone()), + _ => None, + } + } } impl TryRuntimeCmd { pub async fn run(&self, config: Configuration) -> sc_cli::Result<()> where - Block: BlockT + serde::de::DeserializeOwned, + Block: BlockT + serde::de::DeserializeOwned, Block::Header: serde::de::DeserializeOwned, Block::Hash: FromStr, ::Err: Debug, @@ -451,13 +477,33 @@ impl TryRuntimeCmd { { match &self.command { Command::OnRuntimeUpgrade(ref cmd) => - on_runtime_upgrade::(self.shared.clone(), cmd.clone(), config) - .await, + commands::on_runtime_upgrade::on_runtime_upgrade::( + self.shared.clone(), + cmd.clone(), + config, + ) + .await, Command::OffchainWorker(cmd) => - offchain_worker::(self.shared.clone(), cmd.clone(), config) - .await, + commands::offchain_worker::offchain_worker::( + self.shared.clone(), + cmd.clone(), + config, + ) + .await, Command::ExecuteBlock(cmd) => - execute_block::(self.shared.clone(), cmd.clone(), config).await, + commands::execute_block::execute_block::( + self.shared.clone(), + cmd.clone(), + config, + ) + .await, + Command::FollowChain(cmd) => + commands::follow_chain::follow_chain::( + self.shared.clone(), + cmd.clone(), + config, + ) + .await, } } } @@ -477,7 +523,7 @@ impl CliConfiguration for TryRuntimeCmd { /// Extract `:code` from the given chain spec and return as `StorageData` along with the /// corresponding `StorageKey`. -fn extract_code(spec: Box) -> sc_cli::Result<(StorageKey, StorageData)> { +pub(crate) fn extract_code(spec: &Box) -> sc_cli::Result<(StorageKey, StorageData)> { let genesis_storage = spec.build_storage()?; let code = StorageData( genesis_storage @@ -491,31 +537,142 @@ fn extract_code(spec: Box) -> sc_cli::Result<(StorageKey, Storage Ok((code_key, code)) } +/// Get the hash type of the generic `Block` from a `hash_str`. 
+pub(crate) fn hash_of(hash_str: &str) -> sc_cli::Result +where + Block::Hash: FromStr, + ::Err: Debug, +{ + hash_str + .parse::<::Hash>() + .map_err(|e| format!("Could not parse block hash: {:?}", e).into()) +} + /// Check the spec_name of an `ext` /// -/// If the version does not exist, or if it does not match with the given, it emits a warning. -async fn check_spec_name( +/// If the spec names don't match, if `relaxed`, then it emits a warning, else it panics. +/// If the spec versions don't match, it only ever emits a warning. +pub(crate) async fn ensure_matching_spec( uri: String, expected_spec_name: String, + expected_spec_version: u32, + relaxed: bool, ) { - let expected_spec_name = expected_spec_name.to_lowercase(); match remote_externalities::rpc_api::get_runtime_version::(uri.clone(), None) .await - .map(|version| String::from(version.spec_name.clone())) - .map(|spec_name| spec_name.to_lowercase()) + .map(|version| (String::from(version.spec_name.clone()), version.spec_version)) + .map(|(spec_name, spec_version)| (spec_name.to_lowercase(), spec_version)) { - Ok(spec) if spec == expected_spec_name => { - log::debug!("found matching spec name: {:?}", spec); - }, - Ok(spec) => { - log::warn!( - "version mismatch: remote spec name: '{}', expected (local chain spec, aka. `--chain`): '{}'", - spec, - expected_spec_name, - ); + Ok((name, version)) => { + // first, deal with spec name + if expected_spec_name == name { + log::info!(target: LOG_TARGET, "found matching spec name: {:?}", name); + } else { + let msg = format!( + "version mismatch: remote spec name: '{}', expected (local chain spec, aka. `--chain`): '{}'", + name, + expected_spec_name + ); + if relaxed { + log::warn!(target: LOG_TARGET, "{}", msg); + } else { + panic!("{}", msg); + } + } + + if expected_spec_version == version { + log::info!(target: LOG_TARGET, "found matching spec version: {:?}", version); + } else { + log::warn!( + target: LOG_TARGET, + "spec version mismatch (local {} != remote {}). 
This could cause some issues.", + expected_spec_version, + version + ); + } }, Err(why) => { - log::error!("failed to fetch runtime version from {}: {:?}", uri, why); + log::error!( + target: LOG_TARGET, + "failed to fetch runtime version from {}: {:?}. Skipping the check", + uri, + why + ); }, } } + +/// Build all extensions that we typically use. +pub(crate) fn full_extensions() -> Extensions { + let mut extensions = Extensions::default(); + extensions.register(TaskExecutorExt::new(TaskExecutor::new())); + let (offchain, _offchain_state) = TestOffchainExt::new(); + let (pool, _pool_state) = TestTransactionPoolExt::new(); + extensions.register(OffchainDbExt::new(offchain.clone())); + extensions.register(OffchainWorkerExt::new(offchain)); + extensions.register(KeystoreExt(std::sync::Arc::new(KeyStore::new()))); + extensions.register(TransactionPoolExt::new(pool)); + + extensions +} + +/// Build a default execution that we typically use. +pub(crate) fn build_executor( + shared: &SharedParams, + config: &sc_service::Configuration, +) -> NativeElseWasmExecutor { + let wasm_method = shared.wasm_method; + let heap_pages = shared.heap_pages.or(config.default_heap_pages); + let max_runtime_instances = config.max_runtime_instances; + + NativeElseWasmExecutor::::new(wasm_method.into(), heap_pages, max_runtime_instances) +} + +/// Execute the given `method` and `data` on top of `ext`, returning the results (encoded) and the +/// state `changes`. 
+pub(crate) fn state_machine_call( + ext: &TestExternalities, + executor: &NativeElseWasmExecutor, + execution: sc_cli::ExecutionStrategy, + method: &'static str, + data: &[u8], + extensions: Extensions, +) -> sc_cli::Result<(OverlayedChanges, Vec)> { + let mut changes = Default::default(); + let encoded_results = StateMachine::<_, _, NumberFor, _>::new( + &ext.backend, + None, + &mut changes, + executor, + method, + data, + extensions, + &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend).runtime_code()?, + sp_core::testing::TaskExecutor::new(), + ) + .execute(execution.into()) + .map_err(|e| format!("failed to execute 'TryRuntime_on_runtime_upgrade': {:?}", e)) + .map_err::(Into::into)?; + + Ok((changes, encoded_results)) +} + +/// Get the spec `(name, version)` from the local runtime. +pub(crate) fn local_spec( + ext: &TestExternalities, + executor: &NativeElseWasmExecutor, +) -> (String, u32) { + let (_, encoded) = state_machine_call::( + &ext, + &executor, + sc_cli::ExecutionStrategy::NativeElseWasm, + "Core_version", + &[], + Default::default(), + ) + .expect("all runtimes should have version; qed"); + ::decode(&mut &*encoded) + .map_err(|e| format!("failed to decode output: {:?}", e)) + .map(|v| (v.spec_name.into(), v.spec_version)) + .expect("all runtimes should have version; qed") +} From da6094d286cca99a9bc5acb2412341280e3828ab Mon Sep 17 00:00:00 2001 From: Alexander Popiak Date: Tue, 21 Sep 2021 20:43:45 +0200 Subject: [PATCH 21/33] Pull Request Template Suggestions (#9717) * pull request template suggestions * Update docs/PULL_REQUEST_TEMPLATE.md Co-authored-by: Squirrel Co-authored-by: Squirrel Co-authored-by: thiolliere --- docs/PULL_REQUEST_TEMPLATE.md | 32 +++++++++++++++++++------------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/docs/PULL_REQUEST_TEMPLATE.md b/docs/PULL_REQUEST_TEMPLATE.md index 77f5f79f60d40..12f39371892e7 100644 --- a/docs/PULL_REQUEST_TEMPLATE.md +++ b/docs/PULL_REQUEST_TEMPLATE.md @@ -1,26 
+1,32 @@ -Thank you for your Pull Request! -Before you submitting, please check that: -- [ ] You added a brief description of the PR, e.g.: +✄ ----------------------------------------------------------------------------- + +Thank you for your Pull Request! 🙏 + +Before you submit, please check that: + +- [ ] **Description:** You added a brief description of the PR, e.g.: - What does it do? - - What important points reviewers should know? + - What important points should reviewers know? - Is there something left for follow-up PRs? -- [ ] You labeled the PR appropriately if you have permissions to do so: +- [ ] **Labels:** You labeled the PR appropriately if you have permissions to do so: - [ ] `A*` for PR status (**one required**) - [ ] `B*` for changelog (**one required**) - [ ] `C*` for release notes (**exactly one required**) - [ ] `D*` for various implications/requirements - - [ ] Github's project assignment -- [ ] You mentioned a related issue if this PR related to it, e.g. `Fixes #228` or `Related #1337`. -- [ ] You asked any particular reviewers to review. If you aren't sure, start with GH suggestions. -- [ ] Your PR adheres to [the style guide](https://github.com/paritytech/substrate/blob/master/docs/STYLE_GUIDE.md) + - [ ] Github project assignment +- [ ] **Related Issues:** You mentioned a related issue if this PR is related to it, e.g. `Fixes #228` or `Related #1337`. +- [ ] **2 Reviewers:** You asked at least two reviewers to review. If you aren't sure, start with GH suggestions. +- [ ] **Style Guide:** Your PR adheres to [the style guide](https://github.com/paritytech/substrate/blob/master/docs/STYLE_GUIDE.md) - In particular, mind the maximal line length of 100 (120 in exceptional circumstances). - There is no commented code checked in unless necessary. - - Any panickers have a proof or removed. -- [ ] You bumped the runtime version if there are breaking changes in the **runtime**. 
-- [ ] You updated any rustdocs which may have changed -- [ ] Has the PR altered the external API or interfaces used by Polkadot? Do you have the corresponding Polkadot PR ready? + - Any panickers in the runtime have a proof or were removed. +- [ ] **Runtime Version:** You bumped the runtime version if there are breaking changes in the **runtime**. +- [ ] **Docs:** You updated any rustdocs which may need to change. +- [ ] **Polkadot Companion:** Has the PR altered the external API or interfaces used by Polkadot? + - [ ] If so, do you have the corresponding Polkadot PR ready? + - [ ] Optionally: Do you have a corresponding Cumulus PR? Refer to [the contributing guide](https://github.com/paritytech/substrate/blob/master/docs/CONTRIBUTING.adoc) for details. From 0a18fa066c781aa12ae57803759a5dec317b2e5c Mon Sep 17 00:00:00 2001 From: ferrell-code Date: Tue, 21 Sep 2021 19:21:41 -0400 Subject: [PATCH 22/33] Society Pallet to FrameV2 (#9472) * migrate society * remove unused imports * cargo fmt * trigger ci * pass new ci * Apply suggestions from code review * fmt * regression bench ci? * remove metadata, fixed with typing! Co-authored-by: Guillaume Thiolliere Co-authored-by: charlesferrell --- frame/society/src/lib.rs | 754 +++++++++++++++++++++----------------- frame/society/src/mock.rs | 5 +- 2 files changed, 422 insertions(+), 337 deletions(-) diff --git a/frame/society/src/lib.rs b/frame/society/src/lib.rs index c6d63eed20ac0..83b1c4203722b 100644 --- a/frame/society/src/lib.rs +++ b/frame/society/src/lib.rs @@ -15,14 +15,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! # Society Module +//! # Society Pallet //! //! - [`Config`] //! - [`Call`] //! //! ## Overview //! -//! The Society module is an economic game which incentivizes users to participate +//! The Society pallet is an economic game which incentivizes users to participate //! and maintain a membership society. //! //! 
### User Types @@ -77,7 +77,7 @@ //! #### Society Treasury //! //! The membership society is independently funded by a treasury managed by this -//! module. Some subset of this treasury is placed in a Society Pot, which is used +//! pallet. Some subset of this treasury is placed in a Society Pot, which is used //! to determine the number of accepted bids. //! //! #### Rate of Growth @@ -132,7 +132,7 @@ //! the society. A vouching bid can additionally request some portion of that reward as a tip //! to the voucher for vouching for the prospective candidate. //! -//! Every rotation period, Bids are ordered by reward amount, and the module +//! Every rotation period, Bids are ordered by reward amount, and the pallet //! selects as many bids the Society Pot can support for that period. //! //! These selected bids become candidates and move on to the Candidate phase. @@ -251,19 +251,15 @@ mod mock; #[cfg(test)] mod tests; -use codec::{Decode, Encode}; use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, - dispatch::DispatchResult, - ensure, + pallet_prelude::*, traits::{ BalanceStatus, ChangeMembers, Currency, EnsureOrigin, ExistenceRequirement::AllowDeath, - Get, Imbalance, OnUnbalanced, Randomness, ReservableCurrency, + Imbalance, OnUnbalanced, Randomness, ReservableCurrency, }, - weights::Weight, PalletId, }; -use frame_system::{self as system, ensure_root, ensure_signed}; +use frame_system::pallet_prelude::*; use rand_chacha::{ rand_core::{RngCore, SeedableRng}, ChaChaRng, @@ -278,62 +274,14 @@ use sp_runtime::{ }; use sp_std::prelude::*; +pub use pallet::*; + type BalanceOf = - <>::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = <::Currency as Currency< + <>::Currency as Currency<::AccountId>>::Balance; +type NegativeImbalanceOf = <>::Currency as Currency< ::AccountId, >>::NegativeImbalance; -/// The module's configuration trait. -pub trait Config: system::Config { - /// The overarching event type. 
- type Event: From> + Into<::Event>; - - /// The societies's module id - type PalletId: Get; - - /// The currency type used for bidding. - type Currency: ReservableCurrency; - - /// Something that provides randomness in the runtime. - type Randomness: Randomness; - - /// The minimum amount of a deposit required for a bid to be made. - type CandidateDeposit: Get>; - - /// The amount of the unpaid reward that gets deducted in the case that either a skeptic - /// doesn't vote or someone votes in the wrong way. - type WrongSideDeduction: Get>; - - /// The number of times a member may vote the wrong way (or not at all, when they are a skeptic) - /// before they become suspended. - type MaxStrikes: Get; - - /// The amount of incentive paid within each period. Doesn't include VoterTip. - type PeriodSpend: Get>; - - /// The receiver of the signal for when the members have changed. - type MembershipChanged: ChangeMembers; - - /// The number of blocks between candidate/membership rotation periods. - type RotationPeriod: Get; - - /// The maximum duration of the payout lock. - type MaxLockDuration: Get; - - /// The origin that is allowed to call `found`. - type FounderSetOrigin: EnsureOrigin; - - /// The origin that is allowed to make suspension judgements. - type SuspensionJudgementOrigin: EnsureOrigin; - - /// The number of blocks between membership challenges. - type ChallengePeriod: Get; - - /// The maximum number of candidates that we accept per round. - type MaxCandidateIntake: Get; -} - /// A vote by a member on a candidate application. #[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum Vote { @@ -417,108 +365,320 @@ impl BidKind { } } -// This module's storage items. -decl_storage! { - trait Store for Module, I: Instance=DefaultInstance> as Society { - /// The first member. 
- pub Founder get(fn founder) build(|config: &GenesisConfig| config.members.first().cloned()): - Option; +#[frame_support::pallet] +pub mod pallet { + use super::*; - /// A hash of the rules of this society concerning membership. Can only be set once and - /// only by the founder. - pub Rules get(fn rules): Option; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); - /// The current set of candidates; bidders that are attempting to become members. - pub Candidates get(fn candidates): Vec>>; + #[pallet::config] + pub trait Config: frame_system::Config { + /// The overarching event type. + type Event: From> + IsType<::Event>; - /// The set of suspended candidates. - pub SuspendedCandidates get(fn suspended_candidate): - map hasher(twox_64_concat) T::AccountId - => Option<(BalanceOf, BidKind>)>; + /// The societies's pallet id + #[pallet::constant] + type PalletId: Get; - /// Amount of our account balance that is specifically for the next round's bid(s). - pub Pot get(fn pot) config(): BalanceOf; + /// The currency type used for bidding. + type Currency: ReservableCurrency; - /// The most primary from the most recently approved members. - pub Head get(fn head) build(|config: &GenesisConfig| config.members.first().cloned()): - Option; + /// Something that provides randomness in the runtime. + type Randomness: Randomness; - /// The current set of members, ordered. - pub Members get(fn members) build(|config: &GenesisConfig| { - let mut m = config.members.clone(); - m.sort(); - m - }): Vec; + /// The minimum amount of a deposit required for a bid to be made. + #[pallet::constant] + type CandidateDeposit: Get>; + + /// The amount of the unpaid reward that gets deducted in the case that either a skeptic + /// doesn't vote or someone votes in the wrong way. + #[pallet::constant] + type WrongSideDeduction: Get>; - /// The set of suspended members. 
- pub SuspendedMembers get(fn suspended_member): map hasher(twox_64_concat) T::AccountId => bool; + /// The number of times a member may vote the wrong way (or not at all, when they are a + /// skeptic) before they become suspended. + #[pallet::constant] + type MaxStrikes: Get; - /// The current bids, stored ordered by the value of the bid. - Bids: Vec>>; + /// The amount of incentive paid within each period. Doesn't include VoterTip. + #[pallet::constant] + type PeriodSpend: Get>; - /// Members currently vouching or banned from vouching again - Vouching get(fn vouching): map hasher(twox_64_concat) T::AccountId => Option; + /// The receiver of the signal for when the members have changed. + type MembershipChanged: ChangeMembers; - /// Pending payouts; ordered by block number, with the amount that should be paid out. - Payouts: map hasher(twox_64_concat) T::AccountId => Vec<(T::BlockNumber, BalanceOf)>; + /// The number of blocks between candidate/membership rotation periods. + #[pallet::constant] + type RotationPeriod: Get; - /// The ongoing number of losing votes cast by the member. - Strikes: map hasher(twox_64_concat) T::AccountId => StrikeCount; + /// The maximum duration of the payout lock. + #[pallet::constant] + type MaxLockDuration: Get; - /// Double map from Candidate -> Voter -> (Maybe) Vote. - Votes: double_map - hasher(twox_64_concat) T::AccountId, - hasher(twox_64_concat) T::AccountId - => Option; + /// The origin that is allowed to call `found`. + type FounderSetOrigin: EnsureOrigin; - /// The defending member currently being challenged. - Defender get(fn defender): Option; + /// The origin that is allowed to make suspension judgements. + type SuspensionJudgementOrigin: EnsureOrigin; - /// Votes for the defender. - DefenderVotes: map hasher(twox_64_concat) T::AccountId => Option; + /// The number of blocks between membership challenges. + #[pallet::constant] + type ChallengePeriod: Get; - /// The max number of members for the society at one time. 
- MaxMembers get(fn max_members) config(): u32; + /// The maximum number of candidates that we accept per round. + #[pallet::constant] + type MaxCandidateIntake: Get; + } + + #[pallet::error] + pub enum Error { + /// An incorrect position was provided. + BadPosition, + /// User is not a member. + NotMember, + /// User is already a member. + AlreadyMember, + /// User is suspended. + Suspended, + /// User is not suspended. + NotSuspended, + /// Nothing to payout. + NoPayout, + /// Society already founded. + AlreadyFounded, + /// Not enough in pot to accept candidate. + InsufficientPot, + /// Member is already vouching or banned from vouching again. + AlreadyVouching, + /// Member is not vouching. + NotVouching, + /// Cannot remove the head of the chain. + Head, + /// Cannot remove the founder. + Founder, + /// User has already made a bid. + AlreadyBid, + /// User is already a candidate. + AlreadyCandidate, + /// User is not a candidate. + NotCandidate, + /// Too many members in the society. + MaxMembers, + /// The caller is not the founder. + NotFounder, + /// The caller is not the head. + NotHead, } - add_extra_genesis { - config(members): Vec; + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + pub enum Event, I: 'static = ()> { + /// The society is founded by the given identity. \[founder\] + Founded(T::AccountId), + /// A membership bid just happened. The given account is the candidate's ID and their offer + /// is the second. \[candidate_id, offer\] + Bid(T::AccountId, BalanceOf), + /// A membership bid just happened by vouching. The given account is the candidate's ID and + /// their offer is the second. The vouching party is the third. \[candidate_id, offer, + /// vouching\] + Vouch(T::AccountId, BalanceOf, T::AccountId), + /// A \[candidate\] was dropped (due to an excess of bids in the system). + AutoUnbid(T::AccountId), + /// A \[candidate\] was dropped (by their request). 
+ Unbid(T::AccountId), + /// A \[candidate\] was dropped (by request of who vouched for them). + Unvouch(T::AccountId), + /// A group of candidates have been inducted. The batch's primary is the first value, the + /// batch in full is the second. \[primary, candidates\] + Inducted(T::AccountId, Vec), + /// A suspended member has been judged. \[who, judged\] + SuspendedMemberJudgement(T::AccountId, bool), + /// A \[candidate\] has been suspended + CandidateSuspended(T::AccountId), + /// A \[member\] has been suspended + MemberSuspended(T::AccountId), + /// A \[member\] has been challenged + Challenged(T::AccountId), + /// A vote has been placed \[candidate, voter, vote\] + Vote(T::AccountId, T::AccountId, bool), + /// A vote has been placed for a defending member \[voter, vote\] + DefenderVote(T::AccountId, bool), + /// A new \[max\] member count has been set + NewMaxMembers(u32), + /// Society is unfounded. \[founder\] + Unfounded(T::AccountId), + /// Some funds were deposited into the society account. \[value\] + Deposit(BalanceOf), } -} -// The module's dispatchable functions. -decl_module! { - /// The module declaration. - pub struct Module, I: Instance=DefaultInstance> for enum Call where origin: T::Origin { - type Error = Error; - /// The minimum amount of a deposit required for a bid to be made. - const CandidateDeposit: BalanceOf = T::CandidateDeposit::get(); + /// Old name generated by `decl_event`. + #[deprecated(note = "use `Event` instead")] + pub type RawEvent = Event; + + /// The first member. + #[pallet::storage] + #[pallet::getter(fn founder)] + pub type Founder, I: 'static = ()> = StorageValue<_, T::AccountId>; + + /// A hash of the rules of this society concerning membership. Can only be set once and + /// only by the founder. + #[pallet::storage] + #[pallet::getter(fn rules)] + pub type Rules, I: 'static = ()> = StorageValue<_, T::Hash>; + + /// The current set of candidates; bidders that are attempting to become members. 
+ #[pallet::storage] + #[pallet::getter(fn candidates)] + pub type Candidates, I: 'static = ()> = + StorageValue<_, Vec>>, ValueQuery>; + + /// The set of suspended candidates. + #[pallet::storage] + #[pallet::getter(fn suspended_candidate)] + pub type SuspendedCandidates, I: 'static = ()> = StorageMap< + _, + Twox64Concat, + T::AccountId, + (BalanceOf, BidKind>), + >; + + /// Amount of our account balance that is specifically for the next round's bid(s). + #[pallet::storage] + #[pallet::getter(fn pot)] + pub type Pot, I: 'static = ()> = StorageValue<_, BalanceOf, ValueQuery>; + + /// The most primary from the most recently approved members. + #[pallet::storage] + #[pallet::getter(fn head)] + pub type Head, I: 'static = ()> = StorageValue<_, T::AccountId>; + + /// The current set of members, ordered. + #[pallet::storage] + #[pallet::getter(fn members)] + pub type Members, I: 'static = ()> = + StorageValue<_, Vec, ValueQuery>; + + /// The set of suspended members. + #[pallet::storage] + #[pallet::getter(fn suspended_member)] + pub type SuspendedMembers, I: 'static = ()> = + StorageMap<_, Twox64Concat, T::AccountId, bool, ValueQuery>; + + /// The current bids, stored ordered by the value of the bid. + #[pallet::storage] + pub(super) type Bids, I: 'static = ()> = + StorageValue<_, Vec>>, ValueQuery>; + + /// Members currently vouching or banned from vouching again + #[pallet::storage] + #[pallet::getter(fn vouching)] + pub(super) type Vouching, I: 'static = ()> = + StorageMap<_, Twox64Concat, T::AccountId, VouchingStatus>; + + /// Pending payouts; ordered by block number, with the amount that should be paid out. + #[pallet::storage] + pub(super) type Payouts, I: 'static = ()> = StorageMap< + _, + Twox64Concat, + T::AccountId, + Vec<(T::BlockNumber, BalanceOf)>, + ValueQuery, + >; + + /// The ongoing number of losing votes cast by the member. 
+ #[pallet::storage] + pub(super) type Strikes, I: 'static = ()> = + StorageMap<_, Twox64Concat, T::AccountId, StrikeCount, ValueQuery>; + + /// Double map from Candidate -> Voter -> (Maybe) Vote. + #[pallet::storage] + pub(super) type Votes, I: 'static = ()> = + StorageDoubleMap<_, Twox64Concat, T::AccountId, Twox64Concat, T::AccountId, Vote>; + + /// The defending member currently being challenged. + #[pallet::storage] + #[pallet::getter(fn defender)] + pub(super) type Defender, I: 'static = ()> = StorageValue<_, T::AccountId>; + + /// Votes for the defender. + #[pallet::storage] + pub(super) type DefenderVotes, I: 'static = ()> = + StorageMap<_, Twox64Concat, T::AccountId, Vote>; + + /// The max number of members for the society at one time. + #[pallet::storage] + #[pallet::getter(fn max_members)] + pub(super) type MaxMembers, I: 'static = ()> = StorageValue<_, u32, ValueQuery>; + + #[pallet::hooks] + impl, I: 'static> Hooks> for Pallet { + fn on_initialize(n: T::BlockNumber) -> Weight { + let mut members = vec![]; - /// The amount of the unpaid reward that gets deducted in the case that either a skeptic - /// doesn't vote or someone votes in the wrong way. - const WrongSideDeduction: BalanceOf = T::WrongSideDeduction::get(); + let mut weight = 0; + let weights = T::BlockWeights::get(); - /// The number of times a member may vote the wrong way (or not at all, when they are a skeptic) - /// before they become suspended. - const MaxStrikes: u32 = T::MaxStrikes::get(); + // Run a candidate/membership rotation + if (n % T::RotationPeriod::get()).is_zero() { + members = >::get(); + Self::rotate_period(&mut members); - /// The amount of incentive paid within each period. Doesn't include VoterTip. - const PeriodSpend: BalanceOf = T::PeriodSpend::get(); + weight += weights.max_block / 20; + } - /// The number of blocks between candidate/membership rotation periods. 
- const RotationPeriod: T::BlockNumber = T::RotationPeriod::get(); + // Run a challenge rotation + if (n % T::ChallengePeriod::get()).is_zero() { + // Only read members if not already read. + if members.is_empty() { + members = >::get(); + } + Self::rotate_challenge(&mut members); - /// The number of blocks between membership challenges. - const ChallengePeriod: T::BlockNumber = T::ChallengePeriod::get(); + weight += weights.max_block / 20; + } - /// The societies's module id - const PalletId: PalletId = T::PalletId::get(); + weight + } + } + + #[pallet::genesis_config] + pub struct GenesisConfig, I: 'static = ()> { + pub pot: BalanceOf, + pub members: Vec, + pub max_members: u32, + } - /// Maximum candidate intake per round. - const MaxCandidateIntake: u32 = T::MaxCandidateIntake::get(); + #[cfg(feature = "std")] + impl, I: 'static> Default for GenesisConfig { + fn default() -> Self { + Self { + pot: Default::default(), + members: Default::default(), + max_members: Default::default(), + } + } + } - // Used for handling module events. - fn deposit_event() = default; + #[pallet::genesis_build] + impl, I: 'static> GenesisBuild for GenesisConfig { + fn build(&self) { + Pot::::put(self.pot); + MaxMembers::::put(self.max_members); + let first_member = self.members.first(); + if let Some(member) = first_member { + Founder::::put(member.clone()); + Head::::put(member.clone()); + }; + let mut m = self.members.clone(); + m.sort(); + Members::::put(m); + } + } + #[pallet::call] + impl, I: 'static> Pallet { /// A user outside of the society can make a bid for entry. /// /// Payment: `CandidateDeposit` will be reserved for making a bid. It is returned @@ -538,12 +698,13 @@ decl_module! { /// - One storage read to retrieve all current candidates. O(C) /// - One storage read to retrieve all members. 
O(M) /// - Storage Writes: - /// - One storage mutate to add a new bid to the vector O(B) (TODO: possible optimization w/ read) + /// - One storage mutate to add a new bid to the vector O(B) (TODO: possible optimization + /// w/ read) /// - Up to one storage removal if bid.len() > MAX_BID_COUNT. O(1) /// - Notable Computation: /// - O(B + C + log M) search to check user is not already a part of society. /// - O(log B) search to insert the new bid sorted. - /// - External Module Operations: + /// - External Pallet Operations: /// - One balance reserve operation. O(X) /// - Up to one balance unreserve operation if bids.len() > MAX_BID_COUNT. /// - Events: @@ -552,8 +713,8 @@ decl_module! { /// /// Total Complexity: O(M + B + C + logM + logB + X) /// # - #[weight = T::BlockWeights::get().max_block / 10] - pub fn bid(origin, value: BalanceOf) -> DispatchResult { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn bid(origin: OriginFor, value: BalanceOf) -> DispatchResult { let who = ensure_signed(origin)?; ensure!(!>::contains_key(&who), Error::::Suspended); ensure!(!>::contains_key(&who), Error::::Suspended); @@ -562,13 +723,13 @@ decl_module! { let candidates = >::get(); ensure!(!Self::is_candidate(&candidates, &who), Error::::AlreadyCandidate); let members = >::get(); - ensure!(!Self::is_member(&members ,&who), Error::::AlreadyMember); + ensure!(!Self::is_member(&members, &who), Error::::AlreadyMember); let deposit = T::CandidateDeposit::get(); T::Currency::reserve(&who, deposit)?; Self::put_bid(bids, &who, value.clone(), BidKind::Deposit(deposit)); - Self::deposit_event(RawEvent::Bid(who, value)); + Self::deposit_event(Event::::Bid(who, value)); Ok(()) } @@ -591,12 +752,12 @@ decl_module! 
{ /// /// Total Complexity: O(B + X) /// # - #[weight = T::BlockWeights::get().max_block / 10] - pub fn unbid(origin, pos: u32) -> DispatchResult { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn unbid(origin: OriginFor, pos: u32) -> DispatchResult { let who = ensure_signed(origin)?; let pos = pos as usize; - >::mutate(|b| + >::mutate(|b| { if pos < b.len() && b[pos].who == who { // Either unreserve the deposit or free up the vouching member. // In neither case can we do much if the action isn't completable, but there's @@ -605,17 +766,17 @@ decl_module! { BidKind::Deposit(deposit) => { let err_amount = T::Currency::unreserve(&who, deposit); debug_assert!(err_amount.is_zero()); - } + }, BidKind::Vouch(voucher, _) => { >::remove(&voucher); - } + }, } - Self::deposit_event(RawEvent::Unbid(who)); + Self::deposit_event(Event::::Unbid(who)); Ok(()) } else { Err(Error::::BadPosition)? } - ) + }) } /// As a member, vouch for someone to join society by placing a bid on their behalf. @@ -647,13 +808,14 @@ decl_module! { /// - One storage read to retrieve all current candidates. O(C) /// - Storage Writes: /// - One storage write to insert vouching status to the member. O(1) - /// - One storage mutate to add a new bid to the vector O(B) (TODO: possible optimization w/ read) + /// - One storage mutate to add a new bid to the vector O(B) (TODO: possible optimization + /// w/ read) /// - Up to one storage removal if bid.len() > MAX_BID_COUNT. O(1) /// - Notable Computation: /// - O(log M) search to check sender is a member. /// - O(B + C + log M) search to check user is not already a part of society. /// - O(log B) search to insert the new bid sorted. - /// - External Module Operations: + /// - External Pallet Operations: /// - One balance reserve operation. O(X) /// - Up to one balance unreserve operation if bids.len() > MAX_BID_COUNT. /// - Events: @@ -662,8 +824,13 @@ decl_module! 
{ /// /// Total Complexity: O(M + B + C + logM + logB + X) /// # - #[weight = T::BlockWeights::get().max_block / 10] - pub fn vouch(origin, who: T::AccountId, value: BalanceOf, tip: BalanceOf) -> DispatchResult { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn vouch( + origin: OriginFor, + who: T::AccountId, + value: BalanceOf, + tip: BalanceOf, + ) -> DispatchResult { let voucher = ensure_signed(origin)?; // Check user is not suspended. ensure!(!>::contains_key(&who), Error::::Suspended); @@ -682,7 +849,7 @@ decl_module! { >::insert(&voucher, VouchingStatus::Vouching); Self::put_bid(bids, &who, value.clone(), BidKind::Vouch(voucher.clone(), tip)); - Self::deposit_event(RawEvent::Vouch(who, value, voucher)); + Self::deposit_event(Event::::Vouch(who, value, voucher)); Ok(()) } @@ -703,23 +870,26 @@ decl_module! { /// /// Total Complexity: O(B) /// # - #[weight = T::BlockWeights::get().max_block / 10] - pub fn unvouch(origin, pos: u32) -> DispatchResult { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn unvouch(origin: OriginFor, pos: u32) -> DispatchResult { let voucher = ensure_signed(origin)?; - ensure!(Self::vouching(&voucher) == Some(VouchingStatus::Vouching), Error::::NotVouching); + ensure!( + Self::vouching(&voucher) == Some(VouchingStatus::Vouching), + Error::::NotVouching + ); let pos = pos as usize; - >::mutate(|b| + >::mutate(|b| { if pos < b.len() { b[pos].kind.check_voucher(&voucher)?; >::remove(&voucher); let who = b.remove(pos).who; - Self::deposit_event(RawEvent::Unvouch(who)); + Self::deposit_event(Event::::Unvouch(who)); Ok(()) } else { Err(Error::::BadPosition)? } - ) + }) } /// As a member, vote on a candidate. @@ -728,8 +898,8 @@ decl_module! { /// /// Parameters: /// - `candidate`: The candidate that the member would like to bid on. - /// - `approve`: A boolean which says if the candidate should be - /// approved (`true`) or rejected (`false`). 
+ /// - `approve`: A boolean which says if the candidate should be approved (`true`) or + /// rejected (`false`). /// /// # /// Key: C (len of candidates), M (len of members) @@ -741,8 +911,12 @@ decl_module! { /// /// Total Complexity: O(M + logM + C) /// # - #[weight = T::BlockWeights::get().max_block / 10] - pub fn vote(origin, candidate: ::Source, approve: bool) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn vote( + origin: OriginFor, + candidate: ::Source, + approve: bool, + ) -> DispatchResult { let voter = ensure_signed(origin)?; let candidate = T::Lookup::lookup(candidate)?; let candidates = >::get(); @@ -753,7 +927,8 @@ decl_module! { let vote = if approve { Vote::Approve } else { Vote::Reject }; >::insert(&candidate, &voter, vote); - Self::deposit_event(RawEvent::Vote(candidate, voter, approve)); + Self::deposit_event(Event::::Vote(candidate, voter, approve)); + Ok(()) } /// As a member, vote on the defender. @@ -772,8 +947,8 @@ decl_module! { /// /// Total Complexity: O(M + logM) /// # - #[weight = T::BlockWeights::get().max_block / 10] - pub fn defender_vote(origin, approve: bool) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn defender_vote(origin: OriginFor, approve: bool) -> DispatchResult { let voter = ensure_signed(origin)?; let members = >::get(); ensure!(Self::is_member(&members, &voter), Error::::NotMember); @@ -781,12 +956,14 @@ decl_module! { let vote = if approve { Vote::Approve } else { Vote::Reject }; >::insert(&voter, vote); - Self::deposit_event(RawEvent::DefenderVote(voter, approve)); + Self::deposit_event(Event::::DefenderVote(voter, approve)); + Ok(()) } /// Transfer the first matured payout for the sender and remove it from the records. /// - /// NOTE: This extrinsic needs to be called multiple times to claim multiple matured payouts. + /// NOTE: This extrinsic needs to be called multiple times to claim multiple matured + /// payouts. 
/// /// Payment: The member will receive a payment equal to their first matured /// payout to their free balance. @@ -804,8 +981,8 @@ decl_module! { /// /// Total Complexity: O(M + logM + P + X) /// # - #[weight = T::BlockWeights::get().max_block / 10] - pub fn payout(origin) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn payout(origin: OriginFor) -> DispatchResult { let who = ensure_signed(origin)?; let members = >::get(); @@ -813,7 +990,7 @@ decl_module! { let mut payouts = >::get(&who); if let Some((when, amount)) = payouts.first() { - if when <= &>::block_number() { + if when <= &>::block_number() { T::Currency::transfer(&Self::payouts(), &who, *amount, AllowDeath)?; payouts.remove(0); if payouts.is_empty() { @@ -830,7 +1007,7 @@ decl_module! { /// Found the society. /// /// This is done as a discrete action in order to allow for the - /// module to be included into a running chain and can only be done once. + /// pallet to be included into a running chain and can only be done once. /// /// The dispatch origin for this call must be from the _FounderSetOrigin_. /// @@ -846,18 +1023,24 @@ decl_module! { /// /// Total Complexity: O(1) /// # - #[weight = T::BlockWeights::get().max_block / 10] - fn found(origin, founder: T::AccountId, max_members: u32, rules: Vec) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn found( + origin: OriginFor, + founder: T::AccountId, + max_members: u32, + rules: Vec, + ) -> DispatchResult { T::FounderSetOrigin::ensure_origin(origin)?; ensure!(!>::exists(), Error::::AlreadyFounded); ensure!(max_members > 1, Error::::MaxMembers); // This should never fail in the context of this function... - >::put(max_members); + >::put(max_members); Self::add_member(&founder)?; >::put(&founder); >::put(&founder); Rules::::put(T::Hashing::hash(&rules)); - Self::deposit_event(RawEvent::Founded(founder)); + Self::deposit_event(Event::::Founded(founder)); + Ok(()) } /// Annul the founding of the society. 
@@ -873,8 +1056,8 @@ decl_module! { /// /// Total Complexity: O(1) /// # - #[weight = T::BlockWeights::get().max_block / 10] - fn unfound(origin) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn unfound(origin: OriginFor) -> DispatchResult { let founder = ensure_signed(origin)?; ensure!(Founder::::get() == Some(founder.clone()), Error::::NotFounder); ensure!(Head::::get() == Some(founder.clone()), Error::::NotHead); @@ -885,7 +1068,8 @@ decl_module! { Rules::::kill(); Candidates::::kill(); SuspendedCandidates::::remove_all(None); - Self::deposit_event(RawEvent::Unfounded(founder)); + Self::deposit_event(Event::::Unfounded(founder)); + Ok(()) } /// Allow suspension judgement origin to make judgement on a suspended member. @@ -900,13 +1084,14 @@ decl_module! { /// /// Parameters: /// - `who` - The suspended member to be judged. - /// - `forgive` - A boolean representing whether the suspension judgement origin - /// forgives (`true`) or rejects (`false`) a suspended member. + /// - `forgive` - A boolean representing whether the suspension judgement origin forgives + /// (`true`) or rejects (`false`) a suspended member. /// /// # /// Key: B (len of bids), M (len of members) /// - One storage read to check `who` is a suspended member. O(1) - /// - Up to one storage write O(M) with O(log M) binary search to add a member back to society. + /// - Up to one storage write O(M) with O(log M) binary search to add a member back to + /// society. /// - Up to 3 storage removals O(1) to clean up a removed member. /// - Up to one storage write O(B) with O(B) search to remove vouched bid from bids. /// - Up to one additional event if unvouch takes place. @@ -915,8 +1100,12 @@ decl_module! 
{ /// /// Total Complexity: O(M + logM + B) /// # - #[weight = T::BlockWeights::get().max_block / 10] - fn judge_suspended_member(origin, who: T::AccountId, forgive: bool) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn judge_suspended_member( + origin: OriginFor, + who: T::AccountId, + forgive: bool, + ) -> DispatchResult { T::SuspensionJudgementOrigin::ensure_origin(origin)?; ensure!(>::contains_key(&who), Error::::NotSuspended); @@ -936,14 +1125,15 @@ decl_module! { if let Some(pos) = bids.iter().position(|b| b.kind.check_voucher(&who).is_ok()) { // Remove the bid, and emit an event let vouched = bids.remove(pos).who; - Self::deposit_event(RawEvent::Unvouch(vouched)); + Self::deposit_event(Event::::Unvouch(vouched)); } ); } } >::remove(&who); - Self::deposit_event(RawEvent::SuspendedMemberJudgement(who, forgive)); + Self::deposit_event(Event::::SuspendedMemberJudgement(who, forgive)); + Ok(()) } /// Allow suspended judgement origin to make judgement on a suspended candidate. @@ -986,8 +1176,12 @@ decl_module! { /// /// Total Complexity: O(M + logM + B + X) /// # - #[weight = T::BlockWeights::get().max_block / 10] - fn judge_suspended_candidate(origin, who: T::AccountId, judgement: Judgement) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn judge_suspended_candidate( + origin: OriginFor, + who: T::AccountId, + judgement: Judgement, + ) -> DispatchResult { T::SuspensionJudgementOrigin::ensure_origin(origin)?; if let Some((value, kind)) = >::get(&who) { match judgement { @@ -1001,29 +1195,35 @@ decl_module! 
{ // Reduce next pot by payout >::put(pot - value); // Add payout for new candidate - let maturity = >::block_number() - + Self::lock_duration(Self::members().len() as u32); + let maturity = >::block_number() + + Self::lock_duration(Self::members().len() as u32); Self::pay_accepted_candidate(&who, value, kind, maturity); - } + }, Judgement::Reject => { // Founder has rejected this candidate match kind { BidKind::Deposit(deposit) => { // Slash deposit and move it to the society account - let res = T::Currency::repatriate_reserved(&who, &Self::account_id(), deposit, BalanceStatus::Free); + let res = T::Currency::repatriate_reserved( + &who, + &Self::account_id(), + deposit, + BalanceStatus::Free, + ); debug_assert!(res.is_ok()); - } + }, BidKind::Vouch(voucher, _) => { // Ban the voucher from vouching again >::insert(&voucher, VouchingStatus::Banned); - } + }, } - } + }, Judgement::Rebid => { - // Founder has taken no judgement, and candidate is placed back into the pool. + // Founder has taken no judgement, and candidate is placed back into the + // pool. let bids = >::get(); Self::put_bid(bids, &who, value, kind); - } + }, } // Remove suspended candidate @@ -1031,6 +1231,7 @@ decl_module! { } else { Err(Error::::NotSuspended)? } + Ok(()) } /// Allows root origin to change the maximum number of members in society. @@ -1047,137 +1248,24 @@ decl_module! 
{ /// /// Total Complexity: O(1) /// # - #[weight = T::BlockWeights::get().max_block / 10] - fn set_max_members(origin, max: u32) { + #[pallet::weight(T::BlockWeights::get().max_block / 10)] + pub fn set_max_members(origin: OriginFor, max: u32) -> DispatchResult { ensure_root(origin)?; ensure!(max > 1, Error::::MaxMembers); - MaxMembers::::put(max); - Self::deposit_event(RawEvent::NewMaxMembers(max)); - } - - fn on_initialize(n: T::BlockNumber) -> Weight { - let mut members = vec![]; - - let mut weight = 0; - let weights = T::BlockWeights::get(); - - // Run a candidate/membership rotation - if (n % T::RotationPeriod::get()).is_zero() { - members = >::get(); - Self::rotate_period(&mut members); - - weight += weights.max_block / 20; - } - - // Run a challenge rotation - if (n % T::ChallengePeriod::get()).is_zero() { - // Only read members if not already read. - if members.is_empty() { - members = >::get(); - } - Self::rotate_challenge(&mut members); - - weight += weights.max_block / 20; - } - - weight + MaxMembers::::put(max); + Self::deposit_event(Event::::NewMaxMembers(max)); + Ok(()) } } } -decl_error! { - /// Errors for this module. - pub enum Error for Module, I: Instance> { - /// An incorrect position was provided. - BadPosition, - /// User is not a member. - NotMember, - /// User is already a member. - AlreadyMember, - /// User is suspended. - Suspended, - /// User is not suspended. - NotSuspended, - /// Nothing to payout. - NoPayout, - /// Society already founded. - AlreadyFounded, - /// Not enough in pot to accept candidate. - InsufficientPot, - /// Member is already vouching or banned from vouching again. - AlreadyVouching, - /// Member is not vouching. - NotVouching, - /// Cannot remove the head of the chain. - Head, - /// Cannot remove the founder. - Founder, - /// User has already made a bid. - AlreadyBid, - /// User is already a candidate. - AlreadyCandidate, - /// User is not a candidate. - NotCandidate, - /// Too many members in the society. 
- MaxMembers, - /// The caller is not the founder. - NotFounder, - /// The caller is not the head. - NotHead, - } -} - -decl_event! { - /// Events for this module. - pub enum Event where - AccountId = ::AccountId, - Balance = BalanceOf - { - /// The society is founded by the given identity. \[founder\] - Founded(AccountId), - /// A membership bid just happened. The given account is the candidate's ID and their offer - /// is the second. \[candidate_id, offer\] - Bid(AccountId, Balance), - /// A membership bid just happened by vouching. The given account is the candidate's ID and - /// their offer is the second. The vouching party is the third. \[candidate_id, offer, vouching\] - Vouch(AccountId, Balance, AccountId), - /// A \[candidate\] was dropped (due to an excess of bids in the system). - AutoUnbid(AccountId), - /// A \[candidate\] was dropped (by their request). - Unbid(AccountId), - /// A \[candidate\] was dropped (by request of who vouched for them). - Unvouch(AccountId), - /// A group of candidates have been inducted. The batch's primary is the first value, the - /// batch in full is the second. \[primary, candidates\] - Inducted(AccountId, Vec), - /// A suspended member has been judged. \[who, judged\] - SuspendedMemberJudgement(AccountId, bool), - /// A \[candidate\] has been suspended - CandidateSuspended(AccountId), - /// A \[member\] has been suspended - MemberSuspended(AccountId), - /// A \[member\] has been challenged - Challenged(AccountId), - /// A vote has been placed \[candidate, voter, vote\] - Vote(AccountId, AccountId, bool), - /// A vote has been placed for a defending member \[voter, vote\] - DefenderVote(AccountId, bool), - /// A new \[max\] member count has been set - NewMaxMembers(u32), - /// Society is unfounded. \[founder\] - Unfounded(AccountId), - /// Some funds were deposited into the society account. \[value\] - Deposit(Balance), - } -} - /// Simple ensure origin struct to filter for the founder account. 
pub struct EnsureFounder(sp_std::marker::PhantomData); impl EnsureOrigin for EnsureFounder { type Success = T::AccountId; fn try_origin(o: T::Origin) -> Result { o.into().and_then(|o| match (o, Founder::::get()) { - (system::RawOrigin::Signed(ref who), Some(ref f)) if who == f => Ok(who.clone()), + (frame_system::RawOrigin::Signed(ref who), Some(ref f)) if who == f => Ok(who.clone()), (r, _) => Err(T::Origin::from(r)), }) } @@ -1185,7 +1273,7 @@ impl EnsureOrigin for EnsureFounder { #[cfg(feature = "runtime-benchmarks")] fn successful_origin() -> T::Origin { let founder = Founder::::get().expect("society founder should exist"); - T::Origin::from(system::RawOrigin::Signed(founder)) + T::Origin::from(frame_system::RawOrigin::Signed(founder)) } } @@ -1203,7 +1291,7 @@ fn pick_usize<'a, R: RngCore>(rng: &mut R, max: usize) -> usize { (rng.next_u32() % (max as u32 + 1)) as usize } -impl, I: Instance> Module { +impl, I: 'static> Pallet { /// Puts a bid into storage ordered by smallest to largest value. /// Allows a maximum of 1000 bids in queue, removing largest value people first. fn put_bid( @@ -1251,7 +1339,7 @@ impl, I: Instance> Module { >::remove(&voucher); }, } - Self::deposit_event(RawEvent::AutoUnbid(popped)); + Self::deposit_event(Event::::AutoUnbid(popped)); } >::put(bids); @@ -1281,7 +1369,7 @@ impl, I: Instance> Module { /// Can fail when `MaxMember` limit is reached, but has no side-effects. fn add_member(who: &T::AccountId) -> DispatchResult { let mut members = >::get(); - ensure!(members.len() < MaxMembers::::get() as usize, Error::::MaxMembers); + ensure!(members.len() < MaxMembers::::get() as usize, Error::::MaxMembers); match members.binary_search(who) { // Add the new member Err(i) => { @@ -1338,8 +1426,8 @@ impl, I: Instance> Module { // out of society. 
members.reserve(candidates.len()); - let maturity = - >::block_number() + Self::lock_duration(members.len() as u32); + let maturity = >::block_number() + + Self::lock_duration(members.len() as u32); let mut rewardees = Vec::new(); let mut total_approvals = 0; @@ -1416,7 +1504,7 @@ impl, I: Instance> Module { } else { // Suspend Candidate >::insert(&candidate, (value, kind)); - Self::deposit_event(RawEvent::CandidateSuspended(candidate)); + Self::deposit_event(Event::::CandidateSuspended(candidate)); None } }) @@ -1485,7 +1573,7 @@ impl, I: Instance> Module { >::put(&primary); T::MembershipChanged::change_members_sorted(&accounts, &[], &members); - Self::deposit_event(RawEvent::Inducted(primary, accounts)); + Self::deposit_event(Event::::Inducted(primary, accounts)); } // Bump the pot by at most PeriodSpend, but less if there's not very much left in our @@ -1550,7 +1638,7 @@ impl, I: Instance> Module { if Self::remove_member(&who).is_ok() { >::insert(who, true); >::remove(who); - Self::deposit_event(RawEvent::MemberSuspended(who.clone())); + Self::deposit_event(Event::::MemberSuspended(who.clone())); } } @@ -1628,7 +1716,7 @@ impl, I: Instance> Module { let chosen = pick_item(&mut rng, &members[1..members.len() - 1]) .expect("exited if members empty; qed"); >::put(&chosen); - Self::deposit_event(RawEvent::Challenged(chosen.clone())); + Self::deposit_event(Event::::Challenged(chosen.clone())); } else { >::kill(); } @@ -1668,7 +1756,7 @@ impl, I: Instance> Module { members_len: usize, pot: BalanceOf, ) -> Vec>> { - let max_members = MaxMembers::::get() as usize; + let max_members = MaxMembers::::get() as usize; let mut max_selections: usize = (T::MaxCandidateIntake::get() as usize).min(max_members.saturating_sub(members_len)); @@ -1725,13 +1813,13 @@ impl, I: Instance> Module { } } -impl OnUnbalanced> for Module { - fn on_nonzero_unbalanced(amount: NegativeImbalanceOf) { +impl, I: 'static> OnUnbalanced> for Pallet { + fn on_nonzero_unbalanced(amount: 
NegativeImbalanceOf) { let numeric_amount = amount.peek(); // Must resolve into existing but better to be safe. let _ = T::Currency::resolve_creating(&Self::account_id(), amount); - Self::deposit_event(RawEvent::Deposit(numeric_amount)); + Self::deposit_event(Event::::Deposit(numeric_amount)); } } diff --git a/frame/society/src/mock.rs b/frame/society/src/mock.rs index 38c2586323135..9356c083f2331 100644 --- a/frame/society/src/mock.rs +++ b/frame/society/src/mock.rs @@ -20,10 +20,7 @@ use super::*; use crate as pallet_society; -use frame_support::{ - ord_parameter_types, parameter_types, - traits::{OnFinalize, OnInitialize}, -}; +use frame_support::{ord_parameter_types, parameter_types}; use frame_support_test::TestRandomness; use frame_system::EnsureSignedBy; use sp_core::H256; From 49e91e2f2390fda92e41726c2fb728cd778e7631 Mon Sep 17 00:00:00 2001 From: Georges Date: Wed, 22 Sep 2021 10:42:57 +0100 Subject: [PATCH 23/33] Adding storage_info to pallet grandpa (#9817) Converting some `Vec` to `WeakBoundedVec` Adding `MaxAuthorities` type to bound the `WeakBoundedVec` Co-authored-by: thiolliere --- bin/node-template/runtime/src/lib.rs | 1 + bin/node/runtime/src/lib.rs | 1 + frame/grandpa/src/lib.rs | 48 ++++++++++++++++++++++------ frame/grandpa/src/mock.rs | 2 ++ 4 files changed, 42 insertions(+), 10 deletions(-) diff --git a/bin/node-template/runtime/src/lib.rs b/bin/node-template/runtime/src/lib.rs index eecc93e166666..ca6e6b1822d45 100644 --- a/bin/node-template/runtime/src/lib.rs +++ b/bin/node-template/runtime/src/lib.rs @@ -222,6 +222,7 @@ impl pallet_grandpa::Config for Runtime { type HandleEquivocation = (); type WeightInfo = (); + type MaxAuthorities = MaxAuthorities; } parameter_types! 
{ diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 4cf313f8d26f0..75b80cccd8cb6 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -1035,6 +1035,7 @@ impl pallet_grandpa::Config for Runtime { >; type WeightInfo = (); + type MaxAuthorities = MaxAuthorities; } parameter_types! { diff --git a/frame/grandpa/src/lib.rs b/frame/grandpa/src/lib.rs index cd75deea770b4..687207151f4f4 100644 --- a/frame/grandpa/src/lib.rs +++ b/frame/grandpa/src/lib.rs @@ -33,7 +33,7 @@ pub use sp_finality_grandpa as fg_primitives; use sp_std::prelude::*; -use codec::{self as codec, Decode, Encode}; +use codec::{self as codec, Decode, Encode, MaxEncodedLen}; pub use fg_primitives::{AuthorityId, AuthorityList, AuthorityWeight, VersionedAuthorityList}; use fg_primitives::{ ConsensusLog, EquivocationProof, ScheduledChange, SetId, GRANDPA_AUTHORITIES_KEY, @@ -41,9 +41,11 @@ use fg_primitives::{ }; use frame_support::{ dispatch::DispatchResultWithPostInfo, + pallet_prelude::Get, storage, traits::{KeyOwnerProofSystem, OneSessionHandler, StorageVersion}, weights::{Pays, Weight}, + WeakBoundedVec, }; use sp_runtime::{generic::DigestItem, traits::Zero, DispatchResult, KeyTypeId}; use sp_session::{GetSessionNumber, GetValidatorCount}; @@ -81,6 +83,7 @@ pub mod pallet { #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] #[pallet::storage_version(STORAGE_VERSION)] + #[pallet::generate_storage_info] pub struct Pallet(_); #[pallet::config] @@ -119,6 +122,10 @@ pub mod pallet { /// Weights for this pallet. 
type WeightInfo: WeightInfo; + + /// Max Authorities in use + #[pallet::constant] + type MaxAuthorities: Get; } #[pallet::hooks] @@ -133,13 +140,13 @@ pub mod pallet { median, ScheduledChange { delay: pending_change.delay, - next_authorities: pending_change.next_authorities.clone(), + next_authorities: pending_change.next_authorities.to_vec(), }, )) } else { Self::deposit_log(ConsensusLog::ScheduledChange(ScheduledChange { delay: pending_change.delay, - next_authorities: pending_change.next_authorities.clone(), + next_authorities: pending_change.next_authorities.to_vec(), })); } } @@ -147,7 +154,9 @@ pub mod pallet { // enact the change if we've reached the enacting block if block_number == pending_change.scheduled_at + pending_change.delay { Self::set_grandpa_authorities(&pending_change.next_authorities); - Self::deposit_event(Event::NewAuthorities(pending_change.next_authorities)); + Self::deposit_event(Event::NewAuthorities( + pending_change.next_authorities.to_vec(), + )); >::kill(); } } @@ -291,7 +300,8 @@ pub mod pallet { /// Pending change: (signaled at, scheduled change). #[pallet::storage] #[pallet::getter(fn pending_change)] - pub(super) type PendingChange = StorageValue<_, StoredPendingChange>; + pub(super) type PendingChange = + StorageValue<_, StoredPendingChange>; /// next block number where we can force a change. #[pallet::storage] @@ -355,15 +365,25 @@ pub trait WeightInfo { fn note_stalled() -> Weight; } +/// Bounded version of `AuthorityList`, `Limit` being the bound +pub type BoundedAuthorityList = WeakBoundedVec<(AuthorityId, AuthorityWeight), Limit>; + /// A stored pending change. -#[derive(Encode, Decode, TypeInfo)] -pub struct StoredPendingChange { +/// `Limit` is the bound for `next_authorities` +#[derive(Encode, Decode, TypeInfo, MaxEncodedLen)] +#[codec(mel_bound(Limit: Get))] +#[scale_info(skip_type_params(Limit))] +pub struct StoredPendingChange +where + Limit: Get, + N: MaxEncodedLen, +{ /// The block number this was scheduled at. 
pub scheduled_at: N, /// The delay in blocks until it will be applied. pub delay: N, - /// The next authority set. - pub next_authorities: AuthorityList, + /// The next authority set, weakly bounded in size by `Limit`. + pub next_authorities: BoundedAuthorityList, /// If defined it means the change was forced and the given block number /// indicates the median last finalized block when the change was signaled. pub forced: Option, @@ -372,7 +392,7 @@ pub struct StoredPendingChange { /// Current state of the GRANDPA authority set. State transitions must happen in /// the same order of states defined below, e.g. `Paused` implies a prior /// `PendingPause`. -#[derive(Decode, Encode, TypeInfo)] +#[derive(Decode, Encode, TypeInfo, MaxEncodedLen)] #[cfg_attr(test, derive(Debug, PartialEq))] pub enum StoredState { /// The current authority set is live, and GRANDPA is enabled. @@ -465,6 +485,14 @@ impl Pallet { >::put(scheduled_at + in_blocks * 2u32.into()); } + let next_authorities = WeakBoundedVec::<_, T::MaxAuthorities>::force_from( + next_authorities, + Some( + "Warning: The number of authorities given is too big. \ + A runtime configuration adjustment may be needed.", + ), + ); + >::put(StoredPendingChange { delay: in_blocks, scheduled_at, diff --git a/frame/grandpa/src/mock.rs b/frame/grandpa/src/mock.rs index 2f1b2630b2241..4e5e44ce36e7a 100644 --- a/frame/grandpa/src/mock.rs +++ b/frame/grandpa/src/mock.rs @@ -230,6 +230,7 @@ impl pallet_offences::Config for Test { parameter_types! 
{ pub const ReportLongevity: u64 = BondingDuration::get() as u64 * SessionsPerEra::get() as u64 * Period::get(); + pub const MaxAuthorities: u32 = 100; } impl Config for Test { @@ -250,6 +251,7 @@ impl Config for Test { super::EquivocationHandler; type WeightInfo = (); + type MaxAuthorities = MaxAuthorities; } pub fn grandpa_log(log: ConsensusLog) -> DigestItem { From 136120674703bf5541ee2368ebb330d803b61cce Mon Sep 17 00:00:00 2001 From: Kian Paimani <5588131+kianenigma@users.noreply.github.com> Date: Wed, 22 Sep 2021 15:38:21 +0100 Subject: [PATCH 24/33] Use the automated call decode weight for `submit_unsigned` benchmark (#9766) * Use the automated call decode weight for benchmark * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 
--pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * reduce the boundaries * Fix * Fox * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * reduce the numbers even less * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * Fix a few things * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * cargo run --quiet --release --features=runtime-benchmarks --manifest-path=bin/node/cli/Cargo.toml -- benchmark --chain=dev --steps=50 --repeat=20 --pallet=pallet_election_provider_multi_phase --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --output=./frame/election-provider-multi-phase/src/weights.rs --template=./.maintain/frame-weight-template.hbs * update comment Co-authored-by: Parity Bot --- bin/node/runtime/src/lib.rs | 19 +-- .../src/benchmarking.rs | 33 +---- 
.../election-provider-multi-phase/src/lib.rs | 4 +- .../election-provider-multi-phase/src/mock.rs | 4 +- .../src/unsigned.rs | 2 +- .../src/weights.rs | 132 ++++++++++-------- 6 files changed, 94 insertions(+), 100 deletions(-) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 75b80cccd8cb6..af465fc0ffc1d 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -576,17 +576,18 @@ sp_npos_elections::generate_solution_type!( pub const MAX_NOMINATIONS: u32 = ::LIMIT as u32; -/// The numbers configured here should always be more than the the maximum limits of staking pallet -/// to ensure election snapshot will not run out of memory. +/// The numbers configured here could always be more than the the maximum limits of staking pallet +/// to ensure election snapshot will not run out of memory. For now, we set them to smaller values +/// since the staking is bounded and the weight pipeline takes hours for this single pallet. pub struct BenchmarkConfig; impl pallet_election_provider_multi_phase::BenchmarkingConfig for BenchmarkConfig { - const VOTERS: [u32; 2] = [5_000, 10_000]; - const TARGETS: [u32; 2] = [1_000, 2_000]; - const ACTIVE_VOTERS: [u32; 2] = [1000, 4_000]; - const DESIRED_TARGETS: [u32; 2] = [400, 800]; - const SNAPSHOT_MAXIMUM_VOTERS: u32 = 25_000; - const MINER_MAXIMUM_VOTERS: u32 = 15_000; - const MAXIMUM_TARGETS: u32 = 2000; + const VOTERS: [u32; 2] = [1000, 2000]; + const TARGETS: [u32; 2] = [500, 1000]; + const ACTIVE_VOTERS: [u32; 2] = [500, 800]; + const DESIRED_TARGETS: [u32; 2] = [200, 400]; + const SNAPSHOT_MAXIMUM_VOTERS: u32 = 1000; + const MINER_MAXIMUM_VOTERS: u32 = 1000; + const MAXIMUM_TARGETS: u32 = 300; } /// Maximum number of iterations for balancing that will be executed in the embedded OCW diff --git a/frame/election-provider-multi-phase/src/benchmarking.rs b/frame/election-provider-multi-phase/src/benchmarking.rs index fb5adda52e166..a5bb0d6351c15 100644 --- 
a/frame/election-provider-multi-phase/src/benchmarking.rs +++ b/frame/election-provider-multi-phase/src/benchmarking.rs @@ -243,10 +243,10 @@ frame_benchmarking::benchmarks! { } create_snapshot_internal { - // number of votes in snapshot. Fixed to maximum. - let v = T::BenchmarkingConfig::SNAPSHOT_MAXIMUM_VOTERS; - // number of targets in snapshot. Fixed to maximum. - let t = T::BenchmarkingConfig::MAXIMUM_TARGETS; + // number of votes in snapshot. + let v in (T::BenchmarkingConfig::VOTERS[0]) .. T::BenchmarkingConfig::VOTERS[1]; + // number of targets in snapshot. + let t in (T::BenchmarkingConfig::TARGETS[0]) .. T::BenchmarkingConfig::TARGETS[1]; // we don't directly need the data-provider to be populated, but it is just easy to use it. set_up_data_provider::(v, t); @@ -351,24 +351,8 @@ frame_benchmarking::benchmarks! { assert!(>::queued_solution().is_none()); >::put(Phase::Unsigned((true, 1u32.into()))); - // encode the most significant storage item that needs to be decoded in the dispatch. - let encoded_snapshot = >::snapshot().ok_or("missing snapshot")?.encode(); - let encoded_call = Call::::submit_unsigned { - raw_solution: Box::new(raw_solution.clone()), - witness - }.encode(); - }: { - assert_ok!( - >::submit_unsigned( - RawOrigin::None.into(), - Box::new(raw_solution), - witness, - ) - ); - let _decoded_snap = as Decode>::decode(&mut &*encoded_snapshot) - .expect("decoding should not fail; qed."); - let _decoded_call = as Decode>::decode(&mut &*encoded_call).expect("decoding should not fail; qed."); - } verify { + }: _(RawOrigin::None, Box::new(raw_solution), witness) + verify { assert!(>::queued_solution().is_some()); } @@ -389,13 +373,8 @@ frame_benchmarking::benchmarks! { assert_eq!(raw_solution.solution.voter_count() as u32, a); assert_eq!(raw_solution.solution.unique_targets().len() as u32, d); - - // encode the most significant storage item that needs to be decoded in the dispatch. 
- let encoded_snapshot = >::snapshot().ok_or("snapshot missing")?.encode(); }: { assert_ok!(>::feasibility_check(raw_solution, ElectionCompute::Unsigned)); - let _decoded_snap = as Decode>::decode(&mut &*encoded_snapshot) - .expect("decoding should not fail; qed."); } // NOTE: this weight is not used anywhere, but the fact that it should succeed when execution in diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index e83c49433e2bb..6b0329afc0d77 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -1317,8 +1317,10 @@ impl Pallet { let (targets, voters, desired_targets) = Self::create_snapshot_external()?; // ..therefore we only measure the weight of this and add it. + let internal_weight = + T::WeightInfo::create_snapshot_internal(voters.len() as u32, targets.len() as u32); Self::create_snapshot_internal(targets, voters, desired_targets); - Self::register_weight(T::WeightInfo::create_snapshot_internal()); + Self::register_weight(internal_weight); Ok(()) } diff --git a/frame/election-provider-multi-phase/src/mock.rs b/frame/election-provider-multi-phase/src/mock.rs index 0d563955595a8..1a65316be1f10 100644 --- a/frame/election-provider-multi-phase/src/mock.rs +++ b/frame/election-provider-multi-phase/src/mock.rs @@ -304,11 +304,11 @@ impl multi_phase::weights::WeightInfo for DualMockWeightInfo { <() as multi_phase::weights::WeightInfo>::on_initialize_nothing() } } - fn create_snapshot_internal() -> Weight { + fn create_snapshot_internal(v: u32, t: u32) -> Weight { if MockWeightInfo::get() { Zero::zero() } else { - <() as multi_phase::weights::WeightInfo>::create_snapshot_internal() + <() as multi_phase::weights::WeightInfo>::create_snapshot_internal(v, t) } } fn on_initialize_open_signed() -> Weight { diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 
af0b79177d86c..31ad502ac076e 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -651,7 +651,7 @@ mod max_weight { fn elect_queued(a: u32, d: u32) -> Weight { unreachable!() } - fn create_snapshot_internal() -> Weight { + fn create_snapshot_internal(v: u32, t: u32) -> Weight { unreachable!() } fn on_initialize_nothing() -> Weight { diff --git a/frame/election-provider-multi-phase/src/weights.rs b/frame/election-provider-multi-phase/src/weights.rs index 262838bcb9e70..4d49f60fabfc3 100644 --- a/frame/election-provider-multi-phase/src/weights.rs +++ b/frame/election-provider-multi-phase/src/weights.rs @@ -18,7 +18,7 @@ //! Autogenerated weights for pallet_election_provider_multi_phase //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2021-08-18, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2021-09-22, STEPS: `50`, REPEAT: 20, LOW RANGE: `[]`, HIGH RANGE: `[]` //! 
EXECUTION: Some(Wasm), WASM-EXECUTION: Compiled, CHAIN: Some("dev"), DB CACHE: 128 // Executed Command: @@ -50,7 +50,7 @@ pub trait WeightInfo { fn on_initialize_open_unsigned() -> Weight; fn finalize_signed_phase_accept_solution() -> Weight; fn finalize_signed_phase_reject_solution() -> Weight; - fn create_snapshot_internal() -> Weight; + fn create_snapshot_internal(v: u32, t: u32, ) -> Weight; fn elect_queued(a: u32, d: u32, ) -> Weight; fn submit(c: u32, ) -> Weight; fn submit_unsigned(v: u32, t: u32, a: u32, d: u32, ) -> Weight; @@ -69,41 +69,45 @@ impl WeightInfo for SubstrateWeight { // Storage: Staking ForceEra (r:1 w:0) // Storage: ElectionProviderMultiPhase CurrentPhase (r:1 w:0) fn on_initialize_nothing() -> Weight { - (23_878_000 as Weight) + (22_784_000 as Weight) .saturating_add(T::DbWeight::get().reads(8 as Weight)) } // Storage: ElectionProviderMultiPhase Round (r:1 w:0) // Storage: ElectionProviderMultiPhase CurrentPhase (r:0 w:1) fn on_initialize_open_signed() -> Weight { - (34_547_000 as Weight) + (32_763_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: ElectionProviderMultiPhase Round (r:1 w:0) // Storage: ElectionProviderMultiPhase CurrentPhase (r:0 w:1) fn on_initialize_open_unsigned() -> Weight { - (33_568_000 as Weight) + (29_117_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) // Storage: ElectionProviderMultiPhase QueuedSolution (r:0 w:1) fn finalize_signed_phase_accept_solution() -> Weight { - (50_596_000 as Weight) + (48_996_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } // Storage: System Account (r:1 w:1) fn finalize_signed_phase_reject_solution() -> Weight { - (33_389_000 as Weight) + (32_508_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as 
Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } // Storage: ElectionProviderMultiPhase SnapshotMetadata (r:0 w:1) // Storage: ElectionProviderMultiPhase DesiredTargets (r:0 w:1) // Storage: ElectionProviderMultiPhase Snapshot (r:0 w:1) - fn create_snapshot_internal() -> Weight { - (8_835_233_000 as Weight) + fn create_snapshot_internal(v: u32, t: u32, ) -> Weight { + (96_001_000 as Weight) + // Standard Error: 1_000 + .saturating_add((307_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 2_000 + .saturating_add((133_000 as Weight).saturating_mul(t as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } // Storage: ElectionProviderMultiPhase SignedSubmissionIndices (r:1 w:1) @@ -116,11 +120,11 @@ impl WeightInfo for SubstrateWeight { // Storage: ElectionProviderMultiPhase Snapshot (r:0 w:1) // Storage: ElectionProviderMultiPhase CurrentPhase (r:0 w:1) fn elect_queued(a: u32, d: u32, ) -> Weight { - (82_395_000 as Weight) - // Standard Error: 1_000 - .saturating_add((1_769_000 as Weight).saturating_mul(a as Weight)) - // Standard Error: 13_000 - .saturating_add((320_000 as Weight).saturating_mul(d as Weight)) + (100_505_000 as Weight) + // Standard Error: 6_000 + .saturating_add((1_665_000 as Weight).saturating_mul(a as Weight)) + // Standard Error: 10_000 + .saturating_add((443_000 as Weight).saturating_mul(d as Weight)) .saturating_add(T::DbWeight::get().reads(6 as Weight)) .saturating_add(T::DbWeight::get().writes(8 as Weight)) } @@ -131,9 +135,9 @@ impl WeightInfo for SubstrateWeight { // Storage: ElectionProviderMultiPhase SignedSubmissionNextIndex (r:1 w:1) // Storage: ElectionProviderMultiPhase SignedSubmissionsMap (r:0 w:1) fn submit(c: u32, ) -> Weight { - (77_368_000 as Weight) - // Standard Error: 9_000 - .saturating_add((369_000 as Weight).saturating_mul(c as Weight)) + (74_088_000 as Weight) + // Standard Error: 59_000 + .saturating_add((187_000 as Weight).saturating_mul(c as Weight)) 
.saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } @@ -146,14 +150,14 @@ impl WeightInfo for SubstrateWeight { // Storage: ElectionProviderMultiPhase Snapshot (r:1 w:0) fn submit_unsigned(v: u32, t: u32, a: u32, d: u32, ) -> Weight { (0 as Weight) - // Standard Error: 4_000 - .saturating_add((3_553_000 as Weight).saturating_mul(v as Weight)) - // Standard Error: 23_000 - .saturating_add((35_000 as Weight).saturating_mul(t as Weight)) - // Standard Error: 7_000 - .saturating_add((10_600_000 as Weight).saturating_mul(a as Weight)) - // Standard Error: 59_000 - .saturating_add((6_128_000 as Weight).saturating_mul(d as Weight)) + // Standard Error: 5_000 + .saturating_add((1_970_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 10_000 + .saturating_add((173_000 as Weight).saturating_mul(t as Weight)) + // Standard Error: 18_000 + .saturating_add((9_783_000 as Weight).saturating_mul(a as Weight)) + // Standard Error: 27_000 + .saturating_add((2_224_000 as Weight).saturating_mul(d as Weight)) .saturating_add(T::DbWeight::get().reads(7 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } @@ -161,14 +165,16 @@ impl WeightInfo for SubstrateWeight { // Storage: ElectionProviderMultiPhase DesiredTargets (r:1 w:0) // Storage: ElectionProviderMultiPhase MinimumUntrustedScore (r:1 w:0) // Storage: ElectionProviderMultiPhase Snapshot (r:1 w:0) - fn feasibility_check(v: u32, _t: u32, a: u32, d: u32, ) -> Weight { + fn feasibility_check(v: u32, t: u32, a: u32, d: u32, ) -> Weight { (0 as Weight) // Standard Error: 3_000 - .saturating_add((3_478_000 as Weight).saturating_mul(v as Weight)) - // Standard Error: 6_000 - .saturating_add((8_930_000 as Weight).saturating_mul(a as Weight)) - // Standard Error: 47_000 - .saturating_add((5_199_000 as Weight).saturating_mul(d as Weight)) + .saturating_add((1_910_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 7_000 + 
.saturating_add((111_000 as Weight).saturating_mul(t as Weight)) + // Standard Error: 13_000 + .saturating_add((7_741_000 as Weight).saturating_mul(a as Weight)) + // Standard Error: 19_000 + .saturating_add((1_844_000 as Weight).saturating_mul(d as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) } } @@ -184,41 +190,45 @@ impl WeightInfo for () { // Storage: Staking ForceEra (r:1 w:0) // Storage: ElectionProviderMultiPhase CurrentPhase (r:1 w:0) fn on_initialize_nothing() -> Weight { - (23_878_000 as Weight) + (22_784_000 as Weight) .saturating_add(RocksDbWeight::get().reads(8 as Weight)) } // Storage: ElectionProviderMultiPhase Round (r:1 w:0) // Storage: ElectionProviderMultiPhase CurrentPhase (r:0 w:1) fn on_initialize_open_signed() -> Weight { - (34_547_000 as Weight) + (32_763_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: ElectionProviderMultiPhase Round (r:1 w:0) // Storage: ElectionProviderMultiPhase CurrentPhase (r:0 w:1) fn on_initialize_open_unsigned() -> Weight { - (33_568_000 as Weight) + (29_117_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: System Account (r:1 w:1) // Storage: ElectionProviderMultiPhase QueuedSolution (r:0 w:1) fn finalize_signed_phase_accept_solution() -> Weight { - (50_596_000 as Weight) + (48_996_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } // Storage: System Account (r:1 w:1) fn finalize_signed_phase_reject_solution() -> Weight { - (33_389_000 as Weight) + (32_508_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } // Storage: ElectionProviderMultiPhase SnapshotMetadata (r:0 w:1) // Storage: ElectionProviderMultiPhase DesiredTargets (r:0 w:1) // Storage: 
ElectionProviderMultiPhase Snapshot (r:0 w:1) - fn create_snapshot_internal() -> Weight { - (8_835_233_000 as Weight) + fn create_snapshot_internal(v: u32, t: u32, ) -> Weight { + (96_001_000 as Weight) + // Standard Error: 1_000 + .saturating_add((307_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 2_000 + .saturating_add((133_000 as Weight).saturating_mul(t as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } // Storage: ElectionProviderMultiPhase SignedSubmissionIndices (r:1 w:1) @@ -231,11 +241,11 @@ impl WeightInfo for () { // Storage: ElectionProviderMultiPhase Snapshot (r:0 w:1) // Storage: ElectionProviderMultiPhase CurrentPhase (r:0 w:1) fn elect_queued(a: u32, d: u32, ) -> Weight { - (82_395_000 as Weight) - // Standard Error: 1_000 - .saturating_add((1_769_000 as Weight).saturating_mul(a as Weight)) - // Standard Error: 13_000 - .saturating_add((320_000 as Weight).saturating_mul(d as Weight)) + (100_505_000 as Weight) + // Standard Error: 6_000 + .saturating_add((1_665_000 as Weight).saturating_mul(a as Weight)) + // Standard Error: 10_000 + .saturating_add((443_000 as Weight).saturating_mul(d as Weight)) .saturating_add(RocksDbWeight::get().reads(6 as Weight)) .saturating_add(RocksDbWeight::get().writes(8 as Weight)) } @@ -246,9 +256,9 @@ impl WeightInfo for () { // Storage: ElectionProviderMultiPhase SignedSubmissionNextIndex (r:1 w:1) // Storage: ElectionProviderMultiPhase SignedSubmissionsMap (r:0 w:1) fn submit(c: u32, ) -> Weight { - (77_368_000 as Weight) - // Standard Error: 9_000 - .saturating_add((369_000 as Weight).saturating_mul(c as Weight)) + (74_088_000 as Weight) + // Standard Error: 59_000 + .saturating_add((187_000 as Weight).saturating_mul(c as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } @@ -261,14 +271,14 @@ impl WeightInfo for () { // Storage: ElectionProviderMultiPhase Snapshot (r:1 w:0) fn submit_unsigned(v: u32, t: 
u32, a: u32, d: u32, ) -> Weight { (0 as Weight) - // Standard Error: 4_000 - .saturating_add((3_553_000 as Weight).saturating_mul(v as Weight)) - // Standard Error: 23_000 - .saturating_add((35_000 as Weight).saturating_mul(t as Weight)) - // Standard Error: 7_000 - .saturating_add((10_600_000 as Weight).saturating_mul(a as Weight)) - // Standard Error: 59_000 - .saturating_add((6_128_000 as Weight).saturating_mul(d as Weight)) + // Standard Error: 5_000 + .saturating_add((1_970_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 10_000 + .saturating_add((173_000 as Weight).saturating_mul(t as Weight)) + // Standard Error: 18_000 + .saturating_add((9_783_000 as Weight).saturating_mul(a as Weight)) + // Standard Error: 27_000 + .saturating_add((2_224_000 as Weight).saturating_mul(d as Weight)) .saturating_add(RocksDbWeight::get().reads(7 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } @@ -276,14 +286,16 @@ impl WeightInfo for () { // Storage: ElectionProviderMultiPhase DesiredTargets (r:1 w:0) // Storage: ElectionProviderMultiPhase MinimumUntrustedScore (r:1 w:0) // Storage: ElectionProviderMultiPhase Snapshot (r:1 w:0) - fn feasibility_check(v: u32, _t: u32, a: u32, d: u32, ) -> Weight { + fn feasibility_check(v: u32, t: u32, a: u32, d: u32, ) -> Weight { (0 as Weight) // Standard Error: 3_000 - .saturating_add((3_478_000 as Weight).saturating_mul(v as Weight)) - // Standard Error: 6_000 - .saturating_add((8_930_000 as Weight).saturating_mul(a as Weight)) - // Standard Error: 47_000 - .saturating_add((5_199_000 as Weight).saturating_mul(d as Weight)) + .saturating_add((1_910_000 as Weight).saturating_mul(v as Weight)) + // Standard Error: 7_000 + .saturating_add((111_000 as Weight).saturating_mul(t as Weight)) + // Standard Error: 13_000 + .saturating_add((7_741_000 as Weight).saturating_mul(a as Weight)) + // Standard Error: 19_000 + .saturating_add((1_844_000 as Weight).saturating_mul(d as Weight)) 
.saturating_add(RocksDbWeight::get().reads(4 as Weight)) } } From eded99055e32c1b096993b94b38d3f6e04237c18 Mon Sep 17 00:00:00 2001 From: Georges Date: Wed, 22 Sep 2021 23:49:36 +0100 Subject: [PATCH 25/33] Generate storage info for pallet babe (#9760) * Adding MaxEncodedLen for: * NextConfigDescriptor * AllowedSlots * BabeEpochConfiguration In prepation for adding storage information on pallet babe * Adding stotage_info to pallet babe Refactored UNDER_CONSTRUCTION_SEGMENT_LENGTH to become a runtime parameter MaxSegmentLength * Reinstate Slice as opposed to Vec * Refactoring code to make it neater * Replace `MaxSegmentLength` by `UNDER_CONSTRUCTION_SEGMENT_LENGTH` Stop exposing the bounds to the runtime * Removing extra line Co-authored-by: thiolliere --- bin/node/runtime/src/lib.rs | 3 +- frame/babe/src/lib.rs | 86 ++++++++++++++++++------ frame/babe/src/mock.rs | 3 + frame/babe/src/tests.rs | 2 +- primitives/consensus/babe/src/digests.rs | 6 +- primitives/consensus/babe/src/lib.rs | 6 +- test-utils/runtime/src/lib.rs | 4 +- 7 files changed, 80 insertions(+), 30 deletions(-) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index af465fc0ffc1d..587a54ebd0d9b 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -376,6 +376,7 @@ impl pallet_babe::Config for Runtime { pallet_babe::EquivocationHandler; type WeightInfo = (); + type MaxAuthorities = MaxAuthorities; } parameter_types! { @@ -1441,7 +1442,7 @@ impl_runtime_apis! 
{ slot_duration: Babe::slot_duration(), epoch_length: EpochDuration::get(), c: BABE_GENESIS_EPOCH_CONFIG.c, - genesis_authorities: Babe::authorities(), + genesis_authorities: Babe::authorities().to_vec(), randomness: Babe::randomness(), allowed_slots: BABE_GENESIS_EPOCH_CONFIG.allowed_slots, } diff --git a/frame/babe/src/lib.rs b/frame/babe/src/lib.rs index b39074bb3f057..4ccfdf6c13fe0 100644 --- a/frame/babe/src/lib.rs +++ b/frame/babe/src/lib.rs @@ -25,11 +25,13 @@ use codec::{Decode, Encode}; use frame_support::{ dispatch::DispatchResultWithPostInfo, traits::{ - DisabledValidators, FindAuthor, Get, KeyOwnerProofSystem, OnTimestampSet, OneSessionHandler, + ConstU32, DisabledValidators, FindAuthor, Get, KeyOwnerProofSystem, OnTimestampSet, + OneSessionHandler, }, weights::{Pays, Weight}, + BoundedVec, WeakBoundedVec, }; -use sp_application_crypto::Public; +use sp_application_crypto::{Public, TryFrom}; use sp_runtime::{ generic::DigestItem, traits::{IsMember, One, SaturatedConversion, Saturating, Zero}, @@ -100,7 +102,7 @@ impl EpochChangeTrigger for SameAuthoritiesForever { } } -const UNDER_CONSTRUCTION_SEGMENT_LENGTH: usize = 256; +const UNDER_CONSTRUCTION_SEGMENT_LENGTH: u32 = 256; type MaybeRandomness = Option; @@ -113,6 +115,7 @@ pub mod pallet { /// The BABE Pallet #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] + #[pallet::generate_storage_info] pub struct Pallet(_); #[pallet::config] @@ -169,6 +172,10 @@ pub mod pallet { type HandleEquivocation: HandleEquivocation; type WeightInfo: WeightInfo; + + /// Max number of authorities allowed + #[pallet::constant] + type MaxAuthorities: Get; } #[pallet::error] @@ -189,7 +196,11 @@ pub mod pallet { /// Current epoch authorities. 
#[pallet::storage] #[pallet::getter(fn authorities)] - pub type Authorities = StorageValue<_, Vec<(AuthorityId, BabeAuthorityWeight)>, ValueQuery>; + pub type Authorities = StorageValue< + _, + WeakBoundedVec<(AuthorityId, BabeAuthorityWeight), T::MaxAuthorities>, + ValueQuery, + >; /// The slot at which the first epoch actually started. This is 0 /// until the first block of the chain. @@ -229,8 +240,11 @@ pub mod pallet { /// Next epoch authorities. #[pallet::storage] - pub(super) type NextAuthorities = - StorageValue<_, Vec<(AuthorityId, BabeAuthorityWeight)>, ValueQuery>; + pub(super) type NextAuthorities = StorageValue< + _, + WeakBoundedVec<(AuthorityId, BabeAuthorityWeight), T::MaxAuthorities>, + ValueQuery, + >; /// Randomness under construction. /// @@ -246,8 +260,13 @@ pub mod pallet { /// TWOX-NOTE: `SegmentIndex` is an increasing integer, so this is okay. #[pallet::storage] - pub(super) type UnderConstruction = - StorageMap<_, Twox64Concat, u32, Vec, ValueQuery>; + pub(super) type UnderConstruction = StorageMap< + _, + Twox64Concat, + u32, + BoundedVec>, + ValueQuery, + >; /// Temporary value (cleared at block finalization) which is `Some` /// if per-block initialization has already been called for current block. @@ -503,8 +522,8 @@ impl Pallet { /// Typically, this is not handled directly by the user, but by higher-level validator-set /// manager logic like `pallet-session`. pub fn enact_epoch_change( - authorities: Vec<(AuthorityId, BabeAuthorityWeight)>, - next_authorities: Vec<(AuthorityId, BabeAuthorityWeight)>, + authorities: WeakBoundedVec<(AuthorityId, BabeAuthorityWeight), T::MaxAuthorities>, + next_authorities: WeakBoundedVec<(AuthorityId, BabeAuthorityWeight), T::MaxAuthorities>, ) { // PRECONDITION: caller has done initialization and is guaranteed // by the session module to be called before this. @@ -541,8 +560,10 @@ impl Pallet { // so that nodes can track changes. 
let next_randomness = NextRandomness::::get(); - let next_epoch = - NextEpochDescriptor { authorities: next_authorities, randomness: next_randomness }; + let next_epoch = NextEpochDescriptor { + authorities: next_authorities.to_vec(), + randomness: next_randomness, + }; Self::deposit_consensus(ConsensusLog::NextEpochData(next_epoch)); if let Some(next_config) = NextEpochConfig::::get() { @@ -571,7 +592,7 @@ impl Pallet { epoch_index: EpochIndex::::get(), start_slot: Self::current_epoch_start(), duration: T::EpochDuration::get(), - authorities: Self::authorities(), + authorities: Self::authorities().to_vec(), randomness: Self::randomness(), config: EpochConfig::::get() .expect("EpochConfig is initialized in genesis; we never `take` or `kill` it; qed"), @@ -590,7 +611,7 @@ impl Pallet { epoch_index: next_epoch_index, start_slot: Self::epoch_start(next_epoch_index), duration: T::EpochDuration::get(), - authorities: NextAuthorities::::get(), + authorities: NextAuthorities::::get().to_vec(), randomness: NextRandomness::::get(), config: NextEpochConfig::::get().unwrap_or_else(|| { EpochConfig::::get().expect( @@ -619,14 +640,18 @@ impl Pallet { fn deposit_randomness(randomness: &schnorrkel::Randomness) { let segment_idx = SegmentIndex::::get(); let mut segment = UnderConstruction::::get(&segment_idx); - if segment.len() < UNDER_CONSTRUCTION_SEGMENT_LENGTH { + if segment.try_push(*randomness).is_ok() { // push onto current segment: not full. - segment.push(*randomness); UnderConstruction::::insert(&segment_idx, &segment); } else { // move onto the next segment and update the index. 
let segment_idx = segment_idx + 1; - UnderConstruction::::insert(&segment_idx, &vec![randomness.clone()]); + let bounded_randomness = + BoundedVec::<_, ConstU32>::try_from(vec![ + randomness.clone(), + ]) + .expect("UNDER_CONSTRUCTION_SEGMENT_LENGTH >= 1"); + UnderConstruction::::insert(&segment_idx, bounded_randomness); SegmentIndex::::put(&segment_idx); } } @@ -667,7 +692,7 @@ impl Pallet { // we use the same values as genesis because we haven't collected any // randomness yet. let next = NextEpochDescriptor { - authorities: Self::authorities(), + authorities: Self::authorities().to_vec(), randomness: Self::randomness(), }; @@ -732,7 +757,7 @@ impl Pallet { let segment_idx: u32 = SegmentIndex::::mutate(|s| sp_std::mem::replace(s, 0)); // overestimate to the segment being full. - let rho_size = segment_idx.saturating_add(1) as usize * UNDER_CONSTRUCTION_SEGMENT_LENGTH; + let rho_size = (segment_idx.saturating_add(1) * UNDER_CONSTRUCTION_SEGMENT_LENGTH) as usize; let next_randomness = compute_randomness( this_randomness, @@ -747,8 +772,11 @@ impl Pallet { fn initialize_authorities(authorities: &[(AuthorityId, BabeAuthorityWeight)]) { if !authorities.is_empty() { assert!(Authorities::::get().is_empty(), "Authorities are already initialized!"); - Authorities::::put(authorities); - NextAuthorities::::put(authorities); + let bounded_authorities = + WeakBoundedVec::<_, T::MaxAuthorities>::try_from(authorities.to_vec()) + .expect("Initial number of authorities should be lower than T::MaxAuthorities"); + Authorities::::put(&bounded_authorities); + NextAuthorities::::put(&bounded_authorities); } } @@ -878,10 +906,24 @@ impl OneSessionHandler for Pallet { I: Iterator, { let authorities = validators.map(|(_account, k)| (k, 1)).collect::>(); + let bounded_authorities = WeakBoundedVec::<_, T::MaxAuthorities>::force_from( + authorities, + Some( + "Warning: The session has more validators than expected. 
\ + A runtime configuration adjustment may be needed.", + ), + ); let next_authorities = queued_validators.map(|(_account, k)| (k, 1)).collect::>(); + let next_bounded_authorities = WeakBoundedVec::<_, T::MaxAuthorities>::force_from( + next_authorities, + Some( + "Warning: The session has more queued validators than expected. \ + A runtime configuration adjustment may be needed.", + ), + ); - Self::enact_epoch_change(authorities, next_authorities) + Self::enact_epoch_change(bounded_authorities, next_bounded_authorities) } fn on_disabled(i: usize) { diff --git a/frame/babe/src/mock.rs b/frame/babe/src/mock.rs index 833a68fbddb6c..a05072bc3319e 100644 --- a/frame/babe/src/mock.rs +++ b/frame/babe/src/mock.rs @@ -230,6 +230,8 @@ parameter_types! { pub const ExpectedBlockTime: u64 = 1; pub const ReportLongevity: u64 = BondingDuration::get() as u64 * SessionsPerEra::get() as u64 * EpochDuration::get(); + pub const MaxAuthorities: u32 = 10; + pub const MaxSegmentLength: u32 = 256; } impl Config for Test { @@ -252,6 +254,7 @@ impl Config for Test { super::EquivocationHandler; type WeightInfo = (); + type MaxAuthorities = MaxAuthorities; } pub fn go_to_block(n: u64, s: u64) { diff --git a/frame/babe/src/tests.rs b/frame/babe/src/tests.rs index dc2f74c719519..34d861d5d97f7 100644 --- a/frame/babe/src/tests.rs +++ b/frame/babe/src/tests.rs @@ -92,7 +92,7 @@ fn first_block_epoch_zero_start() { let consensus_log = sp_consensus_babe::ConsensusLog::NextEpochData( sp_consensus_babe::digests::NextEpochDescriptor { - authorities: Babe::authorities(), + authorities: Babe::authorities().to_vec(), randomness: Babe::randomness(), }, ); diff --git a/primitives/consensus/babe/src/digests.rs b/primitives/consensus/babe/src/digests.rs index 470a028021ca1..1c908fe61fc0b 100644 --- a/primitives/consensus/babe/src/digests.rs +++ b/primitives/consensus/babe/src/digests.rs @@ -21,7 +21,7 @@ use super::{ AllowedSlots, AuthorityId, AuthorityIndex, AuthoritySignature, BabeAuthorityWeight, 
BabeEpochConfiguration, Slot, BABE_ENGINE_ID, }; -use codec::{Codec, Decode, Encode}; +use codec::{Codec, Decode, Encode, MaxEncodedLen}; use sp_runtime::{DigestItem, RuntimeDebug}; use sp_std::vec::Vec; @@ -134,7 +134,9 @@ pub struct NextEpochDescriptor { /// Information about the next epoch config, if changed. This is broadcast in the first /// block of the epoch, and applies using the same rules as `NextEpochDescriptor`. -#[derive(Decode, Encode, PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] +#[derive( + Decode, Encode, PartialEq, Eq, Clone, RuntimeDebug, MaxEncodedLen, scale_info::TypeInfo, +)] pub enum NextConfigDescriptor { /// Version 1. #[codec(index = 1)] diff --git a/primitives/consensus/babe/src/lib.rs b/primitives/consensus/babe/src/lib.rs index 4417670f4144b..560866cfb2ab5 100644 --- a/primitives/consensus/babe/src/lib.rs +++ b/primitives/consensus/babe/src/lib.rs @@ -28,7 +28,7 @@ pub use sp_consensus_vrf::schnorrkel::{ Randomness, RANDOMNESS_LENGTH, VRF_OUTPUT_LENGTH, VRF_PROOF_LENGTH, }; -use codec::{Decode, Encode}; +use codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; @@ -214,7 +214,7 @@ pub struct BabeGenesisConfiguration { } /// Types of allowed slots. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug, TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug, MaxEncodedLen, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum AllowedSlots { /// Only allow primary slots. @@ -247,7 +247,7 @@ impl sp_consensus::SlotData for BabeGenesisConfiguration { } /// Configuration data used by the BABE consensus engine. 
-#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, TypeInfo)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, MaxEncodedLen, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct BabeEpochConfiguration { /// A constant value that is used in the threshold calculation formula. diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index 0d880d508ef38..479d69c437567 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -574,6 +574,7 @@ impl pallet_timestamp::Config for Runtime { parameter_types! { pub const EpochDuration: u64 = 6; pub const ExpectedBlockTime: u64 = 10_000; + pub const MaxAuthorities: u32 = 10; } impl pallet_babe::Config for Runtime { @@ -596,8 +597,9 @@ impl pallet_babe::Config for Runtime { )>>::IdentificationTuple; type HandleEquivocation = (); - type WeightInfo = (); + + type MaxAuthorities = MaxAuthorities; } /// Adds one to the given input and returns the final result. From 4f8e0cf68f34ec3eb8e4b483683368d553a01c6d Mon Sep 17 00:00:00 2001 From: Kian Paimani <5588131+kianenigma@users.noreply.github.com> Date: Thu, 23 Sep 2021 09:20:37 +0100 Subject: [PATCH 26/33] Add some docs about runtime migration best practices (#9837) * Add some docs about runtime migration best practices as well. * Update docs/CONTRIBUTING.adoc * Update docs/CONTRIBUTING.adoc Co-authored-by: Chevdor Co-authored-by: Chevdor --- docs/CONTRIBUTING.adoc | 5 ++- utils/frame/try-runtime/cli/src/lib.rs | 44 ++++++++++++++++++++++++-- 2 files changed, 43 insertions(+), 6 deletions(-) diff --git a/docs/CONTRIBUTING.adoc b/docs/CONTRIBUTING.adoc index b0eaec04455e4..0a9a7ebacff5b 100644 --- a/docs/CONTRIBUTING.adoc +++ b/docs/CONTRIBUTING.adoc @@ -42,7 +42,7 @@ A Pull Request (PR) needs to be reviewed and approved by project maintainers unl . PRs must be tagged with their release importance via the `C1-C9` labels. . 
PRs must be tagged with their audit requirements via the `D1-D9` labels. . PRs that must be backported to a stable branch must be tagged with https://github.com/paritytech/substrate/labels/E1-runtimemigration[`E0-patchthis`]. -. PRs that introduce runtime migrations must be tagged with https://github.com/paritytech/substrate/labels/E1-runtimemigration[`E1-runtimemigration`]. +. PRs that introduce runtime migrations must be tagged with https://github.com/paritytech/substrate/labels/E1-runtimemigration[`E1-runtimemigration`]. See the https://github.com/paritytech/substrate/blob/master/utils/frame/try-runtime/cli/src/lib.rs#L18[Migration Best Practices here] for more info about how to test runtime migrations. . PRs that introduce irreversible database migrations must be tagged with https://github.com/paritytech/substrate/labels/E2-databasemigration[`E2-databasemigration`]. . PRs that add host functions must be tagged with with https://github.com/paritytech/substrate/labels/E4-newhostfunctions[`E4-newhostfunctions`]. . PRs that break the external API must be tagged with https://github.com/paritytech/substrate/labels/E5-breaksapi[`E5-breaksapi`]. @@ -88,8 +88,7 @@ To create a Polkadot companion PR: - The bot will push a commit to the Polkadot PR updating its Substrate reference. - If the polkadot PR origins from a fork then a project member may need to press `approve run` on the polkadot PR. - The bot will merge the Polkadot PR once all its CI `{"build_allow_failure":false}` checks are green. - - Note: The merge-bot currently doesn't work with forks on org accounts, only individual accounts. + Note: The merge-bot currently doesn't work with forks on org accounts, only individual accounts. If your PR is reviewed well, but a Polkadot PR is missing, signal it with https://github.com/paritytech/substrate/labels/A7-needspolkadotpr[`A7-needspolkadotpr`] to prevent it from getting automatically merged. 
diff --git a/utils/frame/try-runtime/cli/src/lib.rs b/utils/frame/try-runtime/cli/src/lib.rs index e7407f2f408fa..d5ccca9560252 100644 --- a/utils/frame/try-runtime/cli/src/lib.rs +++ b/utils/frame/try-runtime/cli/src/lib.rs @@ -29,9 +29,9 @@ //! //! Some resources about the above: //! -//! 1. https://substrate.dev/docs/en/knowledgebase/integrate/try-runtime -//! 2. https://www.crowdcast.io/e/substrate-seminar/41 -//! 3. https://substrate.dev/docs/en/knowledgebase/advanced/executor +//! 1. +//! 2. +//! 3. //! //! --- //! @@ -117,6 +117,44 @@ //! //! Note that *none* of the try-runtime operations need unsafe RPCs. //! +//! ## Migration Best Practices +//! +//! One of the main use-cases of try-runtime is using it for testing storage migrations. The +//! following points makes sure you can *effectively* test your migrations with try-runtime. +//! +//! #### Adding pre/post hooks +//! +//! One of the gems that come only in the `try-runtime` feature flag is the `pre_upgrade` and +//! `post_upgrade` hooks for [`OnRuntimeUpgrade`]. This trait is implemented either inside the +//! pallet, or manually in a runtime, to define a migration. In both cases, these functions can be +//! added, given the right flag: +//! +//! ```ignore +//! #[cfg(feature = try-runtime)] +//! fn pre_upgrade() -> Result<(), &'static str> {} +//! +//! #[cfg(feature = try-runtime)] +//! fn post_upgrade() -> Result<(), &'static str> {} +//! ``` +//! +//! (The pallet macro syntax will support this simply as a part of `#[pallet::hooks]`). +//! +//! These hooks allow you to execute some code, only within the `on-runtime-upgrade` command, before +//! and after the migration. If any data needs to be temporarily stored between the pre/post +//! migration hooks, [`OnRuntimeUpgradeHelpersExt`] can help with that. +//! +//! #### Logging +//! +//! It is super helpful to make sure your migration code uses logging (always with a `runtime` log +//! target prefix, e.g. 
`runtime::balance`) and state exactly at which stage it is, and what it is +//! doing. +//! +//! #### Guarding migrations +//! +//! Always make sure that any migration code is guarded either by [`StorageVersion`], or by some +//! custom storage item, so that it is NEVER executed twice, even if the code lives in two +//! consecutive runtimes. +//! //! ## Examples //! //! Run the migrations of the local runtime on the state of polkadot, from the polkadot repo where From 0df70939659abedde7275ee8aa13570026ff4b77 Mon Sep 17 00:00:00 2001 From: Guillaume Thiolliere Date: Thu, 23 Sep 2021 13:51:40 +0200 Subject: [PATCH 27/33] add test (#9822) --- frame/support/src/traits/misc.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/frame/support/src/traits/misc.rs b/frame/support/src/traits/misc.rs index 75f2f8ac3fef1..82c5512ac15c5 100644 --- a/frame/support/src/traits/misc.rs +++ b/frame/support/src/traits/misc.rs @@ -424,3 +424,19 @@ impl From for WrapperOpaque { Self(t) } } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_opaque_wrapper() { + let encoded = WrapperOpaque(3u32).encode(); + assert_eq!(encoded, [codec::Compact(4u32).encode(), 3u32.to_le_bytes().to_vec()].concat()); + let vec_u8 = >::decode(&mut &encoded[..]).unwrap(); + let decoded_from_vec_u8 = u32::decode(&mut &vec_u8[..]).unwrap(); + assert_eq!(decoded_from_vec_u8, 3u32); + let decoded = >::decode(&mut &encoded[..]).unwrap(); + assert_eq!(decoded.0, 3u32); + } +} From 7fcebc86856164a346f6762d339e8cc3b6ec9d30 Mon Sep 17 00:00:00 2001 From: Guillaume Thiolliere Date: Thu, 23 Sep 2021 14:19:59 +0200 Subject: [PATCH 28/33] improve test (#9835) --- frame/support/test/tests/pallet.rs | 252 ++--------------------------- 1 file changed, 9 insertions(+), 243 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 6a9a18ea48d4b..7eb8431ce146d 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ 
-553,7 +553,7 @@ frame_support::construct_runtime!( NodeBlock = Block, UncheckedExtrinsic = UncheckedExtrinsic { - System: frame_system::{Pallet, Call, Event}, + System: frame_system::{Call, Event}, Example: pallet::{Pallet, Call, Event, Config, Storage, Inherent, Origin, ValidateUnsigned}, Example2: pallet2::{Pallet, Call, Event, Config, Storage}, } @@ -1000,52 +1000,6 @@ fn metadata() { use frame_support::metadata::*; let pallets = vec![ - PalletMetadata { - index: 0, - name: "System", - storage: None, - calls: Some(meta_type::>().into()), - event: Some(meta_type::>().into()), - constants: vec![ - PalletConstantMetadata { - name: "BlockWeights", - ty: meta_type::(), - value: vec![], - docs: vec![], - }, - PalletConstantMetadata { - name: "BlockLength", - ty: meta_type::(), - value: vec![], - docs: vec![], - }, - PalletConstantMetadata { - name: "BlockHashCount", - ty: meta_type::(), - value: vec![], - docs: vec![], - }, - PalletConstantMetadata { - name: "DbWeight", - ty: meta_type::(), - value: vec![], - docs: vec![], - }, - PalletConstantMetadata { - name: "Version", - ty: meta_type::(), - value: vec![], - docs: vec![], - }, - PalletConstantMetadata { - name: "SS58Prefix", - ty: meta_type::(), - value: vec![], - docs: vec![], - }, - ], - error: Some(meta_type::>().into()), - }, PalletMetadata { index: 1, name: "Example", @@ -1255,170 +1209,31 @@ fn metadata() { error: Some(PalletErrorMetadata { ty: meta_type::>() }), }, PalletMetadata { - index: 1, - name: "Example", + index: 2, + name: "Example2", storage: Some(PalletStorageMetadata { - prefix: "Example", + prefix: "Example2", entries: vec![ StorageEntryMetadata { - name: "ValueWhereClause", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(meta_type::()), - default: vec![0], - docs: vec![], - }, - StorageEntryMetadata { - name: "Value", + name: "SomeValue", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(meta_type::()), + ty: 
StorageEntryType::Plain(meta_type::>()), default: vec![0], docs: vec![], }, StorageEntryMetadata { - name: "Value2", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(meta_type::()), - default: vec![0], - docs: vec![], - }, - StorageEntryMetadata { - name: "Map", - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - key: meta_type::(), - value: meta_type::(), - hashers: vec![StorageHasher::Blake2_128Concat], - }, - default: vec![4, 0], - docs: vec![], - }, - StorageEntryMetadata { - name: "Map2", + name: "SomeCountedStorageMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: meta_type::(), - value: meta_type::(), hashers: vec![StorageHasher::Twox64Concat], - }, - default: vec![0], - docs: vec![], - }, - StorageEntryMetadata { - name: "DoubleMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - value: meta_type::(), - key: meta_type::<(u8, u16)>(), - hashers: vec![ - StorageHasher::Blake2_128Concat, - StorageHasher::Twox64Concat, - ], - }, - default: vec![0], - docs: vec![], - }, - StorageEntryMetadata { - name: "DoubleMap2", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - value: meta_type::(), - key: meta_type::<(u16, u32)>(), - hashers: vec![ - StorageHasher::Twox64Concat, - StorageHasher::Blake2_128Concat, - ], - }, - default: vec![0], - docs: vec![], - }, - StorageEntryMetadata { - name: "NMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { key: meta_type::(), - hashers: vec![StorageHasher::Blake2_128Concat], value: meta_type::(), }, default: vec![0], docs: vec![], }, StorageEntryMetadata { - name: "NMap2", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: meta_type::<(u16, u32)>(), - hashers: vec![ - StorageHasher::Twox64Concat, - StorageHasher::Blake2_128Concat, - ], - value: meta_type::(), - }, - default: vec![0], - docs: vec![], - }, - #[cfg(feature = 
"conditional-storage")] - StorageEntryMetadata { - name: "ConditionalValue", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(meta_type::()), - default: vec![0], - docs: vec![], - }, - #[cfg(feature = "conditional-storage")] - StorageEntryMetadata { - name: "ConditionalMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: meta_type::(), - value: meta_type::(), - hashers: vec![StorageHasher::Twox64Concat], - }, - default: vec![0], - docs: vec![], - }, - #[cfg(feature = "conditional-storage")] - StorageEntryMetadata { - name: "ConditionalDoubleMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - value: meta_type::(), - key: meta_type::<(u8, u16)>(), - hashers: vec![ - StorageHasher::Blake2_128Concat, - StorageHasher::Twox64Concat, - ], - }, - default: vec![0], - docs: vec![], - }, - #[cfg(feature = "conditional-storage")] - StorageEntryMetadata { - name: "ConditionalNMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: meta_type::<(u8, u16)>(), - hashers: vec![ - StorageHasher::Blake2_128Concat, - StorageHasher::Twox64Concat, - ], - value: meta_type::(), - }, - default: vec![0], - docs: vec![], - }, - StorageEntryMetadata { - name: "RenamedCountedMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - hashers: vec![StorageHasher::Twox64Concat], - key: meta_type::(), - value: meta_type::(), - }, - default: vec![0], - docs: vec![], - }, - StorageEntryMetadata { - name: "CounterForRenamedCountedMap", + name: "CounterForSomeCountedStorageMap", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(meta_type::()), default: vec![0, 0, 0, 0], @@ -1426,55 +1241,6 @@ fn metadata() { }, ], }), - calls: Some(meta_type::>().into()), - event: Some(meta_type::>().into()), - constants: vec![ - PalletConstantMetadata { - name: "MyGetParam", - ty: meta_type::(), - value: vec![10, 0, 0, 0], - docs: vec![" Some comment", " Some 
comment"], - }, - PalletConstantMetadata { - name: "MyGetParam2", - ty: meta_type::(), - value: vec![11, 0, 0, 0], - docs: vec![" Some comment", " Some comment"], - }, - PalletConstantMetadata { - name: "MyGetParam3", - ty: meta_type::(), - value: vec![12, 0, 0, 0, 0, 0, 0, 0], - docs: vec![], - }, - PalletConstantMetadata { - name: "some_extra", - ty: meta_type::(), - value: vec![100, 0, 0, 0, 0, 0, 0, 0], - docs: vec![" Some doc", " Some doc"], - }, - PalletConstantMetadata { - name: "some_extra_extra", - ty: meta_type::(), - value: vec![0, 0, 0, 0, 0, 0, 0, 0], - docs: vec![" Some doc"], - }, - ], - error: Some(PalletErrorMetadata { ty: meta_type::>() }), - }, - PalletMetadata { - index: 2, - name: "Example2", - storage: Some(PalletStorageMetadata { - prefix: "Example2", - entries: vec![StorageEntryMetadata { - name: "SomeValue", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(meta_type::>()), - default: vec![0], - docs: vec![], - }], - }), calls: Some(meta_type::>().into()), event: Some(PalletEventMetadata { ty: meta_type::() }), constants: vec![], @@ -1504,7 +1270,7 @@ fn metadata() { _ => panic!("metadata has been bumped, test needs to be updated"), }; - pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); + pretty_assertions::assert_eq!(actual_metadata.pallets, expected_metadata.pallets); } #[test] From 65ac6f39c72e77fd98f337f1a5beddd539ee8d6f Mon Sep 17 00:00:00 2001 From: Andreas Doerr Date: Thu, 23 Sep 2021 21:02:30 +0200 Subject: [PATCH 29/33] Integrate BEEFY (#9833) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Initial project setup and skeleton (#4) * initial project setup for beefy gadget client * update editorconfig * update gitignore * add initial skeleton for beefy gadget worker * add skeleton for gossip processing * add app crypto * move around some code * add basic flow for voting * add logic for picking blocks to sign * add rustfmt config 
* add example node with beefy gadget * use u32::next_power_of_two * make maximum periodicity configurable * add copyright header * rename max_periodicity to min_interval * CI stuff (#5) * CI stuff. * Fix workspace. * cargo fmt --all * Add license for beefy-gadget * One toolchain to rule them all. * Clippy. * Fix clippy. * Clippy in the runtime. * Fix clippy grumbles. * cargo fmt --all * Primitives & Light Client examples (#8) * Primitives. * Docs. * Document primitives. * Simple tests. * Light client examples. * Fix stuff. * cargo fmt --all * Add a bunch of tests for imports. * Add more examples. * cargo fmt --all * Fix clippy. * cargo fmt --all * Apply suggestions from code review Co-authored-by: André Silva <123550+andresilva@users.noreply.github.com> * Add GRANDPA / FG clarifications. * Fix min number of signatures. Co-authored-by: André Silva <123550+andresilva@users.noreply.github.com> * Update to substrate master (#22) * update to substrate master * update dependencies * fix clippy issues Co-authored-by: Tomasz Drwięga * Add beefy pallet (#25) * move beefy application crypto to primitives * make primitives compile under no_std * add beefy pallet that maintains authority set * add beefy pallet to node example runtime * tabify node-example cargo.toml files * use double quotes in Cargo.toml files * add missing hex-literal dependency * add runtime api to fetch BEEFY authorities * fix clippy warnings * rename beefy-pallet to pallet-beefy * sort dependencies in node-example/runtime/Cargo.toml * Signed commitments rpc pubsub (#26) * move beefy application crypto to primitives * make primitives compile under no_std * add beefy pallet that maintains authority set * add beefy pallet to node example runtime * tabify node-example cargo.toml files * use double quotes in Cargo.toml files * add missing hex-literal dependency * add runtime api to fetch BEEFY authorities * fix clippy warnings * gadget: use commitment and signedcommitment * gadget: send notifications for 
signed commitments * gadget: add rpc pubsub for signed commitments * node-example: enable beefy rpc * gadget: fix clippy warnings * rename beefy-pallet to pallet-beefy * sort dependencies in node-example/runtime/Cargo.toml * gadget: add documentation on SignedCommitment rpc wrapper type * gadget: add todos about dummy beefy commitments * gadget: remove redundant closure Co-authored-by: Tomasz Drwięga Co-authored-by: Tomasz Drwięga * Integrate MMR and deposit root into the digest. (#24) * Add basic MMR. * Deposit digest item. * cargo fmt --all * Merge with primitives. * cargo fmt --all * Fix extra spaces. * cargo fmt --all * Switch branch. * remove stray whitespace * update to latest td-mmr commit * fix clippy error Co-authored-by: André Silva * use new mmr root as commitment payload (#27) * use new mmr root as commitment payload * fix mmr root codec index * warn on MMR root digest not found Co-authored-by: Tomasz Drwięga * add type alias for MMR root hash Co-authored-by: Tomasz Drwięga * Bump serde_json from 1.0.59 to 1.0.60 (#28) * Update to latest substrate. (#32) * Update to latest substrate. * Fix tests. * cargo fmt --all * Switch to master. * Bump serde from 1.0.117 to 1.0.118 (#29) * Bump serde from 1.0.117 to 1.0.118 Bumps [serde](https://github.com/serde-rs/serde) from 1.0.117 to 1.0.118. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.117...v1.0.118) Signed-off-by: dependabot-preview[bot] * Bump arc-swap. Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> Co-authored-by: Tomasz Drwięga Co-authored-by: Tomasz Drwięga * Remove transition flag (#35) * Get rid of is_set_transition_flag * Fix tests. 
* cargo fmt --all * Bump futures from 0.3.9 to 0.3.12 (#50) * Bump log from 0.4.11 to 0.4.13 (#52) * Bump Substrate and Deps (#57) * Update README (#58) * Update README * Apply suggestions from code review Co-authored-by: Tomasz Drwięga * address review comments * missed a typo Co-authored-by: Tomasz Drwięga * Add validator set to the pallet. (#65) * Bump Substrate and Deps (#71) * Bump Substrate and Deps * pin serde and syn * bump Substrate again for '__Nonexhaustive' fix * add cargo deny ignore * Beefy pallet test (#74) * setup mock * test session change * silence beefy * clippy still * no change - no log * clippy again * Apply suggestions from code review Co-authored-by: Tomasz Drwięga * code review changes, added additional test Co-authored-by: Tomasz Drwięga * Beefy node cleanup (#75) * bump serde * bump substrate, scale-codec 2.0.0 * we need a proper beefy node * rename primitives as well * Sort members. Co-authored-by: Tomasz Drwięga * Migrate beefy-pallet to FRAMEv2 (#76) * migrate beefy-pallet to FRAMEv2 * Code review Co-authored-by: Hernando Castano Co-authored-by: Hernando Castano * Run BEEFY worker as non-validator (#77) * run BEEFY worker as non-validator * don't check for roloe.is_authority * change enum type name * Bump Substrate and Deps (#79) * Add BEEFY gadget as extra peer set (#80) * Add BEEFY gadget as extra peer set * use BEEFY protocol * Add ValidatorSetId to BEEFY digest (#85) * add ValidatorSetId to BEEFY digest * apply review changes * Bump Substrate and Deps (#91) * Bump Substrate and Deps * Bump Substrate again in order to include a hot-fix * redo again * use CryptoStore issue * cargo fmt * Bump serde_json from 1.0.63 to 1.0.64 (#93) * Track BEEFY validator set (#94) * Track BEEFY validator set * Add validator_set_id to BeefyWorker * Make validattor_set_id optional * Ad 92 (#97) * sign_commitment() * Error handling todo * Add error type (#99) * Add error type * Address review * Extract worker and round logic (#104) * Bump serde from 
1.0.123 to 1.0.124 (#106) * Rework BeefyAPI (#110) * Initialize BeefyWorker with current validator set (#111) * Update toolchain (#115) * Use nightly toolchain * dongradde to latest clippy stable * GH workflow trail and error * next try * use stable for clippy * update wasm builder * yet another try * fun with CI * no env var * and one more * allow from_over_into bco contruct_runtime * back to start * well ... * full circle * old version was still used * Bump Substrate and Deps (#117) * Bump Substrate and Deps * cargo fmt should enforce uniform imports * merge some imports * Delayed BEEFY worker initialization (#121) * lifecycle state * add Client convenience trait * rework trait identifiers * WIP * rework BeefyWorker::new() signature * Delayed BEEFY gadget initialization * address review * Bump substrate. (#123) * Bump substrate. * Fix tests. * Lower log-level for a missing validator set (#124) * lower log-level for a missing validator set * move best_finalized_block initialization * Setup Prometheus metrics (#125) * setup Prometheus metrics * expose validator set id * cargo fmt * Update beefy-gadget/src/lib.rs Co-authored-by: Tomasz Drwięga * add vote messages gossiped metric * track authorities change, before checking for MMR root digest Co-authored-by: Tomasz Drwięga * Make Client convenience trait public (#126) * Bump serde from 1.0.124 to 1.0.125 (#131) * Reset rounds on new validator set. (#133) * Re-set rounds on new validator set. * Fix docs. 
* Bump Substrate and Deps (#134) * beefy: authority set changes fixes (#139) * node: fix grandpa peers set config * gadget: update best finalized number only when finalized with beefy * gadget: process authorities changes regardless of vote status * gadget: remove superfluous signature type (#140) * node: fix grandpa peers set config * gadget: update best finalized number only when finalized with beefy * gadget: process authorities changes regardless of vote status * gadget: remove superfluous signature type Co-authored-by: Tomasz Drwięga * gadget: reduce gossip spam (#141) * node: fix grandpa peers set config * gadget: update best finalized number only when finalized with beefy * gadget: process authorities changes regardless of vote status * gadget: remove superfluous signature type * gadget: only gossip last 5 rounds * gadget: note round to gossip validator before gossiping message * gadget: fix clippy warnings * gadget: update docs Co-authored-by: Tomasz Drwięga Co-authored-by: Tomasz Drwięga Co-authored-by: adoerr <0xad@gmx.net> * gadget: verify SignedCommitment message signature (#142) * gadget: verify SignedCommitment message signature * gadget: log messages with bad sigs * gadget: move todo comment * Bump futures from 0.3.13 to 0.3.14 (#145) * Milestone 1 (#144) * use best_finalized, prevent race * make best_finalized_block an Option, should_vote_on bails on None * Bump futures from 0.3.13 to 0.3.14 * Revert futures bump * Revert "Revert futures bump" This reverts commit a1b5e7e9bac526f2897ebfdfee7f02dd29a13ac5. * Revert "Bump futures from 0.3.13 to 0.3.14" This reverts commit a4e508b118ad2c4b52909d24143c284073961458. 
* debug msg if the bail voting * validator_set() * local_id() * get rid of worker state * Apply review suggestions * fix should_vote_on() * Extract BeefyGossipValidator (#147) * Extract BeefyGossipValidator * Apply review suggestions * Add block_delta parameter to start_beefy_gadget (#151) * Add block_delta parameter * rename to min_block_delta * Add additional metrics (#152) * Add additional metrics * add skipped session metric * add some comment for temp metric * don't log under info for every concluded round (#156) * don't log error on missing validator keys (#157) * don't log error on missing validator keys * remove unused import * Fix validator set change handling (#158) * reduce some logs from debug to trace * fix validator set changes handling * rename validator module to gossip * run rustfmt * Fix should_vote_on() (#160) * Fix should_vote_on() * by the textbook * fix the algorithm * Apply review suggestions * don't use NumberFor in vote_target Co-authored-by: André Silva * Make KeyStore optional (#173) * Use builder pattern for NonDefaultSetConfig (#178) Co-authored-by: adoerr <0xad@gmx.net> * Append SignedCommitment to block justifications (#177) * Append SignedCommitment * add BeefyParams * add WorkerParams * use warn * versioned variant for SignedCommitment * Bump serde from 1.0.125 to 1.0.126 (#184) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.125 to 1.0.126. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.125...v1.0.126) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Bump strum from 0.20.0 to 0.21.0 (#195) * Bump strum from 0.20.0 to 0.21.0 Bumps [strum](https://github.com/Peternator7/strum) from 0.20.0 to 0.21.0. 
- [Release notes](https://github.com/Peternator7/strum/releases) - [Changelog](https://github.com/Peternator7/strum/blob/master/CHANGELOG.md) - [Commits](https://github.com/Peternator7/strum/commits) --- updated-dependencies: - dependency-name: strum dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * use dervie feature for strum; clippy and deny housekeeping Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: adoerr <0xad@gmx.net> * Make concluded round an info log (#200) * Remove external crypto trait bounds (#207) * BeefyKeystore newtype * WIP * remove mod ecdsa * WIP * fix tests * some polishing * Rename AuthorityId to BeefyId to avoid type conflict in UI (#211) * Add trace points; Reduce MAX_LIVE_GOSSIP_ROUNDS (#210) * Add trace points; Reduce MAX_LIVE_GOSSIP_ROUNDS * log local authority id * Additional initial authority id's (#217) * Scratch concluded rounds * adjust testnet doc * fix authority key typo * We don't want no scratches * address review comments * Fix note_round() (#219) * rename BeefyGossipValidator * Fix note_round() * use const for assert * put message trace points back in * test case note_same_round_twice() * address review comments * remove redundant check * Use LocalKeystore for tests (#224) * private_keys() * Use LocalKeystore for tests * Use keystore helper * Address review * some reformatting * Cache known votes in gossip (#227) * Implement known messages cache. * Add tests. * Appease clippy. * More clippy Co-authored-by: adoerr <0xad@gmx.net> * Some key store sanity checks (#232) * verify vote message * verify_validator_set() * rework logging * some rework * Tone down warnings. * Add signature verification. * Tone down more. * Fix clippy Co-authored-by: Tomasz Drwięga * Use Binary Merkle Tree instead of a trie (#225) * Binary tree merkle root. * Add proofs and verification. * Clean up debug. 
* Use BEEFY addresses instead of pubkeys. * Use new merkle tree. * Optimize allocations. * Add test for larger trees. * Add tests for larger cases. * Appease clippy * Appease clippy2. * Fix proof generation & verification. * Add more test data. * Fix CLI. * Update README * Bump version. * Update docs. * Rename beefy-merkle-root to beefy-merkle-tree Co-authored-by: adoerr <0xad@gmx.net> * Bump Substrate and Deps (#235) * BEEFY+MMR pallet (#236) * Add MMR leaf format to primitives. * Fix tests * Initial work on the BEEFY-MMR pallet. * Add tests to MMR pallet. * Use eth addresses. * Use binary merkle tree. * Bump libsecp256k1 * Fix compilation. * Bump deps. * Appease cargo deny. * Re-format. * Module-level docs. * no-std fix. * update README Co-authored-by: adoerr <0xad@gmx.net> * Fix noting rounds for non-authorities (#238) * Bump env_logger from 0.8.4 to 0.9.0 (#242) Bumps [env_logger](https://github.com/env-logger-rs/env_logger) from 0.8.4 to 0.9.0. - [Release notes](https://github.com/env-logger-rs/env_logger/releases) - [Changelog](https://github.com/env-logger-rs/env_logger/blob/main/CHANGELOG.md) - [Commits](https://github.com/env-logger-rs/env_logger/compare/v0.8.4...v0.9.0) --- updated-dependencies: - dependency-name: env_logger dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * gadget: add global timeout for rebroadcasting messages (#243) * gadget: add global timeout for rebroadcasting messages * update rustfmt.toml * make message_allowed() a debug trace Co-authored-by: adoerr <0xad@gmx.net> * Bump Substrate and Deps (#245) * Bump Substrate and Deps * Bump Substrate again * Bump futures from 0.3.15 to 0.3.16 (#247) Bumps [futures](https://github.com/rust-lang/futures-rs) from 0.3.15 to 0.3.16. 
- [Release notes](https://github.com/rust-lang/futures-rs/releases) - [Changelog](https://github.com/rust-lang/futures-rs/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/futures-rs/compare/0.3.15...0.3.16) --- updated-dependencies: - dependency-name: futures dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Bump libsecp256k1 from 0.5.0 to 0.6.0 (#249) * Bump libsecp256k1 from 0.5.0 to 0.6.0 Bumps [libsecp256k1](https://github.com/paritytech/libsecp256k1) from 0.5.0 to 0.6.0. - [Release notes](https://github.com/paritytech/libsecp256k1/releases) - [Changelog](https://github.com/paritytech/libsecp256k1/blob/master/CHANGELOG.md) - [Commits](https://github.com/paritytech/libsecp256k1/commits) --- updated-dependencies: - dependency-name: libsecp256k1 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * use correct crate name Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: adoerr <0xad@gmx.net> * Derive `scale_info::TypeInfo` for types used in polkadot (#218) * Add scale-info TypeInfo derives * Update scale-info * Add crates.io patches * Use substrate aj-metadata-vnext branch * Revert master branch substrate deps * Add scale-info to beefy-pallet * scale-info v0.9.0 * Remove github dependencies and patches * More TypeInfo derives * Update scale-info to 0.10.0 * Add missing scale-info dependency * Add missing TypeInfo derive * Hide TypeInfo under a feature. Co-authored-by: Tomasz Drwięga * Bump serde from 1.0.126 to 1.0.127 (#260) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.126 to 1.0.127. 
- [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.126...v1.0.127) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Bump Substrate and Deps (#262) * Update jsonrpc (#265) * Update jsonrpc * Update Substrate * bump Substrate and Deps (#268) * Bump serde from 1.0.127 to 1.0.128 (#272) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.127 to 1.0.128. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.127...v1.0.128) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Fix spelling (#271) * Bump serde from 1.0.128 to 1.0.130 (#276) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.128 to 1.0.130. - [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.128...v1.0.130) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Bump scale-info from 0.10.0 to 0.12.0 (#275) Bumps [scale-info](https://github.com/paritytech/scale-info) from 0.10.0 to 0.12.0. 
- [Release notes](https://github.com/paritytech/scale-info/releases) - [Changelog](https://github.com/paritytech/scale-info/blob/master/CHANGELOG.md) - [Commits](https://github.com/paritytech/scale-info/commits) --- updated-dependencies: - dependency-name: scale-info dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: adoerr <0xad@gmx.net> * Update to scale-info 1.0 (#278) * bump substrate (#282) * bump Substrate and Deps * cargo fmt Co-authored-by: Wenfeng Wang * Update worker.rs (#287) * Bump anyhow from 1.0.43 to 1.0.44 (#290) * Bump anyhow from 1.0.43 to 1.0.44 Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.43 to 1.0.44. - [Release notes](https://github.com/dtolnay/anyhow/releases) - [Commits](https://github.com/dtolnay/anyhow/compare/1.0.43...1.0.44) --- updated-dependencies: - dependency-name: anyhow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * derive Default Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: adoerr <0xad@gmx.net> * Remove optional `scale-info` feature (#292) * Make scale-info dependency non-optional * Remove feature gated TypeInfo derives * Import TypeInfo * Update substrate * Fix up runtime * prune .git suffix (#294) * remove unused deps (#295) * remove unused deps * update lock file * Bump libsecp256k1 from 0.6.0 to 0.7.0 (#296) * Bump libsecp256k1 from 0.6.0 to 0.7.0 Bumps [libsecp256k1](https://github.com/paritytech/libsecp256k1) from 0.6.0 to 0.7.0. 
- [Release notes](https://github.com/paritytech/libsecp256k1/releases) - [Changelog](https://github.com/paritytech/libsecp256k1/blob/master/CHANGELOG.md) - [Commits](https://github.com/paritytech/libsecp256k1/commits) --- updated-dependencies: - dependency-name: libsecp256k1 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * update sec advisories Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: adoerr <0xad@gmx.net> * clean compile * use path dependencies * beefy-gadget license header * pallet-beefy license header * pallet-beefy-mmr license header * beefy-primitves license header * carg fmt * more formatting * shorten line * downgrade parity-scale-codec to 2.2.0 * use path dependency for Prometheus endpoint * remove clippy annotations Co-authored-by: André Silva <123550+andresilva@users.noreply.github.com> Co-authored-by: Tomasz Drwięga Co-authored-by: Tomasz Drwięga Co-authored-by: André Silva Co-authored-by: dependabot-preview[bot] <27856297+dependabot-preview[bot]@users.noreply.github.com> Co-authored-by: Hernando Castano Co-authored-by: Pierre Krieger Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Andrew Jones Co-authored-by: Bastian Köcher Co-authored-by: drewstone Co-authored-by: Andronik Ordian Co-authored-by: Wenfeng Wang Co-authored-by: Joshy Orndorff Co-authored-by: Squirrel --- Cargo.lock | 191 +++++- Cargo.toml | 6 + client/beefy/Cargo.toml | 38 ++ client/beefy/rpc/Cargo.toml | 26 + client/beefy/rpc/src/lib.rs | 114 ++++ client/beefy/rpc/src/notification.rs | 39 ++ client/beefy/src/error.rs | 31 + client/beefy/src/gossip.rs | 236 ++++++++ client/beefy/src/gossip_tests.rs | 182 ++++++ client/beefy/src/keystore.rs | 119 ++++ client/beefy/src/keystore_tests.rs | 275 +++++++++ client/beefy/src/lib.rs | 159 +++++ client/beefy/src/metrics.rs | 93 +++ client/beefy/src/notification.rs | 113 ++++ 
client/beefy/src/round.rs | 121 ++++ client/beefy/src/worker.rs | 534 +++++++++++++++++ frame/beefy-mmr/Cargo.toml | 56 ++ frame/beefy-mmr/primitives/Cargo.toml | 23 + frame/beefy-mmr/primitives/src/lib.rs | 806 ++++++++++++++++++++++++++ frame/beefy-mmr/src/lib.rs | 236 ++++++++ frame/beefy-mmr/src/mock.rs | 205 +++++++ frame/beefy-mmr/src/tests.rs | 148 +++++ frame/beefy/Cargo.toml | 40 ++ frame/beefy/src/lib.rs | 179 ++++++ frame/beefy/src/mock.rs | 164 ++++++ frame/beefy/src/tests.rs | 142 +++++ primitives/beefy/Cargo.toml | 33 ++ primitives/beefy/src/commitment.rs | 264 +++++++++ primitives/beefy/src/lib.rs | 137 +++++ primitives/beefy/src/mmr.rs | 132 +++++ primitives/beefy/src/witness.rs | 162 ++++++ 31 files changed, 4992 insertions(+), 12 deletions(-) create mode 100644 client/beefy/Cargo.toml create mode 100644 client/beefy/rpc/Cargo.toml create mode 100644 client/beefy/rpc/src/lib.rs create mode 100644 client/beefy/rpc/src/notification.rs create mode 100644 client/beefy/src/error.rs create mode 100644 client/beefy/src/gossip.rs create mode 100644 client/beefy/src/gossip_tests.rs create mode 100644 client/beefy/src/keystore.rs create mode 100644 client/beefy/src/keystore_tests.rs create mode 100644 client/beefy/src/lib.rs create mode 100644 client/beefy/src/metrics.rs create mode 100644 client/beefy/src/notification.rs create mode 100644 client/beefy/src/round.rs create mode 100644 client/beefy/src/worker.rs create mode 100644 frame/beefy-mmr/Cargo.toml create mode 100644 frame/beefy-mmr/primitives/Cargo.toml create mode 100644 frame/beefy-mmr/primitives/src/lib.rs create mode 100644 frame/beefy-mmr/src/lib.rs create mode 100644 frame/beefy-mmr/src/mock.rs create mode 100644 frame/beefy-mmr/src/tests.rs create mode 100644 frame/beefy/Cargo.toml create mode 100644 frame/beefy/src/lib.rs create mode 100644 frame/beefy/src/mock.rs create mode 100644 frame/beefy/src/tests.rs create mode 100644 primitives/beefy/Cargo.toml create mode 100644 
primitives/beefy/src/commitment.rs create mode 100644 primitives/beefy/src/lib.rs create mode 100644 primitives/beefy/src/mmr.rs create mode 100644 primitives/beefy/src/witness.rs diff --git a/Cargo.lock b/Cargo.lock index e386b600f79ba..693388a5299ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -492,6 +492,80 @@ dependencies = [ "serde", ] +[[package]] +name = "beefy-gadget" +version = "4.0.0-dev" +dependencies = [ + "beefy-primitives", + "fnv", + "futures 0.3.16", + "log 0.4.14", + "parity-scale-codec", + "parking_lot 0.11.1", + "sc-client-api", + "sc-keystore", + "sc-network", + "sc-network-gossip", + "sc-network-test", + "sc-utils", + "sp-api", + "sp-application-crypto", + "sp-arithmetic", + "sp-blockchain", + "sp-core", + "sp-keystore", + "sp-runtime", + "strum 0.21.0", + "substrate-prometheus-endpoint", + "thiserror", + "wasm-timer", +] + +[[package]] +name = "beefy-gadget-rpc" +version = "4.0.0-dev" +dependencies = [ + "beefy-gadget", + "beefy-primitives", + "futures 0.3.16", + "jsonrpc-core", + "jsonrpc-core-client", + "jsonrpc-derive", + "jsonrpc-pubsub", + "log 0.4.14", + "parity-scale-codec", + "sc-rpc", + "serde", + "sp-core", + "sp-runtime", +] + +[[package]] +name = "beefy-merkle-tree" +version = "4.0.0-dev" +dependencies = [ + "env_logger 0.9.0", + "hex", + "hex-literal", + "log 0.4.14", + "tiny-keccak", +] + +[[package]] +name = "beefy-primitives" +version = "4.0.0-dev" +dependencies = [ + "hex-literal", + "parity-scale-codec", + "scale-info", + "sp-api", + "sp-application-crypto", + "sp-core", + "sp-keystore", + "sp-runtime", + "sp-std", +] + [[package]] name = "bincode" version = "1.3.2" @@ -3751,9 +3825,9 @@ dependencies = [ "base64 0.12.3", "digest 0.9.0", "hmac-drbg 0.3.0", - "libsecp256k1-core", - "libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", + "libsecp256k1-core 0.2.2", + "libsecp256k1-gen-ecmult 0.2.1", + "libsecp256k1-gen-genmult 0.2.1", "rand 0.7.3", "serde", "sha2 0.9.3", @@ -3770,15 +3844,32 @@ dependencies = [ "base64 0.12.3", 
"digest 0.9.0", "hmac-drbg 0.3.0", - "libsecp256k1-core", - "libsecp256k1-gen-ecmult", - "libsecp256k1-gen-genmult", + "libsecp256k1-core 0.2.2", + "libsecp256k1-gen-ecmult 0.2.1", + "libsecp256k1-gen-genmult 0.2.1", "rand 0.7.3", "serde", "sha2 0.9.3", "typenum", ] +[[package]] +name = "libsecp256k1" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0452aac8bab02242429380e9b2f94ea20cea2b37e2c1777a1358799bbe97f37" +dependencies = [ + "arrayref", + "base64 0.13.0", + "digest 0.9.0", + "libsecp256k1-core 0.3.0", + "libsecp256k1-gen-ecmult 0.3.0", + "libsecp256k1-gen-genmult 0.3.0", + "rand 0.8.4", + "serde", + "sha2 0.9.3", +] + [[package]] name = "libsecp256k1-core" version = "0.2.2" @@ -3790,13 +3881,33 @@ dependencies = [ "subtle 2.4.0", ] +[[package]] +name = "libsecp256k1-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" +dependencies = [ + "crunchy", + "digest 0.9.0", + "subtle 2.4.0", +] + [[package]] name = "libsecp256k1-gen-ecmult" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" dependencies = [ - "libsecp256k1-core", + "libsecp256k1-core 0.2.2", +] + +[[package]] +name = "libsecp256k1-gen-ecmult" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3038c808c55c87e8a172643a7d87187fc6c4174468159cb3090659d55bcb4809" +dependencies = [ + "libsecp256k1-core 0.3.0", ] [[package]] @@ -3805,7 +3916,16 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" dependencies = [ - "libsecp256k1-core", + "libsecp256k1-core 0.2.2", +] + +[[package]] +name = "libsecp256k1-gen-genmult" +version = "0.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db8d6ba2cec9eacc40e6e8ccc98931840301f1006e95647ceb2dd5c3aa06f7c" +dependencies = [ + "libsecp256k1-core 0.3.0", ] [[package]] @@ -5114,6 +5234,50 @@ dependencies = [ "sp-std", ] +[[package]] +name = "pallet-beefy" +version = "4.0.0-dev" +dependencies = [ + "beefy-primitives", + "frame-support", + "frame-system", + "pallet-session", + "parity-scale-codec", + "scale-info", + "serde", + "sp-core", + "sp-io", + "sp-runtime", + "sp-staking", + "sp-std", +] + +[[package]] +name = "pallet-beefy-mmr" +version = "4.0.0-dev" +dependencies = [ + "beefy-merkle-tree", + "beefy-primitives", + "frame-support", + "frame-system", + "hex", + "hex-literal", + "libsecp256k1 0.7.0", + "log 0.4.14", + "pallet-beefy", + "pallet-mmr", + "pallet-mmr-primitives", + "pallet-session", + "parity-scale-codec", + "scale-info", + "serde", + "sp-core", + "sp-io", + "sp-runtime", + "sp-staking", + "sp-std", +] + [[package]] name = "pallet-bounties" version = "4.0.0-dev" @@ -8629,9 +8793,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.126" +version = "1.0.130" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03" +checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" dependencies = [ "serde_derive", ] @@ -8657,9 +8821,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.126" +version = "1.0.130" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "963a7dbc9895aeac7ac90e74f34a5d5261828f79df35cbed41e10189d3804d43" +checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" dependencies = [ "proc-macro2", "quote", @@ -9779,6 +9943,9 @@ name = "strum" version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aaf86bbcfd1fa9670b7a129f64fc0c9fcbbfe4f1bc4210e9e98fe71ffc12cde2" +dependencies = [ + "strum_macros 
0.21.1", +] [[package]] name = "strum_macros" diff --git a/Cargo.toml b/Cargo.toml index e110c27b20d77..71473a4bc5689 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,8 @@ members = [ "client/api", "client/authority-discovery", "client/basic-authorship", + "client/beefy", + "client/beefy/rpc", "client/block-builder", "client/chain-spec", "client/chain-spec/derive", @@ -69,6 +71,9 @@ members = [ "frame/authorship", "frame/babe", "frame/balances", + "frame/beefy", + "frame/beefy-mmr", + "frame/beefy-mmr/primitives", "frame/benchmarking", "frame/bounties", "frame/collective", @@ -138,6 +143,7 @@ members = [ "primitives/arithmetic/fuzzer", "primitives/authority-discovery", "primitives/authorship", + "primitives/beefy", "primitives/block-builder", "primitives/blockchain", "primitives/consensus/aura", diff --git a/client/beefy/Cargo.toml b/client/beefy/Cargo.toml new file mode 100644 index 0000000000000..d4541288a6287 --- /dev/null +++ b/client/beefy/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "beefy-gadget" +version = "4.0.0-dev" +authors = ["Parity Technologies "] +edition = "2018" +license = "GPL-3.0-or-later WITH Classpath-exception-2.0" + +[dependencies] +fnv = "1.0.6" +futures = "0.3" +log = "0.4" +parking_lot = "0.11" +thiserror = "1.0" +wasm-timer = "0.2.5" + +codec = { version = "2.2.0", package = "parity-scale-codec", features = ["derive"] } +prometheus = { version = "0.9.0", package = "substrate-prometheus-endpoint", path = "../../utils/prometheus" } + +sp-api = { version = "4.0.0-dev", path = "../../primitives/api" } +sp-application-crypto = { version = "4.0.0-dev", path = "../../primitives/application-crypto" } +sp-arithmetic = { version = "4.0.0-dev", path = "../../primitives/arithmetic" } +sp-blockchain = { version = "4.0.0-dev", path = "../../primitives/blockchain" } +sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } +sp-keystore = { version = "0.10.0-dev", path = "../../primitives/keystore" } +sp-runtime = { version = 
"4.0.0-dev", path = "../../primitives/runtime" } + +sc-utils = { version = "4.0.0-dev", path = "../utils" } +sc-client-api = { version = "4.0.0-dev", path = "../api" } +sc-keystore = { version = "4.0.0-dev", path = "../keystore" } +sc-network = { version = "0.10.0-dev", path = "../network" } +sc-network-gossip = { version = "0.10.0-dev", path = "../network-gossip" } + +beefy-primitives = { version = "4.0.0-dev", path = "../../primitives/beefy" } + +[dev-dependencies] +sc-network-test = { version = "0.8.0", path = "../network/test" } + +strum = { version = "0.21", features = ["derive"] } diff --git a/client/beefy/rpc/Cargo.toml b/client/beefy/rpc/Cargo.toml new file mode 100644 index 0000000000000..6bb5c5fcc668e --- /dev/null +++ b/client/beefy/rpc/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "beefy-gadget-rpc" +version = "4.0.0-dev" +authors = ["Parity Technologies "] +edition = "2018" +license = "GPL-3.0-or-later WITH Classpath-exception-2.0" + +[dependencies] +futures = "0.3.16" +log = "0.4" +serde = { version = "1.0.130", features = ["derive"] } + +jsonrpc-core = "18.0.0" +jsonrpc-core-client = "18.0.0" +jsonrpc-derive = "18.0.0" +jsonrpc-pubsub = "18.0.0" + +codec = { version = "2.2.0", package = "parity-scale-codec", features = ["derive"] } + +sc-rpc = { version = "4.0.0-dev", path = "../../rpc" } + +sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } +sp-runtime = { version = "4.0.0-dev", path = "../../../primitives/runtime" } + +beefy-gadget = { version = "4.0.0-dev", path = "../." } +beefy-primitives = { version = "4.0.0-dev", path = "../../../primitives/beefy" } diff --git a/client/beefy/rpc/src/lib.rs b/client/beefy/rpc/src/lib.rs new file mode 100644 index 0000000000000..c9a09525569b8 --- /dev/null +++ b/client/beefy/rpc/src/lib.rs @@ -0,0 +1,114 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +//! RPC API for BEEFY. + +#![warn(missing_docs)] + +use std::sync::Arc; + +use sp_runtime::traits::Block as BlockT; + +use futures::{FutureExt, SinkExt, StreamExt}; + +use jsonrpc_derive::rpc; +use jsonrpc_pubsub::{manager::SubscriptionManager, typed::Subscriber, SubscriptionId}; +use log::warn; + +use beefy_gadget::notification::BeefySignedCommitmentStream; + +mod notification; + +/// Provides RPC methods for interacting with BEEFY. +#[rpc] +pub trait BeefyApi { + /// RPC Metadata + type Metadata; + + /// Returns the block most recently finalized by BEEFY, alongside its justification. + #[pubsub( + subscription = "beefy_justifications", + subscribe, + name = "beefy_subscribeJustifications" + )] + fn subscribe_justifications( + &self, + metadata: Self::Metadata, + subscriber: Subscriber, + ); + + /// Unsubscribe from receiving notifications about recently finalized blocks. + #[pubsub( + subscription = "beefy_justifications", + unsubscribe, + name = "beefy_unsubscribeJustifications" + )] + fn unsubscribe_justifications( + &self, + metadata: Option, + id: SubscriptionId, + ) -> jsonrpc_core::Result; +} + +/// Implements the BeefyApi RPC trait for interacting with BEEFY.
+pub struct BeefyRpcHandler { + signed_commitment_stream: BeefySignedCommitmentStream, + manager: SubscriptionManager, +} + +impl BeefyRpcHandler { + /// Creates a new BeefyRpcHandler instance. + pub fn new(signed_commitment_stream: BeefySignedCommitmentStream, executor: E) -> Self + where + E: futures::task::Spawn + Send + Sync + 'static, + { + let manager = SubscriptionManager::new(Arc::new(executor)); + Self { signed_commitment_stream, manager } + } +} + +impl BeefyApi for BeefyRpcHandler +where + Block: BlockT, +{ + type Metadata = sc_rpc::Metadata; + + fn subscribe_justifications( + &self, + _metadata: Self::Metadata, + subscriber: Subscriber, + ) { + let stream = self + .signed_commitment_stream + .subscribe() + .map(|x| Ok::<_, ()>(Ok(notification::SignedCommitment::new::(x)))); + + self.manager.add(subscriber, |sink| { + stream + .forward(sink.sink_map_err(|e| warn!("Error sending notifications: {:?}", e))) + .map(|_| ()) + }); + } + + fn unsubscribe_justifications( + &self, + _metadata: Option, + id: SubscriptionId, + ) -> jsonrpc_core::Result { + Ok(self.manager.cancel(id)) + } +} diff --git a/client/beefy/rpc/src/notification.rs b/client/beefy/rpc/src/notification.rs new file mode 100644 index 0000000000000..4830d72905a98 --- /dev/null +++ b/client/beefy/rpc/src/notification.rs @@ -0,0 +1,39 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use codec::Encode; +use serde::{Deserialize, Serialize}; + +use sp_runtime::traits::Block as BlockT; + +/// An encoded signed commitment proving that the given header has been finalized. +/// The given bytes should be the SCALE-encoded representation of a +/// `beefy_primitives::SignedCommitment`. +#[derive(Clone, Serialize, Deserialize)] +pub struct SignedCommitment(sp_core::Bytes); + +impl SignedCommitment { + pub fn new( + signed_commitment: beefy_gadget::notification::SignedCommitment, + ) -> Self + where + Block: BlockT, + { + SignedCommitment(signed_commitment.encode().into()) + } +} diff --git a/client/beefy/src/error.rs b/client/beefy/src/error.rs new file mode 100644 index 0000000000000..db532d34c1e3b --- /dev/null +++ b/client/beefy/src/error.rs @@ -0,0 +1,31 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +//! BEEFY gadget specific errors +//! +//! 
Used for BEEFY gadget internal error handling only + +use std::fmt::Debug; + +#[derive(Debug, thiserror::Error, PartialEq)] +pub enum Error { + #[error("Keystore error: {0}")] + Keystore(String), + #[error("Signature error: {0}")] + Signature(String), +} diff --git a/client/beefy/src/gossip.rs b/client/beefy/src/gossip.rs new file mode 100644 index 0000000000000..d0199964b6ebf --- /dev/null +++ b/client/beefy/src/gossip.rs @@ -0,0 +1,236 @@ +// This file is part of Substrate. + +// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::{collections::BTreeMap, time::Duration}; + +use sc_network::PeerId; +use sc_network_gossip::{MessageIntent, ValidationResult, Validator, ValidatorContext}; +use sp_core::hashing::twox_64; +use sp_runtime::traits::{Block, Hash, Header, NumberFor}; + +use codec::{Decode, Encode}; +use log::{debug, trace}; +use parking_lot::{Mutex, RwLock}; +use wasm_timer::Instant; + +use beefy_primitives::{ + crypto::{Public, Signature}, + MmrRootHash, VoteMessage, +}; + +use crate::keystore::BeefyKeystore; + +#[cfg(test)] +#[path = "gossip_tests.rs"] +mod tests; + +// Limit BEEFY gossip by keeping only a bounded number of voting rounds alive. +const MAX_LIVE_GOSSIP_ROUNDS: usize = 3; + +// Timeout for rebroadcasting messages.
+const REBROADCAST_AFTER: Duration = Duration::from_secs(60 * 5); + +/// Gossip engine messages topic +pub(crate) fn topic() -> B::Hash +where + B: Block, +{ + <::Hashing as Hash>::hash(b"beefy") +} + +/// A type that represents hash of the message. +pub type MessageHash = [u8; 8]; + +type KnownVotes = BTreeMap, fnv::FnvHashSet>; + +/// BEEFY gossip validator +/// +/// Validate BEEFY gossip messages and limit the number of live BEEFY voting rounds. +/// +/// Allows messages from last [`MAX_LIVE_GOSSIP_ROUNDS`] to flow, everything else gets +/// rejected/expired. +/// +/// All messaging is handled in a single BEEFY global topic. +pub(crate) struct GossipValidator +where + B: Block, +{ + topic: B::Hash, + known_votes: RwLock>, + next_rebroadcast: Mutex, +} + +impl GossipValidator +where + B: Block, +{ + pub fn new() -> GossipValidator { + GossipValidator { + topic: topic::(), + known_votes: RwLock::new(BTreeMap::new()), + next_rebroadcast: Mutex::new(Instant::now() + REBROADCAST_AFTER), + } + } + + /// Note a voting round. + /// + /// Noting `round` will keep `round` live. + /// + /// We retain the [`MAX_LIVE_GOSSIP_ROUNDS`] most **recent** voting rounds as live. + /// As long as a voting round is live, it will be gossiped to peer nodes. + pub(crate) fn note_round(&self, round: NumberFor) { + debug!(target: "beefy", "🥩 About to note round #{}", round); + + let mut live = self.known_votes.write(); + + if !live.contains_key(&round) { + live.insert(round, Default::default()); + } + + if live.len() > MAX_LIVE_GOSSIP_ROUNDS { + let to_remove = live.iter().next().map(|x| x.0).copied(); + if let Some(first) = to_remove { + live.remove(&first); + } + } + } + + fn add_known(known_votes: &mut KnownVotes, round: &NumberFor, hash: MessageHash) { + known_votes.get_mut(round).map(|known| known.insert(hash)); + } + + // Note that we will always keep the most recent unseen round alive.
// This is a preliminary fix and the detailed description of why we are
+ { + let known_votes = self.known_votes.read(); + + if !GossipValidator::::is_live(&known_votes, &round) { + return ValidationResult::Discard + } + + if GossipValidator::::is_known(&known_votes, &round, &msg_hash) { + return ValidationResult::ProcessAndKeep(self.topic) + } + } + + if BeefyKeystore::verify(&msg.id, &msg.signature, &msg.commitment.encode()) { + GossipValidator::::add_known(&mut *self.known_votes.write(), &round, msg_hash); + return ValidationResult::ProcessAndKeep(self.topic) + } else { + // TODO: report peer + debug!(target: "beefy", "🥩 Bad signature on message: {:?}, from: {:?}", msg, sender); + } + } + + ValidationResult::Discard + } + + fn message_expired<'a>(&'a self) -> Box bool + 'a> { + let known_votes = self.known_votes.read(); + Box::new(move |_topic, mut data| { + let msg = match VoteMessage::, Public, Signature>::decode( + &mut data, + ) { + Ok(vote) => vote, + Err(_) => return true, + }; + + let round = msg.commitment.block_number; + let expired = !GossipValidator::::is_live(&known_votes, &round); + + trace!(target: "beefy", "🥩 Message for round #{} expired: {}", round, expired); + + expired + }) + } + + fn message_allowed<'a>( + &'a self, + ) -> Box bool + 'a> { + let do_rebroadcast = { + let now = Instant::now(); + let mut next_rebroadcast = self.next_rebroadcast.lock(); + if now >= *next_rebroadcast { + *next_rebroadcast = now + REBROADCAST_AFTER; + true + } else { + false + } + }; + + let known_votes = self.known_votes.read(); + Box::new(move |_who, intent, _topic, mut data| { + if let MessageIntent::PeriodicRebroadcast = intent { + return do_rebroadcast + } + + let msg = match VoteMessage::, Public, Signature>::decode( + &mut data, + ) { + Ok(vote) => vote, + Err(_) => return true, + }; + + let round = msg.commitment.block_number; + let allowed = GossipValidator::::is_live(&known_votes, &round); + + debug!(target: "beefy", "🥩 Message for round #{} allowed: {}", round, allowed); + + allowed + }) + } +} diff --git 
a/client/beefy/src/gossip_tests.rs b/client/beefy/src/gossip_tests.rs new file mode 100644 index 0000000000000..2d46b873cb7b0 --- /dev/null +++ b/client/beefy/src/gossip_tests.rs @@ -0,0 +1,182 @@ +// This file is part of Substrate. + +// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use sc_keystore::LocalKeystore; +use sc_network_test::Block; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; + +use beefy_primitives::{crypto::Signature, Commitment, MmrRootHash, VoteMessage, KEY_TYPE}; + +use crate::keystore::{tests::Keyring, BeefyKeystore}; + +use super::*; + +#[test] +fn note_round_works() { + let gv = GossipValidator::::new(); + + gv.note_round(1u64); + + let live = gv.known_votes.read(); + assert!(GossipValidator::::is_live(&live, &1u64)); + + drop(live); + + gv.note_round(3u64); + gv.note_round(7u64); + gv.note_round(10u64); + + let live = gv.known_votes.read(); + + assert_eq!(live.len(), MAX_LIVE_GOSSIP_ROUNDS); + + assert!(!GossipValidator::::is_live(&live, &1u64)); + assert!(GossipValidator::::is_live(&live, &3u64)); + assert!(GossipValidator::::is_live(&live, &7u64)); + assert!(GossipValidator::::is_live(&live, &10u64)); +} + +#[test] +fn keeps_most_recent_max_rounds() { + let gv = GossipValidator::::new(); + + gv.note_round(3u64); + gv.note_round(7u64); 
+ gv.note_round(10u64); + gv.note_round(1u64); + + let live = gv.known_votes.read(); + + assert_eq!(live.len(), MAX_LIVE_GOSSIP_ROUNDS); + + assert!(GossipValidator::::is_live(&live, &3u64)); + assert!(!GossipValidator::::is_live(&live, &1u64)); + + drop(live); + + gv.note_round(23u64); + gv.note_round(15u64); + gv.note_round(20u64); + gv.note_round(2u64); + + let live = gv.known_votes.read(); + + assert_eq!(live.len(), MAX_LIVE_GOSSIP_ROUNDS); + + assert!(GossipValidator::::is_live(&live, &15u64)); + assert!(GossipValidator::::is_live(&live, &20u64)); + assert!(GossipValidator::::is_live(&live, &23u64)); +} + +#[test] +fn note_same_round_twice() { + let gv = GossipValidator::::new(); + + gv.note_round(3u64); + gv.note_round(7u64); + gv.note_round(10u64); + + let live = gv.known_votes.read(); + + assert_eq!(live.len(), MAX_LIVE_GOSSIP_ROUNDS); + + drop(live); + + // note round #7 again -> should not change anything + gv.note_round(7u64); + + let live = gv.known_votes.read(); + + assert_eq!(live.len(), MAX_LIVE_GOSSIP_ROUNDS); + + assert!(GossipValidator::::is_live(&live, &3u64)); + assert!(GossipValidator::::is_live(&live, &7u64)); + assert!(GossipValidator::::is_live(&live, &10u64)); +} + +struct TestContext; +impl ValidatorContext for TestContext { + fn broadcast_topic(&mut self, _topic: B::Hash, _force: bool) { + todo!() + } + + fn broadcast_message(&mut self, _topic: B::Hash, _message: Vec, _force: bool) { + todo!() + } + + fn send_message(&mut self, _who: &sc_network::PeerId, _message: Vec) { + todo!() + } + + fn send_topic(&mut self, _who: &sc_network::PeerId, _topic: B::Hash, _force: bool) { + todo!() + } +} + +fn sign_commitment( + who: &Keyring, + commitment: &Commitment, +) -> Signature { + let store: SyncCryptoStorePtr = std::sync::Arc::new(LocalKeystore::in_memory()); + SyncCryptoStore::ecdsa_generate_new(&*store, KEY_TYPE, Some(&who.to_seed())).unwrap(); + let beefy_keystore: BeefyKeystore = Some(store).into(); + + beefy_keystore.sign(&who.public(), 
&commitment.encode()).unwrap() +} + +#[test] +fn should_avoid_verifying_signatures_twice() { + let gv = GossipValidator::::new(); + let sender = sc_network::PeerId::random(); + let mut context = TestContext; + + let commitment = + Commitment { payload: MmrRootHash::default(), block_number: 3_u64, validator_set_id: 0 }; + + let signature = sign_commitment(&Keyring::Alice, &commitment); + + let vote = VoteMessage { commitment, id: Keyring::Alice.public(), signature }; + + gv.note_round(3u64); + gv.note_round(7u64); + gv.note_round(10u64); + + // first time the cache should be populated. + let res = gv.validate(&mut context, &sender, &vote.encode()); + + assert!(matches!(res, ValidationResult::ProcessAndKeep(_))); + assert_eq!(gv.known_votes.read().get(&vote.commitment.block_number).map(|x| x.len()), Some(1)); + + // second time we should hit the cache + let res = gv.validate(&mut context, &sender, &vote.encode()); + + assert!(matches!(res, ValidationResult::ProcessAndKeep(_))); + + // next we should quickly reject if the round is not live. + gv.note_round(11_u64); + gv.note_round(12_u64); + + assert!(!GossipValidator::::is_live( + &*gv.known_votes.read(), + &vote.commitment.block_number + )); + + let res = gv.validate(&mut context, &sender, &vote.encode()); + + assert!(matches!(res, ValidationResult::Discard)); +} diff --git a/client/beefy/src/keystore.rs b/client/beefy/src/keystore.rs new file mode 100644 index 0000000000000..88618b8a5a140 --- /dev/null +++ b/client/beefy/src/keystore.rs @@ -0,0 +1,119 @@ +// This file is part of Substrate. + +// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. 
+ +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::convert::{From, TryInto}; + +use sp_application_crypto::RuntimeAppPublic; +use sp_core::keccak_256; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; + +use log::warn; + +use beefy_primitives::{ + crypto::{Public, Signature}, + KEY_TYPE, +}; + +use crate::error; + +#[cfg(test)] +#[path = "keystore_tests.rs"] +pub mod tests; + +/// A BEEFY specific keystore implemented as a `Newtype`. This is basically a +/// wrapper around [`sp_keystore::SyncCryptoStore`] and allows to customize +/// common cryptographic functionality. +pub(crate) struct BeefyKeystore(Option); + +impl BeefyKeystore { + /// Check if the keystore contains a private key for one of the public keys + /// contained in `keys`. A public key with a matching private key is known + /// as a local authority id. + /// + /// Return the public key for which we also do have a private key. If no + /// matching private key is found, `None` will be returned. + pub fn authority_id(&self, keys: &[Public]) -> Option { + let store = self.0.clone()?; + + // we do check for multiple private keys as a key store sanity check. + let public: Vec = keys + .iter() + .filter(|k| SyncCryptoStore::has_keys(&*store, &[(k.to_raw_vec(), KEY_TYPE)])) + .cloned() + .collect(); + + if public.len() > 1 { + warn!(target: "beefy", "🥩 Multiple private keys found for: {:?} ({})", public, public.len()); + } + + public.get(0).cloned() + } + + /// Sign `message` with the `public` key. + /// + /// Note that `message` usually will be pre-hashed before being signed. + /// + /// Return the message signature or an error in case of failure. 
+ pub fn sign(&self, public: &Public, message: &[u8]) -> Result { + let store = self.0.clone().ok_or_else(|| error::Error::Keystore("no Keystore".into()))?; + + let msg = keccak_256(message); + let public = public.as_ref(); + + let sig = SyncCryptoStore::ecdsa_sign_prehashed(&*store, KEY_TYPE, public, &msg) + .map_err(|e| error::Error::Keystore(e.to_string()))? + .ok_or_else(|| error::Error::Signature("ecdsa_sign_prehashed() failed".to_string()))?; + + // check that `sig` has the expected result type + let sig = sig.clone().try_into().map_err(|_| { + error::Error::Signature(format!("invalid signature {:?} for key {:?}", sig, public)) + })?; + + Ok(sig) + } + + /// Returns a vector of [`beefy_primitives::crypto::Public`] keys which are currently supported + /// (i.e. found in the keystore). + pub fn public_keys(&self) -> Result, error::Error> { + let store = self.0.clone().ok_or_else(|| error::Error::Keystore("no Keystore".into()))?; + + let pk: Vec = SyncCryptoStore::ecdsa_public_keys(&*store, KEY_TYPE) + .iter() + .map(|k| Public::from(k.clone())) + .collect(); + + Ok(pk) + } + + /// Use the `public` key to verify that `sig` is a valid signature for `message`. + /// + /// Return `true` if the signature is authentic, `false` otherwise. + pub fn verify(public: &Public, sig: &Signature, message: &[u8]) -> bool { + let msg = keccak_256(message); + let sig = sig.as_ref(); + let public = public.as_ref(); + + sp_core::ecdsa::Pair::verify_prehashed(sig, &msg, public) + } +} + +impl From> for BeefyKeystore { + fn from(store: Option) -> BeefyKeystore { + BeefyKeystore(store) + } +} diff --git a/client/beefy/src/keystore_tests.rs b/client/beefy/src/keystore_tests.rs new file mode 100644 index 0000000000000..99e3e42228df2 --- /dev/null +++ b/client/beefy/src/keystore_tests.rs @@ -0,0 +1,275 @@ +// This file is part of Substrate. + +// Copyright (C) 2017-2021 Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::sync::Arc; + +use sc_keystore::LocalKeystore; +use sp_core::{ecdsa, keccak_256, Pair}; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; + +use beefy_primitives::{crypto, KEY_TYPE}; + +use super::BeefyKeystore; +use crate::error::Error; + +/// Set of test accounts using [`beefy_primitives::crypto`] types. +#[allow(missing_docs)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, strum::Display, strum::EnumIter)] +pub(crate) enum Keyring { + Alice, + Bob, + Charlie, + Dave, + Eve, + Ferdie, + One, + Two, +} + +impl Keyring { + /// Sign `msg`. + pub fn sign(self, msg: &[u8]) -> crypto::Signature { + let msg = keccak_256(msg); + ecdsa::Pair::from(self).sign_prehashed(&msg).into() + } + + /// Return key pair. + pub fn pair(self) -> crypto::Pair { + ecdsa::Pair::from_string(self.to_seed().as_str(), None).unwrap().into() + } + + /// Return public key. + pub fn public(self) -> crypto::Public { + self.pair().public() + } + + /// Return seed string. 
+ pub fn to_seed(self) -> String { + format!("//{}", self) + } +} + +impl From for crypto::Pair { + fn from(k: Keyring) -> Self { + k.pair() + } +} + +impl From for ecdsa::Pair { + fn from(k: Keyring) -> Self { + k.pair().into() + } +} + +fn keystore() -> SyncCryptoStorePtr { + Arc::new(LocalKeystore::in_memory()) +} + +#[test] +fn verify_should_work() { + let msg = keccak_256(b"I am Alice!"); + let sig = Keyring::Alice.sign(b"I am Alice!"); + + assert!(ecdsa::Pair::verify_prehashed( + &sig.clone().into(), + &msg, + &Keyring::Alice.public().into(), + )); + + // different public key -> fail + assert!(!ecdsa::Pair::verify_prehashed( + &sig.clone().into(), + &msg, + &Keyring::Bob.public().into(), + )); + + let msg = keccak_256(b"I am not Alice!"); + + // different msg -> fail + assert!(!ecdsa::Pair::verify_prehashed(&sig.into(), &msg, &Keyring::Alice.public().into(),)); +} + +#[test] +fn pair_works() { + let want = crypto::Pair::from_string("//Alice", None).expect("Pair failed").to_raw_vec(); + let got = Keyring::Alice.pair().to_raw_vec(); + assert_eq!(want, got); + + let want = crypto::Pair::from_string("//Bob", None).expect("Pair failed").to_raw_vec(); + let got = Keyring::Bob.pair().to_raw_vec(); + assert_eq!(want, got); + + let want = crypto::Pair::from_string("//Charlie", None).expect("Pair failed").to_raw_vec(); + let got = Keyring::Charlie.pair().to_raw_vec(); + assert_eq!(want, got); + + let want = crypto::Pair::from_string("//Dave", None).expect("Pair failed").to_raw_vec(); + let got = Keyring::Dave.pair().to_raw_vec(); + assert_eq!(want, got); + + let want = crypto::Pair::from_string("//Eve", None).expect("Pair failed").to_raw_vec(); + let got = Keyring::Eve.pair().to_raw_vec(); + assert_eq!(want, got); + + let want = crypto::Pair::from_string("//Ferdie", None).expect("Pair failed").to_raw_vec(); + let got = Keyring::Ferdie.pair().to_raw_vec(); + assert_eq!(want, got); + + let want = crypto::Pair::from_string("//One", None).expect("Pair 
failed").to_raw_vec(); + let got = Keyring::One.pair().to_raw_vec(); + assert_eq!(want, got); + + let want = crypto::Pair::from_string("//Two", None).expect("Pair failed").to_raw_vec(); + let got = Keyring::Two.pair().to_raw_vec(); + assert_eq!(want, got); +} + +#[test] +fn authority_id_works() { + let store = keystore(); + + let alice: crypto::Public = + SyncCryptoStore::ecdsa_generate_new(&*store, KEY_TYPE, Some(&Keyring::Alice.to_seed())) + .ok() + .unwrap() + .into(); + + let bob = Keyring::Bob.public(); + let charlie = Keyring::Charlie.public(); + + let store: BeefyKeystore = Some(store).into(); + + let mut keys = vec![bob, charlie]; + + let id = store.authority_id(keys.as_slice()); + assert!(id.is_none()); + + keys.push(alice.clone()); + + let id = store.authority_id(keys.as_slice()).unwrap(); + assert_eq!(id, alice); +} + +#[test] +fn sign_works() { + let store = keystore(); + + let alice: crypto::Public = + SyncCryptoStore::ecdsa_generate_new(&*store, KEY_TYPE, Some(&Keyring::Alice.to_seed())) + .ok() + .unwrap() + .into(); + + let store: BeefyKeystore = Some(store).into(); + + let msg = b"are you involved or commited?"; + + let sig1 = store.sign(&alice, msg).unwrap(); + let sig2 = Keyring::Alice.sign(msg); + + assert_eq!(sig1, sig2); +} + +#[test] +fn sign_error() { + let store = keystore(); + + let _ = SyncCryptoStore::ecdsa_generate_new(&*store, KEY_TYPE, Some(&Keyring::Bob.to_seed())) + .ok() + .unwrap(); + + let store: BeefyKeystore = Some(store).into(); + + let alice = Keyring::Alice.public(); + + let msg = b"are you involved or commited?"; + let sig = store.sign(&alice, msg).err().unwrap(); + let err = Error::Signature("ecdsa_sign_prehashed() failed".to_string()); + + assert_eq!(sig, err); +} + +#[test] +fn sign_no_keystore() { + let store: BeefyKeystore = None.into(); + + let alice = Keyring::Alice.public(); + let msg = b"are you involved or commited"; + + let sig = store.sign(&alice, msg).err().unwrap(); + let err = Error::Keystore("no 
Keystore".to_string()); + assert_eq!(sig, err); +} + +#[test] +fn verify_works() { + let store = keystore(); + + let alice: crypto::Public = + SyncCryptoStore::ecdsa_generate_new(&*store, KEY_TYPE, Some(&Keyring::Alice.to_seed())) + .ok() + .unwrap() + .into(); + + let store: BeefyKeystore = Some(store).into(); + + // `msg` and `sig` match + let msg = b"are you involved or commited?"; + let sig = store.sign(&alice, msg).unwrap(); + assert!(BeefyKeystore::verify(&alice, &sig, msg)); + + // `msg and `sig` don't match + let msg = b"you are just involved"; + assert!(!BeefyKeystore::verify(&alice, &sig, msg)); +} + +// Note that we use keys with and without a seed for this test. +#[test] +fn public_keys_works() { + const TEST_TYPE: sp_application_crypto::KeyTypeId = sp_application_crypto::KeyTypeId(*b"test"); + + let store = keystore(); + + let add_key = |key_type, seed: Option<&str>| { + SyncCryptoStore::ecdsa_generate_new(&*store, key_type, seed).unwrap() + }; + + // test keys + let _ = add_key(TEST_TYPE, Some(Keyring::Alice.to_seed().as_str())); + let _ = add_key(TEST_TYPE, Some(Keyring::Bob.to_seed().as_str())); + + let _ = add_key(TEST_TYPE, None); + let _ = add_key(TEST_TYPE, None); + + // BEEFY keys + let _ = add_key(KEY_TYPE, Some(Keyring::Dave.to_seed().as_str())); + let _ = add_key(KEY_TYPE, Some(Keyring::Eve.to_seed().as_str())); + + let key1: crypto::Public = add_key(KEY_TYPE, None).into(); + let key2: crypto::Public = add_key(KEY_TYPE, None).into(); + + let store: BeefyKeystore = Some(store).into(); + + let keys = store.public_keys().ok().unwrap(); + + assert!(keys.len() == 4); + assert!(keys.contains(&Keyring::Dave.public())); + assert!(keys.contains(&Keyring::Eve.public())); + assert!(keys.contains(&key1)); + assert!(keys.contains(&key2)); +} diff --git a/client/beefy/src/lib.rs b/client/beefy/src/lib.rs new file mode 100644 index 0000000000000..b2372b2a6c518 --- /dev/null +++ b/client/beefy/src/lib.rs @@ -0,0 +1,159 @@ +// This file is part of Substrate. 
+ +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::sync::Arc; + +use log::debug; +use prometheus::Registry; + +use sc_client_api::{Backend, BlockchainEvents, Finalizer}; +use sc_network_gossip::{GossipEngine, Network as GossipNetwork}; + +use sp_api::ProvideRuntimeApi; +use sp_blockchain::HeaderBackend; +use sp_keystore::SyncCryptoStorePtr; +use sp_runtime::traits::Block; + +use beefy_primitives::BeefyApi; + +mod error; +mod gossip; +mod keystore; +mod metrics; +mod round; +mod worker; + +pub mod notification; + +pub const BEEFY_PROTOCOL_NAME: &str = "/paritytech/beefy/1"; + +/// Returns the configuration value to put in +/// [`sc_network::config::NetworkConfiguration::extra_sets`]. +pub fn beefy_peers_set_config() -> sc_network::config::NonDefaultSetConfig { + let mut cfg = + sc_network::config::NonDefaultSetConfig::new(BEEFY_PROTOCOL_NAME.into(), 1024 * 1024); + cfg.allow_non_reserved(25, 25); + cfg +} + +/// A convenience BEEFY client trait that defines all the type bounds a BEEFY client +/// has to satisfy. Ideally that should actually be a trait alias. Unfortunately as +/// of today, Rust does not allow a type alias to be used as a trait bound. Tracking +/// issue is . 
+pub trait Client: + BlockchainEvents + HeaderBackend + Finalizer + ProvideRuntimeApi + Send + Sync +where + B: Block, + BE: Backend, +{ + // empty +} + +impl Client for T +where + B: Block, + BE: Backend, + T: BlockchainEvents + + HeaderBackend + + Finalizer + + ProvideRuntimeApi + + Send + + Sync, +{ + // empty +} + +/// BEEFY gadget initialization parameters. +pub struct BeefyParams +where + B: Block, + BE: Backend, + C: Client, + C::Api: BeefyApi, + N: GossipNetwork + Clone + Send + 'static, +{ + /// BEEFY client + pub client: Arc, + /// Client Backend + pub backend: Arc, + /// Local key store + pub key_store: Option, + /// Gossip network + pub network: N, + /// BEEFY signed commitment sender + pub signed_commitment_sender: notification::BeefySignedCommitmentSender, + /// Minimal delta between blocks, BEEFY should vote for + pub min_block_delta: u32, + /// Prometheus metric registry + pub prometheus_registry: Option, +} + +/// Start the BEEFY gadget. +/// +/// This is a thin shim around running and awaiting a BEEFY worker. 
+pub async fn start_beefy_gadget(beefy_params: BeefyParams) +where + B: Block, + BE: Backend, + C: Client, + C::Api: BeefyApi, + N: GossipNetwork + Clone + Send + 'static, +{ + let BeefyParams { + client, + backend, + key_store, + network, + signed_commitment_sender, + min_block_delta, + prometheus_registry, + } = beefy_params; + + let gossip_validator = Arc::new(gossip::GossipValidator::new()); + let gossip_engine = + GossipEngine::new(network, BEEFY_PROTOCOL_NAME, gossip_validator.clone(), None); + + let metrics = + prometheus_registry.as_ref().map(metrics::Metrics::register).and_then( + |result| match result { + Ok(metrics) => { + debug!(target: "beefy", "🥩 Registered metrics"); + Some(metrics) + }, + Err(err) => { + debug!(target: "beefy", "🥩 Failed to register metrics: {:?}", err); + None + }, + }, + ); + + let worker_params = worker::WorkerParams { + client, + backend, + key_store: key_store.into(), + signed_commitment_sender, + gossip_engine, + gossip_validator, + min_block_delta, + metrics, + }; + + let worker = worker::BeefyWorker::<_, _, _>::new(worker_params); + + worker.run().await +} diff --git a/client/beefy/src/metrics.rs b/client/beefy/src/metrics.rs new file mode 100644 index 0000000000000..0fdc29f97c37a --- /dev/null +++ b/client/beefy/src/metrics.rs @@ -0,0 +1,93 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+ +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +//! BEEFY Prometheus metrics definition + +use prometheus::{register, Counter, Gauge, PrometheusError, Registry, U64}; + +/// BEEFY metrics exposed through Prometheus +pub(crate) struct Metrics { + /// Current active validator set id + pub beefy_validator_set_id: Gauge, + /// Total number of votes sent by this node + pub beefy_votes_sent: Counter, + /// Most recent concluded voting round + pub beefy_round_concluded: Gauge, + /// Best block finalized by BEEFY + pub beefy_best_block: Gauge, + /// Next block BEEFY should vote on + pub beefy_should_vote_on: Gauge, + /// Number of sessions without a signed commitment + pub beefy_skipped_sessions: Counter, +} + +impl Metrics { + pub(crate) fn register(registry: &Registry) -> Result { + Ok(Self { + beefy_validator_set_id: register( + Gauge::new("beefy_validator_set_id", "Current BEEFY active validator set id.")?, + registry, + )?, + beefy_votes_sent: register( + Counter::new("beefy_votes_sent", "Number of votes sent by this node")?, + registry, + )?, + beefy_round_concluded: register( + Gauge::new("beefy_round_concluded", "Voting round, that has been concluded")?, + registry, + )?, + beefy_best_block: register( + Gauge::new("beefy_best_block", "Best block finalized by BEEFY")?, + registry, + )?, + beefy_should_vote_on: register( + Gauge::new("beefy_should_vote_on", "Next block, BEEFY should vote on")?, + registry, + )?, + beefy_skipped_sessions: register( + Counter::new( + "beefy_skipped_sessions", + "Number of sessions without a signed commitment", + )?, + registry, + )?, + }) + } +} + +// Note: we use the `format` macro to convert an expr into a `u64`. This will fail, +// if expr does not derive `Display`. +#[macro_export] +macro_rules! 
metric_set { + ($self:ident, $m:ident, $v:expr) => {{ + let val: u64 = format!("{}", $v).parse().unwrap(); + + if let Some(metrics) = $self.metrics.as_ref() { + metrics.$m.set(val); + } + }}; +} + +#[macro_export] +macro_rules! metric_inc { + ($self:ident, $m:ident) => {{ + if let Some(metrics) = $self.metrics.as_ref() { + metrics.$m.inc(); + } + }}; +} diff --git a/client/beefy/src/notification.rs b/client/beefy/src/notification.rs new file mode 100644 index 0000000000000..6099c9681447b --- /dev/null +++ b/client/beefy/src/notification.rs @@ -0,0 +1,113 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +use std::sync::Arc; + +use sc_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use sp_runtime::traits::{Block, NumberFor}; + +use parking_lot::Mutex; + +/// Stream of signed commitments returned when subscribing. +pub type SignedCommitment = + beefy_primitives::SignedCommitment, beefy_primitives::MmrRootHash>; + +/// Stream of signed commitments returned when subscribing. +type SignedCommitmentStream = TracingUnboundedReceiver>; + +/// Sending endpoint for notifying about signed commitments. 
+type SignedCommitmentSender = TracingUnboundedSender>; + +/// Collection of channel sending endpoints shared with the receiver side so they can register +/// themselves. +type SharedSignedCommitmentSenders = Arc>>>; + +/// The sending half of the signed commitment channel(s). +/// +/// Used to send notifications about signed commitments generated at the end of a BEEFY round. +#[derive(Clone)] +pub struct BeefySignedCommitmentSender +where + B: Block, +{ + subscribers: SharedSignedCommitmentSenders, +} + +impl BeefySignedCommitmentSender +where + B: Block, +{ + /// The `subscribers` should be shared with a corresponding `SignedCommitmentSender`. + fn new(subscribers: SharedSignedCommitmentSenders) -> Self { + Self { subscribers } + } + + /// Send out a notification to all subscribers that a new signed commitment is available for a + /// block. + pub fn notify(&self, signed_commitment: SignedCommitment) { + let mut subscribers = self.subscribers.lock(); + + // do an initial prune on closed subscriptions + subscribers.retain(|n| !n.is_closed()); + + if !subscribers.is_empty() { + subscribers.retain(|n| n.unbounded_send(signed_commitment.clone()).is_ok()); + } + } +} + +/// The receiving half of the signed commitments channel. +/// +/// Used to receive notifications about signed commitments generated at the end of a BEEFY round. +/// The `BeefySignedCommitmentStream` entity stores the `SharedSignedCommitmentSenders` so it can be +/// used to add more subscriptions. +#[derive(Clone)] +pub struct BeefySignedCommitmentStream +where + B: Block, +{ + subscribers: SharedSignedCommitmentSenders, +} + +impl BeefySignedCommitmentStream +where + B: Block, +{ + /// Creates a new pair of receiver and sender of signed commitment notifications. 
+ pub fn channel() -> (BeefySignedCommitmentSender, Self) { + let subscribers = Arc::new(Mutex::new(vec![])); + let receiver = BeefySignedCommitmentStream::new(subscribers.clone()); + let sender = BeefySignedCommitmentSender::new(subscribers); + (sender, receiver) + } + + /// Create a new receiver of signed commitment notifications. + /// + /// The `subscribers` should be shared with a corresponding `BeefySignedCommitmentSender`. + fn new(subscribers: SharedSignedCommitmentSenders) -> Self { + Self { subscribers } + } + + /// Subscribe to a channel through which signed commitments are sent at the end of each BEEFY + /// voting round. + pub fn subscribe(&self) -> SignedCommitmentStream { + let (sender, receiver) = tracing_unbounded("mpsc_signed_commitments_notification_stream"); + self.subscribers.lock().push(sender); + receiver + } +} diff --git a/client/beefy/src/round.rs b/client/beefy/src/round.rs new file mode 100644 index 0000000000000..7d443603b364e --- /dev/null +++ b/client/beefy/src/round.rs @@ -0,0 +1,121 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +use std::{collections::BTreeMap, hash::Hash}; + +use log::{debug, trace}; + +use beefy_primitives::{ + crypto::{Public, Signature}, + ValidatorSet, ValidatorSetId, +}; +use sp_arithmetic::traits::AtLeast32BitUnsigned; +use sp_runtime::traits::MaybeDisplay; + +#[derive(Default)] +struct RoundTracker { + votes: Vec<(Public, Signature)>, +} + +impl RoundTracker { + fn add_vote(&mut self, vote: (Public, Signature)) -> bool { + // this needs to handle equivocations in the future + if self.votes.contains(&vote) { + return false + } + + self.votes.push(vote); + true + } + + fn is_done(&self, threshold: usize) -> bool { + self.votes.len() >= threshold + } +} + +fn threshold(authorities: usize) -> usize { + let faulty = authorities.saturating_sub(1) / 3; + authorities - faulty +} + +pub(crate) struct Rounds { + rounds: BTreeMap<(Hash, Number), RoundTracker>, + validator_set: ValidatorSet, +} + +impl Rounds +where + H: Ord + Hash, + N: Ord + AtLeast32BitUnsigned + MaybeDisplay, +{ + pub(crate) fn new(validator_set: ValidatorSet) -> Self { + Rounds { rounds: BTreeMap::new(), validator_set } + } +} + +impl Rounds +where + H: Ord + Hash, + N: Ord + AtLeast32BitUnsigned + MaybeDisplay, +{ + pub(crate) fn validator_set_id(&self) -> ValidatorSetId { + self.validator_set.id + } + + pub(crate) fn validators(&self) -> Vec { + self.validator_set.validators.clone() + } + + pub(crate) fn add_vote(&mut self, round: (H, N), vote: (Public, Signature)) -> bool { + self.rounds.entry(round).or_default().add_vote(vote) + } + + pub(crate) fn is_done(&self, round: &(H, N)) -> bool { + let done = self + .rounds + .get(round) + .map(|tracker| tracker.is_done(threshold(self.validator_set.validators.len()))) + .unwrap_or(false); + + debug!(target: "beefy", "🥩 Round #{} done: {}", round.1, done); + + done + } + + pub(crate) fn drop(&mut self, round: &(H, N)) -> Option>> { + trace!(target: "beefy", "🥩 About to drop round #{}", round.1); + + let signatures = self.rounds.remove(round)?.votes; + + 
Some( + self.validator_set + .validators + .iter() + .map(|authority_id| { + signatures.iter().find_map(|(id, sig)| { + if id == authority_id { + Some(sig.clone()) + } else { + None + } + }) + }) + .collect(), + ) + } +} diff --git a/client/beefy/src/worker.rs b/client/beefy/src/worker.rs new file mode 100644 index 0000000000000..3f52686930332 --- /dev/null +++ b/client/beefy/src/worker.rs @@ -0,0 +1,534 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 + +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +use std::{collections::BTreeSet, fmt::Debug, marker::PhantomData, sync::Arc}; + +use codec::{Codec, Decode, Encode}; +use futures::{future, FutureExt, StreamExt}; +use log::{debug, error, info, trace, warn}; +use parking_lot::Mutex; + +use sc_client_api::{Backend, FinalityNotification, FinalityNotifications}; +use sc_network_gossip::GossipEngine; + +use sp_api::BlockId; +use sp_arithmetic::traits::AtLeast32Bit; +use sp_runtime::{ + generic::OpaqueDigestItemId, + traits::{Block, Header, NumberFor}, + SaturatedConversion, +}; + +use beefy_primitives::{ + crypto::{AuthorityId, Public, Signature}, + BeefyApi, Commitment, ConsensusLog, MmrRootHash, SignedCommitment, ValidatorSet, + VersionedCommitment, VoteMessage, BEEFY_ENGINE_ID, GENESIS_AUTHORITY_SET_ID, +}; + +use crate::{ + error, + gossip::{topic, GossipValidator}, + keystore::BeefyKeystore, + metric_inc, metric_set, + metrics::Metrics, + notification, round, Client, +}; + +pub(crate) struct WorkerParams +where + B: Block, +{ + pub client: Arc, + pub backend: Arc, + pub key_store: BeefyKeystore, + pub signed_commitment_sender: notification::BeefySignedCommitmentSender, + pub gossip_engine: GossipEngine, + pub gossip_validator: Arc>, + pub min_block_delta: u32, + pub metrics: Option, +} + +/// A BEEFY worker plays the BEEFY protocol +pub(crate) struct BeefyWorker +where + B: Block, + BE: Backend, + C: Client, +{ + client: Arc, + backend: Arc, + key_store: BeefyKeystore, + signed_commitment_sender: notification::BeefySignedCommitmentSender, + gossip_engine: Arc>>, + gossip_validator: Arc>, + /// Min delta in block numbers between two blocks, BEEFY should vote on + min_block_delta: u32, + metrics: Option, + rounds: round::Rounds>, + finality_notifications: FinalityNotifications, + /// Best block we received a GRANDPA notification for + best_grandpa_block: NumberFor, + /// Best block a BEEFY voting round has been concluded for + best_beefy_block: Option>, + /// Validator set id for the last signed commitment + 
last_signed_id: u64, + // keep rustc happy + _backend: PhantomData, +} + +impl BeefyWorker +where + B: Block + Codec, + BE: Backend, + C: Client, + C::Api: BeefyApi, +{ + /// Return a new BEEFY worker instance. + /// + /// Note that a BEEFY worker is only fully functional if a corresponding + /// BEEFY pallet has been deployed on-chain. + /// + /// The BEEFY pallet is needed in order to keep track of the BEEFY authority set. + pub(crate) fn new(worker_params: WorkerParams) -> Self { + let WorkerParams { + client, + backend, + key_store, + signed_commitment_sender, + gossip_engine, + gossip_validator, + min_block_delta, + metrics, + } = worker_params; + + BeefyWorker { + client: client.clone(), + backend, + key_store, + signed_commitment_sender, + gossip_engine: Arc::new(Mutex::new(gossip_engine)), + gossip_validator, + min_block_delta, + metrics, + rounds: round::Rounds::new(ValidatorSet::empty()), + finality_notifications: client.finality_notification_stream(), + best_grandpa_block: client.info().finalized_number, + best_beefy_block: None, + last_signed_id: 0, + _backend: PhantomData, + } + } +} + +impl BeefyWorker +where + B: Block, + BE: Backend, + C: Client, + C::Api: BeefyApi, +{ + /// Return `true`, if we should vote on block `number` + fn should_vote_on(&self, number: NumberFor) -> bool { + let best_beefy_block = if let Some(block) = self.best_beefy_block { + block + } else { + debug!(target: "beefy", "🥩 Missing best BEEFY block - won't vote for: {:?}", number); + return false + }; + + let target = vote_target(self.best_grandpa_block, best_beefy_block, self.min_block_delta); + + trace!(target: "beefy", "🥩 should_vote_on: #{:?}, next_block_to_vote_on: #{:?}", number, target); + + metric_set!(self, beefy_should_vote_on, target); + + number == target + } + + /// Return the current active validator set at header `header`. + /// + /// Note that the validator set could be `None`. 
This is the case if we don't find + /// a BEEFY authority set change and we can't fetch the authority set from the + /// BEEFY on-chain state. + /// + /// Such a failure is usually an indication that the BEEFY pallet has not been deployed (yet). + fn validator_set(&self, header: &B::Header) -> Option> { + let new = if let Some(new) = find_authorities_change::(header) { + Some(new) + } else { + let at = BlockId::hash(header.hash()); + self.client.runtime_api().validator_set(&at).ok() + }; + + trace!(target: "beefy", "🥩 active validator set: {:?}", new); + + new + } + + /// Verify `active` validator set for `block` against the key store + /// + /// The critical case is, if we do have a public key in the key store which is not + /// part of the active validator set. + /// + /// Note that for a non-authority node there will be no keystore, and we will + /// return an error and don't check. The error can usually be ignored. + fn verify_validator_set( + &self, + block: &NumberFor, + mut active: ValidatorSet, + ) -> Result<(), error::Error> { + let active: BTreeSet = active.validators.drain(..).collect(); + + let store: BTreeSet = self.key_store.public_keys()?.drain(..).collect(); + + let missing: Vec<_> = store.difference(&active).cloned().collect(); + + if !missing.is_empty() { + debug!(target: "beefy", "🥩 for block {:?} public key missing in validator set: {:?}", block, missing); + } + + Ok(()) + } + + fn handle_finality_notification(&mut self, notification: FinalityNotification) { + trace!(target: "beefy", "🥩 Finality notification: {:?}", notification); + + // update best GRANDPA finalized block we have seen + self.best_grandpa_block = *notification.header.number(); + + if let Some(active) = self.validator_set(¬ification.header) { + // Authority set change or genesis set id triggers new voting rounds + // + // TODO: (adoerr) Enacting a new authority set will also implicitly 'conclude' + // the currently active BEEFY voting round by starting a new one. 
This is + // temporary and needs to be replaced by proper round life cycle handling. + if active.id != self.rounds.validator_set_id() || + (active.id == GENESIS_AUTHORITY_SET_ID && self.best_beefy_block.is_none()) + { + debug!(target: "beefy", "🥩 New active validator set id: {:?}", active); + metric_set!(self, beefy_validator_set_id, active.id); + + // BEEFY should produce a signed commitment for each session + if active.id != self.last_signed_id + 1 && active.id != GENESIS_AUTHORITY_SET_ID { + metric_inc!(self, beefy_skipped_sessions); + } + + // verify the new validator set + let _ = self.verify_validator_set(notification.header.number(), active.clone()); + + self.rounds = round::Rounds::new(active.clone()); + + debug!(target: "beefy", "🥩 New Rounds for id: {:?}", active.id); + + self.best_beefy_block = Some(*notification.header.number()); + + // this metric is kind of 'fake'. Best BEEFY block should only be updated once we + // have a signed commitment for the block. Remove once the above TODO is done. 
+ metric_set!(self, beefy_best_block, *notification.header.number()); + } + } + + if self.should_vote_on(*notification.header.number()) { + let authority_id = if let Some(id) = + self.key_store.authority_id(self.rounds.validators().as_slice()) + { + debug!(target: "beefy", "🥩 Local authority id: {:?}", id); + id + } else { + debug!(target: "beefy", "🥩 Missing validator id - can't vote for: {:?}", notification.header.hash()); + return + }; + + let mmr_root = + if let Some(hash) = find_mmr_root_digest::(¬ification.header) { + hash + } else { + warn!(target: "beefy", "🥩 No MMR root digest found for: {:?}", notification.header.hash()); + return + }; + + let commitment = Commitment { + payload: mmr_root, + block_number: notification.header.number(), + validator_set_id: self.rounds.validator_set_id(), + }; + let encoded_commitment = commitment.encode(); + + let signature = match self.key_store.sign(&authority_id, &*encoded_commitment) { + Ok(sig) => sig, + Err(err) => { + warn!(target: "beefy", "🥩 Error signing commitment: {:?}", err); + return + }, + }; + + trace!( + target: "beefy", + "🥩 Produced signature using {:?}, is_valid: {:?}", + authority_id, + BeefyKeystore::verify(&authority_id, &signature, &*encoded_commitment) + ); + + let message = VoteMessage { commitment, id: authority_id, signature }; + + let encoded_message = message.encode(); + + metric_inc!(self, beefy_votes_sent); + + debug!(target: "beefy", "🥩 Sent vote message: {:?}", message); + + self.handle_vote( + (message.commitment.payload, *message.commitment.block_number), + (message.id, message.signature), + ); + + self.gossip_engine.lock().gossip_message(topic::(), encoded_message, false); + } + } + + fn handle_vote(&mut self, round: (MmrRootHash, NumberFor), vote: (Public, Signature)) { + self.gossip_validator.note_round(round.1); + + let vote_added = self.rounds.add_vote(round, vote); + + if vote_added && self.rounds.is_done(&round) { + if let Some(signatures) = self.rounds.drop(&round) { + // id is 
stored for skipped session metric calculation + self.last_signed_id = self.rounds.validator_set_id(); + + let commitment = Commitment { + payload: round.0, + block_number: round.1, + validator_set_id: self.last_signed_id, + }; + + let signed_commitment = SignedCommitment { commitment, signatures }; + + metric_set!(self, beefy_round_concluded, round.1); + + info!(target: "beefy", "🥩 Round #{} concluded, committed: {:?}.", round.1, signed_commitment); + + if self + .backend + .append_justification( + BlockId::Number(round.1), + ( + BEEFY_ENGINE_ID, + VersionedCommitment::V1(signed_commitment.clone()).encode(), + ), + ) + .is_err() + { + // just a trace, because until the round lifecycle is improved, we will + // conclude certain rounds multiple times. + trace!(target: "beefy", "🥩 Failed to append justification: {:?}", signed_commitment); + } + + self.signed_commitment_sender.notify(signed_commitment); + self.best_beefy_block = Some(round.1); + + metric_set!(self, beefy_best_block, round.1); + } + } + } + + pub(crate) async fn run(mut self) { + let mut votes = Box::pin(self.gossip_engine.lock().messages_for(topic::()).filter_map( + |notification| async move { + debug!(target: "beefy", "🥩 Got vote message: {:?}", notification); + + VoteMessage::, Public, Signature>::decode( + &mut ¬ification.message[..], + ) + .ok() + }, + )); + + loop { + let engine = self.gossip_engine.clone(); + let gossip_engine = future::poll_fn(|cx| engine.lock().poll_unpin(cx)); + + futures::select! 
{ + notification = self.finality_notifications.next().fuse() => { + if let Some(notification) = notification { + self.handle_finality_notification(notification); + } else { + return; + } + }, + vote = votes.next().fuse() => { + if let Some(vote) = vote { + self.handle_vote( + (vote.commitment.payload, vote.commitment.block_number), + (vote.id, vote.signature), + ); + } else { + return; + } + }, + _ = gossip_engine.fuse() => { + error!(target: "beefy", "🥩 Gossip engine has terminated."); + return; + } + } + } + } +} + +/// Extract the MMR root hash from a digest in the given header, if it exists. +fn find_mmr_root_digest(header: &B::Header) -> Option +where + B: Block, + Id: Codec, +{ + header.digest().logs().iter().find_map(|log| { + match log.try_to::>(OpaqueDigestItemId::Consensus(&BEEFY_ENGINE_ID)) { + Some(ConsensusLog::MmrRoot(root)) => Some(root), + _ => None, + } + }) +} + +/// Scan the `header` digest log for a BEEFY validator set change. Return either the new +/// validator set or `None` in case no validator set change has been signaled. 
+fn find_authorities_change(header: &B::Header) -> Option> +where + B: Block, +{ + let id = OpaqueDigestItemId::Consensus(&BEEFY_ENGINE_ID); + + let filter = |log: ConsensusLog| match log { + ConsensusLog::AuthoritiesChange(validator_set) => Some(validator_set), + _ => None, + }; + + header.digest().convert_first(|l| l.try_to(id).and_then(filter)) +} + +/// Calculate next block number to vote on +fn vote_target(best_grandpa: N, best_beefy: N, min_delta: u32) -> N +where + N: AtLeast32Bit + Copy + Debug, +{ + let diff = best_grandpa.saturating_sub(best_beefy); + let diff = diff.saturated_into::(); + let target = best_beefy + min_delta.max(diff.next_power_of_two()).into(); + + trace!( + target: "beefy", + "🥩 vote target - diff: {:?}, next_power_of_two: {:?}, target block: #{:?}", + diff, + diff.next_power_of_two(), + target, + ); + + target +} + +#[cfg(test)] +mod tests { + use super::vote_target; + + #[test] + fn vote_on_min_block_delta() { + let t = vote_target(1u32, 0, 4); + assert_eq!(4, t); + let t = vote_target(2u32, 0, 4); + assert_eq!(4, t); + let t = vote_target(3u32, 0, 4); + assert_eq!(4, t); + let t = vote_target(4u32, 0, 4); + assert_eq!(4, t); + + let t = vote_target(4u32, 4, 4); + assert_eq!(8, t); + + let t = vote_target(10u32, 10, 4); + assert_eq!(14, t); + let t = vote_target(11u32, 10, 4); + assert_eq!(14, t); + let t = vote_target(12u32, 10, 4); + assert_eq!(14, t); + let t = vote_target(13u32, 10, 4); + assert_eq!(14, t); + + let t = vote_target(10u32, 10, 8); + assert_eq!(18, t); + let t = vote_target(11u32, 10, 8); + assert_eq!(18, t); + let t = vote_target(12u32, 10, 8); + assert_eq!(18, t); + let t = vote_target(13u32, 10, 8); + assert_eq!(18, t); + } + + #[test] + fn vote_on_power_of_two() { + let t = vote_target(1008u32, 1000, 4); + assert_eq!(1008, t); + + let t = vote_target(1016u32, 1000, 4); + assert_eq!(1016, t); + + let t = vote_target(1032u32, 1000, 4); + assert_eq!(1032, t); + + let t = vote_target(1064u32, 1000, 4); + 
assert_eq!(1064, t); + + let t = vote_target(1128u32, 1000, 4); + assert_eq!(1128, t); + + let t = vote_target(1256u32, 1000, 4); + assert_eq!(1256, t); + + let t = vote_target(1512u32, 1000, 4); + assert_eq!(1512, t); + + let t = vote_target(1024u32, 0, 4); + assert_eq!(1024, t); + } + + #[test] + fn vote_on_target_block() { + let t = vote_target(1008u32, 1002, 4); + assert_eq!(1010, t); + let t = vote_target(1010u32, 1002, 4); + assert_eq!(1010, t); + + let t = vote_target(1016u32, 1006, 4); + assert_eq!(1022, t); + let t = vote_target(1022u32, 1006, 4); + assert_eq!(1022, t); + + let t = vote_target(1032u32, 1012, 4); + assert_eq!(1044, t); + let t = vote_target(1044u32, 1012, 4); + assert_eq!(1044, t); + + let t = vote_target(1064u32, 1014, 4); + assert_eq!(1078, t); + let t = vote_target(1078u32, 1014, 4); + assert_eq!(1078, t); + + let t = vote_target(1128u32, 1008, 4); + assert_eq!(1136, t); + let t = vote_target(1136u32, 1008, 4); + assert_eq!(1136, t); + } +} diff --git a/frame/beefy-mmr/Cargo.toml b/frame/beefy-mmr/Cargo.toml new file mode 100644 index 0000000000000..3d4a9a72ddf86 --- /dev/null +++ b/frame/beefy-mmr/Cargo.toml @@ -0,0 +1,56 @@ +[package] +name = "pallet-beefy-mmr" +version = "4.0.0-dev" +authors = ["Parity Technologies "] +edition = "2018" +license = "Apache-2.0" +description = "BEEFY + MMR runtime utilities" + +[dependencies] +hex = { version = "0.4", optional = true } +codec = { version = "2.2.0", package = "parity-scale-codec", default-features = false, features = ["derive"] } +libsecp256k1 = { version = "0.7.0", default-features = false } +log = { version = "0.4.13", default-features = false } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } +serde = { version = "1.0.130", optional = true } + +frame-support = { version = "4.0.0-dev", path = "../support", default-features = false } +frame-system = { version = "4.0.0-dev", path = "../system", default-features = false } +pallet-mmr = { version = 
"4.0.0-dev", path = "../merkle-mountain-range", default-features = false } +pallet-mmr-primitives = { version = "4.0.0-dev", path = "../merkle-mountain-range/primitives", default-features = false } +pallet-session = { version = "4.0.0-dev", path = "../session", default-features = false } + +sp-core = { version = "4.0.0-dev", path = "../../primitives/core", default-features = false } +sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } +sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime", default-features = false } +sp-std = { version = "4.0.0-dev", path = "../../primitives/std", default-features = false } + +beefy-merkle-tree = { version = "4.0.0-dev", path = "./primitives", default-features = false } +beefy-primitives = { version = "4.0.0-dev", path = "../../primitives/beefy", default-features = false } +pallet-beefy = { version = "4.0.0-dev", path = "../beefy", default-features = false } + +[dev-dependencies] +sp-staking = { version = "4.0.0-dev", path = "../../primitives/staking" } +hex-literal = "0.3" + +[features] +default = ["std"] +std = [ + "beefy-merkle-tree/std", + "beefy-primitives/std", + "codec/std", + "frame-support/std", + "frame-system/std", + "hex", + "libsecp256k1/std", + "log/std", + "pallet-beefy/std", + "pallet-mmr-primitives/std", + "pallet-mmr/std", + "pallet-session/std", + "serde", + "sp-core/std", + "sp-io/std", + "sp-runtime/std", + "sp-std/std", +] diff --git a/frame/beefy-mmr/primitives/Cargo.toml b/frame/beefy-mmr/primitives/Cargo.toml new file mode 100644 index 0000000000000..d5dcc0eed3350 --- /dev/null +++ b/frame/beefy-mmr/primitives/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "beefy-merkle-tree" +version = "4.0.0-dev" +authors = ["Parity Technologies "] +edition = "2018" +license = "Apache-2.0" +description = "A no-std/Substrate compatible library to construct binary merkle tree." 
+ +[dependencies] +hex = { version = "0.4", optional = true, default-features = false } +log = { version = "0.4", optional = true, default-features = false } +tiny-keccak = { version = "2.0.2", features = ["keccak"], optional = true } + +[dev-dependencies] +env_logger = "0.9" +hex = "0.4" +hex-literal = "0.3" + +[features] +debug = ["hex", "log"] +default = ["std", "debug", "keccak"] +keccak = ["tiny-keccak"] +std = [] diff --git a/frame/beefy-mmr/primitives/src/lib.rs b/frame/beefy-mmr/primitives/src/lib.rs new file mode 100644 index 0000000000000..4d4d4e8721ac8 --- /dev/null +++ b/frame/beefy-mmr/primitives/src/lib.rs @@ -0,0 +1,806 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#![cfg_attr(not(feature = "std"), no_std)] +#![warn(missing_docs)] + +//! This crate implements a simple binary Merkle Tree utilities required for inter-op with Ethereum +//! bridge & Solidity contract. +//! +//! The implementation is optimised for usage within Substrate Runtime and supports no-std +//! compilation targets. +//! +//! Merkle Tree is constructed from arbitrary-length leaves, that are initially hashed using the +//! same [Hasher] as the inner nodes. +//! Inner nodes are created by concatenating child hashes and hashing again. The implementation +//! 
does not perform any sorting of the input data (leaves) nor when inner nodes are created. +//! +//! If the number of leaves is not even, last leave (hash of) is promoted to the upper layer. + +#[cfg(not(feature = "std"))] +extern crate alloc; +#[cfg(not(feature = "std"))] +use alloc::vec::Vec; + +/// Supported hashing output size. +/// +/// The size is restricted to 32 bytes to allow for a more optimised implementation. +pub type Hash = [u8; 32]; + +/// Generic hasher trait. +/// +/// Implement the function to support custom way of hashing data. +/// The implementation must return a [Hash] type, so only 32-byte output hashes are supported. +pub trait Hasher { + /// Hash given arbitrary-length piece of data. + fn hash(data: &[u8]) -> Hash; +} + +#[cfg(feature = "keccak")] +mod keccak256 { + use tiny_keccak::{Hasher as _, Keccak}; + + /// Keccak256 hasher implementation. + pub struct Keccak256; + impl Keccak256 { + /// Hash given data. + pub fn hash(data: &[u8]) -> super::Hash { + ::hash(data) + } + } + impl super::Hasher for Keccak256 { + fn hash(data: &[u8]) -> super::Hash { + let mut keccak = Keccak::v256(); + keccak.update(data); + let mut output = [0_u8; 32]; + keccak.finalize(&mut output); + output + } + } +} +#[cfg(feature = "keccak")] +pub use keccak256::Keccak256; + +/// Construct a root hash of a Binary Merkle Tree created from given leaves. +/// +/// See crate-level docs for details about Merkle Tree construction. +/// +/// In case an empty list of leaves is passed the function returns a 0-filled hash. 
+pub fn merkle_root(leaves: I) -> Hash +where + H: Hasher, + I: IntoIterator, + T: AsRef<[u8]>, +{ + let iter = leaves.into_iter().map(|l| H::hash(l.as_ref())); + merkelize::(iter, &mut ()) +} + +fn merkelize(leaves: I, visitor: &mut V) -> Hash +where + H: Hasher, + V: Visitor, + I: Iterator, +{ + let upper = Vec::with_capacity(leaves.size_hint().0); + let mut next = match merkelize_row::(leaves, upper, visitor) { + Ok(root) => return root, + Err(next) if next.is_empty() => return Hash::default(), + Err(next) => next, + }; + + let mut upper = Vec::with_capacity((next.len() + 1) / 2); + loop { + visitor.move_up(); + + match merkelize_row::(next.drain(..), upper, visitor) { + Ok(root) => return root, + Err(t) => { + // swap collections to avoid allocations + upper = next; + next = t; + }, + }; + } +} + +/// A generated merkle proof. +/// +/// The structure contains all necessary data to later on verify the proof and the leaf itself. +#[derive(Debug, PartialEq, Eq)] +pub struct MerkleProof { + /// Root hash of generated merkle tree. + pub root: Hash, + /// Proof items (does not contain the leaf hash, nor the root obviously). + /// + /// This vec contains all inner node hashes necessary to reconstruct the root hash given the + /// leaf hash. + pub proof: Vec, + /// Number of leaves in the original tree. + /// + /// This is needed to detect a case where we have an odd number of leaves that "get promoted" + /// to upper layers. + pub number_of_leaves: usize, + /// Index of the leaf the proof is for (0-based). + pub leaf_index: usize, + /// Leaf content. + pub leaf: T, +} + +/// A trait of object inspecting merkle root creation. +/// +/// It can be passed to [`merkelize_row`] or [`merkelize`] functions and will be notified +/// about tree traversal. +trait Visitor { + /// We are moving one level up in the tree. + fn move_up(&mut self); + + /// We are creating an inner node from given `left` and `right` nodes. 
+ /// + /// Note that in case of last odd node in the row `right` might be empty. + /// The method will also visit the `root` hash (level 0). + /// + /// The `index` is an index of `left` item. + fn visit(&mut self, index: usize, left: &Option, right: &Option); +} + +/// No-op implementation of the visitor. +impl Visitor for () { + fn move_up(&mut self) {} + fn visit(&mut self, _index: usize, _left: &Option, _right: &Option) {} +} + +/// Construct a Merkle Proof for leaves given by indices. +/// +/// The function constructs a (partial) Merkle Tree first and stores all elements required +/// to prove requested item (leaf) given the root hash. +/// +/// Both the Proof and the Root Hash is returned. +/// +/// # Panic +/// +/// The function will panic if given [`leaf_index`] is greater than the number of leaves. +pub fn merkle_proof(leaves: I, leaf_index: usize) -> MerkleProof +where + H: Hasher, + I: IntoIterator, + I::IntoIter: ExactSizeIterator, + T: AsRef<[u8]>, +{ + let mut leaf = None; + let iter = leaves.into_iter().enumerate().map(|(idx, l)| { + let hash = H::hash(l.as_ref()); + if idx == leaf_index { + leaf = Some(l); + } + hash + }); + + /// The struct collects a proof for single leaf. + struct ProofCollection { + proof: Vec, + position: usize, + } + + impl ProofCollection { + fn new(position: usize) -> Self { + ProofCollection { proof: Default::default(), position } + } + } + + impl Visitor for ProofCollection { + fn move_up(&mut self) { + self.position /= 2; + } + + fn visit(&mut self, index: usize, left: &Option, right: &Option) { + // we are at left branch - right goes to the proof. + if self.position == index { + if let Some(right) = right { + self.proof.push(*right); + } + } + // we are at right branch - left goes to the proof. 
+ if self.position == index + 1 { + if let Some(left) = left { + self.proof.push(*left); + } + } + } + } + + let number_of_leaves = iter.len(); + let mut collect_proof = ProofCollection::new(leaf_index); + + let root = merkelize::(iter, &mut collect_proof); + let leaf = leaf.expect("Requested `leaf_index` is greater than number of leaves."); + + #[cfg(feature = "debug")] + log::debug!( + "[merkle_proof] Proof: {:?}", + collect_proof.proof.iter().map(hex::encode).collect::>() + ); + + MerkleProof { root, proof: collect_proof.proof, number_of_leaves, leaf_index, leaf } +} + +/// Leaf node for proof verification. +/// +/// Can be either a value that needs to be hashed first, +/// or the hash itself. +#[derive(Debug, PartialEq, Eq)] +pub enum Leaf<'a> { + /// Leaf content. + Value(&'a [u8]), + /// Hash of the leaf content. + Hash(Hash), +} + +impl<'a, T: AsRef<[u8]>> From<&'a T> for Leaf<'a> { + fn from(v: &'a T) -> Self { + Leaf::Value(v.as_ref()) + } +} + +impl<'a> From for Leaf<'a> { + fn from(v: Hash) -> Self { + Leaf::Hash(v) + } +} + +/// Verify Merkle Proof correctness versus given root hash. +/// +/// The proof is NOT expected to contain leaf hash as the first +/// element, but only all adjacent nodes required to eventually by process of +/// concatenating and hashing end up with given root hash. +/// +/// The proof must not contain the root hash. 
+pub fn verify_proof<'a, H, P, L>( + root: &'a Hash, + proof: P, + number_of_leaves: usize, + leaf_index: usize, + leaf: L, +) -> bool +where + H: Hasher, + P: IntoIterator, + L: Into>, +{ + if leaf_index >= number_of_leaves { + return false + } + + let leaf_hash = match leaf.into() { + Leaf::Value(content) => H::hash(content), + Leaf::Hash(hash) => hash, + }; + + let mut combined = [0_u8; 64]; + let mut position = leaf_index; + let mut width = number_of_leaves; + let computed = proof.into_iter().fold(leaf_hash, |a, b| { + if position % 2 == 1 || position + 1 == width { + combined[0..32].copy_from_slice(&b); + combined[32..64].copy_from_slice(&a); + } else { + combined[0..32].copy_from_slice(&a); + combined[32..64].copy_from_slice(&b); + } + let hash = H::hash(&combined); + #[cfg(feature = "debug")] + log::debug!( + "[verify_proof]: (a, b) {:?}, {:?} => {:?} ({:?}) hash", + hex::encode(a), + hex::encode(b), + hex::encode(hash), + hex::encode(combined) + ); + position /= 2; + width = ((width - 1) / 2) + 1; + hash + }); + + root == &computed +} + +/// Processes a single row (layer) of a tree by taking pairs of elements, +/// concatenating them, hashing and placing into resulting vector. +/// +/// In case only one element is provided it is returned via `Ok` result, in any other case (also an +/// empty iterator) an `Err` with the inner nodes of upper layer is returned. 
+fn merkelize_row( + mut iter: I, + mut next: Vec, + visitor: &mut V, +) -> Result> +where + H: Hasher, + V: Visitor, + I: Iterator, +{ + #[cfg(feature = "debug")] + log::debug!("[merkelize_row]"); + next.clear(); + + let mut index = 0; + let mut combined = [0_u8; 64]; + loop { + let a = iter.next(); + let b = iter.next(); + visitor.visit(index, &a, &b); + + #[cfg(feature = "debug")] + log::debug!(" {:?}\n {:?}", a.as_ref().map(hex::encode), b.as_ref().map(hex::encode)); + + index += 2; + match (a, b) { + (Some(a), Some(b)) => { + combined[0..32].copy_from_slice(&a); + combined[32..64].copy_from_slice(&b); + + next.push(H::hash(&combined)); + }, + // Odd number of items. Promote the item to the upper layer. + (Some(a), None) if !next.is_empty() => { + next.push(a); + }, + // Last item = root. + (Some(a), None) => return Ok(a), + // Finish up, no more items. + _ => { + #[cfg(feature = "debug")] + log::debug!( + "[merkelize_row] Next: {:?}", + next.iter().map(hex::encode).collect::>() + ); + return Err(next) + }, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use hex_literal::hex; + + #[test] + fn should_generate_empty_root() { + // given + let _ = env_logger::try_init(); + let data: Vec<[u8; 1]> = Default::default(); + + // when + let out = merkle_root::(data); + + // then + assert_eq!( + hex::encode(&out), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + } + + #[test] + fn should_generate_single_root() { + // given + let _ = env_logger::try_init(); + let data = vec![hex!("E04CC55ebEE1cBCE552f250e85c57B70B2E2625b")]; + + // when + let out = merkle_root::(data); + + // then + assert_eq!( + hex::encode(&out), + "aeb47a269393297f4b0a3c9c9cfd00c7a4195255274cf39d83dabc2fcc9ff3d7" + ); + } + + #[test] + fn should_generate_root_pow_2() { + // given + let _ = env_logger::try_init(); + let data = vec![ + hex!("E04CC55ebEE1cBCE552f250e85c57B70B2E2625b"), + hex!("25451A4de12dcCc2D166922fA938E900fCc4ED24"), + ]; + + // when + let out = 
merkle_root::(data); + + // then + assert_eq!( + hex::encode(&out), + "697ea2a8fe5b03468548a7a413424a6292ab44a82a6f5cc594c3fa7dda7ce402" + ); + } + + #[test] + fn should_generate_root_complex() { + let _ = env_logger::try_init(); + let test = |root, data| { + assert_eq!(hex::encode(&merkle_root::(data)), root); + }; + + test( + "aff1208e69c9e8be9b584b07ebac4e48a1ee9d15ce3afe20b77a4d29e4175aa3", + vec!["a", "b", "c"], + ); + + test( + "b8912f7269068901f231a965adfefbc10f0eedcfa61852b103efd54dac7db3d7", + vec!["a", "b", "a"], + ); + + test( + "dc8e73fe6903148ff5079baecc043983625c23b39f31537e322cd0deee09fa9c", + vec!["a", "b", "a", "b"], + ); + + test( + "fb3b3be94be9e983ba5e094c9c51a7d96a4fa2e5d8e891df00ca89ba05bb1239", + vec!["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"], + ); + } + + #[test] + fn should_generate_and_verify_proof_simple() { + // given + let _ = env_logger::try_init(); + let data = vec!["a", "b", "c"]; + + // when + let proof0 = merkle_proof::(data.clone(), 0); + assert!(verify_proof::( + &proof0.root, + proof0.proof.clone(), + data.len(), + proof0.leaf_index, + &proof0.leaf, + )); + + let proof1 = merkle_proof::(data.clone(), 1); + assert!(verify_proof::( + &proof1.root, + proof1.proof, + data.len(), + proof1.leaf_index, + &proof1.leaf, + )); + + let proof2 = merkle_proof::(data.clone(), 2); + assert!(verify_proof::( + &proof2.root, + proof2.proof, + data.len(), + proof2.leaf_index, + &proof2.leaf + )); + + // then + assert_eq!(hex::encode(proof0.root), hex::encode(proof1.root)); + assert_eq!(hex::encode(proof2.root), hex::encode(proof1.root)); + + assert!(!verify_proof::( + &hex!("fb3b3be94be9e983ba5e094c9c51a7d96a4fa2e5d8e891df00ca89ba05bb1239"), + proof0.proof, + data.len(), + proof0.leaf_index, + &proof0.leaf + )); + + assert!(!verify_proof::( + &proof0.root, + vec![], + data.len(), + proof0.leaf_index, + &proof0.leaf + )); + } + + #[test] + fn should_generate_and_verify_proof_complex() { + // given + let _ = env_logger::try_init(); + let 
data = vec!["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]; + + for l in 0..data.len() { + // when + let proof = merkle_proof::(data.clone(), l); + // then + assert!(verify_proof::( + &proof.root, + proof.proof, + data.len(), + proof.leaf_index, + &proof.leaf + )); + } + } + + #[test] + fn should_generate_and_verify_proof_large() { + // given + let _ = env_logger::try_init(); + let mut data = vec![]; + for i in 1..16 { + for c in 'a'..'z' { + if c as usize % i != 0 { + data.push(c.to_string()); + } + } + + for l in 0..data.len() { + // when + let proof = merkle_proof::(data.clone(), l); + // then + assert!(verify_proof::( + &proof.root, + proof.proof, + data.len(), + proof.leaf_index, + &proof.leaf + )); + } + } + } + + #[test] + fn should_generate_and_verify_proof_large_tree() { + // given + let _ = env_logger::try_init(); + let mut data = vec![]; + for i in 0..6000 { + data.push(format!("{}", i)); + } + + for l in (0..data.len()).step_by(13) { + // when + let proof = merkle_proof::(data.clone(), l); + // then + assert!(verify_proof::( + &proof.root, + proof.proof, + data.len(), + proof.leaf_index, + &proof.leaf + )); + } + } + + #[test] + #[should_panic] + fn should_panic_on_invalid_leaf_index() { + let _ = env_logger::try_init(); + merkle_proof::(vec!["a"], 5); + } + + #[test] + fn should_generate_and_verify_proof_on_test_data() { + let addresses = vec![ + "0x9aF1Ca5941148eB6A3e9b9C741b69738292C533f", + "0xDD6ca953fddA25c496165D9040F7F77f75B75002", + "0x60e9C47B64Bc1C7C906E891255EaEC19123E7F42", + "0xfa4859480Aa6D899858DE54334d2911E01C070df", + "0x19B9b128470584F7209eEf65B69F3624549Abe6d", + "0xC436aC1f261802C4494504A11fc2926C726cB83b", + "0xc304C8C2c12522F78aD1E28dD86b9947D7744bd0", + "0xDa0C2Cba6e832E55dE89cF4033affc90CC147352", + "0xf850Fd22c96e3501Aad4CDCBf38E4AEC95622411", + "0x684918D4387CEb5E7eda969042f036E226E50642", + "0x963F0A1bFbb6813C0AC88FcDe6ceB96EA634A595", + "0x39B38ad74b8bCc5CE564f7a27Ac19037A95B6099", + 
"0xC2Dec7Fdd1fef3ee95aD88EC8F3Cd5bd4065f3C7", + "0x9E311f05c2b6A43C2CCF16fB2209491BaBc2ec01", + "0x927607C30eCE4Ef274e250d0bf414d4a210b16f0", + "0x98882bcf85E1E2DFF780D0eB360678C1cf443266", + "0xFBb50191cd0662049E7C4EE32830a4Cc9B353047", + "0x963854fc2C358c48C3F9F0A598B9572c581B8DEF", + "0xF9D7Bc222cF6e3e07bF66711e6f409E51aB75292", + "0xF2E3fd32D063F8bBAcB9e6Ea8101C2edd899AFe6", + "0x407a5b9047B76E8668570120A96d580589fd1325", + "0xEAD9726FAFB900A07dAd24a43AE941d2eFDD6E97", + "0x42f5C8D9384034A9030313B51125C32a526b6ee8", + "0x158fD2529Bc4116570Eb7C80CC76FEf33ad5eD95", + "0x0A436EE2E4dEF3383Cf4546d4278326Ccc82514E", + "0x34229A215db8FeaC93Caf8B5B255e3c6eA51d855", + "0xEb3B7CF8B1840242CB98A732BA464a17D00b5dDF", + "0x2079692bf9ab2d6dc7D79BBDdEE71611E9aA3B72", + "0x46e2A67e5d450e2Cf7317779f8274a2a630f3C9B", + "0xA7Ece4A5390DAB18D08201aE18800375caD78aab", + "0x15E1c0D24D62057Bf082Cb2253dA11Ef0d469570", + "0xADDEF4C9b5687Eb1F7E55F2251916200A3598878", + "0xe0B16Fb96F936035db2b5A68EB37D470fED2f013", + "0x0c9A84993feaa779ae21E39F9793d09e6b69B62D", + "0x3bc4D5148906F70F0A7D1e2756572655fd8b7B34", + "0xFf4675C26903D5319795cbd3a44b109E7DDD9fDe", + "0xCec4450569A8945C6D2Aba0045e4339030128a92", + "0x85f0584B10950E421A32F471635b424063FD8405", + "0xb38bEe7Bdc0bC43c096e206EFdFEad63869929E3", + "0xc9609466274Fef19D0e58E1Ee3b321D5C141067E", + "0xa08EA868cF75268E7401021E9f945BAe73872ecc", + "0x67C9Cb1A29E964Fe87Ff669735cf7eb87f6868fE", + "0x1B6BEF636aFcdd6085cD4455BbcC93796A12F6E2", + "0x46B37b243E09540b55cF91C333188e7D5FD786dD", + "0x8E719E272f62Fa97da93CF9C941F5e53AA09e44a", + "0xa511B7E7DB9cb24AD5c89fBb6032C7a9c2EfA0a5", + "0x4D11FDcAeD335d839132AD450B02af974A3A66f8", + "0xB8cf790a5090E709B4619E1F335317114294E17E", + "0x7f0f57eA064A83210Cafd3a536866ffD2C5eDCB3", + "0xC03C848A4521356EF800e399D889e9c2A25D1f9E", + "0xC6b03DF05cb686D933DD31fCa5A993bF823dc4FE", + "0x58611696b6a8102cf95A32c25612E4cEF32b910F", + "0x2ed4bC7197AEF13560F6771D930Bf907772DE3CE", + 
"0x3C5E58f334306be029B0e47e119b8977B2639eb4", + "0x288646a1a4FeeC560B349d210263c609aDF649a6", + "0xb4F4981E0d027Dc2B3c86afA0D0fC03d317e83C0", + "0xaAE4A87F8058feDA3971f9DEd639Ec9189aA2500", + "0x355069DA35E598913d8736E5B8340527099960b8", + "0x3cf5A0F274cd243C0A186d9fCBdADad089821B93", + "0xca55155dCc4591538A8A0ca322a56EB0E4aD03C4", + "0xE824D0268366ec5C4F23652b8eD70D552B1F2b8B", + "0x84C3e9B25AE8a9b39FF5E331F9A597F2DCf27Ca9", + "0xcA0018e278751De10d26539915d9c7E7503432FE", + "0xf13077dE6191D6c1509ac7E088b8BE7Fe656c28b", + "0x7a6bcA1ec9Db506e47ac6FD86D001c2aBc59C531", + "0xeA7f9A2A9dd6Ba9bc93ca615C3Ddf26973146911", + "0x8D0d8577e16F8731d4F8712BAbFa97aF4c453458", + "0xB7a7855629dF104246997e9ACa0E6510df75d0ea", + "0x5C1009BDC70b0C8Ab2e5a53931672ab448C17c89", + "0x40B47D1AfefEF5eF41e0789F0285DE7b1C31631C", + "0x5086933d549cEcEB20652CE00973703CF10Da373", + "0xeb364f6FE356882F92ae9314fa96116Cf65F47d8", + "0xdC4D31516A416cEf533C01a92D9a04bbdb85EE67", + "0x9b36E086E5A274332AFd3D8509e12ca5F6af918d", + "0xBC26394fF36e1673aE0608ce91A53B9768aD0D76", + "0x81B5AB400be9e563fA476c100BE898C09966426c", + "0x9d93C8ae5793054D28278A5DE6d4653EC79e90FE", + "0x3B8E75804F71e121008991E3177fc942b6c28F50", + "0xC6Eb5886eB43dD473f5BB4e21e56E08dA464D9B4", + "0xfdf1277b71A73c813cD0e1a94B800f4B1Db66DBE", + "0xc2ff2cCc98971556670e287Ff0CC39DA795231ad", + "0x76b7E1473f0D0A87E9B4a14E2B179266802740f5", + "0xA7Bc965660a6EF4687CCa4F69A97563163A3C2Ef", + "0xB9C2b47888B9F8f7D03dC1de83F3F55E738CebD3", + "0xEd400162E6Dd6bD2271728FFb04176bF770De94a", + "0xE3E8331156700339142189B6E555DCb2c0962750", + "0xbf62e342Bc7706a448EdD52AE871d9C4497A53b1", + "0xb9d7A1A111eed75714a0AcD2dd467E872eE6B03D", + "0x03942919DFD0383b8c574AB8A701d89fd4bfA69D", + "0x0Ef4C92355D3c8c7050DFeb319790EFCcBE6fe9e", + "0xA6895a3cf0C60212a73B3891948ACEcF1753f25E", + "0x0Ed509239DB59ef3503ded3d31013C983d52803A", + "0xc4CE8abD123BfAFc4deFf37c7D11DeCd5c350EE4", + "0x4A4Bf59f7038eDcd8597004f35d7Ee24a7Bdd2d3", + 
"0x5769E8e8A2656b5ed6b6e6fa2a2bFAeaf970BB87", + "0xf9E15cCE181332F4F57386687c1776b66C377060", + "0xc98f8d4843D56a46C21171900d3eE538Cc74dbb5", + "0x3605965B47544Ce4302b988788B8195601AE4dEd", + "0xe993BDfdcAac2e65018efeE0F69A12678031c71d", + "0x274fDf8801385D3FAc954BCc1446Af45f5a8304c", + "0xBFb3f476fcD6429F4a475bA23cEFdDdd85c6b964", + "0x806cD16588Fe812ae740e931f95A289aFb4a4B50", + "0xa89488CE3bD9C25C3aF797D1bbE6CA689De79d81", + "0xd412f1AfAcf0Ebf3Cd324593A231Fc74CC488B12", + "0xd1f715b2D7951d54bc31210BbD41852D9BF98Ed1", + "0xf65aD707c344171F467b2ADba3d14f312219cE23", + "0x2971a4b242e9566dEF7bcdB7347f5E484E11919B", + "0x12b113D6827E07E7D426649fBd605f427da52314", + "0x1c6CA45171CDb9856A6C9Dba9c5F1216913C1e97", + "0x11cC6ee1d74963Db23294FCE1E3e0A0555779CeA", + "0x8Aa1C721255CDC8F895E4E4c782D86726b068667", + "0xA2cDC1f37510814485129aC6310b22dF04e9Bbf0", + "0xCf531b71d388EB3f5889F1f78E0d77f6fb109767", + "0xBe703e3545B2510979A0cb0C440C0Fba55c6dCB5", + "0x30a35886F989db39c797D8C93880180Fdd71b0c8", + "0x1071370D981F60c47A9Cd27ac0A61873a372cBB2", + "0x3515d74A11e0Cb65F0F46cB70ecf91dD1712daaa", + "0x50500a3c2b7b1229c6884505D00ac6Be29Aecd0C", + "0x9A223c2a11D4FD3585103B21B161a2B771aDA3d1", + "0xd7218df03AD0907e6c08E707B15d9BD14285e657", + "0x76CfD72eF5f93D1a44aD1F80856797fBE060c70a", + "0x44d093cB745944991EFF5cBa151AA6602d6f5420", + "0x626516DfF43bf09A71eb6fd1510E124F96ED0Cde", + "0x6530824632dfe099304E2DC5701cA99E6d031E08", + "0x57e6c423d6a7607160d6379A0c335025A14DaFC0", + "0x3966D4AD461Ef150E0B10163C81E79b9029E69c3", + "0xF608aCfd0C286E23721a3c347b2b65039f6690F1", + "0xbfB8FAac31A25646681936977837f7740fCd0072", + "0xd80aa634a623a7ED1F069a1a3A28a173061705c7", + "0x9122a77B36363e24e12E1E2D73F87b32926D3dF5", + "0x62562f0d1cD31315bCCf176049B6279B2bfc39C2", + "0x48aBF7A2a7119e5675059E27a7082ba7F38498b2", + "0xb4596983AB9A9166b29517acD634415807569e5F", + "0x52519D16E20BC8f5E96Da6d736963e85b2adA118", + "0x7663893C3dC0850EfC5391f5E5887eD723e51B83", + 
"0x5FF323a29bCC3B5b4B107e177EccEF4272959e61", + "0xee6e499AdDf4364D75c05D50d9344e9daA5A9AdF", + "0x1631b0BD31fF904aD67dD58994C6C2051CDe4E75", + "0xbc208e9723D44B9811C428f6A55722a26204eEF2", + "0xe76103a222Ee2C7Cf05B580858CEe625C4dc00E1", + "0xC71Bb2DBC51760f4fc2D46D84464410760971B8a", + "0xB4C18811e6BFe564D69E12c224FFc57351f7a7ff", + "0xD11DB0F5b41061A887cB7eE9c8711438844C298A", + "0xB931269934A3D4432c084bAAc3d0de8143199F4f", + "0x070037cc85C761946ec43ea2b8A2d5729908A2a1", + "0x2E34aa8C95Ffdbb37f14dCfBcA69291c55Ba48DE", + "0x052D93e8d9220787c31d6D83f87eC7dB088E998f", + "0x498dAC6C69b8b9ad645217050054840f1D91D029", + "0xE4F7D60f9d84301e1fFFd01385a585F3A11F8E89", + "0xEa637992f30eA06460732EDCBaCDa89355c2a107", + "0x4960d8Da07c27CB6Be48a79B96dD70657c57a6bF", + "0x7e471A003C8C9fdc8789Ded9C3dbe371d8aa0329", + "0xd24265Cc10eecb9e8d355CCc0dE4b11C556E74D7", + "0xDE59C8f7557Af779674f41CA2cA855d571018690", + "0x2fA8A6b3b6226d8efC9d8f6EBDc73Ca33DDcA4d8", + "0xe44102664c6c2024673Ff07DFe66E187Db77c65f", + "0x94E3f4f90a5f7CBF2cc2623e66B8583248F01022", + "0x0383EdBbc21D73DEd039E9C1Ff6bf56017b4CC40", + "0x64C3E49898B88d1E0f0d02DA23E0c00A2Cd0cA99", + "0xF4ccfB67b938d82B70bAb20975acFAe402E812E1", + "0x4f9ee5829e9852E32E7BC154D02c91D8E203e074", + "0xb006312eF9713463bB33D22De60444Ba95609f6B", + "0x7Cbe76ef69B52110DDb2e3b441C04dDb11D63248", + "0x70ADEEa65488F439392B869b1Df7241EF317e221", + "0x64C0bf8AA36Ba590477585Bc0D2BDa7970769463", + "0xA4cDc98593CE52d01Fe5Ca47CB3dA5320e0D7592", + "0xc26B34D375533fFc4c5276282Fa5D660F3d8cbcB", + ]; + let root = hex!("72b0acd7c302a84f1f6b6cefe0ba7194b7398afb440e1b44a9dbbe270394ca53"); + + let data = addresses + .into_iter() + .map(|address| hex::decode(&address[2..]).unwrap()) + .collect::>(); + + for l in 0..data.len() { + // when + let proof = merkle_proof::(data.clone(), l); + assert_eq!(hex::encode(&proof.root), hex::encode(&root)); + assert_eq!(proof.leaf_index, l); + assert_eq!(&proof.leaf, &data[l]); + + // then + assert!(verify_proof::( + 
&proof.root, + proof.proof, + data.len(), + proof.leaf_index, + &proof.leaf + )); + } + + let proof = merkle_proof::(data.clone(), data.len() - 1); + + assert_eq!( + proof, + MerkleProof { + root, + proof: vec![ + hex!("340bcb1d49b2d82802ddbcf5b85043edb3427b65d09d7f758fbc76932ad2da2f"), + hex!("ba0580e5bd530bc93d61276df7969fb5b4ae8f1864b4a28c280249575198ff1f"), + hex!("d02609d2bbdb28aa25f58b85afec937d5a4c85d37925bce6d0cf802f9d76ba79"), + hex!("ae3f8991955ed884613b0a5f40295902eea0e0abe5858fc520b72959bc016d4e"), + ], + number_of_leaves: data.len(), + leaf_index: data.len() - 1, + leaf: hex!("c26B34D375533fFc4c5276282Fa5D660F3d8cbcB").to_vec(), + } + ); + } +} diff --git a/frame/beefy-mmr/src/lib.rs b/frame/beefy-mmr/src/lib.rs new file mode 100644 index 0000000000000..001831639b169 --- /dev/null +++ b/frame/beefy-mmr/src/lib.rs @@ -0,0 +1,236 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#![cfg_attr(not(feature = "std"), no_std)] +#![warn(missing_docs)] + +//! A BEEFY+MMR pallet combo. +//! +//! While both BEEFY and Merkle Mountain Range (MMR) can be used separately, +//! these tools were designed to work together in unison. +//! +//! The pallet provides a standardized MMR Leaf format that is can be used +//! to bridge BEEFY+MMR-based networks (both standalone and polkadot-like). +//! +//! The MMR leaf contains: +//! 1. 
Block number and parent block hash. +//! 2. Merkle Tree Root Hash of next BEEFY validator set. +//! 3. Merkle Tree Root Hash of current parachain heads state. +//! +//! and thanks to versioning can be easily updated in the future. + +use sp_runtime::traits::{Convert, Hash}; +use sp_std::prelude::*; + +use beefy_primitives::mmr::{BeefyNextAuthoritySet, MmrLeaf, MmrLeafVersion}; +use pallet_mmr::primitives::LeafDataProvider; + +use codec::Encode; +use frame_support::traits::Get; + +pub use pallet::*; + +#[cfg(test)] +mod mock; +#[cfg(test)] +mod tests; + +/// A BEEFY consensus digest item with MMR root hash. +pub struct DepositBeefyDigest(sp_std::marker::PhantomData); + +impl pallet_mmr::primitives::OnNewRoot for DepositBeefyDigest +where + T: pallet_mmr::Config, + T: pallet_beefy::Config, +{ + fn on_new_root(root: &::Hash) { + let digest = sp_runtime::generic::DigestItem::Consensus( + beefy_primitives::BEEFY_ENGINE_ID, + codec::Encode::encode(&beefy_primitives::ConsensusLog::< + ::BeefyId, + >::MmrRoot(*root)), + ); + >::deposit_log(digest); + } +} + +/// Convert BEEFY secp256k1 public keys into Ethereum addresses +pub struct BeefyEcdsaToEthereum; +impl Convert> for BeefyEcdsaToEthereum { + fn convert(a: beefy_primitives::crypto::AuthorityId) -> Vec { + use sp_core::crypto::Public; + let compressed_key = a.as_slice(); + + libsecp256k1::PublicKey::parse_slice( + compressed_key, + Some(libsecp256k1::PublicKeyFormat::Compressed), + ) + // uncompress the key + .map(|pub_key| pub_key.serialize().to_vec()) + // now convert to ETH address + .map(|uncompressed| sp_io::hashing::keccak_256(&uncompressed[1..])[12..].to_vec()) + .map_err(|_| { + log::error!(target: "runtime::beefy", "Invalid BEEFY PublicKey format!"); + }) + .unwrap_or_default() + } +} + +type MerkleRootOf = ::Hash; +type ParaId = u32; +type ParaHead = Vec; + +/// A type that is able to return current list of parachain heads that end up in the MMR leaf. 
+pub trait ParachainHeadsProvider { + /// Return a list of tuples containing a `ParaId` and Parachain Header data (ParaHead). + /// + /// The returned data does not have to be sorted. + fn parachain_heads() -> Vec<(ParaId, ParaHead)>; +} + +/// A default implementation for runtimes without parachains. +impl ParachainHeadsProvider for () { + fn parachain_heads() -> Vec<(ParaId, ParaHead)> { + Default::default() + } +} + +#[frame_support::pallet] +pub mod pallet { + #![allow(missing_docs)] + + use super::*; + use frame_support::pallet_prelude::*; + + /// BEEFY-MMR pallet. + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(_); + + /// The module's configuration trait. + #[pallet::config] + #[pallet::disable_frame_system_supertrait_check] + pub trait Config: pallet_mmr::Config + pallet_beefy::Config { + /// Current leaf version. + /// + /// Specifies the version number added to every leaf that get's appended to the MMR. + /// Read more in [`MmrLeafVersion`] docs about versioning leaves. + type LeafVersion: Get; + + /// Convert BEEFY AuthorityId to a form that would end up in the Merkle Tree. + /// + /// For instance for ECDSA (secp256k1) we want to store uncompressed public keys (65 bytes) + /// and later to Ethereum Addresses (160 bits) to simplify using them on Ethereum chain, + /// but the rest of the Substrate codebase is storing them compressed (33 bytes) for + /// efficiency reasons. + type BeefyAuthorityToMerkleLeaf: Convert<::BeefyId, Vec>; + + /// Retrieve a list of current parachain heads. + /// + /// The trait is implemented for `paras` module, but since not all chains might have + /// parachains, and we want to keep the MMR leaf structure uniform, it's possible to use + /// `()` as well to simply put dummy data to the leaf. + type ParachainHeads: ParachainHeadsProvider; + } + + /// Details of next BEEFY authority set. + /// + /// This storage entry is used as cache for calls to [`update_beefy_next_authority_set`]. 
+ #[pallet::storage] + #[pallet::getter(fn beefy_next_authorities)] + pub type BeefyNextAuthorities = + StorageValue<_, BeefyNextAuthoritySet>, ValueQuery>; +} + +impl LeafDataProvider for Pallet +where + MerkleRootOf: From + Into, +{ + type LeafData = MmrLeaf< + ::BlockNumber, + ::Hash, + MerkleRootOf, + >; + + fn leaf_data() -> Self::LeafData { + MmrLeaf { + version: T::LeafVersion::get(), + parent_number_and_hash: frame_system::Pallet::::leaf_data(), + parachain_heads: Pallet::::parachain_heads_merkle_root(), + beefy_next_authority_set: Pallet::::update_beefy_next_authority_set(), + } + } +} + +impl beefy_merkle_tree::Hasher for Pallet +where + MerkleRootOf: Into, +{ + fn hash(data: &[u8]) -> beefy_merkle_tree::Hash { + ::Hashing::hash(data).into() + } +} + +impl Pallet +where + MerkleRootOf: From + Into, +{ + /// Returns latest root hash of a merkle tree constructed from all active parachain headers. + /// + /// The leafs are sorted by `ParaId` to allow more efficient lookups and non-existence proofs. + /// + /// NOTE this does not include parathreads - only parachains are part of the merkle tree. + /// + /// NOTE This is an initial and inefficient implementation, which re-constructs + /// the merkle tree every block. Instead we should update the merkle root in + /// [Self::on_initialize] call of this pallet and update the merkle tree efficiently (use + /// on-chain storage to persist inner nodes). + fn parachain_heads_merkle_root() -> MerkleRootOf { + let mut para_heads = T::ParachainHeads::parachain_heads(); + para_heads.sort(); + let para_heads = para_heads.into_iter().map(|pair| pair.encode()); + beefy_merkle_tree::merkle_root::(para_heads).into() + } + + /// Returns details of the next BEEFY authority set. + /// + /// Details contain authority set id, authority set length and a merkle root, + /// constructed from uncompressed secp256k1 public keys converted to Ethereum addresses + /// of the next BEEFY authority set. 
+ /// + /// This function will use a storage-cached entry in case the set didn't change, or compute and + /// cache new one in case it did. + fn update_beefy_next_authority_set() -> BeefyNextAuthoritySet> { + let id = pallet_beefy::Pallet::::validator_set_id() + 1; + let current_next = Self::beefy_next_authorities(); + // avoid computing the merkle tree if validator set id didn't change. + if id == current_next.id { + return current_next + } + + let beefy_addresses = pallet_beefy::Pallet::::next_authorities() + .into_iter() + .map(T::BeefyAuthorityToMerkleLeaf::convert) + .collect::>(); + let len = beefy_addresses.len() as u32; + let root = beefy_merkle_tree::merkle_root::(beefy_addresses).into(); + let next_set = BeefyNextAuthoritySet { id, len, root }; + // cache the result + BeefyNextAuthorities::::put(&next_set); + next_set + } +} diff --git a/frame/beefy-mmr/src/mock.rs b/frame/beefy-mmr/src/mock.rs new file mode 100644 index 0000000000000..a8d136b192aec --- /dev/null +++ b/frame/beefy-mmr/src/mock.rs @@ -0,0 +1,205 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use std::vec; + +use beefy_primitives::mmr::MmrLeafVersion; +use frame_support::{ + construct_runtime, parameter_types, sp_io::TestExternalities, BasicExternalities, +}; +use sp_core::{Hasher, H256}; +use sp_runtime::{ + app_crypto::ecdsa::Public, + impl_opaque_keys, + testing::Header, + traits::{BlakeTwo256, ConvertInto, IdentityLookup, Keccak256, OpaqueKeys}, + Perbill, +}; + +use crate as pallet_beefy_mmr; + +pub use beefy_primitives::{crypto::AuthorityId as BeefyId, ConsensusLog, BEEFY_ENGINE_ID}; + +impl_opaque_keys! { + pub struct MockSessionKeys { + pub dummy: pallet_beefy::Pallet, + } +} + +type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; +type Block = frame_system::mocking::MockBlock; + +construct_runtime!( + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic, + { + System: frame_system::{Pallet, Call, Config, Storage, Event}, + Session: pallet_session::{Pallet, Call, Storage, Event, Config}, + Mmr: pallet_mmr::{Pallet, Storage}, + Beefy: pallet_beefy::{Pallet, Config, Storage}, + BeefyMmr: pallet_beefy_mmr::{Pallet, Storage}, + } +); + +parameter_types! { + pub const BlockHashCount: u64 = 250; + pub const SS58Prefix: u8 = 42; +} + +impl frame_system::Config for Test { + type BaseCallFilter = frame_support::traits::Everything; + type BlockWeights = (); + type BlockLength = (); + type DbWeight = (); + type Origin = Origin; + type Index = u64; + type BlockNumber = u64; + type Hash = H256; + type Call = Call; + type Hashing = BlakeTwo256; + type AccountId = u64; + type Lookup = IdentityLookup; + type Header = Header; + type Event = Event; + type BlockHashCount = BlockHashCount; + type Version = (); + type PalletInfo = PalletInfo; + type AccountData = (); + type OnNewAccount = (); + type OnKilledAccount = (); + type SystemWeightInfo = (); + type SS58Prefix = SS58Prefix; + type OnSetCode = (); +} + +parameter_types! 
{ + pub const Period: u64 = 1; + pub const Offset: u64 = 0; + pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(33); +} + +impl pallet_session::Config for Test { + type Event = Event; + type ValidatorId = u64; + type ValidatorIdOf = ConvertInto; + type ShouldEndSession = pallet_session::PeriodicSessions; + type NextSessionRotation = pallet_session::PeriodicSessions; + type SessionManager = MockSessionManager; + type SessionHandler = ::KeyTypeIdProviders; + type Keys = MockSessionKeys; + type DisabledValidatorsThreshold = DisabledValidatorsThreshold; + type WeightInfo = (); +} + +pub type MmrLeaf = beefy_primitives::mmr::MmrLeaf< + ::BlockNumber, + ::Hash, + ::Hash, +>; + +impl pallet_mmr::Config for Test { + const INDEXING_PREFIX: &'static [u8] = b"mmr"; + + type Hashing = Keccak256; + + type Hash = ::Out; + + type LeafData = BeefyMmr; + + type OnNewRoot = pallet_beefy_mmr::DepositBeefyDigest; + + type WeightInfo = (); +} + +impl pallet_beefy::Config for Test { + type BeefyId = BeefyId; +} + +parameter_types! { + pub LeafVersion: MmrLeafVersion = MmrLeafVersion::new(1, 5); +} + +impl pallet_beefy_mmr::Config for Test { + type LeafVersion = LeafVersion; + + type BeefyAuthorityToMerkleLeaf = pallet_beefy_mmr::BeefyEcdsaToEthereum; + + type ParachainHeads = DummyParaHeads; +} + +pub struct DummyParaHeads; +impl pallet_beefy_mmr::ParachainHeadsProvider for DummyParaHeads { + fn parachain_heads() -> Vec<(pallet_beefy_mmr::ParaId, pallet_beefy_mmr::ParaHead)> { + vec![(15, vec![1, 2, 3]), (5, vec![4, 5, 6])] + } +} + +pub struct MockSessionManager; +impl pallet_session::SessionManager for MockSessionManager { + fn end_session(_: sp_staking::SessionIndex) {} + fn start_session(_: sp_staking::SessionIndex) {} + fn new_session(idx: sp_staking::SessionIndex) -> Option> { + if idx == 0 || idx == 1 { + Some(vec![1, 2]) + } else if idx == 2 { + Some(vec![3, 4]) + } else { + None + } + } +} + +// Note, that we can't use `UintAuthorityId` here. 
Reason is that the implementation +// of `to_public_key()` assumes, that a public key is 32 bytes long. This is true for +// ed25519 and sr25519 but *not* for ecdsa. An ecdsa public key is 33 bytes. +pub fn mock_beefy_id(id: u8) -> BeefyId { + let buf: [u8; 33] = [id; 33]; + let pk = Public::from_raw(buf); + BeefyId::from(pk) +} + +pub fn mock_authorities(vec: Vec) -> Vec<(u64, BeefyId)> { + vec.into_iter().map(|id| ((id as u64), mock_beefy_id(id))).collect() +} + +pub fn new_test_ext(ids: Vec) -> TestExternalities { + new_test_ext_raw_authorities(mock_authorities(ids)) +} + +pub fn new_test_ext_raw_authorities(authorities: Vec<(u64, BeefyId)>) -> TestExternalities { + let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); + + let session_keys: Vec<_> = authorities + .iter() + .enumerate() + .map(|(_, id)| (id.0 as u64, id.0 as u64, MockSessionKeys { dummy: id.1.clone() })) + .collect(); + + BasicExternalities::execute_with_storage(&mut t, || { + for (ref id, ..) in &session_keys { + frame_system::Pallet::::inc_providers(id); + } + }); + + pallet_session::GenesisConfig:: { keys: session_keys } + .assimilate_storage(&mut t) + .unwrap(); + + t.into() +} diff --git a/frame/beefy-mmr/src/tests.rs b/frame/beefy-mmr/src/tests.rs new file mode 100644 index 0000000000000..7c70766623b4d --- /dev/null +++ b/frame/beefy-mmr/src/tests.rs @@ -0,0 +1,148 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +use std::vec; + +use beefy_primitives::{ + mmr::{BeefyNextAuthoritySet, MmrLeafVersion}, + ValidatorSet, +}; +use codec::{Decode, Encode}; +use hex_literal::hex; + +use sp_core::H256; +use sp_io::TestExternalities; +use sp_runtime::{traits::Keccak256, DigestItem}; + +use frame_support::traits::OnInitialize; + +use crate::mock::*; + +fn init_block(block: u64) { + System::set_block_number(block); + Session::on_initialize(block); + Mmr::on_initialize(block); + Beefy::on_initialize(block); + BeefyMmr::on_initialize(block); +} + +pub fn beefy_log(log: ConsensusLog) -> DigestItem { + DigestItem::Consensus(BEEFY_ENGINE_ID, log.encode()) +} + +fn offchain_key(pos: usize) -> Vec { + (::INDEXING_PREFIX, pos as u64).encode() +} + +fn read_mmr_leaf(ext: &mut TestExternalities, index: usize) -> MmrLeaf { + type Node = pallet_mmr_primitives::DataOrHash; + ext.persist_offchain_overlay(); + let offchain_db = ext.offchain_db(); + offchain_db + .get(&offchain_key(index)) + .map(|d| Node::decode(&mut &*d).unwrap()) + .map(|n| match n { + Node::Data(d) => d, + _ => panic!("Unexpected MMR node."), + }) + .unwrap() +} + +#[test] +fn should_contain_mmr_digest() { + let mut ext = new_test_ext(vec![1, 2, 3, 4]); + ext.execute_with(|| { + init_block(1); + + assert_eq!( + System::digest().logs, + vec![beefy_log(ConsensusLog::MmrRoot( + hex!("f3e3afbfa69e89cd1e99f8d3570155962f3346d1d8758dc079be49ef70387758").into() + ))] + ); + + // unique every time + init_block(2); + + assert_eq!( + System::digest().logs, + vec![ + beefy_log(ConsensusLog::MmrRoot( + hex!("f3e3afbfa69e89cd1e99f8d3570155962f3346d1d8758dc079be49ef70387758").into() + )), + beefy_log(ConsensusLog::AuthoritiesChange(ValidatorSet { + validators: vec![mock_beefy_id(3), mock_beefy_id(4),], + id: 1, + })), + beefy_log(ConsensusLog::MmrRoot( + hex!("7d4ae4524bae75d52b63f08eab173b0c263eb95ae2c55c3a1d871241bd0cc559").into() + 
)), + ] + ); + }); +} + +#[test] +fn should_contain_valid_leaf_data() { + let mut ext = new_test_ext(vec![1, 2, 3, 4]); + ext.execute_with(|| { + init_block(1); + }); + + let mmr_leaf = read_mmr_leaf(&mut ext, 0); + assert_eq!( + mmr_leaf, + MmrLeaf { + version: MmrLeafVersion::new(1, 5), + parent_number_and_hash: (0_u64, H256::repeat_byte(0x45)), + beefy_next_authority_set: BeefyNextAuthoritySet { + id: 1, + len: 2, + root: hex!("01b1a742589773fc054c8f5021a456316ffcec0370b25678b0696e116d1ef9ae") + .into(), + }, + parachain_heads: hex!( + "ed893c8f8cc87195a5d4d2805b011506322036bcace79642aa3e94ab431e442e" + ) + .into(), + } + ); + + // build second block on top + ext.execute_with(|| { + init_block(2); + }); + + let mmr_leaf = read_mmr_leaf(&mut ext, 1); + assert_eq!( + mmr_leaf, + MmrLeaf { + version: MmrLeafVersion::new(1, 5), + parent_number_and_hash: (1_u64, H256::repeat_byte(0x45)), + beefy_next_authority_set: BeefyNextAuthoritySet { + id: 2, + len: 2, + root: hex!("9c6b2c1b0d0b25a008e6c882cc7b415f309965c72ad2b944ac0931048ca31cd5") + .into(), + }, + parachain_heads: hex!( + "ed893c8f8cc87195a5d4d2805b011506322036bcace79642aa3e94ab431e442e" + ) + .into(), + } + ); +} diff --git a/frame/beefy/Cargo.toml b/frame/beefy/Cargo.toml new file mode 100644 index 0000000000000..e5af666e7ca54 --- /dev/null +++ b/frame/beefy/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "pallet-beefy" +version = "4.0.0-dev" +authors = ["Parity Technologies "] +edition = "2018" +license = "Apache-2.0" + +[dependencies] +codec = { version = "2.2.0", package = "parity-scale-codec", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } +serde = { version = "1.0.130", optional = true } + +frame-support = { version = "4.0.0-dev", path = "../support", default-features = false } +frame-system = { version = "4.0.0-dev", path = "../system", default-features = false } + +sp-runtime = { version = "4.0.0-dev", path = 
"../../primitives/runtime", default-features = false } +sp-std = { version = "4.0.0-dev", path = "../../primitives/std", default-features = false } + +pallet-session = { version = "4.0.0-dev", path = "../session", default-features = false } + +beefy-primitives = { version = "4.0.0-dev", path = "../../primitives/beefy", default-features = false } + +[dev-dependencies] +sp-core = { version = "4.0.0-dev", path = "../../primitives/core" } +sp-io = { version = "4.0.0-dev", path = "../../primitives/io" } +sp-staking = { version = "4.0.0-dev", path = "../../primitives/staking" } + +[features] +default = ["std"] +std = [ + "codec/std", + "scale-info/std", + "serde", + "beefy-primitives/std", + "frame-support/std", + "frame-system/std", + "sp-runtime/std", + "sp-std/std", + "pallet-session/std", +] diff --git a/frame/beefy/src/lib.rs b/frame/beefy/src/lib.rs new file mode 100644 index 0000000000000..32f3133373432 --- /dev/null +++ b/frame/beefy/src/lib.rs @@ -0,0 +1,179 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#![cfg_attr(not(feature = "std"), no_std)] + +use codec::Encode; + +use frame_support::{traits::OneSessionHandler, Parameter}; + +use sp_runtime::{ + generic::DigestItem, + traits::{IsMember, Member}, + RuntimeAppPublic, +}; +use sp_std::prelude::*; + +use beefy_primitives::{AuthorityIndex, ConsensusLog, ValidatorSet, BEEFY_ENGINE_ID}; + +#[cfg(test)] +mod mock; + +#[cfg(test)] +mod tests; + +pub use pallet::*; + +#[frame_support::pallet] +pub mod pallet { + use super::*; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + + #[pallet::config] + pub trait Config: frame_system::Config { + /// Authority identifier type + type BeefyId: Member + Parameter + RuntimeAppPublic + Default + MaybeSerializeDeserialize; + } + + #[pallet::pallet] + pub struct Pallet(PhantomData); + + #[pallet::hooks] + impl Hooks> for Pallet {} + + #[pallet::call] + impl Pallet {} + + /// The current authorities set + #[pallet::storage] + #[pallet::getter(fn authorities)] + pub(super) type Authorities = StorageValue<_, Vec, ValueQuery>; + + /// The current validator set id + #[pallet::storage] + #[pallet::getter(fn validator_set_id)] + pub(super) type ValidatorSetId = + StorageValue<_, beefy_primitives::ValidatorSetId, ValueQuery>; + + /// Authorities set scheduled to be used with the next session + #[pallet::storage] + #[pallet::getter(fn next_authorities)] + pub(super) type NextAuthorities = StorageValue<_, Vec, ValueQuery>; + + #[pallet::genesis_config] + pub struct GenesisConfig { + pub authorities: Vec, + } + + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { authorities: Vec::new() } + } + } + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + Pallet::::initialize_authorities(&self.authorities); + } + } +} + +impl Pallet { + /// Return the current active BEEFY validator set. 
+	pub fn validator_set() -> ValidatorSet {
+		ValidatorSet:: { validators: Self::authorities(), id: Self::validator_set_id() }
+	}
+
+	fn change_authorities(new: Vec, queued: Vec) {
+		// As in GRANDPA, we trigger a validator set change only if the validator
+		// set has actually changed.
+		if new != Self::authorities() {
+			>::put(&new);
+
+			let next_id = Self::validator_set_id() + 1u64;
+			>::put(next_id);
+
+			let log: DigestItem = DigestItem::Consensus(
+				BEEFY_ENGINE_ID,
+				ConsensusLog::AuthoritiesChange(ValidatorSet { validators: new, id: next_id })
+					.encode(),
+			);
+			>::deposit_log(log);
+		}
+
+		>::put(&queued);
+	}
+
+	fn initialize_authorities(authorities: &[T::BeefyId]) {
+		if authorities.is_empty() {
+			return
+		}
+
+		assert!(>::get().is_empty(), "Authorities are already initialized!");
+
+		>::put(authorities);
+		>::put(0);
+		// Like `pallet_session`, initialize the next validator set as well.
+		>::put(authorities);
+	}
+}
+
+impl sp_runtime::BoundToRuntimeAppPublic for Pallet {
+	type Public = T::BeefyId;
+}
+
+impl OneSessionHandler for Pallet {
+	type Key = T::BeefyId;
+
+	fn on_genesis_session<'a, I: 'a>(validators: I)
+	where
+		I: Iterator,
+	{
+		let authorities = validators.map(|(_, k)| k).collect::>();
+		Self::initialize_authorities(&authorities);
+	}
+
+	fn on_new_session<'a, I: 'a>(changed: bool, validators: I, queued_validators: I)
+	where
+		I: Iterator,
+	{
+		if changed {
+			let next_authorities = validators.map(|(_, k)| k).collect::>();
+			let next_queued_authorities = queued_validators.map(|(_, k)| k).collect::>();
+
+			Self::change_authorities(next_authorities, next_queued_authorities);
+		}
+	}
+
+	fn on_disabled(i: usize) {
+		let log: DigestItem = DigestItem::Consensus(
+			BEEFY_ENGINE_ID,
+			ConsensusLog::::OnDisabled(i as AuthorityIndex).encode(),
+		);
+
+		>::deposit_log(log);
+	}
+}
+
+impl IsMember for Pallet {
+	fn is_member(authority_id: &T::BeefyId) -> bool {
+		Self::authorities().iter().any(|id| id == authority_id)
+	}
+}
diff --git 
a/frame/beefy/src/mock.rs b/frame/beefy/src/mock.rs new file mode 100644 index 0000000000000..696d0d972e70c --- /dev/null +++ b/frame/beefy/src/mock.rs @@ -0,0 +1,164 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use std::vec; + +use frame_support::{ + construct_runtime, parameter_types, sp_io::TestExternalities, BasicExternalities, +}; +use sp_core::H256; +use sp_runtime::{ + app_crypto::ecdsa::Public, + impl_opaque_keys, + testing::Header, + traits::{BlakeTwo256, ConvertInto, IdentityLookup, OpaqueKeys}, + Perbill, +}; + +use crate as pallet_beefy; + +pub use beefy_primitives::{crypto::AuthorityId as BeefyId, ConsensusLog, BEEFY_ENGINE_ID}; + +impl_opaque_keys! { + pub struct MockSessionKeys { + pub dummy: pallet_beefy::Pallet, + } +} + +type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; +type Block = frame_system::mocking::MockBlock; + +construct_runtime!( + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic, + { + System: frame_system::{Pallet, Call, Config, Storage, Event}, + Beefy: pallet_beefy::{Pallet, Call, Config, Storage}, + Session: pallet_session::{Pallet, Call, Storage, Event, Config}, + } +); + +parameter_types! 
{ + pub const BlockHashCount: u64 = 250; + pub const SS58Prefix: u8 = 42; +} + +impl frame_system::Config for Test { + type BaseCallFilter = frame_support::traits::Everything; + type BlockWeights = (); + type BlockLength = (); + type DbWeight = (); + type Origin = Origin; + type Index = u64; + type BlockNumber = u64; + type Hash = H256; + type Call = Call; + type Hashing = BlakeTwo256; + type AccountId = u64; + type Lookup = IdentityLookup; + type Header = Header; + type Event = Event; + type BlockHashCount = BlockHashCount; + type Version = (); + type PalletInfo = PalletInfo; + type AccountData = (); + type OnNewAccount = (); + type OnKilledAccount = (); + type SystemWeightInfo = (); + type SS58Prefix = SS58Prefix; + type OnSetCode = (); +} + +impl pallet_beefy::Config for Test { + type BeefyId = BeefyId; +} + +parameter_types! { + pub const Period: u64 = 1; + pub const Offset: u64 = 0; + pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(33); +} + +impl pallet_session::Config for Test { + type Event = Event; + type ValidatorId = u64; + type ValidatorIdOf = ConvertInto; + type ShouldEndSession = pallet_session::PeriodicSessions; + type NextSessionRotation = pallet_session::PeriodicSessions; + type SessionManager = MockSessionManager; + type SessionHandler = ::KeyTypeIdProviders; + type Keys = MockSessionKeys; + type DisabledValidatorsThreshold = DisabledValidatorsThreshold; + type WeightInfo = (); +} + +pub struct MockSessionManager; + +impl pallet_session::SessionManager for MockSessionManager { + fn end_session(_: sp_staking::SessionIndex) {} + fn start_session(_: sp_staking::SessionIndex) {} + fn new_session(idx: sp_staking::SessionIndex) -> Option> { + if idx == 0 || idx == 1 { + Some(vec![1, 2]) + } else if idx == 2 { + Some(vec![3, 4]) + } else { + None + } + } +} + +// Note, that we can't use `UintAuthorityId` here. Reason is that the implementation +// of `to_public_key()` assumes, that a public key is 32 bytes long. 
This is true for +// ed25519 and sr25519 but *not* for ecdsa. An ecdsa public key is 33 bytes. +pub fn mock_beefy_id(id: u8) -> BeefyId { + let buf: [u8; 33] = [id; 33]; + let pk = Public::from_raw(buf); + BeefyId::from(pk) +} + +pub fn mock_authorities(vec: Vec) -> Vec<(u64, BeefyId)> { + vec.into_iter().map(|id| ((id as u64), mock_beefy_id(id))).collect() +} + +pub fn new_test_ext(ids: Vec) -> TestExternalities { + new_test_ext_raw_authorities(mock_authorities(ids)) +} + +pub fn new_test_ext_raw_authorities(authorities: Vec<(u64, BeefyId)>) -> TestExternalities { + let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); + + let session_keys: Vec<_> = authorities + .iter() + .enumerate() + .map(|(_, id)| (id.0 as u64, id.0 as u64, MockSessionKeys { dummy: id.1.clone() })) + .collect(); + + BasicExternalities::execute_with_storage(&mut t, || { + for (ref id, ..) in &session_keys { + frame_system::Pallet::::inc_providers(id); + } + }); + + pallet_session::GenesisConfig:: { keys: session_keys } + .assimilate_storage(&mut t) + .unwrap(); + + t.into() +} diff --git a/frame/beefy/src/tests.rs b/frame/beefy/src/tests.rs new file mode 100644 index 0000000000000..24f9acaf76bfc --- /dev/null +++ b/frame/beefy/src/tests.rs @@ -0,0 +1,142 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use std::vec; + +use beefy_primitives::ValidatorSet; +use codec::Encode; + +use sp_core::H256; +use sp_runtime::DigestItem; + +use frame_support::traits::OnInitialize; + +use crate::mock::*; + +fn init_block(block: u64) { + System::set_block_number(block); + Session::on_initialize(block); +} + +pub fn beefy_log(log: ConsensusLog) -> DigestItem { + DigestItem::Consensus(BEEFY_ENGINE_ID, log.encode()) +} + +#[test] +fn genesis_session_initializes_authorities() { + let want = vec![mock_beefy_id(1), mock_beefy_id(2), mock_beefy_id(3), mock_beefy_id(4)]; + + new_test_ext(vec![1, 2, 3, 4]).execute_with(|| { + let authorities = Beefy::authorities(); + + assert!(authorities.len() == 2); + assert_eq!(want[0], authorities[0]); + assert_eq!(want[1], authorities[1]); + + assert!(Beefy::validator_set_id() == 0); + + let next_authorities = Beefy::next_authorities(); + + assert!(next_authorities.len() == 2); + assert_eq!(want[0], next_authorities[0]); + assert_eq!(want[1], next_authorities[1]); + }); +} + +#[test] +fn session_change_updates_authorities() { + new_test_ext(vec![1, 2, 3, 4]).execute_with(|| { + init_block(1); + + assert!(0 == Beefy::validator_set_id()); + + // no change - no log + assert!(System::digest().logs.is_empty()); + + init_block(2); + + assert!(1 == Beefy::validator_set_id()); + + let want = beefy_log(ConsensusLog::AuthoritiesChange(ValidatorSet { + validators: vec![mock_beefy_id(3), mock_beefy_id(4)], + id: 1, + })); + + let log = System::digest().logs[0].clone(); + + assert_eq!(want, log); + }); +} + +#[test] +fn session_change_updates_next_authorities() { + let want = vec![mock_beefy_id(1), mock_beefy_id(2), mock_beefy_id(3), mock_beefy_id(4)]; + + new_test_ext(vec![1, 2, 3, 4]).execute_with(|| { + init_block(1); + + let next_authorities = Beefy::next_authorities(); + + assert!(next_authorities.len() == 2); + assert_eq!(want[0], next_authorities[0]); + assert_eq!(want[1], next_authorities[1]); + + init_block(2); + + let next_authorities = 
Beefy::next_authorities(); + + assert!(next_authorities.len() == 2); + assert_eq!(want[2], next_authorities[0]); + assert_eq!(want[3], next_authorities[1]); + }); +} + +#[test] +fn validator_set_at_genesis() { + let want = vec![mock_beefy_id(1), mock_beefy_id(2)]; + + new_test_ext(vec![1, 2, 3, 4]).execute_with(|| { + let vs = Beefy::validator_set(); + + assert_eq!(vs.id, 0u64); + assert_eq!(vs.validators[0], want[0]); + assert_eq!(vs.validators[1], want[1]); + }); +} + +#[test] +fn validator_set_updates_work() { + let want = vec![mock_beefy_id(1), mock_beefy_id(2), mock_beefy_id(3), mock_beefy_id(4)]; + + new_test_ext(vec![1, 2, 3, 4]).execute_with(|| { + init_block(1); + + let vs = Beefy::validator_set(); + + assert_eq!(vs.id, 0u64); + assert_eq!(want[0], vs.validators[0]); + assert_eq!(want[1], vs.validators[1]); + + init_block(2); + + let vs = Beefy::validator_set(); + + assert_eq!(vs.id, 1u64); + assert_eq!(want[2], vs.validators[0]); + assert_eq!(want[3], vs.validators[1]); + }); +} diff --git a/primitives/beefy/Cargo.toml b/primitives/beefy/Cargo.toml new file mode 100644 index 0000000000000..633ac0e8fbcd1 --- /dev/null +++ b/primitives/beefy/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "beefy-primitives" +version = "4.0.0-dev" +authors = ["Parity Technologies "] +edition = "2018" +license = "Apache-2.0" + +[dependencies] +codec = { version = "2.2.0", package = "parity-scale-codec", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } + +sp-api = { version = "4.0.0-dev", path = "../api", default-features = false } +sp-application-crypto = { version = "4.0.0-dev", path = "../application-crypto", default-features = false } +sp-core = { version = "4.0.0-dev", path = "../core", default-features = false } +sp-runtime = { version = "4.0.0-dev", path = "../runtime", default-features = false } +sp-std = { version = "4.0.0-dev", path = "../std", default-features = false } + 
+[dev-dependencies] +hex-literal = "0.3" + +sp-keystore = { version = "0.10.0-dev", path = "../keystore" } + +[features] +default = ["std"] +std = [ + "codec/std", + "scale-info/std", + "sp-api/std", + "sp-application-crypto/std", + "sp-core/std", + "sp-runtime/std", + "sp-std/std", +] diff --git a/primitives/beefy/src/commitment.rs b/primitives/beefy/src/commitment.rs new file mode 100644 index 0000000000000..7aab93bbcb973 --- /dev/null +++ b/primitives/beefy/src/commitment.rs @@ -0,0 +1,264 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use sp_std::{cmp, prelude::*}; + +use crate::{crypto::Signature, ValidatorSetId}; + +/// A commitment signed by GRANDPA validators as part of BEEFY protocol. +/// +/// The commitment contains a [payload] extracted from the finalized block at height [block_number]. +/// GRANDPA validators collect signatures on commitments and a stream of such signed commitments +/// (see [SignedCommitment]) forms the BEEFY protocol. +#[derive(Clone, Debug, PartialEq, Eq, codec::Encode, codec::Decode)] +pub struct Commitment { + /// The payload being signed. + /// + /// This should be some form of cumulative representation of the chain (think MMR root hash). + /// The payload should also contain some details that allow the light client to verify next + /// validator set. 
The protocol does not enforce any particular format of this data, + /// nor how often it should be present in commitments, however the light client has to be + /// provided with full validator set whenever it performs the transition (i.e. importing first + /// block with [validator_set_id] incremented). + pub payload: TPayload, + + /// Finalized block number this commitment is for. + /// + /// GRANDPA validators agree on a block they create a commitment for and start collecting + /// signatures. This process is called a round. + /// There might be multiple rounds in progress (depending on the block choice rule), however + /// since the payload is supposed to be cumulative, it is not required to import all + /// commitments. + /// BEEFY light client is expected to import at least one commitment per epoch, + /// but is free to import as many as it requires. + pub block_number: TBlockNumber, + + /// BEEFY validator set supposed to sign this commitment. + /// + /// Validator set is changing once per epoch. The Light Client must be provided by details + /// about the validator set whenever it's importing first commitment with a new + /// `validator_set_id`. Validator set data MUST be verifiable, for instance using [payload] + /// information. + pub validator_set_id: ValidatorSetId, +} + +impl cmp::PartialOrd for Commitment +where + TBlockNumber: cmp::Ord, + TPayload: cmp::Eq, +{ + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl cmp::Ord for Commitment +where + TBlockNumber: cmp::Ord, + TPayload: cmp::Eq, +{ + fn cmp(&self, other: &Self) -> cmp::Ordering { + self.validator_set_id + .cmp(&other.validator_set_id) + .then_with(|| self.block_number.cmp(&other.block_number)) + } +} + +/// A commitment with matching GRANDPA validators' signatures. +#[derive(Clone, Debug, PartialEq, Eq, codec::Encode, codec::Decode)] +pub struct SignedCommitment { + /// The commitment signatures are collected for. 
+ pub commitment: Commitment, + /// GRANDPA validators' signatures for the commitment. + /// + /// The length of this `Vec` must match number of validators in the current set (see + /// [Commitment::validator_set_id]). + pub signatures: Vec>, +} + +impl SignedCommitment { + /// Return the number of collected signatures. + pub fn no_of_signatures(&self) -> usize { + self.signatures.iter().filter(|x| x.is_some()).count() + } +} + +/// A [SignedCommitment] with a version number. This variant will be appended +/// to the block justifications for the block for which the signed commitment +/// has been generated. +#[derive(Clone, Debug, PartialEq, codec::Encode, codec::Decode)] +pub enum VersionedCommitment { + #[codec(index = 1)] + /// Current active version + V1(SignedCommitment), +} + +#[cfg(test)] +mod tests { + + use sp_core::{keccak_256, Pair}; + use sp_keystore::{testing::KeyStore, SyncCryptoStore, SyncCryptoStorePtr}; + + use super::*; + use codec::Decode; + + use crate::{crypto, KEY_TYPE}; + + type TestCommitment = Commitment; + type TestSignedCommitment = SignedCommitment; + type TestVersionedCommitment = VersionedCommitment; + + // The mock signatures are equivalent to the ones produced by the BEEFY keystore + fn mock_signatures() -> (crypto::Signature, crypto::Signature) { + let store: SyncCryptoStorePtr = KeyStore::new().into(); + + let alice = sp_core::ecdsa::Pair::from_string("//Alice", None).unwrap(); + let _ = + SyncCryptoStore::insert_unknown(&*store, KEY_TYPE, "//Alice", alice.public().as_ref()) + .unwrap(); + + let msg = keccak_256(b"This is the first message"); + let sig1 = SyncCryptoStore::ecdsa_sign_prehashed(&*store, KEY_TYPE, &alice.public(), &msg) + .unwrap() + .unwrap(); + + let msg = keccak_256(b"This is the second message"); + let sig2 = SyncCryptoStore::ecdsa_sign_prehashed(&*store, KEY_TYPE, &alice.public(), &msg) + .unwrap() + .unwrap(); + + (sig1.into(), sig2.into()) + } + + #[test] + fn commitment_encode_decode() { + // given + let 
commitment: TestCommitment = + Commitment { payload: "Hello World!".into(), block_number: 5, validator_set_id: 0 }; + + // when + let encoded = codec::Encode::encode(&commitment); + let decoded = TestCommitment::decode(&mut &*encoded); + + // then + assert_eq!(decoded, Ok(commitment)); + assert_eq!( + encoded, + hex_literal::hex!( + "3048656c6c6f20576f726c6421050000000000000000000000000000000000000000000000" + ) + ); + } + + #[test] + fn signed_commitment_encode_decode() { + // given + let commitment: TestCommitment = + Commitment { payload: "Hello World!".into(), block_number: 5, validator_set_id: 0 }; + + let sigs = mock_signatures(); + + let signed = SignedCommitment { + commitment, + signatures: vec![None, None, Some(sigs.0), Some(sigs.1)], + }; + + // when + let encoded = codec::Encode::encode(&signed); + let decoded = TestSignedCommitment::decode(&mut &*encoded); + + // then + assert_eq!(decoded, Ok(signed)); + assert_eq!( + encoded, + hex_literal::hex!( + "3048656c6c6f20576f726c64210500000000000000000000000000000000000000000000001000 + 0001558455ad81279df0795cc985580e4fb75d72d948d1107b2ac80a09abed4da8480c746cc321f2319a5e99a830e314d + 10dd3cd68ce3dc0c33c86e99bcb7816f9ba01012d6e1f8105c337a86cdd9aaacdc496577f3db8c55ef9e6fd48f2c5c05a + 2274707491635d8ba3df64f324575b7b2a34487bca2324b6a0046395a71681be3d0c2a00" + ) + ); + } + + #[test] + fn signed_commitment_count_signatures() { + // given + let commitment: TestCommitment = + Commitment { payload: "Hello World!".into(), block_number: 5, validator_set_id: 0 }; + + let sigs = mock_signatures(); + + let mut signed = SignedCommitment { + commitment, + signatures: vec![None, None, Some(sigs.0), Some(sigs.1)], + }; + assert_eq!(signed.no_of_signatures(), 2); + + // when + signed.signatures[2] = None; + + // then + assert_eq!(signed.no_of_signatures(), 1); + } + + #[test] + fn commitment_ordering() { + fn commitment( + block_number: u128, + validator_set_id: crate::ValidatorSetId, + ) -> TestCommitment { + Commitment { 
payload: "Hello World!".into(), block_number, validator_set_id } + } + + // given + let a = commitment(1, 0); + let b = commitment(2, 1); + let c = commitment(10, 0); + let d = commitment(10, 1); + + // then + assert!(a < b); + assert!(a < c); + assert!(c < b); + assert!(c < d); + assert!(b < d); + } + + #[test] + fn versioned_commitment_encode_decode() { + let commitment: TestCommitment = + Commitment { payload: "Hello World!".into(), block_number: 5, validator_set_id: 0 }; + + let sigs = mock_signatures(); + + let signed = SignedCommitment { + commitment, + signatures: vec![None, None, Some(sigs.0), Some(sigs.1)], + }; + + let versioned = TestVersionedCommitment::V1(signed.clone()); + + let encoded = codec::Encode::encode(&versioned); + + assert_eq!(1, encoded[0]); + assert_eq!(encoded[1..], codec::Encode::encode(&signed)); + + let decoded = TestVersionedCommitment::decode(&mut &*encoded); + + assert_eq!(decoded, Ok(versioned)); + } +} diff --git a/primitives/beefy/src/lib.rs b/primitives/beefy/src/lib.rs new file mode 100644 index 0000000000000..790b915ab98db --- /dev/null +++ b/primitives/beefy/src/lib.rs @@ -0,0 +1,137 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#![cfg_attr(not(feature = "std"), no_std)] +#![warn(missing_docs)] + +//! Primitives for BEEFY protocol. +//! +//! 
The crate contains shared data types used by BEEFY protocol and documentation (in a form of +//! code) for building a BEEFY light client. +//! +//! BEEFY is a gadget that runs alongside another finality gadget (for instance GRANDPA). +//! For simplicity (and the initially intended use case) the documentation says GRANDPA in places +//! where a more abstract "Finality Gadget" term could be used, but there is no reason why BEEFY +//! wouldn't run with some other finality scheme. +//! BEEFY validator set is supposed to be tracking the Finality Gadget validator set, but note that +//! it will use a different set of keys. For Polkadot use case we plan to use `secp256k1` for BEEFY, +//! while GRANDPA uses `ed25519`. + +mod commitment; +pub mod mmr; +pub mod witness; + +pub use commitment::{Commitment, SignedCommitment, VersionedCommitment}; + +use codec::{Codec, Decode, Encode}; +use scale_info::TypeInfo; +use sp_core::H256; +use sp_std::prelude::*; + +/// Key type for BEEFY module. +pub const KEY_TYPE: sp_application_crypto::KeyTypeId = sp_application_crypto::KeyTypeId(*b"beef"); + +/// BEEFY cryptographic types +/// +/// This module basically introduces three crypto types: +/// - `crypto::Pair` +/// - `crypto::Public` +/// - `crypto::Signature` +/// +/// Your code should use the above types as concrete types for all crypto related +/// functionality. +/// +/// The current underlying crypto scheme used is ECDSA. This can be changed, +/// without affecting code restricted against the above listed crypto types. +pub mod crypto { + use sp_application_crypto::{app_crypto, ecdsa}; + app_crypto!(ecdsa, crate::KEY_TYPE); + + /// Identity of a BEEFY authority using ECDSA as its crypto. + pub type AuthorityId = Public; + + /// Signature for a BEEFY authority using ECDSA as its crypto. + pub type AuthoritySignature = Signature; +} + +/// The `ConsensusEngineId` of BEEFY. 
+pub const BEEFY_ENGINE_ID: sp_runtime::ConsensusEngineId = *b"BEEF"; + +/// Authority set id starts with zero at genesis +pub const GENESIS_AUTHORITY_SET_ID: u64 = 0; + +/// A typedef for validator set id. +pub type ValidatorSetId = u64; + +/// A set of BEEFY authorities, a.k.a. validators. +#[derive(Decode, Encode, Debug, PartialEq, Clone, TypeInfo)] +pub struct ValidatorSet { + /// Public keys of the validator set elements + pub validators: Vec, + /// Identifier of the validator set + pub id: ValidatorSetId, +} + +impl ValidatorSet { + /// Return an empty validator set with id of 0. + pub fn empty() -> Self { + Self { validators: Default::default(), id: Default::default() } + } +} + +/// The index of an authority. +pub type AuthorityIndex = u32; + +/// The type used to represent an MMR root hash. +pub type MmrRootHash = H256; + +/// A consensus log item for BEEFY. +#[derive(Decode, Encode, TypeInfo)] +pub enum ConsensusLog { + /// The authorities have changed. + #[codec(index = 1)] + AuthoritiesChange(ValidatorSet), + /// Disable the authority with given index. + #[codec(index = 2)] + OnDisabled(AuthorityIndex), + /// MMR root hash. + #[codec(index = 3)] + MmrRoot(MmrRootHash), +} + +/// BEEFY vote message. +/// +/// A vote message is a direct vote created by a BEEFY node on every voting round +/// and is gossiped to its peers. +#[derive(Debug, Decode, Encode, TypeInfo)] +pub struct VoteMessage { + /// Commit to information extracted from a finalized block + pub commitment: Commitment, + /// Node authority id + pub id: Id, + /// Node signature + pub signature: Signature, +} + +sp_api::decl_runtime_apis! { + /// API necessary for BEEFY voters. 
+	pub trait BeefyApi
+	{
+		/// Return the current active BEEFY validator set
+		fn validator_set() -> ValidatorSet;
+	}
+}
diff --git a/primitives/beefy/src/mmr.rs b/primitives/beefy/src/mmr.rs
new file mode 100644
index 0000000000000..e428c0ea01215
--- /dev/null
+++ b/primitives/beefy/src/mmr.rs
@@ -0,0 +1,132 @@
+// This file is part of Substrate.
+
+// Copyright (C) 2021 Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! BEEFY + MMR utilities.
+//!
+//! While BEEFY can be used completely independently as an additional consensus gadget,
+//! it is designed around a main use case of bridging standalone networks together.
+//! For that use case it's common to use some aggregated data structure (like MMR) to be
+//! used in conjunction with BEEFY, to be able to efficiently prove any past blockchain data.
+//!
+//! This module contains primitives used by Polkadot implementation of the BEEFY+MMR bridge,
+//! but we imagine they will be useful for other chains that either want to bridge with Polkadot
+//! or are completely standalone, but heavily inspired by Polkadot.
+
+use codec::{Decode, Encode};
+use scale_info::TypeInfo;
+
+/// A standard leaf that gets added every block to the MMR constructed by Substrate's `pallet_mmr`.
+#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode)]
+pub struct MmrLeaf {
+	/// Version of the leaf format. 
+	///
+	/// Can be used to enable future format migrations and compatibility.
+	/// See [`MmrLeafVersion`] documentation for details.
+	pub version: MmrLeafVersion,
+	/// Current block parent number and hash.
+	pub parent_number_and_hash: (BlockNumber, Hash),
+	/// A merkle root of the next BEEFY authority set.
+	pub beefy_next_authority_set: BeefyNextAuthoritySet,
+	/// A merkle root of all registered parachain heads.
+	pub parachain_heads: MerkleRoot,
+}
+
+/// An MMR leaf versioning scheme.
+///
+/// Version is a single byte that consists of two components:
+/// - `major` - 3 bits
+/// - `minor` - 5 bits
+///
+/// Any change in encoding that adds new items to the structure is considered non-breaking, hence
+/// only requires an update of `minor` version. Any backward incompatible change (i.e. decoding to a
+/// previous leaf format fails) should be indicated with `major` version bump.
+///
+/// Given that adding new struct elements in SCALE is backward compatible (i.e. the old format can
+/// still be decoded and the new fields will simply be ignored), we expect the major version to be
+/// bumped very rarely (hopefully never).
+#[derive(Debug, Default, PartialEq, Eq, Clone, Encode, Decode)]
+pub struct MmrLeafVersion(u8);
+impl MmrLeafVersion {
+	/// Create new version object from `major` and `minor` components.
+	///
+	/// Panics if `major` does not fit into 3 bits or `minor` does not fit into 5 bits.
+	pub fn new(major: u8, minor: u8) -> Self {
+		if major > 0b111 || minor > 0b11111 {
+			panic!("Version components are too big.");
+		}
+		let version = (major << 5) + minor;
+		Self(version)
+	}
+
+	/// Split the version into `major` and `minor` sub-components.
+	pub fn split(&self) -> (u8, u8) {
+		let major = self.0 >> 5;
+		let minor = self.0 & 0b11111;
+		(major, minor)
+	}
+}
+
+/// Details of the next BEEFY authority set.
+#[derive(Debug, Default, PartialEq, Eq, Clone, Encode, Decode, TypeInfo)]
+pub struct BeefyNextAuthoritySet {
+	/// Id of the next set. 
+ /// + /// Id is required to correlate BEEFY signed commitments with the validator set. + /// Light Client can easily verify that the commitment witness it is getting is + /// produced by the latest validator set. + pub id: crate::ValidatorSetId, + /// Number of validators in the set. + /// + /// Some BEEFY Light Clients may use an interactive protocol to verify only subset + /// of signatures. We put set length here, so that these clients can verify the minimal + /// number of required signatures. + pub len: u32, + /// Merkle Root Hash build from BEEFY AuthorityIds. + /// + /// This is used by Light Clients to confirm that the commitments are signed by the correct + /// validator set. Light Clients using interactive protocol, might verify only subset of + /// signatures, hence don't require the full list here (will receive inclusion proofs). + pub root: MerkleRoot, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn should_construct_version_correctly() { + let tests = vec![(0, 0, 0b00000000), (7, 2, 0b11100010), (7, 31, 0b11111111)]; + + for (major, minor, version) in tests { + let v = MmrLeafVersion::new(major, minor); + assert_eq!(v.encode(), vec![version], "Encoding does not match."); + assert_eq!(v.split(), (major, minor)); + } + } + + #[test] + #[should_panic] + fn should_panic_if_major_too_large() { + MmrLeafVersion::new(8, 0); + } + + #[test] + #[should_panic] + fn should_panic_if_minor_too_large() { + MmrLeafVersion::new(0, 32); + } +} diff --git a/primitives/beefy/src/witness.rs b/primitives/beefy/src/witness.rs new file mode 100644 index 0000000000000..c28a464e72df5 --- /dev/null +++ b/primitives/beefy/src/witness.rs @@ -0,0 +1,162 @@ +// This file is part of Substrate. + +// Copyright (C) 2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Primitives for light, 2-phase interactive verification protocol. +//! +//! Instead of submitting full list of signatures, it's possible to submit first a witness +//! form of [SignedCommitment]. +//! This can later be verified by the client requesting only some (out of all) signatures for +//! verification. This allows lowering the data and computation cost of verifying the +//! signed commitment. + +use sp_std::prelude::*; + +use crate::{ + commitment::{Commitment, SignedCommitment}, + crypto::Signature, +}; + +/// A light form of [SignedCommitment]. +/// +/// This is a light ("witness") form of the signed commitment. Instead of containing full list of +/// signatures, which might be heavy and expensive to verify, it only contains a bit vector of +/// validators which signed the original [SignedCommitment] and a merkle root of all signatures. +/// +/// This can be used by light clients for 2-phase interactive verification (for instance for +/// Ethereum Mainnet), in a commit-reveal like scheme, where first we submit only the signed +/// commitment witness and later on, the client picks only some signatures to verify at random. +#[derive(Debug, PartialEq, Eq, codec::Encode, codec::Decode)] +pub struct SignedCommitmentWitness { + /// The full content of the commitment. + pub commitment: Commitment, + + /// The bit vector of validators who signed the commitment. + pub signed_by: Vec, // TODO [ToDr] Consider replacing with bitvec crate + + /// A merkle root of signatures in the original signed commitment. 
+ pub signatures_merkle_root: TMerkleRoot, +} + +impl + SignedCommitmentWitness +{ + /// Convert [SignedCommitment] into [SignedCommitmentWitness]. + /// + /// This takes a [SignedCommitment], which contains full signatures + /// and converts it into a witness form, which does not contain full signatures, + /// only a bit vector indicating which validators have signed the original [SignedCommitment] + /// and a merkle root of all signatures. + /// + /// Returns the full list of signatures along with the witness. + pub fn from_signed( + signed: SignedCommitment, + merkelize: TMerkelize, + ) -> (Self, Vec>) + where + TMerkelize: FnOnce(&[Option]) -> TMerkleRoot, + { + let SignedCommitment { commitment, signatures } = signed; + let signed_by = signatures.iter().map(|s| s.is_some()).collect(); + let signatures_merkle_root = merkelize(&signatures); + + (Self { commitment, signed_by, signatures_merkle_root }, signatures) + } +} + +#[cfg(test)] +mod tests { + + use sp_core::{keccak_256, Pair}; + use sp_keystore::{testing::KeyStore, SyncCryptoStore, SyncCryptoStorePtr}; + + use super::*; + use codec::Decode; + + use crate::{crypto, KEY_TYPE}; + + type TestCommitment = Commitment; + type TestSignedCommitment = SignedCommitment; + type TestSignedCommitmentWitness = + SignedCommitmentWitness>>; + + // The mock signatures are equivalent to the ones produced by the BEEFY keystore + fn mock_signatures() -> (crypto::Signature, crypto::Signature) { + let store: SyncCryptoStorePtr = KeyStore::new().into(); + + let alice = sp_core::ecdsa::Pair::from_string("//Alice", None).unwrap(); + let _ = + SyncCryptoStore::insert_unknown(&*store, KEY_TYPE, "//Alice", alice.public().as_ref()) + .unwrap(); + + let msg = keccak_256(b"This is the first message"); + let sig1 = SyncCryptoStore::ecdsa_sign_prehashed(&*store, KEY_TYPE, &alice.public(), &msg) + .unwrap() + .unwrap(); + + let msg = keccak_256(b"This is the second message"); + let sig2 = SyncCryptoStore::ecdsa_sign_prehashed(&*store, 
KEY_TYPE, &alice.public(), &msg) + .unwrap() + .unwrap(); + + (sig1.into(), sig2.into()) + } + + fn signed_commitment() -> TestSignedCommitment { + let commitment: TestCommitment = + Commitment { payload: "Hello World!".into(), block_number: 5, validator_set_id: 0 }; + + let sigs = mock_signatures(); + + SignedCommitment { commitment, signatures: vec![None, None, Some(sigs.0), Some(sigs.1)] } + } + + #[test] + fn should_convert_signed_commitment_to_witness() { + // given + let signed = signed_commitment(); + + // when + let (witness, signatures) = + TestSignedCommitmentWitness::from_signed(signed, |sigs| sigs.to_vec()); + + // then + assert_eq!(witness.signatures_merkle_root, signatures); + } + + #[test] + fn should_encode_and_decode_witness() { + // given + let signed = signed_commitment(); + let (witness, _) = TestSignedCommitmentWitness::from_signed(signed, |sigs| sigs.to_vec()); + + // when + let encoded = codec::Encode::encode(&witness); + let decoded = TestSignedCommitmentWitness::decode(&mut &*encoded); + + // then + assert_eq!(decoded, Ok(witness)); + assert_eq!( + encoded, + hex_literal::hex!( + "3048656c6c6f20576f726c64210500000000000000000000000000000000000000000000001000 + 00010110000001558455ad81279df0795cc985580e4fb75d72d948d1107b2ac80a09abed4da8480c746cc321f2319a5e9 + 9a830e314d10dd3cd68ce3dc0c33c86e99bcb7816f9ba01012d6e1f8105c337a86cdd9aaacdc496577f3db8c55ef9e6fd + 48f2c5c05a2274707491635d8ba3df64f324575b7b2a34487bca2324b6a0046395a71681be3d0c2a00" + ) + ); + } +} From e254b24b4bd9eb14abb75df205706d38b585f60c Mon Sep 17 00:00:00 2001 From: Jimmy Chu Date: Fri, 24 Sep 2021 07:34:37 +0800 Subject: [PATCH 30/33] Fix gitlab command (#9843) --- .gitlab-ci.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c5cb6c571b475..fa986923708d3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -763,12 +763,14 @@ publish-rustdoc: - cp README.md /tmp/doc/ - git checkout gh-pages # Remove directories no 
longer necessary, as specified in $RUSTDOCS_DEPLOY_REFS. - # Also ensure $RUSTDOCS_DEPLOY_REFS is non-space + # Also ensure $RUSTDOCS_DEPLOY_REFS is not just empty spaces. + # Even though this block spans multiple lines, they are concatenated to run as a single line + # command, so note for the semi-colons in the inner-most code block. - if [[ ! -z ${RUSTDOCS_DEPLOY_REFS// } ]]; then for FILE in *; do if [[ ! " $RUSTDOCS_DEPLOY_REFS " =~ " $FILE " ]]; then - echo "Removing ${FILE}..." - rm -rf $FILE + echo "Removing ${FILE}..."; + rm -rf $FILE; fi done fi From 68e34234bc0a00b380e53ad8b21cf3ac47f82628 Mon Sep 17 00:00:00 2001 From: Qinxuan Chen Date: Fri, 24 Sep 2021 15:30:38 +0800 Subject: [PATCH 31/33] beefy-gadget-rpc: fix toml warnings (#9854) Signed-off-by: koushiro --- client/beefy/rpc/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/beefy/rpc/Cargo.toml b/client/beefy/rpc/Cargo.toml index 6bb5c5fcc668e..8af2fa3eac867 100644 --- a/client/beefy/rpc/Cargo.toml +++ b/client/beefy/rpc/Cargo.toml @@ -20,7 +20,7 @@ codec = { version = "2.2.0", package = "parity-scale-codec", features = ["derive sc-rpc = { version = "4.0.0-dev", path = "../../rpc" } sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } -sp-runtime = { versin = "4.0.0-dev", path = "../../../primitives/runtime" } +sp-runtime = { version = "4.0.0-dev", path = "../../../primitives/runtime" } beefy-gadget = { version = "4.0.0-dev", path = "../." 
} beefy-primitives = { version = "4.0.0-dev", path = "../../../primitives/beefy" } From b00e05470787422aab31a09d7e1198148d0ec7e1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexander=20Thei=C3=9Fen?= Date: Fri, 24 Sep 2021 12:17:51 +0200 Subject: [PATCH 32/33] De-duplicate the dispatchable and the bare_* functions (#9838) --- frame/contracts/src/lib.rs | 208 ++++++++++++++++++++++--------------- 1 file changed, 125 insertions(+), 83 deletions(-) diff --git a/frame/contracts/src/lib.rs b/frame/contracts/src/lib.rs index 77efcc6986e64..0d7e4cbf56474 100644 --- a/frame/contracts/src/lib.rs +++ b/frame/contracts/src/lib.rs @@ -104,7 +104,7 @@ pub use crate::{ schedule::{HostFnWeights, InstructionWeights, Limits, Schedule}, }; use crate::{ - exec::{Executable, Stack as ExecStack}, + exec::{AccountIdOf, ExecError, Executable, Stack as ExecStack}, gas::GasMeter, storage::{ContractInfo, DeletedContract, Storage}, wasm::PrefabWasmModule, @@ -112,13 +112,14 @@ use crate::{ }; use frame_support::{ dispatch::Dispatchable, + ensure, traits::{Contains, Currency, Get, Randomness, StorageVersion, Time}, weights::{GetDispatchInfo, PostDispatchInfo, Weight}, }; use frame_system::Pallet as System; use pallet_contracts_primitives::{ - Code, ContractAccessError, ContractExecResult, ContractInstantiateResult, GetStorageResult, - InstantiateReturnValue, + Code, ContractAccessError, ContractExecResult, ContractInstantiateResult, ExecReturnValue, + GetStorageResult, InstantiateReturnValue, }; use sp_core::{crypto::UncheckedFrom, Bytes}; use sp_runtime::traits::{Convert, Hash, Saturating, StaticLookup}; @@ -272,18 +273,8 @@ pub mod pallet { ) -> DispatchResultWithPostInfo { let origin = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; - let mut gas_meter = GasMeter::new(gas_limit); - let schedule = T::Schedule::get(); - let result = ExecStack::>::run_call( - origin, - dest, - &mut gas_meter, - &schedule, - value, - data, - None, - ); - gas_meter.into_dispatch_result(result, 
T::WeightInfo::call()) + let output = Self::internal_call(origin, dest, value, gas_limit, data, None); + output.gas_meter.into_dispatch_result(output.result, T::WeightInfo::call()) } /// Instantiates a new contract from the supplied `code` optionally transferring @@ -325,26 +316,19 @@ pub mod pallet { ) -> DispatchResultWithPostInfo { let origin = ensure_signed(origin)?; let code_len = code.len() as u32; - ensure!(code_len <= T::Schedule::get().limits.code_len, Error::::CodeTooLarge); - let mut gas_meter = GasMeter::new(gas_limit); - let schedule = T::Schedule::get(); - let executable = PrefabWasmModule::from_code(code, &schedule)?; - let code_len = executable.code_len(); - ensure!(code_len <= T::Schedule::get().limits.code_len, Error::::CodeTooLarge); - let result = ExecStack::>::run_instantiate( + let salt_len = salt.len() as u32; + let output = Self::internal_instantiate( origin, - executable, - &mut gas_meter, - &schedule, endowment, + gas_limit, + Code::Upload(Bytes(code)), data, - &salt, + salt, None, - ) - .map(|(_address, output)| output); - gas_meter.into_dispatch_result( - result, - T::WeightInfo::instantiate_with_code(code_len / 1024, salt.len() as u32 / 1024), + ); + output.gas_meter.into_dispatch_result( + output.result.map(|(_address, result)| result), + T::WeightInfo::instantiate_with_code(code_len / 1024, salt_len / 1024), ) } @@ -365,22 +349,20 @@ pub mod pallet { salt: Vec, ) -> DispatchResultWithPostInfo { let origin = ensure_signed(origin)?; - let mut gas_meter = GasMeter::new(gas_limit); - let schedule = T::Schedule::get(); - let executable = PrefabWasmModule::from_storage(code_hash, &schedule, &mut gas_meter)?; - let result = ExecStack::>::run_instantiate( + let salt_len = salt.len() as u32; + let output = Self::internal_instantiate( origin, - executable, - &mut gas_meter, - &schedule, endowment, + gas_limit, + Code::Existing(code_hash), data, - &salt, + salt, None, + ); + output.gas_meter.into_dispatch_result( + output.result.map(|(_address, 
output)| output), + T::WeightInfo::instantiate(salt_len / 1024), ) - .map(|(_address, output)| output); - gas_meter - .into_dispatch_result(result, T::WeightInfo::instantiate(salt.len() as u32 / 1024)) } } @@ -535,6 +517,20 @@ pub mod pallet { pub(crate) type DeletionQueue = StorageValue<_, Vec, ValueQuery>; } +/// Return type of the private [`Pallet::internal_call`] function. +type InternalCallOutput = InternalOutput; + +/// Return type of the private [`Pallet::internal_instantiate`] function. +type InternalInstantiateOutput = InternalOutput, ExecReturnValue)>; + +/// Return type of private helper functions. +struct InternalOutput { + /// The gas meter that was used to execute the call. + gas_meter: GasMeter, + /// The result of the call. + result: Result, +} + impl Pallet where T::AccountId: UncheckedFrom + AsRef<[u8]>, @@ -556,25 +552,16 @@ where dest: T::AccountId, value: BalanceOf, gas_limit: Weight, - input_data: Vec, + data: Vec, debug: bool, ) -> ContractExecResult { - let mut gas_meter = GasMeter::new(gas_limit); - let schedule = T::Schedule::get(); let mut debug_message = if debug { Some(Vec::new()) } else { None }; - let result = ExecStack::>::run_call( - origin, - dest, - &mut gas_meter, - &schedule, - value, - input_data, - debug_message.as_mut(), - ); + let output = + Self::internal_call(origin, dest, value, gas_limit, data, debug_message.as_mut()); ContractExecResult { - result: result.map_err(|r| r.error), - gas_consumed: gas_meter.gas_consumed(), - gas_required: gas_meter.gas_required(), + result: output.result.map_err(|r| r.error), + gas_consumed: output.gas_meter.gas_consumed(), + gas_required: output.gas_meter.gas_required(), debug_message: debug_message.unwrap_or_default(), } } @@ -601,38 +588,23 @@ where salt: Vec, debug: bool, ) -> ContractInstantiateResult { - let mut gas_meter = GasMeter::new(gas_limit); - let schedule = T::Schedule::get(); - let executable = match code { - Code::Upload(Bytes(binary)) => PrefabWasmModule::from_code(binary, 
&schedule), - Code::Existing(hash) => PrefabWasmModule::from_storage(hash, &schedule, &mut gas_meter), - }; - let executable = match executable { - Ok(executable) => executable, - Err(error) => - return ContractInstantiateResult { - result: Err(error.into()), - gas_consumed: gas_meter.gas_consumed(), - gas_required: gas_meter.gas_required(), - debug_message: Vec::new(), - }, - }; let mut debug_message = if debug { Some(Vec::new()) } else { None }; - let result = ExecStack::>::run_instantiate( + let output = Self::internal_instantiate( origin, - executable, - &mut gas_meter, - &schedule, endowment, + gas_limit, + code, data, - &salt, + salt, debug_message.as_mut(), - ) - .and_then(|(account_id, result)| Ok(InstantiateReturnValue { result, account_id })); + ); ContractInstantiateResult { - result: result.map_err(|e| e.error), - gas_consumed: gas_meter.gas_consumed(), - gas_required: gas_meter.gas_required(), + result: output + .result + .map(|(account_id, result)| InstantiateReturnValue { result, account_id }) + .map_err(|e| e.error), + gas_consumed: output.gas_meter.gas_consumed(), + gas_required: output.gas_meter.gas_required(), debug_message: debug_message.unwrap_or_default(), } } @@ -709,4 +681,74 @@ where ) -> frame_support::dispatch::DispatchResult { self::wasm::reinstrument(module, schedule) } + + /// Internal function that does the actual call. + /// + /// Called by dispatchables and public functions. + fn internal_call( + origin: T::AccountId, + dest: T::AccountId, + value: BalanceOf, + gas_limit: Weight, + data: Vec, + debug_message: Option<&mut Vec>, + ) -> InternalCallOutput { + let mut gas_meter = GasMeter::new(gas_limit); + let schedule = T::Schedule::get(); + let result = ExecStack::>::run_call( + origin, + dest, + &mut gas_meter, + &schedule, + value, + data, + debug_message, + ); + InternalCallOutput { gas_meter, result } + } + + /// Internal function that does the actual instantiation. + /// + /// Called by dispatchables and public functions. 
+ fn internal_instantiate( + origin: T::AccountId, + endowment: BalanceOf, + gas_limit: Weight, + code: Code>, + data: Vec, + salt: Vec, + debug_message: Option<&mut Vec>, + ) -> InternalInstantiateOutput { + let mut gas_meter = GasMeter::new(gas_limit); + let schedule = T::Schedule::get(); + let try_exec = || { + let executable = match code { + Code::Upload(Bytes(binary)) => { + ensure!( + binary.len() as u32 <= schedule.limits.code_len, + >::CodeTooLarge + ); + let executable = PrefabWasmModule::from_code(binary, &schedule)?; + ensure!( + executable.code_len() <= schedule.limits.code_len, + >::CodeTooLarge + ); + executable + }, + Code::Existing(hash) => + PrefabWasmModule::from_storage(hash, &schedule, &mut gas_meter)?, + }; + ExecStack::>::run_instantiate( + origin, + executable, + &mut gas_meter, + &schedule, + endowment, + data, + &salt, + debug_message, + ) + }; + InternalInstantiateOutput { result: try_exec(), gas_meter } + } } From 2ab5cd2880b1ec798a7c2a72c29f995f54b45f13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bastian=20K=C3=B6cher?= Date: Fri, 24 Sep 2021 13:42:08 +0300 Subject: [PATCH 33/33] Prepare UI tests for rust 1.55 (#9637) * Prepare UI tests for rust 1.54 * Delete wrong_page.stderr * CI: run with a staging CI image * Revert "CI: run with a staging CI image" This reverts commit 66f5b00d14b50fd9d8fbf773f7e884f380697591. * CI: debug, again * LOG_TARGET is only used in std * Remove unnecessary unsafe * Fixes * Use correct rustc locally * FMT * Compile with benchmarking * Review feedback * Some ui tests * I know... 
* Fix wasm tests Co-authored-by: Denis P Co-authored-by: thiolliere --- .gitlab-ci.yml | 4 +- client/chain-spec/src/chain_spec.rs | 1 + client/db/src/storage_cache.rs | 1 + client/executor/runtime-test/src/lib.rs | 6 +- client/executor/src/native_executor.rs | 3 - client/network/src/protocol/message.rs | 8 +- client/service/src/client/call_executor.rs | 6 +- client/service/src/client/wasm_override.rs | 34 ++--- frame/contracts/src/benchmarking/mod.rs | 2 +- frame/grandpa/src/benchmarking.rs | 2 - .../merkle-mountain-range/src/benchmarking.rs | 2 - .../support/src/storage/bounded_btree_map.rs | 8 +- .../support/src/storage/bounded_btree_set.rs | 8 +- frame/support/src/storage/bounded_vec.rs | 8 +- frame/support/src/storage/weak_bounded_vec.rs | 8 +- frame/support/src/traits/hooks.rs | 4 +- .../no_std_genesis_config.stderr | 8 +- .../undefined_call_part.stderr | 6 +- .../undefined_event_part.stderr | 10 +- .../undefined_genesis_config_part.stderr | 8 +- .../undefined_inherent_part.stderr | 6 +- .../undefined_origin_part.stderr | 10 +- .../undefined_validate_unsigned_part.stderr | 6 +- ...ed_keyword_two_times_integrity_test.stderr | 2 +- ...eserved_keyword_two_times_on_initialize.rs | 2 + ...ved_keyword_two_times_on_initialize.stderr | 14 +- .../tests/derive_no_bound_ui/clone.stderr | 16 +- .../tests/derive_no_bound_ui/default.stderr | 16 +- .../call_argument_invalid_bound.stderr | 16 +- .../call_argument_invalid_bound_2.stderr | 16 +- .../call_argument_invalid_bound_3.stderr | 18 ++- .../pallet_ui/event_field_not_member.stderr | 16 +- .../tests/pallet_ui/event_not_in_trait.stderr | 2 +- .../genesis_default_not_satisfied.stderr | 2 +- .../pallet_ui/genesis_invalid_generic.stderr | 2 +- .../tests/pallet_ui/hooks_invalid_item.stderr | 8 +- ...age_ensure_span_are_ok_on_wrong_gen.stderr | 142 +++++++++++------- ...re_span_are_ok_on_wrong_gen_unnamed.stderr | 142 +++++++++++------- .../pallet_ui/storage_info_unsatisfied.stderr | 6 +- 
.../storage_info_unsatisfied_nmap.stderr | 10 +- .../reserved_keyword/on_initialize.stderr | 10 +- frame/transaction-payment/src/lib.rs | 16 +- frame/transaction-storage/src/lib.rs | 4 +- .../ui/empty_impl_runtime_apis_call.stderr | 2 +- .../ui/impl_incorrect_method_signature.stderr | 4 +- .../ui/mock_advanced_block_id_by_value.stderr | 2 +- .../tests/ui/mock_only_self_reference.stderr | 4 +- ...reference_in_impl_runtime_apis_call.stderr | 4 +- primitives/io/src/lib.rs | 15 +- .../tests/ui/pass_by_enum_with_struct.stderr | 2 +- .../ui/pass_by_enum_with_value_variant.stderr | 2 +- .../ui/pass_by_inner_with_two_fields.stderr | 2 +- primitives/runtime/src/traits.rs | 1 + primitives/state-machine/src/backend.rs | 16 +- primitives/state-machine/src/ext.rs | 24 ++- primitives/state-machine/src/lib.rs | 27 ++-- .../src/overlayed_changes/mod.rs | 26 ++-- .../state-machine/src/trie_backend_essence.rs | 11 +- test-utils/runtime/client/src/trait_tests.rs | 1 - utils/wasm-builder/src/wasm_project.rs | 4 + 60 files changed, 431 insertions(+), 335 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index fa986923708d3..2b3284d9e1a83 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -38,7 +38,9 @@ variables: &default-vars CARGO_INCREMENTAL: 0 DOCKER_OS: "debian:stretch" ARCH: "x86_64" - CI_IMAGE: "paritytech/ci-linux:production" + # FIXME: revert me + # CI_IMAGE: "paritytech/ci-linux:production" + CI_IMAGE: "paritytech/ci-linux:staging-1.54.0" # FIXME set to release CARGO_UNLEASH_INSTALL_PARAMS: "--version 1.0.0-alpha.12" CARGO_UNLEASH_PKG_DEF: "--skip node node-* pallet-template pallet-example pallet-example-* subkey chain-spec-builder" diff --git a/client/chain-spec/src/chain_spec.rs b/client/chain-spec/src/chain_spec.rs index fcdb053c47c16..ff3a99760bd28 100644 --- a/client/chain-spec/src/chain_spec.rs +++ b/client/chain-spec/src/chain_spec.rs @@ -163,6 +163,7 @@ struct ClientSpec { // Never used, left only for backward compatibility. 
consensus_engine: (), #[serde(skip_serializing)] + #[allow(unused)] genesis: serde::de::IgnoredAny, /// Mapping from `block_hash` to `wasm_code`. /// diff --git a/client/db/src/storage_cache.rs b/client/db/src/storage_cache.rs index a895324a2e7b9..5fef0e5b12d08 100644 --- a/client/db/src/storage_cache.rs +++ b/client/db/src/storage_cache.rs @@ -1418,6 +1418,7 @@ mod qc { #[derive(Debug, Clone)] struct Node { hash: H256, + #[allow(unused)] parent: H256, state: KeyMap, changes: KeySet, diff --git a/client/executor/runtime-test/src/lib.rs b/client/executor/runtime-test/src/lib.rs index c9f7d6b1e2970..2b5699fa3f77a 100644 --- a/client/executor/runtime-test/src/lib.rs +++ b/client/executor/runtime-test/src/lib.rs @@ -91,7 +91,7 @@ sp_core::wasm_export_functions! { // This function dirties the **host** pages. I.e. we dirty 4KiB at a time and it will take // 16 writes to process a single wasm page. - let mut heap_ptr = heap_base as usize; + let heap_ptr = heap_base as usize; // Find the next wasm page boundary. let heap_ptr = round_up_to(heap_ptr, 65536); @@ -234,7 +234,7 @@ sp_core::wasm_export_functions! { match instance.get_global_val("test_global") { Some(sp_sandbox::Value::I64(val)) => val, None => 30, - val => 40, + _ => 40, } } @@ -362,7 +362,7 @@ sp_core::wasm_export_functions! { // It is expected that the given pointer is not allocated. fn check_and_set_in_heap(heap_base: u32, offset: u32) { let test_message = b"Hello invalid heap memory"; - let ptr = unsafe { (heap_base + offset) as *mut u8 }; + let ptr = (heap_base + offset) as *mut u8; let message_slice = unsafe { sp_std::slice::from_raw_parts_mut(ptr, test_message.len()) }; diff --git a/client/executor/src/native_executor.rs b/client/executor/src/native_executor.rs index 38dba55b5f87c..77b1ec7abf4f2 100644 --- a/client/executor/src/native_executor.rs +++ b/client/executor/src/native_executor.rs @@ -101,8 +101,6 @@ pub struct WasmExecutor { host_functions: Arc>, /// WASM runtime cache. 
cache: Arc, - /// The size of the instances cache. - max_runtime_instances: usize, /// The path to a directory which the executor can leverage for a file cache, e.g. put there /// compiled artifacts. cache_path: Option, @@ -138,7 +136,6 @@ impl WasmExecutor { default_heap_pages: default_heap_pages.unwrap_or(DEFAULT_HEAP_PAGES), host_functions: Arc::new(host_functions), cache: Arc::new(RuntimeCache::new(max_runtime_instances, cache_path.clone())), - max_runtime_instances, cache_path, } } diff --git a/client/network/src/protocol/message.rs b/client/network/src/protocol/message.rs index 8938c27aeddd8..001f6cbd7e455 100644 --- a/client/network/src/protocol/message.rs +++ b/client/network/src/protocol/message.rs @@ -143,10 +143,10 @@ pub struct RemoteReadResponse { /// Announcement summary used for debug logging. #[derive(Debug)] pub struct AnnouncementSummary { - block_hash: H::Hash, - number: H::Number, - parent_hash: H::Hash, - state: Option, + pub block_hash: H::Hash, + pub number: H::Number, + pub parent_hash: H::Hash, + pub state: Option, } impl generic::BlockAnnounce { diff --git a/client/service/src/client/call_executor.rs b/client/service/src/client/call_executor.rs index 9b8774ce6d497..d7a8b6f227e8f 100644 --- a/client/service/src/client/call_executor.rs +++ b/client/service/src/client/call_executor.rs @@ -41,7 +41,7 @@ use std::{cell::RefCell, panic::UnwindSafe, result, sync::Arc}; pub struct LocalCallExecutor { backend: Arc, executor: E, - wasm_override: Option>, + wasm_override: Option, wasm_substitutes: WasmSubstitutes, spawn_handle: Box, client_config: ClientConfig, @@ -62,7 +62,7 @@ where let wasm_override = client_config .wasm_runtime_overrides .as_ref() - .map(|p| WasmOverride::new(p.clone(), executor.clone())) + .map(|p| WasmOverride::new(p.clone(), &executor)) .transpose()?; let wasm_substitutes = WasmSubstitutes::new( @@ -371,7 +371,7 @@ mod tests { 1, ); - let overrides = crate::client::wasm_override::dummy_overrides(&executor); + let overrides = 
crate::client::wasm_override::dummy_overrides(); let onchain_code = WrappedRuntimeCode(substrate_test_runtime::wasm_binary_unwrap().into()); let onchain_code = RuntimeCode { code_fetcher: &onchain_code, diff --git a/client/service/src/client/wasm_override.rs b/client/service/src/client/wasm_override.rs index 6d5a071269d4d..3d28467a9cbd9 100644 --- a/client/service/src/client/wasm_override.rs +++ b/client/service/src/client/wasm_override.rs @@ -104,22 +104,19 @@ impl From for sp_blockchain::Error { /// Scrapes WASM from a folder and returns WASM from that folder /// if the runtime spec version matches. #[derive(Clone, Debug)] -pub struct WasmOverride { +pub struct WasmOverride { // Map of runtime spec version -> Wasm Blob overrides: HashMap, - executor: E, } -impl WasmOverride -where - E: RuntimeVersionOf + Clone + 'static, -{ - pub fn new

(path: P, executor: E) -> Result +impl WasmOverride { + pub fn new(path: P, executor: &E) -> Result where P: AsRef, + E: RuntimeVersionOf, { - let overrides = Self::scrape_overrides(path.as_ref(), &executor)?; - Ok(Self { overrides, executor }) + let overrides = Self::scrape_overrides(path.as_ref(), executor)?; + Ok(Self { overrides }) } /// Gets an override by it's runtime spec version. @@ -131,7 +128,10 @@ where /// Scrapes a folder for WASM runtimes. /// Returns a hashmap of the runtime version and wasm runtime code. - fn scrape_overrides(dir: &Path, executor: &E) -> Result> { + fn scrape_overrides(dir: &Path, executor: &E) -> Result> + where + E: RuntimeVersionOf, + { let handle_err = |e: std::io::Error| -> sp_blockchain::Error { WasmOverrideError::Io(dir.to_owned(), e).into() }; @@ -176,11 +176,14 @@ where Ok(overrides) } - fn runtime_version( + fn runtime_version( executor: &E, code: &WasmBlob, heap_pages: Option, - ) -> Result { + ) -> Result + where + E: RuntimeVersionOf, + { let mut ext = BasicExternalities::default(); executor .runtime_version(&mut ext, &code.runtime_code(heap_pages)) @@ -190,15 +193,12 @@ where /// Returns a WasmOverride struct filled with dummy data for testing. #[cfg(test)] -pub fn dummy_overrides(executor: &E) -> WasmOverride -where - E: RuntimeVersionOf + Clone + 'static, -{ +pub fn dummy_overrides() -> WasmOverride { let mut overrides = HashMap::new(); overrides.insert(0, WasmBlob::new(vec![0, 0, 0, 0, 0, 0, 0, 0])); overrides.insert(1, WasmBlob::new(vec![1, 1, 1, 1, 1, 1, 1, 1])); overrides.insert(2, WasmBlob::new(vec![2, 2, 2, 2, 2, 2, 2, 2])); - WasmOverride { overrides, executor: executor.clone() } + WasmOverride { overrides } } #[cfg(test)] diff --git a/frame/contracts/src/benchmarking/mod.rs b/frame/contracts/src/benchmarking/mod.rs index db657e618322e..981af218ea5a2 100644 --- a/frame/contracts/src/benchmarking/mod.rs +++ b/frame/contracts/src/benchmarking/mod.rs @@ -2241,7 +2241,7 @@ benchmarks! 
{ ); } #[cfg(not(feature = "std"))] - return Err("Run this bench with a native runtime in order to see the schedule.".into()); + Err("Run this bench with a native runtime in order to see the schedule.")?; }: {} // Execute one erc20 transfer using the ink! erc20 example contract. diff --git a/frame/grandpa/src/benchmarking.rs b/frame/grandpa/src/benchmarking.rs index b0f70adb6061d..815a18d13531e 100644 --- a/frame/grandpa/src/benchmarking.rs +++ b/frame/grandpa/src/benchmarking.rs @@ -17,8 +17,6 @@ //! Benchmarks for the GRANDPA pallet. -#![cfg_attr(not(feature = "std"), no_std)] - use super::{Pallet as Grandpa, *}; use frame_benchmarking::benchmarks; use frame_system::RawOrigin; diff --git a/frame/merkle-mountain-range/src/benchmarking.rs b/frame/merkle-mountain-range/src/benchmarking.rs index 2680b3d030067..c269afb75855c 100644 --- a/frame/merkle-mountain-range/src/benchmarking.rs +++ b/frame/merkle-mountain-range/src/benchmarking.rs @@ -17,8 +17,6 @@ //! Benchmarks for the MMR pallet. 
-#![cfg_attr(not(feature = "std"), no_std)] - use crate::*; use frame_benchmarking::{benchmarks_instance_pallet, impl_benchmark_test_suite}; use frame_support::traits::OnInitialize; diff --git a/frame/support/src/storage/bounded_btree_map.rs b/frame/support/src/storage/bounded_btree_map.rs index d0c0aa7c4f155..404814cb81693 100644 --- a/frame/support/src/storage/bounded_btree_map.rs +++ b/frame/support/src/storage/bounded_btree_map.rs @@ -20,7 +20,7 @@ use crate::{storage::StorageDecodeLength, traits::Get}; use codec::{Decode, Encode, MaxEncodedLen}; use sp_std::{ - borrow::Borrow, collections::btree_map::BTreeMap, convert::TryFrom, fmt, marker::PhantomData, + borrow::Borrow, collections::btree_map::BTreeMap, convert::TryFrom, marker::PhantomData, ops::Deref, }; @@ -173,12 +173,12 @@ where } #[cfg(feature = "std")] -impl fmt::Debug for BoundedBTreeMap +impl std::fmt::Debug for BoundedBTreeMap where - BTreeMap: fmt::Debug, + BTreeMap: std::fmt::Debug, S: Get, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("BoundedBTreeMap").field(&self.0).field(&Self::bound()).finish() } } diff --git a/frame/support/src/storage/bounded_btree_set.rs b/frame/support/src/storage/bounded_btree_set.rs index 182884e655dd2..f74ff12854a58 100644 --- a/frame/support/src/storage/bounded_btree_set.rs +++ b/frame/support/src/storage/bounded_btree_set.rs @@ -20,7 +20,7 @@ use crate::{storage::StorageDecodeLength, traits::Get}; use codec::{Decode, Encode, MaxEncodedLen}; use sp_std::{ - borrow::Borrow, collections::btree_set::BTreeSet, convert::TryFrom, fmt, marker::PhantomData, + borrow::Borrow, collections::btree_set::BTreeSet, convert::TryFrom, marker::PhantomData, ops::Deref, }; @@ -157,12 +157,12 @@ where } #[cfg(feature = "std")] -impl fmt::Debug for BoundedBTreeSet +impl std::fmt::Debug for BoundedBTreeSet where - BTreeSet: fmt::Debug, + BTreeSet: std::fmt::Debug, S: Get, { - fn fmt(&self, 
f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("BoundedBTreeSet").field(&self.0).field(&Self::bound()).finish() } } diff --git a/frame/support/src/storage/bounded_vec.rs b/frame/support/src/storage/bounded_vec.rs index b45c294f8d4a4..44eaab905423b 100644 --- a/frame/support/src/storage/bounded_vec.rs +++ b/frame/support/src/storage/bounded_vec.rs @@ -28,7 +28,7 @@ use core::{ ops::{Deref, Index, IndexMut}, slice::SliceIndex, }; -use sp_std::{convert::TryFrom, fmt, marker::PhantomData, prelude::*}; +use sp_std::{convert::TryFrom, marker::PhantomData, prelude::*}; /// A bounded vector. /// @@ -201,12 +201,12 @@ impl Default for BoundedVec { } #[cfg(feature = "std")] -impl fmt::Debug for BoundedVec +impl std::fmt::Debug for BoundedVec where - T: fmt::Debug, + T: std::fmt::Debug, S: Get, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("BoundedVec").field(&self.0).field(&Self::bound()).finish() } } diff --git a/frame/support/src/storage/weak_bounded_vec.rs b/frame/support/src/storage/weak_bounded_vec.rs index 9c30c45c3e2e1..4655c809e014b 100644 --- a/frame/support/src/storage/weak_bounded_vec.rs +++ b/frame/support/src/storage/weak_bounded_vec.rs @@ -27,7 +27,7 @@ use core::{ ops::{Deref, Index, IndexMut}, slice::SliceIndex, }; -use sp_std::{convert::TryFrom, fmt, marker::PhantomData, prelude::*}; +use sp_std::{convert::TryFrom, marker::PhantomData, prelude::*}; /// A weakly bounded vector. 
/// @@ -171,12 +171,12 @@ impl Default for WeakBoundedVec { } #[cfg(feature = "std")] -impl fmt::Debug for WeakBoundedVec +impl std::fmt::Debug for WeakBoundedVec where - T: fmt::Debug, + T: std::fmt::Debug, S: Get, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_tuple("WeakBoundedVec").field(&self.0).field(&Self::bound()).finish() } } diff --git a/frame/support/src/traits/hooks.rs b/frame/support/src/traits/hooks.rs index adba88e5acbf3..2a8b0a156247a 100644 --- a/frame/support/src/traits/hooks.rs +++ b/frame/support/src/traits/hooks.rs @@ -19,7 +19,7 @@ use impl_trait_for_tuples::impl_for_tuples; use sp_arithmetic::traits::Saturating; -use sp_runtime::traits::{AtLeast32BitUnsigned, MaybeSerializeDeserialize}; +use sp_runtime::traits::AtLeast32BitUnsigned; /// The block initialization trait. /// @@ -294,7 +294,7 @@ pub trait Hooks { /// A trait to define the build function of a genesis config, T and I are placeholder for pallet /// trait and pallet instance. #[cfg(feature = "std")] -pub trait GenesisBuild: Default + MaybeSerializeDeserialize { +pub trait GenesisBuild: Default + sp_runtime::traits::MaybeSerializeDeserialize { /// The build function is called within an externalities allowing storage APIs. /// Thus one can write to storage using regular pallet storages. 
fn build(&self); diff --git a/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr b/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr index 5bc831f58988b..3dc7fcda9f18a 100644 --- a/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr +++ b/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr @@ -10,7 +10,7 @@ error: `Pallet` does not have the std feature enabled, this will cause the `test 22 | | } | |_^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `test_pallet::__substrate_genesis_config_check::is_std_enabled_for_genesis` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/no_std_genesis_config.rs:19:11 @@ -30,7 +30,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 22 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this enum | 1 | use frame_system::RawOrigin; @@ -48,7 +48,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 22 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use frame_support_test::Pallet; @@ -70,7 +70,7 @@ error[E0412]: cannot find type `GenesisConfig` in crate `test_pallet` 22 | | } | |_^ not found in `test_pallet` | - = note: this error originates in a macro (in Nightly builds, run with 
-Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this struct | 1 | use frame_system::GenesisConfig; diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr index 8781fe0df201a..2629cf4101923 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr @@ -13,7 +13,7 @@ error: `Pallet` does not have #[pallet::call] defined, perhaps you should remove 31 | | } | |_- in this macro invocation | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `pallet::__substrate_call_check::is_call_part_defined` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_call_part.rs:28:11 @@ -33,7 +33,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this enum | 1 | use frame_system::RawOrigin; @@ -51,7 +51,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use 
crate::pallet::Pallet; diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr index fa837698aa642..af69b79ed1a64 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr @@ -13,7 +13,7 @@ error: `Pallet` does not have #[pallet::event] defined, perhaps you should remov 31 | | } | |_- in this macro invocation | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `pallet::__substrate_event_check::is_event_part_defined` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_event_part.rs:28:11 @@ -33,7 +33,7 @@ error[E0412]: cannot find type `Event` in module `pallet` 31 | | } | |_^ not found in `pallet` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this enum | 1 | use frame_system::Event; @@ -51,7 +51,7 @@ error[E0412]: cannot find type `Event` in module `pallet` 31 | | } | |_^ not found in `pallet` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use crate::Event; @@ -71,7 +71,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: 
this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this enum | 1 | use frame_system::RawOrigin; @@ -89,7 +89,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use crate::pallet::Pallet; diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr index 699f66a414ed2..bfedb921bca44 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr @@ -13,7 +13,7 @@ error: `Pallet` does not have #[pallet::genesis_config] defined, perhaps you sho 31 | | } | |_- in this macro invocation | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `pallet::__substrate_genesis_config_check::is_genesis_config_defined` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_genesis_config_part.rs:28:17 @@ -33,7 +33,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this enum | 1 | use 
frame_system::RawOrigin; @@ -51,7 +51,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use crate::pallet::Pallet; @@ -75,7 +75,7 @@ error[E0412]: cannot find type `GenesisConfig` in module `pallet` 31 | | } | |_^ not found in `pallet` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this struct | 1 | use frame_system::GenesisConfig; diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr index 88ff9ee910937..50dde1108263b 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr @@ -13,7 +13,7 @@ error: `Pallet` does not have #[pallet::inherent] defined, perhaps you should re 31 | | } | |_- in this macro invocation | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `pallet::__substrate_inherent_check::is_inherent_part_defined` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_inherent_part.rs:28:11 @@ -33,7 +33,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in 
Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this enum | 1 | use frame_system::RawOrigin; @@ -51,7 +51,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use crate::pallet::Pallet; diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr index 3b3aa75c1ea08..b5f3ec4d381bc 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr @@ -13,7 +13,7 @@ error: `Pallet` does not have #[pallet::origin] defined, perhaps you should remo 31 | | } | |_- in this macro invocation | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `pallet::__substrate_origin_check::is_origin_part_defined` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_origin_part.rs:28:11 @@ -33,7 +33,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this 
enum | 1 | use frame_system::RawOrigin; @@ -51,7 +51,7 @@ error[E0412]: cannot find type `Origin` in module `pallet` 31 | | } | |_^ not found in `pallet` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this type alias | 1 | use frame_system::Origin; @@ -69,7 +69,7 @@ error[E0412]: cannot find type `Origin` in module `pallet` 31 | | } | |_^ not found in `pallet` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use crate::Origin; @@ -89,7 +89,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use crate::pallet::Pallet; diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr index ac12c56d5c279..12bdce67cf038 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr @@ -13,7 +13,7 @@ error: `Pallet` does not have #[pallet::validate_unsigned] defined, perhaps you 31 | | } | |_- in this macro invocation | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this 
error originates in the macro `pallet::__substrate_validate_unsigned_check::is_validate_unsigned_part_defined` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_validate_unsigned_part.rs:28:11 @@ -33,7 +33,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing this enum | 1 | use frame_system::RawOrigin; @@ -51,7 +51,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 31 | | } | |_^ not found in `system` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) help: consider importing one of these items | 1 | use crate::pallet::Pallet; diff --git a/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_integrity_test.stderr b/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_integrity_test.stderr index 3bf5f58b43a39..86c427d8080be 100644 --- a/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_integrity_test.stderr +++ b/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_integrity_test.stderr @@ -10,7 +10,7 @@ error: `integrity_test` can only be passed once as input. 
7 | | } | |_^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `$crate::decl_module` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0601]: `main` function not found in crate `$CRATE` --> $DIR/reserved_keyword_two_times_integrity_test.rs:1:1 diff --git a/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.rs b/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.rs index ddde7c72c1cc5..18aaec12c5f39 100644 --- a/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.rs +++ b/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.rs @@ -9,3 +9,5 @@ frame_support::decl_module! { } } } + +fn main() {} diff --git a/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.stderr b/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.stderr index 2911d7ded8a23..369be77b8d249 100644 --- a/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.stderr +++ b/frame/support/test/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.stderr @@ -10,16 +10,4 @@ error: `on_initialize` can only be passed once as input. 11 | | } | |_^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0601]: `main` function not found in crate `$CRATE` - --> $DIR/reserved_keyword_two_times_on_initialize.rs:1:1 - | -1 | / frame_support::decl_module! { -2 | | pub struct Module for enum Call where origin: T::Origin, system=self { -3 | | fn on_initialize() -> Weight { -4 | | 0 -... 
| -10 | | } -11 | | } - | |_^ consider adding a `main` function to `$DIR/tests/decl_module_ui/reserved_keyword_two_times_on_initialize.rs` + = note: this error originates in the macro `$crate::decl_module` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/frame/support/test/tests/derive_no_bound_ui/clone.stderr b/frame/support/test/tests/derive_no_bound_ui/clone.stderr index 4b253ad12451b..050b576c8b9ed 100644 --- a/frame/support/test/tests/derive_no_bound_ui/clone.stderr +++ b/frame/support/test/tests/derive_no_bound_ui/clone.stderr @@ -1,7 +1,11 @@ error[E0277]: the trait bound `::C: Clone` is not satisfied - --> $DIR/clone.rs:7:2 - | -7 | c: T::C, - | ^ the trait `Clone` is not implemented for `::C` - | - = note: required by `clone` + --> $DIR/clone.rs:7:2 + | +7 | c: T::C, + | ^ the trait `Clone` is not implemented for `::C` + | +note: required by `clone` + --> $DIR/clone.rs:121:5 + | +121 | fn clone(&self) -> Self; + | ^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/frame/support/test/tests/derive_no_bound_ui/default.stderr b/frame/support/test/tests/derive_no_bound_ui/default.stderr index d58b5e9185268..7608f877a3b56 100644 --- a/frame/support/test/tests/derive_no_bound_ui/default.stderr +++ b/frame/support/test/tests/derive_no_bound_ui/default.stderr @@ -1,7 +1,11 @@ error[E0277]: the trait bound `::C: std::default::Default` is not satisfied - --> $DIR/default.rs:7:2 - | -7 | c: T::C, - | ^ the trait `std::default::Default` is not implemented for `::C` - | - = note: required by `std::default::Default::default` + --> $DIR/default.rs:7:2 + | +7 | c: T::C, + | ^ the trait `std::default::Default` is not implemented for `::C` + | +note: required by `std::default::Default::default` + --> $DIR/default.rs:116:5 + | +116 | fn default() -> Self; + | ^^^^^^^^^^^^^^^^^^^^^ diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr index 
d1b040c16091f..3d1ea1adc9862 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr @@ -9,12 +9,16 @@ error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `::Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound.rs:20:36 - | -20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^^^ the trait `Clone` is not implemented for `::Bar` - | - = note: required by `clone` + --> $DIR/call_argument_invalid_bound.rs:20:36 + | +20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { + | ^^^ the trait `Clone` is not implemented for `::Bar` + | +note: required by `clone` + --> $DIR/clone.rs:121:5 + | +121 | fn clone(&self) -> Self; + | ^^^^^^^^^^^^^^^^^^^^^^^^ error[E0369]: binary operation `==` cannot be applied to type `&::Bar` --> $DIR/call_argument_invalid_bound.rs:20:36 diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr index 84d4863672957..c9ff843103b3b 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr @@ -9,12 +9,16 @@ error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `::Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound_2.rs:20:36 - | -20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^^^ the trait `Clone` is not implemented for `::Bar` - | - = note: required by `clone` + --> $DIR/call_argument_invalid_bound_2.rs:20:36 + | +20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { + | ^^^ the trait 
`Clone` is not implemented for `::Bar` + | +note: required by `clone` + --> $DIR/clone.rs:121:5 + | +121 | fn clone(&self) -> Self; + | ^^^^^^^^^^^^^^^^^^^^^^^^ error[E0369]: binary operation `==` cannot be applied to type `&::Bar` --> $DIR/call_argument_invalid_bound_2.rs:20:36 diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr index 73513907e85f3..144b7e12bd664 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr @@ -5,17 +5,21 @@ error[E0277]: `Bar` doesn't implement `std::fmt::Debug` | ^^^ `Bar` cannot be formatted using `{:?}` | = help: the trait `std::fmt::Debug` is not implemented for `Bar` - = note: add `#[derive(Debug)]` or manually implement `std::fmt::Debug` + = note: add `#[derive(Debug)]` to `Bar` or manually `impl std::fmt::Debug for Bar` = note: required because of the requirements on the impl of `std::fmt::Debug` for `&Bar` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound_3.rs:22:36 - | -22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { - | ^^^ the trait `Clone` is not implemented for `Bar` - | - = note: required by `clone` + --> $DIR/call_argument_invalid_bound_3.rs:22:36 + | +22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { + | ^^^ the trait `Clone` is not implemented for `Bar` + | +note: required by `clone` + --> $DIR/clone.rs:121:5 + | +121 | fn clone(&self) -> Self; + | ^^^^^^^^^^^^^^^^^^^^^^^^ error[E0369]: binary operation `==` cannot be applied to type `&Bar` --> $DIR/call_argument_invalid_bound_3.rs:22:36 diff --git a/frame/support/test/tests/pallet_ui/event_field_not_member.stderr b/frame/support/test/tests/pallet_ui/event_field_not_member.stderr index 
d48012a6c952d..bf4c05bb4e5b5 100644 --- a/frame/support/test/tests/pallet_ui/event_field_not_member.stderr +++ b/frame/support/test/tests/pallet_ui/event_field_not_member.stderr @@ -1,10 +1,14 @@ error[E0277]: the trait bound `::Bar: Clone` is not satisfied - --> $DIR/event_field_not_member.rs:23:7 - | -23 | B { b: T::Bar }, - | ^ the trait `Clone` is not implemented for `::Bar` - | - = note: required by `clone` + --> $DIR/event_field_not_member.rs:23:7 + | +23 | B { b: T::Bar }, + | ^ the trait `Clone` is not implemented for `::Bar` + | +note: required by `clone` + --> $DIR/clone.rs:121:5 + | +121 | fn clone(&self) -> Self; + | ^^^^^^^^^^^^^^^^^^^^^^^^ error[E0369]: binary operation `==` cannot be applied to type `&::Bar` --> $DIR/event_field_not_member.rs:23:7 diff --git a/frame/support/test/tests/pallet_ui/event_not_in_trait.stderr b/frame/support/test/tests/pallet_ui/event_not_in_trait.stderr index dd96c700ce7e5..e3126ad6a85dc 100644 --- a/frame/support/test/tests/pallet_ui/event_not_in_trait.stderr +++ b/frame/support/test/tests/pallet_ui/event_not_in_trait.stderr @@ -4,4 +4,4 @@ error: Invalid usage of Event, `Config` contains no associated type `Event`, but 1 | #[frame_support::pallet] | ^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in an attribute macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/frame/support/test/tests/pallet_ui/genesis_default_not_satisfied.stderr b/frame/support/test/tests/pallet_ui/genesis_default_not_satisfied.stderr index 4bc3cfdcbf9b7..ad8300b8d89b8 100644 --- a/frame/support/test/tests/pallet_ui/genesis_default_not_satisfied.stderr +++ b/frame/support/test/tests/pallet_ui/genesis_default_not_satisfied.stderr @@ -6,5 +6,5 @@ error[E0277]: the trait bound `pallet::GenesisConfig: std::default::Default` is | ::: 
$WORKSPACE/frame/support/src/traits/hooks.rs | - | pub trait GenesisBuild: Default + MaybeSerializeDeserialize { + | pub trait GenesisBuild: Default + sp_runtime::traits::MaybeSerializeDeserialize { | ------- required by this bound in `GenesisBuild` diff --git a/frame/support/test/tests/pallet_ui/genesis_invalid_generic.stderr b/frame/support/test/tests/pallet_ui/genesis_invalid_generic.stderr index f451f7b16aee5..f57b4a61c80c5 100644 --- a/frame/support/test/tests/pallet_ui/genesis_invalid_generic.stderr +++ b/frame/support/test/tests/pallet_ui/genesis_invalid_generic.stderr @@ -10,4 +10,4 @@ error: expected `<` 1 | #[frame_support::pallet] | ^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in an attribute macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/frame/support/test/tests/pallet_ui/hooks_invalid_item.stderr b/frame/support/test/tests/pallet_ui/hooks_invalid_item.stderr index 3d7303fafdcf5..ecb57bec37a7b 100644 --- a/frame/support/test/tests/pallet_ui/hooks_invalid_item.stderr +++ b/frame/support/test/tests/pallet_ui/hooks_invalid_item.stderr @@ -2,14 +2,14 @@ error[E0107]: missing generics for trait `Hooks` --> $DIR/hooks_invalid_item.rs:12:18 | 12 | impl Hooks for Pallet {} - | ^^^^^ expected 1 type argument + | ^^^^^ expected 1 generic argument | -note: trait defined here, with 1 type parameter: `BlockNumber` +note: trait defined here, with 1 generic parameter: `BlockNumber` --> $DIR/hooks.rs:214:11 | 214 | pub trait Hooks { | ^^^^^ ----------- -help: use angle brackets to add missing type argument +help: add missing generic argument | 12 | impl Hooks for Pallet {} - | ^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^ diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr 
b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr index 239de4dba949b..cd3032c49735a 100644 --- a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr @@ -1,77 +1,105 @@ error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 - | -20 | #[pallet::storage] - | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `Decode` for `Bar` + = note: required because of the requirements on the impl of `FullCodec` for `Bar` + = note: required because of the requirements on 
the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `FullEncode` for `Bar` + = note: required because of the requirements on the impl of `FullCodec` for `Bar` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `Encode` for `Bar` + = note: required because of the requirements on the impl of `FullEncode` for `Bar` + = note: required because of the requirements on the impl of `FullCodec` for `Bar` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 | -20 | #[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +9 | #[pallet::pallet] + | ^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = note: required because of the requirements on the impl of `Decode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` + = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `partial_storage_info` + --> $DIR/storage.rs:88:2 + | +88 | fn partial_storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 | -20 | #[pallet::storage] - | ^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +9 | #[pallet::pallet] + | ^^^^^^ the trait `EncodeLike` is not implemented for `Bar` | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` + = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for 
`frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `partial_storage_info` + --> $DIR/storage.rs:88:2 + | +88 | fn partial_storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 | -20 | #[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +9 | #[pallet::pallet] + | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` - -error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 - | -9 | #[pallet::pallet] - | ^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of `Decode` for `Bar` - = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `partial_storage_info` - -error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 - | -9 | #[pallet::pallet] - | ^^^^^^ the trait `EncodeLike` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of 
`FullEncode` for `Bar` - = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `partial_storage_info` - -error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 - | -9 | #[pallet::pallet] - | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of `Encode` for `Bar` - = note: required because of the requirements on the impl of `FullEncode` for `Bar` - = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `partial_storage_info` + = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `partial_storage_info` + --> $DIR/storage.rs:88:2 + | +88 | fn partial_storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr index a5bf32a0ef2d2..3d03af836986a 100644 --- a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr @@ -1,77 +1,105 @@ error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 - | -20 | #[pallet::storage] - | ^^^^^^^ the trait 
`TypeInfo` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `Decode` for `Bar` + = note: required because of the requirements on the impl of `FullCodec` for `Bar` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied + --> 
$DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `FullEncode` for `Bar` + = note: required because of the requirements on the impl of `FullCodec` for `Bar` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `Encode` for `Bar` + = note: required because of the requirements on the impl of `FullEncode` for `Bar` + = note: required because of the requirements on the impl of `FullCodec` for `Bar` + = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `build_metadata` + --> $DIR/mod.rs:113:2 + | +113 | fn build_metadata(doc: Vec<&'static str>, entries: &mut Vec); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 | -20 | #[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +9 | #[pallet::pallet] + | ^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = note: required because of the 
requirements on the impl of `Decode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` + = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `partial_storage_info` + --> $DIR/storage.rs:88:2 + | +88 | fn partial_storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 | -20 | #[pallet::storage] - | ^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +9 | #[pallet::pallet] + | ^^^^^^ the trait `EncodeLike` is not implemented for `Bar` | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` + = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `partial_storage_info` + --> $DIR/storage.rs:88:2 + | +88 | fn partial_storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 | -20 | 
#[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +9 | #[pallet::pallet] + | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageEntryMetadataBuilder` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `build_metadata` - -error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 - | -9 | #[pallet::pallet] - | ^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of `Decode` for `Bar` - = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `partial_storage_info` - -error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied - --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 - | -9 | #[pallet::pallet] - | ^^^^^^ the trait `EncodeLike` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of `FullEncode` for `Bar` - = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `partial_storage_info` - -error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied - --> 
$DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 - | -9 | #[pallet::pallet] - | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` - | - = note: required because of the requirements on the impl of `Encode` for `Bar` - = note: required because of the requirements on the impl of `FullEncode` for `Bar` - = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `partial_storage_info` + = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` +note: required by `partial_storage_info` + --> $DIR/storage.rs:88:2 + | +88 | fn partial_storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr index ad415911bc933..0ffb015e36bca 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr @@ -5,4 +5,8 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied | ^^^^^^^^^^^^^^^^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` | = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `storage_info` +note: required by `storage_info` + --> $DIR/storage.rs:71:2 + | +71 | fn storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr index 
6c92423c6a7fe..ffbc5aeea6b4f 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr @@ -4,6 +4,10 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied 10 | #[pallet::generate_storage_info] | ^^^^^^^^^^^^^^^^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `Key` - = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, Key, u32>` - = note: required by `storage_info` + = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `NMapKey` + = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, NMapKey, u32>` +note: required by `storage_info` + --> $DIR/storage.rs:71:2 + | +71 | fn storage_info() -> Vec; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/frame/support/test/tests/reserved_keyword/on_initialize.stderr b/frame/support/test/tests/reserved_keyword/on_initialize.stderr index 3df392dee9005..84e93fa52c2d9 100644 --- a/frame/support/test/tests/reserved_keyword/on_initialize.stderr +++ b/frame/support/test/tests/reserved_keyword/on_initialize.stderr @@ -4,7 +4,7 @@ error: Invalid call fn name: `on_finalize`, name is reserved and doesn't match e 28 | reserved!(on_finalize on_initialize on_runtime_upgrade offchain_worker deposit_event); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `$crate::__check_reserved_fn_name` (in Nightly builds, run with -Z macro-backtrace for more info) error: Invalid call fn 
name: `on_initialize`, name is reserved and doesn't match expected signature, please refer to `decl_module!` documentation to see the appropriate usage, or rename it to an unreserved keyword. --> $DIR/on_initialize.rs:28:1 @@ -12,7 +12,7 @@ error: Invalid call fn name: `on_initialize`, name is reserved and doesn't match 28 | reserved!(on_finalize on_initialize on_runtime_upgrade offchain_worker deposit_event); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `$crate::__check_reserved_fn_name` (in Nightly builds, run with -Z macro-backtrace for more info) error: Invalid call fn name: `on_runtime_upgrade`, name is reserved and doesn't match expected signature, please refer to `decl_module!` documentation to see the appropriate usage, or rename it to an unreserved keyword. --> $DIR/on_initialize.rs:28:1 @@ -20,7 +20,7 @@ error: Invalid call fn name: `on_runtime_upgrade`, name is reserved and doesn't 28 | reserved!(on_finalize on_initialize on_runtime_upgrade offchain_worker deposit_event); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `$crate::__check_reserved_fn_name` (in Nightly builds, run with -Z macro-backtrace for more info) error: Invalid call fn name: `offchain_worker`, name is reserved and doesn't match expected signature, please refer to `decl_module!` documentation to see the appropriate usage, or rename it to an unreserved keyword. 
--> $DIR/on_initialize.rs:28:1 @@ -28,7 +28,7 @@ error: Invalid call fn name: `offchain_worker`, name is reserved and doesn't mat 28 | reserved!(on_finalize on_initialize on_runtime_upgrade offchain_worker deposit_event); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `$crate::__check_reserved_fn_name` (in Nightly builds, run with -Z macro-backtrace for more info) error: Invalid call fn name: `deposit_event`, name is reserved and doesn't match expected signature, please refer to `decl_module!` documentation to see the appropriate usage, or rename it to an unreserved keyword. --> $DIR/on_initialize.rs:28:1 @@ -36,4 +36,4 @@ error: Invalid call fn name: `deposit_event`, name is reserved and doesn't match 28 | reserved!(on_finalize on_initialize on_runtime_upgrade offchain_worker deposit_event); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `$crate::__check_reserved_fn_name` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index e3a3bccc3d39a..11dbcc010f67c 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -332,12 +332,7 @@ pub mod pallet { .unwrap(), ); - // This is the minimum value of the multiplier. Make sure that if we collapse to this - // value, we can recover with a reasonable amount of traffic. For this test we assert - // that if we collapse to minimum, the trend will be positive with a weight value - // which is 1% more than the target. 
- let min_value = T::FeeMultiplierUpdate::min(); - let mut target = T::FeeMultiplierUpdate::target() * + let target = T::FeeMultiplierUpdate::target() * T::BlockWeights::get().get(DispatchClass::Normal).max_total.expect( "Setting `max_total` for `Normal` dispatch class is not compatible with \ `transaction-payment` pallet.", @@ -348,10 +343,17 @@ pub mod pallet { // this is most likely because in a test setup we set everything to (). return } - target += addition; #[cfg(any(feature = "std", test))] sp_io::TestExternalities::new_empty().execute_with(|| { + // This is the minimum value of the multiplier. Make sure that if we collapse to + // this value, we can recover with a reasonable amount of traffic. For this test we + // assert that if we collapse to minimum, the trend will be positive with a weight + // value which is 1% more than the target. + let min_value = T::FeeMultiplierUpdate::min(); + + let target = target + addition; + >::set_block_consumed_resources(target, 0); let next = T::FeeMultiplierUpdate::convert(min_value); assert!( diff --git a/frame/transaction-storage/src/lib.rs b/frame/transaction-storage/src/lib.rs index 2fe3c04e0229f..bc31199d90391 100644 --- a/frame/transaction-storage/src/lib.rs +++ b/frame/transaction-storage/src/lib.rs @@ -37,7 +37,7 @@ use sp_runtime::traits::{BlakeTwo256, Hash, One, Saturating, Zero}; use sp_std::{prelude::*, result}; use sp_transaction_storage_proof::{ encode_index, random_chunk, InherentError, TransactionStorageProof, CHUNK_SIZE, - DEFAULT_STORAGE_PERIOD, INHERENT_IDENTIFIER, + INHERENT_IDENTIFIER, }; /// A type alias for the balance type from this pallet's point of view. 
@@ -380,7 +380,7 @@ pub mod pallet { Self { byte_fee: 10u32.into(), entry_fee: 1000u32.into(), - storage_period: DEFAULT_STORAGE_PERIOD.into(), + storage_period: sp_transaction_storage_proof::DEFAULT_STORAGE_PERIOD.into(), max_block_transactions: DEFAULT_MAX_BLOCK_TRANSACTIONS, max_transaction_size: DEFAULT_MAX_TRANSACTION_SIZE, } diff --git a/primitives/api/test/tests/ui/empty_impl_runtime_apis_call.stderr b/primitives/api/test/tests/ui/empty_impl_runtime_apis_call.stderr index b08f056b57d1c..bf201e8b55a78 100644 --- a/primitives/api/test/tests/ui/empty_impl_runtime_apis_call.stderr +++ b/primitives/api/test/tests/ui/empty_impl_runtime_apis_call.stderr @@ -4,4 +4,4 @@ error: No api implementation given! 17 | sp_api::impl_runtime_apis! {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::impl_runtime_apis` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr b/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr index 9dd84c24b6781..2fb06c3565ea2 100644 --- a/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr +++ b/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr @@ -38,7 +38,7 @@ error[E0053]: method `Api_test_runtime_api_impl` has an incompatible type for tr | = note: expected fn pointer `fn(&RuntimeApiImpl<__SR_API_BLOCK__, RuntimeApiImplCall>, &BlockId<__SR_API_BLOCK__>, ExecutionContext, std::option::Option, Vec<_>) -> Result<_, _>` found fn pointer `fn(&RuntimeApiImpl<__SR_API_BLOCK__, RuntimeApiImplCall>, &BlockId<__SR_API_BLOCK__>, ExecutionContext, std::option::Option, Vec<_>) -> Result<_, _>` - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::impl_runtime_apis` (in 
Nightly builds, run with -Z macro-backtrace for more info) error[E0308]: mismatched types --> $DIR/impl_incorrect_method_signature.rs:17:1 @@ -52,7 +52,7 @@ error[E0308]: mismatched types 33 | | } | |_^ expected `u64`, found struct `std::string::String` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::impl_runtime_apis` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0308]: mismatched types --> $DIR/impl_incorrect_method_signature.rs:19:11 diff --git a/primitives/api/test/tests/ui/mock_advanced_block_id_by_value.stderr b/primitives/api/test/tests/ui/mock_advanced_block_id_by_value.stderr index 47cd9e01d910f..befe67c1d0b4a 100644 --- a/primitives/api/test/tests/ui/mock_advanced_block_id_by_value.stderr +++ b/primitives/api/test/tests/ui/mock_advanced_block_id_by_value.stderr @@ -10,4 +10,4 @@ error: `BlockId` needs to be taken by reference and not by value! 
19 | | } | |_^ | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::mock_impl_runtime_apis` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/primitives/api/test/tests/ui/mock_only_self_reference.stderr b/primitives/api/test/tests/ui/mock_only_self_reference.stderr index 7385fe4745989..1b1d2553940a5 100644 --- a/primitives/api/test/tests/ui/mock_only_self_reference.stderr +++ b/primitives/api/test/tests/ui/mock_only_self_reference.stderr @@ -36,7 +36,7 @@ error[E0053]: method `Api_test_runtime_api_impl` has an incompatible type for tr | = note: expected fn pointer `fn(&MockApi, &BlockId, Extrinsic>>, ExecutionContext, Option, Vec<_>) -> Result<_, _>` found fn pointer `fn(&MockApi, &BlockId, Extrinsic>>, ExecutionContext, Option<()>, Vec<_>) -> Result<_, _>` - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::mock_impl_runtime_apis` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0053]: method `Api_test2_runtime_api_impl` has an incompatible type for trait --> $DIR/mock_only_self_reference.rs:12:1 @@ -64,4 +64,4 @@ error[E0053]: method `Api_test2_runtime_api_impl` has an incompatible type for t | = note: expected fn pointer `fn(&MockApi, &BlockId, Extrinsic>>, ExecutionContext, Option, Vec<_>) -> Result<_, _>` found fn pointer `fn(&MockApi, &BlockId, Extrinsic>>, ExecutionContext, Option<()>, Vec<_>) -> Result<_, _>` - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::mock_impl_runtime_apis` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr 
b/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr index a0a16c4a493db..063cbff60f81e 100644 --- a/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr +++ b/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr @@ -38,7 +38,7 @@ error[E0053]: method `Api_test_runtime_api_impl` has an incompatible type for tr | = note: expected fn pointer `fn(&RuntimeApiImpl<__SR_API_BLOCK__, RuntimeApiImplCall>, &BlockId<__SR_API_BLOCK__>, ExecutionContext, std::option::Option, Vec<_>) -> Result<_, _>` found fn pointer `fn(&RuntimeApiImpl<__SR_API_BLOCK__, RuntimeApiImplCall>, &BlockId<__SR_API_BLOCK__>, ExecutionContext, std::option::Option<&u64>, Vec<_>) -> Result<_, _>` - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::impl_runtime_apis` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0308]: mismatched types --> $DIR/type_reference_in_impl_runtime_apis_call.rs:17:1 @@ -52,7 +52,7 @@ error[E0308]: mismatched types 35 | | } | |_^ expected `u64`, found `&u64` | - = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the macro `sp_api::impl_runtime_apis` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0308]: mismatched types --> $DIR/type_reference_in_impl_runtime_apis_call.rs:19:11 diff --git a/primitives/io/src/lib.rs b/primitives/io/src/lib.rs index 5faeb59c72db6..78e6f0c847952 100644 --- a/primitives/io/src/lib.rs +++ b/primitives/io/src/lib.rs @@ -73,6 +73,7 @@ mod batch_verifier; #[cfg(feature = "std")] use batch_verifier::BatchVerifier; +#[cfg(feature = "std")] const LOG_TARGET: &str = "runtime::io"; /// Error verifying ECDSA signature @@ -1481,21 +1482,17 @@ mod allocator_impl { #[panic_handler] #[no_mangle] pub fn panic(info: &core::panic::PanicInfo) -> ! 
{ - unsafe { - let message = sp_std::alloc::format!("{}", info); - logging::log(LogLevel::Error, "runtime", message.as_bytes()); - core::arch::wasm32::unreachable(); - } + let message = sp_std::alloc::format!("{}", info); + logging::log(LogLevel::Error, "runtime", message.as_bytes()); + core::arch::wasm32::unreachable(); } /// A default OOM handler for WASM environment. #[cfg(all(not(feature = "disable_oom"), not(feature = "std")))] #[alloc_error_handler] pub fn oom(_: core::alloc::Layout) -> ! { - unsafe { - logging::log(LogLevel::Error, "runtime", b"Runtime memory exhausted. Aborting"); - core::arch::wasm32::unreachable(); - } + logging::log(LogLevel::Error, "runtime", b"Runtime memory exhausted. Aborting"); + core::arch::wasm32::unreachable(); } /// Type alias for Externalities implementation used in tests. diff --git a/primitives/runtime-interface/tests/ui/pass_by_enum_with_struct.stderr b/primitives/runtime-interface/tests/ui/pass_by_enum_with_struct.stderr index c7ed1af3b1a03..44fb5a244e03d 100644 --- a/primitives/runtime-interface/tests/ui/pass_by_enum_with_struct.stderr +++ b/primitives/runtime-interface/tests/ui/pass_by_enum_with_struct.stderr @@ -4,4 +4,4 @@ error: `PassByEnum` only supports enums as input type. 3 | #[derive(PassByEnum)] | ^^^^^^^^^^ | - = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the derive macro `PassByEnum` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/primitives/runtime-interface/tests/ui/pass_by_enum_with_value_variant.stderr b/primitives/runtime-interface/tests/ui/pass_by_enum_with_value_variant.stderr index f6c85ed2bba3e..633dc3bbe8bc4 100644 --- a/primitives/runtime-interface/tests/ui/pass_by_enum_with_value_variant.stderr +++ b/primitives/runtime-interface/tests/ui/pass_by_enum_with_value_variant.stderr @@ -4,4 +4,4 @@ error: `PassByEnum` only supports unit variants. 
3 | #[derive(PassByEnum)] | ^^^^^^^^^^ | - = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the derive macro `PassByEnum` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/primitives/runtime-interface/tests/ui/pass_by_inner_with_two_fields.stderr b/primitives/runtime-interface/tests/ui/pass_by_inner_with_two_fields.stderr index 9afbce76f0c23..0ffee00210e79 100644 --- a/primitives/runtime-interface/tests/ui/pass_by_inner_with_two_fields.stderr +++ b/primitives/runtime-interface/tests/ui/pass_by_inner_with_two_fields.stderr @@ -4,4 +4,4 @@ error: Only newtype/one field structs are supported by `PassByInner`! 3 | #[derive(PassByInner)] | ^^^^^^^^^^^ | - = note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info) + = note: this error originates in the derive macro `PassByInner` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 65c063fde1696..6d79d740dc4e1 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -1474,6 +1474,7 @@ macro_rules! impl_opaque_keys { #[macro_export] #[cfg(not(feature = "std"))] +#[doc(hidden)] macro_rules! 
impl_opaque_keys { { $( #[ $attr:meta ] )* diff --git a/primitives/state-machine/src/backend.rs b/primitives/state-machine/src/backend.rs index 1b1a732f8d0fc..7dcf92b06de06 100644 --- a/primitives/state-machine/src/backend.rs +++ b/primitives/state-machine/src/backend.rs @@ -21,9 +21,9 @@ use crate::{ trie_backend::TrieBackend, trie_backend_essence::TrieBackendStorage, ChildStorageCollection, StorageCollection, StorageKey, StorageValue, UsageInfo, }; -use codec::{Decode, Encode}; +use codec::Encode; use hash_db::Hasher; -use sp_core::storage::{well_known_keys, ChildInfo, TrackedStorageKey}; +use sp_core::storage::{ChildInfo, TrackedStorageKey}; #[cfg(feature = "std")] use sp_core::traits::RuntimeCode; use sp_std::vec::Vec; @@ -330,7 +330,11 @@ impl<'a, B: Backend, H: Hasher> sp_core::traits::FetchRuntimeCode for BackendRuntimeCode<'a, B, H> { fn fetch_runtime_code<'b>(&'b self) -> Option> { - self.backend.storage(well_known_keys::CODE).ok().flatten().map(Into::into) + self.backend + .storage(sp_core::storage::well_known_keys::CODE) + .ok() + .flatten() + .map(Into::into) } } @@ -348,17 +352,17 @@ where pub fn runtime_code(&self) -> Result { let hash = self .backend - .storage_hash(well_known_keys::CODE) + .storage_hash(sp_core::storage::well_known_keys::CODE) .ok() .flatten() .ok_or("`:code` hash not found")? .encode(); let heap_pages = self .backend - .storage(well_known_keys::HEAP_PAGES) + .storage(sp_core::storage::well_known_keys::HEAP_PAGES) .ok() .flatten() - .and_then(|d| Decode::decode(&mut &d[..]).ok()); + .and_then(|d| codec::Decode::decode(&mut &d[..]).ok()); Ok(RuntimeCode { code_fetcher: self, hash, heap_pages }) } diff --git a/primitives/state-machine/src/ext.rs b/primitives/state-machine/src/ext.rs index c9693ca6a88c1..c20d8492fb1f3 100644 --- a/primitives/state-machine/src/ext.rs +++ b/primitives/state-machine/src/ext.rs @@ -17,17 +17,15 @@ //! Concrete externalities implementation. 
-use crate::{ - backend::Backend, overlayed_changes::OverlayedExtensions, IndexOperation, OverlayedChanges, - StorageKey, StorageValue, -}; +#[cfg(feature = "std")] +use crate::overlayed_changes::OverlayedExtensions; +use crate::{backend::Backend, IndexOperation, OverlayedChanges, StorageKey, StorageValue}; use codec::{Decode, Encode, EncodeAppend}; use hash_db::Hasher; -use sp_core::{ - hexdisplay::HexDisplay, - storage::{well_known_keys::is_child_storage_key, ChildInfo, TrackedStorageKey}, -}; -use sp_externalities::{Extension, ExtensionStore, Extensions, Externalities}; +#[cfg(feature = "std")] +use sp_core::hexdisplay::HexDisplay; +use sp_core::storage::{well_known_keys::is_child_storage_key, ChildInfo, TrackedStorageKey}; +use sp_externalities::{Extension, ExtensionStore, Externalities}; use sp_trie::{empty_child_trie_root, trie_types::Layout}; #[cfg(feature = "std")] @@ -37,7 +35,7 @@ use sp_std::{ any::{Any, TypeId}, boxed::Box, cmp::Ordering, - fmt, vec, + vec, vec::Vec, }; #[cfg(feature = "std")] @@ -72,8 +70,8 @@ pub enum Error { } #[cfg(feature = "std")] -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { +impl std::fmt::Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match *self { Error::Backend(ref e) => write!(f, "Storage backend error: {}", e), Error::Executor(ref e) => write!(f, "Sub-call execution error: {}", e), @@ -139,7 +137,7 @@ where storage_transaction_cache: &'a mut StorageTransactionCache, backend: &'a B, changes_trie_state: Option>, - extensions: Option<&'a mut Extensions>, + extensions: Option<&'a mut sp_externalities::Extensions>, ) -> Self { Self { overlay, diff --git a/primitives/state-machine/src/lib.rs b/primitives/state-machine/src/lib.rs index 05d2c6d20ccee..7bd0c645f3c00 100644 --- a/primitives/state-machine/src/lib.rs +++ b/primitives/state-machine/src/lib.rs @@ -55,11 +55,19 @@ pub use tracing::trace; #[cfg(not(feature = "std"))] #[macro_export] 
macro_rules! warn { - (target: $target:expr, $($arg:tt)+) => { - () + (target: $target:expr, $message:expr $( , $arg:ident )* $( , )?) => { + { + $( + let _ = &$arg; + )* + } }; - ($($arg:tt)+) => { - () + ($message:expr, $( $arg:expr, )*) => { + { + $( + let _ = &$arg; + )* + } }; } @@ -68,11 +76,12 @@ macro_rules! warn { #[cfg(not(feature = "std"))] #[macro_export] macro_rules! debug { - (target: $target:expr, $($arg:tt)+) => { - () - }; - ($($arg:tt)+) => { - () + (target: $target:expr, $message:expr $( , $arg:ident )* $( , )?) => { + { + $( + let _ = &$arg; + )* + } }; } diff --git a/primitives/state-machine/src/overlayed_changes/mod.rs b/primitives/state-machine/src/overlayed_changes/mod.rs index a0558e06a380e..cf7af1c9a6f3a 100644 --- a/primitives/state-machine/src/overlayed_changes/mod.rs +++ b/primitives/state-machine/src/overlayed_changes/mod.rs @@ -21,15 +21,7 @@ mod changeset; mod offchain; use self::changeset::OverlayedChangeSet; -use crate::{backend::Backend, stats::StateMachineStats}; -pub use offchain::OffchainOverlayedChanges; -use sp_std::{ - any::{Any, TypeId}, - boxed::Box, - vec::Vec, -}; - -use crate::{changes_trie::BlockNumber, DefaultError}; +use crate::{backend::Backend, changes_trie::BlockNumber, stats::StateMachineStats, DefaultError}; #[cfg(feature = "std")] use crate::{ changes_trie::{build_changes_trie, State as ChangesTrieState}, @@ -37,16 +29,23 @@ use crate::{ }; use codec::{Decode, Encode}; use hash_db::Hasher; +pub use offchain::OffchainOverlayedChanges; use sp_core::{ offchain::OffchainOverlayedChange, storage::{well_known_keys::EXTRINSIC_INDEX, ChildInfo}, }; +#[cfg(feature = "std")] use sp_externalities::{Extension, Extensions}; #[cfg(not(feature = "std"))] -use sp_std::collections::btree_map::{BTreeMap as Map, Entry as MapEntry}; -use sp_std::collections::btree_set::BTreeSet; +use sp_std::collections::btree_map::BTreeMap as Map; +use sp_std::{collections::btree_set::BTreeSet, vec::Vec}; #[cfg(feature = "std")] use 
std::collections::{hash_map::Entry as MapEntry, HashMap as Map}; +#[cfg(feature = "std")] +use std::{ + any::{Any, TypeId}, + boxed::Box, +}; pub use self::changeset::{AlreadyInRuntime, NoOpenTransaction, NotInRuntime, OverlayedValue}; @@ -581,6 +580,8 @@ impl OverlayedChanges { self.changes_trie_root(backend, changes_trie_state, parent_hash, false, &mut cache) .map_err(|_| "Failed to generate changes trie transaction")?; } + #[cfg(not(feature = "std"))] + let _ = parent_hash; #[cfg(feature = "std")] let changes_trie_transaction = cache @@ -758,6 +759,7 @@ where /// An overlayed extension is either a mutable reference /// or an owned extension. +#[cfg(feature = "std")] pub enum OverlayedExtension<'a> { MutRef(&'a mut Box), Owned(Box), @@ -770,10 +772,12 @@ pub enum OverlayedExtension<'a> { /// as owned references. After the execution of a runtime function, we /// can safely drop this object while not having modified the original /// list. +#[cfg(feature = "std")] pub struct OverlayedExtensions<'a> { extensions: Map>, } +#[cfg(feature = "std")] impl<'a> OverlayedExtensions<'a> { /// Create a new instance of overalyed extensions from the given extensions. pub fn new(extensions: &'a mut Extensions) -> Self { diff --git a/primitives/state-machine/src/trie_backend_essence.rs b/primitives/state-machine/src/trie_backend_essence.rs index 557a098fbaf79..6c575f0d76bc7 100644 --- a/primitives/state-machine/src/trie_backend_essence.rs +++ b/primitives/state-machine/src/trie_backend_essence.rs @@ -24,7 +24,7 @@ use hash_db::{self, Hasher, Prefix}; #[cfg(feature = "std")] use parking_lot::RwLock; use sp_core::storage::ChildInfo; -use sp_std::{boxed::Box, ops::Deref, vec::Vec}; +use sp_std::{boxed::Box, vec::Vec}; use sp_trie::{ empty_child_trie_root, read_child_trie_value, read_trie_value, trie_types::{Layout, TrieDB, TrieError}, @@ -37,8 +37,11 @@ use std::sync::Arc; #[cfg(not(feature = "std"))] macro_rules! 
format { - ($($arg:tt)+) => { - crate::DefaultError + ( $message:expr, $( $arg:expr )* ) => { + { + $( let _ = &$arg; )* + crate::DefaultError + } }; } @@ -488,7 +491,7 @@ impl TrieBackendStorage for Arc> { type Overlay = PrefixedMemoryDB; fn get(&self, key: &H::Out, prefix: Prefix) -> Result> { - Storage::::get(self.deref(), key, prefix) + Storage::::get(std::ops::Deref::deref(self), key, prefix) } } diff --git a/test-utils/runtime/client/src/trait_tests.rs b/test-utils/runtime/client/src/trait_tests.rs index c5e0ba49fcf5b..938aeda36d319 100644 --- a/test-utils/runtime/client/src/trait_tests.rs +++ b/test-utils/runtime/client/src/trait_tests.rs @@ -67,7 +67,6 @@ where .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); - #[allow(deprecated)] assert_eq!(blockchain.leaves().unwrap(), vec![a2.hash()]); // A2 -> A3 diff --git a/utils/wasm-builder/src/wasm_project.rs b/utils/wasm-builder/src/wasm_project.rs index 868692d341ff0..3806a890a1064 100644 --- a/utils/wasm-builder/src/wasm_project.rs +++ b/utils/wasm-builder/src/wasm_project.rs @@ -436,6 +436,10 @@ fn build_project(project: &Path, default_rustflags: &str, cargo_cmd: CargoComman // exclusive). The runner project is created in `CARGO_TARGET_DIR` and executing it will // create a sub target directory inside of `CARGO_TARGET_DIR`. .env_remove("CARGO_TARGET_DIR") + // As we are being called inside a build-script, this env variable is set. However, we set + // our own `RUSTFLAGS` and thus, we need to remove this. Otherwise cargo favors this + // env variable. + .env_remove("CARGO_ENCODED_RUSTFLAGS") // We don't want to call ourselves recursively .env(crate::SKIP_BUILD_ENV, "");