diff --git a/prdoc/pr_3616.prdoc b/prdoc/pr_3616.prdoc new file mode 100644 index 000000000000..fcf068dcd173 --- /dev/null +++ b/prdoc/pr_3616.prdoc @@ -0,0 +1,28 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: "Benchmarking pallet V2 syntax extension: pov_mode attribute" + +doc: + - audience: Runtime Dev + description: | + Adds the `pov_mode` attribute from the V1 benchmarking syntax to the V2 syntax. This allows to + override the default PoV mode (`MaxEncodedLen`) to either `Measured` or `Ignored`. It can be + overridden for a whole benchmark, a key prefix of a specific key itself. + + Example syntax looks like this: + ```rust + #[benchmark(pov_mode = Measured { + Pallet: Measured, + Pallet::Storage: MaxEncodedLen, + })] + fn do_some() { + .. + } + ``` + +crates: + - name: frame-support-procedural + bump: minor + - name: frame-support + bump: minor diff --git a/substrate/frame/benchmarking/pov/src/benchmarking.rs b/substrate/frame/benchmarking/pov/src/benchmarking.rs index 84d81890b175..691e2fd05859 100644 --- a/substrate/frame/benchmarking/pov/src/benchmarking.rs +++ b/substrate/frame/benchmarking/pov/src/benchmarking.rs @@ -21,54 +21,78 @@ use super::*; +use frame_benchmarking::v2::*; use frame_support::traits::UnfilteredDispatchable; use frame_system::{Pallet as System, RawOrigin}; use sp_runtime::traits::Hash; -frame_benchmarking::benchmarks! { - storage_single_value_read { +#[benchmarks] +mod benchmarks { + use super::*; + + #[benchmark] + fn storage_single_value_read() { Value::::put(123); - }: { - assert_eq!(Value::::get(), Some(123)); + + #[block] + { + assert_eq!(Value::::get(), Some(123)); + } } - #[pov_mode = Ignored] - storage_single_value_ignored_read { + #[benchmark(pov_mode = Ignored)] + fn storage_single_value_ignored_read() { Value::::put(123); - }: { - assert_eq!(Value::::get(), Some(123)); + #[block] + { + assert_eq!(Value::::get(), Some(123)); + } } - #[pov_mode = MaxEncodedLen { + #[benchmark(pov_mode = MaxEncodedLen { Pov::Value2: Ignored - }] - storage_single_value_ignored_some_read { + })] + fn storage_single_value_ignored_some_read() { Value::::put(123); Value2::::put(123); - }: { - assert_eq!(Value::::get(), Some(123)); - assert_eq!(Value2::::get(), Some(123)); + + #[block] + { + assert_eq!(Value::::get(), Some(123)); + assert_eq!(Value2::::get(), Some(123)); + } } - storage_single_value_read_twice { + #[benchmark] + fn storage_single_value_read_twice() { Value::::put(123); - }: { - assert_eq!(Value::::get(), Some(123)); - assert_eq!(Value::::get(), Some(123)); + + #[block] + { + assert_eq!(Value::::get(), Some(123)); + assert_eq!(Value::::get(), Some(123)); + } } - storage_single_value_write { - }: { - Value::::put(123); - } verify { + #[benchmark] + fn storage_single_value_write() { + #[block] + { + Value::::put(123); + } + assert_eq!(Value::::get(), Some(123)); } - storage_single_value_kill { + #[benchmark] + fn storage_single_value_kill() { Value::::put(123); - }: { - Value::::kill(); - } verify { + + #[block] + { + Value::::kill(); + } + assert!(!Value::::exists()); } @@ -78,263 +102,297 @@ frame_benchmarking::benchmarks! { // created. Then the one value is read from the map. This demonstrates that the number of other // nodes in the Trie influences the proof size. The number of inserted nodes can be interpreted // as the number of `StorageMap`/`StorageValue` in the whole runtime. 
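The three `*_additional_layers` benchmarks below make the comment above concrete: 16-256 sibling storage items give roughly two extra trie layers in a `Measured` proof, 256-4096 give three, and 4096-65536 give four. A minimal sketch of that rule of thumb, assuming the radix-16 state trie; the helper is made up for illustration and is not part of this PR:

```rust
/// Illustrative helper (not part of this PR): with a radix-16 state trie, `k` sibling
/// storage items cost roughly as many extra trie layers in a `Measured` proof as `k`
/// has hexadecimal digits.
fn additional_layers(other_items: u32) -> u32 {
	other_items.ilog(16) + 1
}

fn main() {
	assert_eq!(additional_layers(1 << 4), 2); // 16..256 items      -> two extra layers
	assert_eq!(additional_layers(1 << 8), 3); // 256..4096 items    -> three extra layers
	assert_eq!(additional_layers(1 << 12), 4); // 4096..65536 items -> four extra layers
}
```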
- #[pov_mode = Measured] - storage_1m_map_read_one_value_two_additional_layers { - (0..(1<<10)).for_each(|i| Map1M::::insert(i, i)); + #[benchmark(pov_mode = Measured)] + fn storage_1m_map_read_one_value_two_additional_layers() { + (0..(1 << 10)).for_each(|i| Map1M::::insert(i, i)); // Assume there are 16-256 other storage items. - (0..(1u32<<4)).for_each(|i| { + (0..(1u32 << 4)).for_each(|i| { let k = T::Hashing::hash(&i.to_be_bytes()); frame_support::storage::unhashed::put(k.as_ref(), &i); }); - }: { - assert_eq!(Map1M::::get(1<<9), Some(1<<9)); + + #[block] + { + assert_eq!(Map1M::::get(1 << 9), Some(1 << 9)); + } } - #[pov_mode = Measured] - storage_1m_map_read_one_value_three_additional_layers { - (0..(1<<10)).for_each(|i| Map1M::::insert(i, i)); + #[benchmark(pov_mode = Measured)] + fn storage_1m_map_read_one_value_three_additional_layers() { + (0..(1 << 10)).for_each(|i| Map1M::::insert(i, i)); // Assume there are 256-4096 other storage items. - (0..(1u32<<8)).for_each(|i| { + (0..(1u32 << 8)).for_each(|i| { let k = T::Hashing::hash(&i.to_be_bytes()); frame_support::storage::unhashed::put(k.as_ref(), &i); }); - }: { - assert_eq!(Map1M::::get(1<<9), Some(1<<9)); + + #[block] + { + assert_eq!(Map1M::::get(1 << 9), Some(1 << 9)); + } } - #[pov_mode = Measured] - storage_1m_map_read_one_value_four_additional_layers { - (0..(1<<10)).for_each(|i| Map1M::::insert(i, i)); + #[benchmark(pov_mode = Measured)] + fn storage_1m_map_read_one_value_four_additional_layers() { + (0..(1 << 10)).for_each(|i| Map1M::::insert(i, i)); // Assume there are 4096-65536 other storage items. - (0..(1u32<<12)).for_each(|i| { + (0..(1u32 << 12)).for_each(|i| { let k = T::Hashing::hash(&i.to_be_bytes()); frame_support::storage::unhashed::put(k.as_ref(), &i); }); - }: { - assert_eq!(Map1M::::get(1<<9), Some(1<<9)); + + #[block] + { + assert_eq!(Map1M::::get(1 << 9), Some(1 << 9)); + } } // Reads from both storage maps each `n` and `m` times. Should result in two linear components. - storage_map_read_per_component { - let n in 0 .. 100; - let m in 0 .. 100; + #[benchmark] + fn storage_map_read_per_component(n: Linear<0, 100>, m: Linear<0, 100>) { + (0..m * 10).for_each(|i| Map1M::::insert(i, i)); + (0..n * 10).for_each(|i| Map16M::::insert(i, i)); - (0..m*10).for_each(|i| Map1M::::insert(i, i)); - (0..n*10).for_each(|i| Map16M::::insert(i, i)); - }: { - (0..m).for_each(|i| - assert_eq!(Map1M::::get(i*10), Some(i*10))); - (0..n).for_each(|i| - assert_eq!(Map16M::::get(i*10), Some(i*10))); + #[block] + { + (0..m).for_each(|i| assert_eq!(Map1M::::get(i * 10), Some(i * 10))); + (0..n).for_each(|i| assert_eq!(Map16M::::get(i * 10), Some(i * 10))); + } } - #[pov_mode = MaxEncodedLen { + #[benchmark(pov_mode = MaxEncodedLen { Pov::Map1M: Ignored - }] - storage_map_read_per_component_one_ignored { - let n in 0 .. 100; - let m in 0 .. 100; + })] + fn storage_map_read_per_component_one_ignored(n: Linear<0, 100>, m: Linear<0, 100>) { + (0..m * 10).for_each(|i| Map1M::::insert(i, i)); + (0..n * 10).for_each(|i| Map16M::::insert(i, i)); - (0..m*10).for_each(|i| Map1M::::insert(i, i)); - (0..n*10).for_each(|i| Map16M::::insert(i, i)); - }: { - (0..m).for_each(|i| - assert_eq!(Map1M::::get(i*10), Some(i*10))); - (0..n).for_each(|i| - assert_eq!(Map16M::::get(i*10), Some(i*10))); + #[block] + { + (0..m).for_each(|i| assert_eq!(Map1M::::get(i * 10), Some(i * 10))); + (0..n).for_each(|i| assert_eq!(Map16M::::get(i * 10), Some(i * 10))); + } } // Reads the same value from a storage map. Should not result in a component. 
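The per-key overrides above (`Pov::Value2: Ignored`, `Pov::Map1M: Ignored`) show the V2 form of the attribute described in the prdoc. For a downstream pallet the same attribute composes with the rest of the V2 syntax. A minimal sketch, assuming a hypothetical pallet `Example` with a `Whitelist` storage map (none of these names exist in this PR), placed inside a `#[benchmarks]` module like the one above:

```rust
// Hypothetical pallet `Example` with a `Whitelist` storage map; illustrative only.
// Every access in this benchmark is proven as `Measured`, except `Example::Whitelist`,
// which keeps the default `MaxEncodedLen` bound.
#[benchmark(pov_mode = Measured {
	Example::Whitelist: MaxEncodedLen
})]
fn whitelist_lookup() {
	Whitelist::<T>::insert(0u32, ());

	#[block]
	{
		assert!(Whitelist::<T>::contains_key(0u32));
	}
}
```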
- storage_1m_map_one_entry_repeated_read { - let n in 0 .. 100; + #[benchmark] + fn storage_1m_map_one_entry_repeated_read(n: Linear<0, 100>) { Map1M::::insert(0, 0); - }: { - (0..n).for_each(|i| - assert_eq!(Map1M::::get(0), Some(0))); + + #[block] + { + (0..n).for_each(|_| assert_eq!(Map1M::::get(0), Some(0))); + } } // Reads the same values from a storage map. Should result in a `1x` linear component. - storage_1m_map_multiple_entry_repeated_read { - let n in 0 .. 100; + #[benchmark] + fn storage_1m_map_multiple_entry_repeated_read(n: Linear<0, 100>) { (0..n).for_each(|i| Map1M::::insert(i, i)); - }: { - (0..n).for_each(|i| { - // Reading the same value 10 times does nothing. - (0..10).for_each(|j| - assert_eq!(Map1M::::get(i), Some(i))); - }); + + #[block] + { + (0..n).for_each(|i| { + // Reading the same value 10 times does nothing. + (0..10).for_each(|_| assert_eq!(Map1M::::get(i), Some(i))); + }); + } } - storage_1m_double_map_read_per_component { - let n in 0 .. 1024; - (0..(1<<10)).for_each(|i| DoubleMap1M::::insert(i, i, i)); - }: { - (0..n).for_each(|i| - assert_eq!(DoubleMap1M::::get(i, i), Some(i))); + #[benchmark] + fn storage_1m_double_map_read_per_component(n: Linear<0, 1024>) { + (0..(1 << 10)).for_each(|i| DoubleMap1M::::insert(i, i, i)); + + #[block] + { + (0..n).for_each(|i| assert_eq!(DoubleMap1M::::get(i, i), Some(i))); + } } - storage_value_bounded_read { - }: { - assert!(BoundedValue::::get().is_none()); + #[benchmark] + fn storage_value_bounded_read() { + #[block] + { + assert!(BoundedValue::::get().is_none()); + } } // Reading unbounded values will produce no mathematical worst case PoV size for this component. - storage_value_unbounded_read { - }: { - assert!(UnboundedValue::::get().is_none()); + #[benchmark] + fn storage_value_unbounded_read() { + #[block] + { + assert!(UnboundedValue::::get().is_none()); + } } - #[pov_mode = Ignored] - storage_value_unbounded_ignored_read { - }: { - assert!(UnboundedValue::::get().is_none()); + #[benchmark(pov_mode = Ignored)] + fn storage_value_unbounded_ignored_read() { + #[block] + { + assert!(UnboundedValue::::get().is_none()); + } } // Same as above, but we still expect a mathematical worst case PoV size for the bounded one. - storage_value_bounded_and_unbounded_read { + #[benchmark] + fn storage_value_bounded_and_unbounded_read() { (0..1024).for_each(|i| Map1M::::insert(i, i)); - }: { - assert!(UnboundedValue::::get().is_none()); - assert!(BoundedValue::::get().is_none()); + #[block] + { + assert!(UnboundedValue::::get().is_none()); + assert!(BoundedValue::::get().is_none()); + } } - #[pov_mode = Measured] - measured_storage_value_read_linear_size { - let l in 0 .. 1<<22; + #[benchmark(pov_mode = Measured)] + fn measured_storage_value_read_linear_size(l: Linear<0, { 1 << 22 }>) { let v: sp_runtime::BoundedVec = sp_std::vec![0u8; l as usize].try_into().unwrap(); LargeValue::::put(&v); - }: { - assert!(LargeValue::::get().is_some()); + #[block] + { + assert!(LargeValue::::get().is_some()); + } } - #[pov_mode = MaxEncodedLen] - mel_storage_value_read_linear_size { - let l in 0 .. 1<<22; + #[benchmark(pov_mode = MaxEncodedLen)] + fn mel_storage_value_read_linear_size(l: Linear<0, { 1 << 22 }>) { let v: sp_runtime::BoundedVec = sp_std::vec![0u8; l as usize].try_into().unwrap(); LargeValue::::put(&v); - }: { - assert!(LargeValue::::get().is_some()); + #[block] + { + assert!(LargeValue::::get().is_some()); + } } - #[pov_mode = Measured] - measured_storage_double_value_read_linear_size { - let l in 0 .. 
1<<22; + #[benchmark(pov_mode = Measured)] + fn measured_storage_double_value_read_linear_size(l: Linear<0, { 1 << 22 }>) { let v: sp_runtime::BoundedVec = sp_std::vec![0u8; l as usize].try_into().unwrap(); LargeValue::::put(&v); LargeValue2::::put(&v); - }: { - assert!(LargeValue::::get().is_some()); - assert!(LargeValue2::::get().is_some()); + #[block] + { + assert!(LargeValue::::get().is_some()); + assert!(LargeValue2::::get().is_some()); + } } - #[pov_mode = MaxEncodedLen] - mel_storage_double_value_read_linear_size { - let l in 0 .. 1<<22; + #[benchmark(pov_mode = MaxEncodedLen)] + fn mel_storage_double_value_read_linear_size(l: Linear<0, { 1 << 22 }>) { let v: sp_runtime::BoundedVec = sp_std::vec![0u8; l as usize].try_into().unwrap(); LargeValue::::put(&v); LargeValue2::::put(&v); - }: { - assert!(LargeValue::::get().is_some()); - assert!(LargeValue2::::get().is_some()); + #[block] + { + assert!(LargeValue::::get().is_some()); + assert!(LargeValue2::::get().is_some()); + } } - #[pov_mode = MaxEncodedLen { + #[benchmark(pov_mode = MaxEncodedLen { Pov::LargeValue2: Measured - }] - mel_mixed_storage_double_value_read_linear_size { - let l in 0 .. 1<<22; + })] + fn mel_mixed_storage_double_value_read_linear_size(l: Linear<0, { 1 << 22 }>) { let v: sp_runtime::BoundedVec = sp_std::vec![0u8; l as usize].try_into().unwrap(); LargeValue::::put(&v); LargeValue2::::put(&v); - }: { - assert!(LargeValue::::get().is_some()); - assert!(LargeValue2::::get().is_some()); + #[block] + { + assert!(LargeValue::::get().is_some()); + assert!(LargeValue2::::get().is_some()); + } } - #[pov_mode = Measured { + #[benchmark(pov_mode = Measured { Pov::LargeValue2: MaxEncodedLen - }] - measured_mixed_storage_double_value_read_linear_size { - let l in 0 .. 1<<22; + })] + fn measured_mixed_storage_double_value_read_linear_size(l: Linear<0, { 1 << 22 }>) { let v: sp_runtime::BoundedVec = sp_std::vec![0u8; l as usize].try_into().unwrap(); LargeValue::::put(&v); LargeValue2::::put(&v); - }: { - assert!(LargeValue::::get().is_some()); - assert!(LargeValue2::::get().is_some()); + #[block] + { + assert!(LargeValue::::get().is_some()); + assert!(LargeValue2::::get().is_some()); + } } - #[pov_mode = Measured] - storage_map_unbounded_both_measured_read { - let i in 0 .. 1000; - + #[benchmark(pov_mode = Measured)] + fn storage_map_unbounded_both_measured_read(i: Linear<0, 1000>) { UnboundedMap::::insert(i, sp_std::vec![0; i as usize]); UnboundedMap2::::insert(i, sp_std::vec![0; i as usize]); - }: { - assert!(UnboundedMap::::get(i).is_some()); - assert!(UnboundedMap2::::get(i).is_some()); + #[block] + { + assert!(UnboundedMap::::get(i).is_some()); + assert!(UnboundedMap2::::get(i).is_some()); + } } - #[pov_mode = MaxEncodedLen { + #[benchmark(pov_mode = MaxEncodedLen { Pov::UnboundedMap: Measured - }] - storage_map_partial_unbounded_read { - let i in 0 .. 1000; - + })] + fn storage_map_partial_unbounded_read(i: Linear<0, 1000>) { Map1M::::insert(i, 0); UnboundedMap::::insert(i, sp_std::vec![0; i as usize]); - }: { - assert!(Map1M::::get(i).is_some()); - assert!(UnboundedMap::::get(i).is_some()); + #[block] + { + assert!(Map1M::::get(i).is_some()); + assert!(UnboundedMap::::get(i).is_some()); + } } - #[pov_mode = MaxEncodedLen { + #[benchmark(pov_mode = MaxEncodedLen { Pov::UnboundedMap: Ignored - }] - storage_map_partial_unbounded_ignored_read { - let i in 0 .. 
1000; - + })] + fn storage_map_partial_unbounded_ignored_read(i: Linear<0, 1000>) { Map1M::::insert(i, 0); UnboundedMap::::insert(i, sp_std::vec![0; i as usize]); - }: { - assert!(Map1M::::get(i).is_some()); - assert!(UnboundedMap::::get(i).is_some()); + #[block] + { + assert!(Map1M::::get(i).is_some()); + assert!(UnboundedMap::::get(i).is_some()); + } } // Emitting an event will not incur any PoV. - emit_event { + #[benchmark] + fn emit_event() { // Emit a single event. - let call = Call::::emit_event { }; - }: { call.dispatch_bypass_filter(RawOrigin::Root.into()).unwrap(); } - verify { + let call = Call::::emit_event {}; + #[block] + { + call.dispatch_bypass_filter(RawOrigin::Root.into()).unwrap(); + } assert_eq!(System::::events().len(), 1); } // A No-OP will not incur any PoV. - noop { - let call = Call::::noop { }; - }: { - call.dispatch_bypass_filter(RawOrigin::Root.into()).unwrap(); + #[benchmark] + fn noop() { + let call = Call::::noop {}; + #[block] + { + call.dispatch_bypass_filter(RawOrigin::Root.into()).unwrap(); + } } - storage_iteration { + #[benchmark] + fn storage_iteration() { for i in 0..65000 { UnboundedMapTwox::::insert(i, sp_std::vec![0; 64]); } - }: { - for (key, value) in UnboundedMapTwox::::iter() { - unsafe { - core::ptr::read_volatile(&key); - core::ptr::read_volatile(value.as_ptr()); + #[block] + { + for (key, value) in UnboundedMapTwox::::iter() { + unsafe { + core::ptr::read_volatile(&key); + core::ptr::read_volatile(value.as_ptr()); + } } } } - impl_benchmark_test_suite!( - Pallet, - mock::new_test_ext(), - mock::Test, - ); + impl_benchmark_test_suite!(Pallet, super::mock::new_test_ext(), super::mock::Test,); } #[cfg(test)] diff --git a/substrate/frame/benchmarking/pov/src/weights.rs b/substrate/frame/benchmarking/pov/src/weights.rs index d84ac88c98f0..c4fc03d1dd93 100644 --- a/substrate/frame/benchmarking/pov/src/weights.rs +++ b/substrate/frame/benchmarking/pov/src/weights.rs @@ -15,38 +15,36 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Autogenerated weights for frame_benchmarking_pallet_pov +//! Autogenerated weights for `frame_benchmarking_pallet_pov` //! -//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-04-12, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 32.0.0 +//! DATE: 2024-03-08, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` //! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `i9`, CPU: `13th Gen Intel(R) Core(TM) i9-13900K` -//! EXECUTION: None, WASM-EXECUTION: Compiled, CHAIN: None, DB CACHE: 1024 +//! HOSTNAME: `Olivers-MBP`, CPU: `` +//! WASM-EXECUTION: `Compiled`, CHAIN: `None`, DB CACHE: `1024` // Executed Command: -// ./target/release/substrate +// target/release/substrate-node // benchmark // pallet -// --dev // --pallet // frame-benchmarking-pallet-pov // --extrinsic // -// --steps -// 50 -// --repeat -// 20 -// --template=.maintain/frame-weight-template.hbs -// --output=frame/benchmarking/pov/src/weights.rs +// --output +// substrate/frame/benchmarking/pov/src/weights.rs +// --template +// substrate/.maintain/frame-weight-template.hbs #![cfg_attr(rustfmt, rustfmt_skip)] #![allow(unused_parens)] #![allow(unused_imports)] +#![allow(missing_docs)] use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; use core::marker::PhantomData; -/// Weight functions needed for frame_benchmarking_pallet_pov. 
+/// Weight functions needed for `frame_benchmarking_pallet_pov`. pub trait WeightInfo { fn storage_single_value_read() -> Weight; fn storage_single_value_ignored_read() -> Weight; @@ -80,361 +78,361 @@ pub trait WeightInfo { fn storage_iteration() -> Weight; } -/// Weights for frame_benchmarking_pallet_pov using the Substrate node and recommended hardware. +/// Weights for `frame_benchmarking_pallet_pov` using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_read() -> Weight { // Proof Size summary in bytes: // Measured: `136` // Estimated: `1489` - // Minimum execution time: 1_706_000 picoseconds. - Weight::from_parts(1_788_000, 1489) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(3_000_000, 1489) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: Ignored) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `Ignored`) fn storage_single_value_ignored_read() -> Weight { // Proof Size summary in bytes: // Measured: `136` // Estimated: `0` - // Minimum execution time: 1_661_000 picoseconds. - Weight::from_parts(1_718_000, 0) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(3_000_000, 0) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) - /// Storage: Pov Value2 (r:1 w:0) - /// Proof: Pov Value2 (max_values: Some(1), max_size: Some(4), added: 499, mode: Ignored) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `Pov::Value2` (r:1 w:0) + /// Proof: `Pov::Value2` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `Ignored`) fn storage_single_value_ignored_some_read() -> Weight { // Proof Size summary in bytes: // Measured: `160` // Estimated: `1489` - // Minimum execution time: 2_226_000 picoseconds. - Weight::from_parts(2_365_000, 1489) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 1489) .saturating_add(T::DbWeight::get().reads(2_u64)) } - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_read_twice() -> Weight { // Proof Size summary in bytes: // Measured: `136` // Estimated: `1489` - // Minimum execution time: 1_785_000 picoseconds. - Weight::from_parts(1_980_000, 1489) + // Minimum execution time: 3_000_000 picoseconds. 
+ Weight::from_parts(4_000_000, 1489) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov Value (r:0 w:1) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:0 w:1) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_write() -> Weight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 254_000 picoseconds. - Weight::from_parts(326_000, 0) + // Minimum execution time: 0_000 picoseconds. + Weight::from_parts(1_000_000, 0) .saturating_add(T::DbWeight::get().writes(1_u64)) } - /// Storage: Pov Value (r:0 w:1) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:0 w:1) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_kill() -> Weight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 239_000 picoseconds. - Weight::from_parts(277_000, 0) + // Minimum execution time: 0_000 picoseconds. + Weight::from_parts(1_000_000, 0) .saturating_add(T::DbWeight::get().writes(1_u64)) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Measured`) fn storage_1m_map_read_one_value_two_additional_layers() -> Weight { // Proof Size summary in bytes: // Measured: `1275` // Estimated: `4740` - // Minimum execution time: 4_760_000 picoseconds. - Weight::from_parts(5_051_000, 4740) + // Minimum execution time: 7_000_000 picoseconds. + Weight::from_parts(7_000_000, 4740) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Measured`) fn storage_1m_map_read_one_value_three_additional_layers() -> Weight { // Proof Size summary in bytes: // Measured: `1544` // Estimated: `5009` - // Minimum execution time: 5_490_000 picoseconds. - Weight::from_parts(5_703_000, 5009) + // Minimum execution time: 8_000_000 picoseconds. + Weight::from_parts(8_000_000, 5009) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Measured`) fn storage_1m_map_read_one_value_four_additional_layers() -> Weight { // Proof Size summary in bytes: // Measured: `2044` // Estimated: `5509` - // Minimum execution time: 6_397_000 picoseconds. - Weight::from_parts(7_084_000, 5509) + // Minimum execution time: 9_000_000 picoseconds. 
+ Weight::from_parts(10_000_000, 5509) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:100 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) - /// Storage: Pov Map16M (r:100 w:0) - /// Proof: Pov Map16M (max_values: Some(16000000), max_size: Some(36), added: 3006, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:100 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) + /// Storage: `Pov::Map16M` (r:100 w:0) + /// Proof: `Pov::Map16M` (`max_values`: Some(16000000), `max_size`: Some(36), added: 3006, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. /// The range of component `m` is `[0, 100]`. fn storage_map_read_per_component(n: u32, m: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `515 + m * (188 ±0) + n * (188 ±0)` // Estimated: `990 + m * (2511 ±0) + n * (3006 ±0)` - // Minimum execution time: 181_481_000 picoseconds. - Weight::from_parts(129_275_141, 990) - // Standard Error: 13_049 - .saturating_add(Weight::from_parts(787_667, 0).saturating_mul(n.into())) - // Standard Error: 13_049 - .saturating_add(Weight::from_parts(830_378, 0).saturating_mul(m.into())) + // Minimum execution time: 342_000_000 picoseconds. + Weight::from_parts(179_688_624, 990) + // Standard Error: 26_526 + .saturating_add(Weight::from_parts(2_061_828, 0).saturating_mul(n.into())) + // Standard Error: 26_526 + .saturating_add(Weight::from_parts(1_825_923, 0).saturating_mul(m.into())) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(m.into()))) .saturating_add(Weight::from_parts(0, 2511).saturating_mul(m.into())) .saturating_add(Weight::from_parts(0, 3006).saturating_mul(n.into())) } - /// Storage: Pov Map1M (r:100 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Ignored) - /// Storage: Pov Map16M (r:100 w:0) - /// Proof: Pov Map16M (max_values: Some(16000000), max_size: Some(36), added: 3006, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:100 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Ignored`) + /// Storage: `Pov::Map16M` (r:100 w:0) + /// Proof: `Pov::Map16M` (`max_values`: Some(16000000), `max_size`: Some(36), added: 3006, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. /// The range of component `m` is `[0, 100]`. fn storage_map_read_per_component_one_ignored(n: u32, m: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `515 + m * (188 ±0) + n * (188 ±0)` // Estimated: `1685 + m * (189 ±0) + n * (3006 ±0)` - // Minimum execution time: 181_925_000 picoseconds. - Weight::from_parts(134_416_814, 1685) - // Standard Error: 15_678 - .saturating_add(Weight::from_parts(827_168, 0).saturating_mul(n.into())) - // Standard Error: 15_678 - .saturating_add(Weight::from_parts(813_655, 0).saturating_mul(m.into())) + // Minimum execution time: 342_000_000 picoseconds. 
+ Weight::from_parts(204_945_396, 1685) + // Standard Error: 25_217 + .saturating_add(Weight::from_parts(1_827_513, 0).saturating_mul(n.into())) + // Standard Error: 25_217 + .saturating_add(Weight::from_parts(1_661_271, 0).saturating_mul(m.into())) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(m.into()))) .saturating_add(Weight::from_parts(0, 189).saturating_mul(m.into())) .saturating_add(Weight::from_parts(0, 3006).saturating_mul(n.into())) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. fn storage_1m_map_one_entry_repeated_read(n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `170` // Estimated: `3501` - // Minimum execution time: 20_000 picoseconds. - Weight::from_parts(2_006_399, 3501) - // Standard Error: 808 - .saturating_add(Weight::from_parts(263_609, 0).saturating_mul(n.into())) + // Minimum execution time: 0_000 picoseconds. + Weight::from_parts(3_387_064, 3501) + // Standard Error: 1_445 + .saturating_add(Weight::from_parts(1_143_678, 0).saturating_mul(n.into())) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:100 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:100 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. fn storage_1m_map_multiple_entry_repeated_read(n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `147 + n * (40 ±0)` // Estimated: `990 + n * (2511 ±0)` - // Minimum execution time: 21_000 picoseconds. - Weight::from_parts(3_940_044, 990) - // Standard Error: 4_906 - .saturating_add(Weight::from_parts(3_454_882, 0).saturating_mul(n.into())) + // Minimum execution time: 0_000 picoseconds. + Weight::from_parts(1_323_684, 990) + // Standard Error: 10_546 + .saturating_add(Weight::from_parts(13_101_864, 0).saturating_mul(n.into())) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(Weight::from_parts(0, 2511).saturating_mul(n.into())) } - /// Storage: Pov DoubleMap1M (r:1024 w:0) - /// Proof: Pov DoubleMap1M (max_values: Some(1000000), max_size: Some(68), added: 2543, mode: MaxEncodedLen) + /// Storage: `Pov::DoubleMap1M` (r:1024 w:0) + /// Proof: `Pov::DoubleMap1M` (`max_values`: Some(1000000), `max_size`: Some(68), added: 2543, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 1024]`. fn storage_1m_double_map_read_per_component(n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `21938 + n * (57 ±0)` // Estimated: `990 + n * (2543 ±0)` - // Minimum execution time: 28_000 picoseconds. - Weight::from_parts(20_674_869, 990) - // Standard Error: 3_035 - .saturating_add(Weight::from_parts(1_995_730, 0).saturating_mul(n.into())) + // Minimum execution time: 0_000 picoseconds. 
+ Weight::from_parts(39_703_963, 990) + // Standard Error: 10_589 + .saturating_add(Weight::from_parts(3_718_040, 0).saturating_mul(n.into())) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(Weight::from_parts(0, 2543).saturating_mul(n.into())) } - /// Storage: Pov BoundedValue (r:1 w:0) - /// Proof: Pov BoundedValue (max_values: Some(1), max_size: Some(33), added: 528, mode: MaxEncodedLen) + /// Storage: `Pov::BoundedValue` (r:1 w:0) + /// Proof: `Pov::BoundedValue` (`max_values`: Some(1), `max_size`: Some(33), added: 528, mode: `MaxEncodedLen`) fn storage_value_bounded_read() -> Weight { // Proof Size summary in bytes: // Measured: `109` // Estimated: `1518` - // Minimum execution time: 1_091_000 picoseconds. - Weight::from_parts(1_181_000, 1518) + // Minimum execution time: 2_000_000 picoseconds. + Weight::from_parts(2_000_000, 1518) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov UnboundedValue (r:1 w:0) - /// Proof Skipped: Pov UnboundedValue (max_values: Some(1), max_size: None, mode: Measured) + /// Storage: `Pov::UnboundedValue` (r:1 w:0) + /// Proof: `Pov::UnboundedValue` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn storage_value_unbounded_read() -> Weight { // Proof Size summary in bytes: // Measured: `109` // Estimated: `1594` - // Minimum execution time: 1_079_000 picoseconds. - Weight::from_parts(1_176_000, 1594) + // Minimum execution time: 2_000_000 picoseconds. + Weight::from_parts(2_000_000, 1594) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov UnboundedValue (r:1 w:0) - /// Proof Skipped: Pov UnboundedValue (max_values: Some(1), max_size: None, mode: Ignored) + /// Storage: `Pov::UnboundedValue` (r:1 w:0) + /// Proof: `Pov::UnboundedValue` (`max_values`: Some(1), `max_size`: None, mode: `Ignored`) fn storage_value_unbounded_ignored_read() -> Weight { // Proof Size summary in bytes: // Measured: `109` // Estimated: `0` - // Minimum execution time: 1_101_000 picoseconds. - Weight::from_parts(1_160_000, 0) + // Minimum execution time: 2_000_000 picoseconds. + Weight::from_parts(2_000_000, 0) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov UnboundedValue (r:1 w:0) - /// Proof Skipped: Pov UnboundedValue (max_values: Some(1), max_size: None, mode: Measured) - /// Storage: Pov BoundedValue (r:1 w:0) - /// Proof: Pov BoundedValue (max_values: Some(1), max_size: Some(33), added: 528, mode: MaxEncodedLen) + /// Storage: `Pov::UnboundedValue` (r:1 w:0) + /// Proof: `Pov::UnboundedValue` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) + /// Storage: `Pov::BoundedValue` (r:1 w:0) + /// Proof: `Pov::BoundedValue` (`max_values`: Some(1), `max_size`: Some(33), added: 528, mode: `MaxEncodedLen`) fn storage_value_bounded_and_unbounded_read() -> Weight { // Proof Size summary in bytes: // Measured: `147` // Estimated: `1632` - // Minimum execution time: 2_143_000 picoseconds. - Weight::from_parts(2_280_000, 1632) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(5_000_000, 1632) .saturating_add(T::DbWeight::get().reads(2_u64)) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) /// The range of component `l` is `[0, 4194304]`. 
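The next two functions show what the mode choice means for the roughly 4 MiB `LargeValue`: under `Measured` the `Estimated` proof size follows the length actually stored (`1626 + l`), while under `MaxEncodedLen` it is the fixed worst case (`4195793`) regardless of `l`. A small runnable sketch with those constants copied from the functions below; the helper names are made up and purely illustrative:

```rust
/// Illustrative only: constants copied from the two generated functions below.
fn measured_estimate(l: u64) -> u64 {
	1_626 + l // grows with the value actually stored
}

fn mel_estimate(_l: u64) -> u64 {
	4_195_793 // fixed worst case of the ~4 MiB bounded value, independent of `l`
}

fn main() {
	// For small values `Measured` gives a much tighter bound; near the maximum
	// component value (l = 4_194_304) the two estimates are comparable.
	assert!(measured_estimate(1_000) < mel_estimate(1_000));
}
```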
fn measured_storage_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `142 + l * (1 ±0)` // Estimated: `1626 + l * (1 ±0)` - // Minimum execution time: 1_665_000 picoseconds. - Weight::from_parts(1_725_000, 1626) - // Standard Error: 3 - .saturating_add(Weight::from_parts(376, 0).saturating_mul(l.into())) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(3_000_000, 1626) + // Standard Error: 1 + .saturating_add(Weight::from_parts(393, 0).saturating_mul(l.into())) .saturating_add(T::DbWeight::get().reads(1_u64)) .saturating_add(Weight::from_parts(0, 1).saturating_mul(l.into())) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) /// The range of component `l` is `[0, 4194304]`. fn mel_storage_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `142 + l * (1 ±0)` // Estimated: `4195793` - // Minimum execution time: 1_640_000 picoseconds. - Weight::from_parts(1_724_000, 4195793) - // Standard Error: 4 - .saturating_add(Weight::from_parts(395, 0).saturating_mul(l.into())) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(3_000_000, 4195793) + // Standard Error: 1 + .saturating_add(Weight::from_parts(394, 0).saturating_mul(l.into())) .saturating_add(T::DbWeight::get().reads(1_u64)) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) /// The range of component `l` is `[0, 4194304]`. fn measured_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `1655 + l * (2 ±0)` - // Minimum execution time: 2_263_000 picoseconds. - Weight::from_parts(2_358_000, 1655) - // Standard Error: 8 - .saturating_add(Weight::from_parts(737, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. 
+ Weight::from_parts(4_000_000, 1655) + // Standard Error: 2 + .saturating_add(Weight::from_parts(655, 0).saturating_mul(l.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 2).saturating_mul(l.into())) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) /// The range of component `l` is `[0, 4194304]`. fn mel_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `4195793` - // Minimum execution time: 2_161_000 picoseconds. - Weight::from_parts(2_233_000, 4195793) - // Standard Error: 5 - .saturating_add(Weight::from_parts(639, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 4195793) + // Standard Error: 2 + .saturating_add(Weight::from_parts(660, 0).saturating_mul(l.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) /// The range of component `l` is `[0, 4194304]`. fn mel_mixed_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `4195793 + l * (2 ±0)` - // Minimum execution time: 2_149_000 picoseconds. - Weight::from_parts(2_256_000, 4195793) - // Standard Error: 6 - .saturating_add(Weight::from_parts(677, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 4195793) + // Standard Error: 4 + .saturating_add(Weight::from_parts(691, 0).saturating_mul(l.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 2).saturating_mul(l.into())) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) /// The range of component `l` is `[0, 4194304]`. 
fn measured_mixed_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `4195793 + l * (2 ±0)` - // Minimum execution time: 2_254_000 picoseconds. - Weight::from_parts(2_319_000, 4195793) - // Standard Error: 5 - .saturating_add(Weight::from_parts(664, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 4195793) + // Standard Error: 4 + .saturating_add(Weight::from_parts(691, 0).saturating_mul(l.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 2).saturating_mul(l.into())) } - /// Storage: Pov UnboundedMap (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap (max_values: None, max_size: None, mode: Measured) - /// Storage: Pov UnboundedMap2 (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap2 (max_values: None, max_size: None, mode: Measured) + /// Storage: `Pov::UnboundedMap` (r:1 w:0) + /// Proof: `Pov::UnboundedMap` (`max_values`: None, `max_size`: None, mode: `Measured`) + /// Storage: `Pov::UnboundedMap2` (r:1 w:0) + /// Proof: `Pov::UnboundedMap2` (`max_values`: None, `max_size`: None, mode: `Measured`) /// The range of component `i` is `[0, 1000]`. fn storage_map_unbounded_both_measured_read(i: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `229 + i * (8 ±0)` // Estimated: `3693 + i * (8 ±0)` - // Minimum execution time: 3_071_000 picoseconds. - Weight::from_parts(3_487_712, 3693) - // Standard Error: 26 - .saturating_add(Weight::from_parts(748, 0).saturating_mul(i.into())) + // Minimum execution time: 6_000_000 picoseconds. + Weight::from_parts(7_274_226, 3693) + // Standard Error: 280 + .saturating_add(Weight::from_parts(3_282, 0).saturating_mul(i.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 8).saturating_mul(i.into())) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) - /// Storage: Pov UnboundedMap (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap (max_values: None, max_size: None, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) + /// Storage: `Pov::UnboundedMap` (r:1 w:0) + /// Proof: `Pov::UnboundedMap` (`max_values`: None, `max_size`: None, mode: `Measured`) /// The range of component `i` is `[0, 1000]`. fn storage_map_partial_unbounded_read(i: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `228 + i * (4 ±0)` // Estimated: `3692 + i * (4 ±0)` - // Minimum execution time: 3_150_000 picoseconds. - Weight::from_parts(3_582_963, 3692) - // Standard Error: 18 - .saturating_add(Weight::from_parts(380, 0).saturating_mul(i.into())) + // Minimum execution time: 7_000_000 picoseconds. 
+ Weight::from_parts(7_507_333, 3692) + // Standard Error: 64 + .saturating_add(Weight::from_parts(982, 0).saturating_mul(i.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 4).saturating_mul(i.into())) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) - /// Storage: Pov UnboundedMap (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap (max_values: None, max_size: None, mode: Ignored) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) + /// Storage: `Pov::UnboundedMap` (r:1 w:0) + /// Proof: `Pov::UnboundedMap` (`max_values`: None, `max_size`: None, mode: `Ignored`) /// The range of component `i` is `[0, 1000]`. fn storage_map_partial_unbounded_ignored_read(i: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `228 + i * (4 ±0)` // Estimated: `3501 + i * (4 ±0)` - // Minimum execution time: 3_092_000 picoseconds. - Weight::from_parts(3_595_328, 3501) - // Standard Error: 20 - .saturating_add(Weight::from_parts(243, 0).saturating_mul(i.into())) + // Minimum execution time: 6_000_000 picoseconds. + Weight::from_parts(7_285_011, 3501) + // Standard Error: 80 + .saturating_add(Weight::from_parts(1_395, 0).saturating_mul(i.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 4).saturating_mul(i.into())) } @@ -442,382 +440,382 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 1_705_000 picoseconds. - Weight::from_parts(1_818_000, 0) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(5_000_000, 0) } fn noop() -> Weight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 533_000 picoseconds. - Weight::from_parts(587_000, 0) + // Minimum execution time: 2_000_000 picoseconds. + Weight::from_parts(2_000_000, 0) } - /// Storage: Pov UnboundedMapTwox (r:65001 w:0) - /// Proof Skipped: Pov UnboundedMapTwox (max_values: None, max_size: None, mode: Measured) + /// Storage: `Pov::UnboundedMapTwox` (r:65001 w:0) + /// Proof: `Pov::UnboundedMapTwox` (`max_values`: None, `max_size`: None, mode: `Measured`) fn storage_iteration() -> Weight { // Proof Size summary in bytes: // Measured: `17985289` // Estimated: `178863754` - // Minimum execution time: 118_753_057_000 picoseconds. - Weight::from_parts(121_396_503_000, 178863754) + // Minimum execution time: 218_275_000_000 picoseconds. + Weight::from_parts(222_603_000_000, 178863754) .saturating_add(T::DbWeight::get().reads(65001_u64)) } } -// For backwards compatibility and tests +// For backwards compatibility and tests. impl WeightInfo for () { - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_read() -> Weight { // Proof Size summary in bytes: // Measured: `136` // Estimated: `1489` - // Minimum execution time: 1_706_000 picoseconds. - Weight::from_parts(1_788_000, 1489) + // Minimum execution time: 3_000_000 picoseconds. 
+ Weight::from_parts(3_000_000, 1489) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: Ignored) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `Ignored`) fn storage_single_value_ignored_read() -> Weight { // Proof Size summary in bytes: // Measured: `136` // Estimated: `0` - // Minimum execution time: 1_661_000 picoseconds. - Weight::from_parts(1_718_000, 0) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(3_000_000, 0) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) - /// Storage: Pov Value2 (r:1 w:0) - /// Proof: Pov Value2 (max_values: Some(1), max_size: Some(4), added: 499, mode: Ignored) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) + /// Storage: `Pov::Value2` (r:1 w:0) + /// Proof: `Pov::Value2` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `Ignored`) fn storage_single_value_ignored_some_read() -> Weight { // Proof Size summary in bytes: // Measured: `160` // Estimated: `1489` - // Minimum execution time: 2_226_000 picoseconds. - Weight::from_parts(2_365_000, 1489) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 1489) .saturating_add(RocksDbWeight::get().reads(2_u64)) } - /// Storage: Pov Value (r:1 w:0) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:1 w:0) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_read_twice() -> Weight { // Proof Size summary in bytes: // Measured: `136` // Estimated: `1489` - // Minimum execution time: 1_785_000 picoseconds. - Weight::from_parts(1_980_000, 1489) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(4_000_000, 1489) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov Value (r:0 w:1) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:0 w:1) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_write() -> Weight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 254_000 picoseconds. - Weight::from_parts(326_000, 0) + // Minimum execution time: 0_000 picoseconds. + Weight::from_parts(1_000_000, 0) .saturating_add(RocksDbWeight::get().writes(1_u64)) } - /// Storage: Pov Value (r:0 w:1) - /// Proof: Pov Value (max_values: Some(1), max_size: Some(4), added: 499, mode: MaxEncodedLen) + /// Storage: `Pov::Value` (r:0 w:1) + /// Proof: `Pov::Value` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) fn storage_single_value_kill() -> Weight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 239_000 picoseconds. - Weight::from_parts(277_000, 0) + // Minimum execution time: 0_000 picoseconds. 
+ Weight::from_parts(1_000_000, 0) .saturating_add(RocksDbWeight::get().writes(1_u64)) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Measured`) fn storage_1m_map_read_one_value_two_additional_layers() -> Weight { // Proof Size summary in bytes: // Measured: `1275` // Estimated: `4740` - // Minimum execution time: 4_760_000 picoseconds. - Weight::from_parts(5_051_000, 4740) + // Minimum execution time: 7_000_000 picoseconds. + Weight::from_parts(7_000_000, 4740) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Measured`) fn storage_1m_map_read_one_value_three_additional_layers() -> Weight { // Proof Size summary in bytes: // Measured: `1544` // Estimated: `5009` - // Minimum execution time: 5_490_000 picoseconds. - Weight::from_parts(5_703_000, 5009) + // Minimum execution time: 8_000_000 picoseconds. + Weight::from_parts(8_000_000, 5009) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Measured`) fn storage_1m_map_read_one_value_four_additional_layers() -> Weight { // Proof Size summary in bytes: // Measured: `2044` // Estimated: `5509` - // Minimum execution time: 6_397_000 picoseconds. - Weight::from_parts(7_084_000, 5509) + // Minimum execution time: 9_000_000 picoseconds. + Weight::from_parts(10_000_000, 5509) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:100 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) - /// Storage: Pov Map16M (r:100 w:0) - /// Proof: Pov Map16M (max_values: Some(16000000), max_size: Some(36), added: 3006, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:100 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) + /// Storage: `Pov::Map16M` (r:100 w:0) + /// Proof: `Pov::Map16M` (`max_values`: Some(16000000), `max_size`: Some(36), added: 3006, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. /// The range of component `m` is `[0, 100]`. fn storage_map_read_per_component(n: u32, m: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `515 + m * (188 ±0) + n * (188 ±0)` // Estimated: `990 + m * (2511 ±0) + n * (3006 ±0)` - // Minimum execution time: 181_481_000 picoseconds. - Weight::from_parts(129_275_141, 990) - // Standard Error: 13_049 - .saturating_add(Weight::from_parts(787_667, 0).saturating_mul(n.into())) - // Standard Error: 13_049 - .saturating_add(Weight::from_parts(830_378, 0).saturating_mul(m.into())) + // Minimum execution time: 342_000_000 picoseconds. 
+ Weight::from_parts(179_688_624, 990) + // Standard Error: 26_526 + .saturating_add(Weight::from_parts(2_061_828, 0).saturating_mul(n.into())) + // Standard Error: 26_526 + .saturating_add(Weight::from_parts(1_825_923, 0).saturating_mul(m.into())) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(m.into()))) .saturating_add(Weight::from_parts(0, 2511).saturating_mul(m.into())) .saturating_add(Weight::from_parts(0, 3006).saturating_mul(n.into())) } - /// Storage: Pov Map1M (r:100 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: Ignored) - /// Storage: Pov Map16M (r:100 w:0) - /// Proof: Pov Map16M (max_values: Some(16000000), max_size: Some(36), added: 3006, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:100 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `Ignored`) + /// Storage: `Pov::Map16M` (r:100 w:0) + /// Proof: `Pov::Map16M` (`max_values`: Some(16000000), `max_size`: Some(36), added: 3006, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. /// The range of component `m` is `[0, 100]`. fn storage_map_read_per_component_one_ignored(n: u32, m: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `515 + m * (188 ±0) + n * (188 ±0)` // Estimated: `1685 + m * (189 ±0) + n * (3006 ±0)` - // Minimum execution time: 181_925_000 picoseconds. - Weight::from_parts(134_416_814, 1685) - // Standard Error: 15_678 - .saturating_add(Weight::from_parts(827_168, 0).saturating_mul(n.into())) - // Standard Error: 15_678 - .saturating_add(Weight::from_parts(813_655, 0).saturating_mul(m.into())) + // Minimum execution time: 342_000_000 picoseconds. + Weight::from_parts(204_945_396, 1685) + // Standard Error: 25_217 + .saturating_add(Weight::from_parts(1_827_513, 0).saturating_mul(n.into())) + // Standard Error: 25_217 + .saturating_add(Weight::from_parts(1_661_271, 0).saturating_mul(m.into())) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(m.into()))) .saturating_add(Weight::from_parts(0, 189).saturating_mul(m.into())) .saturating_add(Weight::from_parts(0, 3006).saturating_mul(n.into())) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. fn storage_1m_map_one_entry_repeated_read(n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `170` // Estimated: `3501` - // Minimum execution time: 20_000 picoseconds. - Weight::from_parts(2_006_399, 3501) - // Standard Error: 808 - .saturating_add(Weight::from_parts(263_609, 0).saturating_mul(n.into())) + // Minimum execution time: 0_000 picoseconds. 
+ Weight::from_parts(3_387_064, 3501) + // Standard Error: 1_445 + .saturating_add(Weight::from_parts(1_143_678, 0).saturating_mul(n.into())) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov Map1M (r:100 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) + /// Storage: `Pov::Map1M` (r:100 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 100]`. fn storage_1m_map_multiple_entry_repeated_read(n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `147 + n * (40 ±0)` // Estimated: `990 + n * (2511 ±0)` - // Minimum execution time: 21_000 picoseconds. - Weight::from_parts(3_940_044, 990) - // Standard Error: 4_906 - .saturating_add(Weight::from_parts(3_454_882, 0).saturating_mul(n.into())) + // Minimum execution time: 0_000 picoseconds. + Weight::from_parts(1_323_684, 990) + // Standard Error: 10_546 + .saturating_add(Weight::from_parts(13_101_864, 0).saturating_mul(n.into())) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(Weight::from_parts(0, 2511).saturating_mul(n.into())) } - /// Storage: Pov DoubleMap1M (r:1024 w:0) - /// Proof: Pov DoubleMap1M (max_values: Some(1000000), max_size: Some(68), added: 2543, mode: MaxEncodedLen) + /// Storage: `Pov::DoubleMap1M` (r:1024 w:0) + /// Proof: `Pov::DoubleMap1M` (`max_values`: Some(1000000), `max_size`: Some(68), added: 2543, mode: `MaxEncodedLen`) /// The range of component `n` is `[0, 1024]`. fn storage_1m_double_map_read_per_component(n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `21938 + n * (57 ±0)` // Estimated: `990 + n * (2543 ±0)` - // Minimum execution time: 28_000 picoseconds. - Weight::from_parts(20_674_869, 990) - // Standard Error: 3_035 - .saturating_add(Weight::from_parts(1_995_730, 0).saturating_mul(n.into())) + // Minimum execution time: 0_000 picoseconds. + Weight::from_parts(39_703_963, 990) + // Standard Error: 10_589 + .saturating_add(Weight::from_parts(3_718_040, 0).saturating_mul(n.into())) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(Weight::from_parts(0, 2543).saturating_mul(n.into())) } - /// Storage: Pov BoundedValue (r:1 w:0) - /// Proof: Pov BoundedValue (max_values: Some(1), max_size: Some(33), added: 528, mode: MaxEncodedLen) + /// Storage: `Pov::BoundedValue` (r:1 w:0) + /// Proof: `Pov::BoundedValue` (`max_values`: Some(1), `max_size`: Some(33), added: 528, mode: `MaxEncodedLen`) fn storage_value_bounded_read() -> Weight { // Proof Size summary in bytes: // Measured: `109` // Estimated: `1518` - // Minimum execution time: 1_091_000 picoseconds. - Weight::from_parts(1_181_000, 1518) + // Minimum execution time: 2_000_000 picoseconds. + Weight::from_parts(2_000_000, 1518) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov UnboundedValue (r:1 w:0) - /// Proof Skipped: Pov UnboundedValue (max_values: Some(1), max_size: None, mode: Measured) + /// Storage: `Pov::UnboundedValue` (r:1 w:0) + /// Proof: `Pov::UnboundedValue` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) fn storage_value_unbounded_read() -> Weight { // Proof Size summary in bytes: // Measured: `109` // Estimated: `1594` - // Minimum execution time: 1_079_000 picoseconds. - Weight::from_parts(1_176_000, 1594) + // Minimum execution time: 2_000_000 picoseconds. 
+ Weight::from_parts(2_000_000, 1594) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov UnboundedValue (r:1 w:0) - /// Proof Skipped: Pov UnboundedValue (max_values: Some(1), max_size: None, mode: Ignored) + /// Storage: `Pov::UnboundedValue` (r:1 w:0) + /// Proof: `Pov::UnboundedValue` (`max_values`: Some(1), `max_size`: None, mode: `Ignored`) fn storage_value_unbounded_ignored_read() -> Weight { // Proof Size summary in bytes: // Measured: `109` // Estimated: `0` - // Minimum execution time: 1_101_000 picoseconds. - Weight::from_parts(1_160_000, 0) + // Minimum execution time: 2_000_000 picoseconds. + Weight::from_parts(2_000_000, 0) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov UnboundedValue (r:1 w:0) - /// Proof Skipped: Pov UnboundedValue (max_values: Some(1), max_size: None, mode: Measured) - /// Storage: Pov BoundedValue (r:1 w:0) - /// Proof: Pov BoundedValue (max_values: Some(1), max_size: Some(33), added: 528, mode: MaxEncodedLen) + /// Storage: `Pov::UnboundedValue` (r:1 w:0) + /// Proof: `Pov::UnboundedValue` (`max_values`: Some(1), `max_size`: None, mode: `Measured`) + /// Storage: `Pov::BoundedValue` (r:1 w:0) + /// Proof: `Pov::BoundedValue` (`max_values`: Some(1), `max_size`: Some(33), added: 528, mode: `MaxEncodedLen`) fn storage_value_bounded_and_unbounded_read() -> Weight { // Proof Size summary in bytes: // Measured: `147` // Estimated: `1632` - // Minimum execution time: 2_143_000 picoseconds. - Weight::from_parts(2_280_000, 1632) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(5_000_000, 1632) .saturating_add(RocksDbWeight::get().reads(2_u64)) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) /// The range of component `l` is `[0, 4194304]`. fn measured_storage_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `142 + l * (1 ±0)` // Estimated: `1626 + l * (1 ±0)` - // Minimum execution time: 1_665_000 picoseconds. - Weight::from_parts(1_725_000, 1626) - // Standard Error: 3 - .saturating_add(Weight::from_parts(376, 0).saturating_mul(l.into())) + // Minimum execution time: 3_000_000 picoseconds. + Weight::from_parts(3_000_000, 1626) + // Standard Error: 1 + .saturating_add(Weight::from_parts(393, 0).saturating_mul(l.into())) .saturating_add(RocksDbWeight::get().reads(1_u64)) .saturating_add(Weight::from_parts(0, 1).saturating_mul(l.into())) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) /// The range of component `l` is `[0, 4194304]`. fn mel_storage_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `142 + l * (1 ±0)` // Estimated: `4195793` - // Minimum execution time: 1_640_000 picoseconds. - Weight::from_parts(1_724_000, 4195793) - // Standard Error: 4 - .saturating_add(Weight::from_parts(395, 0).saturating_mul(l.into())) + // Minimum execution time: 3_000_000 picoseconds. 
+ Weight::from_parts(3_000_000, 4195793) + // Standard Error: 1 + .saturating_add(Weight::from_parts(394, 0).saturating_mul(l.into())) .saturating_add(RocksDbWeight::get().reads(1_u64)) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) /// The range of component `l` is `[0, 4194304]`. fn measured_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `1655 + l * (2 ±0)` - // Minimum execution time: 2_263_000 picoseconds. - Weight::from_parts(2_358_000, 1655) - // Standard Error: 8 - .saturating_add(Weight::from_parts(737, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 1655) + // Standard Error: 2 + .saturating_add(Weight::from_parts(655, 0).saturating_mul(l.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 2).saturating_mul(l.into())) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) /// The range of component `l` is `[0, 4194304]`. fn mel_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `4195793` - // Minimum execution time: 2_161_000 picoseconds. - Weight::from_parts(2_233_000, 4195793) - // Standard Error: 5 - .saturating_add(Weight::from_parts(639, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 4195793) + // Standard Error: 2 + .saturating_add(Weight::from_parts(660, 0).saturating_mul(l.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) /// The range of component `l` is `[0, 4194304]`. 
fn mel_mixed_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `4195793 + l * (2 ±0)` - // Minimum execution time: 2_149_000 picoseconds. - Weight::from_parts(2_256_000, 4195793) - // Standard Error: 6 - .saturating_add(Weight::from_parts(677, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 4195793) + // Standard Error: 4 + .saturating_add(Weight::from_parts(691, 0).saturating_mul(l.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 2).saturating_mul(l.into())) } - /// Storage: Pov LargeValue (r:1 w:0) - /// Proof: Pov LargeValue (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: Measured) - /// Storage: Pov LargeValue2 (r:1 w:0) - /// Proof: Pov LargeValue2 (max_values: Some(1), max_size: Some(4194308), added: 4194803, mode: MaxEncodedLen) + /// Storage: `Pov::LargeValue` (r:1 w:0) + /// Proof: `Pov::LargeValue` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `Measured`) + /// Storage: `Pov::LargeValue2` (r:1 w:0) + /// Proof: `Pov::LargeValue2` (`max_values`: Some(1), `max_size`: Some(4194308), added: 4194803, mode: `MaxEncodedLen`) /// The range of component `l` is `[0, 4194304]`. fn measured_mixed_storage_double_value_read_linear_size(l: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `171 + l * (2 ±0)` // Estimated: `4195793 + l * (2 ±0)` - // Minimum execution time: 2_254_000 picoseconds. - Weight::from_parts(2_319_000, 4195793) - // Standard Error: 5 - .saturating_add(Weight::from_parts(664, 0).saturating_mul(l.into())) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(4_000_000, 4195793) + // Standard Error: 4 + .saturating_add(Weight::from_parts(691, 0).saturating_mul(l.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 2).saturating_mul(l.into())) } - /// Storage: Pov UnboundedMap (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap (max_values: None, max_size: None, mode: Measured) - /// Storage: Pov UnboundedMap2 (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap2 (max_values: None, max_size: None, mode: Measured) + /// Storage: `Pov::UnboundedMap` (r:1 w:0) + /// Proof: `Pov::UnboundedMap` (`max_values`: None, `max_size`: None, mode: `Measured`) + /// Storage: `Pov::UnboundedMap2` (r:1 w:0) + /// Proof: `Pov::UnboundedMap2` (`max_values`: None, `max_size`: None, mode: `Measured`) /// The range of component `i` is `[0, 1000]`. fn storage_map_unbounded_both_measured_read(i: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `229 + i * (8 ±0)` // Estimated: `3693 + i * (8 ±0)` - // Minimum execution time: 3_071_000 picoseconds. - Weight::from_parts(3_487_712, 3693) - // Standard Error: 26 - .saturating_add(Weight::from_parts(748, 0).saturating_mul(i.into())) + // Minimum execution time: 6_000_000 picoseconds. 
+ Weight::from_parts(7_274_226, 3693) + // Standard Error: 280 + .saturating_add(Weight::from_parts(3_282, 0).saturating_mul(i.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 8).saturating_mul(i.into())) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) - /// Storage: Pov UnboundedMap (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap (max_values: None, max_size: None, mode: Measured) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) + /// Storage: `Pov::UnboundedMap` (r:1 w:0) + /// Proof: `Pov::UnboundedMap` (`max_values`: None, `max_size`: None, mode: `Measured`) /// The range of component `i` is `[0, 1000]`. fn storage_map_partial_unbounded_read(i: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `228 + i * (4 ±0)` // Estimated: `3692 + i * (4 ±0)` - // Minimum execution time: 3_150_000 picoseconds. - Weight::from_parts(3_582_963, 3692) - // Standard Error: 18 - .saturating_add(Weight::from_parts(380, 0).saturating_mul(i.into())) + // Minimum execution time: 7_000_000 picoseconds. + Weight::from_parts(7_507_333, 3692) + // Standard Error: 64 + .saturating_add(Weight::from_parts(982, 0).saturating_mul(i.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 4).saturating_mul(i.into())) } - /// Storage: Pov Map1M (r:1 w:0) - /// Proof: Pov Map1M (max_values: Some(1000000), max_size: Some(36), added: 2511, mode: MaxEncodedLen) - /// Storage: Pov UnboundedMap (r:1 w:0) - /// Proof Skipped: Pov UnboundedMap (max_values: None, max_size: None, mode: Ignored) + /// Storage: `Pov::Map1M` (r:1 w:0) + /// Proof: `Pov::Map1M` (`max_values`: Some(1000000), `max_size`: Some(36), added: 2511, mode: `MaxEncodedLen`) + /// Storage: `Pov::UnboundedMap` (r:1 w:0) + /// Proof: `Pov::UnboundedMap` (`max_values`: None, `max_size`: None, mode: `Ignored`) /// The range of component `i` is `[0, 1000]`. fn storage_map_partial_unbounded_ignored_read(i: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `228 + i * (4 ±0)` // Estimated: `3501 + i * (4 ±0)` - // Minimum execution time: 3_092_000 picoseconds. - Weight::from_parts(3_595_328, 3501) - // Standard Error: 20 - .saturating_add(Weight::from_parts(243, 0).saturating_mul(i.into())) + // Minimum execution time: 6_000_000 picoseconds. + Weight::from_parts(7_285_011, 3501) + // Standard Error: 80 + .saturating_add(Weight::from_parts(1_395, 0).saturating_mul(i.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(Weight::from_parts(0, 4).saturating_mul(i.into())) } @@ -825,24 +823,24 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 1_705_000 picoseconds. - Weight::from_parts(1_818_000, 0) + // Minimum execution time: 4_000_000 picoseconds. + Weight::from_parts(5_000_000, 0) } fn noop() -> Weight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 533_000 picoseconds. - Weight::from_parts(587_000, 0) + // Minimum execution time: 2_000_000 picoseconds. 
+ Weight::from_parts(2_000_000, 0) } - /// Storage: Pov UnboundedMapTwox (r:65001 w:0) - /// Proof Skipped: Pov UnboundedMapTwox (max_values: None, max_size: None, mode: Measured) + /// Storage: `Pov::UnboundedMapTwox` (r:65001 w:0) + /// Proof: `Pov::UnboundedMapTwox` (`max_values`: None, `max_size`: None, mode: `Measured`) fn storage_iteration() -> Weight { // Proof Size summary in bytes: // Measured: `17985289` // Estimated: `178863754` - // Minimum execution time: 118_753_057_000 picoseconds. - Weight::from_parts(121_396_503_000, 178863754) + // Minimum execution time: 218_275_000_000 picoseconds. + Weight::from_parts(222_603_000_000, 178863754) .saturating_add(RocksDbWeight::get().reads(65001_u64)) } } diff --git a/substrate/frame/support/procedural/src/benchmark.rs b/substrate/frame/support/procedural/src/benchmark.rs index 27c75a7f054c..ea53ad263a16 100644 --- a/substrate/frame/support/procedural/src/benchmark.rs +++ b/substrate/frame/support/procedural/src/benchmark.rs @@ -40,10 +40,14 @@ mod keywords { custom_keyword!(benchmarks); custom_keyword!(block); custom_keyword!(extra); + custom_keyword!(pov_mode); custom_keyword!(extrinsic_call); custom_keyword!(skip_meta); custom_keyword!(BenchmarkError); custom_keyword!(Result); + custom_keyword!(MaxEncodedLen); + custom_keyword!(Measured); + custom_keyword!(Ignored); pub const BENCHMARK_TOKEN: &str = stringify!(benchmark); pub const BENCHMARKS_TOKEN: &str = stringify!(benchmarks); @@ -73,51 +77,158 @@ struct RangeArgs { struct BenchmarkAttrs { skip_meta: bool, extra: bool, + pov_mode: Option, } /// Represents a single benchmark option -enum BenchmarkAttrKeyword { +enum BenchmarkAttr { Extra, SkipMeta, + /// How the PoV should be measured. + PoV(PovModeAttr), } -impl syn::parse::Parse for BenchmarkAttrKeyword { +impl syn::parse::Parse for PovModeAttr { + fn parse(input: ParseStream) -> Result { + let _pov: keywords::pov_mode = input.parse()?; + let _eq: Token![=] = input.parse()?; + let root = PovEstimationMode::parse(input)?; + + let mut maybe_content = None; + let _ = || -> Result<()> { + let content; + syn::braced!(content in input); + maybe_content = Some(content); + Ok(()) + }(); + + let per_key = match maybe_content { + Some(content) => { + let per_key = Punctuated::::parse_terminated(&content)?; + per_key.into_iter().collect() + }, + None => Vec::new(), + }; + + Ok(Self { root, per_key }) + } +} + +impl syn::parse::Parse for BenchmarkAttr { fn parse(input: ParseStream) -> Result { let lookahead = input.lookahead1(); if lookahead.peek(keywords::extra) { let _extra: keywords::extra = input.parse()?; - return Ok(BenchmarkAttrKeyword::Extra) + Ok(BenchmarkAttr::Extra) } else if lookahead.peek(keywords::skip_meta) { let _skip_meta: keywords::skip_meta = input.parse()?; - return Ok(BenchmarkAttrKeyword::SkipMeta) + Ok(BenchmarkAttr::SkipMeta) + } else if lookahead.peek(keywords::pov_mode) { + PovModeAttr::parse(input).map(BenchmarkAttr::PoV) + } else { + Err(lookahead.error()) + } + } +} + +/// A `#[pov_mode = .. { .. }]` attribute. +#[derive(Debug, Clone)] +struct PovModeAttr { + /// The root mode for this benchmarks. + root: PovEstimationMode, + /// The pov-mode for a specific key. This overwrites `root` for this key. + per_key: Vec, +} + +/// A single key-value pair inside the `{}` of a `#[pov_mode = .. { .. }]` attribute. +#[derive(Debug, Clone, derive_syn_parse::Parse)] +struct PovModeKeyAttr { + /// A specific storage key for which to set the PoV mode. + key: Path, + _underscore: Token![:], + /// The PoV mode for this key. 
+ mode: PovEstimationMode, +} + +/// How the PoV should be estimated. +#[derive(Debug, Eq, PartialEq, Clone, Copy)] +pub enum PovEstimationMode { + /// Use the maximal encoded length as provided by [`codec::MaxEncodedLen`]. + MaxEncodedLen, + /// Measure the accessed value size in the pallet benchmarking and add some trie overhead. + Measured, + /// Do not estimate the PoV size for this storage item or benchmark. + Ignored, +} + +impl syn::parse::Parse for PovEstimationMode { + fn parse(input: ParseStream) -> Result { + let lookahead = input.lookahead1(); + if lookahead.peek(keywords::MaxEncodedLen) { + let _max_encoded_len: keywords::MaxEncodedLen = input.parse()?; + return Ok(PovEstimationMode::MaxEncodedLen) + } else if lookahead.peek(keywords::Measured) { + let _measured: keywords::Measured = input.parse()?; + return Ok(PovEstimationMode::Measured) + } else if lookahead.peek(keywords::Ignored) { + let _ignored: keywords::Ignored = input.parse()?; + return Ok(PovEstimationMode::Ignored) } else { return Err(lookahead.error()) } } } +impl ToString for PovEstimationMode { + fn to_string(&self) -> String { + match self { + PovEstimationMode::MaxEncodedLen => "MaxEncodedLen".into(), + PovEstimationMode::Measured => "Measured".into(), + PovEstimationMode::Ignored => "Ignored".into(), + } + } +} + +impl quote::ToTokens for PovEstimationMode { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + PovEstimationMode::MaxEncodedLen => tokens.extend(quote!(MaxEncodedLen)), + PovEstimationMode::Measured => tokens.extend(quote!(Measured)), + PovEstimationMode::Ignored => tokens.extend(quote!(Ignored)), + } + } +} + impl syn::parse::Parse for BenchmarkAttrs { fn parse(input: ParseStream) -> syn::Result { let mut extra = false; let mut skip_meta = false; - let args = Punctuated::::parse_terminated(&input)?; + let mut pov_mode = None; + let args = Punctuated::::parse_terminated(&input)?; + for arg in args.into_iter() { match arg { - BenchmarkAttrKeyword::Extra => { + BenchmarkAttr::Extra => { if extra { return Err(input.error("`extra` can only be specified once")) } extra = true; }, - BenchmarkAttrKeyword::SkipMeta => { + BenchmarkAttr::SkipMeta => { if skip_meta { return Err(input.error("`skip_meta` can only be specified once")) } skip_meta = true; }, + BenchmarkAttr::PoV(mode) => { + if pov_mode.is_some() { + return Err(input.error("`pov_mode` can only be specified once")) + } + pov_mode = Some(mode); + }, } } - Ok(BenchmarkAttrs { extra, skip_meta }) + Ok(BenchmarkAttrs { extra, skip_meta, pov_mode }) } } @@ -344,6 +455,7 @@ pub fn benchmarks( tokens: TokenStream, instance: bool, ) -> syn::Result { + let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; // gather module info let module: ItemMod = syn::parse(tokens)?; let mod_span = module.span(); @@ -364,6 +476,8 @@ pub fn benchmarks( let mut benchmark_names: Vec = Vec::new(); let mut extra_benchmark_names: Vec = Vec::new(); let mut skip_meta_benchmark_names: Vec = Vec::new(); + // Map benchmarks to PoV modes. + let mut pov_modes = Vec::new(); let (_brace, mut content) = module.content.ok_or(syn::Error::new(mod_span, "Module cannot be empty!"))?; @@ -400,6 +514,25 @@ pub fn benchmarks( } else if benchmark_attrs.skip_meta { skip_meta_benchmark_names.push(name.clone()); } + + if let Some(mode) = benchmark_attrs.pov_mode { + let mut modes = Vec::new(); + // We cannot expand strings here since it is no-std, but syn does not expand bytes. 
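The parser added here accepts `pov_mode = <Mode>` followed by an optional braced list of `Path: <Mode>` overrides. For readers who want to poke at that grammar outside the macro, here is a stand-alone sketch using `syn` directly; the type and field names are hypothetical and it deliberately simplifies the real implementation (for example, it uses a plain `Ident` where the macro uses the `PovEstimationMode` keywords):

```rust
use syn::{
	parse::{Parse, ParseStream},
	punctuated::Punctuated,
	Ident, Path, Result, Token,
};

syn::custom_keyword!(pov_mode);

// One `Path: Mode` entry inside the optional `{ .. }` block.
struct KeyMode {
	key: Path,
	_colon: Token![:],
	mode: Ident,
}

impl Parse for KeyMode {
	fn parse(input: ParseStream) -> Result<Self> {
		Ok(Self { key: input.parse()?, _colon: input.parse()?, mode: input.parse()? })
	}
}

// `pov_mode = <Mode>`, optionally followed by `{ Path: Mode, .. }`.
struct PovMode {
	root: Ident,
	per_key: Vec<KeyMode>,
}

impl Parse for PovMode {
	fn parse(input: ParseStream) -> Result<Self> {
		let _kw: pov_mode = input.parse()?;
		let _eq: Token![=] = input.parse()?;
		let root: Ident = input.parse()?;
		// The per-key override block is optional.
		let per_key = if input.peek(syn::token::Brace) {
			let content;
			syn::braced!(content in input);
			Punctuated::<KeyMode, Token![,]>::parse_terminated(&content)?
				.into_iter()
				.collect()
		} else {
			Vec::new()
		};
		Ok(Self { root, per_key })
	}
}

fn main() {
	let attr: PovMode =
		syn::parse_str("pov_mode = Measured { Pallet::Storage: MaxEncodedLen }").unwrap();
	assert_eq!(attr.root.to_string(), "Measured");
	assert_eq!(attr.per_key.len(), 1);
}
```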
+ let name = name.to_string(); + let m = mode.root.to_string(); + modes.push(quote!(("ALL".as_bytes().to_vec(), #m.as_bytes().to_vec()))); + + for attr in mode.per_key.iter() { + // syn always puts spaces in quoted paths: + let key = attr.key.clone().into_token_stream().to_string().replace(" ", ""); + let mode = attr.mode.to_string(); + modes.push(quote!((#key.as_bytes().to_vec(), #mode.as_bytes().to_vec()))); + } + + pov_modes.push( + quote!((#name.as_bytes().to_vec(), #krate::__private::vec![#(#modes),*])), + ); + } } // expand benchmark @@ -419,7 +552,6 @@ pub fn benchmarks( true => quote!(T: Config, I: 'static), }; - let krate = generate_access_from_frame_or_crate("frame-benchmarking")?; let frame_system = generate_access_from_frame_or_crate("frame-system")?; // benchmark name variables @@ -537,6 +669,16 @@ pub fn benchmarks( ]; all_names.retain(|x| !extra.contains(x)); } + let pov_modes: + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec<( + #krate::__private::Vec, + #krate::__private::Vec + )>, + )> = #krate::__private::vec![ + #( #pov_modes ),* + ]; all_names.into_iter().map(|benchmark| { let selected_benchmark = match benchmark { #(#selected_benchmark_mappings), @@ -544,12 +686,13 @@ pub fn benchmarks( _ => panic!("all benchmarks should be selectable") }; let components = >::components(&selected_benchmark); + let name = benchmark.as_bytes().to_vec(); + let modes = pov_modes.iter().find(|p| p.0 == name).map(|p| p.1.clone()); + #krate::BenchmarkMetadata { name: benchmark.as_bytes().to_vec(), components, - // TODO: Not supported by V2 syntax as of yet. - // https://github.com/paritytech/substrate/issues/13132 - pov_modes: #krate::__private::vec![], + pov_modes: modes.unwrap_or_default(), } }).collect::<#krate::__private::Vec<_>>() } diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_1.rs b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_1.rs new file mode 100644 index 000000000000..40ef884bf857 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_1.rs @@ -0,0 +1,31 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
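At expansion time the attribute is lowered to plain byte strings: one `("ALL", <root mode>)` pair for the whole benchmark, plus one pair per overridden key with the path stringified without spaces, and these pairs end up in `BenchmarkMetadata::pov_modes`. A rough sketch of the resulting data for a `pov_mode = Measured { Pallet::Storage: MaxEncodedLen }` attribute (shape only, not the literal generated code):

```rust
fn main() {
	// What the expansion above effectively records for one benchmark.
	let pov_modes: Vec<(Vec<u8>, Vec<u8>)> = vec![
		// Whole-benchmark default.
		(b"ALL".to_vec(), b"Measured".to_vec()),
		// Per-key override; the key path is stringified with spaces removed.
		(b"Pallet::Storage".to_vec(), b"MaxEncodedLen".to_vec()),
	];
	assert_eq!(pov_modes.len(), 2);
}
```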
+ +use frame_benchmarking::v2::*; + +#[benchmarks] +mod benches { + use super::*; + + #[benchmark(pov_mode = Wrong)] + fn bench() { + #[block] + {} + } +} + +fn main() {} diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_1.stderr b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_1.stderr new file mode 100644 index 000000000000..add80da63070 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_1.stderr @@ -0,0 +1,5 @@ +error: expected one of: `MaxEncodedLen`, `Measured`, `Ignored` + --> tests/benchmark_ui/bad_attr_pov_mode_1.rs:24:25 + | +24 | #[benchmark(pov_mode = Wrong)] + | ^^^^^ diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_2.rs b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_2.rs new file mode 100644 index 000000000000..151bb931e920 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_2.rs @@ -0,0 +1,33 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use frame_benchmarking::v2::*; + +#[benchmarks] +mod benches { + use super::*; + + #[benchmark(pov_mode = Measured { + Key: Wrong + })] + fn bench() { + #[block] + {} + } +} + +fn main() {} diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_2.stderr b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_2.stderr new file mode 100644 index 000000000000..0f9961afd89f --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_2.stderr @@ -0,0 +1,5 @@ +error: expected one of: `MaxEncodedLen`, `Measured`, `Ignored` + --> tests/benchmark_ui/bad_attr_pov_mode_2.rs:25:8 + | +25 | Key: Wrong + | ^^^^^ diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_3.rs b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_3.rs new file mode 100644 index 000000000000..9c5e3801b1a3 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_3.rs @@ -0,0 +1,31 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use frame_benchmarking::v2::*; + +#[benchmarks] +mod benches { + use super::*; + + #[benchmark(pov_mode)] + fn bench() { + #[block] + {} + } +} + +fn main() {} diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_3.stderr b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_3.stderr new file mode 100644 index 000000000000..f28a993989a3 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_3.stderr @@ -0,0 +1,5 @@ +error: expected `=` + --> tests/benchmark_ui/bad_attr_pov_mode_3.rs:24:22 + | +24 | #[benchmark(pov_mode)] + | ^ diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_4.rs b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_4.rs new file mode 100644 index 000000000000..11ec5124d289 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_4.rs @@ -0,0 +1,31 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use frame_benchmarking::v2::*; + +#[benchmarks] +mod benches { + use super::*; + + #[benchmark(pov_mode =)] + fn bench() { + #[block] + {} + } +} + +fn main() {} diff --git a/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_4.stderr b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_4.stderr new file mode 100644 index 000000000000..572b6b0815dc --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/bad_attr_pov_mode_4.stderr @@ -0,0 +1,5 @@ +error: unexpected end of input, expected one of: `MaxEncodedLen`, `Measured`, `Ignored` + --> tests/benchmark_ui/bad_attr_pov_mode_4.rs:24:24 + | +24 | #[benchmark(pov_mode =)] + | ^ diff --git a/substrate/frame/support/test/tests/benchmark_ui/dup_attr_pov_mode.rs b/substrate/frame/support/test/tests/benchmark_ui/dup_attr_pov_mode.rs new file mode 100644 index 000000000000..f49636d181a5 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/dup_attr_pov_mode.rs @@ -0,0 +1,32 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
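These `.rs`/`.stderr` pairs follow the usual trybuild UI-test convention: each case is compiled and the emitted error is diffed against the committed `.stderr` file, while cases under `pass/` must build cleanly. A minimal sketch of such a harness, assuming a `trybuild` dev-dependency (the actual harness wiring is not part of this diff):

```rust
// Minimal trybuild-style harness sketch for UI cases like the ones added above.
#[test]
fn benchmark_ui() {
	let t = trybuild::TestCases::new();
	// Cases expected to fail, matched against their committed `.stderr` files.
	t.compile_fail("tests/benchmark_ui/*.rs");
	// Cases expected to compile cleanly.
	t.pass("tests/benchmark_ui/pass/*.rs");
}
```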
+ +use frame_benchmarking::v2::*; +use frame_support_test::Config; + +#[benchmarks] +mod benches { + use super::*; + + #[benchmark(pov_mode = Measured, pov_mode = MaxEncodedLen)] + fn bench() { + #[block] + {} + } +} + +fn main() {} diff --git a/substrate/frame/support/test/tests/benchmark_ui/dup_attr_pov_mode.stderr b/substrate/frame/support/test/tests/benchmark_ui/dup_attr_pov_mode.stderr new file mode 100644 index 000000000000..aab91d271a69 --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/dup_attr_pov_mode.stderr @@ -0,0 +1,14 @@ +error: unexpected end of input, `pov_mode` can only be specified once + --> tests/benchmark_ui/dup_attr_pov_mode.rs:25:59 + | +25 | #[benchmark(pov_mode = Measured, pov_mode = MaxEncodedLen)] + | ^ + +error: unused import: `frame_support_test::Config` + --> tests/benchmark_ui/dup_attr_pov_mode.rs:19:5 + | +19 | use frame_support_test::Config; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D unused-imports` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(unused_imports)]` diff --git a/substrate/frame/support/test/tests/benchmark_ui/pass/valid_attr_pov_mode.rs b/substrate/frame/support/test/tests/benchmark_ui/pass/valid_attr_pov_mode.rs new file mode 100644 index 000000000000..35fa1e76ae5a --- /dev/null +++ b/substrate/frame/support/test/tests/benchmark_ui/pass/valid_attr_pov_mode.rs @@ -0,0 +1,74 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use frame_benchmarking::v2::*; +use frame_support_test::Config; + +#[benchmarks] +mod benches { + use super::*; + + #[benchmark(skip_meta, extra, pov_mode = Measured)] + fn bench1() { + #[block] + {} + } + + #[benchmark(pov_mode = Measured, extra, skip_meta)] + fn bench2() { + #[block] + {} + } + + #[benchmark(extra, pov_mode = Measured { + Pallet: Measured, + Pallet::Storage: MaxEncodedLen, + }, skip_meta)] + fn bench3() { + #[block] + {} + } + + #[benchmark(skip_meta, extra, pov_mode = Measured { + Pallet::Storage: MaxEncodedLen, + Pallet::StorageSubKey: Measured, + })] + fn bench4() { + #[block] + {} + } + + #[benchmark(pov_mode = MaxEncodedLen { + Pallet::Storage: Measured, + Pallet::StorageSubKey: Measured + }, extra, skip_meta)] + fn bench5() { + #[block] + {} + } + + #[benchmark(pov_mode = MaxEncodedLen { + Pallet::Storage: Measured, + Pallet::Storage::Nested: Ignored + }, extra, skip_meta)] + fn bench6() { + #[block] + {} + } +} + +fn main() {} diff --git a/substrate/frame/support/test/tests/benchmark_ui/pass/valid_basic.rs b/substrate/frame/support/test/tests/benchmark_ui/pass/valid_basic.rs index 126cee8fa6c5..5899eb3562a2 100644 --- a/substrate/frame/support/test/tests/benchmark_ui/pass/valid_basic.rs +++ b/substrate/frame/support/test/tests/benchmark_ui/pass/valid_basic.rs @@ -22,7 +22,7 @@ use frame_support_test::Config; mod benches { use super::*; - #[benchmark(skip_meta, extra)] + #[benchmark(skip_meta, pov_mode = Measured, extra)] fn bench() { let a = 2 + 2; #[block] diff --git a/substrate/frame/support/test/tests/benchmark_ui/unrecognized_option.stderr b/substrate/frame/support/test/tests/benchmark_ui/unrecognized_option.stderr index bea770b634e2..2eb06e396a85 100644 --- a/substrate/frame/support/test/tests/benchmark_ui/unrecognized_option.stderr +++ b/substrate/frame/support/test/tests/benchmark_ui/unrecognized_option.stderr @@ -1,4 +1,4 @@ -error: expected `extra` or `skip_meta` +error: expected one of: `extra`, `skip_meta`, `pov_mode` --> tests/benchmark_ui/unrecognized_option.rs:26:32 | 26 | #[benchmark(skip_meta, extra, bad)] diff --git a/substrate/frame/whitelist/src/benchmarking.rs b/substrate/frame/whitelist/src/benchmarking.rs index 9d356f09a9d2..7fb5632fc002 100644 --- a/substrate/frame/whitelist/src/benchmarking.rs +++ b/substrate/frame/whitelist/src/benchmarking.rs @@ -20,58 +20,57 @@ #![cfg(feature = "runtime-benchmarks")] use super::*; -use frame_benchmarking::v1::{benchmarks, BenchmarkError}; -use frame_support::{ensure, traits::EnsureOrigin}; +use frame_benchmarking::v2::*; +use frame_support::traits::EnsureOrigin; #[cfg(test)] use crate::Pallet as Whitelist; -benchmarks! 
{ - whitelist_call { +#[benchmarks] +mod benchmarks { + use super::*; + + #[benchmark] + fn whitelist_call() -> Result<(), BenchmarkError> { let origin = T::WhitelistOrigin::try_successful_origin().map_err(|_| BenchmarkError::Weightless)?; let call_hash = Default::default(); - }: _(origin, call_hash) - verify { - ensure!( - WhitelistedCall::::contains_key(call_hash), - "call not whitelisted" - ); - ensure!( - T::Preimages::is_requested(&call_hash), - "preimage not requested" - ); + + #[extrinsic_call] + _(origin as T::RuntimeOrigin, call_hash); + + ensure!(WhitelistedCall::::contains_key(call_hash), "call not whitelisted"); + ensure!(T::Preimages::is_requested(&call_hash), "preimage not requested"); + Ok(()) } - remove_whitelisted_call { + #[benchmark] + fn remove_whitelisted_call() -> Result<(), BenchmarkError> { let origin = T::WhitelistOrigin::try_successful_origin().map_err(|_| BenchmarkError::Weightless)?; let call_hash = Default::default(); Pallet::::whitelist_call(origin.clone(), call_hash) .expect("whitelisting call must be successful"); - }: _(origin, call_hash) - verify { - ensure!( - !WhitelistedCall::::contains_key(call_hash), - "whitelist not removed" - ); - ensure!( - !T::Preimages::is_requested(&call_hash), - "preimage still requested" - ); + + #[extrinsic_call] + _(origin as T::RuntimeOrigin, call_hash); + + ensure!(!WhitelistedCall::::contains_key(call_hash), "whitelist not removed"); + ensure!(!T::Preimages::is_requested(&call_hash), "preimage still requested"); + Ok(()) } // We benchmark with the maximum possible size for a call. // If the resulting weight is too big, maybe it worth having a weight which depends // on the size of the call, with a new witness in parameter. - #[pov_mode = MaxEncodedLen { + #[benchmark(pov_mode = MaxEncodedLen { // Use measured PoV size for the Preimages since we pass in a length witness. Preimage::PreimageFor: Measured - }] - dispatch_whitelisted_call { - // NOTE: we remove `10` because we need some bytes to encode the variants and vec length - let n in 1 .. T::Preimages::MAX_LENGTH as u32 - 10; - + })] + // NOTE: we remove `10` because we need some bytes to encode the variants and vec length + fn dispatch_whitelisted_call( + n: Linear<1, { T::Preimages::MAX_LENGTH as u32 - 10 }>, + ) -> Result<(), BenchmarkError> { let origin = T::DispatchWhitelistedOrigin::try_successful_origin() .map_err(|_| BenchmarkError::Weightless)?; let remark = sp_std::vec![1u8; n as usize]; @@ -86,21 +85,16 @@ benchmarks! { T::Preimages::note(encoded_call.into()).unwrap(); - }: _(origin, call_hash, call_encoded_len, call_weight) - verify { - ensure!( - !WhitelistedCall::::contains_key(call_hash), - "whitelist not removed" - ); - ensure!( - !T::Preimages::is_requested(&call_hash), - "preimage still requested" - ); - } + #[extrinsic_call] + _(origin as T::RuntimeOrigin, call_hash, call_encoded_len, call_weight); - dispatch_whitelisted_call_with_preimage { - let n in 1 .. 10_000; + ensure!(!WhitelistedCall::::contains_key(call_hash), "whitelist not removed"); + ensure!(!T::Preimages::is_requested(&call_hash), "preimage still requested"); + Ok(()) + } + #[benchmark] + fn dispatch_whitelisted_call_with_preimage(n: Linear<1, 10_000>) -> Result<(), BenchmarkError> { let origin = T::DispatchWhitelistedOrigin::try_successful_origin() .map_err(|_| BenchmarkError::Weightless)?; let remark = sp_std::vec![1u8; n as usize]; @@ -110,16 +104,13 @@ benchmarks! 
{ Pallet::::whitelist_call(origin.clone(), call_hash) .expect("whitelisting call must be successful"); - }: _(origin, Box::new(call)) - verify { - ensure!( - !WhitelistedCall::::contains_key(call_hash), - "whitelist not removed" - ); - ensure!( - !T::Preimages::is_requested(&call_hash), - "preimage still requested" - ); + + #[extrinsic_call] + _(origin as T::RuntimeOrigin, Box::new(call)); + + ensure!(!WhitelistedCall::::contains_key(call_hash), "whitelist not removed"); + ensure!(!T::Preimages::is_requested(&call_hash), "preimage still requested"); + Ok(()) } impl_benchmark_test_suite!(Whitelist, crate::mock::new_test_ext(), crate::mock::Test);
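The whitelist changes illustrate the general V1-to-V2 migration pattern: the `name { setup }: _(origin, args) verify { checks }` block becomes a free function, `#[extrinsic_call]` (or `#[block]`) marks the measured part, `let n in A .. B;` ranges become `Linear<A, B>` parameters, and the old `verify` section turns into ordinary assertions after the call, optionally returning `Result<(), BenchmarkError>`. A generic sketch of that pattern; the pallet call `do_something` and storage item `Something` are hypothetical, and the snippet assumes it sits inside a pallet crate that defines them:

```rust
use frame_benchmarking::v2::*;
use frame_system::RawOrigin;

#[benchmarks]
mod benchmarks {
	use super::*;

	#[benchmark]
	fn do_something(n: Linear<1, 100>) -> Result<(), BenchmarkError> {
		// Setup runs outside the measured part.
		let caller: T::AccountId = whitelisted_caller();

		// Only this call is measured; `_` resolves to the benchmark's name,
		// so it dispatches the (hypothetical) pallet call `do_something`.
		#[extrinsic_call]
		_(RawOrigin::Signed(caller.clone()), n);

		// Post-conditions replace the old `verify` block.
		assert!(Something::<T>::contains_key(&caller));
		Ok(())
	}
}
```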