Skip to content

Commit

Permalink
Merge branch 'master' into dependabot/cargo/trybuild-1.0.82
Browse files Browse the repository at this point in the history
  • Loading branch information
jsdw authored Jul 24, 2023
2 parents 612c1dc + 560432b commit c6031bf
Show file tree
Hide file tree
Showing 3 changed files with 111 additions and 9 deletions.
16 changes: 12 additions & 4 deletions subxt/src/rpc/rpc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ use crate::{error::Error, utils::PhantomDataSendSync, Config, Metadata};

use super::{
rpc_params,
types::{self, ChainHeadEvent, FollowEvent},
types::{self, ChainHeadEvent, ChainHeadStorageEvent, FollowEvent, StorageQuery},
RpcClient, RpcClientT, Subscription,
};

Expand Down Expand Up @@ -555,14 +555,22 @@ impl<T: Config> Rpc<T> {
&self,
subscription_id: String,
hash: T::Hash,
key: &[u8],
items: Vec<StorageQuery<&[u8]>>,
child_key: Option<&[u8]>,
) -> Result<Subscription<ChainHeadEvent<Option<String>>>, Error> {
) -> Result<Subscription<ChainHeadStorageEvent<Option<String>>>, Error> {
let items: Vec<StorageQuery<String>> = items
.into_iter()
.map(|item| StorageQuery {
key: to_hex(item.key),
query_type: item.query_type,
})
.collect();

let subscription = self
.client
.subscribe(
"chainHead_unstable_storage",
rpc_params![subscription_id, hash, to_hex(key), child_key.map(to_hex)],
rpc_params![subscription_id, hash, items, child_key.map(to_hex)],
"chainHead_unstable_stopStorage",
)
.await?;
Expand Down
81 changes: 80 additions & 1 deletion subxt/src/rpc/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -489,7 +489,7 @@ pub struct ChainHeadResult<T> {
pub result: T,
}

/// The event generated by the body / call / storage methods.
/// The event generated by the body and call methods.
#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "event")]
Expand Down Expand Up @@ -728,6 +728,85 @@ mod as_string {
}
}

/// A storage item received as parameter to the `chainHead_unstable_storage` method.
///
/// `Key` is generic so callers can build queries from borrowed bytes (`&[u8]`)
/// and the RPC layer can convert them to hex `String`s before submission.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StorageQuery<Key> {
    /// The provided key.
    pub key: Key,
    /// The type of the storage query.
    // Serialized as "type" because `type` is a reserved keyword in Rust.
    #[serde(rename = "type")]
    pub query_type: StorageQueryType,
}

/// The type of the storage query.
// Serialized in kebab-case, e.g. `closest-descendant-merkle-value`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum StorageQueryType {
    /// Fetch the value of the provided key.
    Value,
    /// Fetch the hash of the value of the provided key.
    Hash,
    /// Fetch the closest descendant merkle value.
    ClosestDescendantMerkleValue,
    /// Fetch the values of all descendants of the provided key.
    DescendantsValues,
    /// Fetch the hashes of the values of all descendants of the provided key.
    DescendantsHashes,
}

/// A single storage result, pairing the queried key with its outcome.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StorageResult<T> {
    /// The hex-encoded key of the result.
    pub key: String,
    /// The result of the query.
    // Flattened so the `StorageResultType` variant field (e.g. "value" or
    // "hash") appears directly alongside "key" in the serialized object.
    #[serde(flatten)]
    pub result: StorageResultType<T>,
}

/// The type of a storage result, carrying the payload produced by the
/// corresponding [`StorageQueryType`] query.
// Serialized in kebab-case, e.g. `closest-descendant-merkle-value`.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum StorageResultType<T> {
    /// The value of the provided key.
    Value(T),
    /// The hash of the value of the provided key.
    Hash(T),
    /// The closest descendant merkle value.
    ClosestDescendantMerkleValue(T),
}

/// The event generated by the `chainHead_unstable_storage` method.
///
/// Deserialized from objects tagged with an "event" field (camel-/kebab-case
/// per the serde attributes below); only `Deserialize` is derived since these
/// events are received from the node, never sent.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(rename_all = "kebab-case")]
#[serde(tag = "event")]
pub enum ChainHeadStorageEvent<T> {
    /// The request produced one or more result items.
    Items(ItemsEvent<T>),
    /// The server is pausing the stream of items until the client signals it
    /// to continue.
    // NOTE(review): presumably resumed via a `continue` RPC call — confirm
    // against the chainHead RPC spec; the original doc here was a copy-paste
    // of the `Items` description.
    WaitForContinue,
    /// The request completed successfully and all the results were provided.
    Done,
    /// The resources requested are inaccessible.
    ///
    /// Resubmitting the request later might succeed.
    Inaccessible,
    /// An error occurred. This is definitive.
    Error(ErrorEvent),
    /// The provided subscription ID is stale or invalid.
    Disjoint,
}

/// Payload of [`ChainHeadStorageEvent::Items`]: a batch of storage results
/// produced by a single request.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ItemsEvent<T> {
    /// The resulting items.
    pub items: Vec<StorageResult<T>>,
}

#[cfg(test)]
mod test {
use super::*;
Expand Down
23 changes: 19 additions & 4 deletions testing/integration-tests/src/full_client/client/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@ use sp_core::storage::well_known_keys;
use subxt::{
error::{DispatchError, Error, TokenError},
rpc::types::{
ChainHeadEvent, DryRunResult, DryRunResultBytes, FollowEvent, Initialized, RuntimeEvent,
RuntimeVersionEvent,
ChainHeadEvent, ChainHeadStorageEvent, DryRunResult, DryRunResultBytes, FollowEvent,
Initialized, RuntimeEvent, RuntimeVersionEvent, StorageQuery, StorageQueryType,
StorageResultType,
},
utils::AccountId32,
};
Expand Down Expand Up @@ -544,14 +545,28 @@ async fn chainhead_unstable_storage() {
let addr = node_runtime::storage().system().account(alice);
let addr_bytes = api.storage().address_bytes(&addr).unwrap();

let items = vec![StorageQuery {
key: addr_bytes.as_slice(),
query_type: StorageQueryType::Value,
}];
let mut sub = api
.rpc()
.chainhead_unstable_storage(sub_id, hash, &addr_bytes, None)
.chainhead_unstable_storage(sub_id, hash, items, None)
.await
.unwrap();
let event = sub.next().await.unwrap().unwrap();

assert_matches!(event, ChainHeadEvent::<Option<String>>::Done(done) if done.result.is_some());
match event {
ChainHeadStorageEvent::<Option<String>>::Items(event) => {
assert_eq!(event.items.len(), 1);
assert_eq!(event.items[0].key, format!("0x{}", hex::encode(addr_bytes)));
assert_matches!(&event.items[0].result, StorageResultType::Value(value) if value.is_some());
}
_ => panic!("unexpected ChainHeadStorageEvent"),
};

let event = sub.next().await.unwrap().unwrap();
assert_matches!(event, ChainHeadStorageEvent::<Option<String>>::Done);
}

#[tokio::test]
Expand Down

0 comments on commit c6031bf

Please sign in to comment.