Skip to content

Commit

Permalink
chore: update borsh to 1.0.0-alpha.4 (a lot of try_to_vec deprecation)
Browse files Browse the repository at this point in the history
  • Loading branch information
dj8yf0μl committed Sep 7, 2023
1 parent 893ccba commit 5efb2ef
Show file tree
Hide file tree
Showing 69 changed files with 303 additions and 311 deletions.
60 changes: 30 additions & 30 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ bitflags = "1.2"
blake2 = "0.9.1"
bn = { package = "zeropool-bn", version = "0.5.11" }
bolero = "0.8.0"
borsh = { version = "1.0.0-alpha.3", features = ["derive", "rc"] }
borsh = { version = "1.0.0-alpha.4", features = ["derive", "rc"] }
bs58 = "0.4"
bytes = "1"
bytesize = { version = "1.1", features = ["serde"] }
Expand Down
24 changes: 12 additions & 12 deletions chain/chain/src/chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ use crate::validate::{
};
use crate::{byzantine_assert, create_light_client_block_view, Doomslug};
use crate::{metrics, DoomslugThresholdMode};
use borsh::BorshSerialize;

use chrono::Duration;
use crossbeam_channel::{unbounded, Receiver, Sender};
use delay_detector::DelayDetector;
Expand Down Expand Up @@ -1283,8 +1283,8 @@ impl Chain {
let other_header = self.get_block_header(block_hashes.iter().next().unwrap())?;

challenges.push(ChallengeBody::BlockDoubleSign(BlockDoubleSign {
left_block_header: header.try_to_vec().expect("Failed to serialize"),
right_block_header: other_header.try_to_vec().expect("Failed to serialize"),
left_block_header: borsh::to_vec(&header).expect("Failed to serialize"),
right_block_header: borsh::to_vec(&other_header).expect("Failed to serialize"),
}));
}
}
Expand Down Expand Up @@ -1629,7 +1629,7 @@ impl Chain {
if let Some(encoded_chunk) = self.store.is_invalid_chunk(&chunk_header.chunk_hash())? {
let merkle_paths = Block::compute_chunk_headers_root(block.chunks().iter()).1;
let chunk_proof = ChunkProofs {
block_header: block.header().try_to_vec().expect("Failed to serialize"),
block_header: borsh::to_vec(&block.header()).expect("Failed to serialize"),
merkle_proof: merkle_paths[shard_id].clone(),
chunk: MaybeEncodedShardChunk::Encoded(EncodedShardChunk::clone(
&encoded_chunk,
Expand Down Expand Up @@ -3021,7 +3021,7 @@ impl Chain {
sync_hash: CryptoHash,
) -> Result<ShardStateSyncResponseHeader, Error> {
// Check cache
let key = StateHeaderKey(shard_id, sync_hash).try_to_vec()?;
let key = borsh::to_vec(&StateHeaderKey(shard_id, sync_hash))?;
if let Ok(Some(header)) = self.store.store().get_ser(DBCol::StateHeaders, &key) {
return Ok(header);
}
Expand Down Expand Up @@ -3050,7 +3050,7 @@ impl Chain {
%sync_hash)
.entered();
// Check cache
let key = StatePartKey(sync_hash, shard_id, part_id).try_to_vec()?;
let key = borsh::to_vec(&StatePartKey(sync_hash, shard_id, part_id))?;
if let Ok(Some(state_part)) = self.store.store().get(DBCol::StateParts, &key) {
return Ok(state_part.into());
}
Expand Down Expand Up @@ -3258,7 +3258,7 @@ impl Chain {

// Saving the header data.
let mut store_update = self.store.store().store_update();
let key = StateHeaderKey(shard_id, sync_hash).try_to_vec()?;
let key = borsh::to_vec(&StateHeaderKey(shard_id, sync_hash))?;
store_update.set_ser(DBCol::StateHeaders, &key, &shard_state_header)?;
store_update.commit()?;

Expand Down Expand Up @@ -3293,7 +3293,7 @@ impl Chain {

// Saving the part data.
let mut store_update = self.store.store().store_update();
let key = StatePartKey(sync_hash, shard_id, part_id.idx).try_to_vec()?;
let key = borsh::to_vec(&StatePartKey(sync_hash, shard_id, part_id.idx))?;
store_update.set(DBCol::StateParts, &key, data);
store_update.commit()?;
Ok(())
Expand Down Expand Up @@ -3752,8 +3752,8 @@ impl Chain {
// .unwrap();
// let partial_state = apply_result.proof.unwrap().nodes;
Ok(ChunkState {
prev_block_header: prev_block.header().try_to_vec()?,
block_header: block.header().try_to_vec()?,
prev_block_header: borsh::to_vec(&prev_block.header())?,
block_header: borsh::to_vec(&block.header())?,
prev_merkle_proof: prev_merkle_proofs[chunk_shard_id as usize].clone(),
merkle_proof: merkle_proofs[chunk_shard_id as usize].clone(),
prev_chunk,
Expand Down Expand Up @@ -3999,7 +3999,7 @@ impl Chain {
if !validate_transactions_order(transactions) {
let merkle_paths = Block::compute_chunk_headers_root(block.chunks().iter()).1;
let chunk_proof = ChunkProofs {
block_header: block.header().try_to_vec().expect("Failed to serialize"),
block_header: borsh::to_vec(&block.header()).expect("Failed to serialize"),
merkle_proof: merkle_paths[shard_id as usize].clone(),
chunk: MaybeEncodedShardChunk::Decoded(chunk),
};
Expand Down Expand Up @@ -4850,7 +4850,7 @@ impl Chain {
shard_receipts
.into_iter()
.map(|(i, rs)| {
let bytes = (i, rs).try_to_vec().unwrap();
let bytes = borsh::to_vec(&(i, rs)).unwrap();
hash(&bytes)
})
.collect()
Expand Down
Loading

0 comments on commit 5efb2ef

Please sign in to comment.