chore: update borsh dependency (#9432)
The [borsh-rs 1.0.0 release](https://github.com/near/borsh-rs/releases/tag/borsh-v1.0.0) is a major milestone, and it makes sense to update nearcore to use it.

Here is the migration guide that was captured along the way:
https://github.com/near/borsh-rs/blob/borsh-v1.0.0/docs/migration_guides/v0.10.2_to_v1.0.0_nearcore.md
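
For orientation, most of the mechanical churn visible in the diff below comes from one API change: the `BorshSerialize::try_to_vec` method is gone in borsh 1.0, and call sites move to the free function `borsh::to_vec`. A minimal sketch of the before/after, using a hypothetical `Example` type rather than actual nearcore code:

```rust
use borsh::{BorshDeserialize, BorshSerialize};

// Hypothetical type for illustration only; not a nearcore type.
#[derive(BorshSerialize, BorshDeserialize, Debug, PartialEq)]
struct Example {
    id: u64,
    name: String,
}

fn main() -> std::io::Result<()> {
    let value = Example { id: 7, name: "demo".to_string() };

    // borsh 0.10.x: `let bytes = value.try_to_vec()?;`
    // borsh 1.0.0: the method is replaced by a free function taking a reference.
    let bytes = borsh::to_vec(&value)?;

    // Deserialization via `try_from_slice` is unchanged.
    let decoded = Example::try_from_slice(&bytes)?;
    assert_eq!(decoded, value);
    Ok(())
}
```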
dj8yfo authored Oct 10, 2023
1 parent 492d189 commit 3a56da2
Showing 79 changed files with 460 additions and 429 deletions.
157 changes: 95 additions & 62 deletions Cargo.lock

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -110,7 +110,7 @@ bitflags = "1.2"
blake2 = "0.9.1"
bn = { package = "zeropool-bn", version = "0.5.11" }
bolero = "0.8.0"
borsh = { version = "0.10.2", features = ["rc"] }
borsh = { version = "1.0.0", features = ["derive", "rc"] }
bs58 = "0.4"
bytes = "1"
bytesize = { version = "1.1", features = ["serde"] }
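The Cargo.toml change above reflects that borsh 1.0 puts its derive macros behind an explicit `derive` feature, so it is enabled alongside the existing `rc` feature (Borsh impls for `Rc`/`Arc`). A small sketch of what each feature provides, again with hypothetical types rather than nearcore ones:

```rust
use std::sync::Arc;

use borsh::{BorshDeserialize, BorshSerialize};

// "derive" feature: provides the BorshSerialize/BorshDeserialize derive macros,
// which nearcore types use pervasively.
#[derive(BorshSerialize, BorshDeserialize)]
struct Plain {
    id: u64,
    tag: String,
}

// "rc" feature: provides Borsh impls for Rc<T>/Arc<T>, so reference-counted
// fields can be serialized.
#[derive(BorshSerialize)]
struct Shared {
    config: Arc<Plain>,
}

fn roundtrip_plain(value: &Plain) -> std::io::Result<Plain> {
    let bytes = borsh::to_vec(value)?;
    Plain::try_from_slice(&bytes)
}
```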
3 changes: 1 addition & 2 deletions chain/chain/src/blocks_delay_tracker.rs
@@ -1,7 +1,6 @@
use chrono::DateTime;
use near_epoch_manager::EpochManagerAdapter;
use near_primitives::block::{Block, Tip};
use near_primitives::borsh::maybestd::collections::hash_map::Entry;
use near_primitives::hash::CryptoHash;
use near_primitives::sharding::{ChunkHash, ShardChunkHeader};
use near_primitives::static_clock::StaticClock;
@@ -10,7 +9,7 @@ use near_primitives::views::{
BlockProcessingInfo, BlockProcessingStatus, ChainProcessingInfo, ChunkProcessingInfo,
ChunkProcessingStatus, DroppedReason,
};
use std::collections::{BTreeMap, HashMap};
use std::collections::{hash_map::Entry, BTreeMap, HashMap};
use std::mem;
use std::time::Instant;
use tracing::error;
24 changes: 12 additions & 12 deletions chain/chain/src/chain.rs
@@ -20,7 +20,7 @@ use crate::validate::{
};
use crate::{byzantine_assert, create_light_client_block_view, Doomslug};
use crate::{metrics, DoomslugThresholdMode};
use borsh::BorshSerialize;

use chrono::Duration;
use crossbeam_channel::{unbounded, Receiver, Sender};
use itertools::Itertools;
@@ -1350,8 +1350,8 @@ impl Chain {
let other_header = self.get_block_header(block_hashes.iter().next().unwrap())?;

challenges.push(ChallengeBody::BlockDoubleSign(BlockDoubleSign {
left_block_header: header.try_to_vec().expect("Failed to serialize"),
right_block_header: other_header.try_to_vec().expect("Failed to serialize"),
left_block_header: borsh::to_vec(&header).expect("Failed to serialize"),
right_block_header: borsh::to_vec(&other_header).expect("Failed to serialize"),
}));
}
}
@@ -1696,7 +1696,7 @@ impl Chain {
if let Some(encoded_chunk) = self.store.is_invalid_chunk(&chunk_header.chunk_hash())? {
let merkle_paths = Block::compute_chunk_headers_root(block.chunks().iter()).1;
let chunk_proof = ChunkProofs {
block_header: block.header().try_to_vec().expect("Failed to serialize"),
block_header: borsh::to_vec(&block.header()).expect("Failed to serialize"),
merkle_proof: merkle_paths[shard_id].clone(),
chunk: MaybeEncodedShardChunk::Encoded(EncodedShardChunk::clone(
&encoded_chunk,
@@ -3089,7 +3089,7 @@ impl Chain {
sync_hash: CryptoHash,
) -> Result<ShardStateSyncResponseHeader, Error> {
// Check cache
let key = StateHeaderKey(shard_id, sync_hash).try_to_vec()?;
let key = borsh::to_vec(&StateHeaderKey(shard_id, sync_hash))?;
if let Ok(Some(header)) = self.store.store().get_ser(DBCol::StateHeaders, &key) {
return Ok(header);
}
@@ -3118,7 +3118,7 @@ impl Chain {
%sync_hash)
.entered();
// Check cache
let key = StatePartKey(sync_hash, shard_id, part_id).try_to_vec()?;
let key = borsh::to_vec(&StatePartKey(sync_hash, shard_id, part_id))?;
if let Ok(Some(state_part)) = self.store.store().get(DBCol::StateParts, &key) {
return Ok(state_part.into());
}
@@ -3330,7 +3330,7 @@ impl Chain {

// Saving the header data.
let mut store_update = self.store.store().store_update();
let key = StateHeaderKey(shard_id, sync_hash).try_to_vec()?;
let key = borsh::to_vec(&StateHeaderKey(shard_id, sync_hash))?;
store_update.set_ser(DBCol::StateHeaders, &key, &shard_state_header)?;
store_update.commit()?;

@@ -3365,7 +3365,7 @@ impl Chain {

// Saving the part data.
let mut store_update = self.store.store().store_update();
let key = StatePartKey(sync_hash, shard_id, part_id.idx).try_to_vec()?;
let key = borsh::to_vec(&StatePartKey(sync_hash, shard_id, part_id.idx))?;
store_update.set(DBCol::StateParts, &key, data);
store_update.commit()?;
Ok(())
@@ -3822,8 +3822,8 @@ impl Chain {
// .unwrap();
// let partial_state = apply_result.proof.unwrap().nodes;
Ok(ChunkState {
prev_block_header: prev_block.header().try_to_vec()?,
block_header: block.header().try_to_vec()?,
prev_block_header: borsh::to_vec(&prev_block.header())?,
block_header: borsh::to_vec(&block.header())?,
prev_merkle_proof: prev_merkle_proofs[chunk_shard_id as usize].clone(),
merkle_proof: merkle_proofs[chunk_shard_id as usize].clone(),
prev_chunk,
@@ -4069,7 +4069,7 @@ impl Chain {
if !validate_transactions_order(transactions) {
let merkle_paths = Block::compute_chunk_headers_root(block.chunks().iter()).1;
let chunk_proof = ChunkProofs {
block_header: block.header().try_to_vec().expect("Failed to serialize"),
block_header: borsh::to_vec(&block.header()).expect("Failed to serialize"),
merkle_proof: merkle_paths[shard_id as usize].clone(),
chunk: MaybeEncodedShardChunk::Decoded(chunk),
};
@@ -4922,7 +4922,7 @@ impl Chain {
shard_receipts
.into_iter()
.map(|(i, rs)| {
let bytes = (i, rs).try_to_vec().unwrap();
let bytes = borsh::to_vec(&(i, rs)).unwrap();
hash(&bytes)
})
.collect()
14 changes: 7 additions & 7 deletions chain/chain/src/store.rs
@@ -881,7 +881,7 @@ impl ChainStore {
shard_id: ShardId,
block_hash: CryptoHash,
) -> Result<ShardStateSyncResponseHeader, Error> {
let key = StateHeaderKey(shard_id, block_hash).try_to_vec()?;
let key = borsh::to_vec(&StateHeaderKey(shard_id, block_hash))?;
match self.store.get_ser(DBCol::StateHeaders, &key) {
Ok(Some(header)) => Ok(header),
_ => Err(Error::Other("Cannot get shard_state_header".into())),
@@ -2400,7 +2400,7 @@ impl<'a> ChainStoreUpdate<'a> {
let state_num_parts =
get_num_state_parts(shard_state_header.state_root_node().memory_usage);
self.gc_col_state_parts(block_hash, shard_id, state_num_parts)?;
let key = StateHeaderKey(shard_id, block_hash).try_to_vec()?;
let key = borsh::to_vec(&StateHeaderKey(shard_id, block_hash))?;
self.gc_col(DBCol::StateHeaders, &key);
}
}
@@ -2500,7 +2500,7 @@ impl<'a> ChainStoreUpdate<'a> {
let state_num_parts =
get_num_state_parts(shard_state_header.state_root_node().memory_usage);
self.gc_col_state_parts(block_hash, shard_id, state_num_parts)?;
let state_header_key = StateHeaderKey(shard_id, block_hash).try_to_vec()?;
let state_header_key = borsh::to_vec(&StateHeaderKey(shard_id, block_hash))?;
self.gc_col(DBCol::StateHeaders, &state_header_key);
}

@@ -2610,7 +2610,7 @@ impl<'a> ChainStoreUpdate<'a> {
num_parts: u64,
) -> Result<(), Error> {
for part_id in 0..num_parts {
let key = StatePartKey(sync_hash, shard_id, part_id).try_to_vec()?;
let key = borsh::to_vec(&StatePartKey(sync_hash, shard_id, part_id))?;
self.gc_col(DBCol::StateParts, &key);
}
Ok(())
@@ -3036,7 +3036,7 @@ impl<'a> ChainStoreUpdate<'a> {

// Increase transaction refcounts for all included txs
for tx in chunk.transactions().iter() {
let bytes = tx.try_to_vec().expect("Borsh cannot fail");
let bytes = borsh::to_vec(&tx).expect("Borsh cannot fail");
store_update.increment_refcount(
DBCol::Transactions,
tx.get_hash().as_ref(),
@@ -3046,7 +3046,7 @@ impl<'a> ChainStoreUpdate<'a> {

// Increase receipt refcounts for all included receipts
for receipt in chunk.prev_outgoing_receipts().iter() {
let bytes = receipt.try_to_vec().expect("Borsh cannot fail");
let bytes = borsh::to_vec(&receipt).expect("Borsh cannot fail");
store_update.increment_refcount(
DBCol::Receipts,
receipt.get_hash().as_ref(),
@@ -3116,7 +3116,7 @@ impl<'a> ChainStoreUpdate<'a> {
)?;
}
for (receipt_id, shard_id) in self.chain_store_cache_update.receipt_id_to_shard_id.iter() {
let data = shard_id.try_to_vec()?;
let data = borsh::to_vec(&shard_id)?;
store_update.increment_refcount(DBCol::ReceiptIdToShardId, receipt_id.as_ref(), &data);
}
for (block_hash, refcount) in self.chain_store_cache_update.block_refcounts.iter() {
4 changes: 2 additions & 2 deletions chain/chain/src/store_validator/validate.rs
@@ -1,5 +1,5 @@
use crate::StoreValidator;
use borsh::BorshSerialize;

use near_primitives::block::{Block, BlockHeader, Tip};
use near_primitives::epoch_manager::block_info::BlockInfo;
use near_primitives::epoch_manager::epoch_info::EpochInfo;
@@ -854,7 +854,7 @@ pub(crate) fn state_part_header_exists(
) -> Result<(), StoreValidatorError> {
let StatePartKey(block_hash, shard_id, part_id) = *key;
let state_header_key = unwrap_or_err!(
StateHeaderKey(shard_id, block_hash).try_to_vec(),
borsh::to_vec(&StateHeaderKey(shard_id, block_hash)),
"Can't serialize StateHeaderKey"
);
let header = unwrap_or_err_db!(
17 changes: 5 additions & 12 deletions chain/chain/src/test_utils/kv_runtime.rs
@@ -349,7 +349,7 @@ impl KeyValueRuntime {
receipt_nonces: HashSet::default(),
tx_nonces: HashSet::default(),
};
let data = kv_state.try_to_vec().unwrap();
let data = borsh::to_vec(&kv_state).unwrap();
let data_len = data.len() as u64;
// StateRoot is actually faked here.
// We cannot do any reasonable validations of it in test_utils.
@@ -1158,7 +1158,7 @@ impl RuntimeAdapter for KeyValueRuntime {
}
}

let data = state.try_to_vec()?;
let data = borsh::to_vec(&state)?;
let state_size = data.len() as u64;
let state_root = hash(&data);
self.state.write().unwrap().insert(state_root, state);
@@ -1286,7 +1286,7 @@ impl RuntimeAdapter for KeyValueRuntime {
return Ok(vec![]);
}
let state = self.state.read().unwrap().get(state_root).unwrap().clone();
let data = state.try_to_vec().expect("should never fall");
let data = borsh::to_vec(&state).expect("should never fall");
Ok(data)
}

@@ -1308,7 +1308,7 @@ impl RuntimeAdapter for KeyValueRuntime {
}
let state = KVState::try_from_slice(data).unwrap();
self.state.write().unwrap().insert(*state_root, state.clone());
let data = state.try_to_vec()?;
let data = borsh::to_vec(&state)?;
let state_size = data.len() as u64;
self.state_size.write().unwrap().insert(*state_root, state_size);
Ok(())
@@ -1320,14 +1320,7 @@ impl RuntimeAdapter for KeyValueRuntime {
_block_hash: &CryptoHash,
state_root: &StateRoot,
) -> Result<StateRootNode, Error> {
let data = self
.state
.read()
.unwrap()
.get(state_root)
.unwrap()
.clone()
.try_to_vec()
let data = borsh::to_vec(&self.state.read().unwrap().get(state_root).unwrap().clone())
.expect("should never fall")
.into();
let memory_usage = *self.state_size.read().unwrap().get(state_root).unwrap();
4 changes: 2 additions & 2 deletions chain/client/src/debug.rs
@@ -2,7 +2,7 @@
//! without backwards compatibility.
use crate::ClientActor;
use actix::{Context, Handler};
use borsh::BorshSerialize;

use itertools::Itertools;
use near_chain::crypto_hash_timer::CryptoHashTimer;
use near_chain::{near_chain_primitives, Chain, ChainStoreAccess};
@@ -255,7 +255,7 @@ impl ClientActor {

let state_header_exists: Vec<bool> = (0..block.chunks().len())
.map(|shard_id| {
let key = StateHeaderKey(shard_id as u64, *block.hash()).try_to_vec();
let key = borsh::to_vec(&StateHeaderKey(shard_id as u64, *block.hash()));
match key {
Ok(key) => {
matches!(
4 changes: 2 additions & 2 deletions chain/client/src/sync_jobs_actor.rs
@@ -1,6 +1,6 @@
use crate::ClientActor;
use actix::AsyncContext;
use borsh::BorshSerialize;

use near_chain::chain::{
do_apply_chunks, ApplyStatePartsRequest, ApplyStatePartsResponse, BlockCatchUpRequest,
BlockCatchUpResponse,
@@ -53,7 +53,7 @@ impl SyncJobsActor {

let shard_id = msg.shard_uid.shard_id as ShardId;
for part_id in 0..msg.num_parts {
let key = StatePartKey(msg.sync_hash, shard_id, part_id).try_to_vec()?;
let key = borsh::to_vec(&StatePartKey(msg.sync_hash, shard_id, part_id))?;
let part = store.get(DBCol::StateParts, &key)?.unwrap();

msg.runtime_adapter.apply_state_part(
8 changes: 4 additions & 4 deletions chain/client/src/tests/catching_up.rs
@@ -266,10 +266,10 @@ fn test_catchup_receipts_sync_common(wait_till: u64, send: u64, sync_hold: bool)
peer_id: peer_id.clone(),
};
if !seen_hashes_with_state
.contains(&hash_func(&srs.try_to_vec().unwrap()))
.contains(&hash_func(&borsh::to_vec(&srs).unwrap()))
{
seen_hashes_with_state
.insert(hash_func(&srs.try_to_vec().unwrap()));
.insert(hash_func(&borsh::to_vec(&srs).unwrap()));
return (NetworkResponses::NoResponse.into(), false);
}
}
@@ -289,10 +289,10 @@ fn test_catchup_receipts_sync_common(wait_till: u64, send: u64, sync_hold: bool)
peer_id: peer_id.clone(),
};
if !seen_hashes_with_state
.contains(&hash_func(&srs.try_to_vec().unwrap()))
.contains(&hash_func(&borsh::to_vec(&srs).unwrap()))
{
seen_hashes_with_state
.insert(hash_func(&srs.try_to_vec().unwrap()));
.insert(hash_func(&borsh::to_vec(&srs).unwrap()));
return (NetworkResponses::NoResponse.into(), false);
}
}
10 changes: 5 additions & 5 deletions chain/jsonrpc/jsonrpc-tests/tests/rpc_transactions.rs
@@ -1,7 +1,7 @@
use std::sync::{Arc, Mutex};

use actix::{Actor, System};
use borsh::BorshSerialize;

use futures::{future, FutureExt, TryFutureExt};

use near_actix_test_utils::run_actix;
@@ -44,7 +44,7 @@ fn test_send_tx_async() {
100,
block_hash,
);
let bytes = tx.try_to_vec().unwrap();
let bytes = borsh::to_vec(&tx).unwrap();
let tx_hash = tx.get_hash().to_string();
*tx_hash2_1.lock().unwrap() = Some(tx.get_hash());
client
@@ -93,7 +93,7 @@ fn test_send_tx_commit() {
100,
block_hash,
);
let bytes = tx.try_to_vec().unwrap();
let bytes = borsh::to_vec(&tx).unwrap();
let result = client.broadcast_tx_commit(to_base64(&bytes)).await.unwrap();
assert_eq!(
result.final_execution_outcome.unwrap().into_outcome().status,
@@ -146,7 +146,7 @@ fn test_expired_tx() {
100,
block_hash,
);
let bytes = tx.try_to_vec().unwrap();
let bytes = borsh::to_vec(&tx).unwrap();
actix::spawn(
client
.broadcast_tx_commit(to_base64(&bytes))
@@ -190,7 +190,7 @@ fn test_replay_protection() {
100,
hash(&[1]),
);
let bytes = tx.try_to_vec().unwrap();
let bytes = borsh::to_vec(&tx).unwrap();
if let Ok(_) = client.broadcast_tx_commit(to_base64(&bytes)).await {
panic!("transaction should not succeed");
}
6 changes: 3 additions & 3 deletions chain/jsonrpc/src/api/transactions.rs
@@ -68,7 +68,7 @@ mod tests {
use near_jsonrpc_primitives::types::transactions::{
RpcBroadcastTransactionRequest, RpcTransactionStatusCommonRequest,
};
use near_primitives::borsh::BorshSerialize;
use near_primitives::borsh;
use near_primitives::hash::CryptoHash;
use near_primitives::serialize::to_base64;
use near_primitives::transaction::SignedTransaction;
@@ -85,7 +85,7 @@
fn test_serialize_tx_status_params_as_binary_signed_tx() {
let tx_hash = CryptoHash::new();
let tx = SignedTransaction::empty(tx_hash);
let bytes_tx = tx.try_to_vec().unwrap();
let bytes_tx = borsh::to_vec(&tx).unwrap();
let str_tx = to_base64(&bytes_tx);
let params = serde_json::json!([str_tx]);
assert!(RpcTransactionStatusCommonRequest::parse(params).is_ok());
@@ -103,7 +103,7 @@
fn test_serialize_send_tx_params_as_binary_signed_tx() {
let tx_hash = CryptoHash::new();
let tx = SignedTransaction::empty(tx_hash);
let bytes_tx = tx.try_to_vec().unwrap();
let bytes_tx = borsh::to_vec(&tx).unwrap();
let str_tx = to_base64(&bytes_tx);
let params = serde_json::json!([str_tx]);
assert!(RpcBroadcastTransactionRequest::parse(params).is_ok());
2 changes: 1 addition & 1 deletion chain/network/src/network_protocol/edge.rs
@@ -1,10 +1,10 @@
use borsh::{BorshDeserialize, BorshSerialize};
use near_async::time;
use near_crypto::{KeyType, SecretKey, Signature};
use near_primitives::borsh::maybestd::sync::Arc;
use near_primitives::hash::CryptoHash;
use near_primitives::network::PeerId;
use once_cell::sync::Lazy;
use std::sync::Arc;

// We'd treat all nonces that are below this values as 'old style' (without any expiration time).
// And all nonces above this value as new style (that would expire after some time).
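The edge.rs hunk above (like the earlier blocks_delay_tracker.rs one) shows the other recurring migration step: borsh 1.0 drops the `maybestd` re-export module, so std types that were previously reached through `near_primitives::borsh::maybestd` are now imported straight from `std`. A trivial sketch of the import swap:

```rust
// borsh 0.10.x style, via the maybestd re-export:
//     use near_primitives::borsh::maybestd::sync::Arc;
// borsh 1.0 style: take the type directly from std.
use std::sync::Arc;

fn share(bytes: Vec<u8>) -> Arc<Vec<u8>> {
    Arc::new(bytes)
}
```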