Skip to content

Commit dd1b5fd

Browse files
author
Jonas Bostoen
committed
feat: implement Capella upgrade
1 parent 0dc938e commit dd1b5fd

13 files changed

+389
-197
lines changed

Cargo.lock

+292-142
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

+17-16
Original file line numberDiff line numberDiff line change
@@ -9,22 +9,22 @@ edition = "2021"
99
[dependencies]
1010
discv5 = { version = "0.1.0", features = ["libp2p"] }
1111
unsigned-varint = { version = "0.6.0", features = ["codec"] }
12-
types = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
13-
eth2_ssz_types = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
12+
types = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
13+
eth2_ssz_types = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
1414
serde = { version = "1.0.116", features = ["derive"] }
1515
serde_derive = "1"
16-
eth2_ssz = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
17-
eth2_ssz_derive = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
16+
eth2_ssz = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
17+
eth2_ssz_derive = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
1818
slog = { version = "2.5.2", features = ["max_level_trace"] }
19-
lighthouse_version = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
19+
lighthouse_version = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
2020
tokio = { version = "1.14.0", features = ["time", "macros"] }
2121
tokio-stream = "0.1"
22-
futures = "0.3.7"
22+
futures = "0.3.28"
2323
error-chain = "0.12.4"
2424
dirs = "3.0.1"
2525
fnv = "1.0.7"
2626
lazy_static = "1.4.0"
27-
lighthouse_metrics = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
27+
lighthouse_metrics = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
2828
smallvec = "1.6.1"
2929
tokio-io-timeout = "1.1.1"
3030
lru = "0.7.1"
@@ -34,14 +34,14 @@ snap = "1.0.1"
3434
hex = "0.4.2"
3535
tokio-util = { version = "0.6.2", features = ["codec", "compat", "time"] }
3636
tiny-keccak = "2.0.2"
37-
task_executor = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
37+
task_executor = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
3838
rand = "0.8.5"
39-
directory = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
39+
directory = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
4040
regex = "1.5.5"
4141
strum = { version = "0.24.0", features = ["derive"] }
4242
superstruct = "0.5.0"
4343
prometheus-client = "0.18.0"
44-
unused_port = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
44+
unused_port = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
4545
delay_map = "0.1.1"
4646
tracing = { version = "0.1" }
4747
tracing-subscriber = { version = "0.2", features = ["fmt", "env-filter"] }
@@ -64,13 +64,14 @@ features = [
6464

6565
# Some lighthouse patches
6666
[patch.crates-io]
67-
eth2_ssz = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
68-
eth2_ssz_types = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
69-
eth2_serde_utils = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
70-
eth2_hashing = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
67+
eth2_ssz = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
68+
eth2_ssz_types = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
69+
eth2_ssz_derive = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
70+
eth2_serde_utils = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
71+
eth2_hashing = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
7172

72-
tree_hash = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
73-
tree_hash_derive = { git = "https://github.com/sigp/lighthouse", tag = "v3.3.0" }
73+
tree_hash = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
74+
tree_hash_derive = { git = "https://github.com/sigp/lighthouse", tag = "v4.0.1" }
7475

7576
[dev-dependencies]
7677
slog-term = "2.6.0"

examples/main.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@ async fn main() {
2626
.message()
2727
.execution_payload()
2828
.unwrap()
29-
.execution_payload
30-
.block_number;
29+
.execution_payload_ref()
30+
.block_number();
3131

3232
println!(
3333
"Received block {} (source peer: {:?}",

src/config.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -296,7 +296,7 @@ pub fn gossipsub_config(network_load: u8, fork_context: Arc<ForkContext>) -> Gos
296296
match fork_context.current_fork() {
297297
// according to: https://github.com/ethereum/consensus-specs/blob/dev/specs/merge/p2p-interface.md#the-gossip-domain-gossipsub
298298
// the derivation of the message-id remains the same in the merge
299-
ForkName::Altair | ForkName::Merge => {
299+
ForkName::Altair | ForkName::Merge | ForkName::Capella => {
300300
let topic_len_bytes = topic_bytes.len().to_le_bytes();
301301
let mut vec = Vec::with_capacity(
302302
prefix.len() + topic_len_bytes.len() + topic_bytes.len() + message.data.len(),

src/discovery/mod.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -1062,14 +1062,14 @@ mod tests {
10621062
use crate::rpc::methods::{MetaData, MetaDataV2};
10631063
use enr::EnrBuilder;
10641064
use types::{BitVector, MinimalEthSpec, SubnetId};
1065-
use unused_port::unused_udp_port;
1065+
use unused_port::unused_udp4_port;
10661066

10671067
type E = MinimalEthSpec;
10681068

10691069
async fn build_discovery() -> Discovery<E> {
10701070
let keypair = libp2p::identity::Keypair::generate_secp256k1();
10711071
let config = NetworkConfig {
1072-
discovery_port: unused_udp_port().unwrap(),
1072+
discovery_port: unused_udp4_port().unwrap(),
10731073
..Default::default()
10741074
};
10751075
let enr_key: CombinedKey = CombinedKey::from_libp2p(&keypair).unwrap();

src/metrics.rs

-8
Original file line numberDiff line numberDiff line change
@@ -165,11 +165,3 @@ pub fn check_nat() {
165165
inc_counter(&NAT_OPEN);
166166
}
167167
}
168-
169-
pub fn scrape_discovery_metrics() {
170-
let metrics = discv5::metrics::Metrics::from(discv5::Discv5::raw_metrics());
171-
set_float_gauge(&DISCOVERY_REQS, metrics.unsolicited_requests_per_second);
172-
set_gauge(&DISCOVERY_SESSIONS, metrics.active_sessions as i64);
173-
set_gauge(&DISCOVERY_SENT_BYTES, metrics.bytes_sent as i64);
174-
set_gauge(&DISCOVERY_RECV_BYTES, metrics.bytes_recv as i64);
175-
}

src/network/gossipsub_scoring_parameters.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -270,11 +270,11 @@ impl<TSpec: EthSpec> PeerScoreSettings<TSpec> {
270270

271271
let modulo_smaller = max(
272272
1,
273-
smaller_committee_size / self.target_aggregators_per_committee as usize,
273+
smaller_committee_size / self.target_aggregators_per_committee,
274274
);
275275
let modulo_larger = max(
276276
1,
277-
(smaller_committee_size + 1) / self.target_aggregators_per_committee as usize,
277+
(smaller_committee_size + 1) / self.target_aggregators_per_committee,
278278
);
279279

280280
Ok((

src/network/utils.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ fn keypair_from_hex(hex_bytes: &str) -> error::Result<Keypair> {
8888
hex_bytes.to_string()
8989
};
9090

91-
hex::decode(&hex_bytes)
91+
hex::decode(hex_bytes)
9292
.map_err(|e| format!("Failed to parse p2p secret key bytes: {:?}", e).into())
9393
.and_then(keypair_from_bytes)
9494
}

src/rpc/codec/base.rs

+3
Original file line numberDiff line numberDiff line change
@@ -193,14 +193,17 @@ mod tests {
193193
let mut chain_spec = Spec::default_spec();
194194
let altair_fork_epoch = Epoch::new(1);
195195
let merge_fork_epoch = Epoch::new(2);
196+
let capella_fork_epoch = Epoch::new(3);
196197

197198
chain_spec.altair_fork_epoch = Some(altair_fork_epoch);
198199
chain_spec.bellatrix_fork_epoch = Some(merge_fork_epoch);
200+
chain_spec.capella_fork_epoch = Some(capella_fork_epoch);
199201

200202
let current_slot = match fork_name {
201203
ForkName::Base => Slot::new(0),
202204
ForkName::Altair => altair_fork_epoch.start_slot(Spec::slots_per_epoch()),
203205
ForkName::Merge => merge_fork_epoch.start_slot(Spec::slots_per_epoch()),
206+
ForkName::Capella => capella_fork_epoch.start_slot(Spec::slots_per_epoch()),
204207
};
205208
ForkContext::new::<Spec>(current_slot, Hash256::zero(), &chain_spec)
206209
}

src/rpc/codec/ssz_snappy.rs

+22-5
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ use std::sync::Arc;
1717
use tokio_util::codec::{Decoder, Encoder};
1818
use types::{
1919
EthSpec, ForkContext, ForkName, SignedBeaconBlock, SignedBeaconBlockAltair,
20-
SignedBeaconBlockBase, SignedBeaconBlockMerge,
20+
SignedBeaconBlockBase, SignedBeaconBlockCapella, SignedBeaconBlockMerge,
2121
};
2222
use unsigned_varint::codec::Uvi;
2323

@@ -407,6 +407,10 @@ fn context_bytes<T: EthSpec>(
407407
return match **ref_box_block {
408408
// NOTE: If you are adding another fork type here, be sure to modify the
409409
// `fork_context.to_context_bytes()` function to support it as well!
410+
SignedBeaconBlock::Capella { .. } => {
411+
// Capella context being `None` implies that "Capella never happened".
412+
fork_context.to_context_bytes(ForkName::Capella)
413+
}
410414
SignedBeaconBlock::Merge { .. } => {
411415
// Merge context being `None` implies that "merge never happened".
412416
fork_context.to_context_bytes(ForkName::Merge)
@@ -441,7 +445,7 @@ fn handle_length(
441445
// Note: length-prefix of > 10 bytes(uint64) would be a decoding error
442446
match uvi_codec.decode(bytes).map_err(RPCError::from)? {
443447
Some(length) => {
444-
*len = Some(length as usize);
448+
*len = Some(length);
445449
Ok(Some(length))
446450
}
447451
None => Ok(None), // need more bytes to decode length
@@ -586,6 +590,11 @@ fn handle_v2_response<T: EthSpec>(
586590
decoded_buffer,
587591
)?),
588592
)))),
593+
ForkName::Capella => Ok(Some(RPCResponse::BlocksByRange(Arc::new(
594+
SignedBeaconBlock::Capella(SignedBeaconBlockCapella::from_ssz_bytes(
595+
decoded_buffer,
596+
)?),
597+
)))),
589598
},
590599
Protocol::BlocksByRoot => match fork_name {
591600
ForkName::Altair => Ok(Some(RPCResponse::BlocksByRoot(Arc::new(
@@ -601,6 +610,11 @@ fn handle_v2_response<T: EthSpec>(
601610
decoded_buffer,
602611
)?),
603612
)))),
613+
ForkName::Capella => Ok(Some(RPCResponse::BlocksByRoot(Arc::new(
614+
SignedBeaconBlock::Capella(SignedBeaconBlockCapella::from_ssz_bytes(
615+
decoded_buffer,
616+
)?),
617+
)))),
604618
},
605619
_ => Err(RPCError::ErrorResponse(
606620
RPCResponseErrorCode::InvalidRequest,
@@ -625,9 +639,9 @@ fn context_bytes_to_fork_name(
625639
)
626640
})
627641
}
642+
628643
#[cfg(test)]
629644
mod tests {
630-
631645
use super::*;
632646
use crate::rpc::{protocol::*, MetaData};
633647
use crate::{
@@ -636,8 +650,8 @@ mod tests {
636650
};
637651
use std::sync::Arc;
638652
use types::{
639-
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockMerge, Epoch, ForkContext,
640-
FullPayload, Hash256, Signature, SignedBeaconBlock, Slot,
653+
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockMerge, EmptyBlock, Epoch,
654+
ForkContext, FullPayload, Hash256, Signature, SignedBeaconBlock, Slot,
641655
};
642656

643657
use snap::write::FrameEncoder;
@@ -650,14 +664,17 @@ mod tests {
650664
let mut chain_spec = Spec::default_spec();
651665
let altair_fork_epoch = Epoch::new(1);
652666
let merge_fork_epoch = Epoch::new(2);
667+
let capella_fork_epoch = Epoch::new(3);
653668

654669
chain_spec.altair_fork_epoch = Some(altair_fork_epoch);
655670
chain_spec.bellatrix_fork_epoch = Some(merge_fork_epoch);
671+
chain_spec.capella_fork_epoch = Some(capella_fork_epoch);
656672

657673
let current_slot = match fork_name {
658674
ForkName::Base => Slot::new(0),
659675
ForkName::Altair => altair_fork_epoch.start_slot(Spec::slots_per_epoch()),
660676
ForkName::Merge => merge_fork_epoch.start_slot(Spec::slots_per_epoch()),
677+
ForkName::Capella => capella_fork_epoch.start_slot(Spec::slots_per_epoch()),
661678
};
662679
ForkContext::new::<Spec>(current_slot, Hash256::zero(), &chain_spec)
663680
}

src/rpc/protocol.rs

+23-4
Original file line numberDiff line numberDiff line change
@@ -21,8 +21,9 @@ use tokio_util::{
2121
compat::{Compat, FuturesAsyncReadCompatExt},
2222
};
2323
use types::{
24-
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockMerge, EthSpec, ForkContext,
25-
ForkName, Hash256, MainnetEthSpec, Signature, SignedBeaconBlock,
24+
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockCapella, BeaconBlockMerge,
25+
EmptyBlock, EthSpec, ForkContext, ForkName, Hash256, MainnetEthSpec, Signature,
26+
SignedBeaconBlock,
2627
};
2728

2829
lazy_static! {
@@ -61,16 +62,28 @@ lazy_static! {
6162
.as_ssz_bytes()
6263
.len();
6364

65+
pub static ref SIGNED_BEACON_BLOCK_CAPELLA_MAX_WITHOUT_PAYLOAD: usize = SignedBeaconBlock::<MainnetEthSpec>::from_block(
66+
BeaconBlock::Capella(BeaconBlockCapella::full(&MainnetEthSpec::default_spec())),
67+
Signature::empty(),
68+
)
69+
.as_ssz_bytes()
70+
.len();
71+
72+
6473
/// The `BeaconBlockMerge` block has an `ExecutionPayload` field which has a max size ~16 GiB for future proofing.
6574
/// We calculate the value from its fields instead of constructing the block and checking the length.
6675
/// Note: This is only the theoretical upper bound. We further bound the max size we receive over the network
6776
/// with `MAX_RPC_SIZE_POST_MERGE`.
6877
pub static ref SIGNED_BEACON_BLOCK_MERGE_MAX: usize =
6978
// Size of a full altair block
7079
*SIGNED_BEACON_BLOCK_ALTAIR_MAX
71-
+ types::ExecutionPayload::<MainnetEthSpec>::max_execution_payload_size() // adding max size of execution payload (~16gb)
80+
+ types::ExecutionPayload::<MainnetEthSpec>::max_execution_payload_merge_size() // adding max size of execution payload (~16gb)
7281
+ ssz::BYTES_PER_LENGTH_OFFSET; // Adding the additional ssz offset for the `ExecutionPayload` field
7382

83+
pub static ref SIGNED_BEACON_BLOCK_CAPELLA_MAX: usize = *SIGNED_BEACON_BLOCK_CAPELLA_MAX_WITHOUT_PAYLOAD
84+
+ types::ExecutionPayload::<MainnetEthSpec>::max_execution_payload_capella_size() // adding max size of execution payload (~16gb)
85+
+ ssz::BYTES_PER_LENGTH_OFFSET; // Adding the additional ssz offset for the `ExecutionPayload` field
86+
7487
pub static ref BLOCKS_BY_ROOT_REQUEST_MIN: usize =
7588
VariableList::<Hash256, MaxRequestBlocks>::from(Vec::<Hash256>::new())
7689
.as_ssz_bytes()
@@ -102,6 +115,7 @@ lazy_static! {
102115
pub(crate) const MAX_RPC_SIZE: usize = 1_048_576; // 1M
103116
/// The maximum bytes that can be sent across the RPC post-merge.
104117
pub(crate) const MAX_RPC_SIZE_POST_MERGE: usize = 10 * 1_048_576; // 10M
118+
pub(crate) const MAX_RPC_SIZE_POST_CAPELLA: usize = 10 * 1_048_576; // 10M
105119
/// The protocol prefix the RPC protocol id.
106120
const PROTOCOL_PREFIX: &str = "/eth2/beacon_chain/req";
107121
/// Time allowed for the first byte of a request to arrive before we time out (Time To First Byte).
@@ -113,8 +127,9 @@ const REQUEST_TIMEOUT: u64 = 15;
113127
/// Returns the maximum bytes that can be sent across the RPC.
114128
pub fn max_rpc_size(fork_context: &ForkContext) -> usize {
115129
match fork_context.current_fork() {
116-
ForkName::Merge => MAX_RPC_SIZE_POST_MERGE,
117130
ForkName::Altair | ForkName::Base => MAX_RPC_SIZE,
131+
ForkName::Merge => MAX_RPC_SIZE_POST_MERGE,
132+
ForkName::Capella => MAX_RPC_SIZE_POST_CAPELLA,
118133
}
119134
}
120135

@@ -135,6 +150,10 @@ pub fn rpc_block_limits_by_fork(current_fork: ForkName) -> RpcLimits {
135150
*SIGNED_BEACON_BLOCK_BASE_MIN, // Base block is smaller than altair and merge blocks
136151
*SIGNED_BEACON_BLOCK_MERGE_MAX, // Merge block is larger than base and altair blocks
137152
),
153+
ForkName::Capella => RpcLimits::new(
154+
*SIGNED_BEACON_BLOCK_BASE_MIN, // Base block is smaller than altair and merge blocks
155+
*SIGNED_BEACON_BLOCK_CAPELLA_MAX, // Capella block is larger than base, altair and merge blocks
156+
),
138157
}
139158
}
140159

src/service.rs

+18-12
Original file line numberDiff line numberDiff line change
@@ -109,17 +109,17 @@ impl Service {
109109
discovery_port: self.cfg.discovery_port,
110110
boot_nodes_enr: self.cfg.boot_enrs,
111111
target_peers: self.cfg.max_peers,
112+
network_load: 5,
112113
..Default::default()
113114
};
114115

115116
// Specify the fork
116-
let fork = ForkName::Merge;
117+
let fork = ForkName::Capella;
117118

118119
// Populate the chain spec
119120
let mainnet_spec = ChainSpec::mainnet();
120121

121-
// Get the merge slot
122-
let merge_slot = mainnet_spec
122+
let capella_slot = mainnet_spec
123123
.fork_epoch(fork)
124124
.unwrap()
125125
.start_slot(MainnetEthSpec::slots_per_epoch());
@@ -130,15 +130,21 @@ impl Service {
130130
.unwrap();
131131

132132
// Build the Capella fork context
133-
let merge_fork_context =
134-
ForkContext::new::<MainnetEthSpec>(merge_slot, genesis_validators_root, &mainnet_spec);
133+
let capella_fork_context = ForkContext::new::<MainnetEthSpec>(
134+
capella_slot,
135+
genesis_validators_root,
136+
&mainnet_spec,
137+
);
138+
139+
let fork_digest = capella_fork_context.to_context_bytes(fork).unwrap();
140+
info!(slot = ?capella_slot, "Fork digest: {:?}", fork_digest);
135141

136142
// Build the network service context
137143
let ctx = Context {
138144
config: &network_config,
139145
enr_fork_id: mainnet_spec
140-
.enr_fork_id::<MainnetEthSpec>(merge_slot, genesis_validators_root),
141-
fork_context: Arc::new(merge_fork_context),
146+
.enr_fork_id::<MainnetEthSpec>(capella_slot, genesis_validators_root),
147+
fork_context: Arc::new(capella_fork_context),
142148
chain_spec: &mainnet_spec,
143149
gossipsub_registry: None,
144150
};
@@ -147,17 +153,17 @@ impl Service {
147153

148154
// Set a random default status (for now)
149155
let mut highest_status = StatusMessage {
150-
fork_digest: [74, 38, 197, 139],
156+
fork_digest,
151157
finalized_root: Hash256::from_str(
152-
"0x6e1fbcfc857c0f849e4570009422edf1d56e29b16098b632fa8bee1b7e7f353c",
158+
"0xb6adca904a0674b7263f8f9518b2a0dff5ee6089ee92890e742d0a64a2cbbb43",
153159
)
154160
.unwrap(),
155-
finalized_epoch: Epoch::new(169022),
161+
finalized_epoch: Epoch::new(194863),
156162
head_root: Hash256::from_str(
157-
"0xf4cc483036e8ec382ccc85639695b0bb12ed11e9c8af2daf5b0c5340b015ca4e",
163+
"0xb41d25d17ef959d15aabdc01df99e2ec94dd600a0ac218d5b79b2a95cb14acad",
158164
)
159165
.unwrap(),
160-
head_slot: Slot::new(5408793),
166+
head_slot: Slot::new(6235698),
161167
};
162168

163169
let mut epoch_blocks: VecDeque<(Slot, Hash256)> = VecDeque::with_capacity(3);

0 commit comments

Comments
 (0)