From 6e71fd7c19c9ea54f546cebfd9aeec192070e1a1 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Tue, 28 Oct 2025 01:20:43 +0000 Subject: [PATCH 01/74] chore: fix typo (#8292) Co-Authored-By: kevaundray --- consensus/types/src/fork_name.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/consensus/types/src/fork_name.rs b/consensus/types/src/fork_name.rs index 338e2b1e759..1d7bf3795b2 100644 --- a/consensus/types/src/fork_name.rs +++ b/consensus/types/src/fork_name.rs @@ -144,7 +144,7 @@ impl ForkName { /// Return the name of the fork immediately prior to the current one. /// - /// If `self` is `ForkName::Base` then `Base` is returned. + /// If `self` is `ForkName::Base` then `None` is returned. pub fn previous_fork(self) -> Option { match self { ForkName::Base => None, From 5840004c368e9b1fa689d2307adda3cef555d749 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:41:08 +0800 Subject: [PATCH 02/74] Add `/lighthouse/custody/info` to Lighthouse book (#8305) Co-Authored-By: Tan Chee Keong --- book/src/api_lighthouse.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/book/src/api_lighthouse.md b/book/src/api_lighthouse.md index 2e694989f93..fa093be3f68 100644 --- a/book/src/api_lighthouse.md +++ b/book/src/api_lighthouse.md @@ -447,6 +447,27 @@ indicating that all states with slots `>= 0` are available, i.e., full state his on the specific meanings of these fields see the docs on [Checkpoint Sync](./advanced_checkpoint_sync.md#how-to-run-an-archived-node). +## `/lighthouse/custody/info` + +Information about data columns custody info. 
+ +```bash +curl "http://localhost:5052/lighthouse/custody/info" | jq +``` + +```json +{ + "earliest_custodied_data_column_slot": "8823040", + "custody_group_count": "4", + "custody_columns": [ + "117", + "72", + "31", + "79" + ] +} +``` + ## `/lighthouse/merge_readiness` Returns the current difficulty and terminal total difficulty of the network. Before [The Merge](https://ethereum.org/en/roadmap/merge/) on 15th September 2022, you will see that the current difficulty is less than the terminal total difficulty, An example is shown below: From f5809aff87959ed39c9d9f5716dbeebb82de7b87 Mon Sep 17 00:00:00 2001 From: Mac L Date: Tue, 28 Oct 2025 08:01:09 +0400 Subject: [PATCH 03/74] Bump `ssz_types` to `v0.12.2` (#8032) https://github.com/sigp/lighthouse/issues/8012 Replace all instances of `VariableList::from` and `FixedVector::from` to their `try_from` variants. While I tried to use proper error handling in most cases, there were certain situations where adding an `expect` for situations where `try_from` can trivially never fail avoided adding a lot of extra complexity. 
Co-Authored-By: Mac L Co-Authored-By: Michael Sproul Co-Authored-By: Michael Sproul --- Cargo.lock | 6 +- Cargo.toml | 2 +- beacon_node/beacon_chain/benches/benches.rs | 7 +- .../src/attestation_verification.rs | 33 +- beacon_node/beacon_chain/src/beacon_chain.rs | 180 +++++++--- .../src/data_column_verification.rs | 12 +- beacon_node/beacon_chain/src/errors.rs | 1 + .../beacon_chain/src/fetch_blobs/tests.rs | 2 +- beacon_node/beacon_chain/src/kzg_utils.rs | 31 +- beacon_node/beacon_chain/src/test_utils.rs | 4 +- .../beacon_chain/tests/block_verification.rs | 12 +- beacon_node/beacon_chain/tests/store_tests.rs | 2 +- .../execution_layer/src/engine_api/http.rs | 74 +++-- .../src/engine_api/json_structures.rs | 310 ++++++++++-------- .../src/engine_api/new_payload_request.rs | 2 +- beacon_node/execution_layer/src/lib.rs | 17 +- .../test_utils/execution_block_generator.rs | 59 ++-- .../src/test_utils/handle_rpc.rs | 190 ++++++----- .../src/test_utils/mock_builder.rs | 2 +- beacon_node/genesis/src/common.rs | 15 +- .../lighthouse_network/src/rpc/codec.rs | 12 +- .../lighthouse_network/src/rpc/methods.rs | 18 +- .../lighthouse_network/src/rpc/protocol.rs | 6 +- .../lighthouse_network/tests/rpc_tests.rs | 30 +- .../gossip_methods.rs | 14 + common/eth2/src/types.rs | 4 +- consensus/fork_choice/tests/tests.rs | 4 +- .../src/per_block_processing.rs | 7 +- .../src/per_block_processing/tests.rs | 24 +- consensus/types/src/attestation.rs | 17 +- consensus/types/src/chain_spec.rs | 2 +- consensus/types/src/eth_spec.rs | 5 + consensus/types/src/light_client_bootstrap.rs | 48 ++- .../types/src/light_client_finality_update.rs | 12 +- consensus/types/src/light_client_update.rs | 36 +- consensus/types/src/test_utils/test_random.rs | 2 +- lcli/src/http_sync.rs | 4 +- slasher/src/test_utils.rs | 4 +- testing/ef_tests/src/cases/ssz_generic.rs | 13 +- 39 files changed, 758 insertions(+), 465 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1efb1fbc706..8cc058b6155 100644 --- 
a/Cargo.lock +++ b/Cargo.lock @@ -8939,14 +8939,14 @@ dependencies = [ [[package]] name = "ssz_types" -version = "0.11.0" +version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75b55bedc9a18ed2860a46d6beb4f4082416ee1d60be0cc364cebdcdddc7afd4" +checksum = "704671195db617afa3d919da8f220f2535f20d0fa8dad96a1c27a38a5f8f6e9c" dependencies = [ "arbitrary", "ethereum_serde_utils", "ethereum_ssz", - "itertools 0.13.0", + "itertools 0.14.0", "serde", "serde_derive", "smallvec", diff --git a/Cargo.toml b/Cargo.toml index ae84d645bb9..721102bd06e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -239,7 +239,7 @@ slashing_protection = { path = "validator_client/slashing_protection" } slot_clock = { path = "common/slot_clock" } smallvec = { version = "1.11.2", features = ["arbitrary"] } snap = "1" -ssz_types = "0.11.0" +ssz_types = "0.12.2" state_processing = { path = "consensus/state_processing" } store = { path = "beacon_node/store" } strum = { version = "0.24", features = ["derive"] } diff --git a/beacon_node/beacon_chain/benches/benches.rs b/beacon_node/beacon_chain/benches/benches.rs index d090fc35f74..de3ced3be11 100644 --- a/beacon_node/beacon_chain/benches/benches.rs +++ b/beacon_node/beacon_chain/benches/benches.rs @@ -26,8 +26,11 @@ fn create_test_block_and_blobs( let blobs = (0..num_of_blobs) .map(|_| Blob::::default()) .collect::>() - .into(); - let proofs = vec![KzgProof::empty(); num_of_blobs * E::number_of_columns()].into(); + .try_into() + .unwrap(); + let proofs = vec![KzgProof::empty(); num_of_blobs * E::number_of_columns()] + .try_into() + .unwrap(); (signed_block, blobs, proofs) } diff --git a/beacon_node/beacon_chain/src/attestation_verification.rs b/beacon_node/beacon_chain/src/attestation_verification.rs index 470664d4429..9dc10a6be50 100644 --- a/beacon_node/beacon_chain/src/attestation_verification.rs +++ b/beacon_node/beacon_chain/src/attestation_verification.rs @@ -57,7 +57,7 @@ use state_processing::{ }; use 
std::borrow::Cow; use strum::AsRefStr; -use tracing::debug; +use tracing::{debug, error}; use tree_hash::TreeHash; use types::{ Attestation, AttestationData, AttestationRef, BeaconCommittee, @@ -267,6 +267,14 @@ pub enum Error { /// We were unable to process this attestation due to an internal error. It's unclear if the /// attestation is valid. BeaconChainError(Box), + /// A critical error occurred while converting SSZ types. + /// This can only occur when a VariableList was not able to be constructed from a single + /// attestation. + /// + /// ## Peer scoring + /// + /// The peer has sent an invalid message. + SszTypesError(ssz_types::Error), } impl From for Error { @@ -275,6 +283,12 @@ impl From for Error { } } +impl From for Error { + fn from(e: ssz_types::Error) -> Self { + Self::SszTypesError(e) + } +} + /// Used to avoid double-checking signatures. #[derive(Copy, Clone)] enum CheckAttestationSignature { @@ -442,7 +456,18 @@ fn process_slash_info( .spec .fork_name_at_slot::(attestation.data.slot); - let indexed_attestation = attestation.to_indexed(fork_name); + let indexed_attestation = match attestation.to_indexed(fork_name) { + Ok(indexed) => indexed, + Err(e) => { + error!( + attestation_root = ?attestation.data.tree_hash_root(), + error = ?e, + "Unable to construct VariableList from a single attestation. 
\ + This indicates a serious bug in SSZ handling" + ); + return Error::SszTypesError(e); + } + }; (indexed_attestation, true, err) } SignatureNotCheckedIndexed(indexed, err) => (indexed, true, err), @@ -932,7 +957,9 @@ impl<'a, T: BeaconChainTypes> IndexedUnaggregatedAttestation<'a, T> { .spec .fork_name_at_slot::(attestation.data.slot); - let indexed_attestation = attestation.to_indexed(fork_name); + let indexed_attestation = attestation + .to_indexed(fork_name) + .map_err(|e| SignatureNotCheckedSingle(attestation, Error::SszTypesError(e)))?; let validator_index = match Self::verify_middle_checks(attestation, chain) { Ok(t) => t, diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 3e02baf9017..58532116e6c 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -5483,11 +5483,21 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_base.into(), - attestations: attestations_base.into(), - deposits: deposits.into(), - voluntary_exits: voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, _phantom: PhantomData, }, }), @@ -5504,11 +5514,21 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_base.into(), - attestations: attestations_base.into(), - deposits: deposits.into(), - voluntary_exits: 
voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, sync_aggregate: sync_aggregate .ok_or(BlockProductionError::MissingSyncAggregate)?, _phantom: PhantomData, @@ -5531,11 +5551,21 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_base.into(), - attestations: attestations_base.into(), - deposits: deposits.into(), - voluntary_exits: voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, sync_aggregate: sync_aggregate .ok_or(BlockProductionError::MissingSyncAggregate)?, execution_payload: block_proposal_contents @@ -5563,18 +5593,30 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_base.into(), - attestations: attestations_base.into(), - deposits: deposits.into(), - voluntary_exits: voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_base + 
.try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, sync_aggregate: sync_aggregate .ok_or(BlockProductionError::MissingSyncAggregate)?, execution_payload: block_proposal_contents .to_payload() .try_into() .map_err(|_| BlockProductionError::InvalidPayloadFork)?, - bls_to_execution_changes: bls_to_execution_changes.into(), + bls_to_execution_changes: bls_to_execution_changes + .try_into() + .map_err(BlockProductionError::SszTypesError)?, }, }), None, @@ -5602,17 +5644,29 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_base.into(), - attestations: attestations_base.into(), - deposits: deposits.into(), - voluntary_exits: voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_base + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, sync_aggregate: sync_aggregate .ok_or(BlockProductionError::MissingSyncAggregate)?, execution_payload: payload .try_into() .map_err(|_| BlockProductionError::InvalidPayloadFork)?, - bls_to_execution_changes: bls_to_execution_changes.into(), + bls_to_execution_changes: bls_to_execution_changes + .try_into() + .map_err(BlockProductionError::SszTypesError)?, blob_kzg_commitments: kzg_commitments.ok_or( BlockProductionError::MissingKzgCommitment( "Kzg 
commitments missing from block contents".to_string(), @@ -5645,17 +5699,29 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_electra.into(), - attestations: attestations_electra.into(), - deposits: deposits.into(), - voluntary_exits: voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_electra + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_electra + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, sync_aggregate: sync_aggregate .ok_or(BlockProductionError::MissingSyncAggregate)?, execution_payload: payload .try_into() .map_err(|_| BlockProductionError::InvalidPayloadFork)?, - bls_to_execution_changes: bls_to_execution_changes.into(), + bls_to_execution_changes: bls_to_execution_changes + .try_into() + .map_err(BlockProductionError::SszTypesError)?, blob_kzg_commitments: kzg_commitments .ok_or(BlockProductionError::InvalidPayloadFork)?, execution_requests: maybe_requests @@ -5687,17 +5753,29 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_electra.into(), - attestations: attestations_electra.into(), - deposits: deposits.into(), - voluntary_exits: voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_electra + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_electra + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + 
.map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, sync_aggregate: sync_aggregate .ok_or(BlockProductionError::MissingSyncAggregate)?, execution_payload: payload .try_into() .map_err(|_| BlockProductionError::InvalidPayloadFork)?, - bls_to_execution_changes: bls_to_execution_changes.into(), + bls_to_execution_changes: bls_to_execution_changes + .try_into() + .map_err(BlockProductionError::SszTypesError)?, blob_kzg_commitments: kzg_commitments .ok_or(BlockProductionError::InvalidPayloadFork)?, execution_requests: maybe_requests @@ -5729,17 +5807,29 @@ impl BeaconChain { randao_reveal, eth1_data, graffiti, - proposer_slashings: proposer_slashings.into(), - attester_slashings: attester_slashings_electra.into(), - attestations: attestations_electra.into(), - deposits: deposits.into(), - voluntary_exits: voluntary_exits.into(), + proposer_slashings: proposer_slashings + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attester_slashings: attester_slashings_electra + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + attestations: attestations_electra + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + deposits: deposits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, + voluntary_exits: voluntary_exits + .try_into() + .map_err(BlockProductionError::SszTypesError)?, sync_aggregate: sync_aggregate .ok_or(BlockProductionError::MissingSyncAggregate)?, execution_payload: payload .try_into() .map_err(|_| BlockProductionError::InvalidPayloadFork)?, - bls_to_execution_changes: bls_to_execution_changes.into(), + bls_to_execution_changes: bls_to_execution_changes + .try_into() + .map_err(BlockProductionError::SszTypesError)?, blob_kzg_commitments: kzg_commitments .ok_or(BlockProductionError::InvalidPayloadFork)?, execution_requests: maybe_requests diff --git a/beacon_node/beacon_chain/src/data_column_verification.rs 
b/beacon_node/beacon_chain/src/data_column_verification.rs index 07f85b045ab..7a8066351a3 100644 --- a/beacon_node/beacon_chain/src/data_column_verification.rs +++ b/beacon_node/beacon_chain/src/data_column_verification.rs @@ -868,16 +868,16 @@ mod test { let state = harness.get_current_state(); let ((block, _blobs_opt), _state) = harness .make_block_with_modifier(state, slot, |block| { - *block.body_mut().blob_kzg_commitments_mut().unwrap() = vec![].into(); + *block.body_mut().blob_kzg_commitments_mut().unwrap() = vec![].try_into().unwrap(); }) .await; let index = 0; let column_sidecar = DataColumnSidecar:: { index, - column: vec![].into(), - kzg_commitments: vec![].into(), - kzg_proofs: vec![].into(), + column: vec![].try_into().unwrap(), + kzg_commitments: vec![].try_into().unwrap(), + kzg_proofs: vec![].try_into().unwrap(), signed_block_header: block.signed_block_header(), kzg_commitments_inclusion_proof: block .message() @@ -914,7 +914,9 @@ mod test { let ((block, _blobs_opt), _state) = harness .make_block_with_modifier(state, slot, |block| { *block.body_mut().blob_kzg_commitments_mut().unwrap() = - vec![preloaded_commitments_single[0]; blob_count].into(); + vec![preloaded_commitments_single[0]; blob_count] + .try_into() + .unwrap(); }) .await; diff --git a/beacon_node/beacon_chain/src/errors.rs b/beacon_node/beacon_chain/src/errors.rs index d4eba2b0ea2..9dc6e897fb1 100644 --- a/beacon_node/beacon_chain/src/errors.rs +++ b/beacon_node/beacon_chain/src/errors.rs @@ -318,6 +318,7 @@ pub enum BlockProductionError { KzgError(kzg::Error), FailedToBuildBlobSidecars(String), MissingExecutionRequests, + SszTypesError(ssz_types::Error), } easy_from_to!(BlockProcessingError, BlockProductionError); diff --git a/beacon_node/beacon_chain/src/fetch_blobs/tests.rs b/beacon_node/beacon_chain/src/fetch_blobs/tests.rs index e4855dd5598..cbe2f78fbda 100644 --- a/beacon_node/beacon_chain/src/fetch_blobs/tests.rs +++ b/beacon_node/beacon_chain/src/fetch_blobs/tests.rs @@ -576,7 
+576,7 @@ fn create_test_block_and_blobs( .map(|(blob, proofs)| { BlobAndProof::V2(BlobAndProofV2 { blob, - proofs: proofs.to_vec().into(), + proofs: proofs.to_vec().try_into().unwrap(), }) }) .collect() diff --git a/beacon_node/beacon_chain/src/kzg_utils.rs b/beacon_node/beacon_chain/src/kzg_utils.rs index 382775ab50f..18e14587a52 100644 --- a/beacon_node/beacon_chain/src/kzg_utils.rs +++ b/beacon_node/beacon_chain/src/kzg_utils.rs @@ -258,7 +258,8 @@ pub(crate) fn build_data_column_sidecars( .get(col) .ok_or(format!("Missing blob cell at index {col}"))?; let cell: Vec = cell.to_vec(); - let cell = Cell::::from(cell); + let cell = + Cell::::try_from(cell).map_err(|e| format!("BytesPerCell exceeded: {e:?}"))?; let proof = blob_cell_proofs .get(col) @@ -276,23 +277,27 @@ pub(crate) fn build_data_column_sidecars( } } - let sidecars: Vec>> = columns + let sidecars: Result>>, String> = columns .into_iter() .zip(column_kzg_proofs) .enumerate() - .map(|(index, (col, proofs))| { - Arc::new(DataColumnSidecar { - index: index as u64, - column: DataColumn::::from(col), - kzg_commitments: kzg_commitments.clone(), - kzg_proofs: VariableList::from(proofs), - signed_block_header: signed_block_header.clone(), - kzg_commitments_inclusion_proof: kzg_commitments_inclusion_proof.clone(), - }) - }) + .map( + |(index, (col, proofs))| -> Result>, String> { + Ok(Arc::new(DataColumnSidecar { + index: index as u64, + column: DataColumn::::try_from(col) + .map_err(|e| format!("MaxBlobCommitmentsPerBlock exceeded: {e:?}"))?, + kzg_commitments: kzg_commitments.clone(), + kzg_proofs: VariableList::try_from(proofs) + .map_err(|e| format!("MaxBlobCommitmentsPerBlock exceeded: {e:?}"))?, + signed_block_header: signed_block_header.clone(), + kzg_commitments_inclusion_proof: kzg_commitments_inclusion_proof.clone(), + })) + }, + ) .collect(); - Ok(sidecars) + sidecars } /// Reconstruct blobs from a subset of data column sidecars (requires at least 50%). 
diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index 38797d0264d..c1d1d9de67d 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -2324,7 +2324,7 @@ where .collect::>(); // Building a VarList from leaves - let deposit_data_list = VariableList::<_, U4294967296>::from(leaves.clone()); + let deposit_data_list = VariableList::<_, U4294967296>::try_from(leaves.clone()).unwrap(); // Setting the deposit_root to be the tree_hash_root of the VarList state.eth1_data_mut().deposit_root = deposit_data_list.tree_hash_root(); @@ -2348,7 +2348,7 @@ where let deposits = datas .into_par_iter() .zip(proofs.into_par_iter()) - .map(|(data, proof)| (data, proof.into())) + .map(|(data, proof)| (data, proof.try_into().unwrap())) .map(|(data, proof)| Deposit { proof, data }) .collect::>(); diff --git a/beacon_node/beacon_chain/tests/block_verification.rs b/beacon_node/beacon_chain/tests/block_verification.rs index 7dfef50ea11..3d1fa8f4af4 100644 --- a/beacon_node/beacon_chain/tests/block_verification.rs +++ b/beacon_node/beacon_chain/tests/block_verification.rs @@ -707,7 +707,7 @@ async fn invalid_signature_attester_slashing() { let attester_slashing = if fork_name.electra_enabled() { let indexed_attestation = IndexedAttestationElectra { - attesting_indices: vec![0].into(), + attesting_indices: vec![0].try_into().unwrap(), data: AttestationData { slot: Slot::new(0), index: 0, @@ -731,7 +731,7 @@ async fn invalid_signature_attester_slashing() { AttesterSlashing::Electra(attester_slashing) } else { let indexed_attestation = IndexedAttestationBase { - attesting_indices: vec![0].into(), + attesting_indices: vec![0].try_into().unwrap(), data: AttestationData { slot: Slot::new(0), index: 0, @@ -898,7 +898,9 @@ async fn invalid_signature_deposit() { let harness = get_invalid_sigs_harness(&chain_segment).await; let mut snapshots = chain_segment.clone(); let deposit = Deposit { - proof: 
vec![Hash256::zero(); DEPOSIT_TREE_DEPTH + 1].into(), + proof: vec![Hash256::zero(); DEPOSIT_TREE_DEPTH + 1] + .try_into() + .unwrap(), data: DepositData { pubkey: Keypair::random().pk.into(), withdrawal_credentials: Hash256::zero(), @@ -1270,7 +1272,9 @@ async fn block_gossip_verification() { as usize; if let Ok(kzg_commitments) = block.body_mut().blob_kzg_commitments_mut() { - *kzg_commitments = vec![KzgCommitment::empty_for_testing(); kzg_commitments_len + 1].into(); + *kzg_commitments = vec![KzgCommitment::empty_for_testing(); kzg_commitments_len + 1] + .try_into() + .unwrap(); assert!( matches!( unwrap_err(harness.chain.verify_block_for_gossip(Arc::new(SignedBeaconBlock::from_block(block, signature))).await), diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index 53e841692e6..0a261e36cef 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -1415,7 +1415,7 @@ async fn proposer_shuffling_changing_with_lookahead() { let execution_requests = ExecutionRequests:: { deposits: VariableList::new(vec![deposit_request]).unwrap(), - withdrawals: vec![].into(), + withdrawals: vec![].try_into().unwrap(), consolidations: VariableList::new(vec![consolidation_request]).unwrap(), }; diff --git a/beacon_node/execution_layer/src/engine_api/http.rs b/beacon_node/execution_layer/src/engine_api/http.rs index bc927e19b41..74fb078510e 100644 --- a/beacon_node/execution_layer/src/engine_api/http.rs +++ b/beacon_node/execution_layer/src/engine_api/http.rs @@ -768,7 +768,7 @@ impl HttpJsonRpc { &self, execution_payload: ExecutionPayload, ) -> Result { - let params = json!([JsonExecutionPayload::from(execution_payload)]); + let params = json!([JsonExecutionPayload::try_from(execution_payload)?]); let response: JsonPayloadStatusV1 = self .rpc_request( @@ -785,7 +785,7 @@ impl HttpJsonRpc { &self, execution_payload: ExecutionPayload, ) -> Result { - let params = 
json!([JsonExecutionPayload::from(execution_payload)]); + let params = json!([JsonExecutionPayload::try_from(execution_payload)?]); let response: JsonPayloadStatusV1 = self .rpc_request( @@ -803,7 +803,12 @@ impl HttpJsonRpc { new_payload_request_deneb: NewPayloadRequestDeneb<'_, E>, ) -> Result { let params = json!([ - JsonExecutionPayload::Deneb(new_payload_request_deneb.execution_payload.clone().into()), + JsonExecutionPayload::Deneb( + new_payload_request_deneb + .execution_payload + .clone() + .try_into()? + ), new_payload_request_deneb.versioned_hashes, new_payload_request_deneb.parent_beacon_block_root, ]); @@ -825,7 +830,10 @@ impl HttpJsonRpc { ) -> Result { let params = json!([ JsonExecutionPayload::Electra( - new_payload_request_electra.execution_payload.clone().into() + new_payload_request_electra + .execution_payload + .clone() + .try_into()? ), new_payload_request_electra.versioned_hashes, new_payload_request_electra.parent_beacon_block_root, @@ -850,7 +858,12 @@ impl HttpJsonRpc { new_payload_request_fulu: NewPayloadRequestFulu<'_, E>, ) -> Result { let params = json!([ - JsonExecutionPayload::Fulu(new_payload_request_fulu.execution_payload.clone().into()), + JsonExecutionPayload::Fulu( + new_payload_request_fulu + .execution_payload + .clone() + .try_into()? + ), new_payload_request_fulu.versioned_hashes, new_payload_request_fulu.parent_beacon_block_root, new_payload_request_fulu @@ -874,7 +887,12 @@ impl HttpJsonRpc { new_payload_request_gloas: NewPayloadRequestGloas<'_, E>, ) -> Result { let params = json!([ - JsonExecutionPayload::Gloas(new_payload_request_gloas.execution_payload.clone().into()), + JsonExecutionPayload::Gloas( + new_payload_request_gloas + .execution_payload + .clone() + .try_into()? 
+ ), new_payload_request_gloas.versioned_hashes, new_payload_request_gloas.parent_beacon_block_root, new_payload_request_gloas @@ -1125,10 +1143,14 @@ impl HttpJsonRpc { ) .await?; - Ok(response + response .into_iter() - .map(|opt_json| opt_json.map(From::from)) - .collect()) + .map(|opt_json| { + opt_json + .map(|json| json.try_into().map_err(Error::from)) + .transpose() + }) + .collect::, _>>() } pub async fn get_payload_bodies_by_range_v1( @@ -1149,10 +1171,14 @@ impl HttpJsonRpc { ) .await?; - Ok(response + response .into_iter() - .map(|opt_json| opt_json.map(From::from)) - .collect()) + .map(|opt_json| { + opt_json + .map(|json| json.try_into().map_err(Error::from)) + .transpose() + }) + .collect::, _>>() } pub async fn exchange_capabilities(&self) -> Result { @@ -1814,16 +1840,16 @@ mod test { fee_recipient: Address::repeat_byte(1), state_root: Hash256::repeat_byte(1), receipts_root: Hash256::repeat_byte(0), - logs_bloom: vec![1; 256].into(), + logs_bloom: vec![1; 256].try_into().unwrap(), prev_randao: Hash256::repeat_byte(1), block_number: 0, gas_limit: 1, gas_used: 2, timestamp: 42, - extra_data: vec![].into(), + extra_data: vec![].try_into().unwrap(), base_fee_per_gas: Uint256::from(1), block_hash: ExecutionBlockHash::repeat_byte(1), - transactions: vec![].into(), + transactions: vec![].try_into().unwrap(), }, )) .await; @@ -1861,16 +1887,16 @@ mod test { fee_recipient: Address::repeat_byte(1), state_root: Hash256::repeat_byte(1), receipts_root: Hash256::repeat_byte(0), - logs_bloom: vec![1; 256].into(), + logs_bloom: vec![1; 256].try_into().unwrap(), prev_randao: Hash256::repeat_byte(1), block_number: 0, gas_limit: 1, gas_used: 2, timestamp: 42, - extra_data: vec![].into(), + extra_data: vec![].try_into().unwrap(), base_fee_per_gas: Uint256::from(1), block_hash: ExecutionBlockHash::repeat_byte(1), - transactions: vec![].into(), + transactions: vec![].try_into().unwrap(), }, )) .await @@ -2071,16 +2097,16 @@ mod test { fee_recipient: 
Address::from_str("0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b").unwrap(), state_root: Hash256::from_str("0xca3149fa9e37db08d1cd49c9061db1002ef1cd58db2210f2115c8c989b2bdf45").unwrap(), receipts_root: Hash256::from_str("0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").unwrap(), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: Hash256::zero(), block_number: 1, gas_limit: u64::from_str_radix("1c95111",16).unwrap(), gas_used: 0, timestamp: 5, - extra_data: vec![].into(), + extra_data: vec![].try_into().unwrap(), base_fee_per_gas: Uint256::from(7), block_hash: ExecutionBlockHash::from_str("0x6359b8381a370e2f54072a5784ddd78b6ed024991558c511d4452eb4f6ac898c").unwrap(), - transactions: vec![].into(), + transactions: vec![].try_into().unwrap(), }); assert_eq!(payload, expected); @@ -2096,16 +2122,16 @@ mod test { fee_recipient: Address::from_str("0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b").unwrap(), state_root: Hash256::from_str("0xca3149fa9e37db08d1cd49c9061db1002ef1cd58db2210f2115c8c989b2bdf45").unwrap(), receipts_root: Hash256::from_str("0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").unwrap(), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: Hash256::zero(), block_number: 1, gas_limit: u64::from_str_radix("1c9c380",16).unwrap(), gas_used: 0, timestamp: 5, - extra_data: vec![].into(), + extra_data: vec![].try_into().unwrap(), base_fee_per_gas: Uint256::from(7), block_hash: ExecutionBlockHash::from_str("0x3559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858").unwrap(), - transactions: vec![].into(), + transactions: vec![].try_into().unwrap(), })) .await; }, diff --git a/beacon_node/execution_layer/src/engine_api/json_structures.rs b/beacon_node/execution_layer/src/engine_api/json_structures.rs index 33decd4ec86..cc46070325d 100644 --- a/beacon_node/execution_layer/src/engine_api/json_structures.rs +++ 
b/beacon_node/execution_layer/src/engine_api/json_structures.rs @@ -1,7 +1,8 @@ use super::*; use alloy_rlp::RlpEncodable; use serde::{Deserialize, Serialize}; -use ssz::Decode; +use ssz::{Decode, TryFromIter}; +use ssz_types::{FixedVector, VariableList, typenum::Unsigned}; use strum::EnumString; use superstruct::superstruct; use types::beacon_block_body::KzgCommitments; @@ -9,7 +10,7 @@ use types::blob_sidecar::BlobsList; use types::execution_requests::{ ConsolidationRequests, DepositRequests, RequestType, WithdrawalRequests, }; -use types::{Blob, FixedVector, KzgProof, Unsigned}; +use types::{Blob, KzgProof}; #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -130,9 +131,11 @@ impl From> for JsonExecutionPayloadBell } } } -impl From> for JsonExecutionPayloadCapella { - fn from(payload: ExecutionPayloadCapella) -> Self { - JsonExecutionPayloadCapella { +impl TryFrom> for JsonExecutionPayloadCapella { + type Error = ssz_types::Error; + + fn try_from(payload: ExecutionPayloadCapella) -> Result { + Ok(JsonExecutionPayloadCapella { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -147,18 +150,15 @@ impl From> for JsonExecutionPayloadCapell base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), - } + withdrawals: withdrawals_to_json(payload.withdrawals)?, + }) } } -impl From> for JsonExecutionPayloadDeneb { - fn from(payload: ExecutionPayloadDeneb) -> Self { - JsonExecutionPayloadDeneb { +impl TryFrom> for JsonExecutionPayloadDeneb { + type Error = ssz_types::Error; + + fn try_from(payload: ExecutionPayloadDeneb) -> Result { + Ok(JsonExecutionPayloadDeneb { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -173,21 +173,18 @@ impl From> for JsonExecutionPayloadDeneb 
base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_to_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: payload.excess_blob_gas, - } + }) } } -impl From> for JsonExecutionPayloadElectra { - fn from(payload: ExecutionPayloadElectra) -> Self { - JsonExecutionPayloadElectra { +impl TryFrom> for JsonExecutionPayloadElectra { + type Error = ssz_types::Error; + + fn try_from(payload: ExecutionPayloadElectra) -> Result { + Ok(JsonExecutionPayloadElectra { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -202,21 +199,18 @@ impl From> for JsonExecutionPayloadElectr base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_to_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: payload.excess_blob_gas, - } + }) } } -impl From> for JsonExecutionPayloadFulu { - fn from(payload: ExecutionPayloadFulu) -> Self { - JsonExecutionPayloadFulu { +impl TryFrom> for JsonExecutionPayloadFulu { + type Error = ssz_types::Error; + + fn try_from(payload: ExecutionPayloadFulu) -> Result { + Ok(JsonExecutionPayloadFulu { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -231,21 +225,18 @@ impl From> for JsonExecutionPayloadFulu { base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_to_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: 
payload.excess_blob_gas, - } + }) } } -impl From> for JsonExecutionPayloadGloas { - fn from(payload: ExecutionPayloadGloas) -> Self { - JsonExecutionPayloadGloas { +impl TryFrom> for JsonExecutionPayloadGloas { + type Error = ssz_types::Error; + + fn try_from(payload: ExecutionPayloadGloas) -> Result { + Ok(JsonExecutionPayloadGloas { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -260,27 +251,34 @@ impl From> for JsonExecutionPayloadGloas base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_to_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: payload.excess_blob_gas, - } + }) } } -impl From> for JsonExecutionPayload { - fn from(execution_payload: ExecutionPayload) -> Self { +impl TryFrom> for JsonExecutionPayload { + type Error = ssz_types::Error; + + fn try_from(execution_payload: ExecutionPayload) -> Result { match execution_payload { - ExecutionPayload::Bellatrix(payload) => JsonExecutionPayload::Bellatrix(payload.into()), - ExecutionPayload::Capella(payload) => JsonExecutionPayload::Capella(payload.into()), - ExecutionPayload::Deneb(payload) => JsonExecutionPayload::Deneb(payload.into()), - ExecutionPayload::Electra(payload) => JsonExecutionPayload::Electra(payload.into()), - ExecutionPayload::Fulu(payload) => JsonExecutionPayload::Fulu(payload.into()), - ExecutionPayload::Gloas(payload) => JsonExecutionPayload::Gloas(payload.into()), + ExecutionPayload::Bellatrix(payload) => { + Ok(JsonExecutionPayload::Bellatrix(payload.into())) + } + ExecutionPayload::Capella(payload) => { + Ok(JsonExecutionPayload::Capella(payload.try_into()?)) + } + ExecutionPayload::Deneb(payload) => { + Ok(JsonExecutionPayload::Deneb(payload.try_into()?)) + } + ExecutionPayload::Electra(payload) => { + 
Ok(JsonExecutionPayload::Electra(payload.try_into()?)) + } + ExecutionPayload::Fulu(payload) => Ok(JsonExecutionPayload::Fulu(payload.try_into()?)), + ExecutionPayload::Gloas(payload) => { + Ok(JsonExecutionPayload::Gloas(payload.try_into()?)) + } } } } @@ -305,9 +303,11 @@ impl From> for ExecutionPayloadBell } } } -impl From> for ExecutionPayloadCapella { - fn from(payload: JsonExecutionPayloadCapella) -> Self { - ExecutionPayloadCapella { +impl TryFrom> for ExecutionPayloadCapella { + type Error = ssz_types::Error; + + fn try_from(payload: JsonExecutionPayloadCapella) -> Result { + Ok(ExecutionPayloadCapella { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -322,19 +322,16 @@ impl From> for ExecutionPayloadCapell base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), - } + withdrawals: withdrawals_from_json(payload.withdrawals)?, + }) } } -impl From> for ExecutionPayloadDeneb { - fn from(payload: JsonExecutionPayloadDeneb) -> Self { - ExecutionPayloadDeneb { +impl TryFrom> for ExecutionPayloadDeneb { + type Error = ssz_types::Error; + + fn try_from(payload: JsonExecutionPayloadDeneb) -> Result { + Ok(ExecutionPayloadDeneb { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -349,21 +346,18 @@ impl From> for ExecutionPayloadDeneb base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_from_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: payload.excess_blob_gas, - } + }) } } -impl From> for ExecutionPayloadElectra { - fn from(payload: JsonExecutionPayloadElectra) -> Self { - 
ExecutionPayloadElectra { +impl TryFrom> for ExecutionPayloadElectra { + type Error = ssz_types::Error; + + fn try_from(payload: JsonExecutionPayloadElectra) -> Result { + Ok(ExecutionPayloadElectra { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -378,21 +372,18 @@ impl From> for ExecutionPayloadElectr base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_from_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: payload.excess_blob_gas, - } + }) } } -impl From> for ExecutionPayloadFulu { - fn from(payload: JsonExecutionPayloadFulu) -> Self { - ExecutionPayloadFulu { +impl TryFrom> for ExecutionPayloadFulu { + type Error = ssz_types::Error; + + fn try_from(payload: JsonExecutionPayloadFulu) -> Result { + Ok(ExecutionPayloadFulu { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -407,21 +398,18 @@ impl From> for ExecutionPayloadFulu { base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_from_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: payload.excess_blob_gas, - } + }) } } -impl From> for ExecutionPayloadGloas { - fn from(payload: JsonExecutionPayloadGloas) -> Self { - ExecutionPayloadGloas { +impl TryFrom> for ExecutionPayloadGloas { + type Error = ssz_types::Error; + + fn try_from(payload: JsonExecutionPayloadGloas) -> Result { + Ok(ExecutionPayloadGloas { parent_hash: payload.parent_hash, fee_recipient: payload.fee_recipient, state_root: payload.state_root, @@ -436,27 +424,34 @@ impl From> for ExecutionPayloadGloas 
base_fee_per_gas: payload.base_fee_per_gas, block_hash: payload.block_hash, transactions: payload.transactions, - withdrawals: payload - .withdrawals - .into_iter() - .map(Into::into) - .collect::>() - .into(), + withdrawals: withdrawals_from_json(payload.withdrawals)?, blob_gas_used: payload.blob_gas_used, excess_blob_gas: payload.excess_blob_gas, - } + }) } } -impl From> for ExecutionPayload { - fn from(json_execution_payload: JsonExecutionPayload) -> Self { +impl TryFrom> for ExecutionPayload { + type Error = ssz_types::Error; + + fn try_from(json_execution_payload: JsonExecutionPayload) -> Result { match json_execution_payload { - JsonExecutionPayload::Bellatrix(payload) => ExecutionPayload::Bellatrix(payload.into()), - JsonExecutionPayload::Capella(payload) => ExecutionPayload::Capella(payload.into()), - JsonExecutionPayload::Deneb(payload) => ExecutionPayload::Deneb(payload.into()), - JsonExecutionPayload::Electra(payload) => ExecutionPayload::Electra(payload.into()), - JsonExecutionPayload::Fulu(payload) => ExecutionPayload::Fulu(payload.into()), - JsonExecutionPayload::Gloas(payload) => ExecutionPayload::Gloas(payload.into()), + JsonExecutionPayload::Bellatrix(payload) => { + Ok(ExecutionPayload::Bellatrix(payload.into())) + } + JsonExecutionPayload::Capella(payload) => { + Ok(ExecutionPayload::Capella(payload.try_into()?)) + } + JsonExecutionPayload::Deneb(payload) => { + Ok(ExecutionPayload::Deneb(payload.try_into()?)) + } + JsonExecutionPayload::Electra(payload) => { + Ok(ExecutionPayload::Electra(payload.try_into()?)) + } + JsonExecutionPayload::Fulu(payload) => Ok(ExecutionPayload::Fulu(payload.try_into()?)), + JsonExecutionPayload::Gloas(payload) => { + Ok(ExecutionPayload::Gloas(payload.try_into()?)) + } } } } @@ -590,13 +585,17 @@ impl TryFrom> for GetPayloadResponse { } JsonGetPayloadResponse::Capella(response) => { Ok(GetPayloadResponse::Capella(GetPayloadResponseCapella { - execution_payload: response.execution_payload.into(), + 
execution_payload: response.execution_payload.try_into().map_err(|e| { + format!("Failed to convert json to execution payload: {:?}", e) + })?, block_value: response.block_value, })) } JsonGetPayloadResponse::Deneb(response) => { Ok(GetPayloadResponse::Deneb(GetPayloadResponseDeneb { - execution_payload: response.execution_payload.into(), + execution_payload: response.execution_payload.try_into().map_err(|e| { + format!("Failed to convert json to execution payload: {:?}", e) + })?, block_value: response.block_value, blobs_bundle: response.blobs_bundle.into(), should_override_builder: response.should_override_builder, @@ -604,34 +603,40 @@ impl TryFrom> for GetPayloadResponse { } JsonGetPayloadResponse::Electra(response) => { Ok(GetPayloadResponse::Electra(GetPayloadResponseElectra { - execution_payload: response.execution_payload.into(), + execution_payload: response.execution_payload.try_into().map_err(|e| { + format!("Failed to convert json to execution payload: {:?}", e) + })?, block_value: response.block_value, blobs_bundle: response.blobs_bundle.into(), should_override_builder: response.should_override_builder, requests: response.execution_requests.try_into().map_err(|e| { - format!("Failed to convert json to execution requests : {:?}", e) + format!("Failed to convert json to execution requests: {:?}", e) })?, })) } JsonGetPayloadResponse::Fulu(response) => { Ok(GetPayloadResponse::Fulu(GetPayloadResponseFulu { - execution_payload: response.execution_payload.into(), + execution_payload: response.execution_payload.try_into().map_err(|e| { + format!("Failed to convert json to execution payload: {:?}", e) + })?, block_value: response.block_value, blobs_bundle: response.blobs_bundle.into(), should_override_builder: response.should_override_builder, requests: response.execution_requests.try_into().map_err(|e| { - format!("Failed to convert json to execution requests {:?}", e) + format!("Failed to convert json to execution requests: {:?}", e) })?, })) } 
JsonGetPayloadResponse::Gloas(response) => { Ok(GetPayloadResponse::Gloas(GetPayloadResponseGloas { - execution_payload: response.execution_payload.into(), + execution_payload: response.execution_payload.try_into().map_err(|e| { + format!("Failed to convert json to execution payload: {:?}", e) + })?, block_value: response.block_value, blobs_bundle: response.blobs_bundle.into(), should_override_builder: response.should_override_builder, requests: response.execution_requests.try_into().map_err(|e| { - format!("Failed to convert json to execution requests {:?}", e) + format!("Failed to convert json to execution requests: {:?}", e) })?, })) } @@ -673,6 +678,26 @@ impl From for Withdrawal { } } } + +// Helper functions to convert between `VariableList` and `VariableList`. +fn withdrawals_to_json( + list: VariableList, +) -> Result, ssz_types::Error> +where + N: Unsigned, +{ + VariableList::try_from_iter(list.into_iter().map(Into::into)) +} + +fn withdrawals_from_json( + list: VariableList, +) -> Result, ssz_types::Error> +where + N: Unsigned, +{ + VariableList::try_from_iter(list.into_iter().map(Into::into)) +} + #[derive(Debug, PartialEq, Clone, RlpEncodable)] pub struct EncodableJsonWithdrawal<'a> { pub index: u64, @@ -976,30 +1001,25 @@ pub struct JsonExecutionPayloadBodyV1 { pub withdrawals: Option>, } -impl From> for ExecutionPayloadBodyV1 { - fn from(value: JsonExecutionPayloadBodyV1) -> Self { - Self { +impl TryFrom> for ExecutionPayloadBodyV1 { + type Error = ssz_types::Error; + + fn try_from(value: JsonExecutionPayloadBodyV1) -> Result { + Ok(Self { transactions: value.transactions, - withdrawals: value.withdrawals.map(|json_withdrawals| { - Withdrawals::::from( - json_withdrawals - .into_iter() - .map(Into::into) - .collect::>(), - ) - }), - } + withdrawals: value.withdrawals.map(withdrawals_from_json).transpose()?, + }) } } -impl From> for JsonExecutionPayloadBodyV1 { - fn from(value: ExecutionPayloadBodyV1) -> Self { - Self { +impl TryFrom> for 
JsonExecutionPayloadBodyV1 { + type Error = ssz_types::Error; + + fn try_from(value: ExecutionPayloadBodyV1) -> Result { + Ok(Self { transactions: value.transactions, - withdrawals: value.withdrawals.map(|withdrawals| { - VariableList::from(withdrawals.into_iter().map(Into::into).collect::>()) - }), - } + withdrawals: value.withdrawals.map(withdrawals_to_json).transpose()?, + }) } } diff --git a/beacon_node/execution_layer/src/engine_api/new_payload_request.rs b/beacon_node/execution_layer/src/engine_api/new_payload_request.rs index aa5261c80b0..617d2e01129 100644 --- a/beacon_node/execution_layer/src/engine_api/new_payload_request.rs +++ b/beacon_node/execution_layer/src/engine_api/new_payload_request.rs @@ -337,7 +337,7 @@ mod test { *beacon_block .body_mut() .blob_kzg_commitments_mut() - .expect("should get commitments") = commitments.into(); + .expect("should get commitments") = commitments.try_into().unwrap(); let new_payload_request = NewPayloadRequest::try_from(beacon_block.to_ref()) .expect("should create new payload request"); diff --git a/beacon_node/execution_layer/src/lib.rs b/beacon_node/execution_layer/src/lib.rs index a5fa0f34158..4175abf7240 100644 --- a/beacon_node/execution_layer/src/lib.rs +++ b/beacon_node/execution_layer/src/lib.rs @@ -171,11 +171,18 @@ pub enum Error { InvalidPayloadBody(String), InvalidPayloadConversion, InvalidBlobConversion(String), + SszTypesError(ssz_types::Error), BeaconStateError(BeaconStateError), PayloadTypeMismatch, VerifyingVersionedHashes(versioned_hashes::Error), } +impl From for Error { + fn from(e: ssz_types::Error) -> Self { + Error::SszTypesError(e) + } +} + impl From for Error { fn from(e: BeaconStateError) -> Self { Error::BeaconStateError(e) @@ -2102,6 +2109,7 @@ enum InvalidBuilderPayload { payload: u64, expected: u64, }, + SszTypesError(ssz_types::Error), } impl fmt::Display for InvalidBuilderPayload { @@ -2143,6 +2151,7 @@ impl fmt::Display for InvalidBuilderPayload { 
InvalidBuilderPayload::GasLimitMismatch { payload, expected } => { write!(f, "payload gas limit was {} not {}", payload, expected) } + Self::SszTypesError(e) => write!(f, "{:?}", e), } } } @@ -2198,7 +2207,13 @@ fn verify_builder_bid( .withdrawals() .ok() .cloned() - .map(|withdrawals| Withdrawals::::from(withdrawals).tree_hash_root()); + .map(|withdrawals| { + Withdrawals::::try_from(withdrawals) + .map_err(InvalidBuilderPayload::SszTypesError) + .map(|w| w.tree_hash_root()) + }) + .transpose()?; + let payload_withdrawals_root = header.withdrawals_root().ok(); let expected_gas_limit = proposer_gas_limit .and_then(|target_gas_limit| expected_gas_limit(parent_gas_limit, target_gas_limit, spec)); diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index 4836f9307c8..29c764ee305 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -40,7 +40,7 @@ pub enum Block { } pub fn mock_el_extra_data() -> types::VariableList { - "block gen was here".as_bytes().to_vec().into() + "block gen was here".as_bytes().to_vec().try_into().unwrap() } impl Block { @@ -602,7 +602,7 @@ impl ExecutionBlockGenerator { fee_recipient: pa.suggested_fee_recipient, receipts_root: Hash256::repeat_byte(42), state_root: Hash256::repeat_byte(43), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: pa.prev_randao, block_number: parent.block_number() + 1, gas_limit: DEFAULT_GAS_LIMIT, @@ -611,7 +611,7 @@ impl ExecutionBlockGenerator { extra_data: mock_el_extra_data::(), base_fee_per_gas: Uint256::from(1u64), block_hash: ExecutionBlockHash::zero(), - transactions: vec![].into(), + transactions: vec![].try_into().unwrap(), }), PayloadAttributes::V2(pa) => match self.get_fork_at_timestamp(pa.timestamp) { ForkName::Bellatrix => 
ExecutionPayload::Bellatrix(ExecutionPayloadBellatrix { @@ -619,7 +619,7 @@ impl ExecutionBlockGenerator { fee_recipient: pa.suggested_fee_recipient, receipts_root: Hash256::repeat_byte(42), state_root: Hash256::repeat_byte(43), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: pa.prev_randao, block_number: parent.block_number() + 1, gas_limit: DEFAULT_GAS_LIMIT, @@ -628,14 +628,14 @@ impl ExecutionBlockGenerator { extra_data: mock_el_extra_data::(), base_fee_per_gas: Uint256::from(1u64), block_hash: ExecutionBlockHash::zero(), - transactions: vec![].into(), + transactions: vec![].try_into().unwrap(), }), ForkName::Capella => ExecutionPayload::Capella(ExecutionPayloadCapella { parent_hash: head_block_hash, fee_recipient: pa.suggested_fee_recipient, receipts_root: Hash256::repeat_byte(42), state_root: Hash256::repeat_byte(43), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: pa.prev_randao, block_number: parent.block_number() + 1, gas_limit: DEFAULT_GAS_LIMIT, @@ -644,8 +644,8 @@ impl ExecutionBlockGenerator { extra_data: mock_el_extra_data::(), base_fee_per_gas: Uint256::from(1u64), block_hash: ExecutionBlockHash::zero(), - transactions: vec![].into(), - withdrawals: pa.withdrawals.clone().into(), + transactions: vec![].try_into().unwrap(), + withdrawals: pa.withdrawals.clone().try_into().unwrap(), }), _ => unreachable!(), }, @@ -655,7 +655,7 @@ impl ExecutionBlockGenerator { fee_recipient: pa.suggested_fee_recipient, receipts_root: Hash256::repeat_byte(42), state_root: Hash256::repeat_byte(43), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: pa.prev_randao, block_number: parent.block_number() + 1, gas_limit: DEFAULT_GAS_LIMIT, @@ -664,8 +664,8 @@ impl ExecutionBlockGenerator { extra_data: mock_el_extra_data::(), base_fee_per_gas: Uint256::from(1u64), block_hash: ExecutionBlockHash::zero(), - transactions: vec![].into(), - 
withdrawals: pa.withdrawals.clone().into(), + transactions: vec![].try_into().unwrap(), + withdrawals: pa.withdrawals.clone().try_into().unwrap(), blob_gas_used: 0, excess_blob_gas: 0, }), @@ -674,7 +674,7 @@ impl ExecutionBlockGenerator { fee_recipient: pa.suggested_fee_recipient, receipts_root: Hash256::repeat_byte(42), state_root: Hash256::repeat_byte(43), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: pa.prev_randao, block_number: parent.block_number() + 1, gas_limit: DEFAULT_GAS_LIMIT, @@ -683,8 +683,8 @@ impl ExecutionBlockGenerator { extra_data: mock_el_extra_data::(), base_fee_per_gas: Uint256::from(1u64), block_hash: ExecutionBlockHash::zero(), - transactions: vec![].into(), - withdrawals: pa.withdrawals.clone().into(), + transactions: vec![].try_into().unwrap(), + withdrawals: pa.withdrawals.clone().try_into().unwrap(), blob_gas_used: 0, excess_blob_gas: 0, }), @@ -693,17 +693,17 @@ impl ExecutionBlockGenerator { fee_recipient: pa.suggested_fee_recipient, receipts_root: Hash256::repeat_byte(42), state_root: Hash256::repeat_byte(43), - logs_bloom: vec![0; 256].into(), + logs_bloom: vec![0; 256].try_into().unwrap(), prev_randao: pa.prev_randao, block_number: parent.block_number() + 1, gas_limit: DEFAULT_GAS_LIMIT, gas_used: GAS_USED, timestamp: pa.timestamp, - extra_data: "block gen was here".as_bytes().to_vec().into(), + extra_data: "block gen was here".as_bytes().to_vec().try_into().unwrap(), base_fee_per_gas: Uint256::from(1u64), block_hash: ExecutionBlockHash::zero(), - transactions: vec![].into(), - withdrawals: pa.withdrawals.clone().into(), + transactions: vec![].try_into().unwrap(), + withdrawals: pa.withdrawals.clone().try_into().unwrap(), blob_gas_used: 0, excess_blob_gas: 0, }), @@ -712,17 +712,17 @@ impl ExecutionBlockGenerator { fee_recipient: pa.suggested_fee_recipient, receipts_root: Hash256::repeat_byte(42), state_root: Hash256::repeat_byte(43), - logs_bloom: vec![0; 256].into(), + logs_bloom: 
vec![0; 256].try_into().unwrap(), prev_randao: pa.prev_randao, block_number: parent.block_number() + 1, gas_limit: DEFAULT_GAS_LIMIT, gas_used: GAS_USED, timestamp: pa.timestamp, - extra_data: "block gen was here".as_bytes().to_vec().into(), + extra_data: "block gen was here".as_bytes().to_vec().try_into().unwrap(), base_fee_per_gas: Uint256::from(1u64), block_hash: ExecutionBlockHash::zero(), - transactions: vec![].into(), - withdrawals: pa.withdrawals.clone().into(), + transactions: vec![].try_into().unwrap(), + withdrawals: pa.withdrawals.clone().try_into().unwrap(), blob_gas_used: 0, excess_blob_gas: 0, }), @@ -813,24 +813,25 @@ pub fn generate_blobs( let bundle = if fork_name.fulu_enabled() { let (kzg_commitment, kzg_proofs, blob) = load_test_blobs_bundle_v2::()?; BlobsBundle { - commitments: vec![kzg_commitment; n_blobs].into(), + commitments: vec![kzg_commitment; n_blobs].try_into().unwrap(), proofs: vec![kzg_proofs.to_vec(); n_blobs] .into_iter() .flatten() .collect::>() - .into(), - blobs: vec![blob; n_blobs].into(), + .try_into() + .unwrap(), + blobs: vec![blob; n_blobs].try_into().unwrap(), } } else { let (kzg_commitment, kzg_proof, blob) = load_test_blobs_bundle_v1::()?; BlobsBundle { - commitments: vec![kzg_commitment; n_blobs].into(), - proofs: vec![kzg_proof; n_blobs].into(), - blobs: vec![blob; n_blobs].into(), + commitments: vec![kzg_commitment; n_blobs].try_into().unwrap(), + proofs: vec![kzg_proof; n_blobs].try_into().unwrap(), + blobs: vec![blob; n_blobs].try_into().unwrap(), } }; - Ok((bundle, transactions.into())) + Ok((bundle, transactions.try_into().unwrap())) } pub fn static_valid_tx() -> Result, String> { diff --git a/beacon_node/execution_layer/src/test_utils/handle_rpc.rs b/beacon_node/execution_layer/src/test_utils/handle_rpc.rs index 7a451beddb3..2168ed8961e 100644 --- a/beacon_node/execution_layer/src/test_utils/handle_rpc.rs +++ b/beacon_node/execution_layer/src/test_utils/handle_rpc.rs @@ -252,7 +252,7 @@ pub async fn handle_rpc( 
Some( ctx.execution_block_generator .write() - .new_payload(request.into()), + .new_payload(request.try_into().unwrap()), ) } else { None @@ -361,98 +361,107 @@ pub async fn handle_rpc( } match method { - ENGINE_GET_PAYLOAD_V1 => { - Ok(serde_json::to_value(JsonExecutionPayload::from(response)).unwrap()) + ENGINE_GET_PAYLOAD_V1 => Ok(serde_json::to_value( + JsonExecutionPayload::try_from(response).unwrap(), + ) + .unwrap()), + ENGINE_GET_PAYLOAD_V2 => { + Ok(match JsonExecutionPayload::try_from(response).unwrap() { + JsonExecutionPayload::Bellatrix(execution_payload) => { + serde_json::to_value(JsonGetPayloadResponseBellatrix { + execution_payload, + block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), + }) + .unwrap() + } + JsonExecutionPayload::Capella(execution_payload) => { + serde_json::to_value(JsonGetPayloadResponseCapella { + execution_payload, + block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), + }) + .unwrap() + } + _ => unreachable!(), + }) } - ENGINE_GET_PAYLOAD_V2 => Ok(match JsonExecutionPayload::from(response) { - JsonExecutionPayload::Bellatrix(execution_payload) => { - serde_json::to_value(JsonGetPayloadResponseBellatrix { - execution_payload, - block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), - }) - .unwrap() - } - JsonExecutionPayload::Capella(execution_payload) => { - serde_json::to_value(JsonGetPayloadResponseCapella { - execution_payload, - block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), - }) - .unwrap() - } - _ => unreachable!(), - }), // From v3 onwards, we use the getPayload version only for the corresponding // ExecutionPayload version. So we return an error if the ExecutionPayload version // we get does not correspond to the getPayload version. 
- ENGINE_GET_PAYLOAD_V3 => Ok(match JsonExecutionPayload::from(response) { - JsonExecutionPayload::Deneb(execution_payload) => { - serde_json::to_value(JsonGetPayloadResponseDeneb { - execution_payload, - block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), - blobs_bundle: maybe_blobs - .ok_or(( - "No blobs returned despite V3 Payload".to_string(), - GENERIC_ERROR_CODE, - ))? - .into(), - should_override_builder: false, - }) - .unwrap() - } - _ => unreachable!(), - }), - ENGINE_GET_PAYLOAD_V4 => Ok(match JsonExecutionPayload::from(response) { - JsonExecutionPayload::Electra(execution_payload) => { - serde_json::to_value(JsonGetPayloadResponseElectra { - execution_payload, - block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), - blobs_bundle: maybe_blobs - .ok_or(( - "No blobs returned despite V4 Payload".to_string(), - GENERIC_ERROR_CODE, - ))? - .into(), - should_override_builder: false, - // TODO(electra): add EL requests in mock el - execution_requests: Default::default(), - }) - .unwrap() - } - _ => unreachable!(), - }), - ENGINE_GET_PAYLOAD_V5 => Ok(match JsonExecutionPayload::from(response) { - JsonExecutionPayload::Fulu(execution_payload) => { - serde_json::to_value(JsonGetPayloadResponseFulu { - execution_payload, - block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), - blobs_bundle: maybe_blobs - .ok_or(( - "No blobs returned despite V5 Payload".to_string(), - GENERIC_ERROR_CODE, - ))? - .into(), - should_override_builder: false, - execution_requests: Default::default(), - }) - .unwrap() - } - JsonExecutionPayload::Gloas(execution_payload) => { - serde_json::to_value(JsonGetPayloadResponseGloas { - execution_payload, - block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), - blobs_bundle: maybe_blobs - .ok_or(( - "No blobs returned despite V5 Payload".to_string(), - GENERIC_ERROR_CODE, - ))? 
- .into(), - should_override_builder: false, - execution_requests: Default::default(), - }) - .unwrap() - } - _ => unreachable!(), - }), + ENGINE_GET_PAYLOAD_V3 => { + Ok(match JsonExecutionPayload::try_from(response).unwrap() { + JsonExecutionPayload::Deneb(execution_payload) => { + serde_json::to_value(JsonGetPayloadResponseDeneb { + execution_payload, + block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), + blobs_bundle: maybe_blobs + .ok_or(( + "No blobs returned despite V3 Payload".to_string(), + GENERIC_ERROR_CODE, + ))? + .into(), + should_override_builder: false, + }) + .unwrap() + } + _ => unreachable!(), + }) + } + ENGINE_GET_PAYLOAD_V4 => { + Ok(match JsonExecutionPayload::try_from(response).unwrap() { + JsonExecutionPayload::Electra(execution_payload) => { + serde_json::to_value(JsonGetPayloadResponseElectra { + execution_payload, + block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), + blobs_bundle: maybe_blobs + .ok_or(( + "No blobs returned despite V4 Payload".to_string(), + GENERIC_ERROR_CODE, + ))? + .into(), + should_override_builder: false, + // TODO(electra): add EL requests in mock el + execution_requests: Default::default(), + }) + .unwrap() + } + _ => unreachable!(), + }) + } + ENGINE_GET_PAYLOAD_V5 => { + Ok(match JsonExecutionPayload::try_from(response).unwrap() { + JsonExecutionPayload::Fulu(execution_payload) => { + serde_json::to_value(JsonGetPayloadResponseFulu { + execution_payload, + block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), + blobs_bundle: maybe_blobs + .ok_or(( + "No blobs returned despite V5 Payload".to_string(), + GENERIC_ERROR_CODE, + ))? 
+ .into(), + should_override_builder: false, + execution_requests: Default::default(), + }) + .unwrap() + } + JsonExecutionPayload::Gloas(execution_payload) => { + serde_json::to_value(JsonGetPayloadResponseGloas { + execution_payload, + block_value: Uint256::from(DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI), + blobs_bundle: maybe_blobs + .ok_or(( + "No blobs returned despite V5 Payload".to_string(), + GENERIC_ERROR_CODE, + ))? + .into(), + should_override_builder: false, + execution_requests: Default::default(), + }) + .unwrap() + } + _ => unreachable!(), + }) + } _ => unreachable!(), } } @@ -644,7 +653,8 @@ pub async fn handle_rpc( transactions: payload.transactions().clone(), withdrawals: payload.withdrawals().ok().cloned(), }; - let json_payload_body = JsonExecutionPayloadBodyV1::from(payload_body); + let json_payload_body: JsonExecutionPayloadBodyV1 = + payload_body.try_into().unwrap(); response.push(Some(json_payload_body)); } None => response.push(None), diff --git a/beacon_node/execution_layer/src/test_utils/mock_builder.rs b/beacon_node/execution_layer/src/test_utils/mock_builder.rs index 6b63881d856..f0991f1733f 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_builder.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_builder.rs @@ -72,7 +72,7 @@ impl Operation { } pub fn mock_builder_extra_data() -> types::VariableList { - "mock_builder".as_bytes().to_vec().into() + "mock_builder".as_bytes().to_vec().try_into().unwrap() } #[derive(Debug)] diff --git a/beacon_node/genesis/src/common.rs b/beacon_node/genesis/src/common.rs index e48fa362046..88a88810d8a 100644 --- a/beacon_node/genesis/src/common.rs +++ b/beacon_node/genesis/src/common.rs @@ -37,10 +37,17 @@ pub fn genesis_deposits( proofs.push(proof); } - Ok(deposit_data + deposit_data .into_iter() .zip(proofs) - .map(|(data, proof)| (data, proof.into())) - .map(|(data, proof)| Deposit { proof, data }) - .collect()) + .map(|(data, proof)| { + let converted_proof = proof + .try_into() + .map_err(|e| 
format!("Error converting proof: {:?}", e))?; + Ok(Deposit { + proof: converted_proof, + data, + }) + }) + .collect() } diff --git a/beacon_node/lighthouse_network/src/rpc/codec.rs b/beacon_node/lighthouse_network/src/rpc/codec.rs index acb01884564..77d2a34e16e 100644 --- a/beacon_node/lighthouse_network/src/rpc/codec.rs +++ b/beacon_node/lighthouse_network/src/rpc/codec.rs @@ -1002,8 +1002,9 @@ mod tests { let mut block: BeaconBlockBellatrix<_, FullPayload> = BeaconBlockBellatrix::empty(spec); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat_n(tx, 5000).collect::>()); + let tx = VariableList::try_from(vec![0; 1024]).unwrap(); + let txs = + VariableList::try_from(std::iter::repeat_n(tx, 5000).collect::>()).unwrap(); block.body.execution_payload.execution_payload.transactions = txs; @@ -1021,8 +1022,9 @@ mod tests { let mut block: BeaconBlockBellatrix<_, FullPayload> = BeaconBlockBellatrix::empty(spec); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat_n(tx, 100000).collect::>()); + let tx = VariableList::try_from(vec![0; 1024]).unwrap(); + let txs = + VariableList::try_from(std::iter::repeat_n(tx, 100000).collect::>()).unwrap(); block.body.execution_payload.execution_payload.transactions = txs; @@ -1080,7 +1082,7 @@ mod tests { data_column_ids: RuntimeVariableList::new( vec![DataColumnsByRootIdentifier { block_root: Hash256::zero(), - columns: VariableList::from(vec![0, 1, 2]), + columns: VariableList::try_from(vec![0, 1, 2]).unwrap(), }], spec.max_request_blocks(fork_name), ) diff --git a/beacon_node/lighthouse_network/src/rpc/methods.rs b/beacon_node/lighthouse_network/src/rpc/methods.rs index 9319973e597..9aab0799521 100644 --- a/beacon_node/lighthouse_network/src/rpc/methods.rs +++ b/beacon_node/lighthouse_network/src/rpc/methods.rs @@ -29,15 +29,21 @@ pub const MAX_ERROR_LEN: u64 = 256; #[derive(Debug, Clone)] pub struct ErrorType(pub VariableList); -impl From for 
ErrorType { - fn from(s: String) -> Self { - Self(VariableList::from(s.as_bytes().to_vec())) +impl From<&str> for ErrorType { + // This will truncate the error if `string.as_bytes()` exceeds `MaxErrorLen`. + fn from(s: &str) -> Self { + let mut bytes = s.as_bytes().to_vec(); + bytes.truncate(MAX_ERROR_LEN as usize); + Self( + VariableList::try_from(bytes) + .expect("length should not exceed MaxErrorLen after truncation"), + ) } } -impl From<&str> for ErrorType { - fn from(s: &str) -> Self { - Self(VariableList::from(s.as_bytes().to_vec())) +impl From for ErrorType { + fn from(s: String) -> Self { + Self::from(s.as_str()) } } diff --git a/beacon_node/lighthouse_network/src/rpc/protocol.rs b/beacon_node/lighthouse_network/src/rpc/protocol.rs index 228a74f08cc..08085f3c271 100644 --- a/beacon_node/lighthouse_network/src/rpc/protocol.rs +++ b/beacon_node/lighthouse_network/src/rpc/protocol.rs @@ -70,13 +70,15 @@ pub static BLOB_SIDECAR_SIZE_MINIMAL: LazyLock = LazyLock::new(BlobSidecar::::max_size); pub static ERROR_TYPE_MIN: LazyLock = LazyLock::new(|| { - VariableList::::from(Vec::::new()) + VariableList::::try_from(Vec::::new()) + .expect("MaxErrorLen should not exceed MAX_ERROR_LEN") .as_ssz_bytes() .len() }); pub static ERROR_TYPE_MAX: LazyLock = LazyLock::new(|| { - VariableList::::from(vec![0u8; MAX_ERROR_LEN as usize]) + VariableList::::try_from(vec![0u8; MAX_ERROR_LEN as usize]) + .expect("MaxErrorLen should not exceed MAX_ERROR_LEN") .as_ssz_bytes() .len() }); diff --git a/beacon_node/lighthouse_network/tests/rpc_tests.rs b/beacon_node/lighthouse_network/tests/rpc_tests.rs index e37f4131a76..81d08764a5f 100644 --- a/beacon_node/lighthouse_network/tests/rpc_tests.rs +++ b/beacon_node/lighthouse_network/tests/rpc_tests.rs @@ -26,8 +26,8 @@ type E = MinimalEthSpec; /// Bellatrix block with length < max_rpc_size. 
fn bellatrix_block_small(spec: &ChainSpec) -> BeaconBlock { let mut block = BeaconBlockBellatrix::::empty(spec); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat_n(tx, 5000).collect::>()); + let tx = VariableList::try_from(vec![0; 1024]).unwrap(); + let txs = VariableList::try_from(std::iter::repeat_n(tx, 5000).collect::>()).unwrap(); block.body.execution_payload.execution_payload.transactions = txs; @@ -41,8 +41,8 @@ fn bellatrix_block_small(spec: &ChainSpec) -> BeaconBlock { /// Hence, we generate a bellatrix block just greater than `MAX_RPC_SIZE` to test rejection on the rpc layer. fn bellatrix_block_large(spec: &ChainSpec) -> BeaconBlock { let mut block = BeaconBlockBellatrix::::empty(spec); - let tx = VariableList::from(vec![0; 1024]); - let txs = VariableList::from(std::iter::repeat_n(tx, 100000).collect::>()); + let tx = VariableList::try_from(vec![0; 1024]).unwrap(); + let txs = VariableList::try_from(std::iter::repeat_n(tx, 100000).collect::>()).unwrap(); block.body.execution_payload.execution_payload.transactions = txs; @@ -1018,14 +1018,17 @@ fn test_tcp_columns_by_root_chunked_rpc() { }, signature: Signature::empty(), }, - column: vec![vec![0; E::bytes_per_blob()].into()].into(), - kzg_commitments: vec![KzgCommitment::empty_for_testing()].into(), - kzg_proofs: vec![KzgProof::empty()].into(), + column: vec![vec![0; E::bytes_per_cell()].try_into().unwrap()] + .try_into() + .unwrap(), + kzg_commitments: vec![KzgCommitment::empty_for_testing()].try_into().unwrap(), + kzg_proofs: vec![KzgProof::empty()].try_into().unwrap(), kzg_commitments_inclusion_proof: vec![ Hash256::zero(); E::kzg_commitments_inclusion_proof_depth() ] - .into(), + .try_into() + .unwrap(), }); let rpc_response = Response::DataColumnsByRoot(Some(data_column.clone())); @@ -1160,14 +1163,17 @@ fn test_tcp_columns_by_range_chunked_rpc() { }, signature: Signature::empty(), }, - column: vec![vec![0; E::bytes_per_blob()].into()].into(), - 
kzg_commitments: vec![KzgCommitment::empty_for_testing()].into(), - kzg_proofs: vec![KzgProof::empty()].into(), + column: vec![vec![0; E::bytes_per_cell()].try_into().unwrap()] + .try_into() + .unwrap(), + kzg_commitments: vec![KzgCommitment::empty_for_testing()].try_into().unwrap(), + kzg_proofs: vec![KzgProof::empty()].try_into().unwrap(), kzg_commitments_inclusion_proof: vec![ Hash256::zero(); E::kzg_commitments_inclusion_proof_depth() ] - .into(), + .try_into() + .unwrap(), }); let rpc_response = Response::DataColumnsByRange(Some(data_column.clone())); diff --git a/beacon_node/network/src/network_beacon_processor/gossip_methods.rs b/beacon_node/network/src/network_beacon_processor/gossip_methods.rs index a492ece5080..1ffe921e58e 100644 --- a/beacon_node/network/src/network_beacon_processor/gossip_methods.rs +++ b/beacon_node/network/src/network_beacon_processor/gossip_methods.rs @@ -2742,6 +2742,20 @@ impl NetworkBeaconProcessor { } } } + AttnError::SszTypesError(_) => { + error!( + %peer_id, + block = ?beacon_block_root, + ?attestation_type, + "Rejecting attestation due to a critical SSZ types error" + ); + self.propagate_validation_result(message_id, peer_id, MessageAcceptance::Reject); + self.gossip_penalize_peer( + peer_id, + PeerAction::MidToleranceError, + "attn_ssz_types_error", + ); + } } debug!( diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs index 8f553b57d9c..60bc0804e45 100644 --- a/common/eth2/src/types.rs +++ b/common/eth2/src/types.rs @@ -1690,8 +1690,8 @@ mod tests { BeaconBlock::::Deneb(BeaconBlockDeneb::empty(&spec)), Signature::empty(), ); - let blobs = BlobsList::::from(vec![Blob::::default()]); - let kzg_proofs = KzgProofs::::from(vec![KzgProof::empty()]); + let blobs = BlobsList::::try_from(vec![Blob::::default()]).unwrap(); + let kzg_proofs = KzgProofs::::try_from(vec![KzgProof::empty()]).unwrap(); let signed_block_contents = PublishBlockRequest::new(Arc::new(block), Some((kzg_proofs, blobs))); diff --git 
a/consensus/fork_choice/tests/tests.rs b/consensus/fork_choice/tests/tests.rs index 25c3f03d3b9..67b792ef0d8 100644 --- a/consensus/fork_choice/tests/tests.rs +++ b/consensus/fork_choice/tests/tests.rs @@ -753,10 +753,10 @@ async fn invalid_attestation_empty_bitfield() { MutationDelay::NoDelay, |attestation, _| match attestation { IndexedAttestation::Base(att) => { - att.attesting_indices = vec![].into(); + att.attesting_indices = vec![].try_into().unwrap(); } IndexedAttestation::Electra(att) => { - att.attesting_indices = vec![].into(); + att.attesting_indices = vec![].try_into().unwrap(); } }, |result| { diff --git a/consensus/state_processing/src/per_block_processing.rs b/consensus/state_processing/src/per_block_processing.rs index 99abbef9c1e..9e7a20040e8 100644 --- a/consensus/state_processing/src/per_block_processing.rs +++ b/consensus/state_processing/src/per_block_processing.rs @@ -628,7 +628,12 @@ pub fn get_expected_withdrawals( .safe_rem(state.validators().len() as u64)?; } - Ok((withdrawals.into(), processed_partial_withdrawals_count)) + Ok(( + withdrawals + .try_into() + .map_err(BlockProcessingError::SszTypesError)?, + processed_partial_withdrawals_count, + )) } /// Apply withdrawals to the state. 
diff --git a/consensus/state_processing/src/per_block_processing/tests.rs b/consensus/state_processing/src/per_block_processing/tests.rs index 183063ac762..c32797f77f3 100644 --- a/consensus/state_processing/src/per_block_processing/tests.rs +++ b/consensus/state_processing/src/per_block_processing/tests.rs @@ -213,7 +213,7 @@ async fn valid_4_deposits() { let mut state = harness.get_current_state(); let (deposits, state) = harness.make_deposits(&mut state, 4, None, None); - let deposits = VariableList::from(deposits); + let deposits = VariableList::try_from(deposits).unwrap(); let mut head_block = harness .chain @@ -237,7 +237,7 @@ async fn invalid_deposit_deposit_count_too_big() { let mut state = harness.get_current_state(); let (deposits, state) = harness.make_deposits(&mut state, 1, None, None); - let deposits = VariableList::from(deposits); + let deposits = VariableList::try_from(deposits).unwrap(); let mut head_block = harness .chain @@ -269,7 +269,7 @@ async fn invalid_deposit_count_too_small() { let mut state = harness.get_current_state(); let (deposits, state) = harness.make_deposits(&mut state, 1, None, None); - let deposits = VariableList::from(deposits); + let deposits = VariableList::try_from(deposits).unwrap(); let mut head_block = harness .chain @@ -301,7 +301,7 @@ async fn invalid_deposit_bad_merkle_proof() { let mut state = harness.get_current_state(); let (deposits, state) = harness.make_deposits(&mut state, 1, None, None); - let deposits = VariableList::from(deposits); + let deposits = VariableList::try_from(deposits).unwrap(); let mut head_block = harness .chain @@ -336,7 +336,7 @@ async fn invalid_deposit_wrong_sig() { let (deposits, state) = harness.make_deposits(&mut state, 1, None, Some(SignatureBytes::empty())); - let deposits = VariableList::from(deposits); + let deposits = VariableList::try_from(deposits).unwrap(); let mut head_block = harness .chain @@ -360,7 +360,7 @@ async fn invalid_deposit_invalid_pub_key() { let (deposits, state) = 
harness.make_deposits(&mut state, 1, Some(PublicKeyBytes::empty()), None); - let deposits = VariableList::from(deposits); + let deposits = VariableList::try_from(deposits).unwrap(); let mut head_block = harness .chain @@ -753,10 +753,12 @@ async fn invalid_attester_slashing_1_invalid() { let mut attester_slashing = harness.make_attester_slashing(vec![1, 2]); match &mut attester_slashing { AttesterSlashing::Base(attester_slashing) => { - attester_slashing.attestation_1.attesting_indices = VariableList::from(vec![2, 1]); + attester_slashing.attestation_1.attesting_indices = + VariableList::try_from(vec![2, 1]).unwrap(); } AttesterSlashing::Electra(attester_slashing) => { - attester_slashing.attestation_1.attesting_indices = VariableList::from(vec![2, 1]); + attester_slashing.attestation_1.attesting_indices = + VariableList::try_from(vec![2, 1]).unwrap(); } } @@ -791,10 +793,12 @@ async fn invalid_attester_slashing_2_invalid() { let mut attester_slashing = harness.make_attester_slashing(vec![1, 2]); match &mut attester_slashing { AttesterSlashing::Base(attester_slashing) => { - attester_slashing.attestation_2.attesting_indices = VariableList::from(vec![2, 1]); + attester_slashing.attestation_2.attesting_indices = + VariableList::try_from(vec![2, 1]).unwrap(); } AttesterSlashing::Electra(attester_slashing) => { - attester_slashing.attestation_2.attesting_indices = VariableList::from(vec![2, 1]); + attester_slashing.attestation_2.attesting_indices = + VariableList::try_from(vec![2, 1]).unwrap(); } } diff --git a/consensus/types/src/attestation.rs b/consensus/types/src/attestation.rs index 860f0d0a2d3..52646867925 100644 --- a/consensus/types/src/attestation.rs +++ b/consensus/types/src/attestation.rs @@ -622,19 +622,22 @@ pub struct SingleAttestation { } impl SingleAttestation { - pub fn to_indexed(&self, fork_name: ForkName) -> IndexedAttestation { + pub fn to_indexed( + &self, + fork_name: ForkName, + ) -> Result, ssz_types::Error> { if fork_name.electra_enabled() { - 
IndexedAttestation::Electra(IndexedAttestationElectra { - attesting_indices: vec![self.attester_index].into(), + Ok(IndexedAttestation::Electra(IndexedAttestationElectra { + attesting_indices: vec![self.attester_index].try_into()?, data: self.data.clone(), signature: self.signature.clone(), - }) + })) } else { - IndexedAttestation::Base(IndexedAttestationBase { - attesting_indices: vec![self.attester_index].into(), + Ok(IndexedAttestation::Base(IndexedAttestationBase { + attesting_indices: vec![self.attester_index].try_into()?, data: self.data.clone(), signature: self.signature.clone(), - }) + })) } } } diff --git a/consensus/types/src/chain_spec.rs b/consensus/types/src/chain_spec.rs index 93f5140383a..fd87c8c2222 100644 --- a/consensus/types/src/chain_spec.rs +++ b/consensus/types/src/chain_spec.rs @@ -2067,7 +2067,7 @@ fn max_data_columns_by_root_request_common(max_request_blocks: u64) let empty_data_columns_by_root_id = DataColumnsByRootIdentifier { block_root: Hash256::zero(), - columns: VariableList::from(vec![0; E::number_of_columns()]), + columns: VariableList::repeat_full(0), }; RuntimeVariableList::>::new( diff --git a/consensus/types/src/eth_spec.rs b/consensus/types/src/eth_spec.rs index e001cf0e4e9..47d32ad9e4d 100644 --- a/consensus/types/src/eth_spec.rs +++ b/consensus/types/src/eth_spec.rs @@ -310,6 +310,11 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq + Self::BytesPerBlob::to_usize() } + /// Returns the `BYTES_PER_CELL` constant for this specification. + fn bytes_per_cell() -> usize { + Self::BytesPerCell::to_usize() + } + /// Returns the `KZG_COMMITMENT_INCLUSION_PROOF_DEPTH` preset for this specification. 
fn kzg_proof_inclusion_proof_depth() -> usize { Self::KzgCommitmentInclusionProofDepth::to_usize() diff --git a/consensus/types/src/light_client_bootstrap.rs b/consensus/types/src/light_client_bootstrap.rs index 5850db876c2..530ccd88dee 100644 --- a/consensus/types/src/light_client_bootstrap.rs +++ b/consensus/types/src/light_client_bootstrap.rs @@ -151,32 +151,44 @@ impl LightClientBootstrap { ForkName::Altair | ForkName::Bellatrix => Self::Altair(LightClientBootstrapAltair { header: LightClientHeaderAltair::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Capella => Self::Capella(LightClientBootstrapCapella { header: LightClientHeaderCapella::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Deneb => Self::Deneb(LightClientBootstrapDeneb { header: LightClientHeaderDeneb::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Electra => Self::Electra(LightClientBootstrapElectra { header: LightClientHeaderElectra::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Fulu => Self::Fulu(LightClientBootstrapFulu { header: LightClientHeaderFulu::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: 
current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Gloas => Self::Gloas(LightClientBootstrapGloas { header: LightClientHeaderGloas::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), }; @@ -201,32 +213,44 @@ impl LightClientBootstrap { ForkName::Altair | ForkName::Bellatrix => Self::Altair(LightClientBootstrapAltair { header: LightClientHeaderAltair::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Capella => Self::Capella(LightClientBootstrapCapella { header: LightClientHeaderCapella::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Deneb => Self::Deneb(LightClientBootstrapDeneb { header: LightClientHeaderDeneb::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Electra => Self::Electra(LightClientBootstrapElectra { header: LightClientHeaderElectra::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Fulu => 
Self::Fulu(LightClientBootstrapFulu { header: LightClientHeaderFulu::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), ForkName::Gloas => Self::Gloas(LightClientBootstrapGloas { header: LightClientHeaderGloas::block_to_light_client_header(block)?, current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch.into(), + current_sync_committee_branch: current_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, }), }; diff --git a/consensus/types/src/light_client_finality_update.rs b/consensus/types/src/light_client_finality_update.rs index 4fa98de40be..644824f12c2 100644 --- a/consensus/types/src/light_client_finality_update.rs +++ b/consensus/types/src/light_client_finality_update.rs @@ -116,7 +116,7 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderAltair::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate, signature_slot, }) @@ -128,7 +128,7 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderCapella::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -139,7 +139,7 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderDeneb::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -150,7 +150,7 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderElectra::block_to_light_client_header( finalized_block, )?, - 
finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -161,7 +161,7 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderFulu::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -172,7 +172,7 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderGloas::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate, signature_slot, }), diff --git a/consensus/types/src/light_client_update.rs b/consensus/types/src/light_client_update.rs index bf1a8c614a7..afb7ebc96dc 100644 --- a/consensus/types/src/light_client_update.rs +++ b/consensus/types/src/light_client_update.rs @@ -261,9 +261,11 @@ impl LightClientUpdate { Self::Altair(LightClientUpdateAltair { attested_header, next_sync_committee, - next_sync_committee_branch: next_sync_committee_branch.into(), + next_sync_committee_branch: next_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, finalized_header, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -285,9 +287,11 @@ impl LightClientUpdate { Self::Capella(LightClientUpdateCapella { attested_header, next_sync_committee, - next_sync_committee_branch: next_sync_committee_branch.into(), + next_sync_committee_branch: next_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, finalized_header, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: 
block_slot, }) @@ -309,9 +313,11 @@ impl LightClientUpdate { Self::Deneb(LightClientUpdateDeneb { attested_header, next_sync_committee, - next_sync_committee_branch: next_sync_committee_branch.into(), + next_sync_committee_branch: next_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, finalized_header, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -333,9 +339,11 @@ impl LightClientUpdate { Self::Electra(LightClientUpdateElectra { attested_header, next_sync_committee, - next_sync_committee_branch: next_sync_committee_branch.into(), + next_sync_committee_branch: next_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, finalized_header, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -357,9 +365,11 @@ impl LightClientUpdate { Self::Fulu(LightClientUpdateFulu { attested_header, next_sync_committee, - next_sync_committee_branch: next_sync_committee_branch.into(), + next_sync_committee_branch: next_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, finalized_header, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -381,9 +391,11 @@ impl LightClientUpdate { Self::Gloas(LightClientUpdateGloas { attested_header, next_sync_committee, - next_sync_committee_branch: next_sync_committee_branch.into(), + next_sync_committee_branch: next_sync_committee_branch + .try_into() + .map_err(Error::SszTypesError)?, finalized_header, - finality_branch: finality_branch.into(), + finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), 
signature_slot: block_slot, }) diff --git a/consensus/types/src/test_utils/test_random.rs b/consensus/types/src/test_utils/test_random.rs index 98bb8565dd6..7c8f86e14df 100644 --- a/consensus/types/src/test_utils/test_random.rs +++ b/consensus/types/src/test_utils/test_random.rs @@ -115,7 +115,7 @@ where } } - output.into() + output.try_into().unwrap() } } diff --git a/lcli/src/http_sync.rs b/lcli/src/http_sync.rs index 6f7dcdb5956..dd941cda74e 100644 --- a/lcli/src/http_sync.rs +++ b/lcli/src/http_sync.rs @@ -139,8 +139,8 @@ async fn get_block_from_source( let block_root = block_from_source.canonical_root(); let block_contents = SignedBlockContents { signed_block: Arc::new(block_from_source), - kzg_proofs: kzg_proofs.into(), - blobs: blobs.into(), + kzg_proofs: kzg_proofs.try_into().unwrap(), + blobs: blobs.try_into().unwrap(), }; let publish_block_req = PublishBlockRequest::BlockContents(block_contents); diff --git a/slasher/src/test_utils.rs b/slasher/src/test_utils.rs index 26338a019a2..bbbadac7618 100644 --- a/slasher/src/test_utils.rs +++ b/slasher/src/test_utils.rs @@ -17,7 +17,7 @@ pub fn indexed_att_electra( target_root: u64, ) -> IndexedAttestation { IndexedAttestation::Electra(IndexedAttestationElectra { - attesting_indices: attesting_indices.as_ref().to_vec().into(), + attesting_indices: attesting_indices.as_ref().to_vec().try_into().unwrap(), data: AttestationData { slot: Slot::new(0), index: 0, @@ -42,7 +42,7 @@ pub fn indexed_att( target_root: u64, ) -> IndexedAttestation { IndexedAttestation::Base(IndexedAttestationBase { - attesting_indices: attesting_indices.as_ref().to_vec().into(), + attesting_indices: attesting_indices.as_ref().to_vec().try_into().unwrap(), data: AttestationData { slot: Slot::new(0), index: 0, diff --git a/testing/ef_tests/src/cases/ssz_generic.rs b/testing/ef_tests/src/cases/ssz_generic.rs index 4152711aee7..8742f8a1409 100644 --- a/testing/ef_tests/src/cases/ssz_generic.rs +++ b/testing/ef_tests/src/cases/ssz_generic.rs @@ 
-318,14 +318,13 @@ where { let s: String = serde::de::Deserialize::deserialize(deserializer)?; let decoded: Vec = hex::decode(&s.as_str()[2..]).map_err(D::Error::custom)?; + let decoded_len = decoded.len(); - if decoded.len() > N::to_usize() { - Err(D::Error::custom(format!( + decoded.try_into().map_err(|_| { + D::Error::custom(format!( "Too many values for list, got: {}, limit: {}", - decoded.len(), + decoded_len, N::to_usize() - ))) - } else { - Ok(decoded.into()) - } + )) + }) } From f4b1bb46b52998342923629142fab93d9eaada7b Mon Sep 17 00:00:00 2001 From: Mac L Date: Tue, 28 Oct 2025 09:49:47 +0400 Subject: [PATCH 04/74] Remove `compare_fields` and import from crates.io (#8189) Use the recently published `compare_fields` and remove it from Lighthouse https://crates.io/crates/compare_fields Co-Authored-By: Mac L --- Cargo.lock | 12 +- Cargo.toml | 5 +- common/compare_fields/Cargo.toml | 14 -- common/compare_fields/src/lib.rs | 197 ---------------------- common/compare_fields_derive/Cargo.toml | 12 -- common/compare_fields_derive/src/lib.rs | 70 -------- consensus/types/Cargo.toml | 1 - consensus/types/src/beacon_state.rs | 1 - consensus/types/src/historical_summary.rs | 2 +- testing/ef_tests/Cargo.toml | 1 - testing/ef_tests/src/cases/rewards.rs | 2 +- 11 files changed, 10 insertions(+), 307 deletions(-) delete mode 100644 common/compare_fields/Cargo.toml delete mode 100644 common/compare_fields/src/lib.rs delete mode 100644 common/compare_fields_derive/Cargo.toml delete mode 100644 common/compare_fields_derive/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 8cc058b6155..8a282a60b79 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1658,15 +1658,19 @@ dependencies = [ [[package]] name = "compare_fields" -version = "0.2.0" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05162add7c8618791829528194a271dca93f69194d35b19db1ca7fbfb8275278" dependencies = [ "compare_fields_derive", - "itertools 0.10.5", + "itertools 
0.14.0", ] [[package]] name = "compare_fields_derive" -version = "0.2.0" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f5ee468b2e568b668e2a686112935e7bbe9a81bf4fa6b9f6fc3410ea45fb7ce" dependencies = [ "quote", "syn 1.0.109", @@ -2539,7 +2543,6 @@ dependencies = [ "beacon_chain", "bls", "compare_fields", - "compare_fields_derive", "context_deserialize", "derivative", "eth2_network_config", @@ -9903,7 +9906,6 @@ dependencies = [ "beacon_chain", "bls", "compare_fields", - "compare_fields_derive", "context_deserialize", "criterion", "derivative", diff --git a/Cargo.toml b/Cargo.toml index 721102bd06e..a9799077695 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,8 +19,6 @@ members = [ "boot_node", "common/account_utils", "common/clap_utils", - "common/compare_fields", - "common/compare_fields_derive", "common/deposit_contract", "common/directory", "common/eip_3076", @@ -121,8 +119,7 @@ c-kzg = { version = "2.1", default-features = false } cargo_metadata = "0.19" clap = { version = "4.5.4", features = ["derive", "cargo", "wrap_help"] } clap_utils = { path = "common/clap_utils" } -compare_fields = { path = "common/compare_fields" } -compare_fields_derive = { path = "common/compare_fields_derive" } +compare_fields = "0.1" console-subscriber = "0.4" context_deserialize = { path = "consensus/context_deserialize/context_deserialize", features = [ "all", diff --git a/common/compare_fields/Cargo.toml b/common/compare_fields/Cargo.toml deleted file mode 100644 index 50e7e5f21d2..00000000000 --- a/common/compare_fields/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "compare_fields" -version = "0.2.0" -authors = ["Paul Hauner "] -edition = { workspace = true } - -[package.metadata.cargo-udeps.ignore] -development = ["compare_fields_derive"] # used in doc-tests - -[dependencies] -itertools = { workspace = true } - -[dev-dependencies] -compare_fields_derive = { workspace = true } diff --git 
a/common/compare_fields/src/lib.rs b/common/compare_fields/src/lib.rs deleted file mode 100644 index 27baf148067..00000000000 --- a/common/compare_fields/src/lib.rs +++ /dev/null @@ -1,197 +0,0 @@ -//! Provides field-by-field comparisons for structs and vecs. -//! -//! Returns comparisons as data, without making assumptions about the desired equality (e.g., -//! does not `panic!` on inequality). -//! -//! Note: `compare_fields_derive` requires `PartialEq` and `Debug` implementations. -//! -//! ## Example -//! -//! ```rust -//! use compare_fields::{CompareFields, Comparison, FieldComparison}; -//! use compare_fields_derive::CompareFields; -//! -//! #[derive(PartialEq, Debug, CompareFields)] -//! pub struct Bar { -//! a: u64, -//! b: u16, -//! #[compare_fields(as_slice)] -//! c: Vec -//! } -//! -//! #[derive(Clone, PartialEq, Debug, CompareFields)] -//! pub struct Foo { -//! d: String -//! } -//! -//! let cat = Foo {d: "cat".to_string()}; -//! let dog = Foo {d: "dog".to_string()}; -//! let chicken = Foo {d: "chicken".to_string()}; -//! -//! let mut bar_a = Bar { -//! a: 42, -//! b: 12, -//! c: vec![ cat.clone(), dog.clone() ], -//! }; -//! -//! let mut bar_b = Bar { -//! a: 42, -//! b: 99, -//! c: vec![ chicken.clone(), dog.clone()] -//! }; -//! -//! let cat_dog = Comparison::Child(FieldComparison { -//! field_name: "d".to_string(), -//! equal: false, -//! a: "\"cat\"".to_string(), -//! b: "\"dog\"".to_string(), -//! }); -//! assert_eq!(cat.compare_fields(&dog), vec![cat_dog]); -//! -//! let bar_a_b = vec![ -//! Comparison::Child(FieldComparison { -//! field_name: "a".to_string(), -//! equal: true, -//! a: "42".to_string(), -//! b: "42".to_string(), -//! }), -//! Comparison::Child(FieldComparison { -//! field_name: "b".to_string(), -//! equal: false, -//! a: "12".to_string(), -//! b: "99".to_string(), -//! }), -//! Comparison::Parent{ -//! field_name: "c".to_string(), -//! equal: false, -//! children: vec![ -//! FieldComparison { -//! 
field_name: "0".to_string(), -//! equal: false, -//! a: "Some(Foo { d: \"cat\" })".to_string(), -//! b: "Some(Foo { d: \"chicken\" })".to_string(), -//! }, -//! FieldComparison { -//! field_name: "1".to_string(), -//! equal: true, -//! a: "Some(Foo { d: \"dog\" })".to_string(), -//! b: "Some(Foo { d: \"dog\" })".to_string(), -//! } -//! ] -//! } -//! ]; -//! assert_eq!(bar_a.compare_fields(&bar_b), bar_a_b); -//! ``` -use itertools::{EitherOrBoth, Itertools}; -use std::fmt::Debug; - -#[derive(Debug, PartialEq, Clone)] -pub enum Comparison { - Child(FieldComparison), - Parent { - field_name: String, - equal: bool, - children: Vec, - }, -} - -impl Comparison { - pub fn child>(field_name: String, a: &T, b: &T) -> Self { - Comparison::Child(FieldComparison::new(field_name, a, b)) - } - - pub fn parent(field_name: String, equal: bool, children: Vec) -> Self { - Comparison::Parent { - field_name, - equal, - children, - } - } - - pub fn from_slice>(field_name: String, a: &[T], b: &[T]) -> Self { - Self::from_iter(field_name, a.iter(), b.iter()) - } - - pub fn from_into_iter<'a, T: Debug + PartialEq + 'a>( - field_name: String, - a: impl IntoIterator, - b: impl IntoIterator, - ) -> Self { - Self::from_iter(field_name, a.into_iter(), b.into_iter()) - } - - pub fn from_iter<'a, T: Debug + PartialEq + 'a>( - field_name: String, - a: impl Iterator, - b: impl Iterator, - ) -> Self { - let mut children = vec![]; - let mut all_equal = true; - - for (i, entry) in a.zip_longest(b).enumerate() { - let comparison = match entry { - EitherOrBoth::Both(x, y) => { - FieldComparison::new(format!("{i}"), &Some(x), &Some(y)) - } - EitherOrBoth::Left(x) => FieldComparison::new(format!("{i}"), &Some(x), &None), - EitherOrBoth::Right(y) => FieldComparison::new(format!("{i}"), &None, &Some(y)), - }; - all_equal = all_equal && comparison.equal(); - children.push(comparison); - } - - Self::parent(field_name, all_equal, children) - } - - pub fn retain_children(&mut self, f: F) - where - F: 
FnMut(&FieldComparison) -> bool, - { - match self { - Comparison::Child(_) => (), - Comparison::Parent { children, .. } => children.retain(f), - } - } - - pub fn equal(&self) -> bool { - match self { - Comparison::Child(fc) => fc.equal, - Comparison::Parent { equal, .. } => *equal, - } - } - - pub fn not_equal(&self) -> bool { - !self.equal() - } -} - -#[derive(Debug, PartialEq, Clone)] -pub struct FieldComparison { - pub field_name: String, - pub equal: bool, - pub a: String, - pub b: String, -} - -pub trait CompareFields { - fn compare_fields(&self, b: &Self) -> Vec; -} - -impl FieldComparison { - pub fn new>(field_name: String, a: &T, b: &T) -> Self { - Self { - field_name, - equal: a == b, - a: format!("{a:?}"), - b: format!("{b:?}"), - } - } - - pub fn equal(&self) -> bool { - self.equal - } - - pub fn not_equal(&self) -> bool { - !self.equal() - } -} diff --git a/common/compare_fields_derive/Cargo.toml b/common/compare_fields_derive/Cargo.toml deleted file mode 100644 index 19682bf3673..00000000000 --- a/common/compare_fields_derive/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "compare_fields_derive" -version = "0.2.0" -authors = ["Paul Hauner "] -edition = { workspace = true } - -[lib] -proc-macro = true - -[dependencies] -quote = { workspace = true } -syn = { workspace = true } diff --git a/common/compare_fields_derive/src/lib.rs b/common/compare_fields_derive/src/lib.rs deleted file mode 100644 index 35299707214..00000000000 --- a/common/compare_fields_derive/src/lib.rs +++ /dev/null @@ -1,70 +0,0 @@ -use proc_macro::TokenStream; -use quote::quote; -use syn::{DeriveInput, parse_macro_input}; - -fn is_iter(field: &syn::Field) -> bool { - field.attrs.iter().any(|attr| { - attr.path.is_ident("compare_fields") - && (attr.tokens.to_string().replace(' ', "") == "(as_slice)" - || attr.tokens.to_string().replace(' ', "") == "(as_iter)") - }) -} - -#[proc_macro_derive(CompareFields, attributes(compare_fields))] -pub fn compare_fields_derive(input: 
TokenStream) -> TokenStream { - let item = parse_macro_input!(input as DeriveInput); - - let name = &item.ident; - let (impl_generics, ty_generics, where_clause) = &item.generics.split_for_impl(); - - let syn::Data::Struct(struct_data) = &item.data else { - panic!("compare_fields_derive only supports structs."); - }; - - let mut quotes = vec![]; - - for field in struct_data.fields.iter() { - let Some(ident_a) = &field.ident else { - panic!("compare_fields_derive only supports named struct fields."); - }; - let field_name = ident_a.to_string(); - let ident_b = ident_a.clone(); - - let quote = if is_iter(field) { - quote! { - comparisons.push(compare_fields::Comparison::from_into_iter( - #field_name.to_string(), - &self.#ident_a, - &b.#ident_b - )); - } - } else { - quote! { - comparisons.push( - compare_fields::Comparison::child( - #field_name.to_string(), - &self.#ident_a, - &b.#ident_b - ) - ); - } - }; - - quotes.push(quote); - } - - let output = quote! { - impl #impl_generics compare_fields::CompareFields for #name #ty_generics #where_clause { - fn compare_fields(&self, b: &Self) -> Vec { - let mut comparisons = vec![]; - - #( - #quotes - )* - - comparisons - } - } - }; - output.into() -} diff --git a/consensus/types/Cargo.toml b/consensus/types/Cargo.toml index bfce4b72d22..d9b2f101987 100644 --- a/consensus/types/Cargo.toml +++ b/consensus/types/Cargo.toml @@ -29,7 +29,6 @@ alloy-rlp = { version = "0.3.4", features = ["derive"] } arbitrary = { workspace = true, features = ["derive"], optional = true } bls = { workspace = true } compare_fields = { workspace = true } -compare_fields_derive = { workspace = true } context_deserialize = { workspace = true } derivative = { workspace = true } eth2_interop_keypairs = { path = "../../common/eth2_interop_keypairs" } diff --git a/consensus/types/src/beacon_state.rs b/consensus/types/src/beacon_state.rs index 1bd4927fe87..2b03fda3035 100644 --- a/consensus/types/src/beacon_state.rs +++ 
b/consensus/types/src/beacon_state.rs @@ -5,7 +5,6 @@ use crate::historical_summary::HistoricalSummary; use crate::test_utils::TestRandom; use crate::*; use compare_fields::CompareFields; -use compare_fields_derive::CompareFields; use derivative::Derivative; use ethereum_hashing::hash; use int_to_bytes::{int_to_bytes4, int_to_bytes8}; diff --git a/consensus/types/src/historical_summary.rs b/consensus/types/src/historical_summary.rs index 0aad2d903d7..dc147ad0428 100644 --- a/consensus/types/src/historical_summary.rs +++ b/consensus/types/src/historical_summary.rs @@ -1,7 +1,7 @@ use crate::context_deserialize; use crate::test_utils::TestRandom; use crate::{BeaconState, EthSpec, ForkName, Hash256}; -use compare_fields_derive::CompareFields; +use compare_fields::CompareFields; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; diff --git a/testing/ef_tests/Cargo.toml b/testing/ef_tests/Cargo.toml index 50007f91602..d9afce0efe7 100644 --- a/testing/ef_tests/Cargo.toml +++ b/testing/ef_tests/Cargo.toml @@ -16,7 +16,6 @@ alloy-primitives = { workspace = true } beacon_chain = { workspace = true } bls = { workspace = true } compare_fields = { workspace = true } -compare_fields_derive = { workspace = true } context_deserialize = { workspace = true } derivative = { workspace = true } eth2_network_config = { workspace = true } diff --git a/testing/ef_tests/src/cases/rewards.rs b/testing/ef_tests/src/cases/rewards.rs index d6ce8be7428..798014a6b06 100644 --- a/testing/ef_tests/src/cases/rewards.rs +++ b/testing/ef_tests/src/cases/rewards.rs @@ -1,7 +1,7 @@ use super::*; use crate::case_result::compare_result_detailed; use crate::decode::{ssz_decode_file, ssz_decode_state, yaml_decode_file}; -use compare_fields_derive::CompareFields; +use compare_fields::CompareFields; use serde::Deserialize; use ssz::four_byte_option_impl; use ssz_derive::{Decode, Encode}; From 341eeeabe36f9501cfe85ee4a2eedd44ac46612a Mon Sep 17 00:00:00 
2001 From: hopinheimer <48147533+hopinheimer@users.noreply.github.com> Date: Tue, 28 Oct 2025 12:32:02 +0530 Subject: [PATCH 05/74] Extracting the Error impl from the monolith `eth2` (#7878) Currently the `eth2` crate lib file is a large monolith of almost 3000 lines of code. As part of the bosun migration we are trying to increase code readability and modularity in the lighthouse crates initially, which then can be transferred to bosun Co-Authored-By: hopinheimer Co-Authored-By: hopinheimer <48147533+hopinheimer@users.noreply.github.com> --- common/eth2/src/error.rs | 165 +++++++++++++++++++ common/eth2/src/lib.rs | 117 +------------ common/eth2/src/lighthouse_vc/http_client.rs | 29 +--- common/eth2/src/types.rs | 42 +---- 4 files changed, 176 insertions(+), 177 deletions(-) create mode 100644 common/eth2/src/error.rs diff --git a/common/eth2/src/error.rs b/common/eth2/src/error.rs new file mode 100644 index 00000000000..c1bacb4510b --- /dev/null +++ b/common/eth2/src/error.rs @@ -0,0 +1,165 @@ +//! Centralized error handling for eth2 API clients +//! +//! This module consolidates all error types, response processing, +//! and recovery logic for both beacon node and validator client APIs. + +use pretty_reqwest_error::PrettyReqwestError; +use reqwest::{Response, StatusCode}; +use sensitive_url::SensitiveUrl; +use serde::{Deserialize, Serialize}; +use std::{fmt, path::PathBuf}; + +/// Main error type for eth2 API clients +#[derive(Debug)] +pub enum Error { + /// The `reqwest` client raised an error. + HttpClient(PrettyReqwestError), + /// The `reqwest_eventsource` client raised an error. + SseClient(Box), + /// The server returned an error message where the body was able to be parsed. + ServerMessage(ErrorMessage), + /// The server returned an error message with an array of errors. + ServerIndexedMessage(IndexedErrorMessage), + /// The server returned an error message where the body was unable to be parsed. 
+ StatusCode(StatusCode), + /// The supplied URL is badly formatted. It should look something like `http://127.0.0.1:5052`. + InvalidUrl(SensitiveUrl), + /// The supplied validator client secret is invalid. + InvalidSecret(String), + /// The server returned a response with an invalid signature. It may be an impostor. + InvalidSignatureHeader, + /// The server returned a response without a signature header. It may be an impostor. + MissingSignatureHeader, + /// The server returned an invalid JSON response. + InvalidJson(serde_json::Error), + /// The server returned an invalid server-sent event. + InvalidServerSentEvent(String), + /// The server sent invalid response headers. + InvalidHeaders(String), + /// The server returned an invalid SSZ response. + InvalidSsz(ssz::DecodeError), + /// An I/O error occurred while loading an API token from disk. + TokenReadError(PathBuf, std::io::Error), + /// The client has been configured without a server pubkey, but requires one for this request. + NoServerPubkey, + /// The client has been configured without an API token, but requires one for this request. + NoToken, +} + +/// An API error serializable to JSON. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ErrorMessage { + pub code: u16, + pub message: String, + #[serde(default)] + pub stacktraces: Vec, +} + +/// An indexed API error serializable to JSON. 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct IndexedErrorMessage { + pub code: u16, + pub message: String, + pub failures: Vec, +} + +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct Failure { + pub index: u64, + pub message: String, +} + +impl Failure { + pub fn new(index: usize, message: String) -> Self { + Self { + index: index as u64, + message, + } + } +} + +/// Server error response variants +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum ResponseError { + Indexed(IndexedErrorMessage), + Message(ErrorMessage), +} + +impl Error { + /// If the error has a HTTP status code, return it. + pub fn status(&self) -> Option { + match self { + Error::HttpClient(error) => error.inner().status(), + Error::SseClient(error) => { + if let reqwest_eventsource::Error::InvalidStatusCode(status, _) = error.as_ref() { + Some(*status) + } else { + None + } + } + Error::ServerMessage(msg) => StatusCode::try_from(msg.code).ok(), + Error::ServerIndexedMessage(msg) => StatusCode::try_from(msg.code).ok(), + Error::StatusCode(status) => Some(*status), + Error::InvalidUrl(_) => None, + Error::InvalidSecret(_) => None, + Error::InvalidSignatureHeader => None, + Error::MissingSignatureHeader => None, + Error::InvalidJson(_) => None, + Error::InvalidSsz(_) => None, + Error::InvalidServerSentEvent(_) => None, + Error::InvalidHeaders(_) => None, + Error::TokenReadError(..) => None, + Error::NoServerPubkey | Error::NoToken => None, + } + } +} + +impl From for Error { + fn from(error: reqwest::Error) -> Self { + Error::HttpClient(error.into()) + } +} + +impl fmt::Display for Error { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?}", self) + } +} + +/// Returns `Ok(response)` if the response is a `200 OK`, `202 ACCEPTED`, or `204 NO_CONTENT` +/// Otherwise, creates an appropriate error message. 
+pub async fn ok_or_error(response: Response) -> Result { + let status = response.status(); + + if status == StatusCode::OK + || status == StatusCode::ACCEPTED + || status == StatusCode::NO_CONTENT + { + Ok(response) + } else if let Ok(message) = response.json::().await { + match message { + ResponseError::Message(message) => Err(Error::ServerMessage(message)), + ResponseError::Indexed(indexed) => Err(Error::ServerIndexedMessage(indexed)), + } + } else { + Err(Error::StatusCode(status)) + } +} + +/// Returns `Ok(response)` if the response is a success (2xx) response. Otherwise, creates an +/// appropriate error message. +pub async fn success_or_error(response: Response) -> Result { + let status = response.status(); + + if status.is_success() { + Ok(response) + } else if let Ok(message) = response.json().await { + match message { + ResponseError::Message(message) => Err(Error::ServerMessage(message)), + ResponseError::Indexed(indexed) => Err(Error::ServerIndexedMessage(indexed)), + } + } else { + Err(Error::StatusCode(status)) + } +} diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index 995e6966eae..a9dd752df03 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -7,6 +7,7 @@ //! Eventually it would be ideal to publish this crate on crates.io, however we have some local //! dependencies preventing this presently. 
+pub mod error; #[cfg(feature = "lighthouse")] pub mod lighthouse; #[cfg(feature = "lighthouse")] @@ -14,14 +15,14 @@ pub mod lighthouse_vc; pub mod mixin; pub mod types; +pub use self::error::{Error, ok_or_error, success_or_error}; use self::mixin::{RequestAccept, ResponseOptional}; -use self::types::{Error as ResponseError, *}; +use self::types::*; use ::types::beacon_response::ExecutionOptimisticFinalizedBeaconResponse; use derivative::Derivative; use futures::Stream; use futures_util::StreamExt; use libp2p_identity::PeerId; -use pretty_reqwest_error::PrettyReqwestError; pub use reqwest; use reqwest::{ Body, IntoUrl, RequestBuilder, Response, @@ -34,7 +35,6 @@ use serde::{Serialize, de::DeserializeOwned}; use ssz::Encode; use std::fmt; use std::future::Future; -use std::path::PathBuf; use std::time::Duration; pub const V1: EndpointVersion = EndpointVersion(1); @@ -68,83 +68,6 @@ const HTTP_GET_DEPOSIT_SNAPSHOT_QUOTIENT: u32 = 4; const HTTP_GET_VALIDATOR_BLOCK_TIMEOUT_QUOTIENT: u32 = 4; const HTTP_DEFAULT_TIMEOUT_QUOTIENT: u32 = 4; -#[derive(Debug)] -pub enum Error { - /// The `reqwest` client raised an error. - HttpClient(PrettyReqwestError), - /// The `reqwest_eventsource` client raised an error. - SseClient(Box), - /// The server returned an error message where the body was able to be parsed. - ServerMessage(ErrorMessage), - /// The server returned an error message with an array of errors. - ServerIndexedMessage(IndexedErrorMessage), - /// The server returned an error message where the body was unable to be parsed. - StatusCode(StatusCode), - /// The supplied URL is badly formatted. It should look something like `http://127.0.0.1:5052`. - InvalidUrl(SensitiveUrl), - /// The supplied validator client secret is invalid. - InvalidSecret(String), - /// The server returned a response with an invalid signature. It may be an impostor. - InvalidSignatureHeader, - /// The server returned a response without a signature header. It may be an impostor. 
- MissingSignatureHeader, - /// The server returned an invalid JSON response. - InvalidJson(serde_json::Error), - /// The server returned an invalid server-sent event. - InvalidServerSentEvent(String), - /// The server sent invalid response headers. - InvalidHeaders(String), - /// The server returned an invalid SSZ response. - InvalidSsz(ssz::DecodeError), - /// An I/O error occurred while loading an API token from disk. - TokenReadError(PathBuf, std::io::Error), - /// The client has been configured without a server pubkey, but requires one for this request. - NoServerPubkey, - /// The client has been configured without an API token, but requires one for this request. - NoToken, -} - -impl From for Error { - fn from(error: reqwest::Error) -> Self { - Error::HttpClient(error.into()) - } -} - -impl Error { - /// If the error has a HTTP status code, return it. - pub fn status(&self) -> Option { - match self { - Error::HttpClient(error) => error.inner().status(), - Error::SseClient(error) => { - if let reqwest_eventsource::Error::InvalidStatusCode(status, _) = error.as_ref() { - Some(*status) - } else { - None - } - } - Error::ServerMessage(msg) => StatusCode::try_from(msg.code).ok(), - Error::ServerIndexedMessage(msg) => StatusCode::try_from(msg.code).ok(), - Error::StatusCode(status) => Some(*status), - Error::InvalidUrl(_) => None, - Error::InvalidSecret(_) => None, - Error::InvalidSignatureHeader => None, - Error::MissingSignatureHeader => None, - Error::InvalidJson(_) => None, - Error::InvalidSsz(_) => None, - Error::InvalidServerSentEvent(_) => None, - Error::InvalidHeaders(_) => None, - Error::TokenReadError(..) => None, - Error::NoServerPubkey | Error::NoToken => None, - } - } -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self) - } -} - /// A struct to define a variety of different timeouts for different validator tasks to ensure /// proper fallback behaviour. 
#[derive(Clone, Debug, PartialEq, Eq)] @@ -2928,37 +2851,3 @@ impl BeaconNodeHttpClient { .await } } - -/// Returns `Ok(response)` if the response is a `200 OK` response. Otherwise, creates an -/// appropriate error message. -pub async fn ok_or_error(response: Response) -> Result { - let status = response.status(); - - if status == StatusCode::OK { - Ok(response) - } else if let Ok(message) = response.json().await { - match message { - ResponseError::Message(message) => Err(Error::ServerMessage(message)), - ResponseError::Indexed(indexed) => Err(Error::ServerIndexedMessage(indexed)), - } - } else { - Err(Error::StatusCode(status)) - } -} - -/// Returns `Ok(response)` if the response is a success (2xx) response. Otherwise, creates an -/// appropriate error message. -pub async fn success_or_error(response: Response) -> Result { - let status = response.status(); - - if status.is_success() { - Ok(response) - } else if let Ok(message) = response.json().await { - match message { - ResponseError::Message(message) => Err(Error::ServerMessage(message)), - ResponseError::Indexed(indexed) => Err(Error::ServerIndexedMessage(indexed)), - } - } else { - Err(Error::StatusCode(status)) - } -} diff --git a/common/eth2/src/lighthouse_vc/http_client.rs b/common/eth2/src/lighthouse_vc/http_client.rs index 60289605531..c4fddb97d7a 100644 --- a/common/eth2/src/lighthouse_vc/http_client.rs +++ b/common/eth2/src/lighthouse_vc/http_client.rs @@ -1,5 +1,5 @@ use super::types::*; -use crate::Error; +use crate::{Error, success_or_error}; use reqwest::{ IntoUrl, header::{HeaderMap, HeaderValue}, @@ -145,7 +145,7 @@ impl ValidatorClientHttpClient { .send() .await .map_err(Error::from)?; - ok_or_error(response).await + success_or_error(response).await } /// Perform a HTTP DELETE request, returning the `Response` for further processing. 
@@ -157,7 +157,7 @@ impl ValidatorClientHttpClient { .send() .await .map_err(Error::from)?; - ok_or_error(response).await + success_or_error(response).await } async fn get(&self, url: U) -> Result { @@ -218,7 +218,7 @@ impl ValidatorClientHttpClient { .send() .await .map_err(Error::from)?; - ok_or_error(response).await + success_or_error(response).await } async fn post( @@ -250,7 +250,7 @@ impl ValidatorClientHttpClient { .send() .await .map_err(Error::from)?; - ok_or_error(response).await?; + success_or_error(response).await?; Ok(()) } @@ -268,7 +268,7 @@ impl ValidatorClientHttpClient { .send() .await .map_err(Error::from)?; - ok_or_error(response).await + success_or_error(response).await } /// Perform a HTTP DELETE request. @@ -681,20 +681,3 @@ impl ValidatorClientHttpClient { self.delete(url).await } } - -/// Returns `Ok(response)` if the response is a `200 OK` response or a -/// `202 Accepted` response. Otherwise, creates an appropriate error message. -async fn ok_or_error(response: Response) -> Result { - let status = response.status(); - - if status == StatusCode::OK - || status == StatusCode::ACCEPTED - || status == StatusCode::NO_CONTENT - { - Ok(response) - } else if let Ok(message) = response.json().await { - Err(Error::ServerMessage(message)) - } else { - Err(Error::StatusCode(status)) - } -} diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs index 60bc0804e45..a90fe6d058c 100644 --- a/common/eth2/src/types.rs +++ b/common/eth2/src/types.rs @@ -26,46 +26,8 @@ pub use types::*; #[cfg(feature = "lighthouse")] use crate::lighthouse::BlockReward; -/// An API error serializable to JSON. -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum Error { - Indexed(IndexedErrorMessage), - Message(ErrorMessage), -} - -/// An API error serializable to JSON. 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct ErrorMessage { - pub code: u16, - pub message: String, - #[serde(default)] - pub stacktraces: Vec, -} - -/// An indexed API error serializable to JSON. -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct IndexedErrorMessage { - pub code: u16, - pub message: String, - pub failures: Vec, -} - -/// A single failure in an index of API errors, serializable to JSON. -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct Failure { - pub index: u64, - pub message: String, -} - -impl Failure { - pub fn new(index: usize, message: String) -> Self { - Self { - index: index as u64, - message, - } - } -} +// Re-export error types from the unified error module +pub use crate::error::{ErrorMessage, Failure, IndexedErrorMessage, ResponseError as Error}; /// The version of a single API endpoint, e.g. the `v1` in `/eth/v1/beacon/blocks`. #[derive(Debug, Clone, Copy, PartialEq)] From b69c2f5ba1c54760e24d3d18cc8406c9d531ba81 Mon Sep 17 00:00:00 2001 From: Pawan Dhananjay Date: Wed, 29 Oct 2025 00:00:25 -0700 Subject: [PATCH 06/74] Run CI tests only recent forks (#8271) Partially addresses #8248 Run the beacon chain, http and network tests only for recent forks instead of everything from phase 0. Also added gloas also to the recent forks list. I thought that would be a good way to know if changes in the current fork affect future forks. Not completely sure if we should run for future forks, but added it so that we can discuss here. Co-Authored-By: Pawan Dhananjay Co-Authored-By: Jimmy Chen --- Makefile | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index b9f93942f6f..2edc9f86328 100644 --- a/Makefile +++ b/Makefile @@ -30,12 +30,8 @@ TEST_FEATURES ?= # Cargo profile for regular builds. PROFILE ?= release -# List of all hard forks. 
This list is used to set env variables for several tests so that -# they run for different forks. -FORKS=phase0 altair bellatrix capella deneb electra fulu gloas - # List of all recent hard forks. This list is used to set env variables for http_api tests -RECENT_FORKS=electra fulu +RECENT_FORKS=electra fulu gloas # Extra flags for Cargo CARGO_INSTALL_EXTRA_FLAGS?= @@ -170,8 +166,8 @@ run-ef-tests: cargo nextest run --release -p ef_tests --features "ef_tests,$(EF_TEST_FEATURES),fake_crypto" ./$(EF_TESTS)/check_all_files_accessed.py $(EF_TESTS)/.accessed_file_log.txt $(EF_TESTS)/consensus-spec-tests -# Run the tests in the `beacon_chain` crate for all known forks. -test-beacon-chain: $(patsubst %,test-beacon-chain-%,$(FORKS)) +# Run the tests in the `beacon_chain` crate for recent forks. +test-beacon-chain: $(patsubst %,test-beacon-chain-%,$(RECENT_FORKS)) test-beacon-chain-%: env FORK_NAME=$* cargo nextest run --release --features "fork_from_env,slasher/lmdb,$(TEST_FEATURES)" -p beacon_chain @@ -184,15 +180,15 @@ test-http-api-%: # Run the tests in the `operation_pool` crate for all known forks. -test-op-pool: $(patsubst %,test-op-pool-%,$(FORKS)) +test-op-pool: $(patsubst %,test-op-pool-%,$(RECENT_FORKS)) test-op-pool-%: env FORK_NAME=$* cargo nextest run --release \ --features "beacon_chain/fork_from_env,$(TEST_FEATURES)"\ -p operation_pool -# Run the tests in the `network` crate for all known forks. -test-network: $(patsubst %,test-network-%,$(FORKS)) +# Run the tests in the `network` crate for recent forks. 
+test-network: $(patsubst %,test-network-%,$(RECENT_FORKS)) test-network-%: env FORK_NAME=$* cargo nextest run --release \ From f70c650d814dab9df44da41cc2c150d9f039e2e8 Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Wed, 29 Oct 2025 19:21:23 +1100 Subject: [PATCH 07/74] Update spec tests to v1.6.0-beta.1 (#8263) Update the EF spec tests to v1.6.0-beta.1 There are a few new light client tests (which we pass), and some for progressive containers, which we haven't implemented (we ignore them). Co-Authored-By: Michael Sproul --- testing/ef_tests/Makefile | 2 +- testing/ef_tests/check_all_files_accessed.py | 2 ++ testing/ef_tests/download_test_vectors.sh | 2 +- testing/ef_tests/src/handler.rs | 3 +-- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/testing/ef_tests/Makefile b/testing/ef_tests/Makefile index da8640d681a..0ead9d00472 100644 --- a/testing/ef_tests/Makefile +++ b/testing/ef_tests/Makefile @@ -1,6 +1,6 @@ # To download/extract nightly tests, run: # CONSENSUS_SPECS_TEST_VERSION=nightly make -CONSENSUS_SPECS_TEST_VERSION ?= v1.6.0-alpha.6 +CONSENSUS_SPECS_TEST_VERSION ?= v1.6.0-beta.1 REPO_NAME := consensus-spec-tests OUTPUT_DIR := ./$(REPO_NAME) diff --git a/testing/ef_tests/check_all_files_accessed.py b/testing/ef_tests/check_all_files_accessed.py index 41e3c4bff70..1f70881a887 100755 --- a/testing/ef_tests/check_all_files_accessed.py +++ b/testing/ef_tests/check_all_files_accessed.py @@ -54,6 +54,8 @@ "tests/general/phase0/ssz_generic/basic_progressive_list", "tests/general/phase0/ssz_generic/containers/.*/ProgressiveBitsStruct.*", "tests/general/phase0/ssz_generic/containers/.*/ProgressiveTestStruct.*", + "tests/general/phase0/ssz_generic/progressive_containers/.*", + "tests/general/phase0/ssz_generic/compatible_unions/.*", # Ignore full epoch tests for now (just test the sub-transitions). 
"tests/.*/.*/epoch_processing/.*/pre_epoch.ssz_snappy", "tests/.*/.*/epoch_processing/.*/post_epoch.ssz_snappy", diff --git a/testing/ef_tests/download_test_vectors.sh b/testing/ef_tests/download_test_vectors.sh index 7297f7eeb85..21f74e817f5 100755 --- a/testing/ef_tests/download_test_vectors.sh +++ b/testing/ef_tests/download_test_vectors.sh @@ -57,7 +57,7 @@ else if [[ ! -e "${test}.tar.gz" ]]; then echo "Downloading: ${version}/${test}.tar.gz" curl --progress-bar --location --remote-name --show-error --retry 3 --retry-all-errors --fail \ - "https://github.com/ethereum/consensus-spec-tests/releases/download/${version}/${test}.tar.gz" \ + "https://github.com/ethereum/consensus-specs/releases/download/${version}/${test}.tar.gz" \ || { echo "Curl failed. Aborting" rm -f "${test}.tar.gz" diff --git a/testing/ef_tests/src/handler.rs b/testing/ef_tests/src/handler.rs index c31a75c3352..b49ab2d90d4 100644 --- a/testing/ef_tests/src/handler.rs +++ b/testing/ef_tests/src/handler.rs @@ -1083,8 +1083,7 @@ impl Handler for LightClientUpdateHandler { fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool { // Enabled in Altair - // No test in Fulu yet. - fork_name.altair_enabled() && fork_name != ForkName::Fulu + fork_name.altair_enabled() } } From 30094f0c08c451087935ab932d1ac64b635085e7 Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Thu, 30 Oct 2025 14:42:36 +1100 Subject: [PATCH 08/74] Remove redundant `subscribe_all_data_column_subnets` field from network (#8259) Addresses this comment: https://github.com/sigp/lighthouse/pull/8254#discussion_r2447998786 We're currently using `subscribe_all_data_column_subnets` here to subscribe to all subnets https://github.com/sigp/lighthouse/blob/522bd9e9c6ac167f2231525e937c9ebbcb86cf6e/beacon_node/lighthouse_network/src/types/topics.rs#L82-L92 But its unnecessary because the else path also works for supernode (uses `sampling_subnets` instead) The big diffs will disappear once #8254 is merged. 
Co-Authored-By: Jimmy Chen --- beacon_node/lighthouse_network/src/config.rs | 4 -- .../lighthouse_network/src/discovery/enr.rs | 58 +++---------------- .../lighthouse_network/src/discovery/mod.rs | 5 +- .../lighthouse_network/src/service/mod.rs | 2 +- .../lighthouse_network/src/types/globals.rs | 1 - .../lighthouse_network/src/types/topics.rs | 18 ++---- beacon_node/src/config.rs | 1 - lcli/src/generate_bootnode_enr.rs | 2 +- 8 files changed, 17 insertions(+), 74 deletions(-) diff --git a/beacon_node/lighthouse_network/src/config.rs b/beacon_node/lighthouse_network/src/config.rs index 89c6c58d4f6..416ca73e08e 100644 --- a/beacon_node/lighthouse_network/src/config.rs +++ b/beacon_node/lighthouse_network/src/config.rs @@ -94,9 +94,6 @@ pub struct Config { /// Attempt to construct external port mappings with UPnP. pub upnp_enabled: bool, - /// Subscribe to all data column subnets for the duration of the runtime. - pub subscribe_all_data_column_subnets: bool, - /// Subscribe to all subnets for the duration of the runtime. 
pub subscribe_all_subnets: bool, @@ -355,7 +352,6 @@ impl Default for Config { upnp_enabled: true, network_load: 3, private: false, - subscribe_all_data_column_subnets: false, subscribe_all_subnets: false, import_all_attestations: false, shutdown_after_sync: false, diff --git a/beacon_node/lighthouse_network/src/discovery/enr.rs b/beacon_node/lighthouse_network/src/discovery/enr.rs index bb9ff299c5d..4c285ea86c8 100644 --- a/beacon_node/lighthouse_network/src/discovery/enr.rs +++ b/beacon_node/lighthouse_network/src/discovery/enr.rs @@ -159,7 +159,7 @@ pub fn build_or_load_enr( local_key: Keypair, config: &NetworkConfig, enr_fork_id: &EnrForkId, - custody_group_count: Option, + custody_group_count: u64, next_fork_digest: [u8; 4], spec: &ChainSpec, ) -> Result { @@ -185,7 +185,7 @@ pub fn build_enr( enr_key: &CombinedKey, config: &NetworkConfig, enr_fork_id: &EnrForkId, - custody_group_count: Option, + custody_group_count: u64, next_fork_digest: [u8; 4], spec: &ChainSpec, ) -> Result { @@ -280,15 +280,6 @@ pub fn build_enr( // only set `cgc` and `nfd` if PeerDAS fork (Fulu) epoch has been scheduled if spec.is_peer_das_scheduled() { - let custody_group_count = if let Some(cgc) = custody_group_count { - cgc - } else if let Some(false_cgc) = config.advertise_false_custody_group_count { - false_cgc - } else if config.subscribe_all_data_column_subnets { - spec.number_of_custody_groups - } else { - spec.custody_requirement - }; builder.add_value(PEERDAS_CUSTODY_GROUP_COUNT_ENR_KEY, &custody_group_count); builder.add_value(NEXT_FORK_DIGEST_ENR_KEY, &next_fork_digest); } @@ -373,7 +364,7 @@ mod test { fn build_enr_with_config( config: NetworkConfig, - cgc: Option, + cgc: u64, spec: &ChainSpec, ) -> (Enr, CombinedKey) { let keypair = libp2p::identity::secp256k1::Keypair::generate(); @@ -386,56 +377,23 @@ mod test { #[test] fn test_nfd_enr_encoding() { let spec = make_fulu_spec(); - let enr = build_enr_with_config(NetworkConfig::default(), None, &spec).0; + let enr = + 
build_enr_with_config(NetworkConfig::default(), spec.custody_requirement, &spec).0; assert_eq!(enr.next_fork_digest().unwrap(), TEST_NFD); } - #[test] - fn custody_group_count_default() { - let config = NetworkConfig { - subscribe_all_data_column_subnets: false, - ..NetworkConfig::default() - }; - let spec = make_fulu_spec(); - - let enr = build_enr_with_config(config, None, &spec).0; - - assert_eq!( - enr.custody_group_count::(&spec).unwrap(), - spec.custody_requirement, - ); - } - - #[test] - fn custody_group_count_all() { - let config = NetworkConfig { - subscribe_all_data_column_subnets: true, - ..NetworkConfig::default() - }; - let spec = make_fulu_spec(); - let enr = build_enr_with_config(config, None, &spec).0; - - assert_eq!( - enr.custody_group_count::(&spec).unwrap(), - spec.number_of_custody_groups, - ); - } - #[test] fn custody_group_value() { - let config = NetworkConfig { - subscribe_all_data_column_subnets: true, - ..NetworkConfig::default() - }; + let config = NetworkConfig::default(); let spec = make_fulu_spec(); - let enr = build_enr_with_config(config, Some(42), &spec).0; + let enr = build_enr_with_config(config, 42, &spec).0; assert_eq!(enr.custody_group_count::(&spec).unwrap(), 42); } #[test] fn test_encode_decode_eth2_enr() { - let (enr, _key) = build_enr_with_config(NetworkConfig::default(), None, &E::default_spec()); + let (enr, _key) = build_enr_with_config(NetworkConfig::default(), 4, &E::default_spec()); // Check all Eth2 Mappings are decodeable enr.eth2().unwrap(); enr.attestation_bitfield::().unwrap(); diff --git a/beacon_node/lighthouse_network/src/discovery/mod.rs b/beacon_node/lighthouse_network/src/discovery/mod.rs index 49de62546df..3589882ae9b 100644 --- a/beacon_node/lighthouse_network/src/discovery/mod.rs +++ b/beacon_node/lighthouse_network/src/discovery/mod.rs @@ -1243,11 +1243,12 @@ mod tests { let config = Arc::new(config); let enr_key: CombinedKey = CombinedKey::from_secp256k1(&keypair); let next_fork_digest = [0; 4]; + let 
custody_group_count = spec.custody_requirement; let enr: Enr = build_enr::( &enr_key, &config, &EnrForkId::default(), - None, + custody_group_count, next_fork_digest, &spec, ) @@ -1258,7 +1259,7 @@ mod tests { seq_number: 0, attnets: Default::default(), syncnets: Default::default(), - custody_group_count: spec.custody_requirement, + custody_group_count, }), vec![], false, diff --git a/beacon_node/lighthouse_network/src/service/mod.rs b/beacon_node/lighthouse_network/src/service/mod.rs index ea2c53a07fe..1df17dffbaf 100644 --- a/beacon_node/lighthouse_network/src/service/mod.rs +++ b/beacon_node/lighthouse_network/src/service/mod.rs @@ -199,7 +199,7 @@ impl Network { local_keypair.clone(), &config, &ctx.enr_fork_id, - Some(advertised_cgc), + advertised_cgc, next_fork_digest, &ctx.chain_spec, )?; diff --git a/beacon_node/lighthouse_network/src/types/globals.rs b/beacon_node/lighthouse_network/src/types/globals.rs index 2a3571c3b70..f46eb05ceb0 100644 --- a/beacon_node/lighthouse_network/src/types/globals.rs +++ b/beacon_node/lighthouse_network/src/types/globals.rs @@ -227,7 +227,6 @@ impl NetworkGlobals { TopicConfig { enable_light_client_server: self.config.enable_light_client_server, subscribe_all_subnets: self.config.subscribe_all_subnets, - subscribe_all_data_column_subnets: self.config.subscribe_all_data_column_subnets, sampling_subnets: self.sampling_subnets.read().clone(), } } diff --git a/beacon_node/lighthouse_network/src/types/topics.rs b/beacon_node/lighthouse_network/src/types/topics.rs index b22adfbc487..cfdee907b9a 100644 --- a/beacon_node/lighthouse_network/src/types/topics.rs +++ b/beacon_node/lighthouse_network/src/types/topics.rs @@ -29,7 +29,6 @@ pub const LIGHT_CLIENT_OPTIMISTIC_UPDATE: &str = "light_client_optimistic_update pub struct TopicConfig { pub enable_light_client_server: bool, pub subscribe_all_subnets: bool, - pub subscribe_all_data_column_subnets: bool, pub sampling_subnets: HashSet, } @@ -80,14 +79,8 @@ pub fn 
core_topics_to_subscribe( } if fork_name.fulu_enabled() { - if opts.subscribe_all_data_column_subnets { - for i in 0..spec.data_column_sidecar_subnet_count { - topics.push(GossipKind::DataColumnSidecar(i.into())); - } - } else { - for subnet in &opts.sampling_subnets { - topics.push(GossipKind::DataColumnSidecar(*subnet)); - } + for subnet in &opts.sampling_subnets { + topics.push(GossipKind::DataColumnSidecar(*subnet)); } } @@ -125,7 +118,6 @@ pub fn all_topics_at_fork(fork: ForkName, spec: &ChainSpec) -> Vec(fork, &opts, spec) @@ -520,7 +512,6 @@ mod tests { TopicConfig { enable_light_client_server: false, subscribe_all_subnets: false, - subscribe_all_data_column_subnets: false, sampling_subnets: sampling_subnets.clone(), } } @@ -552,9 +543,8 @@ mod tests { #[test] fn columns_are_subscribed_in_peerdas() { let spec = get_spec(); - let s = get_sampling_subnets(); - let mut topic_config = get_topic_config(&s); - topic_config.subscribe_all_data_column_subnets = true; + let s = HashSet::from_iter([0.into()]); + let topic_config = get_topic_config(&s); assert!( core_topics_to_subscribe::(ForkName::Fulu, &topic_config, &spec) .contains(&GossipKind::DataColumnSidecar(0.into())) diff --git a/beacon_node/src/config.rs b/beacon_node/src/config.rs index 3b0e80e0b7d..0f169ffaad6 100644 --- a/beacon_node/src/config.rs +++ b/beacon_node/src/config.rs @@ -114,7 +114,6 @@ pub fn get_config( let is_semi_supernode = parse_flag(cli_args, "semi-supernode"); client_config.chain.node_custody_type = if is_supernode { - client_config.network.subscribe_all_data_column_subnets = true; NodeCustodyType::Supernode } else if is_semi_supernode { NodeCustodyType::SemiSupernode diff --git a/lcli/src/generate_bootnode_enr.rs b/lcli/src/generate_bootnode_enr.rs index ddd36e7e7a9..71186904d0b 100644 --- a/lcli/src/generate_bootnode_enr.rs +++ b/lcli/src/generate_bootnode_enr.rs @@ -43,7 +43,7 @@ pub fn run(matches: &ArgMatches, spec: &ChainSpec) -> Result<(), Str &enr_key, &config, &enr_fork_id, - 
None, + spec.custody_requirement, genesis_fork_digest, spec, ) From af9cae4d3e32fadbc022c494a8cad7bbc6012398 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Fri, 31 Oct 2025 00:47:27 +0800 Subject: [PATCH 09/74] Add `version` to the response of beacon API client side (#8326) Co-Authored-By: Tan Chee Keong --- beacon_node/http_api/tests/tests.rs | 30 +++++++++++++++++++++-------- common/eth2/src/lib.rs | 17 +++++++++++----- 2 files changed, 34 insertions(+), 13 deletions(-) diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs index dc2fd4ae440..3b69430efcf 100644 --- a/beacon_node/http_api/tests/tests.rs +++ b/beacon_node/http_api/tests/tests.rs @@ -1316,12 +1316,14 @@ impl ApiTester { .ok() .map(|(state, _execution_optimistic, _finalized)| state); - let result = self + let result = match self .client .get_beacon_states_pending_deposits(state_id.0) .await - .unwrap() - .map(|res| res.data); + { + Ok(response) => response, + Err(e) => panic!("query failed incorrectly: {e:?}"), + }; if result.is_none() && state_opt.is_none() { continue; @@ -1330,7 +1332,12 @@ impl ApiTester { let state = state_opt.as_mut().expect("result should be none"); let expected = state.pending_deposits().unwrap(); - assert_eq!(result.unwrap(), expected.to_vec()); + let response = result.unwrap(); + assert_eq!(response.data(), &expected.to_vec()); + + // Check that the version header is returned in the response + let fork_name = state.fork_name(&self.chain.spec).unwrap(); + assert_eq!(response.version(), Some(fork_name),); } self @@ -1343,12 +1350,14 @@ impl ApiTester { .ok() .map(|(state, _execution_optimistic, _finalized)| state); - let result = self + let result = match self .client .get_beacon_states_pending_partial_withdrawals(state_id.0) .await - .unwrap() - .map(|res| res.data); + { + Ok(response) => response, + Err(e) => panic!("query failed incorrectly: {e:?}"), + }; if result.is_none() && state_opt.is_none() { 
continue; @@ -1357,7 +1366,12 @@ impl ApiTester { let state = state_opt.as_mut().expect("result should be none"); let expected = state.pending_partial_withdrawals().unwrap(); - assert_eq!(result.unwrap(), expected.to_vec()); + let response = result.unwrap(); + assert_eq!(response.data(), &expected.to_vec()); + + // Check that the version header is returned in the response + let fork_name = state.fork_name(&self.chain.spec).unwrap(); + assert_eq!(response.version(), Some(fork_name),); } self diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index a9dd752df03..2641a4c02e0 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -827,7 +827,8 @@ impl BeaconNodeHttpClient { pub async fn get_beacon_states_pending_deposits( &self, state_id: StateId, - ) -> Result>>, Error> { + ) -> Result>>, Error> + { let mut path = self.eth_path(V1)?; path.path_segments_mut() @@ -837,7 +838,9 @@ impl BeaconNodeHttpClient { .push(&state_id.to_string()) .push("pending_deposits"); - self.get_opt(path).await + self.get_fork_contextual(path, |fork| fork) + .await + .map(|opt| opt.map(BeaconResponse::ForkVersioned)) } /// `GET beacon/states/{state_id}/pending_partial_withdrawals` @@ -846,8 +849,10 @@ impl BeaconNodeHttpClient { pub async fn get_beacon_states_pending_partial_withdrawals( &self, state_id: StateId, - ) -> Result>>, Error> - { + ) -> Result< + Option>>, + Error, + > { let mut path = self.eth_path(V1)?; path.path_segments_mut() @@ -857,7 +862,9 @@ impl BeaconNodeHttpClient { .push(&state_id.to_string()) .push("pending_partial_withdrawals"); - self.get_opt(path).await + self.get_fork_contextual(path, |fork| fork) + .await + .map(|opt| opt.map(BeaconResponse::ForkVersioned)) } /// `GET beacon/states/{state_id}/pending_consolidations` From 2c9b670f5d313450252c6cb40a5ee34802d54fef Mon Sep 17 00:00:00 2001 From: Mac L Date: Mon, 3 Nov 2025 06:46:31 +0400 Subject: [PATCH 10/74] Rework `lighthouse_version` to reduce spurious recompilation (#8336) #8311 Removes the 
`git_version` crate from `lighthouse_version` and implements git `HEAD` tracking manually. This removes the (mostly) broken dirty tracking but prevents spurious recompilation of the `lighthouse_version` crate. This also reworks the way crate versions are handled by utilizing workspace version inheritance and Cargo environment variables. This means the _only_ place where Lighthouse's version is defined is in the top level `Cargo.toml` for the workspace. All relevant binaries then inherit this version. This largely makes the `change_version.sh` script useless so I've removed it, although we could keep a version which just alters the workspace version (if we need to maintain compatibility with certain build/release tooling). ### When is a Rebuild Triggered? 1. When the build.rs file is changed. 2. When the HEAD commit changes (added, removed, rebased, etc) 3. When the branch changes (this includes changing to the current branch, and creating a detached HEAD) Note that working/staged changes will not trigger a recompile of `lighthouse_version`.
Co-Authored-By: Mac L Co-Authored-By: Michael Sproul --- Cargo.lock | 27 ++-------- Cargo.toml | 1 + account_manager/Cargo.toml | 2 +- beacon_node/Cargo.toml | 2 +- boot_node/Cargo.toml | 2 +- common/lighthouse_version/Cargo.toml | 6 +-- common/lighthouse_version/build.rs | 81 ++++++++++++++++++++++++++++ common/lighthouse_version/src/lib.rs | 50 +++++++---------- lcli/Cargo.toml | 2 +- lighthouse/Cargo.toml | 2 +- scripts/change_version.sh | 34 ------------ validator_client/Cargo.toml | 2 +- 12 files changed, 110 insertions(+), 101 deletions(-) create mode 100644 common/lighthouse_version/build.rs delete mode 100755 scripts/change_version.sh diff --git a/Cargo.lock b/Cargo.lock index 8a282a60b79..bf2d6dd2d15 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,7 +4,7 @@ version = 4 [[package]] name = "account_manager" -version = "0.3.5" +version = "8.0.0-rc.2" dependencies = [ "account_utils", "bls", @@ -3860,26 +3860,6 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" -[[package]] -name = "git-version" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad568aa3db0fcbc81f2f116137f263d7304f512a1209b35b85150d3ef88ad19" -dependencies = [ - "git-version-macro", -] - -[[package]] -name = "git-version-macro" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53010ccb100b96a67bc32c0175f0ed1426b31b655d562898e57325f81c023ac0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.100", -] - [[package]] name = "glob" version = "0.3.2" @@ -5711,9 +5691,8 @@ dependencies = [ [[package]] name = "lighthouse_version" -version = "0.1.0" +version = "8.0.0-rc.2" dependencies = [ - "git-version", "regex", ] @@ -10112,7 +10091,7 @@ dependencies = [ [[package]] name = "validator_client" -version = "0.3.5" +version = "8.0.0-rc.2" dependencies = [ "account_utils", 
"beacon_node_fallback", diff --git a/Cargo.toml b/Cargo.toml index a9799077695..0ca8cbf83cf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -94,6 +94,7 @@ resolver = "2" [workspace.package] edition = "2024" +version = "8.0.0-rc.2" [workspace.dependencies] account_utils = { path = "common/account_utils" } diff --git a/account_manager/Cargo.toml b/account_manager/Cargo.toml index 071e2681dd1..8dd50cbc6ee 100644 --- a/account_manager/Cargo.toml +++ b/account_manager/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "account_manager" -version = "0.3.5" +version = { workspace = true } authors = [ "Paul Hauner ", "Luke Anderson ", diff --git a/beacon_node/Cargo.toml b/beacon_node/Cargo.toml index 56c2fb410cc..fd013559785 100644 --- a/beacon_node/Cargo.toml +++ b/beacon_node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "beacon_node" -version = "8.0.0-rc.2" +version = { workspace = true } authors = [ "Paul Hauner ", "Age Manning "] edition = { workspace = true } diff --git a/common/lighthouse_version/Cargo.toml b/common/lighthouse_version/Cargo.toml index b7e669ed940..ab9509cb1e8 100644 --- a/common/lighthouse_version/Cargo.toml +++ b/common/lighthouse_version/Cargo.toml @@ -1,12 +1,8 @@ [package] name = "lighthouse_version" -version = "0.1.0" +version = { workspace = true } authors = ["Sigma Prime "] edition = { workspace = true } -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -git-version = "0.3.4" [dev-dependencies] regex = { workspace = true } diff --git a/common/lighthouse_version/build.rs b/common/lighthouse_version/build.rs new file mode 100644 index 00000000000..1af99996df5 --- /dev/null +++ b/common/lighthouse_version/build.rs @@ -0,0 +1,81 @@ +use std::env; +use std::fs; +use std::path::Path; +use std::process::Command; + +const CLIENT_NAME: &str = "Lighthouse"; + +fn main() { + println!("cargo:rerun-if-changed=build.rs"); + + let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + let manifest_path 
= Path::new(&manifest_dir); + + // The crate version is inherited from the workspace. + let semantic_version = env::var("CARGO_PKG_VERSION").unwrap(); + + // Hardcode the .git/ path. + // This assumes the `lighthouse_version` crate will never move. + let git_dir = manifest_path.join("../../.git"); + + if git_dir.exists() { + // HEAD either contains a commit hash directly (detached HEAD), or a reference to a branch. + let head_path = git_dir.join("HEAD"); + if head_path.exists() { + println!("cargo:rerun-if-changed={}", head_path.display()); + + if let Ok(head_content) = fs::read_to_string(&head_path) { + let head_content = head_content.trim(); + + // If HEAD is a reference, also check that file. + if let Some(ref_path) = head_content.strip_prefix("ref: ") { + let full_ref_path = git_dir.join(ref_path); + if full_ref_path.exists() { + println!("cargo:rerun-if-changed={}", full_ref_path.display()); + } + } + } + } + } + + // Construct Lighthouse version string without commit hash. + let base_version = format!("{}/v{}", CLIENT_NAME, semantic_version); + + let commit_hash = get_git_hash(7); + let commit_prefix = get_git_hash(8); + + // If commit hash is valid, construct the full version string. + let version = if !commit_hash.is_empty() && commit_hash.len() >= 7 { + format!("{}-{}", base_version, commit_hash) + } else { + base_version + }; + + println!("cargo:rustc-env=GIT_VERSION={}", version); + println!("cargo:rustc-env=GIT_COMMIT_PREFIX={}", commit_prefix); + println!("cargo:rustc-env=CLIENT_NAME={}", CLIENT_NAME); + println!("cargo:rustc-env=SEMANTIC_VERSION={}", semantic_version); +} + +fn get_git_hash(len: usize) -> String { + Command::new("git") + .args(["rev-parse", &format!("--short={}", len), "HEAD"]) + .output() + .ok() + .and_then(|output| { + if output.status.success() { + String::from_utf8(output.stdout).ok() + } else { + None + } + }) + .map(|s| s.trim().to_string()) + .unwrap_or_else(|| { + // Fallback commit prefix for execution engine reporting. 
+ if len == 8 { + "00000000".to_string() + } else { + String::new() + } + }) +} diff --git a/common/lighthouse_version/src/lib.rs b/common/lighthouse_version/src/lib.rs index bd7b37926fc..1466487520b 100644 --- a/common/lighthouse_version/src/lib.rs +++ b/common/lighthouse_version/src/lib.rs @@ -1,49 +1,25 @@ -use git_version::git_version; use std::env::consts; /// Returns the current version of this build of Lighthouse. /// -/// A plus-sign (`+`) is appended to the git commit if the tree is dirty. /// Commit hash is omitted if the sources don't include git information. /// /// ## Example /// -/// `Lighthouse/v1.5.1-67da032+` -pub const VERSION: &str = git_version!( - args = [ - "--always", - "--dirty=+", - "--abbrev=7", - // NOTE: using --match instead of --exclude for compatibility with old Git - "--match=thiswillnevermatchlol" - ], - prefix = "Lighthouse/v8.0.0-rc.2-", - fallback = "Lighthouse/v8.0.0-rc.2" -); +/// `Lighthouse/v8.0.0-67da032` +pub const VERSION: &str = env!("GIT_VERSION"); /// Returns the first eight characters of the latest commit hash for this build. /// /// No indication is given if the tree is dirty. This is part of the standard /// for reporting the client version to the execution engine. -pub const COMMIT_PREFIX: &str = git_version!( - args = [ - "--always", - "--abbrev=8", - // NOTE: using --match instead of --exclude for compatibility with old Git - "--match=thiswillnevermatchlol" - ], - prefix = "", - suffix = "", - cargo_prefix = "", - cargo_suffix = "", - fallback = "00000000" -); +pub const COMMIT_PREFIX: &str = env!("GIT_COMMIT_PREFIX"); /// Returns `VERSION`, but with platform information appended to the end. 
/// /// ## Example /// -/// `Lighthouse/v1.5.1-67da032+/x86_64-linux` +/// `Lighthouse/v8.0.0-67da032/x86_64-linux` pub fn version_with_platform() -> String { format!("{}/{}-{}", VERSION, consts::ARCH, consts::OS) } @@ -52,16 +28,16 @@ pub fn version_with_platform() -> String { /// /// ## Example /// -/// `1.5.1` +/// `8.0.0` pub fn version() -> &'static str { - "8.0.0-rc.2" + env!("SEMANTIC_VERSION") } /// Returns the name of the current client running. /// /// This will usually be "Lighthouse" pub fn client_name() -> &'static str { - "Lighthouse" + env!("CLIENT_NAME") } #[cfg(test)] @@ -72,7 +48,7 @@ mod test { #[test] fn version_formatting() { let re = Regex::new( - r"^Lighthouse/v[0-9]+\.[0-9]+\.[0-9]+(-(rc|beta).[0-9])?(-[[:xdigit:]]{7})?\+?$", + r"^Lighthouse/v[0-9]+\.[0-9]+\.[0-9]+(-(rc|beta)\.[0-9])?(-[[:xdigit:]]{7})?$", ) .unwrap(); assert!( @@ -91,4 +67,14 @@ mod test { version() ); } + + #[test] + fn client_name_is_lighthouse() { + assert_eq!(client_name(), "Lighthouse"); + } + + #[test] + fn version_contains_semantic_version() { + assert!(VERSION.contains(version())); + } } diff --git a/lcli/Cargo.toml b/lcli/Cargo.toml index 2698073b5fe..04eb41960ba 100644 --- a/lcli/Cargo.toml +++ b/lcli/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "lcli" description = "Lighthouse CLI (modeled after zcli)" -version = "8.0.0-rc.2" +version = { workspace = true } authors = ["Paul Hauner "] edition = { workspace = true } diff --git a/lighthouse/Cargo.toml b/lighthouse/Cargo.toml index a3240c6d7c8..ebe00c9be59 100644 --- a/lighthouse/Cargo.toml +++ b/lighthouse/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lighthouse" -version = "8.0.0-rc.2" +version = { workspace = true } authors = ["Sigma Prime "] edition = { workspace = true } autotests = false diff --git a/scripts/change_version.sh b/scripts/change_version.sh deleted file mode 100755 index bda87fd8633..00000000000 --- a/scripts/change_version.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env bash - -# Change the 
version across multiple files, prior to a release. Use `sed` to -# find/replace the exiting version with the new one. -# -# Takes two arguments: -# -# 1. Current version (e.g., `0.2.6`) -# 2. New version (e.g., `0.2.7`) -# -# ## Example: -# -# `./change_version.sh 0.2.6 0.2.7` - -FROM=$1 -TO=$2 -VERSION_CRATE="../common/lighthouse_version/src/lib.rs" - -update_cargo_toml () { - echo $1 - sed -i -e "s/version = \"$FROM\"/version = \"$TO\"/g" $1 -} - -echo "Changing version from $FROM to $TO" - -update_cargo_toml ../account_manager/Cargo.toml -update_cargo_toml ../beacon_node/Cargo.toml -update_cargo_toml ../boot_node/Cargo.toml -update_cargo_toml ../lcli/Cargo.toml -update_cargo_toml ../lighthouse/Cargo.toml -update_cargo_toml ../validator_client/Cargo.toml - -echo $VERSION_CRATE -sed -i -e "s/$FROM/$TO/g" $VERSION_CRATE diff --git a/validator_client/Cargo.toml b/validator_client/Cargo.toml index a8c8fd59f13..6990a2f61a7 100644 --- a/validator_client/Cargo.toml +++ b/validator_client/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "validator_client" -version = "0.3.5" +version = { workspace = true } authors = ["Sigma Prime "] edition = { workspace = true } From bc86dc09e5d8ebadec0ac89a8cb51f77dc24caf2 Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Tue, 4 Nov 2025 13:40:44 +1100 Subject: [PATCH 11/74] Reduce number of blobs used in tests to speed up CI (#8194) `beacon-chain-tests` is now regularly taking 1h+ on CI since Fulu fork was added. This PR attemtpts to reduce the test time by bringing down the number of blobs generated in tests - instead of generating 0..max_blobs, the generator now generates 0..1 blobs by default, and this can be modified by setting `harness.execution_block_generator.set_min_blob_count(n)`. Note: The blobs are pre-generated and doesn't require too much CPU to generate however processing a larger number of them on the beacon chain does take a lot of time. 
This PR also includes a few other small improvements - Our slowest test (`chain_segment_varying_chunk_size`) runs 3x faster in Fulu just by reusing chain segments - Avoid re-running fork specific tests on all forks - Fix a bunch of tests that depend on the harness's existing random blob generation, which is fragile. Beacon chain test time on test machine is **~2x** faster: ### `unstable` ``` Summary [ 751.586s] 291 tests run: 291 passed (13 slow), 0 skipped ``` ### this branch ``` Summary [ 373.792s] 291 tests run: 291 passed (2 slow), 0 skipped ``` The next set of tests to optimise is the ones that use [`get_chain_segment`](https://github.com/sigp/lighthouse/blob/77a9af96de0f693127055e381ece3e98dceea0a8/beacon_node/beacon_chain/tests/block_verification.rs#L45), as it by default builds 320 blocks with supernode - an easy optimisation would be to build these blocks with cgc = 8 for tests that only require fullnodes. Co-Authored-By: Jimmy Chen Co-Authored-By: Jimmy Chen --- .../overflow_lru_cache.rs | 2 +- beacon_node/beacon_chain/src/kzg_utils.rs | 12 +- beacon_node/beacon_chain/src/test_utils.rs | 126 ++++++------------ .../beacon_chain/tests/block_verification.rs | 13 +- beacon_node/beacon_chain/tests/events.rs | 32 ++++- beacon_node/beacon_chain/tests/store_tests.rs | 47 +++++-- .../execution_layer/src/engine_api/http.rs | 3 +- .../test_utils/execution_block_generator.rs | 13 +- .../src/test_utils/mock_execution_layer.rs | 1 - .../execution_layer/src/test_utils/mod.rs | 9 +- .../tests/broadcast_validation_tests.rs | 37 +++-- beacon_node/http_api/tests/tests.rs | 13 +- .../src/sync/block_sidecar_coupling.rs | 15 +-- beacon_node/network/src/sync/tests/lookups.rs | 6 +- .../generate_random_block_and_blobs.rs | 4 +- lcli/src/mock_el.rs | 2 +- testing/node_test_rig/src/lib.rs | 8 +- 17 files changed, 171 insertions(+), 172 deletions(-) diff --git a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs 
b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs index 402dac1fa8c..5e6322ae95a 100644 --- a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs +++ b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs @@ -1279,7 +1279,7 @@ mod pending_components_tests { let mut rng = StdRng::seed_from_u64(0xDEADBEEF0BAD5EEDu64); let spec = test_spec::(); let (block, blobs_vec) = - generate_rand_block_and_blobs::(ForkName::Deneb, NumBlobs::Random, &mut rng, &spec); + generate_rand_block_and_blobs::(ForkName::Deneb, NumBlobs::Random, &mut rng); let max_len = spec.max_blobs_per_block(block.epoch()) as usize; let mut blobs: RuntimeFixedVector>>> = RuntimeFixedVector::default(max_len); diff --git a/beacon_node/beacon_chain/src/kzg_utils.rs b/beacon_node/beacon_chain/src/kzg_utils.rs index 18e14587a52..200774ebe46 100644 --- a/beacon_node/beacon_chain/src/kzg_utils.rs +++ b/beacon_node/beacon_chain/src/kzg_utils.rs @@ -468,7 +468,7 @@ mod test { #[track_caller] fn test_validate_data_columns(kzg: &Kzg, spec: &ChainSpec) { - let num_of_blobs = 6; + let num_of_blobs = 2; let (signed_block, blobs, proofs) = create_test_fulu_block_and_blobs::(num_of_blobs, spec); let blob_refs = blobs.iter().collect::>(); @@ -494,7 +494,8 @@ mod test { #[track_caller] fn test_build_data_columns(kzg: &Kzg, spec: &ChainSpec) { - let num_of_blobs = 6; + // Using at least 2 blobs to make sure we're arranging the data columns correctly. + let num_of_blobs = 2; let (signed_block, blobs, proofs) = create_test_fulu_block_and_blobs::(num_of_blobs, spec); @@ -534,6 +535,7 @@ mod test { #[track_caller] fn test_reconstruct_data_columns(kzg: &Kzg, spec: &ChainSpec) { + // Using at least 2 blobs to make sure we're arranging the data columns correctly. 
let num_of_blobs = 2; let (signed_block, blobs, proofs) = create_test_fulu_block_and_blobs::(num_of_blobs, spec); @@ -557,6 +559,7 @@ mod test { #[track_caller] fn test_reconstruct_data_columns_unordered(kzg: &Kzg, spec: &ChainSpec) { + // Using at least 2 blobs to make sure we're arranging the data columns correctly. let num_of_blobs = 2; let (signed_block, blobs, proofs) = create_test_fulu_block_and_blobs::(num_of_blobs, spec); @@ -578,7 +581,7 @@ mod test { #[track_caller] fn test_reconstruct_blobs_from_data_columns(kzg: &Kzg, spec: &ChainSpec) { - let num_of_blobs = 6; + let num_of_blobs = 3; let (signed_block, blobs, proofs) = create_test_fulu_block_and_blobs::(num_of_blobs, spec); let blob_refs = blobs.iter().collect::>(); @@ -588,7 +591,8 @@ mod test { // Now reconstruct let signed_blinded_block = signed_block.into(); - let blob_indices = vec![3, 4, 5]; + // Using at least 2 blobs to make sure we're arranging the data columns correctly. + let blob_indices = vec![1, 2]; let reconstructed_blobs = reconstruct_blobs( kzg, &column_sidecars.iter().as_slice()[0..column_sidecars.len() / 2], diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index c1d1d9de67d..9601618e927 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -81,6 +81,10 @@ pub const TEST_DATA_COLUMN_SIDECARS_SSZ: &[u8] = // a different value. pub const DEFAULT_TARGET_AGGREGATORS: u64 = u64::MAX; +// Minimum and maximum number of blobs to generate in each slot when using the `NumBlobs::Random` option (default). +const DEFAULT_MIN_BLOBS: usize = 1; +const DEFAULT_MAX_BLOBS: usize = 2; + static KZG: LazyLock> = LazyLock::new(|| { let kzg = Kzg::new_from_trusted_setup(&get_trusted_setup()).expect("should create kzg"); Arc::new(kzg) @@ -172,23 +176,28 @@ fn make_rng() -> Mutex { Mutex::new(StdRng::seed_from_u64(0x0DDB1A5E5BAD5EEDu64)) } -/// Return a `ChainSpec` suitable for test usage. 
-/// -/// If the `fork_from_env` feature is enabled, read the fork to use from the FORK_NAME environment -/// variable. Otherwise use the default spec. -pub fn test_spec() -> ChainSpec { - let mut spec = if cfg!(feature = "fork_from_env") { +pub fn fork_name_from_env() -> Option { + if cfg!(feature = "fork_from_env") { let fork_name = std::env::var(FORK_NAME_ENV_VAR).unwrap_or_else(|e| { panic!( "{} env var must be defined when using fork_from_env: {:?}", FORK_NAME_ENV_VAR, e ) }); - let fork = ForkName::from_str(fork_name.as_str()).unwrap(); - fork.make_genesis_spec(E::default_spec()) + Some(ForkName::from_str(fork_name.as_str()).unwrap()) } else { - E::default_spec() - }; + None + } +} + +/// Return a `ChainSpec` suitable for test usage. +/// +/// If the `fork_from_env` feature is enabled, read the fork to use from the FORK_NAME environment +/// variable. Otherwise use the default spec. +pub fn test_spec() -> ChainSpec { + let mut spec = fork_name_from_env() + .map(|fork| fork.make_genesis_spec(E::default_spec())) + .unwrap_or_else(|| E::default_spec()); // Set target aggregators to a high value by default. spec.target_aggregators_per_committee = DEFAULT_TARGET_AGGREGATORS; @@ -3245,96 +3254,49 @@ pub enum NumBlobs { None, } +macro_rules! 
add_blob_transactions { + ($message:expr, $payload_type:ty, $num_blobs:expr, $rng:expr, $fork_name:expr) => {{ + let num_blobs = match $num_blobs { + NumBlobs::Random => $rng.random_range(DEFAULT_MIN_BLOBS..=DEFAULT_MAX_BLOBS), + NumBlobs::Number(n) => n, + NumBlobs::None => 0, + }; + let (bundle, transactions) = + execution_layer::test_utils::generate_blobs::(num_blobs, $fork_name).unwrap(); + + let payload: &mut $payload_type = &mut $message.body.execution_payload; + payload.execution_payload.transactions = <_>::default(); + for tx in Vec::from(transactions) { + payload.execution_payload.transactions.push(tx).unwrap(); + } + $message.body.blob_kzg_commitments = bundle.commitments.clone(); + bundle + }}; +} + pub fn generate_rand_block_and_blobs( fork_name: ForkName, num_blobs: NumBlobs, rng: &mut impl Rng, - spec: &ChainSpec, ) -> (SignedBeaconBlock>, Vec>) { let inner = map_fork_name!(fork_name, BeaconBlock, <_>::random_for_test(rng)); let mut block = SignedBeaconBlock::from_block(inner, types::Signature::random_for_test(rng)); - let max_blobs = spec.max_blobs_per_block(block.epoch()) as usize; let mut blob_sidecars = vec![]; let bundle = match block { SignedBeaconBlock::Deneb(SignedBeaconBlockDeneb { ref mut message, .. - }) => { - // Get either zero blobs or a random number of blobs between 1 and Max Blobs. 
- let payload: &mut FullPayloadDeneb = &mut message.body.execution_payload; - let num_blobs = match num_blobs { - NumBlobs::Random => rng.random_range(1..=max_blobs), - NumBlobs::Number(n) => n, - NumBlobs::None => 0, - }; - let (bundle, transactions) = - execution_layer::test_utils::generate_blobs::(num_blobs, fork_name).unwrap(); - - payload.execution_payload.transactions = <_>::default(); - for tx in Vec::from(transactions) { - payload.execution_payload.transactions.push(tx).unwrap(); - } - message.body.blob_kzg_commitments = bundle.commitments.clone(); - bundle - } + }) => add_blob_transactions!(message, FullPayloadDeneb, num_blobs, rng, fork_name), SignedBeaconBlock::Electra(SignedBeaconBlockElectra { ref mut message, .. - }) => { - // Get either zero blobs or a random number of blobs between 1 and Max Blobs. - let payload: &mut FullPayloadElectra = &mut message.body.execution_payload; - let num_blobs = match num_blobs { - NumBlobs::Random => rng.random_range(1..=max_blobs), - NumBlobs::Number(n) => n, - NumBlobs::None => 0, - }; - let (bundle, transactions) = - execution_layer::test_utils::generate_blobs::(num_blobs, fork_name).unwrap(); - payload.execution_payload.transactions = <_>::default(); - for tx in Vec::from(transactions) { - payload.execution_payload.transactions.push(tx).unwrap(); - } - message.body.blob_kzg_commitments = bundle.commitments.clone(); - bundle - } + }) => add_blob_transactions!(message, FullPayloadElectra, num_blobs, rng, fork_name), SignedBeaconBlock::Fulu(SignedBeaconBlockFulu { ref mut message, .. - }) => { - // Get either zero blobs or a random number of blobs between 1 and Max Blobs. 
- let payload: &mut FullPayloadFulu = &mut message.body.execution_payload; - let num_blobs = match num_blobs { - NumBlobs::Random => rng.random_range(1..=max_blobs), - NumBlobs::Number(n) => n, - NumBlobs::None => 0, - }; - let (bundle, transactions) = - execution_layer::test_utils::generate_blobs::(num_blobs, fork_name).unwrap(); - payload.execution_payload.transactions = <_>::default(); - for tx in Vec::from(transactions) { - payload.execution_payload.transactions.push(tx).unwrap(); - } - message.body.blob_kzg_commitments = bundle.commitments.clone(); - bundle - } + }) => add_blob_transactions!(message, FullPayloadFulu, num_blobs, rng, fork_name), SignedBeaconBlock::Gloas(SignedBeaconBlockGloas { ref mut message, .. - }) => { - // Get either zero blobs or a random number of blobs between 1 and Max Blobs. - let payload: &mut FullPayloadGloas = &mut message.body.execution_payload; - let num_blobs = match num_blobs { - NumBlobs::Random => rng.random_range(1..=max_blobs), - NumBlobs::Number(n) => n, - NumBlobs::None => 0, - }; - let (bundle, transactions) = - execution_layer::test_utils::generate_blobs::(num_blobs, fork_name).unwrap(); - payload.execution_payload.transactions = <_>::default(); - for tx in Vec::from(transactions) { - payload.execution_payload.transactions.push(tx).unwrap(); - } - message.body.blob_kzg_commitments = bundle.commitments.clone(); - bundle - } + }) => add_blob_transactions!(message, FullPayloadGloas, num_blobs, rng, fork_name), _ => return (block, blob_sidecars), }; @@ -3375,7 +3337,7 @@ pub fn generate_rand_block_and_data_columns( SignedBeaconBlock>, DataColumnSidecarList, ) { - let (block, _blobs) = generate_rand_block_and_blobs(fork_name, num_blobs, rng, spec); + let (block, _blobs) = generate_rand_block_and_blobs(fork_name, num_blobs, rng); let data_columns = generate_data_column_sidecars_from_block(&block, spec); (block, data_columns) } diff --git a/beacon_node/beacon_chain/tests/block_verification.rs 
b/beacon_node/beacon_chain/tests/block_verification.rs index 3d1fa8f4af4..881885cef23 100644 --- a/beacon_node/beacon_chain/tests/block_verification.rs +++ b/beacon_node/beacon_chain/tests/block_verification.rs @@ -297,19 +297,20 @@ async fn chain_segment_full_segment() { #[tokio::test] async fn chain_segment_varying_chunk_size() { - for chunk_size in &[1, 2, 3, 5, 31, 32, 33, 42] { + let (chain_segment, chain_segment_blobs) = get_chain_segment().await; + let blocks: Vec> = chain_segment_blocks(&chain_segment, &chain_segment_blobs) + .into_iter() + .collect(); + + for chunk_size in &[1, 2, 31, 32, 33] { let harness = get_harness(VALIDATOR_COUNT, NodeCustodyType::Fullnode); - let (chain_segment, chain_segment_blobs) = get_chain_segment().await; - let blocks: Vec> = chain_segment_blocks(&chain_segment, &chain_segment_blobs) - .into_iter() - .collect(); harness .chain .slot_clock .set_slot(blocks.last().unwrap().slot().as_u64()); - for chunk in blocks.chunks(*chunk_size) { + for chunk in blocks.clone().chunks(*chunk_size) { harness .chain .process_chain_segment(chunk.to_vec(), NotifyExecutionLayer::Yes) diff --git a/beacon_node/beacon_chain/tests/events.rs b/beacon_node/beacon_chain/tests/events.rs index 466058eea38..86bdb03dafd 100644 --- a/beacon_node/beacon_chain/tests/events.rs +++ b/beacon_node/beacon_chain/tests/events.rs @@ -1,20 +1,26 @@ use beacon_chain::blob_verification::GossipVerifiedBlob; use beacon_chain::data_column_verification::GossipVerifiedDataColumn; -use beacon_chain::test_utils::{BeaconChainHarness, generate_data_column_sidecars_from_block}; +use beacon_chain::test_utils::{ + BeaconChainHarness, fork_name_from_env, generate_data_column_sidecars_from_block, test_spec, +}; use eth2::types::{EventKind, SseBlobSidecar, SseDataColumnSidecar}; use rand::SeedableRng; use rand::rngs::StdRng; use std::sync::Arc; use types::blob_sidecar::FixedBlobSidecarList; use types::test_utils::TestRandom; -use types::{BlobSidecar, DataColumnSidecar, EthSpec, ForkName, 
MinimalEthSpec, Slot}; +use types::{BlobSidecar, DataColumnSidecar, EthSpec, MinimalEthSpec, Slot}; type E = MinimalEthSpec; /// Verifies that a blob event is emitted when a gossip verified blob is received via gossip or the publish block API. #[tokio::test] async fn blob_sidecar_event_on_process_gossip_blob() { - let spec = Arc::new(ForkName::Deneb.make_genesis_spec(E::default_spec())); + if fork_name_from_env().is_some_and(|f| !f.deneb_enabled() || f.fulu_enabled()) { + return; + }; + + let spec = Arc::new(test_spec::()); let harness = BeaconChainHarness::builder(E::default()) .spec(spec) .deterministic_keypairs(8) @@ -48,7 +54,11 @@ async fn blob_sidecar_event_on_process_gossip_blob() { /// Verifies that a data column event is emitted when a gossip verified data column is received via gossip or the publish block API. #[tokio::test] async fn data_column_sidecar_event_on_process_gossip_data_column() { - let spec = Arc::new(ForkName::Fulu.make_genesis_spec(E::default_spec())); + if fork_name_from_env().is_some_and(|f| !f.fulu_enabled()) { + return; + }; + + let spec = Arc::new(test_spec::()); let harness = BeaconChainHarness::builder(E::default()) .spec(spec) .deterministic_keypairs(8) @@ -93,7 +103,11 @@ async fn data_column_sidecar_event_on_process_gossip_data_column() { /// Verifies that a blob event is emitted when blobs are received via RPC. 
#[tokio::test] async fn blob_sidecar_event_on_process_rpc_blobs() { - let spec = Arc::new(ForkName::Deneb.make_genesis_spec(E::default_spec())); + if fork_name_from_env().is_some_and(|f| !f.deneb_enabled() || f.fulu_enabled()) { + return; + }; + + let spec = Arc::new(test_spec::()); let harness = BeaconChainHarness::builder(E::default()) .spec(spec) .deterministic_keypairs(8) @@ -112,7 +126,7 @@ async fn blob_sidecar_event_on_process_rpc_blobs() { let slot = head_state.slot() + 1; let ((signed_block, opt_blobs), _) = harness.make_block(head_state, slot).await; let (kzg_proofs, blobs) = opt_blobs.unwrap(); - assert!(blobs.len() > 2); + assert_eq!(blobs.len(), 2); let blob_1 = Arc::new(BlobSidecar::new(0, blobs[0].clone(), &signed_block, kzg_proofs[0]).unwrap()); @@ -144,7 +158,11 @@ async fn blob_sidecar_event_on_process_rpc_blobs() { #[tokio::test] async fn data_column_sidecar_event_on_process_rpc_columns() { - let spec = Arc::new(ForkName::Fulu.make_genesis_spec(E::default_spec())); + if fork_name_from_env().is_some_and(|f| !f.fulu_enabled()) { + return; + }; + + let spec = Arc::new(test_spec::()); let harness = BeaconChainHarness::builder(E::default()) .spec(spec.clone()) .deterministic_keypairs(8) diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index 0a261e36cef..41c8f905be7 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -7,11 +7,11 @@ use beacon_chain::custody_context::CUSTODY_CHANGE_DA_EFFECTIVE_DELAY_SECONDS; use beacon_chain::data_availability_checker::AvailableBlock; use beacon_chain::historical_data_columns::HistoricalDataColumnError; use beacon_chain::schema_change::migrate_schema; -use beacon_chain::test_utils::SyncCommitteeStrategy; use beacon_chain::test_utils::{ AttestationStrategy, BeaconChainHarness, BlockStrategy, DiskHarnessType, get_kzg, mock_execution_layer_from_parts, test_spec, }; +use 
beacon_chain::test_utils::{SyncCommitteeStrategy, fork_name_from_env}; use beacon_chain::{ BeaconChain, BeaconChainError, BeaconChainTypes, BeaconSnapshot, BlockError, ChainConfig, NotifyExecutionLayer, ServerSentEventHandler, WhenSlotSkipped, @@ -3211,12 +3211,13 @@ async fn test_import_historical_data_columns_batch() { for block in block_root_iter { let (block_root, _) = block.unwrap(); let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); - assert!(data_columns.is_some()); - for data_column in data_columns.unwrap() { + for data_column in data_columns.unwrap_or_default() { data_columns_list.push(data_column); } } + assert!(!data_columns_list.is_empty()); + harness .extend_chain( (E::slots_per_epoch() * 4) as usize, @@ -3255,8 +3256,18 @@ async fn test_import_historical_data_columns_batch() { for block in block_root_iter { let (block_root, _) = block.unwrap(); - let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); - assert!(data_columns.is_some()) + if !harness + .get_block(block_root.into()) + .unwrap() + .message() + .body() + .blob_kzg_commitments() + .unwrap() + .is_empty() + { + let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); + assert!(data_columns.is_some()) + }; } } @@ -3290,9 +3301,8 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() { for block in block_root_iter { let (block_root, _) = block.unwrap(); let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); - assert!(data_columns.is_some()); - for data_column in data_columns.unwrap() { + for data_column in data_columns.unwrap_or_default() { let mut data_column = (*data_column).clone(); if data_column.index % 2 == 0 { data_column.signed_block_header.message.body_root = Hash256::ZERO; @@ -3301,6 +3311,7 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() { data_columns_list.push(Arc::new(data_column)); } } + assert!(!data_columns_list.is_empty()); 
harness .extend_chain( @@ -3347,7 +3358,11 @@ async fn test_import_historical_data_columns_batch_mismatched_block_root() { // be imported. #[tokio::test] async fn test_import_historical_data_columns_batch_no_block_found() { - let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); + if fork_name_from_env().is_some_and(|f| !f.fulu_enabled()) { + return; + }; + + let spec = test_spec::(); let db_path = tempdir().unwrap(); let store = get_store_generic(&db_path, StoreConfig::default(), spec); let start_slot = Slot::new(1); @@ -3374,15 +3389,16 @@ async fn test_import_historical_data_columns_batch_no_block_found() { for block in block_root_iter { let (block_root, _) = block.unwrap(); let data_columns = harness.chain.store.get_data_columns(&block_root).unwrap(); - assert!(data_columns.is_some()); - for data_column in data_columns.unwrap() { + for data_column in data_columns.unwrap_or_default() { let mut data_column = (*data_column).clone(); data_column.signed_block_header.message.body_root = Hash256::ZERO; data_columns_list.push(Arc::new(data_column)); } } + assert!(!data_columns_list.is_empty()); + harness .extend_chain( (E::slots_per_epoch() * 4) as usize, @@ -4108,6 +4124,12 @@ async fn deneb_prune_blobs_no_finalization() { /// Check that blob pruning does not fail trying to prune across the fork boundary. #[tokio::test] async fn prune_blobs_across_fork_boundary() { + // This test covers earlier forks and only need to be executed once. 
+ // Note: this test is quite expensive (building a chain to epoch 15) and we should revisit this + if fork_name_from_env() != Some(ForkName::latest_stable()) { + return; + } + let mut spec = ForkName::Capella.make_genesis_spec(E::default_spec()); let deneb_fork_epoch = Epoch::new(4); @@ -4124,6 +4146,7 @@ async fn prune_blobs_across_fork_boundary() { let store = get_store_generic(&db_path, StoreConfig::default(), spec); let harness = get_harness(store.clone(), LOW_VALIDATOR_COUNT); + harness.execution_block_generator().set_min_blob_count(1); let blocks_to_deneb_finalization = E::slots_per_epoch() * 7; let blocks_to_electra_finalization = E::slots_per_epoch() * 4; @@ -4279,7 +4302,7 @@ async fn prune_blobs_across_fork_boundary() { // Fulu fork epochs // Pruning should have been triggered assert!(store.get_blob_info().oldest_blob_slot <= Some(oldest_slot)); - // Oldest blost slot should never be greater than the first fulu slot + // Oldest blob slot should never be greater than the first fulu slot let fulu_first_slot = fulu_fork_epoch.start_slot(E::slots_per_epoch()); assert!(store.get_blob_info().oldest_blob_slot <= Some(fulu_first_slot)); // Blobs should not exist post-Fulu @@ -4764,7 +4787,7 @@ async fn fulu_prune_data_columns_margin_test(margin: u64) { check_data_column_existence(&harness, oldest_data_column_slot, harness.head_slot(), true); } -/// Check tat there are data column sidecars (or not) at every slot in the range. +/// Check that there are data column sidecars (or not) at every slot in the range. 
fn check_data_column_existence( harness: &TestHarness, start_slot: Slot, diff --git a/beacon_node/execution_layer/src/engine_api/http.rs b/beacon_node/execution_layer/src/engine_api/http.rs index 74fb078510e..a8dbed34cee 100644 --- a/beacon_node/execution_layer/src/engine_api/http.rs +++ b/beacon_node/execution_layer/src/engine_api/http.rs @@ -1479,8 +1479,7 @@ mod test { impl Tester { pub fn new(with_auth: bool) -> Self { - let spec = Arc::new(MainnetEthSpec::default_spec()); - let server = MockServer::unit_testing(spec); + let server = MockServer::unit_testing(); let rpc_url = SensitiveUrl::parse(&server.url()).unwrap(); let echo_url = SensitiveUrl::parse(&format!("{}/echo", server.url())).unwrap(); diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index 5652e557f27..1c1e3074174 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -13,6 +13,7 @@ use rand::{Rng, SeedableRng, rngs::StdRng}; use serde::{Deserialize, Serialize}; use ssz::Decode; use ssz_types::VariableList; +use std::cmp::max; use std::collections::HashMap; use std::sync::Arc; use tree_hash::TreeHash; @@ -157,7 +158,6 @@ pub struct ExecutionBlockGenerator { pub blobs_bundles: HashMap>, pub kzg: Option>, rng: Arc>, - spec: Arc, } fn make_rng() -> Arc> { @@ -177,7 +177,6 @@ impl ExecutionBlockGenerator { prague_time: Option, osaka_time: Option, amsterdam_time: Option, - spec: Arc, kzg: Option>, ) -> Self { let mut generator = Self { @@ -200,7 +199,6 @@ impl ExecutionBlockGenerator { blobs_bundles: <_>::default(), kzg, rng: make_rng(), - spec, }; generator.insert_pow_block(0).unwrap(); @@ -732,11 +730,10 @@ impl ExecutionBlockGenerator { let fork_name = execution_payload.fork_name(); if fork_name.deneb_enabled() { - // get random number between 0 and Max Blobs + // get random number 
between 0 and 1 blobs by default + // For tests that need higher blob count, consider adding a `set_max_blob_count` method let mut rng = self.rng.lock(); - // TODO(EIP-7892): see FIXME below - // FIXME: this will break with BPO forks. This function needs to calculate the epoch based on block timestamp.. - let max_blobs = self.spec.max_blobs_per_block_within_fork(fork_name) as usize; + let max_blobs = max(1, self.min_blobs_count); let num_blobs = rng.random_range(self.min_blobs_count..=max_blobs); let (bundle, transactions) = generate_blobs(num_blobs, fork_name)?; for tx in Vec::from(transactions) { @@ -978,7 +975,6 @@ mod test { const TERMINAL_DIFFICULTY: u64 = 10; const TERMINAL_BLOCK: u64 = 10; const DIFFICULTY_INCREMENT: u64 = 1; - let spec = Arc::new(MainnetEthSpec::default_spec()); let mut generator: ExecutionBlockGenerator = ExecutionBlockGenerator::new( Uint256::from(TERMINAL_DIFFICULTY), @@ -989,7 +985,6 @@ mod test { None, None, None, - spec, None, ); diff --git a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs index 9e587d4e590..73c998956ca 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs @@ -63,7 +63,6 @@ impl MockExecutionLayer { prague_time, osaka_time, amsterdam_time, - spec.clone(), kzg, ); diff --git a/beacon_node/execution_layer/src/test_utils/mod.rs b/beacon_node/execution_layer/src/test_utils/mod.rs index 712c773dda0..8f129715606 100644 --- a/beacon_node/execution_layer/src/test_utils/mod.rs +++ b/beacon_node/execution_layer/src/test_utils/mod.rs @@ -22,7 +22,7 @@ use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use std::sync::{Arc, LazyLock}; use tokio::{runtime, sync::oneshot}; use tracing::info; -use types::{ChainSpec, EthSpec, ExecutionBlockHash, Uint256}; +use types::{EthSpec, ExecutionBlockHash, Uint256}; use warp::{Filter, Rejection, 
http::StatusCode}; use crate::EngineCapabilities; @@ -114,7 +114,7 @@ pub struct MockServer { } impl MockServer { - pub fn unit_testing(chain_spec: Arc) -> Self { + pub fn unit_testing() -> Self { Self::new( &runtime::Handle::current(), JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap(), @@ -126,7 +126,6 @@ impl MockServer { None, // FIXME(electra): should this be the default? None, // FIXME(fulu): should this be the default? None, // FIXME(gloas): should this be the default? - chain_spec, None, ) } @@ -134,7 +133,6 @@ impl MockServer { pub fn new_with_config( handle: &runtime::Handle, config: MockExecutionConfig, - spec: Arc, kzg: Option>, ) -> Self { create_test_tracing_subscriber(); @@ -161,7 +159,6 @@ impl MockServer { prague_time, osaka_time, amsterdam_time, - spec, kzg, ); @@ -226,7 +223,6 @@ impl MockServer { prague_time: Option, osaka_time: Option, amsterdam_time: Option, - spec: Arc, kzg: Option>, ) -> Self { Self::new_with_config( @@ -243,7 +239,6 @@ impl MockServer { osaka_time, amsterdam_time, }, - spec, kzg, ) } diff --git a/beacon_node/http_api/tests/broadcast_validation_tests.rs b/beacon_node/http_api/tests/broadcast_validation_tests.rs index 9427f6fdf35..1b79c13d76f 100644 --- a/beacon_node/http_api/tests/broadcast_validation_tests.rs +++ b/beacon_node/http_api/tests/broadcast_validation_tests.rs @@ -822,6 +822,14 @@ pub async fn blinded_gossip_invalid() { tester.harness.advance_slot(); + // Ensure there's at least one blob in the block, so we don't run into failures when the + // block generator logic changes, as different errors could be returned: + // * Invalidity of blocks: `NotFinalizedDescendant` + // * Invalidity of blobs: `ParentUnknown` + tester + .harness + .execution_block_generator() + .set_min_blob_count(1); let (blinded_block, _) = tester .harness .make_blinded_block_with_modifier(chain_state_before, slot, |b| { @@ -837,21 +845,20 @@ pub async fn blinded_gossip_invalid() { assert!(response.is_err()); let error_response: eth2::Error = 
response.err().unwrap(); + assert_eq!(error_response.status(), Some(StatusCode::BAD_REQUEST)); + let pre_finalized_block_root = Hash256::zero(); - /* mandated by Beacon API spec */ - if tester.harness.spec.is_fulu_scheduled() { - // XXX: this should be a 400 but is a 500 due to the mock-builder being janky - assert_eq!( - error_response.status(), - Some(StatusCode::INTERNAL_SERVER_ERROR) - ); + let expected_error_msg = if tester.harness.spec.is_fulu_scheduled() { + format!( + "BAD_REQUEST: NotFinalizedDescendant {{ block_parent_root: {pre_finalized_block_root:?} }}" + ) } else { - assert_eq!(error_response.status(), Some(StatusCode::BAD_REQUEST)); - assert_server_message_error( - error_response, - format!("BAD_REQUEST: ParentUnknown {{ parent_root: {pre_finalized_block_root:?} }}"), - ); - } + // Since Deneb, the invalidity of the blobs will be detected prior to the invalidity of the + // block. + format!("BAD_REQUEST: ParentUnknown {{ parent_root: {pre_finalized_block_root:?} }}") + }; + + assert_server_message_error(error_response, expected_error_msg); } /// Process a blinded block that is invalid, but valid on gossip. 
@@ -1647,6 +1654,10 @@ pub async fn block_seen_on_gossip_with_some_blobs_or_columns() { ) .await; tester.harness.advance_slot(); + tester + .harness + .execution_block_generator() + .set_min_blob_count(2); let slot_a = Slot::new(num_initial); let slot_b = slot_a + 1; diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs index 3b69430efcf..6fb5a8ed8aa 100644 --- a/beacon_node/http_api/tests/tests.rs +++ b/beacon_node/http_api/tests/tests.rs @@ -178,6 +178,9 @@ impl ApiTester { "precondition: current slot is one after head" ); + // Set a min blob count for the next block for get_blobs testing + harness.execution_block_generator().set_min_blob_count(2); + let (next_block, _next_state) = harness .make_block(head.beacon_state.clone(), harness.chain.slot().unwrap()) .await; @@ -1869,7 +1872,7 @@ impl ApiTester { } pub async fn test_get_blob_sidecars(self, use_indices: bool) -> Self { - let block_id = BlockId(CoreBlockId::Finalized); + let block_id = BlockId(CoreBlockId::Head); let (block_root, _, _) = block_id.root(&self.chain).unwrap(); let (block, _, _) = block_id.full_block(&self.chain).await.unwrap(); let num_blobs = block.num_expected_blobs(); @@ -1902,7 +1905,7 @@ impl ApiTester { } pub async fn test_get_blobs(self, versioned_hashes: bool) -> Self { - let block_id = BlockId(CoreBlockId::Finalized); + let block_id = BlockId(CoreBlockId::Head); let (block_root, _, _) = block_id.root(&self.chain).unwrap(); let (block, _, _) = block_id.full_block(&self.chain).await.unwrap(); let num_blobs = block.num_expected_blobs(); @@ -1940,7 +1943,7 @@ impl ApiTester { } pub async fn test_get_blobs_post_fulu_full_node(self, versioned_hashes: bool) -> Self { - let block_id = BlockId(CoreBlockId::Finalized); + let block_id = BlockId(CoreBlockId::Head); let (block_root, _, _) = block_id.root(&self.chain).unwrap(); let (block, _, _) = block_id.full_block(&self.chain).await.unwrap(); @@ -7867,6 +7870,8 @@ async fn get_blobs_post_fulu_supernode() { 
config.spec.fulu_fork_epoch = Some(Epoch::new(0)); ApiTester::new_from_config(config) + .await + .test_post_beacon_blocks_valid() .await // We can call the same get_blobs function in this test // because the function will call get_blobs_by_versioned_hashes which handles peerDAS post-Fulu @@ -7887,6 +7892,8 @@ async fn get_blobs_post_fulu_full_node() { config.spec.fulu_fork_epoch = Some(Epoch::new(0)); ApiTester::new_from_config(config) + .await + .test_post_beacon_blocks_valid() .await .test_get_blobs_post_fulu_full_node(false) .await diff --git a/beacon_node/network/src/sync/block_sidecar_coupling.rs b/beacon_node/network/src/sync/block_sidecar_coupling.rs index d5858c23f11..01929cbf906 100644 --- a/beacon_node/network/src/sync/block_sidecar_coupling.rs +++ b/beacon_node/network/src/sync/block_sidecar_coupling.rs @@ -517,11 +517,10 @@ mod tests { #[test] fn no_blobs_into_responses() { - let spec = test_spec::(); let mut rng = XorShiftRng::from_seed([42; 16]); let blocks = (0..4) .map(|_| { - generate_rand_block_and_blobs::(ForkName::Base, NumBlobs::None, &mut rng, &spec) + generate_rand_block_and_blobs::(ForkName::Base, NumBlobs::None, &mut rng) .0 .into() }) @@ -540,19 +539,13 @@ mod tests { #[test] fn empty_blobs_into_responses() { - let spec = test_spec::(); let mut rng = XorShiftRng::from_seed([42; 16]); let blocks = (0..4) .map(|_| { // Always generate some blobs. 
- generate_rand_block_and_blobs::( - ForkName::Deneb, - NumBlobs::Number(3), - &mut rng, - &spec, - ) - .0 - .into() + generate_rand_block_and_blobs::(ForkName::Deneb, NumBlobs::Number(3), &mut rng) + .0 + .into() }) .collect::>>>(); diff --git a/beacon_node/network/src/sync/tests/lookups.rs b/beacon_node/network/src/sync/tests/lookups.rs index fc641861754..63bcd176f52 100644 --- a/beacon_node/network/src/sync/tests/lookups.rs +++ b/beacon_node/network/src/sync/tests/lookups.rs @@ -194,7 +194,7 @@ impl TestRig { ) -> (SignedBeaconBlock, Vec>) { let fork_name = self.fork_name; let rng = &mut self.rng; - generate_rand_block_and_blobs::(fork_name, num_blobs, rng, &self.spec) + generate_rand_block_and_blobs::(fork_name, num_blobs, rng) } fn rand_block_and_data_columns( @@ -1146,10 +1146,8 @@ impl TestRig { #[test] fn stable_rng() { - let spec = types::MainnetEthSpec::default_spec(); let mut rng = XorShiftRng::from_seed([42; 16]); - let (block, _) = - generate_rand_block_and_blobs::(ForkName::Base, NumBlobs::None, &mut rng, &spec); + let (block, _) = generate_rand_block_and_blobs::(ForkName::Base, NumBlobs::None, &mut rng); assert_eq!( block.canonical_root(), Hash256::from_slice( diff --git a/consensus/types/src/test_utils/generate_random_block_and_blobs.rs b/consensus/types/src/test_utils/generate_random_block_and_blobs.rs index 0f52e485a8a..8f4908291ee 100644 --- a/consensus/types/src/test_utils/generate_random_block_and_blobs.rs +++ b/consensus/types/src/test_utils/generate_random_block_and_blobs.rs @@ -77,7 +77,7 @@ mod test { #[test] fn test_verify_blob_inclusion_proof() { let (_block, blobs) = - generate_rand_block_and_blobs::(ForkName::Deneb, 6, &mut rng()); + generate_rand_block_and_blobs::(ForkName::Deneb, 2, &mut rng()); for blob in blobs { assert!(blob.verify_blob_sidecar_inclusion_proof()); } @@ -115,7 +115,7 @@ mod test { #[test] fn test_verify_blob_inclusion_proof_invalid() { let (_block, blobs) = - generate_rand_block_and_blobs::(ForkName::Deneb, 6, &mut 
rng()); + generate_rand_block_and_blobs::(ForkName::Deneb, 1, &mut rng()); for mut blob in blobs { blob.kzg_commitment_inclusion_proof = FixedVector::random_for_test(&mut rng()); diff --git a/lcli/src/mock_el.rs b/lcli/src/mock_el.rs index ee6485b2388..d6bdfb0d712 100644 --- a/lcli/src/mock_el.rs +++ b/lcli/src/mock_el.rs @@ -44,7 +44,7 @@ pub fn run(mut env: Environment, matches: &ArgMatches) -> Result< amsterdam_time, }; let kzg = None; - let server: MockServer = MockServer::new_with_config(&handle, config, spec, kzg); + let server: MockServer = MockServer::new_with_config(&handle, config, kzg); if all_payloads_valid { eprintln!( diff --git a/testing/node_test_rig/src/lib.rs b/testing/node_test_rig/src/lib.rs index df191ed5af7..e49d11ee1eb 100644 --- a/testing/node_test_rig/src/lib.rs +++ b/testing/node_test_rig/src/lib.rs @@ -248,14 +248,8 @@ impl LocalExecutionNode { if let Err(e) = std::fs::write(jwt_file_path, config.jwt_key.hex_string()) { panic!("Failed to write jwt file {}", e); } - let spec = context.eth2_config.spec.clone(); Self { - server: MockServer::new_with_config( - &context.executor.handle().unwrap(), - config, - spec, - None, - ), + server: MockServer::new_with_config(&context.executor.handle().unwrap(), config, None), datadir, } } From a7e89a8761cee4f3ec2081acb1605a3f0915af5d Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Wed, 5 Nov 2025 13:08:46 +1100 Subject: [PATCH 12/74] Optimise `state_root_at_slot` for finalized slot (#8353) This is an optimisation targeted at Fulu networks in non-finality. 
While debugging on Holesky, we found that `state_root_at_slot` was being called from `prepare_beacon_proposer` a lot, for the finalized state: https://github.com/sigp/lighthouse/blob/2c9b670f5d313450252c6cb40a5ee34802d54fef/beacon_node/http_api/src/lib.rs#L3860-L3861 This was causing `prepare_beacon_proposer` calls to take upwards of 5 seconds, sometimes 10 seconds, because it would trigger _multiple_ beacon state loads in order to iterate back to the finalized slot. Ideally, loading the finalized state should be quick because we keep it cached in the state cache (technically we keep the split state, but they usually coincide). Instead we are computing the finalized state root separately (slow), and then loading the state from the cache (fast). Although it would be possible to make the API faster by removing the `state_root_at_slot` call, I believe it's simpler to change `state_root_at_slot` itself and remove the footgun. Devs rightly expect operations involving the finalized state to be fast. Co-Authored-By: Michael Sproul --- beacon_node/beacon_chain/src/beacon_chain.rs | 6 ++++++ beacon_node/beacon_chain/tests/store_tests.rs | 4 ++++ 2 files changed, 10 insertions(+) diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 58532116e6c..5ffdf951ac1 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -883,6 +883,12 @@ impl BeaconChain { return Ok(None); } + // Fast-path for the split slot (which usually corresponds to the finalized slot). + let split = self.store.get_split_info(); + if request_slot == split.slot { + return Ok(Some(split.state_root)); + } + // Try an optimized path of reading the root directly from the head state. 
let fast_lookup: Option = self.with_head(|head| { if head.beacon_block.slot() <= request_slot { diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index 25f824c19bd..638c221a7fa 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -3155,6 +3155,10 @@ async fn weak_subjectivity_sync_test( .get_state(&state_root, Some(slot), CACHE_STATE_IN_TESTS) .unwrap() .unwrap(); + assert_eq!( + state_root, + beacon_chain.state_root_at_slot(slot).unwrap().unwrap() + ); assert_eq!(state.slot(), slot); assert_eq!(state.canonical_root().unwrap(), state_root); } From efadbb315a09928ea0e27e29cb946fc57fb1e1c4 Mon Sep 17 00:00:00 2001 From: antondlr Date: Wed, 5 Nov 2025 04:53:01 +0100 Subject: [PATCH 13/74] Remove Windows CI jobs (#8362) Remove all Windows-related CI jobs Co-Authored-By: antondlr --- .github/workflows/release.yml | 39 +------------------------------- .github/workflows/test-suite.yml | 33 --------------------------- 2 files changed, 1 insertion(+), 71 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7c85cdd05cc..f7b65f07c91 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -32,8 +32,7 @@ jobs: matrix: arch: [aarch64-unknown-linux-gnu, x86_64-unknown-linux-gnu, - aarch64-apple-darwin, - x86_64-windows] + aarch64-apple-darwin] include: - arch: aarch64-unknown-linux-gnu runner: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "release", "large"]') || 'ubuntu-latest' }} @@ -44,9 +43,6 @@ jobs: - arch: aarch64-apple-darwin runner: macos-14 profile: maxperf - - arch: x86_64-windows - runner: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "windows", "release"]') || 'windows-2019' }} - profile: maxperf runs-on: ${{ matrix.runner }} needs: extract-version @@ -57,19 +53,6 @@ jobs: if: env.SELF_HOSTED_RUNNERS == 'false' run: rustup 
update stable - # ============================== - # Windows dependencies - # ============================== - - - uses: KyleMayes/install-llvm-action@v1 - if: env.SELF_HOSTED_RUNNERS == 'false' && startsWith(matrix.arch, 'x86_64-windows') - with: - version: "17.0" - directory: ${{ runner.temp }}/llvm - - name: Set LIBCLANG_PATH - if: startsWith(matrix.arch, 'x86_64-windows') - run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV - # ============================== # Builds # ============================== @@ -94,12 +77,7 @@ jobs: if: matrix.arch == 'aarch64-apple-darwin' run: cargo install --path lighthouse --force --locked --features portable,gnosis --profile ${{ matrix.profile }} - - name: Build Lighthouse for Windows - if: matrix.arch == 'x86_64-windows' - run: cargo install --path lighthouse --force --locked --features portable,gnosis --profile ${{ matrix.profile }} - - name: Configure GPG and create artifacts - if: startsWith(matrix.arch, 'x86_64-windows') != true env: GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }} GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} @@ -118,20 +96,6 @@ jobs: done mv *tar.gz* .. - - name: Configure GPG and create artifacts Windows - if: startsWith(matrix.arch, 'x86_64-windows') - env: - GPG_SIGNING_KEY: ${{ secrets.GPG_SIGNING_KEY }} - GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }} - run: | - echo $env:GPG_SIGNING_KEY | gpg --batch --import - mkdir artifacts - move $env:USERPROFILE/.cargo/bin/lighthouse.exe ./artifacts - cd artifacts - tar -czf lighthouse-${{ needs.extract-version.outputs.VERSION }}-${{ matrix.arch }}.tar.gz lighthouse.exe - gpg --passphrase "$env:GPG_PASSPHRASE" --batch --pinentry-mode loopback -ab lighthouse-${{ needs.extract-version.outputs.VERSION }}-${{ matrix.arch }}.tar.gz - move *tar.gz* .. 
- # ======================================================================= # Upload artifacts # This is required to share artifacts between different jobs @@ -239,7 +203,6 @@ jobs: | Apple logo | aarch64 | [lighthouse-${{ env.VERSION }}-aarch64-apple-darwin.tar.gz](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-aarch64-apple-darwin.tar.gz) | [PGP Signature](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-aarch64-apple-darwin.tar.gz.asc) | | Linux logo | x86_64 | [lighthouse-${{ env.VERSION }}-x86_64-unknown-linux-gnu.tar.gz](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-x86_64-unknown-linux-gnu.tar.gz) | [PGP Signature](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-x86_64-unknown-linux-gnu.tar.gz.asc) | | Raspberrypi logo | aarch64 | [lighthouse-${{ env.VERSION }}-aarch64-unknown-linux-gnu.tar.gz](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-aarch64-unknown-linux-gnu.tar.gz) | [PGP Signature](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-aarch64-unknown-linux-gnu.tar.gz.asc) | - | Windows logo | x86_64 | [lighthouse-${{ env.VERSION }}-x86_64-windows.tar.gz](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-x86_64-windows.tar.gz) | [PGP Signature](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/lighthouse-${{ env.VERSION }}-x86_64-windows.tar.gz.asc) | | | | | | | **System** | **Option** | - | **Resource** | | Docker logo | Docker | [${{ env.VERSION }}](https://hub.docker.com/r/${{ env.IMAGE_NAME }}/tags?page=1&ordering=last_updated&name=${{ env.VERSION }}) | [${{ env.IMAGE_NAME }}](https://hub.docker.com/r/${{ 
env.IMAGE_NAME }}) | diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index 0201bf9ae30..0cdd8211da8 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -107,38 +107,6 @@ jobs: if: env.SELF_HOSTED_RUNNERS == 'true' continue-on-error: true run: sccache --show-stats - release-tests-windows: - name: release-tests-windows - needs: [check-labels] - if: needs.check-labels.outputs.skip_ci != 'true' - runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "windows", "CI"]') || 'windows-2019' }} - steps: - - uses: actions/checkout@v5 - - name: Get latest version of stable Rust - if: env.SELF_HOSTED_RUNNERS == 'false' - uses: moonrepo/setup-rust@v1 - with: - channel: stable - cache-target: release - bins: cargo-nextest - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Install Foundry (anvil) - if: env.SELF_HOSTED_RUNNERS == 'false' - uses: foundry-rs/foundry-toolchain@v1 - with: - version: nightly-ca67d15f4abd46394b324c50e21e66f306a1162d - - name: Install make - if: env.SELF_HOSTED_RUNNERS == 'false' - run: choco install -y make - - name: Set LIBCLANG_PATH - run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV - - name: Run tests in release - run: make test-release - - name: Show cache stats - if: env.SELF_HOSTED_RUNNERS == 'true' - continue-on-error: true - run: sccache --show-stats beacon-chain-tests: name: beacon-chain-tests needs: [check-labels] @@ -501,7 +469,6 @@ jobs: 'check-labels', 'target-branch-check', 'release-tests-ubuntu', - 'release-tests-windows', 'beacon-chain-tests', 'op-pool-tests', 'network-tests', From 1e10329c98c980e2fdac45de66587b9acb7a4980 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Wed, 5 Nov 2025 11:53:03 +0800 Subject: [PATCH 14/74] Update proposer-only section in the documentation (#8358) Co-Authored-By: Tan Chee Keong Co-Authored-By: Michael Sproul --- 
book/src/advanced_blobs.md | 4 ++-- book/src/advanced_database_migrations.md | 1 + book/src/advanced_proposer_only.md | 4 +--- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/book/src/advanced_blobs.md b/book/src/advanced_blobs.md index ccc29acf263..bb989a85d89 100644 --- a/book/src/advanced_blobs.md +++ b/book/src/advanced_blobs.md @@ -2,7 +2,7 @@ With the [Fusaka](https://ethereum.org/roadmap/fusaka) upgrade, the main feature [PeerDAS](https://ethereum.org/roadmap/fusaka#peerdas) allows storing only a portion of blob data, known as data columns, thus reducing the storage and bandwidth requirements of a full node. This however also means that a full node will not be able to serve blobs after Fusaka. To continue serving blobs, run the beacon node with `--semi-supernode` or `--supernode`. Note that this comes at a significant increase in storage and bandwidth requirements, see [this blog post about PeerDAS](https://blog.sigmaprime.io/peerdas-distributed-blob-building.html) and [Fusaka bandwidth estimation](https://ethpandaops.io/posts/fusaka-bandwidth-estimation/) for more details. -> Note: the above assumes that the beacon node has no attached validators. If the beacon node has attached validators, then it is required to custody (store) a certain number of data columns which increases with the number of staked ETH. For example, if the staked ETH is `$\geq$` 2048 ETH, then due to custody requirement, it will make the beacon node a semi-supernode ; if `$\geq$` 4096 ETH, the beacon node will be a supernode without needing the flag. +> Note: the above assumes that the beacon node has no attached validators. If the beacon node has attached validators, then it is required to custody (store) a certain number of data columns which increases with the number of staked ETH. 
For example, if the staked ETH is >= 2048 ETH, then due to custody requirement, it will make the beacon node a semi-supernode ; if >= 4096 ETH, the beacon node will be a supernode without needing the flag. Table below summarizes the role of relevant flags in Lighthouse beacon node: @@ -17,7 +17,7 @@ While both `--supernode` and `--semi-supernode` can serve blobs, a supernode wil Combining `--prune-blobs false` and `--supernode` (or `--semi-supernode`) implies that no data columns will be pruned, and the node will be able to serve blobs since using the flag. -If you want historical blob data beyond the data availability period (18 days), you can backfill blobs or data columns with the experimental flag `--complete-blobs-backfill`. However, do note that this is an experimental feature and it may cause some issues, e.g., the node may block most of its peers. +If you want historical blob data beyond the data availability period (18 days), you can backfill blobs or data columns with the experimental flag `--complete-blob-backfill`. However, do note that this is an experimental feature and it only works when the flag is present during a fresh checkpoint sync when the database is initialised. The flag will have no effect if the node is already running (with an existing database). During blob backfill, the feature may cause some issues, e.g., the node may block most of its peers. **⚠️ The following section on Blobs is archived and not maintained as blobs are stored in the form of data columns after the Fulu fork ⚠️** diff --git a/book/src/advanced_database_migrations.md b/book/src/advanced_database_migrations.md index 3552a90b0e8..115a8858780 100644 --- a/book/src/advanced_database_migrations.md +++ b/book/src/advanced_database_migrations.md @@ -17,6 +17,7 @@ validator client or the slasher**. | Lighthouse version | Release date | Schema version | Downgrade available? 
| |--------------------|--------------|----------------|----------------------| +| v8.0.0 | Nov 2025 | v28 | yes before Fulu | | v8.0.0-rc.0 | Sep 2025 | v28 | yes before Fulu | | v7.1.0 | Jul 2025 | v26 | yes | | v7.0.0 | Apr 2025 | v22 | no | diff --git a/book/src/advanced_proposer_only.md b/book/src/advanced_proposer_only.md index f55e51606cf..1ef7a066559 100644 --- a/book/src/advanced_proposer_only.md +++ b/book/src/advanced_proposer_only.md @@ -23,9 +23,7 @@ normal activities such as performing attestations, but it will make the node harder to identify as a potential node to attack and will also consume less resources. -Specifically, this flag reduces the default peer count (to a safe minimal -number as maintaining peers on attestation subnets do not need to be considered), -prevents the node from subscribing to any attestation-subnets or +Specifically, this flag prevents the node from subscribing to any attestation-subnets or sync-committees which is a primary way for attackers to de-anonymize validators. From 8f7dcf02ba54edf264acffbe26e01fbbba23c18e Mon Sep 17 00:00:00 2001 From: hopinheimer <48147533+hopinheimer@users.noreply.github.com> Date: Wed, 5 Nov 2025 11:49:35 +0530 Subject: [PATCH 15/74] Fix unaggregated delay metric (#8366) while working on this #7892 @michaelsproul pointed it might be a good metric to measure the delay from start of the slot instead of the current `slot_duration / 3`, since the attestations duties start before the `1/3rd` mark now with the change in the link PR. 
Co-Authored-By: hopinheimer Co-Authored-By: hopinheimer <48147533+hopinheimer@users.noreply.github.com> --- beacon_node/beacon_chain/src/metrics.rs | 2 +- beacon_node/beacon_chain/src/validator_monitor.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/beacon_node/beacon_chain/src/metrics.rs b/beacon_node/beacon_chain/src/metrics.rs index 8f1da7b67b7..e6557c7a270 100644 --- a/beacon_node/beacon_chain/src/metrics.rs +++ b/beacon_node/beacon_chain/src/metrics.rs @@ -1185,7 +1185,7 @@ pub static VALIDATOR_MONITOR_UNAGGREGATED_ATTESTATION_DELAY_SECONDS: LazyLock< > = LazyLock::new(|| { try_create_histogram_vec( "validator_monitor_unaggregated_attestation_delay_seconds", - "The delay between when the validator should send the attestation and when it was received.", + "The delay between when the validator sent the attestation and the start of the slot.", &["src", "validator"], ) }); diff --git a/beacon_node/beacon_chain/src/validator_monitor.rs b/beacon_node/beacon_chain/src/validator_monitor.rs index 00c30e5ab1d..ba06d5da4ec 100644 --- a/beacon_node/beacon_chain/src/validator_monitor.rs +++ b/beacon_node/beacon_chain/src/validator_monitor.rs @@ -1214,7 +1214,7 @@ impl ValidatorMonitor { let delay = get_message_delay_ms( seen_timestamp, data.slot, - slot_clock.unagg_attestation_production_delay(), + Duration::from_secs(0), slot_clock, ); From 7b1cbca264f32a11857d7648a1c79225fe8f289d Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Wed, 5 Nov 2025 18:46:30 +1100 Subject: [PATCH 16/74] Downgrade and remove unnecessary logs (#8367) ### Downgrade a non error to `Debug` I noticed this error on one of our hoodi nodes: ``` Nov 04 05:13:38.892 ERROR Error during data column reconstruction block_root: 0x4271b9efae7deccec3989bd2418e998b83ce8144210c2b17200abb62b7951190, error: DuplicateFullyImported(0x4271b9efae7deccec3989bd2418e998b83ce8144210c2b17200abb62b7951190) ``` This shouldn't be logged as an error and it's due to a normal race condition, and it doesn't 
impact the node negatively. ### Remove spammy logs This logs is filling up the log files quite quickly and it is also something we'd expect during normal operation - getting columns via EL before gossip. We haven't found this debug log to be useful, so I propose we remove it to avoid spamming debug logs. ``` Received already available column sidecar. Ignoring the column sidecar ``` In the process of removing this, I noticed we aren't propagating the validation result, which I think we should so I've added this. The impact should be quite minimal - the message will stay in the gossip memcache for a bit longer but should be evicted in the next heartbeat. Co-Authored-By: Jimmy Chen --- .../src/network_beacon_processor/gossip_methods.rs | 11 +++++------ .../network/src/network_beacon_processor/mod.rs | 3 +++ 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/beacon_node/network/src/network_beacon_processor/gossip_methods.rs b/beacon_node/network/src/network_beacon_processor/gossip_methods.rs index 1ffe921e58e..eb70147c6ef 100644 --- a/beacon_node/network/src/network_beacon_processor/gossip_methods.rs +++ b/beacon_node/network/src/network_beacon_processor/gossip_methods.rs @@ -735,12 +735,11 @@ impl NetworkBeaconProcessor { // Data column is available via either the EL or reconstruction. // Do not penalise the peer. // Gossip filter should filter any duplicates received after this. - debug!( - %slot, - %block_root, - %index, - "Received already available column sidecar. Ignoring the column sidecar" - ) + self.propagate_validation_result( + message_id, + peer_id, + MessageAcceptance::Ignore, + ); } GossipDataColumnError::FutureSlot { .. } | GossipDataColumnError::PastFinalizedSlot { .. 
} => { diff --git a/beacon_node/network/src/network_beacon_processor/mod.rs b/beacon_node/network/src/network_beacon_processor/mod.rs index 5fa2361f280..bebda36d71c 100644 --- a/beacon_node/network/src/network_beacon_processor/mod.rs +++ b/beacon_node/network/src/network_beacon_processor/mod.rs @@ -866,6 +866,9 @@ impl NetworkBeaconProcessor { "Reconstruction not required for block" ); } + Err(BlockError::DuplicateFullyImported(_)) => { + debug!("Block already imported in parallel with reconstruction"); + } Err(e) => { error!( %block_root, From 3066f0bef297ab5eb0201e43b19ca57ba6536128 Mon Sep 17 00:00:00 2001 From: Mac L Date: Wed, 5 Nov 2025 11:46:32 +0400 Subject: [PATCH 17/74] Prepare `sensitive_url` for `crates.io` (#8223) Another good candidate for publishing separately from Lighthouse is `sensitive_url` as it's a general utility crate and not related to Ethereum. This PR prepares it to be spun out into its own crate. I've made the `full` field on `SensitiveUrl` private and instead provided an explicit getter called `.expose_full()`. It's a bit ugly for the diff but I prefer the explicit nature of the getter. I've also added some extra tests and doc strings along with feature gating `Serialize` and `Deserialize` implementations behind the `serde` feature. 
Co-Authored-By: Mac L --- Cargo.lock | 1 + Cargo.toml | 2 +- beacon_node/builder_client/src/lib.rs | 14 +- .../execution_layer/src/engine_api/http.rs | 2 +- common/eth2/src/lib.rs | 16 +- common/eth2/src/lighthouse.rs | 26 +-- common/eth2/src/lighthouse_vc/http_client.rs | 36 ++-- common/monitoring_api/src/lib.rs | 2 +- common/sensitive_url/Cargo.toml | 8 +- common/sensitive_url/src/lib.rs | 177 +++++++++++++++--- lighthouse/tests/beacon_node.rs | 14 +- lighthouse/tests/validator_client.rs | 4 +- .../src/test_rig.rs | 4 +- .../beacon_node_fallback/src/lib.rs | 4 +- validator_manager/src/exit_validators.rs | 5 +- validator_manager/src/list_validators.rs | 3 +- 16 files changed, 225 insertions(+), 93 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 676f674b1bb..fad3ad2ffc2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8485,6 +8485,7 @@ name = "sensitive_url" version = "0.1.0" dependencies = [ "serde", + "serde_json", "url", ] diff --git a/Cargo.toml b/Cargo.toml index 1dfc753b8c2..d09b0fcd80c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -225,7 +225,7 @@ rpds = "0.11" rusqlite = { version = "0.28", features = ["bundled"] } rust_eth_kzg = "0.9" safe_arith = "0.1" -sensitive_url = { path = "common/sensitive_url" } +sensitive_url = { path = "common/sensitive_url", features = ["serde"] } serde = { version = "1", features = ["derive"] } serde_json = "1" serde_repr = "0.1" diff --git a/beacon_node/builder_client/src/lib.rs b/beacon_node/builder_client/src/lib.rs index 6b993542f35..b486e77083a 100644 --- a/beacon_node/builder_client/src/lib.rs +++ b/beacon_node/builder_client/src/lib.rs @@ -270,7 +270,7 @@ impl BuilderHttpClient { &self, validator: &[SignedValidatorRegistrationData], ) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
@@ -289,7 +289,7 @@ impl BuilderHttpClient { &self, blinded_block: &SignedBlindedBeaconBlock, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); let body = blinded_block.as_ssz_bytes(); @@ -337,7 +337,7 @@ impl BuilderHttpClient { &self, blinded_block: &SignedBlindedBeaconBlock, ) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); let body = blinded_block.as_ssz_bytes(); @@ -387,7 +387,7 @@ impl BuilderHttpClient { &self, blinded_block: &SignedBlindedBeaconBlock, ) -> Result>, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -430,7 +430,7 @@ impl BuilderHttpClient { &self, blinded_block: &SignedBlindedBeaconBlock, ) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -480,7 +480,7 @@ impl BuilderHttpClient { parent_hash: ExecutionBlockHash, pubkey: &PublicKeyBytes, ) -> Result>>, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -521,7 +521,7 @@ impl BuilderHttpClient { /// `GET /eth/v1/builder/status` pub async fn get_builder_status(&self) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
diff --git a/beacon_node/execution_layer/src/engine_api/http.rs b/beacon_node/execution_layer/src/engine_api/http.rs index a8dbed34cee..8f7564ace6b 100644 --- a/beacon_node/execution_layer/src/engine_api/http.rs +++ b/beacon_node/execution_layer/src/engine_api/http.rs @@ -652,7 +652,7 @@ impl HttpJsonRpc { let mut request = self .client - .post(self.url.full.clone()) + .post(self.url.expose_full().clone()) .timeout(timeout) .header(CONTENT_TYPE, "application/json") .json(&body); diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index 2641a4c02e0..e8e6663d46a 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -30,7 +30,7 @@ use reqwest::{ }; pub use reqwest::{StatusCode, Url}; use reqwest_eventsource::{Event, EventSource}; -pub use sensitive_url::{SensitiveError, SensitiveUrl}; +pub use sensitive_url::SensitiveUrl; use serde::{Serialize, de::DeserializeOwned}; use ssz::Encode; use std::fmt; @@ -152,12 +152,6 @@ impl fmt::Display for BeaconNodeHttpClient { } } -impl AsRef for BeaconNodeHttpClient { - fn as_ref(&self) -> &str { - self.server.as_ref() - } -} - impl BeaconNodeHttpClient { pub fn new(server: SensitiveUrl, timeouts: Timeouts) -> Self { Self { @@ -178,10 +172,14 @@ impl BeaconNodeHttpClient { timeouts, } } + // Returns a reference to the `SensitiveUrl` of the server. + pub fn server(&self) -> &SensitiveUrl { + &self.server + } /// Return the path with the standard `/eth/vX` prefix applied. fn eth_path(&self, version: EndpointVersion) -> Result { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -2613,7 +2611,7 @@ impl BeaconNodeHttpClient { ids: &[u64], epoch: Epoch, ) -> Result>, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
diff --git a/common/eth2/src/lighthouse.rs b/common/eth2/src/lighthouse.rs index 4f9a049e44e..993c263cbfb 100644 --- a/common/eth2/src/lighthouse.rs +++ b/common/eth2/src/lighthouse.rs @@ -173,7 +173,7 @@ pub struct DepositLog { impl BeaconNodeHttpClient { /// `GET lighthouse/health` pub async fn get_lighthouse_health(&self) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -185,7 +185,7 @@ impl BeaconNodeHttpClient { /// `GET lighthouse/syncing` pub async fn get_lighthouse_syncing(&self) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -197,7 +197,7 @@ impl BeaconNodeHttpClient { /// `GET lighthouse/custody/info` pub async fn get_lighthouse_custody_info(&self) -> Result { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -210,7 +210,7 @@ impl BeaconNodeHttpClient { /// `POST lighthouse/custody/backfill` pub async fn post_lighthouse_custody_backfill(&self) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -231,7 +231,7 @@ impl BeaconNodeHttpClient { /// `GET lighthouse/proto_array` pub async fn get_lighthouse_proto_array(&self) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
@@ -246,7 +246,7 @@ impl BeaconNodeHttpClient { &self, epoch: Epoch, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -264,7 +264,7 @@ impl BeaconNodeHttpClient { epoch: Epoch, validator_id: ValidatorId, ) -> Result>, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -278,7 +278,7 @@ impl BeaconNodeHttpClient { /// `POST lighthouse/database/reconstruct` pub async fn post_lighthouse_database_reconstruct(&self) -> Result { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -291,7 +291,7 @@ impl BeaconNodeHttpClient { /// `POST lighthouse/add_peer` pub async fn post_lighthouse_add_peer(&self, req: AdminPeer) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -303,7 +303,7 @@ impl BeaconNodeHttpClient { /// `POST lighthouse/remove_peer` pub async fn post_lighthouse_remove_peer(&self, req: AdminPeer) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -323,7 +323,7 @@ impl BeaconNodeHttpClient { start_slot: Slot, end_slot: Slot, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
@@ -344,7 +344,7 @@ impl BeaconNodeHttpClient { start_epoch: Epoch, end_epoch: Epoch, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -366,7 +366,7 @@ impl BeaconNodeHttpClient { end_epoch: Epoch, target: String, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? diff --git a/common/eth2/src/lighthouse_vc/http_client.rs b/common/eth2/src/lighthouse_vc/http_client.rs index c4fddb97d7a..8c9d3397a8c 100644 --- a/common/eth2/src/lighthouse_vc/http_client.rs +++ b/common/eth2/src/lighthouse_vc/http_client.rs @@ -283,7 +283,7 @@ impl ValidatorClientHttpClient { /// `GET lighthouse/version` pub async fn get_lighthouse_version(&self) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -295,7 +295,7 @@ impl ValidatorClientHttpClient { /// `GET lighthouse/health` pub async fn get_lighthouse_health(&self) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -309,7 +309,7 @@ impl ValidatorClientHttpClient { pub async fn get_lighthouse_spec( &self, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
@@ -323,7 +323,7 @@ impl ValidatorClientHttpClient { pub async fn get_lighthouse_validators( &self, ) -> Result>, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -338,7 +338,7 @@ impl ValidatorClientHttpClient { &self, validator_pubkey: &PublicKeyBytes, ) -> Result>, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -354,7 +354,7 @@ impl ValidatorClientHttpClient { &self, validators: Vec, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -369,7 +369,7 @@ impl ValidatorClientHttpClient { &self, request: &CreateValidatorsMnemonicRequest, ) -> Result>, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -385,7 +385,7 @@ impl ValidatorClientHttpClient { &self, request: &KeystoreValidatorsPostRequest, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -401,7 +401,7 @@ impl ValidatorClientHttpClient { &self, request: &[Web3SignerValidatorRequest], ) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
@@ -424,7 +424,7 @@ impl ValidatorClientHttpClient { prefer_builder_proposals: Option, graffiti: Option, ) -> Result<(), Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -451,7 +451,7 @@ impl ValidatorClientHttpClient { &self, req: &DeleteKeystoresRequest, ) -> Result { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? @@ -462,7 +462,7 @@ impl ValidatorClientHttpClient { } fn make_keystores_url(&self) -> Result { - let mut url = self.server.full.clone(); + let mut url = self.server.expose_full().clone(); url.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? .push("eth") @@ -472,7 +472,7 @@ impl ValidatorClientHttpClient { } fn make_remotekeys_url(&self) -> Result { - let mut url = self.server.full.clone(); + let mut url = self.server.expose_full().clone(); url.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? .push("eth") @@ -482,7 +482,7 @@ impl ValidatorClientHttpClient { } fn make_fee_recipient_url(&self, pubkey: &PublicKeyBytes) -> Result { - let mut url = self.server.full.clone(); + let mut url = self.server.expose_full().clone(); url.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? .push("eth") @@ -494,7 +494,7 @@ impl ValidatorClientHttpClient { } fn make_graffiti_url(&self, pubkey: &PublicKeyBytes) -> Result { - let mut url = self.server.full.clone(); + let mut url = self.server.expose_full().clone(); url.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? 
.push("eth") @@ -506,7 +506,7 @@ impl ValidatorClientHttpClient { } fn make_gas_limit_url(&self, pubkey: &PublicKeyBytes) -> Result { - let mut url = self.server.full.clone(); + let mut url = self.server.expose_full().clone(); url.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? .push("eth") @@ -519,7 +519,7 @@ impl ValidatorClientHttpClient { /// `GET lighthouse/auth` pub async fn get_auth(&self) -> Result { - let mut url = self.server.full.clone(); + let mut url = self.server.expose_full().clone(); url.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? .push("lighthouse") @@ -635,7 +635,7 @@ impl ValidatorClientHttpClient { pubkey: &PublicKeyBytes, epoch: Option, ) -> Result, Error> { - let mut path = self.server.full.clone(); + let mut path = self.server.expose_full().clone(); path.path_segments_mut() .map_err(|()| Error::InvalidUrl(self.server.clone()))? diff --git a/common/monitoring_api/src/lib.rs b/common/monitoring_api/src/lib.rs index 465618c9a82..03b93f2faae 100644 --- a/common/monitoring_api/src/lib.rs +++ b/common/monitoring_api/src/lib.rs @@ -195,7 +195,7 @@ impl MonitoringHttpClient { endpoint = %self.monitoring_endpoint, "Sending metrics to remote endpoint" ); - self.post(self.monitoring_endpoint.full.clone(), &metrics) + self.post(self.monitoring_endpoint.expose_full().clone(), &metrics) .await } } diff --git a/common/sensitive_url/Cargo.toml b/common/sensitive_url/Cargo.toml index ff562097225..3793cc51398 100644 --- a/common/sensitive_url/Cargo.toml +++ b/common/sensitive_url/Cargo.toml @@ -5,6 +5,12 @@ authors = ["Mac L "] edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +serde = ["dep:serde"] + [dependencies] -serde = { workspace = true } +serde = { workspace = true, optional = true } url = { workspace = true } + +[dev-dependencies] +serde_json = { workspace = true } diff --git 
a/common/sensitive_url/src/lib.rs b/common/sensitive_url/src/lib.rs index 64ad070a1fd..3f9240268dd 100644 --- a/common/sensitive_url/src/lib.rs +++ b/common/sensitive_url/src/lib.rs @@ -1,26 +1,69 @@ +#[cfg(feature = "serde")] use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; use std::fmt; use std::str::FromStr; use url::Url; +/// Errors that can occur when creating or parsing a `SensitiveUrl`. #[derive(Debug)] -pub enum SensitiveError { +pub enum Error { + /// The URL cannot be used as a base URL. InvalidUrl(String), + /// Failed to parse the URL string. ParseError(url::ParseError), + /// Failed to redact sensitive information from the URL. RedactError(String), } -impl fmt::Display for SensitiveError { +impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?}", self) + match self { + Error::InvalidUrl(msg) => write!(f, "Invalid URL: {}", msg), + Error::ParseError(e) => write!(f, "Parse error: {}", e), + Error::RedactError(msg) => write!(f, "Redact error: {}", msg), + } + } +} + +impl std::error::Error for Error { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self { + Error::ParseError(e) => Some(e), + _ => None, + } } } -// Wrapper around Url which provides a custom `Display` implementation to protect user secrets. -#[derive(Clone, PartialEq)] +/// A URL wrapper that redacts sensitive information in `Display` and `Debug` output. +/// +/// This type stores both the full URL (with credentials, paths, and query parameters) +/// and a redacted version (containing only the scheme, host, and port). The redacted +/// version is used when displaying or debugging to prevent accidental leakage of +/// credentials in logs. +/// +/// Note that `SensitiveUrl` specifically does NOT implement `Deref`, meaning you cannot call +/// `Url` methods like `.password()` or `.scheme()` directly on `SensitiveUrl`. You must first +/// explicitly call `.expose_full()`. 
+/// +/// # Examples +/// +/// ``` +/// use sensitive_url::SensitiveUrl; +/// +/// let url = SensitiveUrl::parse("https://user:pass@example.com/api?token=secret").unwrap(); +/// +/// // Display shows only the redacted version: +/// assert_eq!(url.to_string(), "https://example.com/"); +/// +/// // But you can still access the full URL when needed: +/// let full = url.expose_full(); +/// assert_eq!(full.to_string(), "https://user:pass@example.com/api?token=secret"); +/// assert_eq!(full.password(), Some("pass")); +/// ``` +#[derive(Clone, PartialEq, Eq, Hash)] pub struct SensitiveUrl { - pub full: Url, - pub redacted: String, + full: Url, + redacted: String, } impl fmt::Display for SensitiveUrl { @@ -31,16 +74,14 @@ impl fmt::Display for SensitiveUrl { impl fmt::Debug for SensitiveUrl { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.redacted.fmt(f) - } -} - -impl AsRef for SensitiveUrl { - fn as_ref(&self) -> &str { - self.redacted.as_str() + f.debug_struct("SensitiveUrl") + .field("redacted", &self.redacted) + // Maintains traditional `Debug` format but hides the 'full' field. + .finish_non_exhaustive() } } +#[cfg(feature = "serde")] impl Serialize for SensitiveUrl { fn serialize(&self, serializer: S) -> Result where @@ -50,6 +91,7 @@ impl Serialize for SensitiveUrl { } } +#[cfg(feature = "serde")] impl<'de> Deserialize<'de> for SensitiveUrl { fn deserialize(deserializer: D) -> Result where @@ -62,7 +104,7 @@ impl<'de> Deserialize<'de> for SensitiveUrl { } impl FromStr for SensitiveUrl { - type Err = SensitiveError; + type Err = Error; fn from_str(s: &str) -> Result { Self::parse(s) @@ -70,26 +112,28 @@ impl FromStr for SensitiveUrl { } impl SensitiveUrl { - pub fn parse(url: &str) -> Result { - let surl = Url::parse(url).map_err(SensitiveError::ParseError)?; + /// Attempts to parse a `&str` into a `SensitiveUrl`. 
+ pub fn parse(url: &str) -> Result { + let surl = Url::parse(url).map_err(Error::ParseError)?; SensitiveUrl::new(surl) } - pub fn new(full: Url) -> Result { + /// Creates a `SensitiveUrl` from an existing `Url`. + pub fn new(full: Url) -> Result { let mut redacted = full.clone(); redacted .path_segments_mut() - .map_err(|_| SensitiveError::InvalidUrl("URL cannot be a base.".to_string()))? + .map_err(|_| Error::InvalidUrl("URL cannot be a base.".to_string()))? .clear(); redacted.set_query(None); if redacted.has_authority() { - redacted.set_username("").map_err(|_| { - SensitiveError::RedactError("Unable to redact username.".to_string()) - })?; - redacted.set_password(None).map_err(|_| { - SensitiveError::RedactError("Unable to redact password.".to_string()) - })?; + redacted + .set_username("") + .map_err(|_| Error::RedactError("Unable to redact username.".to_string()))?; + redacted + .set_password(None) + .map_err(|_| Error::RedactError("Unable to redact password.".to_string()))?; } Ok(Self { @@ -97,6 +141,16 @@ impl SensitiveUrl { redacted: redacted.to_string(), }) } + + /// Returns a reference to the full, unredacted URL. + pub fn expose_full(&self) -> &Url { + &self.full + } + + /// Returns the redacted URL as a `&str`. 
+ pub fn redacted(&self) -> &str { + &self.redacted + } } #[cfg(test)] @@ -105,16 +159,81 @@ mod tests { #[test] fn redact_remote_url() { - let full = "https://project:secret@example.com/example?somequery"; + let full = "https://user:pass@example.com/example?somequery"; let surl = SensitiveUrl::parse(full).unwrap(); assert_eq!(surl.to_string(), "https://example.com/"); - assert_eq!(surl.full.to_string(), full); + assert_eq!(surl.expose_full().to_string(), full); } + #[test] fn redact_localhost_url() { - let full = "http://localhost:5052/"; + let full = "http://user:pass@localhost:5052/"; let surl = SensitiveUrl::parse(full).unwrap(); assert_eq!(surl.to_string(), "http://localhost:5052/"); - assert_eq!(surl.full.to_string(), full); + assert_eq!(surl.expose_full().to_string(), full); + } + + #[test] + fn test_no_credentials() { + let full = "https://example.com/path"; + let surl = SensitiveUrl::parse(full).unwrap(); + assert_eq!(surl.to_string(), "https://example.com/"); + assert_eq!(surl.expose_full().to_string(), full); + } + + #[test] + fn test_display() { + let full = "https://user:pass@example.com/api?token=secret"; + let surl = SensitiveUrl::parse(full).unwrap(); + + let display = surl.to_string(); + assert_eq!(display, "https://example.com/"); + } + + #[test] + fn test_debug() { + let full = "https://user:pass@example.com/api?token=secret"; + let surl = SensitiveUrl::parse(full).unwrap(); + + let debug = format!("{:?}", surl); + + assert_eq!( + debug, + "SensitiveUrl { redacted: \"https://example.com/\", .. 
}" + ); + } + + #[cfg(feature = "serde")] + mod serde_tests { + use super::*; + + #[test] + fn test_serialize() { + let full = "https://user:pass@example.com/api?token=secret"; + let surl = SensitiveUrl::parse(full).unwrap(); + + let json = serde_json::to_string(&surl).unwrap(); + assert_eq!(json, format!("\"{}\"", full)); + } + + #[test] + fn test_deserialize() { + let full = "https://user:pass@example.com/api?token=secret"; + let json = format!("\"{}\"", full); + + let surl: SensitiveUrl = serde_json::from_str(&json).unwrap(); + assert_eq!(surl.expose_full().as_str(), full); + } + + #[test] + fn test_roundtrip() { + let full = "https://user:pass@example.com/api?token=secret"; + let original = SensitiveUrl::parse(full).unwrap(); + + let json = serde_json::to_string(&original).unwrap(); + let deserialized: SensitiveUrl = serde_json::from_str(&json).unwrap(); + + assert_eq!(deserialized.expose_full(), original.expose_full()); + } } } diff --git a/lighthouse/tests/beacon_node.rs b/lighthouse/tests/beacon_node.rs index 8342b021738..207324ea33f 100644 --- a/lighthouse/tests/beacon_node.rs +++ b/lighthouse/tests/beacon_node.rs @@ -481,7 +481,12 @@ fn run_execution_jwt_secret_key_is_persisted() { .with_config(|config| { let config = config.execution_layer.as_ref().unwrap(); assert_eq!( - config.execution_endpoint.as_ref().unwrap().full.to_string(), + config + .execution_endpoint + .as_ref() + .unwrap() + .expose_full() + .to_string(), "http://localhost:8551/" ); let mut file_jwt_secret_key = String::new(); @@ -532,7 +537,12 @@ fn bellatrix_jwt_secrets_flag() { .with_config(|config| { let config = config.execution_layer.as_ref().unwrap(); assert_eq!( - config.execution_endpoint.as_ref().unwrap().full.to_string(), + config + .execution_endpoint + .as_ref() + .unwrap() + .expose_full() + .to_string(), "http://localhost:8551/" ); assert_eq!( diff --git a/lighthouse/tests/validator_client.rs b/lighthouse/tests/validator_client.rs index 398c6fbd6b2..ee3e910b369 100644 --- 
a/lighthouse/tests/validator_client.rs +++ b/lighthouse/tests/validator_client.rs @@ -109,12 +109,12 @@ fn beacon_nodes_flag() { .run() .with_config(|config| { assert_eq!( - config.beacon_nodes[0].full.to_string(), + config.beacon_nodes[0].expose_full().to_string(), "http://localhost:1001/" ); assert_eq!(config.beacon_nodes[0].to_string(), "http://localhost:1001/"); assert_eq!( - config.beacon_nodes[1].full.to_string(), + config.beacon_nodes[1].expose_full().to_string(), "https://project:secret@infura.io/" ); assert_eq!(config.beacon_nodes[1].to_string(), "https://infura.io/"); diff --git a/testing/execution_engine_integration/src/test_rig.rs b/testing/execution_engine_integration/src/test_rig.rs index 05ec0a2f191..9e45a788704 100644 --- a/testing/execution_engine_integration/src/test_rig.rs +++ b/testing/execution_engine_integration/src/test_rig.rs @@ -64,7 +64,7 @@ async fn import_and_unlock(http_url: SensitiveUrl, priv_keys: &[&str], password: let client = Client::builder().build().unwrap(); let request = client - .post(http_url.full.clone()) + .post(http_url.expose_full().clone()) .header(CONTENT_TYPE, "application/json") .json(&body); @@ -90,7 +90,7 @@ async fn import_and_unlock(http_url: SensitiveUrl, priv_keys: &[&str], password: ); let request = client - .post(http_url.full.clone()) + .post(http_url.expose_full().clone()) .header(CONTENT_TYPE, "application/json") .json(&body); diff --git a/validator_client/beacon_node_fallback/src/lib.rs b/validator_client/beacon_node_fallback/src/lib.rs index a3f60d2de04..0f13d8c8b7b 100644 --- a/validator_client/beacon_node_fallback/src/lib.rs +++ b/validator_client/beacon_node_fallback/src/lib.rs @@ -656,7 +656,7 @@ impl BeaconNodeFallback { R: Future>, Err: Debug, { - inc_counter_vec(&ENDPOINT_REQUESTS, &[candidate.as_ref()]); + inc_counter_vec(&ENDPOINT_REQUESTS, &[candidate.server().redacted()]); // There exists a race condition where `func` may be called when the candidate is // actually not ready. 
We deem this an acceptable inefficiency. @@ -668,7 +668,7 @@ impl BeaconNodeFallback { error = ?e, "Request to beacon node failed" ); - inc_counter_vec(&ENDPOINT_ERRORS, &[candidate.as_ref()]); + inc_counter_vec(&ENDPOINT_ERRORS, &[candidate.server().redacted()]); Err((candidate.to_string(), Error::RequestFailed(e))) } } diff --git a/validator_manager/src/exit_validators.rs b/validator_manager/src/exit_validators.rs index a6bbf05fb4a..4a398793ce1 100644 --- a/validator_manager/src/exit_validators.rs +++ b/validator_manager/src/exit_validators.rs @@ -191,8 +191,7 @@ async fn run(config: ExitConfig) -> Result<(), String> { // Only publish the voluntary exit if the --beacon-node flag is present if let Some(ref beacon_url) = beacon_url { let beacon_node = BeaconNodeHttpClient::new( - SensitiveUrl::parse(beacon_url.as_ref()) - .map_err(|e| format!("Failed to parse beacon http server: {:?}", e))?, + beacon_url.clone(), Timeouts::set_all(Duration::from_secs(12)), ); @@ -399,7 +398,7 @@ mod test { }) .collect(); - let beacon_url = SensitiveUrl::parse(self.beacon_node.client.as_ref()).unwrap(); + let beacon_url = self.beacon_node.client.server().clone(); let validators_to_exit = index_of_validators_to_exit .iter() diff --git a/validator_manager/src/list_validators.rs b/validator_manager/src/list_validators.rs index b064982adf4..082894a995d 100644 --- a/validator_manager/src/list_validators.rs +++ b/validator_manager/src/list_validators.rs @@ -134,8 +134,7 @@ async fn run(config: ListConfig) -> Result Date: Wed, 5 Nov 2025 23:02:21 +1100 Subject: [PATCH 18/74] Remove ecdsa feature of libp2p (#8374) This compiles, is there any reason to keep `ecdsa`? 
CC @jxs Co-Authored-By: Michael Sproul --- Cargo.lock | 34 ----------------------- beacon_node/lighthouse_network/Cargo.toml | 1 - 2 files changed, 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fad3ad2ffc2..c8c14c7257a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2236,7 +2236,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", - "pem-rfc7468", "zeroize", ] @@ -2722,7 +2721,6 @@ dependencies = [ "ff 0.13.1", "generic-array 0.14.7", "group 0.13.0", - "pem-rfc7468", "pkcs8 0.10.2", "rand_core 0.6.4", "sec1 0.7.3", @@ -5300,10 +5298,8 @@ dependencies = [ "hkdf", "k256 0.13.4", "multihash", - "p256", "quick-protobuf", "rand 0.8.5", - "sec1 0.7.3", "sha2 0.10.8", "thiserror 2.0.12", "tracing", @@ -6774,18 +6770,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa 0.16.9", - "elliptic-curve 0.13.8", - "primeorder", - "sha2 0.10.8", -] - [[package]] name = "pairing" version = "0.23.0" @@ -6951,15 +6935,6 @@ dependencies = [ "serde", ] -[[package]] -name = "pem-rfc7468" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" -dependencies = [ - "base64ct", -] - [[package]] name = "percent-encoding" version = "2.3.1" @@ -7182,15 +7157,6 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "primeorder" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" -dependencies = [ - "elliptic-curve 0.13.8", -] - [[package]] name = "primitive-types" version = "0.10.1" diff --git a/beacon_node/lighthouse_network/Cargo.toml 
b/beacon_node/lighthouse_network/Cargo.toml index 7e69f6770bf..035452e4b2f 100644 --- a/beacon_node/lighthouse_network/Cargo.toml +++ b/beacon_node/lighthouse_network/Cargo.toml @@ -64,7 +64,6 @@ features = [ "plaintext", "secp256k1", "macros", - "ecdsa", "metrics", "quic", "upnp", From e6e3d783ad50fdec81d9b2c3c2a5755b659f2fa3 Mon Sep 17 00:00:00 2001 From: lmnzx Date: Wed, 5 Nov 2025 18:17:36 +0530 Subject: [PATCH 19/74] CI workflows to use warpbuild ci runner (#8343) Self hosted GitHub Runners review and improvements local testnet workflow now uses warpbuild ci runner Co-Authored-By: lemon Co-Authored-By: antondlr --- .github/workflows/local-testnet.yml | 18 +++++------ .github/workflows/test-suite.yml | 46 ++++------------------------- 2 files changed, 15 insertions(+), 49 deletions(-) diff --git a/.github/workflows/local-testnet.yml b/.github/workflows/local-testnet.yml index c6f9c075dbd..c129c0ec95c 100644 --- a/.github/workflows/local-testnet.yml +++ b/.github/workflows/local-testnet.yml @@ -14,7 +14,7 @@ concurrency: jobs: dockerfile-ubuntu: - runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "CI", "large"]') || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v5 @@ -31,7 +31,7 @@ jobs: retention-days: 3 run-local-testnet: - runs-on: ubuntu-22.04 + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} needs: dockerfile-ubuntu steps: - uses: actions/checkout@v5 @@ -89,7 +89,7 @@ jobs: ${{ steps.assertoor_test_result.outputs.failed_test_details }} EOF ) - + echo "Test Result: $test_result" echo "$test_status" if ! 
[ "$test_result" == "success" ]; then @@ -100,7 +100,7 @@ jobs: doppelganger-protection-success-test: needs: dockerfile-ubuntu - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 @@ -136,7 +136,7 @@ jobs: doppelganger-protection-failure-test: needs: dockerfile-ubuntu - runs-on: ubuntu-22.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 @@ -173,7 +173,7 @@ jobs: # Tests checkpoint syncing to a live network (current fork) and a running devnet (usually next scheduled fork) checkpoint-sync-test: name: checkpoint-sync-test-${{ matrix.network }} - runs-on: ubuntu-latest + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} needs: dockerfile-ubuntu if: contains(github.event.pull_request.labels.*.name, 'syncing') continue-on-error: true @@ -216,7 +216,7 @@ jobs: # Test syncing from genesis on a local testnet. Aims to cover forward syncing both short and long distances. genesis-sync-test: name: genesis-sync-test-${{ matrix.fork }}-${{ matrix.offline_secs }}s - runs-on: ubuntu-latest + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} needs: dockerfile-ubuntu strategy: matrix: @@ -259,7 +259,7 @@ jobs: # a PR is safe to merge. New jobs should be added here. 
local-testnet-success: name: local-testnet-success - runs-on: ubuntu-latest + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} needs: [ 'dockerfile-ubuntu', 'run-local-testnet', @@ -272,4 +272,4 @@ jobs: - name: Check that success job is dependent on all others run: | exclude_jobs='checkpoint-sync-test' - ./scripts/ci/check-success-job.sh ./.github/workflows/local-testnet.yml local-testnet-success "$exclude_jobs" + ./scripts/ci/check-success-job.sh ./.github/workflows/local-testnet.yml local-testnet-success "$exclude_jobs" diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index 0cdd8211da8..cc7282c3517 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -22,8 +22,6 @@ env: # NOTE: this token is a personal access token on Jimmy's account due to the default GITHUB_TOKEN # not having access to other repositories. We should eventually devise a better solution here. LIGHTHOUSE_GITHUB_TOKEN: ${{ secrets.LIGHTHOUSE_GITHUB_TOKEN }} - # Enable self-hosted runners for the sigp repo only. - SELF_HOSTED_RUNNERS: ${{ github.repository == 'sigp/lighthouse' }} # Disable incremental compilation CARGO_INCREMENTAL: 0 # Enable portable to prevent issues with caching `blst` for the wrong CPU type @@ -78,8 +76,7 @@ jobs: name: release-tests-ubuntu needs: [check-labels] if: needs.check-labels.outputs.skip_ci != 'true' - # Use self-hosted runners only on the sigp repo. - runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "CI", "large"]') || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v5 # Set Java version to 21. (required since Web3Signer 24.12.0). 
@@ -88,7 +85,6 @@ jobs: distribution: 'temurin' java-version: '21' - name: Get latest version of stable Rust - if: env.SELF_HOSTED_RUNNERS == 'false' uses: moonrepo/setup-rust@v1 with: channel: stable @@ -97,7 +93,6 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Install Foundry (anvil) - if: env.SELF_HOSTED_RUNNERS == 'false' uses: foundry-rs/foundry-toolchain@v1 with: version: nightly-ca67d15f4abd46394b324c50e21e66f306a1162d @@ -111,14 +106,12 @@ jobs: name: beacon-chain-tests needs: [check-labels] if: needs.check-labels.outputs.skip_ci != 'true' - # Use self-hosted runners only on the sigp repo. - runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "CI", "large"]') || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v5 - name: Get latest version of stable Rust - if: env.SELF_HOSTED_RUNNERS == 'false' uses: moonrepo/setup-rust@v1 with: channel: stable @@ -126,22 +119,16 @@ jobs: bins: cargo-nextest - name: Run beacon_chain tests for all known forks run: make test-beacon-chain - - name: Show cache stats - if: env.SELF_HOSTED_RUNNERS == 'true' - continue-on-error: true - run: sccache --show-stats http-api-tests: name: http-api-tests needs: [check-labels] if: needs.check-labels.outputs.skip_ci != 'true' - # Use self-hosted runners only on the sigp repo. 
- runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "CI", "large"]') || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v5 - name: Get latest version of stable Rust - if: env.SELF_HOSTED_RUNNERS == 'false' uses: moonrepo/setup-rust@v1 with: channel: stable @@ -149,10 +136,6 @@ jobs: bins: cargo-nextest - name: Run http_api tests for all recent forks run: make test-http-api - - name: Show cache stats - if: env.SELF_HOSTED_RUNNERS == 'true' - continue-on-error: true - run: sccache --show-stats op-pool-tests: name: op-pool-tests needs: [check-labels] @@ -220,29 +203,22 @@ jobs: name: debug-tests-ubuntu needs: [check-labels] if: needs.check-labels.outputs.skip_ci != 'true' - # Use self-hosted runners only on the sigp repo. - runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "CI", "large"]') || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v5 - name: Get latest version of stable Rust - if: env.SELF_HOSTED_RUNNERS == 'false' uses: moonrepo/setup-rust@v1 with: channel: stable bins: cargo-nextest - name: Install Foundry (anvil) - if: env.SELF_HOSTED_RUNNERS == 'false' uses: foundry-rs/foundry-toolchain@v1 with: version: nightly-ca67d15f4abd46394b324c50e21e66f306a1162d - name: Run tests in debug run: make test-debug - - name: Show cache stats - if: env.SELF_HOSTED_RUNNERS == 'true' - continue-on-error: true - run: sccache --show-stats state-transition-vectors-ubuntu: name: state-transition-vectors-ubuntu needs: [check-labels] @@ -261,14 +237,12 @@ jobs: name: ef-tests-ubuntu needs: [check-labels] if: needs.check-labels.outputs.skip_ci != 'true' - # Use self-hosted runners only on the 
sigp repo. - runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "CI", "small"]') || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - uses: actions/checkout@v5 - name: Get latest version of stable Rust - if: env.SELF_HOSTED_RUNNERS == 'false' uses: moonrepo/setup-rust@v1 with: channel: stable @@ -276,10 +250,6 @@ jobs: bins: cargo-nextest - name: Run consensus-spec-tests with blst and fake_crypto run: make test-ef - - name: Show cache stats - if: env.SELF_HOSTED_RUNNERS == 'true' - continue-on-error: true - run: sccache --show-stats basic-simulator-ubuntu: name: basic-simulator-ubuntu needs: [check-labels] @@ -328,11 +298,10 @@ jobs: name: execution-engine-integration-ubuntu needs: [check-labels] if: needs.check-labels.outputs.skip_ci != 'true' - runs-on: ${{ github.repository == 'sigp/lighthouse' && fromJson('["self-hosted", "linux", "CI", "small"]') || 'ubuntu-latest' }} + runs-on: ${{ github.repository == 'sigp/lighthouse' && 'warp-ubuntu-latest-x64-8x' || 'ubuntu-latest' }} steps: - uses: actions/checkout@v5 - name: Get latest version of stable Rust - if: env.SELF_HOSTED_RUNNERS == 'false' uses: moonrepo/setup-rust@v1 with: channel: stable @@ -340,9 +309,6 @@ jobs: cache: false env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Add go compiler to $PATH - if: env.SELF_HOSTED_RUNNERS == 'true' - run: echo "/usr/local/go/bin" >> $GITHUB_PATH - name: Run exec engine integration tests in release run: make test-exec-engine check-code: From 0090b35ee048dac346063d5a5d9fb15510002f6e Mon Sep 17 00:00:00 2001 From: Mac L Date: Thu, 6 Nov 2025 08:17:45 +0400 Subject: [PATCH 20/74] Remove `sensitive_url` and import from `crates.io` (#8377) Use the recently published `sensitive_url` and remove it from Lighthouse Co-Authored-By: Mac L --- Cargo.lock | 3 +- Cargo.toml | 3 +- 
common/sensitive_url/Cargo.toml | 16 --- common/sensitive_url/src/lib.rs | 239 -------------------------------- testing/simulator/Cargo.toml | 2 +- 5 files changed, 4 insertions(+), 259 deletions(-) delete mode 100644 common/sensitive_url/Cargo.toml delete mode 100644 common/sensitive_url/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index c8c14c7257a..1d1108b1d0b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8449,9 +8449,10 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "sensitive_url" version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb7b0221fa9905eec4163dbf7660b1876cc95663af1deddc3e19ebe49167c58c" dependencies = [ "serde", - "serde_json", "url", ] diff --git a/Cargo.toml b/Cargo.toml index d09b0fcd80c..4d357816d9a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,7 +39,6 @@ members = [ "common/network_utils", "common/oneshot_broadcast", "common/pretty_reqwest_error", - "common/sensitive_url", "common/slot_clock", "common/system_health", "common/target_check", @@ -225,7 +224,7 @@ rpds = "0.11" rusqlite = { version = "0.28", features = ["bundled"] } rust_eth_kzg = "0.9" safe_arith = "0.1" -sensitive_url = { path = "common/sensitive_url", features = ["serde"] } +sensitive_url = { version = "0.1", features = ["serde"] } serde = { version = "1", features = ["derive"] } serde_json = "1" serde_repr = "0.1" diff --git a/common/sensitive_url/Cargo.toml b/common/sensitive_url/Cargo.toml deleted file mode 100644 index 3793cc51398..00000000000 --- a/common/sensitive_url/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "sensitive_url" -version = "0.1.0" -authors = ["Mac L "] -edition = { workspace = true } -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[features] -serde = ["dep:serde"] - -[dependencies] -serde = { workspace = true, optional = true } -url = { workspace = true } - -[dev-dependencies] -serde_json = 
{ workspace = true } diff --git a/common/sensitive_url/src/lib.rs b/common/sensitive_url/src/lib.rs deleted file mode 100644 index 3f9240268dd..00000000000 --- a/common/sensitive_url/src/lib.rs +++ /dev/null @@ -1,239 +0,0 @@ -#[cfg(feature = "serde")] -use serde::{Deserialize, Deserializer, Serialize, Serializer, de}; -use std::fmt; -use std::str::FromStr; -use url::Url; - -/// Errors that can occur when creating or parsing a `SensitiveUrl`. -#[derive(Debug)] -pub enum Error { - /// The URL cannot be used as a base URL. - InvalidUrl(String), - /// Failed to parse the URL string. - ParseError(url::ParseError), - /// Failed to redact sensitive information from the URL. - RedactError(String), -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Error::InvalidUrl(msg) => write!(f, "Invalid URL: {}", msg), - Error::ParseError(e) => write!(f, "Parse error: {}", e), - Error::RedactError(msg) => write!(f, "Redact error: {}", msg), - } - } -} - -impl std::error::Error for Error { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - match self { - Error::ParseError(e) => Some(e), - _ => None, - } - } -} - -/// A URL wrapper that redacts sensitive information in `Display` and `Debug` output. -/// -/// This type stores both the full URL (with credentials, paths, and query parameters) -/// and a redacted version (containing only the scheme, host, and port). The redacted -/// version is used when displaying or debugging to prevent accidental leakage of -/// credentials in logs. -/// -/// Note that `SensitiveUrl` specifically does NOT implement `Deref`, meaning you cannot call -/// `Url` methods like `.password()` or `.scheme()` directly on `SensitiveUrl`. You must first -/// explicitly call `.expose_full()`. 
-/// -/// # Examples -/// -/// ``` -/// use sensitive_url::SensitiveUrl; -/// -/// let url = SensitiveUrl::parse("https://user:pass@example.com/api?token=secret").unwrap(); -/// -/// // Display shows only the redacted version: -/// assert_eq!(url.to_string(), "https://example.com/"); -/// -/// // But you can still access the full URL when needed: -/// let full = url.expose_full(); -/// assert_eq!(full.to_string(), "https://user:pass@example.com/api?token=secret"); -/// assert_eq!(full.password(), Some("pass")); -/// ``` -#[derive(Clone, PartialEq, Eq, Hash)] -pub struct SensitiveUrl { - full: Url, - redacted: String, -} - -impl fmt::Display for SensitiveUrl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.redacted.fmt(f) - } -} - -impl fmt::Debug for SensitiveUrl { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SensitiveUrl") - .field("redacted", &self.redacted) - // Maintains traditional `Debug` format but hides the 'full' field. - .finish_non_exhaustive() - } -} - -#[cfg(feature = "serde")] -impl Serialize for SensitiveUrl { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - serializer.serialize_str(self.full.as_ref()) - } -} - -#[cfg(feature = "serde")] -impl<'de> Deserialize<'de> for SensitiveUrl { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let s: String = Deserialize::deserialize(deserializer)?; - SensitiveUrl::parse(&s) - .map_err(|e| de::Error::custom(format!("Failed to deserialize sensitive URL {:?}", e))) - } -} - -impl FromStr for SensitiveUrl { - type Err = Error; - - fn from_str(s: &str) -> Result { - Self::parse(s) - } -} - -impl SensitiveUrl { - /// Attempts to parse a `&str` into a `SensitiveUrl`. - pub fn parse(url: &str) -> Result { - let surl = Url::parse(url).map_err(Error::ParseError)?; - SensitiveUrl::new(surl) - } - - /// Creates a `SensitiveUrl` from an existing `Url`. 
- pub fn new(full: Url) -> Result { - let mut redacted = full.clone(); - redacted - .path_segments_mut() - .map_err(|_| Error::InvalidUrl("URL cannot be a base.".to_string()))? - .clear(); - redacted.set_query(None); - - if redacted.has_authority() { - redacted - .set_username("") - .map_err(|_| Error::RedactError("Unable to redact username.".to_string()))?; - redacted - .set_password(None) - .map_err(|_| Error::RedactError("Unable to redact password.".to_string()))?; - } - - Ok(Self { - full, - redacted: redacted.to_string(), - }) - } - - /// Returns a reference to the full, unredacted URL. - pub fn expose_full(&self) -> &Url { - &self.full - } - - /// Returns the redacted URL as a `&str`. - pub fn redacted(&self) -> &str { - &self.redacted - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn redact_remote_url() { - let full = "https://user:pass@example.com/example?somequery"; - let surl = SensitiveUrl::parse(full).unwrap(); - assert_eq!(surl.to_string(), "https://example.com/"); - assert_eq!(surl.expose_full().to_string(), full); - } - - #[test] - fn redact_localhost_url() { - let full = "http://user:pass@localhost:5052/"; - let surl = SensitiveUrl::parse(full).unwrap(); - assert_eq!(surl.to_string(), "http://localhost:5052/"); - assert_eq!(surl.expose_full().to_string(), full); - } - - #[test] - fn test_no_credentials() { - let full = "https://example.com/path"; - let surl = SensitiveUrl::parse(full).unwrap(); - assert_eq!(surl.to_string(), "https://example.com/"); - assert_eq!(surl.expose_full().to_string(), full); - } - - #[test] - fn test_display() { - let full = "https://user:pass@example.com/api?token=secret"; - let surl = SensitiveUrl::parse(full).unwrap(); - - let display = surl.to_string(); - assert_eq!(display, "https://example.com/"); - } - - #[test] - fn test_debug() { - let full = "https://user:pass@example.com/api?token=secret"; - let surl = SensitiveUrl::parse(full).unwrap(); - - let debug = format!("{:?}", surl); - - assert_eq!( - 
debug, - "SensitiveUrl { redacted: \"https://example.com/\", .. }" - ); - } - - #[cfg(feature = "serde")] - mod serde_tests { - use super::*; - - #[test] - fn test_serialize() { - let full = "https://user:pass@example.com/api?token=secret"; - let surl = SensitiveUrl::parse(full).unwrap(); - - let json = serde_json::to_string(&surl).unwrap(); - assert_eq!(json, format!("\"{}\"", full)); - } - - #[test] - fn test_deserialize() { - let full = "https://user:pass@example.com/api?token=secret"; - let json = format!("\"{}\"", full); - - let surl: SensitiveUrl = serde_json::from_str(&json).unwrap(); - assert_eq!(surl.expose_full().as_str(), full); - } - - #[test] - fn test_roundtrip() { - let full = "https://user:pass@example.com/api?token=secret"; - let original = SensitiveUrl::parse(full).unwrap(); - - let json = serde_json::to_string(&original).unwrap(); - let deserialized: SensitiveUrl = serde_json::from_str(&json).unwrap(); - - assert_eq!(deserialized.expose_full(), original.expose_full()); - } - } -} diff --git a/testing/simulator/Cargo.toml b/testing/simulator/Cargo.toml index cd23138a1cc..54035f2e827 100644 --- a/testing/simulator/Cargo.toml +++ b/testing/simulator/Cargo.toml @@ -15,7 +15,7 @@ logging = { workspace = true } node_test_rig = { path = "../node_test_rig" } parking_lot = { workspace = true } rayon = { workspace = true } -sensitive_url = { path = "../../common/sensitive_url" } +sensitive_url = { workspace = true } serde_json = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } From 2c1f1c1605a736ee054c0e0518ebcedbb616f571 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Javier=20Ch=C3=A1varri?= Date: Thu, 6 Nov 2025 15:13:57 +0100 Subject: [PATCH 21/74] Migrate derivative to educe (#8125) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #7001. Mostly mechanical replacement of `derivative` attributes with `educe` ones. ### **Attribute Syntax Changes** ```rust // Bounds: = "..." → (...) 
#[derivative(Hash(bound = "E: EthSpec"))] #[educe(Hash(bound(E: EthSpec)))] // Ignore: = "ignore" → (ignore) #[derivative(PartialEq = "ignore")] #[educe(PartialEq(ignore))] // Default values: value = "..." → expression = ... #[derivative(Default(value = "ForkName::Base"))] #[educe(Default(expression = ForkName::Base))] // Methods: format_with/compare_with = "..." → method(...) #[derivative(Debug(format_with = "fmt_peer_set_as_len"))] #[educe(Debug(method(fmt_peer_set_as_len)))] // Empty bounds: removed entirely, educe can infer appropriate bounds #[derivative(Default(bound = ""))] #[educe(Default)] // Transparent debug: manual implementation (educe doesn't support it) #[derivative(Debug = "transparent")] // Replaced with manual Debug impl that delegates to inner field ``` **Note**: Some bounds use strings (`bound("E: EthSpec")`) for superstruct compatibility (`expected ','` errors). Co-Authored-By: Javier Chávarri Co-Authored-By: Mac L --- Cargo.lock | 22 +-- Cargo.toml | 2 +- beacon_node/beacon_chain/Cargo.toml | 2 +- .../src/beacon_fork_choice_store.rs | 8 +- .../beacon_chain/src/blob_verification.rs | 6 +- .../beacon_chain/src/block_verification.rs | 6 +- .../src/block_verification_types.rs | 10 +- .../src/data_column_verification.rs | 14 +- ...ght_client_finality_update_verification.rs | 6 +- ...t_client_optimistic_update_verification.rs | 6 +- .../beacon_chain/src/observed_operations.rs | 8 +- .../src/sync_committee_verification.rs | 6 +- beacon_node/network/Cargo.toml | 2 +- beacon_node/network/src/sync/batch.rs | 8 +- .../sync/block_lookups/single_block_lookup.rs | 26 ++-- beacon_node/operation_pool/Cargo.toml | 2 +- beacon_node/operation_pool/src/persistence.rs | 7 +- common/eth2/Cargo.toml | 2 +- common/eth2/src/lib.rs | 8 +- common/validator_dir/Cargo.toml | 2 +- common/validator_dir/src/validator_dir.rs | 8 +- consensus/state_processing/Cargo.toml | 2 +- .../state_processing/src/verify_operation.rs | 8 +- consensus/types/Cargo.toml | 2 +- 
consensus/types/src/attestation.rs | 22 +-- consensus/types/src/attester_slashing.rs | 14 +- consensus/types/src/beacon_block.rs | 10 +- consensus/types/src/beacon_block_body.rs | 10 +- consensus/types/src/beacon_state.rs | 6 +- .../types/src/beacon_state/committee_cache.rs | 8 +- consensus/types/src/blob_sidecar.rs | 8 +- consensus/types/src/chain_spec.rs | 8 +- consensus/types/src/data_column_sidecar.rs | 8 +- consensus/types/src/data_column_subnet_id.rs | 10 +- consensus/types/src/execution_block_hash.rs | 10 +- consensus/types/src/execution_payload.rs | 10 +- .../types/src/execution_payload_header.rs | 10 +- consensus/types/src/execution_requests.rs | 6 +- consensus/types/src/indexed_attestation.rs | 9 +- consensus/types/src/light_client_bootstrap.rs | 6 +- .../types/src/light_client_finality_update.rs | 6 +- consensus/types/src/light_client_header.rs | 6 +- .../src/light_client_optimistic_update.rs | 6 +- consensus/types/src/light_client_update.rs | 6 +- consensus/types/src/payload.rs | 26 ++-- consensus/types/src/runtime_var_list.rs | 6 +- consensus/types/src/signed_beacon_block.rs | 10 +- consensus/types/src/sync_aggregate.rs | 8 +- crypto/kzg/Cargo.toml | 2 +- crypto/kzg/src/kzg_commitment.rs | 6 +- slasher/Cargo.toml | 2 +- slasher/src/database/redb_impl.rs | 14 +- testing/ef_tests/Cargo.toml | 2 +- testing/ef_tests/src/handler.rs | 126 +++++++++--------- validator_manager/Cargo.toml | 2 +- validator_manager/src/import_validators.rs | 8 +- 56 files changed, 277 insertions(+), 287 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1d1108b1d0b..e045c8697fb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -862,7 +862,7 @@ dependencies = [ "bitvec 1.0.1", "bls", "criterion", - "derivative", + "educe", "eth2", "eth2_network_config", "ethereum_hashing", @@ -2543,7 +2543,7 @@ dependencies = [ "bls", "compare_fields", "context_deserialize", - "derivative", + "educe", "eth2_network_config", "ethereum_ssz", "ethereum_ssz_derive", @@ -2862,7 +2862,7 @@ dependencies = 
[ name = "eth2" version = "0.1.0" dependencies = [ - "derivative", + "educe", "eip_3076", "either", "enr", @@ -5014,7 +5014,7 @@ dependencies = [ "arbitrary", "c-kzg", "criterion", - "derivative", + "educe", "ethereum_hashing", "ethereum_serde_utils", "ethereum_ssz", @@ -6309,7 +6309,7 @@ dependencies = [ "beacon_processor", "bls", "delay_map", - "derivative", + "educe", "eth2", "eth2_network_config", "ethereum_ssz", @@ -6745,7 +6745,7 @@ version = "0.2.0" dependencies = [ "beacon_chain", "bitvec 1.0.1", - "derivative", + "educe", "ethereum_ssz", "ethereum_ssz_derive", "itertools 0.10.5", @@ -8748,7 +8748,7 @@ version = "0.1.0" dependencies = [ "bincode", "byteorder", - "derivative", + "educe", "ethereum_ssz", "ethereum_ssz_derive", "filesystem", @@ -8917,7 +8917,7 @@ dependencies = [ "arbitrary", "beacon_chain", "bls", - "derivative", + "educe", "ethereum_hashing", "ethereum_ssz", "ethereum_ssz_derive", @@ -9855,7 +9855,7 @@ dependencies = [ "compare_fields", "context_deserialize", "criterion", - "derivative", + "educe", "eth2_interop_keypairs", "ethereum_hashing", "ethereum_serde_utils", @@ -10099,7 +10099,7 @@ version = "0.1.0" dependencies = [ "bls", "deposit_contract", - "derivative", + "educe", "eth2_keystore", "filesystem", "hex", @@ -10187,7 +10187,7 @@ dependencies = [ "beacon_chain", "clap", "clap_utils", - "derivative", + "educe", "environment", "eth2", "eth2_network_config", diff --git a/Cargo.toml b/Cargo.toml index 4d357816d9a..15fea466f0e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -128,11 +128,11 @@ context_deserialize_derive = { path = "consensus/context_deserialize/context_des criterion = "0.5" delay_map = "0.4" deposit_contract = { path = "common/deposit_contract" } -derivative = "2" directory = { path = "common/directory" } dirs = "3" discv5 = { version = "0.10", features = ["libp2p"] } doppelganger_service = { path = "validator_client/doppelganger_service" } +educe = "0.6" eip_3076 = { path = "common/eip_3076" } either = "1.9" environment = { 
path = "lighthouse/environment" } diff --git a/beacon_node/beacon_chain/Cargo.toml b/beacon_node/beacon_chain/Cargo.toml index dca351cbac6..e889f53bb01 100644 --- a/beacon_node/beacon_chain/Cargo.toml +++ b/beacon_node/beacon_chain/Cargo.toml @@ -18,7 +18,7 @@ test_backfill = [] alloy-primitives = { workspace = true } bitvec = { workspace = true } bls = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } eth2 = { workspace = true } eth2_network_config = { workspace = true } ethereum_hashing = { workspace = true } diff --git a/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs b/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs index 440388661c2..0c203009bbe 100644 --- a/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs +++ b/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs @@ -5,7 +5,7 @@ //! reads when fork choice requires the validator balances of the justified state. use crate::{BeaconSnapshot, metrics}; -use derivative::Derivative; +use educe::Educe; use fork_choice::ForkChoiceStore; use proto_array::JustifiedBalances; use safe_arith::ArithError; @@ -127,10 +127,10 @@ impl BalancesCache { /// Implements `fork_choice::ForkChoiceStore` in order to provide a persistent backing to the /// `fork_choice::ForkChoice` struct. 
-#[derive(Debug, Derivative)] -#[derivative(PartialEq(bound = "E: EthSpec, Hot: ItemStore, Cold: ItemStore"))] +#[derive(Debug, Educe)] +#[educe(PartialEq(bound(E: EthSpec, Hot: ItemStore, Cold: ItemStore)))] pub struct BeaconForkChoiceStore, Cold: ItemStore> { - #[derivative(PartialEq = "ignore")] + #[educe(PartialEq(ignore))] store: Arc>, balances_cache: BalancesCache, time: Slot, diff --git a/beacon_node/beacon_chain/src/blob_verification.rs b/beacon_node/beacon_chain/src/blob_verification.rs index 53f2eff0ca3..874673b52e8 100644 --- a/beacon_node/beacon_chain/src/blob_verification.rs +++ b/beacon_node/beacon_chain/src/blob_verification.rs @@ -1,4 +1,4 @@ -use derivative::Derivative; +use educe::Educe; use slot_clock::SlotClock; use std::marker::PhantomData; use std::sync::Arc; @@ -245,8 +245,8 @@ impl GossipVerifiedBlob { /// Wrapper over a `BlobSidecar` for which we have completed kzg verification. /// i.e. `verify_blob_kzg_proof(blob, commitment, proof) == true`. -#[derive(Debug, Derivative, Clone, Encode, Decode)] -#[derivative(PartialEq, Eq)] +#[derive(Debug, Educe, Clone, Encode, Decode)] +#[educe(PartialEq, Eq)] #[ssz(struct_behaviour = "transparent")] pub struct KzgVerifiedBlob { blob: Arc>, diff --git a/beacon_node/beacon_chain/src/block_verification.rs b/beacon_node/beacon_chain/src/block_verification.rs index 691293b2000..1ddc51cc351 100644 --- a/beacon_node/beacon_chain/src/block_verification.rs +++ b/beacon_node/beacon_chain/src/block_verification.rs @@ -66,7 +66,7 @@ use crate::{ beacon_chain::{BeaconForkChoice, ForkChoiceError}, metrics, }; -use derivative::Derivative; +use educe::Educe; use eth2::types::{BlockGossip, EventKind}; use execution_layer::PayloadStatus; pub use fork_choice::{AttestationFromBlock, PayloadVerificationStatus}; @@ -689,8 +689,8 @@ pub fn signature_verify_chain_segment( /// A wrapper around a `SignedBeaconBlock` that indicates it has been approved for re-gossiping on /// the p2p network. 
-#[derive(Derivative)] -#[derivative(Debug(bound = "T: BeaconChainTypes"))] +#[derive(Educe)] +#[educe(Debug(bound(T: BeaconChainTypes)))] pub struct GossipVerifiedBlock { pub block: Arc>, pub block_root: Hash256, diff --git a/beacon_node/beacon_chain/src/block_verification_types.rs b/beacon_node/beacon_chain/src/block_verification_types.rs index 1a0b188fdcd..5978e97c4d9 100644 --- a/beacon_node/beacon_chain/src/block_verification_types.rs +++ b/beacon_node/beacon_chain/src/block_verification_types.rs @@ -2,7 +2,7 @@ use crate::data_availability_checker::AvailabilityCheckError; pub use crate::data_availability_checker::{AvailableBlock, MaybeAvailableBlock}; use crate::data_column_verification::{CustodyDataColumn, CustodyDataColumnList}; use crate::{PayloadVerificationOutcome, get_block_root}; -use derivative::Derivative; +use educe::Educe; use ssz_types::VariableList; use state_processing::ConsensusContext; use std::fmt::{Debug, Formatter}; @@ -26,8 +26,8 @@ use types::{ /// Note: We make a distinction over blocks received over gossip because /// in a post-deneb world, the blobs corresponding to a given block that are received /// over rpc do not contain the proposer signature for dos resistance. -#[derive(Clone, Derivative)] -#[derivative(Hash(bound = "E: EthSpec"))] +#[derive(Clone, Educe)] +#[educe(Hash(bound(E: EthSpec)))] pub struct RpcBlock { block_root: Hash256, block: RpcBlockInner, @@ -80,8 +80,8 @@ impl RpcBlock { /// Note: This variant is intentionally private because we want to safely construct the /// internal variants after applying consistency checks to ensure that the block and blobs /// are consistent with respect to each other. -#[derive(Debug, Clone, Derivative)] -#[derivative(Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Educe)] +#[educe(Hash(bound(E: EthSpec)))] enum RpcBlockInner { /// Single block lookup response. This should potentially hit the data availability cache. 
Block(Arc>), diff --git a/beacon_node/beacon_chain/src/data_column_verification.rs b/beacon_node/beacon_chain/src/data_column_verification.rs index 7a8066351a3..b9986025667 100644 --- a/beacon_node/beacon_chain/src/data_column_verification.rs +++ b/beacon_node/beacon_chain/src/data_column_verification.rs @@ -4,7 +4,7 @@ use crate::block_verification::{ use crate::kzg_utils::{reconstruct_data_columns, validate_data_columns}; use crate::observed_data_sidecars::{ObservationStrategy, Observe}; use crate::{BeaconChain, BeaconChainError, BeaconChainTypes, metrics}; -use derivative::Derivative; +use educe::Educe; use fork_choice::ProtoBlock; use kzg::{Error as KzgError, Kzg}; use proto_array::Block; @@ -296,8 +296,8 @@ impl GossipVerifiedDataColumn } /// Wrapper over a `DataColumnSidecar` for which we have completed kzg verification. -#[derive(Debug, Derivative, Clone, Encode, Decode)] -#[derivative(PartialEq, Eq)] +#[derive(Debug, Educe, Clone, Encode, Decode)] +#[educe(PartialEq, Eq)] #[ssz(struct_behaviour = "transparent")] pub struct KzgVerifiedDataColumn { data: Arc>, @@ -353,8 +353,8 @@ pub type CustodyDataColumnList = VariableList, ::NumberOfColumns>; /// Data column that we must custody -#[derive(Debug, Derivative, Clone, Encode, Decode)] -#[derivative(PartialEq, Eq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Educe, Clone, Encode, Decode)] +#[educe(PartialEq, Eq, Hash(bound(E: EthSpec)))] #[ssz(struct_behaviour = "transparent")] pub struct CustodyDataColumn { data: Arc>, @@ -383,8 +383,8 @@ impl CustodyDataColumn { } /// Data column that we must custody and has completed kzg verification -#[derive(Debug, Derivative, Clone, Encode, Decode)] -#[derivative(PartialEq, Eq)] +#[derive(Debug, Educe, Clone, Encode, Decode)] +#[educe(PartialEq, Eq)] #[ssz(struct_behaviour = "transparent")] pub struct KzgVerifiedCustodyDataColumn { data: Arc>, diff --git a/beacon_node/beacon_chain/src/light_client_finality_update_verification.rs 
b/beacon_node/beacon_chain/src/light_client_finality_update_verification.rs index fe62b8ef903..2dc4de7d04b 100644 --- a/beacon_node/beacon_chain/src/light_client_finality_update_verification.rs +++ b/beacon_node/beacon_chain/src/light_client_finality_update_verification.rs @@ -1,5 +1,5 @@ use crate::{BeaconChain, BeaconChainTypes}; -use derivative::Derivative; +use educe::Educe; use slot_clock::SlotClock; use std::time::Duration; use strum::AsRefStr; @@ -55,8 +55,8 @@ pub enum Error { } /// Wraps a `LightClientFinalityUpdate` that has been verified for propagation on the gossip network. -#[derive(Derivative)] -#[derivative(Clone(bound = "T: BeaconChainTypes"))] +#[derive(Educe)] +#[educe(Clone(bound(T: BeaconChainTypes)))] pub struct VerifiedLightClientFinalityUpdate { light_client_finality_update: LightClientFinalityUpdate, seen_timestamp: Duration, diff --git a/beacon_node/beacon_chain/src/light_client_optimistic_update_verification.rs b/beacon_node/beacon_chain/src/light_client_optimistic_update_verification.rs index b59390ea0c4..4079a374f89 100644 --- a/beacon_node/beacon_chain/src/light_client_optimistic_update_verification.rs +++ b/beacon_node/beacon_chain/src/light_client_optimistic_update_verification.rs @@ -1,5 +1,5 @@ use crate::{BeaconChain, BeaconChainTypes}; -use derivative::Derivative; +use educe::Educe; use eth2::types::Hash256; use slot_clock::SlotClock; use std::time::Duration; @@ -49,8 +49,8 @@ pub enum Error { } /// Wraps a `LightClientOptimisticUpdate` that has been verified for propagation on the gossip network. 
-#[derive(Derivative)] -#[derivative(Clone(bound = "T: BeaconChainTypes"))] +#[derive(Educe)] +#[educe(Clone(bound(T: BeaconChainTypes)))] pub struct VerifiedLightClientOptimisticUpdate { light_client_optimistic_update: LightClientOptimisticUpdate, pub parent_root: Hash256, diff --git a/beacon_node/beacon_chain/src/observed_operations.rs b/beacon_node/beacon_chain/src/observed_operations.rs index 49614c5b542..4ca5371242c 100644 --- a/beacon_node/beacon_chain/src/observed_operations.rs +++ b/beacon_node/beacon_chain/src/observed_operations.rs @@ -1,4 +1,4 @@ -use derivative::Derivative; +use educe::Educe; use smallvec::{SmallVec, smallvec}; use state_processing::{SigVerifiedOp, TransformPersist, VerifyOperation, VerifyOperationAt}; use std::collections::HashSet; @@ -14,8 +14,8 @@ pub const SMALL_VEC_SIZE: usize = 8; /// Stateful tracker for exit/slashing operations seen on the network. /// /// Implements the conditions for gossip verification of exits and slashings from the P2P spec. -#[derive(Debug, Derivative)] -#[derivative(Default(bound = "T: ObservableOperation, E: EthSpec"))] +#[derive(Debug, Educe)] +#[educe(Default(bound(T: ObservableOperation, E: EthSpec)))] pub struct ObservedOperations, E: EthSpec> { /// Indices of validators for whom we have already seen an instance of an operation `T`. /// @@ -26,7 +26,7 @@ pub struct ObservedOperations, E: EthSpec> { /// `attestation_1.attester_indices` and `attestation_2.attester_indices`. observed_validator_indices: HashSet, /// The name of the current fork. The default will be overwritten on first use. 
- #[derivative(Default(value = "ForkName::Base"))] + #[educe(Default(expression = ForkName::Base))] current_fork: ForkName, _phantom: PhantomData<(T, E)>, } diff --git a/beacon_node/beacon_chain/src/sync_committee_verification.rs b/beacon_node/beacon_chain/src/sync_committee_verification.rs index 41d29d5526e..e72e9a6b21f 100644 --- a/beacon_node/beacon_chain/src/sync_committee_verification.rs +++ b/beacon_node/beacon_chain/src/sync_committee_verification.rs @@ -31,7 +31,7 @@ use crate::{ BeaconChain, BeaconChainError, BeaconChainTypes, metrics, observed_aggregates::ObserveOutcome, }; use bls::{PublicKeyBytes, verify_signature_sets}; -use derivative::Derivative; +use educe::Educe; use safe_arith::ArithError; use slot_clock::SlotClock; use ssz_derive::{Decode, Encode}; @@ -261,8 +261,8 @@ impl From for Error { } /// Wraps a `SignedContributionAndProof` that has been verified for propagation on the gossip network.\ -#[derive(Derivative)] -#[derivative(Clone(bound = "T: BeaconChainTypes"))] +#[derive(Educe)] +#[educe(Clone(bound(T: BeaconChainTypes)))] pub struct VerifiedSyncContribution { signed_aggregate: SignedContributionAndProof, participant_pubkeys: Vec, diff --git a/beacon_node/network/Cargo.toml b/beacon_node/network/Cargo.toml index 5615148648d..b60c5e6dbff 100644 --- a/beacon_node/network/Cargo.toml +++ b/beacon_node/network/Cargo.toml @@ -19,7 +19,7 @@ async-channel = { workspace = true } beacon_chain = { workspace = true } beacon_processor = { workspace = true } delay_map = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } ethereum_ssz = { workspace = true } execution_layer = { workspace = true } fnv = { workspace = true } diff --git a/beacon_node/network/src/sync/batch.rs b/beacon_node/network/src/sync/batch.rs index ea0ef15f4b2..8de386f5be2 100644 --- a/beacon_node/network/src/sync/batch.rs +++ b/beacon_node/network/src/sync/batch.rs @@ -1,5 +1,5 @@ use beacon_chain::block_verification_types::RpcBlock; -use 
derivative::Derivative; +use educe::Educe; use lighthouse_network::PeerId; use lighthouse_network::rpc::methods::BlocksByRangeRequest; use lighthouse_network::rpc::methods::DataColumnsByRangeRequest; @@ -78,8 +78,8 @@ pub enum BatchProcessingResult { NonFaultyFailure, } -#[derive(Derivative)] -#[derivative(Debug)] +#[derive(Educe)] +#[educe(Debug)] /// A segment of a chain. pub struct BatchInfo { /// Start slot of the batch. @@ -97,7 +97,7 @@ pub struct BatchInfo { /// Whether this batch contains all blocks or all blocks and blobs. batch_type: ByRangeRequestType, /// Pin the generic - #[derivative(Debug = "ignore")] + #[educe(Debug(ignore))] marker: std::marker::PhantomData<(E, B)>, } diff --git a/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs b/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs index 8fb3248a871..46897b2283b 100644 --- a/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs +++ b/beacon_node/network/src/sync/block_lookups/single_block_lookup.rs @@ -5,7 +5,7 @@ use crate::sync::network_context::{ SyncNetworkContext, }; use beacon_chain::{BeaconChainTypes, BlockProcessStatus}; -use derivative::Derivative; +use educe::Educe; use lighthouse_network::service::api_types::Id; use lighthouse_tracing::SPAN_SINGLE_BLOCK_LOOKUP; use parking_lot::RwLock; @@ -57,8 +57,8 @@ pub enum LookupRequestError { }, } -#[derive(Derivative)] -#[derivative(Debug(bound = "T: BeaconChainTypes"))] +#[derive(Educe)] +#[educe(Debug(bound(T: BeaconChainTypes)))] pub struct SingleBlockLookup { pub id: Id, pub block_request_state: BlockRequestState, @@ -67,7 +67,7 @@ pub struct SingleBlockLookup { /// the custody request to have an updated view of the peers that claim to have imported the /// block associated with this lookup. The peer set of a lookup can change rapidly, and faster /// than the lifetime of a custody request. 
- #[derivative(Debug(format_with = "fmt_peer_set_as_len"))] + #[educe(Debug(method(fmt_peer_set_as_len)))] peers: Arc>>, block_root: Hash256, awaiting_parent: Option, @@ -369,10 +369,10 @@ impl SingleBlockLookup { } /// The state of the blob request component of a `SingleBlockLookup`. -#[derive(Derivative)] -#[derivative(Debug)] +#[derive(Educe)] +#[educe(Debug)] pub struct BlobRequestState { - #[derivative(Debug = "ignore")] + #[educe(Debug(ignore))] pub block_root: Hash256, pub state: SingleLookupRequestState>, } @@ -387,10 +387,10 @@ impl BlobRequestState { } /// The state of the custody request component of a `SingleBlockLookup`. -#[derive(Derivative)] -#[derivative(Debug)] +#[derive(Educe)] +#[educe(Debug)] pub struct CustodyRequestState { - #[derivative(Debug = "ignore")] + #[educe(Debug(ignore))] pub block_root: Hash256, pub state: SingleLookupRequestState>, } @@ -405,10 +405,10 @@ impl CustodyRequestState { } /// The state of the block request component of a `SingleBlockLookup`. 
-#[derive(Derivative)] -#[derivative(Debug)] +#[derive(Educe)] +#[educe(Debug)] pub struct BlockRequestState { - #[derivative(Debug = "ignore")] + #[educe(Debug(ignore))] pub requested_block_root: Hash256, pub state: SingleLookupRequestState>>, } diff --git a/beacon_node/operation_pool/Cargo.toml b/beacon_node/operation_pool/Cargo.toml index beaf8188824..eeddb53c23e 100644 --- a/beacon_node/operation_pool/Cargo.toml +++ b/beacon_node/operation_pool/Cargo.toml @@ -9,7 +9,7 @@ portable = ["beacon_chain/portable"] [dependencies] bitvec = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } itertools = { workspace = true } diff --git a/beacon_node/operation_pool/src/persistence.rs b/beacon_node/operation_pool/src/persistence.rs index 4d754534605..ee45c8dd053 100644 --- a/beacon_node/operation_pool/src/persistence.rs +++ b/beacon_node/operation_pool/src/persistence.rs @@ -3,7 +3,7 @@ use crate::OperationPool; use crate::attestation_storage::AttestationMap; use crate::bls_to_execution_changes::{BlsToExecutionChanges, ReceivedPreCapella}; use crate::sync_aggregate_id::SyncAggregateId; -use derivative::Derivative; +use educe::Educe; use parking_lot::RwLock; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; @@ -22,10 +22,7 @@ type PersistedSyncContributions = Vec<(SyncAggregateId, Vec { diff --git a/consensus/types/Cargo.toml b/consensus/types/Cargo.toml index d9b2f101987..4e04fa95daa 100644 --- a/consensus/types/Cargo.toml +++ b/consensus/types/Cargo.toml @@ -30,7 +30,7 @@ arbitrary = { workspace = true, features = ["derive"], optional = true } bls = { workspace = true } compare_fields = { workspace = true } context_deserialize = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } eth2_interop_keypairs = { path = "../../common/eth2_interop_keypairs" } ethereum_hashing = { workspace = true } ethereum_serde_utils = { 
workspace = true } diff --git a/consensus/types/src/attestation.rs b/consensus/types/src/attestation.rs index 52646867925..14305826589 100644 --- a/consensus/types/src/attestation.rs +++ b/consensus/types/src/attestation.rs @@ -8,7 +8,7 @@ use crate::{ }; use crate::{Hash256, Slot, test_utils::TestRandom}; use crate::{IndexedAttestation, context_deserialize}; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz_derive::{Decode, Encode}; use ssz_types::BitVector; @@ -45,11 +45,11 @@ impl From for Error { Decode, Encode, TestRandom, - Derivative, + Educe, TreeHash, ), context_deserialize(ForkName), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", @@ -66,7 +66,8 @@ impl From for Error { derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive(Debug, Clone, Serialize, TreeHash, Encode, Derivative, Deserialize, PartialEq)] +#[derive(Debug, Clone, Serialize, TreeHash, Encode, Educe, Deserialize)] +#[educe(PartialEq)] #[serde(untagged)] #[tree_hash(enum_behaviour = "transparent")] #[ssz(enum_behaviour = "transparent")] @@ -599,18 +600,7 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for Vec> */ #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[derive( - Debug, - Clone, - Serialize, - Deserialize, - Decode, - Encode, - TestRandom, - Derivative, - TreeHash, - PartialEq, -)] +#[derive(Debug, Clone, Serialize, Deserialize, Decode, Encode, TestRandom, TreeHash, PartialEq)] #[context_deserialize(ForkName)] pub struct SingleAttestation { #[serde(with = "serde_utils::quoted_u64")] diff --git a/consensus/types/src/attester_slashing.rs b/consensus/types/src/attester_slashing.rs index adc3695f4a4..2bfb65653c6 100644 --- a/consensus/types/src/attester_slashing.rs +++ b/consensus/types/src/attester_slashing.rs @@ -4,7 +4,7 @@ use 
crate::indexed_attestation::{ }; use crate::{ContextDeserialize, ForkName}; use crate::{EthSpec, test_utils::TestRandom}; -use derivative::Derivative; +use educe::Educe; use rand::{Rng, RngCore}; use serde::{Deserialize, Deserializer, Serialize}; use ssz_derive::{Decode, Encode}; @@ -16,7 +16,7 @@ use tree_hash_derive::TreeHash; variants(Base, Electra), variant_attributes( derive( - Derivative, + Educe, Debug, Clone, Serialize, @@ -27,7 +27,7 @@ use tree_hash_derive::TreeHash; TestRandom, ), context_deserialize(ForkName), - derivative(PartialEq, Eq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Eq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec"), cfg_attr( feature = "arbitrary", @@ -42,8 +42,8 @@ use tree_hash_derive::TreeHash; derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive(Debug, Clone, Serialize, Encode, Deserialize, TreeHash, Derivative)] -#[derivative(PartialEq, Eq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Encode, Deserialize, TreeHash, Educe)] +#[educe(PartialEq, Eq, Hash(bound(E: EthSpec)))] #[serde(bound = "E: EthSpec", untagged)] #[ssz(enum_behaviour = "transparent")] #[tree_hash(enum_behaviour = "transparent")] @@ -57,8 +57,8 @@ pub struct AttesterSlashing { /// This is a copy of the `AttesterSlashing` enum but with `Encode` and `Decode` derived /// using the `union` behavior for the purposes of persistence on disk. We use a separate /// type so that we don't accidentally use this non-spec encoding in consensus objects. 
-#[derive(Debug, Clone, Encode, Decode, Derivative)] -#[derivative(PartialEq, Eq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Encode, Decode, Educe)] +#[educe(PartialEq, Eq, Hash(bound(E: EthSpec)))] #[ssz(enum_behaviour = "union")] pub enum AttesterSlashingOnDisk { Base(AttesterSlashingBase), diff --git a/consensus/types/src/beacon_block.rs b/consensus/types/src/beacon_block.rs index 61c32dd4ac9..060709d6556 100644 --- a/consensus/types/src/beacon_block.rs +++ b/consensus/types/src/beacon_block.rs @@ -1,7 +1,7 @@ use crate::attestation::AttestationBase; use crate::test_utils::TestRandom; use crate::*; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, DecodeError}; use ssz_derive::{Decode, Encode}; @@ -27,9 +27,9 @@ use self::indexed_attestation::IndexedAttestationBase; Decode, TreeHash, TestRandom, - Derivative, + Educe, ), - derivative(PartialEq, Hash(bound = "E: EthSpec, Payload: AbstractExecPayload")), + educe(PartialEq, Hash(bound(E: EthSpec, Payload: AbstractExecPayload))), serde( bound = "E: EthSpec, Payload: AbstractExecPayload", deny_unknown_fields @@ -52,8 +52,8 @@ use self::indexed_attestation::IndexedAttestationBase; derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec, Payload: AbstractExecPayload") )] -#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Derivative)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(untagged)] #[serde(bound = "E: EthSpec, Payload: AbstractExecPayload")] #[tree_hash(enum_behaviour = "transparent")] diff --git a/consensus/types/src/beacon_block_body.rs b/consensus/types/src/beacon_block_body.rs index e636fbb5346..ced8fea4a99 100644 --- a/consensus/types/src/beacon_block_body.rs +++ b/consensus/types/src/beacon_block_body.rs @@ -1,6 +1,6 @@ use crate::test_utils::TestRandom; use crate::*; -use 
derivative::Derivative; +use educe::Educe; use merkle_proof::{MerkleTree, MerkleTreeError}; use metastruct::metastruct; use serde::{Deserialize, Deserializer, Serialize}; @@ -39,9 +39,9 @@ pub const BLOB_KZG_COMMITMENTS_INDEX: usize = 11; Decode, TreeHash, TestRandom, - Derivative, + Educe, ), - derivative(PartialEq, Hash(bound = "E: EthSpec, Payload: AbstractExecPayload")), + educe(PartialEq, Hash(bound(E: EthSpec, Payload: AbstractExecPayload))), serde( bound = "E: EthSpec, Payload: AbstractExecPayload", deny_unknown_fields @@ -71,8 +71,8 @@ pub const BLOB_KZG_COMMITMENTS_INDEX: usize = 11; derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec, Payload: AbstractExecPayload") )] -#[derive(Debug, Clone, Serialize, Deserialize, Derivative, TreeHash)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, Educe, TreeHash)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(untagged)] #[serde(bound = "E: EthSpec, Payload: AbstractExecPayload")] #[tree_hash(enum_behaviour = "transparent")] diff --git a/consensus/types/src/beacon_state.rs b/consensus/types/src/beacon_state.rs index d1574be7cc1..d13e2235574 100644 --- a/consensus/types/src/beacon_state.rs +++ b/consensus/types/src/beacon_state.rs @@ -5,7 +5,7 @@ use crate::historical_summary::HistoricalSummary; use crate::test_utils::TestRandom; use crate::*; use compare_fields::CompareFields; -use derivative::Derivative; +use educe::Educe; use ethereum_hashing::hash; use int_to_bytes::{int_to_bytes4, int_to_bytes8}; use metastruct::{NumFields, metastruct}; @@ -245,7 +245,7 @@ impl From for Hash256 { variants(Base, Altair, Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), variant_attributes( derive( - Derivative, + Educe, Debug, PartialEq, Serialize, @@ -262,7 +262,7 @@ impl From for Hash256 { derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") ), - derivative(Clone), + educe(Clone), ), specific_variant_attributes( Base(metastruct( diff --git 
a/consensus/types/src/beacon_state/committee_cache.rs b/consensus/types/src/beacon_state/committee_cache.rs index 06242e8d20e..408c269da5f 100644 --- a/consensus/types/src/beacon_state/committee_cache.rs +++ b/consensus/types/src/beacon_state/committee_cache.rs @@ -2,7 +2,7 @@ use crate::*; use core::num::NonZeroUsize; -use derivative::Derivative; +use educe::Educe; use safe_arith::SafeArith; use serde::{Deserialize, Serialize}; use ssz::{Decode, DecodeError, Encode, four_byte_option_impl}; @@ -20,13 +20,13 @@ four_byte_option_impl!(four_byte_option_non_zero_usize, NonZeroUsize); /// Computes and stores the shuffling for an epoch. Provides various getters to allow callers to /// read the committees for the given epoch. -#[derive(Derivative, Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] -#[derivative(PartialEq)] +#[derive(Educe, Debug, Default, Clone, Serialize, Deserialize, Encode, Decode)] +#[educe(PartialEq)] pub struct CommitteeCache { #[ssz(with = "four_byte_option_epoch")] initialized_epoch: Option, shuffling: Vec, - #[derivative(PartialEq(compare_with = "compare_shuffling_positions"))] + #[educe(PartialEq(method(compare_shuffling_positions)))] shuffling_positions: Vec, committees_per_slot: u64, slots_per_epoch: u64, diff --git a/consensus/types/src/blob_sidecar.rs b/consensus/types/src/blob_sidecar.rs index 2e8c2578976..d2c7331a579 100644 --- a/consensus/types/src/blob_sidecar.rs +++ b/consensus/types/src/blob_sidecar.rs @@ -7,7 +7,7 @@ use crate::{ beacon_block_body::BLOB_KZG_COMMITMENTS_INDEX, }; use bls::Signature; -use derivative::Derivative; +use educe::Educe; use kzg::{BYTES_PER_BLOB, BYTES_PER_FIELD_ELEMENT, Blob as KzgBlob, Kzg, KzgCommitment, KzgProof}; use merkle_proof::{MerkleTreeError, merkle_root_from_branch, verify_merkle_proof}; use rand::Rng; @@ -49,12 +49,10 @@ impl Ord for BlobIdentifier { derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive( - Debug, Clone, Serialize, Deserialize, Encode, Decode, 
TreeHash, TestRandom, Derivative, -)] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, Educe)] #[context_deserialize(ForkName)] #[serde(bound = "E: EthSpec")] -#[derivative(PartialEq, Eq, Hash(bound = "E: EthSpec"))] +#[educe(PartialEq, Eq, Hash(bound(E: EthSpec)))] pub struct BlobSidecar { #[serde(with = "serde_utils::quoted_u64")] pub index: u64, diff --git a/consensus/types/src/chain_spec.rs b/consensus/types/src/chain_spec.rs index 5dedd930c65..a66080ada6f 100644 --- a/consensus/types/src/chain_spec.rs +++ b/consensus/types/src/chain_spec.rs @@ -2,7 +2,7 @@ use crate::application_domain::{APPLICATION_DOMAIN_BUILDER, ApplicationDomain}; use crate::blob_sidecar::BlobIdentifier; use crate::data_column_sidecar::DataColumnsByRootIdentifier; use crate::*; -use derivative::Derivative; +use educe::Educe; use ethereum_hashing::hash; use int_to_bytes::int_to_bytes4; use safe_arith::{ArithError, SafeArith}; @@ -1566,15 +1566,15 @@ pub struct BlobParameters { // A wrapper around a vector of BlobParameters to ensure that the vector is reverse // sorted by epoch. #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[derive(Debug, Derivative, Clone)] -#[derivative(PartialEq)] +#[derive(Debug, Educe, Clone)] +#[educe(PartialEq)] pub struct BlobSchedule { schedule: Vec, // This is a hack to prevent the blob schedule being serialized on the /eth/v1/config/spec // endpoint prior to the Fulu fork being scheduled. // // We can remove this once Fulu is live on mainnet. 
- #[derivative(PartialEq = "ignore")] + #[educe(PartialEq(ignore))] skip_serializing: bool, } diff --git a/consensus/types/src/data_column_sidecar.rs b/consensus/types/src/data_column_sidecar.rs index 2272b1695c9..62ce4467dfa 100644 --- a/consensus/types/src/data_column_sidecar.rs +++ b/consensus/types/src/data_column_sidecar.rs @@ -6,7 +6,7 @@ use crate::{ SignedBeaconBlockHeader, Slot, }; use bls::Signature; -use derivative::Derivative; +use educe::Educe; use kzg::Error as KzgError; use kzg::{KzgCommitment, KzgProof}; use merkle_proof::verify_merkle_proof; @@ -40,11 +40,9 @@ pub type DataColumnSidecarList = Vec>>; derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive( - Debug, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, Derivative, -)] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, Educe)] #[serde(bound = "E: EthSpec")] -#[derivative(PartialEq, Eq, Hash(bound = "E: EthSpec"))] +#[educe(PartialEq, Eq, Hash(bound(E: EthSpec)))] #[context_deserialize(ForkName)] pub struct DataColumnSidecar { #[serde(with = "serde_utils::quoted_u64")] diff --git a/consensus/types/src/data_column_subnet_id.rs b/consensus/types/src/data_column_subnet_id.rs index 4061cb4fdb0..c6b8846c783 100644 --- a/consensus/types/src/data_column_subnet_id.rs +++ b/consensus/types/src/data_column_subnet_id.rs @@ -1,18 +1,22 @@ //! Identifies each data column subnet by an integer identifier. 
use crate::ChainSpec; use crate::data_column_sidecar::ColumnIndex; -use derivative::Derivative; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use std::fmt::{self, Display}; use std::ops::{Deref, DerefMut}; #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[derive(Clone, Copy, Derivative, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[derivative(Debug = "transparent")] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] #[serde(transparent)] pub struct DataColumnSubnetId(#[serde(with = "serde_utils::quoted_u64")] u64); +impl fmt::Debug for DataColumnSubnetId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + std::fmt::Debug::fmt(&self.0, f) + } +} + impl DataColumnSubnetId { pub fn new(id: u64) -> Self { id.into() diff --git a/consensus/types/src/execution_block_hash.rs b/consensus/types/src/execution_block_hash.rs index d3065afbbb0..31905d64dfa 100644 --- a/consensus/types/src/execution_block_hash.rs +++ b/consensus/types/src/execution_block_hash.rs @@ -1,18 +1,22 @@ use crate::FixedBytesExtended; use crate::Hash256; use crate::test_utils::TestRandom; -use derivative::Derivative; use rand::RngCore; use serde::{Deserialize, Serialize}; use ssz::{Decode, DecodeError, Encode}; use std::fmt; #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[derive(Default, Clone, Copy, Serialize, Deserialize, Eq, PartialEq, Hash, Derivative)] -#[derivative(Debug = "transparent")] +#[derive(Default, Clone, Copy, Serialize, Deserialize, Eq, PartialEq, Hash)] #[serde(transparent)] pub struct ExecutionBlockHash(#[serde(with = "serde_utils::b256_hex")] pub Hash256); +impl fmt::Debug for ExecutionBlockHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + std::fmt::Debug::fmt(&self.0, f) + } +} + impl ExecutionBlockHash { pub fn zero() -> Self { Self(Hash256::zero()) diff --git a/consensus/types/src/execution_payload.rs b/consensus/types/src/execution_payload.rs index 
7a899e5f022..3548f67db2e 100644 --- a/consensus/types/src/execution_payload.rs +++ b/consensus/types/src/execution_payload.rs @@ -1,5 +1,5 @@ use crate::{test_utils::TestRandom, *}; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; @@ -27,10 +27,10 @@ pub type Withdrawals = VariableList::MaxWithdrawal Decode, TreeHash, TestRandom, - Derivative, + Educe, ), context_deserialize(ForkName), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", @@ -48,8 +48,8 @@ pub type Withdrawals = VariableList::MaxWithdrawal derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Derivative)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(bound = "E: EthSpec", untagged)] #[ssz(enum_behaviour = "transparent")] #[tree_hash(enum_behaviour = "transparent")] diff --git a/consensus/types/src/execution_payload_header.rs b/consensus/types/src/execution_payload_header.rs index 2f5fac87a9a..241ecb4ce6e 100644 --- a/consensus/types/src/execution_payload_header.rs +++ b/consensus/types/src/execution_payload_header.rs @@ -1,5 +1,5 @@ use crate::{test_utils::TestRandom, *}; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; @@ -20,9 +20,9 @@ use tree_hash_derive::TreeHash; Decode, TreeHash, TestRandom, - Derivative, + Educe, ), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", @@ -44,8 +44,8 @@ use 
tree_hash_derive::TreeHash; derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Derivative)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(bound = "E: EthSpec", untagged)] #[tree_hash(enum_behaviour = "transparent")] #[ssz(enum_behaviour = "transparent")] diff --git a/consensus/types/src/execution_requests.rs b/consensus/types/src/execution_requests.rs index 592dda5d5e1..67396af71d4 100644 --- a/consensus/types/src/execution_requests.rs +++ b/consensus/types/src/execution_requests.rs @@ -2,7 +2,7 @@ use crate::context_deserialize; use crate::test_utils::TestRandom; use crate::{ConsolidationRequest, DepositRequest, EthSpec, ForkName, Hash256, WithdrawalRequest}; use alloy_primitives::Bytes; -use derivative::Derivative; +use educe::Educe; use ethereum_hashing::{DynamicContext, Sha256Context}; use serde::{Deserialize, Serialize}; use ssz::Encode; @@ -24,10 +24,10 @@ pub type ConsolidationRequests = arbitrary(bound = "E: EthSpec") )] #[derive( - Debug, Derivative, Default, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, + Debug, Educe, Default, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, )] #[serde(bound = "E: EthSpec")] -#[derivative(PartialEq, Eq, Hash(bound = "E: EthSpec"))] +#[educe(PartialEq, Eq, Hash(bound(E: EthSpec)))] #[context_deserialize(ForkName)] pub struct ExecutionRequests { pub deposits: DepositRequests, diff --git a/consensus/types/src/indexed_attestation.rs b/consensus/types/src/indexed_attestation.rs index 4ba695b9d51..dc328842176 100644 --- a/consensus/types/src/indexed_attestation.rs +++ b/consensus/types/src/indexed_attestation.rs @@ -3,7 +3,7 @@ use crate::{ AggregateSignature, AttestationData, EthSpec, ForkName, VariableList, test_utils::TestRandom, }; use core::slice::Iter; -use 
derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz_derive::{Decode, Encode}; @@ -28,11 +28,11 @@ use tree_hash_derive::TreeHash; Decode, Encode, TestRandom, - Derivative, + Educe, TreeHash, ), context_deserialize(ForkName), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", @@ -46,7 +46,8 @@ use tree_hash_derive::TreeHash; derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive(Debug, Clone, Serialize, TreeHash, Encode, Derivative, Deserialize, PartialEq)] +#[derive(Debug, Clone, Serialize, TreeHash, Encode, Educe, Deserialize)] +#[educe(PartialEq)] #[serde(untagged)] #[tree_hash(enum_behaviour = "transparent")] #[ssz(enum_behaviour = "transparent")] diff --git a/consensus/types/src/light_client_bootstrap.rs b/consensus/types/src/light_client_bootstrap.rs index 1345cee2244..80d5bbacf9e 100644 --- a/consensus/types/src/light_client_bootstrap.rs +++ b/consensus/types/src/light_client_bootstrap.rs @@ -5,7 +5,7 @@ use crate::{ LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, SignedBlindedBeaconBlock, Slot, SyncCommittee, light_client_update::*, test_utils::TestRandom, }; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; @@ -22,15 +22,15 @@ use tree_hash_derive::TreeHash; derive( Debug, Clone, - PartialEq, Serialize, Deserialize, - Derivative, + Educe, Decode, Encode, TestRandom, TreeHash, ), + educe(PartialEq), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", diff --git a/consensus/types/src/light_client_finality_update.rs b/consensus/types/src/light_client_finality_update.rs index 644824f12c2..e58d7f4d72b 100644 --- a/consensus/types/src/light_client_finality_update.rs +++ 
b/consensus/types/src/light_client_finality_update.rs @@ -7,7 +7,7 @@ use crate::{ LightClientHeaderGloas, SignedBlindedBeaconBlock, light_client_update::*, test_utils::TestRandom, }; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::Decode; @@ -22,15 +22,15 @@ use tree_hash_derive::TreeHash; derive( Debug, Clone, - PartialEq, Serialize, Deserialize, - Derivative, + Educe, Decode, Encode, TestRandom, TreeHash, ), + educe(PartialEq), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", diff --git a/consensus/types/src/light_client_header.rs b/consensus/types/src/light_client_header.rs index 162203138ab..5820efcc91b 100644 --- a/consensus/types/src/light_client_header.rs +++ b/consensus/types/src/light_client_header.rs @@ -8,7 +8,7 @@ use crate::{ ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, FixedVector, Hash256, SignedBlindedBeaconBlock, test_utils::TestRandom, }; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::Decode; use ssz_derive::{Decode, Encode}; @@ -23,15 +23,15 @@ use tree_hash_derive::TreeHash; derive( Debug, Clone, - PartialEq, Serialize, Deserialize, - Derivative, + Educe, Decode, Encode, TestRandom, TreeHash, ), + educe(PartialEq), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", diff --git a/consensus/types/src/light_client_optimistic_update.rs b/consensus/types/src/light_client_optimistic_update.rs index 7528322d567..ca9957331f8 100644 --- a/consensus/types/src/light_client_optimistic_update.rs +++ b/consensus/types/src/light_client_optimistic_update.rs @@ -6,7 +6,7 @@ use crate::{ LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, SignedBlindedBeaconBlock, light_client_update::*, }; -use derivative::Derivative; +use educe::Educe; use serde::{Deserialize, Deserializer, 
Serialize}; use ssz::{Decode, Encode}; use ssz_derive::Decode; @@ -24,15 +24,15 @@ use tree_hash_derive::TreeHash; derive( Debug, Clone, - PartialEq, Serialize, Deserialize, - Derivative, + Educe, Decode, Encode, TestRandom, TreeHash, ), + educe(PartialEq), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", diff --git a/consensus/types/src/light_client_update.rs b/consensus/types/src/light_client_update.rs index afb7ebc96dc..ede9436c50d 100644 --- a/consensus/types/src/light_client_update.rs +++ b/consensus/types/src/light_client_update.rs @@ -7,7 +7,7 @@ use crate::{ LightClientHeaderCapella, LightClientHeaderDeneb, LightClientHeaderFulu, LightClientHeaderGloas, SignedBlindedBeaconBlock, beacon_state, test_utils::TestRandom, }; -use derivative::Derivative; +use educe::Educe; use safe_arith::ArithError; use safe_arith::SafeArith; use serde::{Deserialize, Deserializer, Serialize}; @@ -105,15 +105,15 @@ impl From for Error { derive( Debug, Clone, - PartialEq, Serialize, Deserialize, - Derivative, + Educe, Decode, Encode, TestRandom, TreeHash, ), + educe(PartialEq), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", diff --git a/consensus/types/src/payload.rs b/consensus/types/src/payload.rs index 28dc10f9384..370c73ad0a6 100644 --- a/consensus/types/src/payload.rs +++ b/consensus/types/src/payload.rs @@ -1,5 +1,5 @@ use crate::{test_utils::TestRandom, *}; -use derivative::Derivative; +use educe::Educe; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use ssz::{Decode, Encode}; @@ -161,9 +161,9 @@ pub trait AbstractExecPayload: Decode, TestRandom, TreeHash, - Derivative, + Educe, ), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", @@ -173,8 +173,8 @@ pub trait AbstractExecPayload: ssz(struct_behaviour = "transparent"), ), ref_attributes( - derive(Debug, 
Derivative, TreeHash), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + derive(Debug, Educe, TreeHash), + educe(PartialEq, Hash(bound(E: EthSpec))), tree_hash(enum_behaviour = "transparent"), ), map_into(ExecutionPayload), @@ -187,8 +187,8 @@ pub trait AbstractExecPayload: derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive(Debug, Clone, Serialize, Deserialize, TreeHash, Derivative)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(bound = "E: EthSpec")] #[tree_hash(enum_behaviour = "transparent")] pub struct FullPayload { @@ -531,9 +531,9 @@ impl TryFrom> for FullPayload { Decode, TestRandom, TreeHash, - Derivative, + Educe, ), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec", deny_unknown_fields), cfg_attr( feature = "arbitrary", @@ -543,8 +543,8 @@ impl TryFrom> for FullPayload { ssz(struct_behaviour = "transparent"), ), ref_attributes( - derive(Debug, Derivative, TreeHash), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + derive(Debug, Educe, TreeHash), + educe(PartialEq, Hash(bound(E: EthSpec))), tree_hash(enum_behaviour = "transparent"), ), map_into(ExecutionPayloadHeader), @@ -556,8 +556,8 @@ impl TryFrom> for FullPayload { derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive(Debug, Clone, Serialize, Deserialize, TreeHash, Derivative)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(bound = "E: EthSpec")] #[tree_hash(enum_behaviour = "transparent")] pub struct BlindedPayload { diff --git a/consensus/types/src/runtime_var_list.rs b/consensus/types/src/runtime_var_list.rs index d57c65b1b71..e7b846029ef 100644 --- a/consensus/types/src/runtime_var_list.rs +++ 
b/consensus/types/src/runtime_var_list.rs @@ -1,5 +1,5 @@ use crate::ContextDeserialize; -use derivative::Derivative; +use educe::Educe; use serde::de::Error as DeError; use serde::{Deserialize, Deserializer, Serialize}; use ssz::Decode; @@ -44,8 +44,8 @@ use tree_hash::{Hash256, MerkleHasher, PackedEncoding, TreeHash, TreeHashType}; /// assert!(long.push(6).is_err()); /// /// ``` -#[derive(Clone, Serialize, Deserialize, Derivative)] -#[derivative(PartialEq, Eq, Hash(bound = "T: std::hash::Hash"))] +#[derive(Clone, Serialize, Deserialize, Educe)] +#[educe(PartialEq, Eq, Hash(bound(T: std::hash::Hash)))] #[serde(transparent)] pub struct RuntimeVariableList { vec: Vec, diff --git a/consensus/types/src/signed_beacon_block.rs b/consensus/types/src/signed_beacon_block.rs index 979b91e30d5..35d2faac483 100644 --- a/consensus/types/src/signed_beacon_block.rs +++ b/consensus/types/src/signed_beacon_block.rs @@ -1,7 +1,7 @@ use crate::beacon_block_body::{BLOB_KZG_COMMITMENTS_INDEX, format_kzg_commitments}; use crate::test_utils::TestRandom; use crate::*; -use derivative::Derivative; +use educe::Educe; use merkle_proof::MerkleTree; use serde::{Deserialize, Deserializer, Serialize}; use ssz_derive::{Decode, Encode}; @@ -51,10 +51,10 @@ impl From for Hash256 { Encode, Decode, TreeHash, - Derivative, + Educe, TestRandom ), - derivative(PartialEq, Hash(bound = "E: EthSpec")), + educe(PartialEq, Hash(bound(E: EthSpec))), serde(bound = "E: EthSpec, Payload: AbstractExecPayload"), cfg_attr( feature = "arbitrary", @@ -71,8 +71,8 @@ impl From for Hash256 { derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec, Payload: AbstractExecPayload") )] -#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Derivative)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(untagged)] #[serde(bound = "E: EthSpec, Payload: AbstractExecPayload")] 
#[tree_hash(enum_behaviour = "transparent")] diff --git a/consensus/types/src/sync_aggregate.rs b/consensus/types/src/sync_aggregate.rs index 7a4ef8f026a..ba6d840a526 100644 --- a/consensus/types/src/sync_aggregate.rs +++ b/consensus/types/src/sync_aggregate.rs @@ -2,7 +2,7 @@ use crate::consts::altair::SYNC_COMMITTEE_SUBNET_COUNT; use crate::context_deserialize; use crate::test_utils::TestRandom; use crate::{AggregateSignature, BitVector, EthSpec, ForkName, SyncCommitteeContribution}; -use derivative::Derivative; +use educe::Educe; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; @@ -26,10 +26,8 @@ impl From for Error { derive(arbitrary::Arbitrary), arbitrary(bound = "E: EthSpec") )] -#[derive( - Debug, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, Derivative, -)] -#[derivative(PartialEq, Hash(bound = "E: EthSpec"))] +#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] #[serde(bound = "E: EthSpec")] #[context_deserialize(ForkName)] pub struct SyncAggregate { diff --git a/crypto/kzg/Cargo.toml b/crypto/kzg/Cargo.toml index 432fcc1792e..5a36eb74f70 100644 --- a/crypto/kzg/Cargo.toml +++ b/crypto/kzg/Cargo.toml @@ -8,7 +8,7 @@ edition = "2021" [dependencies] arbitrary = { workspace = true } c-kzg = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } ethereum_hashing = { workspace = true } ethereum_serde_utils = { workspace = true } ethereum_ssz = { workspace = true } diff --git a/crypto/kzg/src/kzg_commitment.rs b/crypto/kzg/src/kzg_commitment.rs index cfab09f63e7..5a5e689429e 100644 --- a/crypto/kzg/src/kzg_commitment.rs +++ b/crypto/kzg/src/kzg_commitment.rs @@ -1,5 +1,5 @@ use c_kzg::BYTES_PER_COMMITMENT; -use derivative::Derivative; +use educe::Educe; use ethereum_hashing::hash_fixed; use serde::de::{Deserialize, Deserializer}; use serde::ser::{Serialize, 
Serializer}; @@ -11,8 +11,8 @@ use tree_hash::{Hash256, PackedEncoding, TreeHash}; pub const VERSIONED_HASH_VERSION_KZG: u8 = 0x01; -#[derive(Derivative, Clone, Copy, Encode, Decode)] -#[derivative(PartialEq, Eq, Hash)] +#[derive(Educe, Clone, Copy, Encode, Decode)] +#[educe(PartialEq, Eq, Hash)] #[ssz(struct_behaviour = "transparent")] pub struct KzgCommitment(pub [u8; c_kzg::BYTES_PER_COMMITMENT]); diff --git a/slasher/Cargo.toml b/slasher/Cargo.toml index b2f6eca9c37..cca55bcef88 100644 --- a/slasher/Cargo.toml +++ b/slasher/Cargo.toml @@ -14,7 +14,7 @@ portable = ["types/portable"] [dependencies] bincode = { workspace = true } byteorder = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } filesystem = { workspace = true } diff --git a/slasher/src/database/redb_impl.rs b/slasher/src/database/redb_impl.rs index 4198e826455..570d7df1318 100644 --- a/slasher/src/database/redb_impl.rs +++ b/slasher/src/database/redb_impl.rs @@ -7,7 +7,7 @@ use crate::{ *, }, }; -use derivative::Derivative; +use educe::Educe; use redb::{ReadableTable, TableDefinition}; use std::{borrow::Cow, path::PathBuf}; @@ -23,18 +23,18 @@ pub struct Database<'env> { _phantom: PhantomData<&'env ()>, } -#[derive(Derivative)] -#[derivative(Debug)] +#[derive(Educe)] +#[educe(Debug)] pub struct RwTransaction<'env> { - #[derivative(Debug = "ignore")] + #[educe(Debug(ignore))] txn: redb::WriteTransaction, _phantom: PhantomData<&'env ()>, } -#[derive(Derivative)] -#[derivative(Debug)] +#[derive(Educe)] +#[educe(Debug)] pub struct Cursor<'env> { - #[derivative(Debug = "ignore")] + #[educe(Debug(ignore))] txn: &'env redb::WriteTransaction, db: &'env Database<'env>, current_key: Option>, diff --git a/testing/ef_tests/Cargo.toml b/testing/ef_tests/Cargo.toml index d9afce0efe7..581785e2a97 100644 --- a/testing/ef_tests/Cargo.toml +++ b/testing/ef_tests/Cargo.toml @@ -17,7 +17,7 @@ beacon_chain = 
{ workspace = true } bls = { workspace = true } compare_fields = { workspace = true } context_deserialize = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } eth2_network_config = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } diff --git a/testing/ef_tests/src/handler.rs b/testing/ef_tests/src/handler.rs index b49ab2d90d4..a5b2ffada37 100644 --- a/testing/ef_tests/src/handler.rs +++ b/testing/ef_tests/src/handler.rs @@ -2,7 +2,7 @@ use crate::cases::{self, Case, Cases, EpochTransition, LoadCase, Operation}; use crate::type_name::TypeName; use crate::{FeatureName, type_name}; use context_deserialize::ContextDeserialize; -use derivative::Derivative; +use educe::Educe; use std::fs::{self, DirEntry}; use std::marker::PhantomData; use std::path::PathBuf; @@ -154,8 +154,8 @@ pub trait Handler { macro_rules! bls_eth_handler { ($runner_name: ident, $case_name:ident, $handler_name:expr) => { - #[derive(Derivative)] - #[derivative(Default(bound = ""))] + #[derive(Educe)] + #[educe(Default)] pub struct $runner_name; impl Handler for $runner_name { @@ -174,8 +174,8 @@ macro_rules! bls_eth_handler { macro_rules! bls_handler { ($runner_name: ident, $case_name:ident, $handler_name:expr) => { - #[derive(Derivative)] - #[derivative(Default(bound = ""))] + #[derive(Educe)] + #[educe(Default)] pub struct $runner_name; impl Handler for $runner_name { @@ -335,8 +335,8 @@ impl SszStaticHandler { } /// Handler for SSZ types that implement `CachedTreeHash`. -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct SszStaticTHCHandler(PhantomData<(T, E)>); /// Handler for SSZ types that don't implement `ssz::Decode`. 
@@ -436,8 +436,8 @@ where } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct ShufflingHandler(PhantomData); impl Handler for ShufflingHandler { @@ -460,8 +460,8 @@ impl Handler for ShufflingHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct SanityBlocksHandler(PhantomData); impl Handler for SanityBlocksHandler { @@ -486,8 +486,8 @@ impl Handler for SanityBlocksHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct SanitySlotsHandler(PhantomData); impl Handler for SanitySlotsHandler { @@ -511,8 +511,8 @@ impl Handler for SanitySlotsHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct RandomHandler(PhantomData); impl Handler for RandomHandler { @@ -531,8 +531,8 @@ impl Handler for RandomHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct EpochProcessingHandler(PhantomData<(E, T)>); impl> Handler for EpochProcessingHandler { @@ -581,8 +581,8 @@ impl Handler for RewardsHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct ForkHandler(PhantomData); impl Handler for ForkHandler { @@ -601,8 +601,8 @@ impl Handler for ForkHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct TransitionHandler(PhantomData); impl Handler for TransitionHandler { @@ -621,8 +621,8 @@ impl Handler for TransitionHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct FinalityHandler(PhantomData); impl Handler for FinalityHandler { @@ -705,8 +705,8 @@ impl Handler for ForkChoiceHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] 
+#[educe(Default)] pub struct OptimisticSyncHandler(PhantomData); impl Handler for OptimisticSyncHandler { @@ -734,8 +734,8 @@ impl Handler for OptimisticSyncHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct GenesisValidityHandler(PhantomData); impl Handler for GenesisValidityHandler { @@ -754,8 +754,8 @@ impl Handler for GenesisValidityHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct GenesisInitializationHandler(PhantomData); impl Handler for GenesisInitializationHandler { @@ -774,8 +774,8 @@ impl Handler for GenesisInitializationHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGBlobToKZGCommitmentHandler(PhantomData); impl Handler for KZGBlobToKZGCommitmentHandler { @@ -794,8 +794,8 @@ impl Handler for KZGBlobToKZGCommitmentHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGComputeBlobKZGProofHandler(PhantomData); impl Handler for KZGComputeBlobKZGProofHandler { @@ -814,8 +814,8 @@ impl Handler for KZGComputeBlobKZGProofHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGComputeKZGProofHandler(PhantomData); impl Handler for KZGComputeKZGProofHandler { @@ -834,8 +834,8 @@ impl Handler for KZGComputeKZGProofHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGVerifyBlobKZGProofHandler(PhantomData); impl Handler for KZGVerifyBlobKZGProofHandler { @@ -854,8 +854,8 @@ impl Handler for KZGVerifyBlobKZGProofHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGVerifyBlobKZGProofBatchHandler(PhantomData); impl Handler for KZGVerifyBlobKZGProofBatchHandler { @@ -874,8 +874,8 @@ 
impl Handler for KZGVerifyBlobKZGProofBatchHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGVerifyKZGProofHandler(PhantomData); impl Handler for KZGVerifyKZGProofHandler { @@ -894,8 +894,8 @@ impl Handler for KZGVerifyKZGProofHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct GetCustodyGroupsHandler(PhantomData); impl Handler for GetCustodyGroupsHandler { @@ -914,8 +914,8 @@ impl Handler for GetCustodyGroupsHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct ComputeColumnsForCustodyGroupHandler(PhantomData); impl Handler for ComputeColumnsForCustodyGroupHandler { @@ -934,8 +934,8 @@ impl Handler for ComputeColumnsForCustodyGroupHandler } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGComputeCellsHandler(PhantomData); impl Handler for KZGComputeCellsHandler { @@ -954,8 +954,8 @@ impl Handler for KZGComputeCellsHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGComputeCellsAndKZGProofHandler(PhantomData); impl Handler for KZGComputeCellsAndKZGProofHandler { @@ -974,8 +974,8 @@ impl Handler for KZGComputeCellsAndKZGProofHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGVerifyCellKZGProofBatchHandler(PhantomData); impl Handler for KZGVerifyCellKZGProofBatchHandler { @@ -994,8 +994,8 @@ impl Handler for KZGVerifyCellKZGProofBatchHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KZGRecoverCellsAndKZGProofHandler(PhantomData); impl Handler for KZGRecoverCellsAndKZGProofHandler { @@ -1014,8 +1014,8 @@ impl Handler for KZGRecoverCellsAndKZGProofHandler { } } -#[derive(Derivative)] 
-#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct KzgInclusionMerkleProofValidityHandler(PhantomData); impl Handler for KzgInclusionMerkleProofValidityHandler { @@ -1038,8 +1038,8 @@ impl Handler for KzgInclusionMerkleProofValidityHandler(PhantomData); impl Handler for MerkleProofValidityHandler { @@ -1062,8 +1062,8 @@ impl Handler for MerkleProofValidityHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct LightClientUpdateHandler(PhantomData); impl Handler for LightClientUpdateHandler { @@ -1087,8 +1087,8 @@ impl Handler for LightClientUpdateHandler { } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct OperationsHandler(PhantomData<(E, O)>); impl> Handler for OperationsHandler { @@ -1107,8 +1107,8 @@ impl> Handler for OperationsHandler } } -#[derive(Derivative)] -#[derivative(Default(bound = ""))] +#[derive(Educe)] +#[educe(Default)] pub struct SszGenericHandler(PhantomData); impl Handler for SszGenericHandler { diff --git a/validator_manager/Cargo.toml b/validator_manager/Cargo.toml index 9192f0e86b0..6ef179fbe99 100644 --- a/validator_manager/Cargo.toml +++ b/validator_manager/Cargo.toml @@ -8,7 +8,7 @@ edition = { workspace = true } account_utils = { workspace = true } clap = { workspace = true } clap_utils = { workspace = true } -derivative = { workspace = true } +educe = { workspace = true } environment = { workspace = true } eth2 = { workspace = true } eth2_network_config = { workspace = true } diff --git a/validator_manager/src/import_validators.rs b/validator_manager/src/import_validators.rs index 5f5f049ed97..24917f7d1b4 100644 --- a/validator_manager/src/import_validators.rs +++ b/validator_manager/src/import_validators.rs @@ -3,7 +3,7 @@ use crate::DumpConfig; use account_utils::eth2_keystore::Keystore; use clap::{Arg, ArgAction, ArgMatches, Command}; use clap_utils::FLAG_HEADER; -use 
derivative::Derivative; +use educe::Educe; use eth2::lighthouse_vc::types::KeystoreJsonStr; use eth2::{SensitiveUrl, lighthouse_vc::std_types::ImportKeystoreStatus}; use serde::{Deserialize, Serialize}; @@ -159,15 +159,15 @@ pub fn cli_app() -> Command { ) } -#[derive(Clone, PartialEq, Serialize, Deserialize, Derivative)] -#[derivative(Debug)] +#[derive(Clone, PartialEq, Serialize, Deserialize, Educe)] +#[educe(Debug)] pub struct ImportConfig { pub validators_file_path: Option, pub keystore_file_path: Option, pub vc_url: SensitiveUrl, pub vc_token_path: PathBuf, pub ignore_duplicates: bool, - #[derivative(Debug = "ignore")] + #[educe(Debug(ignore))] pub password: Option>, pub fee_recipient: Option
, pub gas_limit: Option, From 1bd4ac2113b98df51bad447e6f05fa35f52b48cd Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Mon, 10 Nov 2025 15:06:42 +1100 Subject: [PATCH 22/74] Fix flaky reconstruction test (#8321) FIx flaky tests that depends on timing. Previously the test processes all 128 columns and expect reconstruction to happen after all columns are processed. There is a race here, and reconstruction could be triggered before all columns are processed. I've updated the tests to process 64 columns, just enough for reconstruction and wait for 50ms for reconstruction to be triggered. This PR requires the change made in https://github.com/sigp/lighthouse/pull/8194 for the test to pass consistently (blob count set to 1 for all blocks instead of random blob count between 0..max) Co-Authored-By: Jimmy Chen Co-Authored-By: Jimmy Chen --- .../src/network_beacon_processor/tests.rs | 33 ++++++++----------- 1 file changed, 13 insertions(+), 20 deletions(-) diff --git a/beacon_node/network/src/network_beacon_processor/tests.rs b/beacon_node/network/src/network_beacon_processor/tests.rs index a9794cb5c42..d83059ad278 100644 --- a/beacon_node/network/src/network_beacon_processor/tests.rs +++ b/beacon_node/network/src/network_beacon_processor/tests.rs @@ -916,36 +916,29 @@ async fn data_column_reconstruction_at_deadline() { .start_of(rig.next_block.slot()) .unwrap(); - rig.chain - .slot_clock - .set_current_time(slot_start - rig.chain.spec.maximum_gossip_clock_disparity()); - - assert_eq!( - rig.chain.slot().unwrap(), - rig.next_block.slot() - 1, - "chain should be at the correct slot" - ); - // We push the slot clock to 3 seconds into the slot, this is the deadline to trigger reconstruction. 
+ let slot_duration = rig.chain.slot_clock.slot_duration().as_millis() as u64; + let reconstruction_deadline_millis = + (slot_duration * RECONSTRUCTION_DEADLINE.0) / RECONSTRUCTION_DEADLINE.1; rig.chain .slot_clock - .set_current_time(slot_start + Duration::from_secs(3)); + .set_current_time(slot_start + Duration::from_millis(reconstruction_deadline_millis)); - let num_data_columns = rig.next_data_columns.as_ref().map(|c| c.len()).unwrap_or(0); - for i in 0..num_data_columns { + let min_columns_for_reconstruction = E::number_of_columns() / 2; + for i in 0..min_columns_for_reconstruction { rig.enqueue_gossip_data_columns(i); rig.assert_event_journal_completes(&[WorkType::GossipDataColumnSidecar]) .await; } // Since we're at the reconstruction deadline, reconstruction should be triggered immediately - if num_data_columns > 0 { - rig.assert_event_journal_completes_with_timeout( - &[WorkType::ColumnReconstruction], - Duration::from_millis(50), - ) - .await; - } + rig.assert_event_journal_with_timeout( + &[WorkType::ColumnReconstruction.into()], + Duration::from_millis(50), + false, + false, + ) + .await; } // Test the column reconstruction is delayed for columns that arrive for a previous slot. From 93b8f4686d34b5793590cad47a7b4507316b9e53 Mon Sep 17 00:00:00 2001 From: Mac L Date: Mon, 10 Nov 2025 10:25:59 +0400 Subject: [PATCH 23/74] Remove `ethers-core` from `execution_layer` (#8149) #6022 Use `alloy_rpc_types::Transaction` to replace the `ethers_core::Transaction` inside the execution block generator. 
Co-Authored-By: Mac L --- Cargo.lock | 478 ++++++++++++++++-- Cargo.toml | 7 +- beacon_node/execution_layer/Cargo.toml | 2 +- beacon_node/execution_layer/src/lib.rs | 1 - .../test_utils/execution_block_generator.rs | 8 +- consensus/types/Cargo.toml | 2 +- 6 files changed, 458 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e045c8697fb..080221e5f28 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -140,21 +140,43 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy-consensus" -version = "0.14.0" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2179ba839ac532f50279f5da2a6c5047f791f03f6f808b4dfab11327b97902f" +checksum = "3abecb92ba478a285fbf5689100dbafe4003ded4a09bf4b5ef62cca87cd4f79e" dependencies = [ "alloy-eips", "alloy-primitives", "alloy-rlp", + "alloy-serde", "alloy-trie", + "alloy-tx-macros", "auto_impl 1.2.1", + "c-kzg", "derive_more 2.0.1", "either", + "k256 0.13.4", "once_cell", + "rand 0.8.5", + "secp256k1", + "serde", + "serde_json", "thiserror 2.0.12", ] +[[package]] +name = "alloy-consensus-any" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e864d4f11d1fb8d3ac2fd8f3a15f1ee46d55ec6d116b342ed1b2cb737f25894" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "serde", +] + [[package]] name = "alloy-eip2124" version = "0.2.0" @@ -164,6 +186,7 @@ dependencies = [ "alloy-primitives", "alloy-rlp", "crc", + "serde", "thiserror 2.0.12", ] @@ -175,52 +198,71 @@ checksum = "dbe3e16484669964c26ac48390245d84c410b1a5f968976076c17184725ef235" dependencies = [ "alloy-primitives", "alloy-rlp", + "serde", ] [[package]] name = "alloy-eip7702" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "804cefe429015b4244966c006d25bda5545fa9db5990e9c9079faf255052f50a" +checksum = 
"9d4769c6ffddca380b0070d71c8b7f30bed375543fe76bb2f74ec0acf4b7cd16" dependencies = [ "alloy-primitives", "alloy-rlp", + "serde", "thiserror 2.0.12", ] [[package]] name = "alloy-eips" -version = "0.14.0" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "609515c1955b33af3d78d26357540f68c5551a90ef58fd53def04f2aa074ec43" +checksum = "07d9a64522a0db6ebcc4ff9c904e329e77dd737c2c25d30f1bdc32ca6c6ce334" dependencies = [ "alloy-eip2124", "alloy-eip2930", "alloy-eip7702", "alloy-primitives", "alloy-rlp", + "alloy-serde", "auto_impl 1.2.1", + "c-kzg", "derive_more 2.0.1", "either", + "serde", + "serde_with", "sha2 0.10.8", + "thiserror 2.0.12", +] + +[[package]] +name = "alloy-network-primitives" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219dccd2cf753a43bd9b0fbb7771a16927ffdb56e43e3a15755bef1a74d614aa" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-serde", + "serde", ] [[package]] name = "alloy-primitives" -version = "1.0.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70b98b99c1dcfbe74d7f0b31433ff215e7d1555e367d90e62db904f3c9d4ff53" +checksum = "355bf68a433e0fd7f7d33d5a9fc2583fde70bf5c530f63b80845f8da5505cf28" dependencies = [ "alloy-rlp", "arbitrary", "bytes", "cfg-if", "const-hex", - "derive_arbitrary", "derive_more 2.0.1", - "foldhash", + "foldhash 0.2.0", "getrandom 0.3.1", - "hashbrown 0.15.2", + "hashbrown 0.16.0", "indexmap 2.8.0", "itoa", "k256 0.13.4", @@ -238,9 +280,9 @@ dependencies = [ [[package]] name = "alloy-rlp" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6c1d995bff8d011f7cd6c81820d51825e6e06d6db73914c1630ecf544d83d6" +checksum = "5f70d83b765fdc080dbcd4f4db70d8d23fe4761f2f02ebfa9146b833900634b4" dependencies = [ "alloy-rlp-derive", "arrayvec", @@ -249,30 +291,132 @@ dependencies = [ [[package]] 
name = "alloy-rlp-derive" -version = "0.3.11" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "alloy-rpc-types-eth" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0185f68a0f8391ab996d335a887087d7ccdbc97952efab3516f6307d456ba2cd" +dependencies = [ + "alloy-consensus", + "alloy-consensus-any", + "alloy-eips", + "alloy-network-primitives", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "alloy-sol-types", + "itertools 0.14.0", + "serde", + "serde_json", + "thiserror 2.0.12", +] + +[[package]] +name = "alloy-serde" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "596cfa360922ba9af901cc7370c68640e4f72adb6df0ab064de32f21fec498d7" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-sol-macro" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3ce480400051b5217f19d6e9a82d9010cdde20f1ae9c00d53591e4a1afbb312" +dependencies = [ + "alloy-sol-macro-expander", + "alloy-sol-macro-input", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.100", +] + +[[package]] +name = "alloy-sol-macro-expander" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d792e205ed3b72f795a8044c52877d2e6b6e9b1d13f431478121d8d4eaa9028" +dependencies = [ + "alloy-sol-macro-input", + "const-hex", + "heck 0.5.0", + "indexmap 2.8.0", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.100", + "syn-solidity", + "tiny-keccak", +] + +[[package]] +name = "alloy-sol-macro-input" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a40e1ef334153322fd878d07e86af7a529bcb86b2439525920a88eba87bcf943" 
+checksum = "0bd1247a8f90b465ef3f1207627547ec16940c35597875cdc09c49d58b19693c" dependencies = [ + "const-hex", + "dunce", + "heck 0.5.0", + "macro-string", "proc-macro2", "quote", "syn 2.0.100", + "syn-solidity", +] + +[[package]] +name = "alloy-sol-types" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70319350969a3af119da6fb3e9bddb1bce66c9ea933600cb297c8b1850ad2a3c" +dependencies = [ + "alloy-primitives", + "alloy-sol-macro", ] [[package]] name = "alloy-trie" -version = "0.8.1" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "983d99aa81f586cef9dae38443245e585840fcf0fc58b09aee0b1f27aed1d500" +checksum = "e3412d52bb97c6c6cc27ccc28d4e6e8cf605469101193b50b0bd5813b1f990b5" dependencies = [ "alloy-primitives", "alloy-rlp", "arrayvec", "derive_more 2.0.1", "nybbles", + "serde", "smallvec", "tracing", ] +[[package]] +name = "alloy-tx-macros" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab54221eccefa254ce9f65b079c097b1796e48c21c7ce358230f8988d75392fb" +dependencies = [ + "darling 0.21.3", + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -412,6 +556,26 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ark-ff" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" +dependencies = [ + "ark-ff-asm 0.5.0", + "ark-ff-macros 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "educe", + "itertools 0.13.0", + "num-bigint", + "num-traits", + "paste", + "zeroize", +] + [[package]] name = "ark-ff-asm" version = "0.3.0" @@ -432,6 +596,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ark-ff-asm" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" +dependencies = [ + "quote", + "syn 2.0.100", +] + [[package]] name = "ark-ff-macros" version = "0.3.0" @@ -457,6 +631,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "ark-ff-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "ark-serialize" version = "0.3.0" @@ -478,6 +665,18 @@ dependencies = [ "num-bigint", ] +[[package]] +name = "ark-serialize" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" +dependencies = [ + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "num-bigint", +] + [[package]] name = "ark-std" version = "0.3.0" @@ -498,6 +697,16 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "ark-std" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + [[package]] name = "arraydeque" version = "0.5.1" @@ -515,6 +724,9 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] [[package]] name = "asn1-rs" @@ -1043,6 +1255,22 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" +[[package]] +name = "bitcoin-io" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b47c4ab7a93edb0c7198c5535ed9b52b63095f4e9b45279c6736cec4b856baf" + +[[package]] +name = 
"bitcoin_hashes" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb18c03d0db0247e147a21a6faafd5a7eb851c743db062de72018b6b7e8e4d16" +dependencies = [ + "bitcoin-io", + "hex-conservative", +] + [[package]] name = "bitflags" version = "1.3.2" @@ -1307,6 +1535,8 @@ dependencies = [ "glob", "hex", "libc", + "once_cell", + "serde", ] [[package]] @@ -2075,6 +2305,16 @@ dependencies = [ "darling_macro 0.20.10", ] +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core 0.21.3", + "darling_macro 0.21.3", +] + [[package]] name = "darling_core" version = "0.13.4" @@ -2103,6 +2343,21 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "serde", + "strsim 0.11.1", + "syn 2.0.100", +] + [[package]] name = "darling_macro" version = "0.13.4" @@ -2125,6 +2380,17 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +dependencies = [ + "darling_core 0.21.3", + "quote", + "syn 2.0.100", +] + [[package]] name = "darwin-libproc" version = "0.1.2" @@ -2471,6 +2737,12 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6add3b8cff394282be81f3fc1a0605db594ed69890078ca6e2cab1c408bcf04" +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + [[package]] 
name = "ecdsa" version = "0.14.8" @@ -2493,6 +2765,7 @@ dependencies = [ "digest 0.10.7", "elliptic-curve 0.13.8", "rfc6979 0.4.0", + "serdect", "signature 2.2.0", "spki 0.7.3", ] @@ -2602,6 +2875,9 @@ name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] [[package]] name = "ekzg-bls12-381" @@ -2724,6 +3000,7 @@ dependencies = [ "pkcs8 0.10.2", "rand_core 0.6.4", "sec1 0.7.3", + "serdect", "subtle", "zeroize", ] @@ -3346,13 +3623,13 @@ dependencies = [ "alloy-consensus", "alloy-primitives", "alloy-rlp", + "alloy-rpc-types-eth", "arc-swap", "builder_client", "bytes", "eth2", "ethereum_serde_utils", "ethereum_ssz", - "ethers-core", "fixed_bytes", "fork_choice", "hash-db", @@ -3553,6 +3830,12 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "foreign-types" version = "0.3.2" @@ -3987,7 +4270,16 @@ checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.1.4", +] + +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +dependencies = [ + "foldhash 0.2.0", "serde", ] @@ -4113,6 +4405,15 @@ dependencies = [ "serde", ] +[[package]] +name = "hex-conservative" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5313b072ce3c597065a808dbf612c4c8e8590bdbf8b579508bf7a762c5eae6cd" +dependencies = [ + "arrayvec", +] + 
[[package]] name = "hex_fmt" version = "0.3.0" @@ -4974,6 +5275,7 @@ dependencies = [ "ecdsa 0.16.9", "elliptic-curve 0.13.8", "once_cell", + "serdect", "sha2 0.10.8", "signature 2.2.0", ] @@ -5842,6 +6144,17 @@ dependencies = [ "libc", ] +[[package]] +name = "macro-string" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "malloc_utils" version = "0.1.0" @@ -6525,11 +6838,13 @@ dependencies = [ [[package]] name = "nybbles" -version = "0.3.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8983bb634df7248924ee0c4c3a749609b5abcb082c28fffe3254b3eb3602b307" +checksum = "2c4b5ecbd0beec843101bffe848217f770e8b8da81d8355b7d6e226f2199b3dc" dependencies = [ - "const-hex", + "cfg-if", + "ruint", + "serde", "smallvec", ] @@ -7227,6 +7542,28 @@ dependencies = [ "version_check", ] +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "proc-macro2" version = "1.0.94" @@ -7310,9 +7647,9 @@ dependencies = [ [[package]] name = "proptest-derive" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" +checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" dependencies = [ 
"proc-macro2", "quote", @@ -7542,6 +7879,7 @@ dependencies = [ "libc", "rand_chacha 0.3.1", "rand_core 0.6.4", + "serde", ] [[package]] @@ -7926,14 +8264,15 @@ dependencies = [ [[package]] name = "ruint" -version = "1.14.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78a46eb779843b2c4f21fac5773e25d6d5b7c8f0922876c91541790d2ca27eef" +checksum = "a68df0380e5c9d20ce49534f292a36a7514ae21350726efe1865bdb1fa91d278" dependencies = [ "alloy-rlp", "arbitrary", "ark-ff 0.3.0", "ark-ff 0.4.2", + "ark-ff 0.5.0", "bytes", "fastrlp 0.3.1", "fastrlp 0.4.0", @@ -7947,7 +8286,7 @@ dependencies = [ "rand 0.9.0", "rlp", "ruint-macro", - "serde", + "serde_core", "valuable", "zeroize", ] @@ -8373,10 +8712,32 @@ dependencies = [ "der 0.7.9", "generic-array 0.14.7", "pkcs8 0.10.2", + "serdect", "subtle", "zeroize", ] +[[package]] +name = "secp256k1" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" +dependencies = [ + "bitcoin_hashes", + "rand 0.8.5", + "secp256k1-sys", + "serde", +] + +[[package]] +name = "secp256k1-sys" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9" +dependencies = [ + "cc", +] + [[package]] name = "security-framework" version = "2.11.1" @@ -8458,10 +8819,11 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] @@ -8496,11 +8858,20 @@ dependencies = [ "serde_urlencoded", ] +[[package]] +name = "serde_core" +version = "1.0.228" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -8542,6 +8913,28 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "3.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" +dependencies = [ + "serde_core", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "3.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" +dependencies = [ + "darling 0.21.3", + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "serde_yaml" version = "0.9.34+deprecated" @@ -8555,6 +8948,16 @@ dependencies = [ "unsafe-libyaml", ] +[[package]] +name = "serdect" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177" +dependencies = [ + "base16ct 0.2.0", + "serde", +] + [[package]] name = "sha1" version = "0.10.6" @@ -8826,6 +9229,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" dependencies = [ "arbitrary", + "serde", ] [[package]] @@ -9075,6 +9479,18 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "syn-solidity" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ff790eb176cc81bb8936aed0f7b9f14fc4670069a2d371b3e3b0ecce908b2cb3" +dependencies = [ + "paste", + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "sync_wrapper" version = "0.1.2" diff --git a/Cargo.toml b/Cargo.toml index 15fea466f0e..03116b3db18 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -97,9 +97,10 @@ version = "8.0.0" [workspace.dependencies] account_utils = { path = "common/account_utils" } -alloy-consensus = { version = "0.14.0", default-features = false } -alloy-primitives = { version = "1.0", features = ["rlp", "getrandom"] } -alloy-rlp = "0.3.4" +alloy-consensus = { version = "=1.0.42", default-features = false } +alloy-primitives = { version = "=1.4.1", default-features = false, features = ["rlp", "getrandom"] } +alloy-rlp = { version = "=0.3.12", default-features = false } +alloy-rpc-types-eth = { version = "=1.0.42", default-features = false, features = ["serde"] } anyhow = "1" arbitrary = { version = "1", features = ["derive"] } async-channel = "1.9.0" diff --git a/beacon_node/execution_layer/Cargo.toml b/beacon_node/execution_layer/Cargo.toml index f56159c7b55..43b2e1dd751 100644 --- a/beacon_node/execution_layer/Cargo.toml +++ b/beacon_node/execution_layer/Cargo.toml @@ -8,13 +8,13 @@ edition = { workspace = true } alloy-consensus = { workspace = true } alloy-primitives = { workspace = true } alloy-rlp = { workspace = true } +alloy-rpc-types-eth = { workspace = true } arc-swap = "1.6.0" builder_client = { path = "../builder_client" } bytes = { workspace = true } eth2 = { workspace = true } ethereum_serde_utils = { workspace = true } ethereum_ssz = { workspace = true } -ethers-core = { workspace = true } fixed_bytes = { workspace = true } fork_choice = { workspace = true } hash-db = "0.15.2" diff --git a/beacon_node/execution_layer/src/lib.rs b/beacon_node/execution_layer/src/lib.rs index 4175abf7240..c2a31c2699b 100644 --- a/beacon_node/execution_layer/src/lib.rs +++ b/beacon_node/execution_layer/src/lib.rs @@ -18,7 +18,6 @@ use 
engines::{Engine, EngineError}; pub use engines::{EngineState, ForkchoiceState}; use eth2::types::{BlobsBundle, FullPayloadContents}; use eth2::types::{ForkVersionedResponse, builder_bid::SignedBuilderBid}; -use ethers_core::types::Transaction as EthersTransaction; use fixed_bytes::UintExtended; use fork_choice::ForkchoiceUpdateParameters; use logging::crit; diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index 1c1e3074174..7e0033d732c 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -1,4 +1,3 @@ -use crate::EthersTransaction; use crate::engine_api::{ ExecutionBlock, PayloadAttributes, PayloadId, PayloadStatusV1, PayloadStatusV1Status, json_structures::{ @@ -6,6 +5,8 @@ use crate::engine_api::{ }, }; use crate::engines::ForkchoiceState; +use alloy_consensus::TxEnvelope; +use alloy_rpc_types_eth::Transaction as AlloyTransaction; use eth2::types::BlobsBundle; use kzg::{Kzg, KzgCommitment, KzgProof}; use parking_lot::Mutex; @@ -833,7 +834,7 @@ pub fn generate_blobs( pub fn static_valid_tx() -> Result, String> { // This is a real transaction hex encoded, but we don't care about the contents of the transaction. 
- let transaction: EthersTransaction = serde_json::from_str( + let transaction: AlloyTransaction = serde_json::from_str( r#"{ "blockHash":"0x1d59ff54b1eb26b013ce3cb5fc9dab3705b415a67127a003c3e61eb445bb8df2", "blockNumber":"0x5daf3b", @@ -852,7 +853,8 @@ pub fn static_valid_tx() -> Result(transaction.into()).to_vec()) .map_err(|e| format!("Failed to convert transaction to SSZ: {:?}", e)) } diff --git a/consensus/types/Cargo.toml b/consensus/types/Cargo.toml index 4e04fa95daa..1f527c0de8a 100644 --- a/consensus/types/Cargo.toml +++ b/consensus/types/Cargo.toml @@ -25,7 +25,7 @@ portable = ["bls/supranational-portable"] [dependencies] alloy-primitives = { workspace = true } -alloy-rlp = { version = "0.3.4", features = ["derive"] } +alloy-rlp = { workspace = true, features = ["derive"] } arbitrary = { workspace = true, features = ["derive"], optional = true } bls = { workspace = true } compare_fields = { workspace = true } From 22dea2bc322ebc674d5f196bde55a15fc368efa9 Mon Sep 17 00:00:00 2001 From: Lion - dapplion <35266934+dapplion@users.noreply.github.com> Date: Tue, 11 Nov 2025 01:40:32 -0300 Subject: [PATCH 24/74] Include block root in publish block logs (#8111) Debugging https://github.com/sigp/lighthouse/issues/8104 it would have been helpful to quickly see in the logs that a specific block was submitted into the HTTP API. Because we want to optimize the block root computation we don't include it in the logs, and just log the block slot. I believe we can take a minute performance hit to have the block root in all the logs during block publishing. 
Co-Authored-By: dapplion <35266934+dapplion@users.noreply.github.com> Co-Authored-By: Jimmy Chen --- beacon_node/http_api/src/publish_blocks.rs | 23 ++++++++++------------ 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/beacon_node/http_api/src/publish_blocks.rs b/beacon_node/http_api/src/publish_blocks.rs index 05a4a4b7a4a..bfe41c8706c 100644 --- a/beacon_node/http_api/src/publish_blocks.rs +++ b/beacon_node/http_api/src/publish_blocks.rs @@ -25,7 +25,7 @@ use std::sync::Arc; use std::sync::atomic::{AtomicBool, Ordering}; use std::time::Duration; use tokio::sync::mpsc::UnboundedSender; -use tracing::{Span, debug, debug_span, error, info, instrument, warn}; +use tracing::{Span, debug, debug_span, error, field, info, instrument, warn}; use tree_hash::TreeHash; use types::{ AbstractExecPayload, BeaconBlockRef, BlobSidecar, BlobsList, BlockImportSource, @@ -80,7 +80,7 @@ impl ProvenancedBlock> name = SPAN_PUBLISH_BLOCK, level = "info", skip_all, - fields(?block_root, ?validation_level, provenance = tracing::field::Empty) + fields(block_root = field::Empty, ?validation_level, block_slot = field::Empty, provenance = field::Empty) )] pub async fn publish_block>( block_root: Option, @@ -103,12 +103,16 @@ pub async fn publish_block>( } else { "builder" }; - let current_span = Span::current(); - current_span.record("provenance", provenance); let block = unverified_block.inner_block(); + let block_root = block_root.unwrap_or_else(|| block.canonical_root()); + + let current_span = Span::current(); + current_span.record("provenance", provenance); + current_span.record("block_root", field::display(block_root)); + current_span.record("block_slot", field::display(block.slot())); - debug!(slot = %block.slot(), "Signed block received in HTTP API"); + debug!("Signed block received in HTTP API"); /* actually publish a block */ let publish_block_p2p = move |block: Arc>, @@ -152,12 +156,6 @@ pub async fn publish_block>( // Gossip verify the block and blobs/data columns 
separately. let gossip_verified_block_result = unverified_block.into_gossip_verified_block(&chain); - let block_root = block_root.unwrap_or_else(|| { - gossip_verified_block_result.as_ref().map_or_else( - |_| block.canonical_root(), - |verified_block| verified_block.block_root, - ) - }); let should_publish_block = gossip_verified_block_result.is_ok(); if BroadcastValidation::Gossip == validation_level && should_publish_block { @@ -309,9 +307,8 @@ pub async fn publish_block>( .into_response()) } } - Err(BlockError::DuplicateImportStatusUnknown(root)) => { + Err(BlockError::DuplicateImportStatusUnknown(_)) => { debug!( - block_root = ?root, slot = %block.slot(), "Block previously seen" ); From b3df0d1985671e189ec9576c59e4df508d2b73cd Mon Sep 17 00:00:00 2001 From: kevaundray Date: Tue, 11 Nov 2025 05:23:44 +0000 Subject: [PATCH 25/74] fix: clarify `bb` vs `bl` variable names in BeaconProcessorQueue (#8315) since block and blob both start with `bl`, it was not clear how to differentiate between `blbroots_queue` and `bbroots_queue` After renaming, there also seems to be a discrepancy Co-Authored-By: Kevaundray Wedderburn --- beacon_node/beacon_processor/src/lib.rs | 56 ++++++++++++++----------- 1 file changed, 32 insertions(+), 24 deletions(-) diff --git a/beacon_node/beacon_processor/src/lib.rs b/beacon_node/beacon_processor/src/lib.rs index 28ed0cca913..1cdf3693ff2 100644 --- a/beacon_node/beacon_processor/src/lib.rs +++ b/beacon_node/beacon_processor/src/lib.rs @@ -123,10 +123,10 @@ pub struct BeaconProcessorQueueLengths { gossip_data_column_queue: usize, delayed_block_queue: usize, status_queue: usize, - bbrange_queue: usize, - bbroots_queue: usize, - blbroots_queue: usize, - blbrange_queue: usize, + block_brange_queue: usize, + block_broots_queue: usize, + blob_broots_queue: usize, + blob_brange_queue: usize, dcbroots_queue: usize, dcbrange_queue: usize, gossip_bls_to_execution_change_queue: usize, @@ -189,10 +189,10 @@ impl BeaconProcessorQueueLengths { 
gossip_data_column_queue: 1024, delayed_block_queue: 1024, status_queue: 1024, - bbrange_queue: 1024, - bbroots_queue: 1024, - blbroots_queue: 1024, - blbrange_queue: 1024, + block_brange_queue: 1024, + block_broots_queue: 1024, + blob_broots_queue: 1024, + blob_brange_queue: 1024, dcbroots_queue: 1024, dcbrange_queue: 1024, gossip_bls_to_execution_change_queue: 16384, @@ -876,10 +876,10 @@ impl BeaconProcessor { let mut delayed_block_queue = FifoQueue::new(queue_lengths.delayed_block_queue); let mut status_queue = FifoQueue::new(queue_lengths.status_queue); - let mut bbrange_queue = FifoQueue::new(queue_lengths.bbrange_queue); - let mut bbroots_queue = FifoQueue::new(queue_lengths.bbroots_queue); - let mut blbroots_queue = FifoQueue::new(queue_lengths.blbroots_queue); - let mut blbrange_queue = FifoQueue::new(queue_lengths.blbrange_queue); + let mut block_brange_queue = FifoQueue::new(queue_lengths.block_brange_queue); + let mut block_broots_queue = FifoQueue::new(queue_lengths.block_broots_queue); + let mut blob_broots_queue = FifoQueue::new(queue_lengths.blob_broots_queue); + let mut blob_brange_queue = FifoQueue::new(queue_lengths.blob_brange_queue); let mut dcbroots_queue = FifoQueue::new(queue_lengths.dcbroots_queue); let mut dcbrange_queue = FifoQueue::new(queue_lengths.dcbrange_queue); @@ -1190,13 +1190,13 @@ impl BeaconProcessor { // and BlocksByRoot) } else if let Some(item) = status_queue.pop() { Some(item) - } else if let Some(item) = bbrange_queue.pop() { + } else if let Some(item) = block_brange_queue.pop() { Some(item) - } else if let Some(item) = bbroots_queue.pop() { + } else if let Some(item) = block_broots_queue.pop() { Some(item) - } else if let Some(item) = blbrange_queue.pop() { + } else if let Some(item) = blob_brange_queue.pop() { Some(item) - } else if let Some(item) = blbroots_queue.pop() { + } else if let Some(item) = blob_broots_queue.pop() { Some(item) } else if let Some(item) = dcbroots_queue.pop() { Some(item) @@ -1360,9 +1360,15 @@ 
impl BeaconProcessor { backfill_chain_segment.push(work, work_id) } Work::Status { .. } => status_queue.push(work, work_id), - Work::BlocksByRangeRequest { .. } => bbrange_queue.push(work, work_id), - Work::BlocksByRootsRequest { .. } => bbroots_queue.push(work, work_id), - Work::BlobsByRangeRequest { .. } => blbrange_queue.push(work, work_id), + Work::BlocksByRangeRequest { .. } => { + block_brange_queue.push(work, work_id) + } + Work::BlocksByRootsRequest { .. } => { + block_broots_queue.push(work, work_id) + } + Work::BlobsByRangeRequest { .. } => { + blob_brange_queue.push(work, work_id) + } Work::LightClientBootstrapRequest { .. } => { lc_bootstrap_queue.push(work, work_id) } @@ -1384,7 +1390,9 @@ impl BeaconProcessor { Work::GossipBlsToExecutionChange { .. } => { gossip_bls_to_execution_change_queue.push(work, work_id) } - Work::BlobsByRootsRequest { .. } => blbroots_queue.push(work, work_id), + Work::BlobsByRootsRequest { .. } => { + blob_broots_queue.push(work, work_id) + } Work::DataColumnsByRootsRequest { .. 
} => { dcbroots_queue.push(work, work_id) } @@ -1435,10 +1443,10 @@ impl BeaconProcessor { WorkType::ChainSegment => chain_segment_queue.len(), WorkType::ChainSegmentBackfill => backfill_chain_segment.len(), WorkType::Status => status_queue.len(), - WorkType::BlocksByRangeRequest => blbrange_queue.len(), - WorkType::BlocksByRootsRequest => blbroots_queue.len(), - WorkType::BlobsByRangeRequest => bbrange_queue.len(), - WorkType::BlobsByRootsRequest => bbroots_queue.len(), + WorkType::BlocksByRangeRequest => block_brange_queue.len(), + WorkType::BlocksByRootsRequest => block_broots_queue.len(), + WorkType::BlobsByRangeRequest => blob_brange_queue.len(), + WorkType::BlobsByRootsRequest => blob_broots_queue.len(), WorkType::DataColumnsByRootsRequest => dcbroots_queue.len(), WorkType::DataColumnsByRangeRequest => dcbrange_queue.len(), WorkType::GossipBlsToExecutionChange => { From 11d1f60753222e8a281a19688c49fa2a811a0242 Mon Sep 17 00:00:00 2001 From: Mac L Date: Tue, 11 Nov 2025 10:52:46 +0400 Subject: [PATCH 26/74] Migrate the `deposit_contract` crate to `alloy` (#8139) https://github.com/sigp/lighthouse/issues/6022 Switches the `deposit_contract` crate to use the `alloy` ecosystem and removes the dependency on `ethabi` Co-Authored-By: Mac L --- Cargo.lock | 300 ++++++---------------- common/deposit_contract/Cargo.toml | 5 +- common/deposit_contract/src/lib.rs | 103 +++++--- common/validator_dir/src/validator_dir.rs | 2 +- 4 files changed, 148 insertions(+), 262 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 080221e5f28..481fe71df06 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -177,6 +177,22 @@ dependencies = [ "serde", ] +[[package]] +name = "alloy-dyn-abi" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdff496dd4e98a81f4861e66f7eaf5f2488971848bb42d9c892f871730245c8" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-type-parser", + "alloy-sol-types", + "itoa", + "serde", + 
"serde_json", + "winnow", +] + [[package]] name = "alloy-eip2124" version = "0.2.0" @@ -235,6 +251,18 @@ dependencies = [ "thiserror 2.0.12", ] +[[package]] +name = "alloy-json-abi" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5513d5e6bd1cba6bdcf5373470f559f320c05c8c59493b6e98912fbe6733943f" +dependencies = [ + "alloy-primitives", + "alloy-sol-type-parser", + "serde", + "serde_json", +] + [[package]] name = "alloy-network-primitives" version = "1.0.42" @@ -274,7 +302,7 @@ dependencies = [ "ruint", "rustc-hash 2.1.1", "serde", - "sha3 0.10.8", + "sha3", "tiny-keccak", ] @@ -379,14 +407,26 @@ dependencies = [ "syn-solidity", ] +[[package]] +name = "alloy-sol-type-parser" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "954d1b2533b9b2c7959652df3076954ecb1122a28cc740aa84e7b0a49f6ac0a9" +dependencies = [ + "serde", + "winnow", +] + [[package]] name = "alloy-sol-types" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70319350969a3af119da6fb3e9bddb1bce66c9ea933600cb297c8b1850ad2a3c" dependencies = [ + "alloy-json-abi", "alloy-primitives", "alloy-sol-macro", + "serde", ] [[package]] @@ -1293,28 +1333,16 @@ dependencies = [ "radium 0.3.0", ] -[[package]] -name = "bitvec" -version = "0.20.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" -dependencies = [ - "funty 1.1.0", - "radium 0.6.2", - "tap", - "wyz 0.2.0", -] - [[package]] name = "bitvec" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ - "funty 2.0.0", + "funty", "radium 0.7.0", "tap", - "wyz 0.5.1", + "wyz", ] [[package]] @@ -1332,7 +1360,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" dependencies = [ - "block-padding 0.1.5", + "block-padding", "byte-tools", "byteorder", "generic-array 0.12.4", @@ -1344,7 +1372,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "block-padding 0.2.1", "generic-array 0.14.7", ] @@ -1366,12 +1393,6 @@ dependencies = [ "byte-tools", ] -[[package]] -name = "block-padding" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - [[package]] name = "bls" version = "0.2.0" @@ -1867,7 +1888,7 @@ dependencies = [ "serde", "serde_derive", "sha2 0.10.8", - "sha3 0.10.8", + "sha3", "thiserror 1.0.69", ] @@ -2475,7 +2496,9 @@ dependencies = [ name = "deposit_contract" version = "0.2.0" dependencies = [ - "ethabi 16.0.0", + "alloy-dyn-abi", + "alloy-json-abi", + "alloy-primitives", "ethereum_ssz", "hex", "reqwest 0.11.27", @@ -3029,7 +3052,7 @@ dependencies = [ "log", "rand 0.8.5", "serde", - "sha3 0.10.8", + "sha3", "zeroize", ] @@ -3130,7 +3153,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "sha3 0.10.8", + "sha3", "thiserror 1.0.69", "uuid 0.8.2", ] @@ -3269,51 +3292,23 @@ dependencies = [ "tempfile", ] -[[package]] -name = "ethabi" -version = "16.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c98847055d934070b90e806e12d3936b787d0a115068981c1d8dfd5dfef5a5" -dependencies = [ - "ethereum-types 0.12.1", - "hex", - "serde", - "serde_json", - "sha3 0.9.1", - "thiserror 1.0.69", - "uint 0.9.5", -] - [[package]] name = "ethabi" version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" dependencies = [ - "ethereum-types 0.14.1", + "ethereum-types", "hex", "once_cell", 
"regex", "serde", "serde_json", - "sha3 0.10.8", + "sha3", "thiserror 1.0.69", "uint 0.9.5", ] -[[package]] -name = "ethbloom" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfb684ac8fa8f6c5759f788862bb22ec6fe3cb392f6bfd08e3c64b603661e3f8" -dependencies = [ - "crunchy", - "fixed-hash 0.7.0", - "impl-rlp", - "impl-serde 0.3.2", - "tiny-keccak", -] - [[package]] name = "ethbloom" version = "0.13.0" @@ -3321,40 +3316,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" dependencies = [ "crunchy", - "fixed-hash 0.8.0", - "impl-codec 0.6.0", + "fixed-hash", + "impl-codec", "impl-rlp", - "impl-serde 0.4.0", + "impl-serde", "scale-info", "tiny-keccak", ] -[[package]] -name = "ethereum-types" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05136f7057fe789f06e6d41d07b34e6f70d8c86e5693b60f97aaa6553553bdaf" -dependencies = [ - "ethbloom 0.11.1", - "fixed-hash 0.7.0", - "impl-rlp", - "impl-serde 0.3.2", - "primitive-types 0.10.1", - "uint 0.9.5", -] - [[package]] name = "ethereum-types" version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" dependencies = [ - "ethbloom 0.13.0", - "fixed-hash 0.8.0", - "impl-codec 0.6.0", + "ethbloom", + "fixed-hash", + "impl-codec", "impl-rlp", - "impl-serde 0.4.0", - "primitive-types 0.12.2", + "impl-serde", + "primitive-types", "scale-info", "uint 0.9.5", ] @@ -3439,7 +3420,7 @@ dependencies = [ "chrono", "convert_case 0.6.0", "elliptic-curve 0.12.3", - "ethabi 18.0.0", + "ethabi", "generic-array 0.14.7", "hex", "k256 0.11.6", @@ -3775,18 +3756,6 @@ dependencies = [ "windows-acl", ] -[[package]] -name = "fixed-hash" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" -dependencies = [ - "byteorder", - "rand 0.8.5", - "rustc-hex", - "static_assertions", -] - [[package]] name = "fixed-hash" version = "0.8.0" @@ -3894,12 +3863,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "funty" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" - [[package]] name = "funty" version = "2.0.0" @@ -4987,22 +4950,13 @@ dependencies = [ "xmltree", ] -[[package]] -name = "impl-codec" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" -dependencies = [ - "parity-scale-codec 2.3.1", -] - [[package]] name = "impl-codec" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" dependencies = [ - "parity-scale-codec 3.7.4", + "parity-scale-codec", ] [[package]] @@ -5014,15 +4968,6 @@ dependencies = [ "rlp", ] -[[package]] -name = "impl-serde" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4551f042f3438e64dbd6226b20527fc84a6e1fe65688b58746a2f53623f25f5c" -dependencies = [ - "serde", -] - [[package]] name = "impl-serde" version = "0.4.0" @@ -5262,7 +5207,7 @@ dependencies = [ "ecdsa 0.14.8", "elliptic-curve 0.12.3", "sha2 0.10.8", - "sha3 0.10.8", + "sha3", ] [[package]] @@ -5305,7 +5250,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b286e6b663fb926e1eeb68528e69cb70ed46c6d65871a21b2215ae8154c6d3c" dependencies = [ - "primitive-types 0.12.2", + "primitive-types", "tiny-keccak", ] @@ -6910,7 +6855,7 @@ dependencies = [ "arrayvec", "auto_impl 1.2.1", "bytes", - "ethereum-types 0.14.1", + "ethereum-types", "open-fastrlp-derive", ] @@ -7094,20 +7039,6 @@ 
dependencies = [ "group 0.13.0", ] -[[package]] -name = "parity-scale-codec" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373b1a4c1338d9cd3d1fa53b3a11bdab5ab6bd80a20f7f7becd76953ae2be909" -dependencies = [ - "arrayvec", - "bitvec 0.20.4", - "byte-slice-cast", - "impl-trait-for-tuples", - "parity-scale-codec-derive 2.3.1", - "serde", -] - [[package]] name = "parity-scale-codec" version = "3.7.4" @@ -7119,30 +7050,18 @@ dependencies = [ "byte-slice-cast", "const_format", "impl-trait-for-tuples", - "parity-scale-codec-derive 3.7.4", + "parity-scale-codec-derive", "rustversion", "serde", ] -[[package]] -name = "parity-scale-codec-derive" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1557010476e0595c9b568d16dcfb81b93cdeb157612726f5170d31aa707bed27" -dependencies = [ - "proc-macro-crate 1.3.1", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "parity-scale-codec-derive" version = "3.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "581c837bb6b9541ce7faa9377c20616e4fb7650f6b0f68bc93c827ee504fb7b3" dependencies = [ - "proc-macro-crate 3.3.0", + "proc-macro-crate", "proc-macro2", "quote", "syn 2.0.100", @@ -7472,50 +7391,27 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "primitive-types" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" -dependencies = [ - "fixed-hash 0.7.0", - "impl-codec 0.5.1", - "impl-rlp", - "impl-serde 0.3.2", - "uint 0.9.5", -] - [[package]] name = "primitive-types" version = "0.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ - "fixed-hash 0.8.0", - "impl-codec 0.6.0", + "fixed-hash", + "impl-codec", "impl-rlp", - "impl-serde 0.4.0", + "impl-serde", 
"scale-info", "uint 0.9.5", ] -[[package]] -name = "proc-macro-crate" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" -dependencies = [ - "once_cell", - "toml_edit 0.19.15", -] - [[package]] name = "proc-macro-crate" version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" dependencies = [ - "toml_edit 0.22.24", + "toml_edit", ] [[package]] @@ -7858,12 +7754,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" -[[package]] -name = "radium" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" - [[package]] name = "radium" version = "0.7.0" @@ -8279,8 +8169,8 @@ dependencies = [ "num-bigint", "num-integer", "num-traits", - "parity-scale-codec 3.7.4", - "primitive-types 0.12.2", + "parity-scale-codec", + "primitive-types", "proptest", "rand 0.8.5", "rand 0.9.0", @@ -8608,7 +8498,7 @@ checksum = "346a3b32eba2640d17a9cb5927056b08f3de90f65b72fe09402c2ad07d684d0b" dependencies = [ "cfg-if", "derive_more 1.0.0", - "parity-scale-codec 3.7.4", + "parity-scale-codec", "scale-info-derive", ] @@ -8618,7 +8508,7 @@ version = "2.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf" dependencies = [ - "proc-macro-crate 3.3.0", + "proc-macro-crate", "proc-macro2", "quote", "syn 2.0.100", @@ -9005,18 +8895,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "sha3" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" -dependencies = [ - 
"block-buffer 0.9.0", - "digest 0.9.0", - "keccak", - "opaque-debug 0.3.1", -] - [[package]] name = "sha3" version = "0.10.8" @@ -9956,17 +9834,6 @@ version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" -[[package]] -name = "toml_edit" -version = "0.19.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" -dependencies = [ - "indexmap 2.8.0", - "toml_datetime", - "winnow 0.5.40", -] - [[package]] name = "toml_edit" version = "0.22.24" @@ -9975,7 +9842,7 @@ checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" dependencies = [ "indexmap 2.8.0", "toml_datetime", - "winnow 0.7.3", + "winnow", ] [[package]] @@ -11424,15 +11291,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" -[[package]] -name = "winnow" -version = "0.5.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" -dependencies = [ - "memchr", -] - [[package]] name = "winnow" version = "0.7.3" @@ -11500,12 +11358,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "wyz" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" - [[package]] name = "wyz" version = "0.5.1" diff --git a/common/deposit_contract/Cargo.toml b/common/deposit_contract/Cargo.toml index 767f67b853f..dfaad43719d 100644 --- a/common/deposit_contract/Cargo.toml +++ b/common/deposit_contract/Cargo.toml @@ -7,8 +7,11 @@ edition = { workspace = true } build = "build.rs" [dependencies] -ethabi = "16.0.0" +alloy-dyn-abi = "1.4" +alloy-json-abi = "1.4" +alloy-primitives = { workspace = true } ethereum_ssz = 
{ workspace = true } +serde_json = { workspace = true } tree_hash = { workspace = true } types = { workspace = true } diff --git a/common/deposit_contract/src/lib.rs b/common/deposit_contract/src/lib.rs index 7d58240f11b..12c3bdaa894 100644 --- a/common/deposit_contract/src/lib.rs +++ b/common/deposit_contract/src/lib.rs @@ -1,23 +1,44 @@ -use ethabi::{Contract, Token}; +use alloy_dyn_abi::{DynSolValue, JsonAbiExt}; +use alloy_json_abi::JsonAbi; +use alloy_primitives::FixedBytes; use ssz::{Decode, DecodeError as SszDecodeError, Encode}; use tree_hash::TreeHash; use types::{DepositData, Hash256, PublicKeyBytes, SignatureBytes}; -pub use ethabi::Error; - #[derive(Debug)] -pub enum DecodeError { - EthabiError(ethabi::Error), +pub enum Error { + AlloyCoreError(alloy_json_abi::Error), + SerdeJsonError(serde_json::Error), + DynAbiError(alloy_dyn_abi::Error), SszDecodeError(SszDecodeError), + FunctionNotFound, MissingField, UnableToGetBytes, MissingToken, InadequateBytes, } -impl From for DecodeError { - fn from(e: ethabi::Error) -> DecodeError { - DecodeError::EthabiError(e) +impl From for Error { + fn from(e: alloy_json_abi::Error) -> Error { + Error::AlloyCoreError(e) + } +} + +impl From for Error { + fn from(e: serde_json::Error) -> Error { + Error::SerdeJsonError(e) + } +} + +impl From for Error { + fn from(e: alloy_dyn_abi::Error) -> Error { + Error::DynAbiError(e) + } +} + +impl From for Error { + fn from(e: SszDecodeError) -> Error { + Error::SszDecodeError(e) } } @@ -36,47 +57,57 @@ pub mod testnet { pub fn encode_eth1_tx_data(deposit_data: &DepositData) -> Result, Error> { let params = vec![ - Token::Bytes(deposit_data.pubkey.as_ssz_bytes()), - Token::Bytes(deposit_data.withdrawal_credentials.as_ssz_bytes()), - Token::Bytes(deposit_data.signature.as_ssz_bytes()), - Token::FixedBytes(deposit_data.tree_hash_root().as_ssz_bytes()), + DynSolValue::Bytes(deposit_data.pubkey.as_ssz_bytes()), + DynSolValue::Bytes(deposit_data.withdrawal_credentials.as_ssz_bytes()), + 
DynSolValue::Bytes(deposit_data.signature.as_ssz_bytes()), + DynSolValue::FixedBytes( + FixedBytes::<32>::from_slice(&deposit_data.tree_hash_root().as_ssz_bytes()), + 32, + ), ]; // Here we make an assumption that the `crate::testnet::ABI` has a superset of the features of // the crate::ABI`. - let abi = Contract::load(ABI)?; - let function = abi.function("deposit")?; - function.encode_input(¶ms) + let abi: JsonAbi = serde_json::from_slice(ABI)?; + let function = abi + .function("deposit") + .and_then(|functions| functions.first()) + .ok_or(Error::FunctionNotFound)?; + + function + .abi_encode_input(¶ms) + .map_err(Error::DynAbiError) } -pub fn decode_eth1_tx_data( - bytes: &[u8], - amount: u64, -) -> Result<(DepositData, Hash256), DecodeError> { - let abi = Contract::load(ABI)?; - let function = abi.function("deposit")?; - let mut tokens = function.decode_input(bytes.get(4..).ok_or(DecodeError::InadequateBytes)?)?; +pub fn decode_eth1_tx_data(bytes: &[u8], amount: u64) -> Result<(DepositData, Hash256), Error> { + let abi: JsonAbi = serde_json::from_slice(ABI)?; + let function = abi + .function("deposit") + .and_then(|functions| functions.first()) + .ok_or(Error::FunctionNotFound)?; + + let input_data = bytes.get(4..).ok_or(Error::InadequateBytes)?; + let mut tokens = function.abi_decode_input(input_data)?; macro_rules! decode_token { - ($type: ty, $to_fn: ident) => { - <$type>::from_ssz_bytes( - &tokens - .pop() - .ok_or_else(|| DecodeError::MissingToken)? - .$to_fn() - .ok_or_else(|| DecodeError::UnableToGetBytes)?, - ) - .map_err(DecodeError::SszDecodeError)? - }; + ($type: ty) => {{ + let token = tokens.pop().ok_or(Error::MissingToken)?; + let bytes_data = match token { + DynSolValue::Bytes(b) => b, + DynSolValue::FixedBytes(b, _) => b.to_vec(), + _ => return Err(Error::UnableToGetBytes), + }; + <$type>::from_ssz_bytes(&bytes_data)? 
+ }}; } - let root = decode_token!(Hash256, into_fixed_bytes); + let root = decode_token!(Hash256); let deposit_data = DepositData { amount, - signature: decode_token!(SignatureBytes, into_bytes), - withdrawal_credentials: decode_token!(Hash256, into_bytes), - pubkey: decode_token!(PublicKeyBytes, into_bytes), + signature: decode_token!(SignatureBytes), + withdrawal_credentials: decode_token!(Hash256), + pubkey: decode_token!(PublicKeyBytes), }; Ok((deposit_data, root)) diff --git a/common/validator_dir/src/validator_dir.rs b/common/validator_dir/src/validator_dir.rs index 352ede708ad..8b50ea66876 100644 --- a/common/validator_dir/src/validator_dir.rs +++ b/common/validator_dir/src/validator_dir.rs @@ -32,7 +32,7 @@ pub enum Error { UnableToReadDepositAmount(io::Error), UnableToParseDepositAmount(std::num::ParseIntError), DepositAmountIsNotUtf8(std::string::FromUtf8Error), - UnableToParseDepositData(deposit_contract::DecodeError), + UnableToParseDepositData(deposit_contract::Error), Eth1TxHashExists(PathBuf), UnableToWriteEth1TxHash(io::Error), /// The deposit root in the deposit data file does not match the one generated locally. This is From 53e73fa37673c70b5beb572281fdd5d21d4e40cc Mon Sep 17 00:00:00 2001 From: Lion - dapplion <35266934+dapplion@users.noreply.github.com> Date: Wed, 12 Nov 2025 00:42:17 -0300 Subject: [PATCH 27/74] Remove duplicate state in ProtoArray (#8324) Part of a fork-choice tech debt clean-up https://github.com/sigp/lighthouse/issues/8325 https://github.com/sigp/lighthouse/issues/7089 (non-finalized checkpoint sync) changes the meaning of the checkpoints inside fork-choice. It turns out that we persist the justified and finalized checkpoints **twice** in fork-choice 1. Inside the fork-choice store 2. Inside the proto-array There's no reason for 2. except for making the function signature of some methods smaller. 
It's not consistent with the rest of the crate, because in some functions we pass the external variable of time (current_slot) via args, but then read the finalized checkpoint from the internal state. Passing both variables as args makes fork-choice easier to reason about at the cost of a few extra lines. Remove the unnecessary state (`justified_checkpoint`, `finalized_checkpoint`) inside `ProtoArray`, to make it easier to reason about. Co-Authored-By: dapplion <35266934+dapplion@users.noreply.github.com> Co-Authored-By: Michael Sproul --- beacon_node/beacon_chain/src/beacon_chain.rs | 6 +- .../src/schema_change/migration_schema_v23.rs | 2 +- beacon_node/http_api/src/lib.rs | 4 +- beacon_node/http_api/tests/tests.rs | 4 +- consensus/fork_choice/src/fork_choice.rs | 10 +- .../src/fork_choice_test_definition.rs | 15 ++- consensus/proto_array/src/proto_array.rs | 120 +++++++++++++----- .../src/proto_array_fork_choice.rs | 105 +++++++++++---- consensus/proto_array/src/ssz_container.rs | 20 +-- 9 files changed, 204 insertions(+), 82 deletions(-) diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 5ffdf951ac1..494346e7ff2 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -1412,10 +1412,10 @@ impl BeaconChain { /// /// Returns `(block_root, block_slot)`. 
pub fn heads(&self) -> Vec<(Hash256, Slot)> { - self.canonical_head - .fork_choice_read_lock() + let fork_choice = self.canonical_head.fork_choice_read_lock(); + fork_choice .proto_array() - .heads_descended_from_finalization::() + .heads_descended_from_finalization::(fork_choice.finalized_checkpoint()) .iter() .map(|node| (node.root, node.slot)) .collect() diff --git a/beacon_node/beacon_chain/src/schema_change/migration_schema_v23.rs b/beacon_node/beacon_chain/src/schema_change/migration_schema_v23.rs index e8bd526e19f..e238e1efb6c 100644 --- a/beacon_node/beacon_chain/src/schema_change/migration_schema_v23.rs +++ b/beacon_node/beacon_chain/src/schema_change/migration_schema_v23.rs @@ -122,7 +122,7 @@ pub fn downgrade_from_v23( let heads = fork_choice .proto_array() - .heads_descended_from_finalization::(); + .heads_descended_from_finalization::(fork_choice.finalized_checkpoint()); let head_roots = heads.iter().map(|node| node.root).collect(); let head_slots = heads.iter().map(|node| node.slot).collect(); diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index 9026792b911..e0fb39c42cf 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -3037,8 +3037,8 @@ pub fn serve( }) .collect::>(); Ok(ForkChoice { - justified_checkpoint: proto_array.justified_checkpoint, - finalized_checkpoint: proto_array.finalized_checkpoint, + justified_checkpoint: beacon_fork_choice.justified_checkpoint(), + finalized_checkpoint: beacon_fork_choice.finalized_checkpoint(), fork_choice_nodes, }) }) diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs index 6fb5a8ed8aa..b3486da5ad2 100644 --- a/beacon_node/http_api/tests/tests.rs +++ b/beacon_node/http_api/tests/tests.rs @@ -3057,11 +3057,11 @@ impl ApiTester { assert_eq!( result.justified_checkpoint, - expected_proto_array.justified_checkpoint + beacon_fork_choice.justified_checkpoint() ); assert_eq!( result.finalized_checkpoint, - 
expected_proto_array.finalized_checkpoint + beacon_fork_choice.finalized_checkpoint() ); let expected_fork_choice_nodes: Vec = expected_proto_array diff --git a/consensus/fork_choice/src/fork_choice.rs b/consensus/fork_choice/src/fork_choice.rs index fe1f5fba9e4..6565e7cdaf6 100644 --- a/consensus/fork_choice/src/fork_choice.rs +++ b/consensus/fork_choice/src/fork_choice.rs @@ -627,7 +627,7 @@ where op: &InvalidationOperation, ) -> Result<(), Error> { self.proto_array - .process_execution_payload_invalidation::(op) + .process_execution_payload_invalidation::(op, self.finalized_checkpoint()) .map_err(Error::FailedToProcessInvalidExecutionPayload) } @@ -908,6 +908,8 @@ where unrealized_finalized_checkpoint: Some(unrealized_finalized_checkpoint), }, current_slot, + self.justified_checkpoint(), + self.finalized_checkpoint(), )?; Ok(()) @@ -1288,7 +1290,7 @@ where /// Return `true` if `block_root` is equal to the finalized checkpoint, or a known descendant of it. pub fn is_finalized_checkpoint_or_descendant(&self, block_root: Hash256) -> bool { self.proto_array - .is_finalized_checkpoint_or_descendant::(block_root) + .is_finalized_checkpoint_or_descendant::(block_root, self.finalized_checkpoint()) } pub fn is_descendant(&self, ancestor_root: Hash256, descendant_root: Hash256) -> bool { @@ -1508,7 +1510,9 @@ where /// be instantiated again later. 
pub fn to_persisted(&self) -> PersistedForkChoice { PersistedForkChoice { - proto_array: self.proto_array().as_ssz_container(), + proto_array: self + .proto_array() + .as_ssz_container(self.justified_checkpoint(), self.finalized_checkpoint()), queued_attestations: self.queued_attestations().to_vec(), } } diff --git a/consensus/proto_array/src/fork_choice_test_definition.rs b/consensus/proto_array/src/fork_choice_test_definition.rs index 20987dff26d..43a7e3b77fe 100644 --- a/consensus/proto_array/src/fork_choice_test_definition.rs +++ b/consensus/proto_array/src/fork_choice_test_definition.rs @@ -212,7 +212,12 @@ impl ForkChoiceTestDefinition { unrealized_finalized_checkpoint: None, }; fork_choice - .process_block::(block, slot) + .process_block::( + block, + slot, + self.justified_checkpoint, + self.finalized_checkpoint, + ) .unwrap_or_else(|e| { panic!( "process_block op at index {} returned error: {:?}", @@ -272,7 +277,10 @@ impl ForkChoiceTestDefinition { } }; fork_choice - .process_execution_payload_invalidation::(&op) + .process_execution_payload_invalidation::( + &op, + self.finalized_checkpoint, + ) .unwrap() } Operation::AssertWeight { block_root, weight } => assert_eq!( @@ -305,7 +313,8 @@ fn get_checkpoint(i: u64) -> Checkpoint { } fn check_bytes_round_trip(original: &ProtoArrayForkChoice) { - let bytes = original.as_bytes(); + // The checkpoints are ignored by `ProtoArrayForkChoice::from_bytes` so any value is ok + let bytes = original.as_bytes(Checkpoint::default(), Checkpoint::default()); let decoded = ProtoArrayForkChoice::from_bytes(&bytes, original.balances.clone()) .expect("fork choice should decode from bytes"); assert!( diff --git a/consensus/proto_array/src/proto_array.rs b/consensus/proto_array/src/proto_array.rs index 18af2dfc24c..1d78ce9f443 100644 --- a/consensus/proto_array/src/proto_array.rs +++ b/consensus/proto_array/src/proto_array.rs @@ -130,8 +130,6 @@ pub struct ProtoArray { /// Do not attempt to prune the tree unless it has at least 
this many nodes. Small prunes /// simply waste time. pub prune_threshold: usize, - pub justified_checkpoint: Checkpoint, - pub finalized_checkpoint: Checkpoint, pub nodes: Vec, pub indices: HashMap, pub previous_proposer_boost: ProposerBoost, @@ -155,8 +153,8 @@ impl ProtoArray { pub fn apply_score_changes( &mut self, mut deltas: Vec, - justified_checkpoint: Checkpoint, - finalized_checkpoint: Checkpoint, + best_justified_checkpoint: Checkpoint, + best_finalized_checkpoint: Checkpoint, new_justified_balances: &JustifiedBalances, proposer_boost_root: Hash256, current_slot: Slot, @@ -169,13 +167,6 @@ impl ProtoArray { }); } - if justified_checkpoint != self.justified_checkpoint - || finalized_checkpoint != self.finalized_checkpoint - { - self.justified_checkpoint = justified_checkpoint; - self.finalized_checkpoint = finalized_checkpoint; - } - // Default the proposer boost score to zero. let mut proposer_score = 0; @@ -296,6 +287,8 @@ impl ProtoArray { parent_index, node_index, current_slot, + best_justified_checkpoint, + best_finalized_checkpoint, )?; } } @@ -306,7 +299,13 @@ impl ProtoArray { /// Register a block with the fork choice. /// /// It is only sane to supply a `None` parent for the genesis block. - pub fn on_block(&mut self, block: Block, current_slot: Slot) -> Result<(), Error> { + pub fn on_block( + &mut self, + block: Block, + current_slot: Slot, + best_justified_checkpoint: Checkpoint, + best_finalized_checkpoint: Checkpoint, + ) -> Result<(), Error> { // If the block is already known, simply ignore it. 
if self.indices.contains_key(&block.root) { return Ok(()); @@ -357,6 +356,8 @@ impl ProtoArray { parent_index, node_index, current_slot, + best_justified_checkpoint, + best_finalized_checkpoint, )?; if matches!(block.execution_status, ExecutionStatus::Valid(_)) { @@ -439,6 +440,7 @@ impl ProtoArray { pub fn propagate_execution_payload_invalidation( &mut self, op: &InvalidationOperation, + best_finalized_checkpoint: Checkpoint, ) -> Result<(), Error> { let mut invalidated_indices: HashSet = <_>::default(); let head_block_root = op.block_root(); @@ -467,7 +469,10 @@ impl ProtoArray { let latest_valid_ancestor_is_descendant = latest_valid_ancestor_root.is_some_and(|ancestor_root| { self.is_descendant(ancestor_root, head_block_root) - && self.is_finalized_checkpoint_or_descendant::(ancestor_root) + && self.is_finalized_checkpoint_or_descendant::( + ancestor_root, + best_finalized_checkpoint, + ) }); // Collect all *ancestors* which were declared invalid since they reside between the @@ -630,6 +635,8 @@ impl ProtoArray { &self, justified_root: &Hash256, current_slot: Slot, + best_justified_checkpoint: Checkpoint, + best_finalized_checkpoint: Checkpoint, ) -> Result { let justified_index = self .indices @@ -663,12 +670,17 @@ impl ProtoArray { .ok_or(Error::InvalidBestDescendant(best_descendant_index))?; // Perform a sanity check that the node is indeed valid to be the head. 
- if !self.node_is_viable_for_head::(best_node, current_slot) { + if !self.node_is_viable_for_head::( + best_node, + current_slot, + best_justified_checkpoint, + best_finalized_checkpoint, + ) { return Err(Error::InvalidBestNode(Box::new(InvalidBestNodeInfo { current_slot, start_root: *justified_root, - justified_checkpoint: self.justified_checkpoint, - finalized_checkpoint: self.finalized_checkpoint, + justified_checkpoint: best_justified_checkpoint, + finalized_checkpoint: best_finalized_checkpoint, head_root: best_node.root, head_justified_checkpoint: best_node.justified_checkpoint, head_finalized_checkpoint: best_node.finalized_checkpoint, @@ -765,6 +777,8 @@ impl ProtoArray { parent_index: usize, child_index: usize, current_slot: Slot, + best_justified_checkpoint: Checkpoint, + best_finalized_checkpoint: Checkpoint, ) -> Result<(), Error> { let child = self .nodes @@ -776,8 +790,12 @@ impl ProtoArray { .get(parent_index) .ok_or(Error::InvalidNodeIndex(parent_index))?; - let child_leads_to_viable_head = - self.node_leads_to_viable_head::(child, current_slot)?; + let child_leads_to_viable_head = self.node_leads_to_viable_head::( + child, + current_slot, + best_justified_checkpoint, + best_finalized_checkpoint, + )?; // These three variables are aliases to the three options that we may set the // `parent.best_child` and `parent.best_descendant` to. @@ -806,8 +824,12 @@ impl ProtoArray { .get(best_child_index) .ok_or(Error::InvalidBestDescendant(best_child_index))?; - let best_child_leads_to_viable_head = - self.node_leads_to_viable_head::(best_child, current_slot)?; + let best_child_leads_to_viable_head = self.node_leads_to_viable_head::( + best_child, + current_slot, + best_justified_checkpoint, + best_finalized_checkpoint, + )?; if child_leads_to_viable_head && !best_child_leads_to_viable_head { // The child leads to a viable head, but the current best-child doesn't. 
@@ -856,6 +878,8 @@ impl ProtoArray { &self, node: &ProtoNode, current_slot: Slot, + best_justified_checkpoint: Checkpoint, + best_finalized_checkpoint: Checkpoint, ) -> Result { let best_descendant_is_viable_for_head = if let Some(best_descendant_index) = node.best_descendant { @@ -864,13 +888,23 @@ impl ProtoArray { .get(best_descendant_index) .ok_or(Error::InvalidBestDescendant(best_descendant_index))?; - self.node_is_viable_for_head::(best_descendant, current_slot) + self.node_is_viable_for_head::( + best_descendant, + current_slot, + best_justified_checkpoint, + best_finalized_checkpoint, + ) } else { false }; Ok(best_descendant_is_viable_for_head - || self.node_is_viable_for_head::(node, current_slot)) + || self.node_is_viable_for_head::( + node, + current_slot, + best_justified_checkpoint, + best_finalized_checkpoint, + )) } /// This is the equivalent to the `filter_block_tree` function in the eth2 spec: @@ -879,7 +913,13 @@ impl ProtoArray { /// /// Any node that has a different finalized or justified epoch should not be viable for the /// head. 
- fn node_is_viable_for_head(&self, node: &ProtoNode, current_slot: Slot) -> bool { + fn node_is_viable_for_head( + &self, + node: &ProtoNode, + current_slot: Slot, + best_justified_checkpoint: Checkpoint, + best_finalized_checkpoint: Checkpoint, + ) -> bool { if node.execution_status.is_invalid() { return false; } @@ -901,12 +941,13 @@ impl ProtoArray { node_justified_checkpoint }; - let correct_justified = self.justified_checkpoint.epoch == genesis_epoch - || voting_source.epoch == self.justified_checkpoint.epoch + let correct_justified = best_justified_checkpoint.epoch == genesis_epoch + || voting_source.epoch == best_justified_checkpoint.epoch || voting_source.epoch + 2 >= current_epoch; - let correct_finalized = self.finalized_checkpoint.epoch == genesis_epoch - || self.is_finalized_checkpoint_or_descendant::(node.root); + let correct_finalized = best_finalized_checkpoint.epoch == genesis_epoch + || self + .is_finalized_checkpoint_or_descendant::(node.root, best_finalized_checkpoint); correct_justified && correct_finalized } @@ -961,10 +1002,13 @@ impl ProtoArray { /// /// Notably, this function is checking ancestory of the finalized /// *checkpoint* not the finalized *block*. - pub fn is_finalized_checkpoint_or_descendant(&self, root: Hash256) -> bool { - let finalized_root = self.finalized_checkpoint.root; - let finalized_slot = self - .finalized_checkpoint + pub fn is_finalized_checkpoint_or_descendant( + &self, + root: Hash256, + best_finalized_checkpoint: Checkpoint, + ) -> bool { + let finalized_root = best_finalized_checkpoint.root; + let finalized_slot = best_finalized_checkpoint .epoch .start_slot(E::slots_per_epoch()); @@ -987,7 +1031,7 @@ impl ProtoArray { // If the conditions don't match for this node then they're unlikely to // start matching for its ancestors. 
for checkpoint in &[node.finalized_checkpoint, node.justified_checkpoint] { - if checkpoint == &self.finalized_checkpoint { + if checkpoint == &best_finalized_checkpoint { return true; } } @@ -996,7 +1040,7 @@ impl ProtoArray { node.unrealized_finalized_checkpoint, node.unrealized_justified_checkpoint, ] { - if checkpoint.is_some_and(|cp| cp == self.finalized_checkpoint) { + if checkpoint.is_some_and(|cp| cp == best_finalized_checkpoint) { return true; } } @@ -1044,12 +1088,18 @@ impl ProtoArray { /// For informational purposes like the beacon HTTP API, we use this as the list of known heads, /// even though some of them might not be viable. We do this to maintain consistency between the /// definition of "head" used by pruning (which does not consider viability) and fork choice. - pub fn heads_descended_from_finalization(&self) -> Vec<&ProtoNode> { + pub fn heads_descended_from_finalization( + &self, + best_finalized_checkpoint: Checkpoint, + ) -> Vec<&ProtoNode> { self.nodes .iter() .filter(|node| { node.best_child.is_none() - && self.is_finalized_checkpoint_or_descendant::(node.root) + && self.is_finalized_checkpoint_or_descendant::( + node.root, + best_finalized_checkpoint, + ) }) .collect() } diff --git a/consensus/proto_array/src/proto_array_fork_choice.rs b/consensus/proto_array/src/proto_array_fork_choice.rs index dea853d245d..137471ce36d 100644 --- a/consensus/proto_array/src/proto_array_fork_choice.rs +++ b/consensus/proto_array/src/proto_array_fork_choice.rs @@ -424,8 +424,6 @@ impl ProtoArrayForkChoice { ) -> Result { let mut proto_array = ProtoArray { prune_threshold: DEFAULT_PRUNE_THRESHOLD, - justified_checkpoint, - finalized_checkpoint, nodes: Vec::with_capacity(1), indices: HashMap::with_capacity(1), previous_proposer_boost: ProposerBoost::default(), @@ -449,7 +447,12 @@ impl ProtoArrayForkChoice { }; proto_array - .on_block::(block, current_slot) + .on_block::( + block, + current_slot, + justified_checkpoint, + finalized_checkpoint, + ) 
.map_err(|e| format!("Failed to add finalized block to proto_array: {:?}", e))?; Ok(Self { @@ -473,9 +476,10 @@ impl ProtoArrayForkChoice { pub fn process_execution_payload_invalidation( &mut self, op: &InvalidationOperation, + finalized_checkpoint: Checkpoint, ) -> Result<(), String> { self.proto_array - .propagate_execution_payload_invalidation::(op) + .propagate_execution_payload_invalidation::(op, finalized_checkpoint) .map_err(|e| format!("Failed to process invalid payload: {:?}", e)) } @@ -499,13 +503,20 @@ impl ProtoArrayForkChoice { &mut self, block: Block, current_slot: Slot, + justified_checkpoint: Checkpoint, + finalized_checkpoint: Checkpoint, ) -> Result<(), String> { if block.parent_root.is_none() { return Err("Missing parent root".to_string()); } self.proto_array - .on_block::(block, current_slot) + .on_block::( + block, + current_slot, + justified_checkpoint, + finalized_checkpoint, + ) .map_err(|e| format!("process_block_error: {:?}", e)) } @@ -547,7 +558,12 @@ impl ProtoArrayForkChoice { *old_balances = new_balances.clone(); self.proto_array - .find_head::(&justified_checkpoint.root, current_slot) + .find_head::( + &justified_checkpoint.root, + current_slot, + justified_checkpoint, + finalized_checkpoint, + ) .map_err(|e| format!("find_head failed: {:?}", e)) } @@ -884,9 +900,10 @@ impl ProtoArrayForkChoice { pub fn is_finalized_checkpoint_or_descendant( &self, descendant_root: Hash256, + best_finalized_checkpoint: Checkpoint, ) -> bool { self.proto_array - .is_finalized_checkpoint_or_descendant::(descendant_root) + .is_finalized_checkpoint_or_descendant::(descendant_root, best_finalized_checkpoint) } pub fn latest_message(&self, validator_index: usize) -> Option<(Hash256, Epoch)> { @@ -916,12 +933,21 @@ impl ProtoArrayForkChoice { self.proto_array.iter_block_roots(block_root) } - pub fn as_ssz_container(&self) -> SszContainer { - SszContainer::from(self) + pub fn as_ssz_container( + &self, + justified_checkpoint: Checkpoint, + 
finalized_checkpoint: Checkpoint, + ) -> SszContainer { + SszContainer::from_proto_array(self, justified_checkpoint, finalized_checkpoint) } - pub fn as_bytes(&self) -> Vec { - SszContainer::from(self).as_ssz_bytes() + pub fn as_bytes( + &self, + justified_checkpoint: Checkpoint, + finalized_checkpoint: Checkpoint, + ) -> Vec { + self.as_ssz_container(justified_checkpoint, finalized_checkpoint) + .as_ssz_bytes() } pub fn from_bytes(bytes: &[u8], balances: JustifiedBalances) -> Result { @@ -954,8 +980,12 @@ impl ProtoArrayForkChoice { } /// Returns all nodes that have zero children and are descended from the finalized checkpoint. - pub fn heads_descended_from_finalization(&self) -> Vec<&ProtoNode> { - self.proto_array.heads_descended_from_finalization::() + pub fn heads_descended_from_finalization( + &self, + best_finalized_checkpoint: Checkpoint, + ) -> Vec<&ProtoNode> { + self.proto_array + .heads_descended_from_finalization::(best_finalized_checkpoint) } } @@ -1125,6 +1155,8 @@ mod test_compute_deltas { unrealized_finalized_checkpoint: Some(genesis_checkpoint), }, genesis_slot + 1, + genesis_checkpoint, + genesis_checkpoint, ) .unwrap(); @@ -1148,6 +1180,8 @@ mod test_compute_deltas { unrealized_finalized_checkpoint: None, }, genesis_slot + 1, + genesis_checkpoint, + genesis_checkpoint, ) .unwrap(); @@ -1161,10 +1195,24 @@ mod test_compute_deltas { assert!(!fc.is_descendant(finalized_root, not_finalized_desc)); assert!(!fc.is_descendant(finalized_root, unknown)); - assert!(fc.is_finalized_checkpoint_or_descendant::(finalized_root)); - assert!(fc.is_finalized_checkpoint_or_descendant::(finalized_desc)); - assert!(!fc.is_finalized_checkpoint_or_descendant::(not_finalized_desc)); - assert!(!fc.is_finalized_checkpoint_or_descendant::(unknown)); + assert!(fc.is_finalized_checkpoint_or_descendant::( + finalized_root, + genesis_checkpoint + )); + assert!(fc.is_finalized_checkpoint_or_descendant::( + finalized_desc, + genesis_checkpoint + )); + 
assert!(!fc.is_finalized_checkpoint_or_descendant::( + not_finalized_desc, + genesis_checkpoint + )); + assert!( + !fc.is_finalized_checkpoint_or_descendant::( + unknown, + genesis_checkpoint + ) + ); assert!(!fc.is_descendant(finalized_desc, not_finalized_desc)); assert!(fc.is_descendant(finalized_desc, finalized_desc)); @@ -1260,6 +1308,8 @@ mod test_compute_deltas { unrealized_finalized_checkpoint: Some(genesis_checkpoint), }, Slot::from(block.slot), + genesis_checkpoint, + genesis_checkpoint, ) .unwrap(); }; @@ -1314,29 +1364,34 @@ mod test_compute_deltas { // Set the finalized checkpoint to finalize the first slot of epoch 1 on // the canonical chain. - fc.proto_array.finalized_checkpoint = Checkpoint { + let finalized_checkpoint = Checkpoint { root: finalized_root, epoch: Epoch::new(1), }; assert!( fc.proto_array - .is_finalized_checkpoint_or_descendant::(finalized_root), + .is_finalized_checkpoint_or_descendant::( + finalized_root, + finalized_checkpoint + ), "the finalized checkpoint is the finalized checkpoint" ); assert!( fc.proto_array - .is_finalized_checkpoint_or_descendant::(get_block_root( - canonical_slot - )), + .is_finalized_checkpoint_or_descendant::( + get_block_root(canonical_slot), + finalized_checkpoint + ), "the canonical block is a descendant of the finalized checkpoint" ); assert!( !fc.proto_array - .is_finalized_checkpoint_or_descendant::(get_block_root( - non_canonical_slot - )), + .is_finalized_checkpoint_or_descendant::( + get_block_root(non_canonical_slot), + finalized_checkpoint + ), "although the non-canonical block is a descendant of the finalized block, \ it's not a descendant of the finalized checkpoint" ); diff --git a/consensus/proto_array/src/ssz_container.rs b/consensus/proto_array/src/ssz_container.rs index 0bb3f2b35d8..1e01b74c8cd 100644 --- a/consensus/proto_array/src/ssz_container.rs +++ b/consensus/proto_array/src/ssz_container.rs @@ -26,22 +26,28 @@ pub struct SszContainer { #[superstruct(only(V17))] pub balances: Vec, 
pub prune_threshold: usize, - pub justified_checkpoint: Checkpoint, - pub finalized_checkpoint: Checkpoint, + // Deprecated, remove in a future schema migration + justified_checkpoint: Checkpoint, + // Deprecated, remove in a future schema migration + finalized_checkpoint: Checkpoint, pub nodes: Vec, pub indices: Vec<(Hash256, usize)>, pub previous_proposer_boost: ProposerBoost, } -impl From<&ProtoArrayForkChoice> for SszContainer { - fn from(from: &ProtoArrayForkChoice) -> Self { +impl SszContainer { + pub fn from_proto_array( + from: &ProtoArrayForkChoice, + justified_checkpoint: Checkpoint, + finalized_checkpoint: Checkpoint, + ) -> Self { let proto_array = &from.proto_array; Self { votes: from.votes.0.clone(), prune_threshold: proto_array.prune_threshold, - justified_checkpoint: proto_array.justified_checkpoint, - finalized_checkpoint: proto_array.finalized_checkpoint, + justified_checkpoint, + finalized_checkpoint, nodes: proto_array.nodes.clone(), indices: proto_array.indices.iter().map(|(k, v)| (*k, *v)).collect(), previous_proposer_boost: proto_array.previous_proposer_boost, @@ -55,8 +61,6 @@ impl TryFrom<(SszContainer, JustifiedBalances)> for ProtoArrayForkChoice { fn try_from((from, balances): (SszContainer, JustifiedBalances)) -> Result { let proto_array = ProtoArray { prune_threshold: from.prune_threshold, - justified_checkpoint: from.justified_checkpoint, - finalized_checkpoint: from.finalized_checkpoint, nodes: from.nodes, indices: from.indices.into_iter().collect::>(), previous_proposer_boost: from.previous_proposer_boost, From fff248d41b5f7b020b8d836ad506a7f1a41858bf Mon Sep 17 00:00:00 2001 From: Mac L Date: Wed, 12 Nov 2025 08:43:19 +0400 Subject: [PATCH 28/74] Migrate `execution_engine_integration` to `alloy` (#8140) #6022 Migrate the `execution_engine_integration` tests to the `alloy` ecosystem. 
This removes the last remaining `ethers` dependencies Co-Authored-By: Mac L --- Cargo.lock | 1489 ++++++----------- Cargo.toml | 4 - .../execution_engine_integration/Cargo.toml | 9 +- .../src/execution_engine.rs | 10 +- .../src/test_rig.rs | 42 +- .../src/transactions.rs | 52 +- 6 files changed, 547 insertions(+), 1059 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 481fe71df06..3fbb985952c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -70,7 +70,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" dependencies = [ "crypto-common", - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -83,7 +83,7 @@ dependencies = [ "cipher 0.3.0", "cpufeatures", "ctr 0.8.0", - "opaque-debug 0.3.1", + "opaque-debug", ] [[package]] @@ -138,6 +138,17 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" +[[package]] +name = "alloy-chains" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6068f356948cd84b5ad9ac30c50478e433847f14a50714d2b68f15d052724049" +dependencies = [ + "alloy-primitives", + "num_enum", + "strum 0.27.2", +] + [[package]] name = "alloy-consensus" version = "1.0.42" @@ -150,16 +161,17 @@ dependencies = [ "alloy-serde", "alloy-trie", "alloy-tx-macros", - "auto_impl 1.2.1", + "auto_impl", "c-kzg", "derive_more 2.0.1", "either", - "k256 0.13.4", + "k256", "once_cell", "rand 0.8.5", "secp256k1", "serde", "serde_json", + "serde_with", "thiserror 2.0.12", ] @@ -241,7 +253,7 @@ dependencies = [ "alloy-primitives", "alloy-rlp", "alloy-serde", - "auto_impl 1.2.1", + "auto_impl", "c-kzg", "derive_more 2.0.1", "either", @@ -263,6 +275,47 @@ dependencies = [ "serde_json", ] +[[package]] +name = "alloy-json-rpc" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"31b67c5a702121e618217f7a86f314918acb2622276d0273490e2d4534490bc0" +dependencies = [ + "alloy-primitives", + "alloy-sol-types", + "http 1.3.0", + "serde", + "serde_json", + "thiserror 2.0.12", + "tracing", +] + +[[package]] +name = "alloy-network" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5d6ed73d440bae8f27771b7cd507fa8f10f19ddf0b8f67e7622a52e0dbf798e" +dependencies = [ + "alloy-consensus", + "alloy-consensus-any", + "alloy-eips", + "alloy-json-rpc", + "alloy-network-primitives", + "alloy-primitives", + "alloy-rpc-types-any", + "alloy-rpc-types-eth", + "alloy-serde", + "alloy-signer", + "alloy-sol-types", + "async-trait", + "auto_impl", + "derive_more 2.0.1", + "futures-utils-wasm", + "serde", + "serde_json", + "thiserror 2.0.12", +] + [[package]] name = "alloy-network-primitives" version = "1.0.42" @@ -293,7 +346,7 @@ dependencies = [ "hashbrown 0.16.0", "indexmap 2.8.0", "itoa", - "k256 0.13.4", + "k256", "keccak-asm", "paste", "proptest", @@ -306,6 +359,45 @@ dependencies = [ "tiny-keccak", ] +[[package]] +name = "alloy-provider" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0ef8cbc2b68e2512acf04b2d296c05c98a661bc460462add6414528f4ff3d9b" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-network", + "alloy-network-primitives", + "alloy-primitives", + "alloy-rpc-client", + "alloy-rpc-types-eth", + "alloy-signer", + "alloy-sol-types", + "alloy-transport", + "alloy-transport-http", + "async-stream", + "async-trait", + "auto_impl", + "dashmap", + "either", + "futures", + "futures-utils-wasm", + "lru 0.13.0", + "parking_lot", + "pin-project", + "reqwest 0.12.15", + "serde", + "serde_json", + "thiserror 2.0.12", + "tokio", + "tracing", + "url", + "wasmtimer", +] + [[package]] name = "alloy-rlp" version = "0.3.12" @@ -328,6 +420,40 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "alloy-rpc-client" 
+version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45f802228273056528dfd6cc8845cc91a7c7e0c6fc1a66d19e8673743dacdc7e" +dependencies = [ + "alloy-json-rpc", + "alloy-primitives", + "alloy-transport", + "alloy-transport-http", + "futures", + "pin-project", + "reqwest 0.12.15", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower 0.5.2", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-rpc-types-any" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "425e14ee32eb8b7edd6a2247fe0ed640785e6eba75af27db27f1e6220c15ef0d" +dependencies = [ + "alloy-consensus-any", + "alloy-rpc-types-eth", + "alloy-serde", +] + [[package]] name = "alloy-rpc-types-eth" version = "1.0.42" @@ -345,6 +471,7 @@ dependencies = [ "itertools 0.14.0", "serde", "serde_json", + "serde_with", "thiserror 2.0.12", ] @@ -359,6 +486,37 @@ dependencies = [ "serde_json", ] +[[package]] +name = "alloy-signer" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cf0b42ffbf558badfecf1dde0c3c5ed91f29bb7e97876d0bed008c3d5d67171" +dependencies = [ + "alloy-primitives", + "async-trait", + "auto_impl", + "either", + "elliptic-curve", + "k256", + "thiserror 2.0.12", +] + +[[package]] +name = "alloy-signer-local" +version = "1.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "590dcaeb290cdce23155e68af4791d093afc3754b1a331198a25d2d44c5456e8" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives", + "alloy-signer", + "async-trait", + "k256", + "rand 0.8.5", + "thiserror 2.0.12", +] + [[package]] name = "alloy-sol-macro" version = "1.4.1" @@ -429,6 +587,44 @@ dependencies = [ "serde", ] +[[package]] +name = "alloy-transport" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71b3deee699d6f271eab587624a9fa84d02d0755db7a95a043d52a6488d16ebe" 
+dependencies = [ + "alloy-json-rpc", + "auto_impl", + "base64 0.22.1", + "derive_more 2.0.1", + "futures", + "futures-utils-wasm", + "parking_lot", + "serde", + "serde_json", + "thiserror 2.0.12", + "tokio", + "tower 0.5.2", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-transport-http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1720bd2ba8fe7e65138aca43bb0f680e4e0bcbd3ca39bf9d3035c9d7d2757f24" +dependencies = [ + "alloy-json-rpc", + "alloy-transport", + "reqwest 0.12.15", + "serde_json", + "tower 0.5.2", + "tracing", + "url", +] + [[package]] name = "alloy-trie" version = "0.9.1" @@ -909,17 +1105,6 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "async_io_stream" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" -dependencies = [ - "futures", - "pharos", - "rustc_version 0.4.1", -] - [[package]] name = "asynchronous-codec" version = "0.7.0" @@ -951,18 +1136,6 @@ dependencies = [ "url", ] -[[package]] -name = "auto_impl" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7862e21c893d65a1650125d157eaeec691439379a1cee17ee49031b79236ada4" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "auto_impl" version = "1.2.1" @@ -1048,40 +1221,12 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" -[[package]] -name = "base16ct" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" - [[package]] name = "base16ct" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" -[[package]] -name = "base58" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" - -[[package]] -name = "base58check" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ee2fe4c9a0c84515f136aaae2466744a721af6d63339c18689d9e995d74d99b" -dependencies = [ - "base58", - "sha2 0.8.2", -] - -[[package]] -name = "base64" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" - [[package]] name = "base64" version = "0.13.1" @@ -1111,7 +1256,7 @@ name = "beacon_chain" version = "0.2.0" dependencies = [ "alloy-primitives", - "bitvec 1.0.1", + "bitvec", "bls", "criterion", "educe", @@ -1132,7 +1277,7 @@ dependencies = [ "lighthouse_tracing", "lighthouse_version", "logging", - "lru", + "lru 0.12.5", "maplit", "merkle_proof", "metrics", @@ -1141,7 +1286,7 @@ dependencies = [ "once_cell", "oneshot_broadcast", "operation_pool", - "parking_lot 0.12.3", + "parking_lot", "proto_array", "rand 0.9.0", "rayon", @@ -1155,7 +1300,7 @@ dependencies = [ "ssz_types", "state_processing", "store", - "strum", + "strum 0.24.1", "superstruct", "task_executor", "tempfile", @@ -1194,7 +1339,7 @@ dependencies = [ "serde_json", "slasher", "store", - "strum", + "strum 0.24.1", "task_executor", "tracing", "types", @@ -1211,7 +1356,7 @@ dependencies = [ "sensitive_url", "serde", "slot_clock", - "strum", + "strum 0.24.1", "task_executor", "tokio", "tracing", @@ -1231,10 +1376,10 @@ dependencies = [ "logging", "metrics", "num_cpus", - "parking_lot 0.12.3", + "parking_lot", "serde", "slot_clock", - "strum", + "strum 0.24.1", "task_executor", "tokio", "tokio-util", @@ -1242,12 +1387,6 @@ dependencies = [ "types", ] -[[package]] -name = "bech32" -version = "0.7.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dabbe35f96fb9507f7330793dc490461b2962659ac5d427181e451a623751d1" - [[package]] name = "bincode" version = "1.3.3" @@ -1323,16 +1462,6 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" -[[package]] -name = "bitvec" -version = "0.17.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" -dependencies = [ - "either", - "radium 0.3.0", -] - [[package]] name = "bitvec" version = "1.0.1" @@ -1340,7 +1469,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ "funty", - "radium 0.7.0", + "radium", "tap", "wyz", ] @@ -1354,25 +1483,13 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "block-buffer" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" -dependencies = [ - "block-padding", - "byte-tools", - "byteorder", - "generic-array 0.12.4", -] - [[package]] name = "block-buffer" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -1381,16 +1498,7 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "block-padding" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" -dependencies = [ - "byte-tools", + 
"generic-array", ] [[package]] @@ -1432,8 +1540,8 @@ checksum = "7a8a8ed6fefbeef4a8c7b460e4110e12c5e22a5b7cf32621aae6ad650c4dcf29" dependencies = [ "blst", "byte-slice-cast", - "ff 0.13.1", - "group 0.13.0", + "ff", + "group", "pairing", "rand_core 0.6.4", "serde", @@ -1504,12 +1612,6 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" -[[package]] -name = "byte-tools" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" - [[package]] name = "byteorder" version = "1.5.0" @@ -1664,6 +1766,7 @@ dependencies = [ "iana-time-zone", "js-sys", "num-traits", + "serde", "wasm-bindgen", "windows-link", ] @@ -1701,7 +1804,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -1835,63 +1938,6 @@ dependencies = [ "cc", ] -[[package]] -name = "coins-bip32" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634c509653de24b439672164bbf56f5f582a2ab0e313d3b0f6af0b7345cf2560" -dependencies = [ - "bincode", - "bs58 0.4.0", - "coins-core", - "digest 0.10.7", - "getrandom 0.2.15", - "hmac 0.12.1", - "k256 0.11.6", - "lazy_static", - "serde", - "sha2 0.10.8", - "thiserror 1.0.69", -] - -[[package]] -name = "coins-bip39" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a11892bcac83b4c6e95ab84b5b06c76d9d70ad73548dd07418269c5c7977171" -dependencies = [ - "bitvec 0.17.4", - "coins-bip32", - "getrandom 0.2.15", - "hex", - "hmac 0.12.1", - "pbkdf2 0.11.0", - "rand 0.8.5", - "sha2 0.10.8", - "thiserror 1.0.69", -] - -[[package]] -name = "coins-core" -version = "0.7.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c94090a6663f224feae66ab01e41a2555a8296ee07b5f20dab8888bdefc9f617" -dependencies = [ - "base58check", - "base64 0.12.3", - "bech32", - "blake2", - "digest 0.10.7", - "generic-array 0.14.7", - "hex", - "ripemd", - "serde", - "serde_derive", - "sha2 0.10.8", - "sha3", - "thiserror 1.0.69", -] - [[package]] name = "colorchoice" version = "1.0.3" @@ -2047,15 +2093,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" -[[package]] -name = "convert_case" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" -dependencies = [ - "unicode-segmentation", -] - [[package]] name = "core-foundation" version = "0.9.4" @@ -2206,25 +2243,13 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" -[[package]] -name = "crypto-bigint" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" -dependencies = [ - "generic-array 0.14.7", - "rand_core 0.6.4", - "subtle", - "zeroize", -] - [[package]] name = "crypto-bigint" version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ - "generic-array 0.14.7", + "generic-array", "rand_core 0.6.4", "subtle", "zeroize", @@ -2236,7 +2261,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.7", + "generic-array", "rand_core 0.6.4", "typenum", ] @@ -2247,7 +2272,7 @@ version = "0.11.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" dependencies = [ - "generic-array 0.14.7", + "generic-array", "subtle", ] @@ -2432,6 +2457,20 @@ dependencies = [ "libc", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "data-encoding" version = "2.8.0" @@ -2470,7 +2509,7 @@ dependencies = [ "hex", "serde", "store", - "strum", + "strum 0.24.1", "tracing", "types", ] @@ -2508,16 +2547,6 @@ dependencies = [ "types", ] -[[package]] -name = "der" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1a467a65c5e759bce6e65eaf91cc29f466cdc57cb65777bd646872a8a1fd4de" -dependencies = [ - "const-oid", - "zeroize", -] - [[package]] name = "der" version = "0.7.9" @@ -2549,6 +2578,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", + "serde", ] [[package]] @@ -2579,40 +2609,20 @@ version = "0.99.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3da29a38df43d6f156149c9b43ded5e018ddff2a855cf2cfd62e8cd7d079c69f" dependencies = [ - "convert_case 0.4.0", + "convert_case", "proc-macro2", "quote", "rustc_version 0.4.1", "syn 2.0.100", ] -[[package]] -name = "derive_more" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" -dependencies = [ - "derive_more-impl 1.0.0", -] - [[package]] name = "derive_more" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" dependencies = [ - "derive_more-impl 2.0.1", -] - -[[package]] -name = "derive_more-impl" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.100", + "derive_more-impl", ] [[package]] @@ -2627,22 +2637,13 @@ dependencies = [ "unicode-xid", ] -[[package]] -name = "digest" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" -dependencies = [ - "generic-array 0.12.4", -] - [[package]] name = "digest" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array 0.14.7", + "generic-array", ] [[package]] @@ -2706,10 +2707,10 @@ dependencies = [ "hkdf", "lazy_static", "libp2p-identity", - "lru", + "lru 0.12.5", "more-asserts", "multiaddr", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "smallvec", "socket2", @@ -2739,7 +2740,7 @@ dependencies = [ "eth2", "futures", "logging", - "parking_lot 0.12.3", + "parking_lot", "slot_clock", "task_executor", "tokio", @@ -2767,16 +2768,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] -name = "ecdsa" -version = "0.14.8" +name = "dyn-clone" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413301934810f597c1d19ca71c8710e99a3f1ba28a0d2ebc01551a2daeea3c5c" -dependencies = [ - "der 0.6.1", - "elliptic-curve 0.12.3", - "rfc6979 0.3.1", - "signature 1.6.4", -] +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" [[package]] name = "ecdsa" @@ -2784,13 +2779,13 @@ version = 
"0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ - "der 0.7.9", + "der", "digest 0.10.7", - "elliptic-curve 0.13.8", - "rfc6979 0.4.0", + "elliptic-curve", + "rfc6979", "serdect", - "signature 2.2.0", - "spki 0.7.3", + "signature", + "spki", ] [[package]] @@ -2799,8 +2794,8 @@ version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ - "pkcs8 0.10.2", - "signature 2.2.0", + "pkcs8", + "signature", ] [[package]] @@ -2910,8 +2905,8 @@ checksum = "05c599a59deba6188afd9f783507e4d89efc997f0fa340a758f0d0992b322416" dependencies = [ "blst", "blstrs", - "ff 0.13.1", - "group 0.13.0", + "ff", + "group", "pairing", "subtle", ] @@ -2988,41 +2983,21 @@ dependencies = [ "serde_json", ] -[[package]] -name = "elliptic-curve" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7bb888ab5300a19b8e5bceef25ac745ad065f3c9f7efc6de1b91958110891d3" -dependencies = [ - "base16ct 0.1.1", - "crypto-bigint 0.4.9", - "der 0.6.1", - "digest 0.10.7", - "ff 0.12.1", - "generic-array 0.14.7", - "group 0.12.1", - "pkcs8 0.9.0", - "rand_core 0.6.4", - "sec1 0.3.0", - "subtle", - "zeroize", -] - [[package]] name = "elliptic-curve" version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ - "base16ct 0.2.0", - "crypto-bigint 0.5.5", + "base16ct", + "crypto-bigint", "digest 0.10.7", - "ff 0.13.1", - "generic-array 0.14.7", - "group 0.13.0", - "pkcs8 0.10.2", + "ff", + "generic-array", + "group", + "pkcs8", "rand_core 0.6.4", - "sec1 0.7.3", + "sec1", "serdect", "subtle", "zeroize", @@ -3048,7 +3023,7 @@ dependencies = [ "bytes", "ed25519-dalek", "hex", - "k256 0.13.4", + "k256", "log", "rand 0.8.5", 
"serde", @@ -3136,28 +3111,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "eth-keystore" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fda3bf123be441da5260717e0661c25a2fd9cb2b2c1d20bf2e05580047158ab" -dependencies = [ - "aes 0.8.4", - "ctr 0.9.2", - "digest 0.10.7", - "hex", - "hmac 0.12.1", - "pbkdf2 0.11.0", - "rand 0.8.5", - "scrypt 0.10.0", - "serde", - "serde_json", - "sha2 0.10.8", - "sha3", - "thiserror 1.0.69", - "uuid 0.8.2", -] - [[package]] name = "eth2" version = "0.1.0" @@ -3234,7 +3187,7 @@ dependencies = [ "hmac 0.11.0", "pbkdf2 0.8.0", "rand 0.9.0", - "scrypt 0.7.0", + "scrypt", "serde", "serde_json", "serde_repr", @@ -3286,58 +3239,10 @@ dependencies = [ [[package]] name = "eth2_wallet_manager" version = "0.1.0" -dependencies = [ - "eth2_wallet", - "lockfile", - "tempfile", -] - -[[package]] -name = "ethabi" -version = "18.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7413c5f74cc903ea37386a8965a936cbeb334bd270862fdece542c1b2dcbc898" -dependencies = [ - "ethereum-types", - "hex", - "once_cell", - "regex", - "serde", - "serde_json", - "sha3", - "thiserror 1.0.69", - "uint 0.9.5", -] - -[[package]] -name = "ethbloom" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c22d4b5885b6aa2fe5e8b9329fb8d232bf739e434e6b87347c63bdd00c120f60" -dependencies = [ - "crunchy", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", - "tiny-keccak", -] - -[[package]] -name = "ethereum-types" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02d215cbf040552efcbe99a38372fe80ab9d00268e20012b79fcd0f073edd8ee" -dependencies = [ - "ethbloom", - "fixed-hash", - "impl-codec", - "impl-rlp", - "impl-serde", - "primitive-types", - "scale-info", - "uint 0.9.5", +dependencies = [ + "eth2_wallet", + "lockfile", + "tempfile", ] [[package]] @@ -3392,148 
+3297,6 @@ dependencies = [ "syn 2.0.100", ] -[[package]] -name = "ethers-contract" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9c3c3e119a89f0a9a1e539e7faecea815f74ddcf7c90d0b00d1f524db2fdc9c" -dependencies = [ - "ethers-core", - "ethers-providers", - "futures-util", - "hex", - "once_cell", - "pin-project", - "serde", - "serde_json", - "thiserror 1.0.69", -] - -[[package]] -name = "ethers-core" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade3e9c97727343984e1ceada4fdab11142d2ee3472d2c67027d56b1251d4f15" -dependencies = [ - "arrayvec", - "bytes", - "chrono", - "convert_case 0.6.0", - "elliptic-curve 0.12.3", - "ethabi", - "generic-array 0.14.7", - "hex", - "k256 0.11.6", - "open-fastrlp", - "proc-macro2", - "rand 0.8.5", - "rlp", - "rlp-derive", - "serde", - "serde_json", - "strum", - "syn 1.0.109", - "thiserror 1.0.69", - "tiny-keccak", - "unicode-xid", -] - -[[package]] -name = "ethers-etherscan" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9713f525348e5dde025d09b0a4217429f8074e8ff22c886263cc191e87d8216" -dependencies = [ - "ethers-core", - "getrandom 0.2.15", - "reqwest 0.11.27", - "semver 1.0.26", - "serde", - "serde-aux", - "serde_json", - "thiserror 1.0.69", - "tracing", -] - -[[package]] -name = "ethers-middleware" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e71df7391b0a9a51208ffb5c7f2d068900e99d6b3128d3a4849d138f194778b7" -dependencies = [ - "async-trait", - "auto_impl 0.5.0", - "ethers-contract", - "ethers-core", - "ethers-etherscan", - "ethers-providers", - "ethers-signers", - "futures-locks", - "futures-util", - "instant", - "reqwest 0.11.27", - "serde", - "serde_json", - "thiserror 1.0.69", - "tokio", - "tracing", - "tracing-futures", - "url", -] - -[[package]] -name = "ethers-providers" -version = "1.0.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a9e0597aa6b2fdc810ff58bc95e4eeaa2c219b3e615ed025106ecb027407d8" -dependencies = [ - "async-trait", - "auto_impl 1.2.1", - "base64 0.13.1", - "ethers-core", - "futures-core", - "futures-timer", - "futures-util", - "getrandom 0.2.15", - "hashers", - "hex", - "http 0.2.12", - "once_cell", - "parking_lot 0.11.2", - "pin-project", - "reqwest 0.11.27", - "serde", - "serde_json", - "thiserror 1.0.69", - "tokio", - "tracing", - "tracing-futures", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-timer", - "web-sys", - "ws_stream_wasm", -] - -[[package]] -name = "ethers-signers" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f41ced186867f64773db2e55ffdd92959e094072a1d09a5e5e831d443204f98" -dependencies = [ - "async-trait", - "coins-bip32", - "coins-bip39", - "elliptic-curve 0.12.3", - "eth-keystore", - "ethers-core", - "hex", - "rand 0.8.5", - "sha2 0.10.8", - "thiserror 1.0.69", -] - [[package]] name = "event-listener" version = "2.5.3" @@ -3576,12 +3339,13 @@ dependencies = [ name = "execution_engine_integration" version = "0.1.0" dependencies = [ + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-eth", + "alloy-signer-local", "async-channel 1.9.0", "deposit_contract", - "ethers-core", - "ethers-middleware", - "ethers-providers", - "ethers-signers", "execution_layer", "fork_choice", "futures", @@ -3621,9 +3385,9 @@ dependencies = [ "kzg", "lighthouse_version", "logging", - "lru", + "lru 0.12.5", "metrics", - "parking_lot 0.12.3", + "parking_lot", "pretty_reqwest_error", "rand 0.9.0", "reqwest 0.11.27", @@ -3634,7 +3398,7 @@ dependencies = [ "slot_clock", "ssz_types", "state_processing", - "strum", + "strum 0.24.1", "superstruct", "task_executor", "tempfile", @@ -3649,12 +3413,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "fake-simd" -version = "0.1.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" - [[package]] name = "fallible-iterator" version = "0.2.0" @@ -3680,7 +3438,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" dependencies = [ "arrayvec", - "auto_impl 1.2.1", + "auto_impl", "bytes", ] @@ -3691,7 +3449,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" dependencies = [ "arrayvec", - "auto_impl 1.2.1", + "auto_impl", "bytes", ] @@ -3705,23 +3463,13 @@ dependencies = [ "thiserror 1.0.69", ] -[[package]] -name = "ff" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "ff" version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" dependencies = [ - "bitvec 1.0.1", + "bitvec", "rand_core 0.6.4", "subtle", ] @@ -3938,16 +3686,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "futures-locks" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45ec6fe3675af967e67c5536c0b9d44e34e6c52f86bedc4ea49c5317b8e94d06" -dependencies = [ - "futures-channel", - "futures-task", -] - [[package]] name = "futures-macro" version = "0.3.31" @@ -4007,13 +3745,10 @@ dependencies = [ ] [[package]] -name = "fxhash" -version = "0.2.1" +name = "futures-utils-wasm" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] +checksum = 
"42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" [[package]] name = "generator" @@ -4028,15 +3763,6 @@ dependencies = [ "windows 0.58.0", ] -[[package]] -name = "generic-array" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" -dependencies = [ - "typenum", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -4094,7 +3820,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" dependencies = [ - "opaque-debug 0.3.1", + "opaque-debug", "polyval", ] @@ -4122,24 +3848,13 @@ dependencies = [ "types", ] -[[package]] -name = "group" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" -dependencies = [ - "ff 0.12.1", - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "group" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ - "ff 0.13.1", + "ff", "rand 0.8.5", "rand_core 0.6.4", "rand_xorshift 0.3.0", @@ -4246,15 +3961,6 @@ dependencies = [ "serde", ] -[[package]] -name = "hashers" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2bca93b15ea5a746f220e56587f71e73c6165eab783df9e26590069953e3c30" -dependencies = [ - "fxhash", -] - [[package]] name = "hashlink" version = "0.8.4" @@ -4421,7 +4127,7 @@ dependencies = [ "ipconfig", "moka", "once_cell", - "parking_lot 0.12.3", + "parking_lot", "rand 0.9.0", "resolv-conf", "smallvec", @@ -4556,12 +4262,12 @@ dependencies = [ "lighthouse_tracing", "lighthouse_version", "logging", - "lru", + "lru 0.12.5", "metrics", "network", "network_utils", "operation_pool", - "parking_lot 0.12.3", + 
"parking_lot", "proto_array", "rand 0.9.0", "safe_arith", @@ -4709,6 +4415,22 @@ dependencies = [ "tokio-native-tls", ] +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + [[package]] name = "hyper-util" version = "0.1.10" @@ -4959,24 +4681,6 @@ dependencies = [ "parity-scale-codec", ] -[[package]] -name = "impl-rlp" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28220f89297a075ddc7245cd538076ee98b01f2a9c23a53a4f1105d5a322808" -dependencies = [ - "rlp", -] - -[[package]] -name = "impl-serde" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" -dependencies = [ - "serde", -] - [[package]] name = "impl-trait-for-tuples" version = "0.2.3" @@ -4996,6 +4700,7 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", + "serde", ] [[package]] @@ -5021,7 +4726,7 @@ dependencies = [ "filesystem", "lockfile", "metrics", - "parking_lot 0.12.3", + "parking_lot", "rand 0.9.0", "reqwest 0.11.27", "serde", @@ -5042,19 +4747,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", + "generic-array", ] [[package]] @@ -5197,19 +4890,6 @@ 
dependencies = [ "simple_asn1", ] -[[package]] -name = "k256" -version = "0.11.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" -dependencies = [ - "cfg-if", - "ecdsa 0.14.8", - "elliptic-curve 0.12.3", - "sha2 0.10.8", - "sha3", -] - [[package]] name = "k256" version = "0.13.4" @@ -5217,12 +4897,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" dependencies = [ "cfg-if", - "ecdsa 0.16.9", - "elliptic-curve 0.13.8", + "ecdsa", + "elliptic-curve", "once_cell", "serdect", "sha2 0.10.8", - "signature 2.2.0", + "signature", ] [[package]] @@ -5383,7 +5063,7 @@ dependencies = [ "indexmap 1.9.3", "libc", "mdbx-sys", - "parking_lot 0.12.3", + "parking_lot", "thiserror 1.0.69", ] @@ -5455,7 +5135,7 @@ dependencies = [ "multiaddr", "multihash", "multistream-select", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "quick-protobuf", "rand 0.8.5", @@ -5477,7 +5157,7 @@ dependencies = [ "hickory-resolver", "libp2p-core", "libp2p-identity", - "parking_lot 0.12.3", + "parking_lot", "smallvec", "tracing", ] @@ -5543,7 +5223,7 @@ dependencies = [ "bs58 0.5.1", "ed25519-dalek", "hkdf", - "k256 0.13.4", + "k256", "multihash", "quick-protobuf", "rand 0.8.5", @@ -5600,7 +5280,7 @@ dependencies = [ "libp2p-core", "libp2p-identity", "nohash-hasher", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "smallvec", "tracing", @@ -5681,7 +5361,7 @@ dependencies = [ "libp2p-core", "libp2p-identity", "libp2p-swarm-derive", - "lru", + "lru 0.12.5", "multistream-select", "rand 0.8.5", "smallvec", @@ -5875,11 +5555,11 @@ dependencies = [ "lighthouse_version", "local-ip-address", "logging", - "lru", + "lru 0.12.5", "lru_cache", "metrics", "network_utils", - "parking_lot 0.12.3", + "parking_lot", "prometheus-client", "quickcheck", "quickcheck_macros", @@ -5890,7 +5570,7 @@ dependencies = [ 
"smallvec", "snap", "ssz_types", - "strum", + "strum 0.24.1", "superstruct", "task_executor", "tempfile", @@ -5919,7 +5599,7 @@ dependencies = [ "futures", "initialized_validators", "logging", - "parking_lot 0.12.3", + "parking_lot", "serde", "signing_method", "slashing_protection", @@ -6072,6 +5752,15 @@ dependencies = [ "hashbrown 0.15.2", ] +[[package]] +name = "lru" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "227748d55f2f0ab4735d87fd623798cb6b664512fe979705f829c9f81c934465" +dependencies = [ + "hashbrown 0.15.2", +] + [[package]] name = "lru_cache" version = "0.1.0" @@ -6106,7 +5795,7 @@ version = "0.1.0" dependencies = [ "libc", "metrics", - "parking_lot 0.12.3", + "parking_lot", "tikv-jemalloc-ctl", "tikv-jemallocator", ] @@ -6231,7 +5920,7 @@ dependencies = [ "ethereum_ssz", "ethereum_ssz_derive", "itertools 0.13.0", - "parking_lot 0.12.3", + "parking_lot", "rayon", "serde", "smallvec", @@ -6361,7 +6050,7 @@ dependencies = [ "crossbeam-epoch", "crossbeam-utils", "loom", - "parking_lot 0.12.3", + "parking_lot", "portable-atomic", "rustc_version 0.4.1", "smallvec", @@ -6578,7 +6267,7 @@ dependencies = [ "hex", "igd-next", "itertools 0.10.5", - "k256 0.13.4", + "k256", "kzg", "libp2p-gossipsub", "lighthouse_network", @@ -6588,7 +6277,7 @@ dependencies = [ "matches", "metrics", "operation_pool", - "parking_lot 0.12.3", + "parking_lot", "rand 0.8.5", "rand 0.9.0", "rand_chacha 0.3.1", @@ -6598,7 +6287,7 @@ dependencies = [ "smallvec", "ssz_types", "store", - "strum", + "strum 0.24.1", "task_executor", "tokio", "tokio-stream", @@ -6617,7 +6306,7 @@ dependencies = [ "lru_cache", "metrics", "multiaddr", - "parking_lot 0.12.3", + "parking_lot", "serde", "tiny-keccak", ] @@ -6781,13 +6470,36 @@ dependencies = [ "libc", ] +[[package]] +name = "num_enum" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" 
+dependencies = [ + "num_enum_derive", + "rustversion", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "nybbles" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c4b5ecbd0beec843101bffe848217f770e8b8da81d8355b7d6e226f2199b3dc" dependencies = [ + "alloy-rlp", "cfg-if", + "proptest", "ruint", "serde", "smallvec", @@ -6825,7 +6537,7 @@ dependencies = [ name = "oneshot_broadcast" version = "0.1.0" dependencies = [ - "parking_lot 0.12.3", + "parking_lot", ] [[package]] @@ -6834,43 +6546,12 @@ version = "11.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" -[[package]] -name = "opaque-debug" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" - [[package]] name = "opaque-debug" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" -[[package]] -name = "open-fastrlp" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786393f80485445794f6043fd3138854dd109cc6c4bd1a6383db304c9ce9b9ce" -dependencies = [ - "arrayvec", - "auto_impl 1.2.1", - "bytes", - "ethereum-types", - "open-fastrlp-derive", -] - -[[package]] -name = "open-fastrlp-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" -dependencies = [ - "bytes", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "openssl" version = "0.10.72" @@ 
-7004,14 +6685,14 @@ name = "operation_pool" version = "0.2.0" dependencies = [ "beacon_chain", - "bitvec 1.0.1", + "bitvec", "educe", "ethereum_ssz", "ethereum_ssz_derive", "itertools 0.10.5", "maplit", "metrics", - "parking_lot 0.12.3", + "parking_lot", "rand 0.9.0", "rayon", "serde", @@ -7021,22 +6702,13 @@ dependencies = [ "types", ] -[[package]] -name = "ordered-float" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" -dependencies = [ - "num-traits", -] - [[package]] name = "pairing" version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81fec4625e73cf41ef4bb6846cafa6d44736525f442ba45e407c4a000a13996f" dependencies = [ - "group 0.13.0", + "group", ] [[package]] @@ -7046,7 +6718,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9fde3d0718baf5bc92f577d652001da0f8d54cd03a7974e118d04fc888dc23d" dependencies = [ "arrayvec", - "bitvec 1.0.1", + "bitvec", "byte-slice-cast", "const_format", "impl-trait-for-tuples", @@ -7073,17 +6745,6 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" -[[package]] -name = "parking_lot" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" -dependencies = [ - "instant", - "lock_api", - "parking_lot_core 0.8.6", -] - [[package]] name = "parking_lot" version = "0.12.3" @@ -7091,21 +6752,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", - "parking_lot_core 0.9.10", -] - -[[package]] -name = "parking_lot_core" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" -dependencies = [ - "cfg-if", - "instant", - "libc", - "redox_syscall 0.2.16", - "smallvec", - "winapi", + "parking_lot_core", ] [[package]] @@ -7116,7 +6763,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.10", + "redox_syscall", "smallvec", "windows-targets 0.52.6", ] @@ -7186,16 +6833,6 @@ dependencies = [ "ucd-trie", ] -[[package]] -name = "pharos" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" -dependencies = [ - "futures", - "rustc_version 0.4.1", -] - [[package]] name = "pin-project" version = "1.1.10" @@ -7228,24 +6865,14 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" -[[package]] -name = "pkcs8" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9eca2c590a5f85da82668fa685c09ce2888b9430e83299debf1f34b65fd4a4ba" -dependencies = [ - "der 0.6.1", - "spki 0.6.0", -] - [[package]] name = "pkcs8" version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ - "der 0.7.9", - "spki 0.7.3", + "der", + "spki", ] [[package]] @@ -7310,7 +6937,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" dependencies = [ "cpufeatures", - "opaque-debug 0.3.1", + "opaque-debug", "universal-hash", ] @@ -7322,7 +6949,7 @@ checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" dependencies = [ "cfg-if", "cpufeatures", - "opaque-debug 0.3.1", + "opaque-debug", "universal-hash", ] @@ -7399,9 +7026,6 @@ checksum = 
"0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" dependencies = [ "fixed-hash", "impl-codec", - "impl-rlp", - "impl-serde", - "scale-info", "uint 0.9.5", ] @@ -7414,30 +7038,6 @@ dependencies = [ "toml_edit", ] -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - [[package]] name = "proc-macro-error-attr2" version = "2.0.0" @@ -7494,7 +7094,7 @@ dependencies = [ "fnv", "lazy_static", "memchr", - "parking_lot 0.12.3", + "parking_lot", "thiserror 1.0.69", ] @@ -7506,7 +7106,7 @@ checksum = "cf41c1a7c32ed72abe5082fb19505b969095c12da9f5732a4bc9878757fd087c" dependencies = [ "dtoa", "itoa", - "parking_lot 0.12.3", + "parking_lot", "prometheus-client-derive-encode", ] @@ -7734,7 +7334,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" dependencies = [ "log", - "parking_lot 0.12.3", + "parking_lot", "scheduled-thread-pool", ] @@ -7748,12 +7348,6 @@ dependencies = [ "rusqlite", ] -[[package]] -name = "radium" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" - [[package]] name = "radium" version = "0.7.0" @@ -7883,15 +7477,6 @@ dependencies = [ "libc", ] -[[package]] -name = "redox_syscall" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.5.10" @@ -7912,6 +7497,26 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "regex" version = "1.11.1" @@ -7957,7 +7562,7 @@ dependencies = [ "http-body 0.4.6", "hyper 0.14.32", "hyper-rustls", - "hyper-tls", + "hyper-tls 0.5.0", "ipnet", "js-sys", "log", @@ -8002,19 +7607,23 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.6.0", + "hyper-tls 0.6.0", "hyper-util", "ipnet", "js-sys", "log", "mime", + "native-tls", "once_cell", "percent-encoding", "pin-project-lite", + "rustls-pemfile 2.2.0", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", + "tokio-native-tls", "tower 0.5.2", "tower-service", "url", @@ -8050,17 +7659,6 @@ dependencies = [ "quick-error", ] -[[package]] -name = "rfc6979" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7743f17af12fa0b03b803ba12cd6a8d9483a587e89c69445e3909655c0b9fabb" -dependencies = [ - "crypto-bigint 0.4.9", - "hmac 0.12.1", - "zeroize", -] - [[package]] name = "rfc6979" version = "0.4.0" @@ -8085,15 +7683,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest 0.10.7", 
-] - [[package]] name = "rlp" version = "0.5.2" @@ -8104,17 +7693,6 @@ dependencies = [ "rustc-hex", ] -[[package]] -name = "rlp-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "rpassword" version = "5.0.1" @@ -8472,15 +8050,6 @@ dependencies = [ "cipher 0.3.0", ] -[[package]] -name = "salsa20" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213" -dependencies = [ - "cipher 0.4.4", -] - [[package]] name = "same-file" version = "1.0.6" @@ -8491,45 +8060,45 @@ dependencies = [ ] [[package]] -name = "scale-info" -version = "2.11.6" +name = "schannel" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "346a3b32eba2640d17a9cb5927056b08f3de90f65b72fe09402c2ad07d684d0b" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ - "cfg-if", - "derive_more 1.0.0", - "parity-scale-codec", - "scale-info-derive", + "windows-sys 0.59.0", ] [[package]] -name = "scale-info-derive" -version = "2.11.6" +name = "scheduled-thread-pool" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6630024bf739e2179b91fb424b28898baf819414262c5d376677dbff1fe7ebf" +checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.100", + "parking_lot", ] [[package]] -name = "schannel" -version = "0.1.27" +name = "schemars" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" 
dependencies = [ - "windows-sys 0.59.0", + "dyn-clone", + "ref-cast", + "serde", + "serde_json", ] [[package]] -name = "scheduled-thread-pool" -version = "0.2.7" +name = "schemars" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" +checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289" dependencies = [ - "parking_lot 0.12.3", + "dyn-clone", + "ref-cast", + "serde", + "serde_json", ] [[package]] @@ -8552,22 +8121,10 @@ checksum = "879588d8f90906e73302547e20fffefdd240eb3e0e744e142321f5d49dea0518" dependencies = [ "hmac 0.11.0", "pbkdf2 0.8.0", - "salsa20 0.8.1", + "salsa20", "sha2 0.9.9", ] -[[package]] -name = "scrypt" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f9e24d2b632954ded8ab2ef9fea0a0c769ea56ea98bddbafbad22caeeadf45d" -dependencies = [ - "hmac 0.12.1", - "pbkdf2 0.11.0", - "salsa20 0.10.2", - "sha2 0.10.8", -] - [[package]] name = "sct" version = "0.7.1" @@ -8578,30 +8135,16 @@ dependencies = [ "untrusted", ] -[[package]] -name = "sec1" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be24c1842290c45df0a7bf069e0c268a747ad05a192f2fd7dcfdbc1cba40928" -dependencies = [ - "base16ct 0.1.1", - "der 0.6.1", - "generic-array 0.14.7", - "pkcs8 0.9.0", - "subtle", - "zeroize", -] - [[package]] name = "sec1" version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" dependencies = [ - "base16ct 0.2.0", - "der 0.7.9", - "generic-array 0.14.7", - "pkcs8 0.10.2", + "base16ct", + "der", + "generic-array", + "pkcs8", "serdect", "subtle", "zeroize", @@ -8691,12 +8234,6 @@ dependencies = [ "pest", ] -[[package]] -name = "send_wrapper" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" - [[package]] name = "sensitive_url" version = "0.1.0" @@ -8717,27 +8254,6 @@ dependencies = [ "serde_derive", ] -[[package]] -name = "serde-aux" -version = "4.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5290c39c5f6992b9dddbda28541d965dba46468294e6018a408fa297e6c602de" -dependencies = [ - "serde", - "serde-value", - "serde_json", -] - -[[package]] -name = "serde-value" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" -dependencies = [ - "ordered-float", - "serde", -] - [[package]] name = "serde_array_query" version = "0.1.0" @@ -8770,14 +8286,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -8809,8 +8326,17 @@ version = "3.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.8.0", + "schemars 0.9.0", + "schemars 1.1.0", "serde_core", + "serde_json", "serde_with_macros", + "time", ] [[package]] @@ -8844,7 +8370,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177" dependencies = [ - "base16ct 0.2.0", + "base16ct", "serde", ] @@ -8859,18 +8385,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "sha2" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" -dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", -] - [[package]] name = "sha2" version = "0.9.9" @@ -8881,7 +8395,7 @@ dependencies = [ "cfg-if", "cpufeatures", "digest 0.9.0", - "opaque-debug 0.3.1", + "opaque-debug", ] [[package]] @@ -8939,16 +8453,6 @@ dependencies = [ "libc", ] -[[package]] -name = "signature" -version = "1.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" -dependencies = [ - "digest 0.10.7", - "rand_core 0.6.4", -] - [[package]] name = "signature" version = "2.2.0" @@ -8966,7 +8470,7 @@ dependencies = [ "eth2_keystore", "ethereum_serde_utils", "lockfile", - "parking_lot 0.12.3", + "parking_lot", "reqwest 0.11.27", "serde", "task_executor", @@ -9004,7 +8508,7 @@ dependencies = [ "kzg", "logging", "node_test_rig", - "parking_lot 0.12.3", + "parking_lot", "rayon", "sensitive_url", "serde_json", @@ -9037,17 +8541,17 @@ dependencies = [ "libmdbx", "lmdb-rkv", "lmdb-rkv-sys", - "lru", + "lru 0.12.5", "maplit", "metrics", - "parking_lot 0.12.3", + "parking_lot", "rand 0.9.0", "rayon", "redb", "safe_arith", "serde", "ssz_types", - "strum", + "strum 0.24.1", "tempfile", "tracing", "tree_hash", @@ -9096,7 +8600,7 @@ name = "slot_clock" version = "0.2.0" dependencies = [ "metrics", - "parking_lot 0.12.3", + "parking_lot", "types", ] @@ -9149,16 +8653,6 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -[[package]] -name = "spki" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67cf02bbac7a337dc36e4f5a693db6c21e7863f45070f7064577eb4367a3212b" -dependencies = [ - "base64ct", - "der 0.6.1", -] - [[package]] name = "spki" version = "0.7.3" @@ -9166,7 +8660,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", - "der 0.7.9", + "der", ] [[package]] @@ -9251,9 +8745,9 @@ dependencies = [ "itertools 0.10.5", "leveldb", "logging", - "lru", + "lru 0.12.5", "metrics", - "parking_lot 0.12.3", + "parking_lot", "rand 0.9.0", "redb", "safe_arith", @@ -9261,7 +8755,7 @@ dependencies = [ "smallvec", "ssz_types", "state_processing", - "strum", + "strum 0.24.1", "superstruct", "tempfile", "tracing", @@ -9289,7 +8783,16 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", +] + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros 0.27.2", ] [[package]] @@ -9305,6 +8808,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.100", +] + [[package]] name = "subtle" version = "2.6.1" @@ -9459,7 +8974,7 @@ dependencies = [ "lighthouse_network", "metrics", "network_utils", - "parking_lot 0.12.3", + "parking_lot", "serde", "sysinfo", "types", @@ -9740,7 +9255,7 @@ dependencies = [ "bytes", "libc", "mio", - "parking_lot 0.12.3", + "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", @@ -9999,16 +9514,6 @@ dependencies = [ "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -10153,7 +9658,7 @@ dependencies = [ "merkle_proof", "metastruct", "milhouse", - "parking_lot 0.12.3", + "parking_lot", "paste", "rand 0.9.0", "rand_xorshift 0.4.0", @@ -10241,12 +9746,6 @@ dependencies = [ "tinyvec", ] -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - [[package]] name = "unicode-xid" version = "0.2.6" @@ -10360,7 +9859,7 @@ dependencies = [ "lighthouse_validator_store", "metrics", "monitoring_api", - "parking_lot 0.12.3", + "parking_lot", "reqwest 0.11.27", "sensitive_url", "serde", @@ -10416,7 +9915,7 @@ dependencies = [ "lighthouse_validator_store", "lighthouse_version", "logging", - "parking_lot 0.12.3", + "parking_lot", "rand 0.9.0", "sensitive_url", "serde", @@ -10451,7 +9950,7 @@ dependencies = [ "logging", "malloc_utils", "metrics", - "parking_lot 0.12.3", + "parking_lot", "serde", "slot_clock", "tracing", @@ -10508,7 +10007,7 @@ dependencies = [ "futures", "graffiti_file", "logging", - "parking_lot 0.12.3", + "parking_lot", "safe_arith", "slot_clock", "task_executor", @@ -10739,18 +10238,17 @@ dependencies = [ ] [[package]] -name = "wasm-timer" -version = "0.2.5" +name = "wasmtimer" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" dependencies = [ "futures", "js-sys", - "parking_lot 0.11.2", + "parking_lot", "pin-utils", + "slab", "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", ] [[package]] @@ -10787,7 +10285,7 @@ dependencies = [ "initialized_validators", "lighthouse_validator_store", "logging", - "parking_lot 
0.12.3", + "parking_lot", "reqwest 0.11.27", "serde", "serde_json", @@ -11339,25 +10837,6 @@ version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" -[[package]] -name = "ws_stream_wasm" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7999f5f4217fe3818726b66257a4475f71e74ffd190776ad053fa159e50737f5" -dependencies = [ - "async_io_stream", - "futures", - "js-sys", - "log", - "pharos", - "rustc_version 0.4.1", - "send_wrapper", - "thiserror 1.0.69", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - [[package]] name = "wyz" version = "0.5.1" @@ -11445,7 +10924,7 @@ dependencies = [ "futures", "log", "nohash-hasher", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "rand 0.8.5", "static_assertions", @@ -11460,7 +10939,7 @@ dependencies = [ "futures", "log", "nohash-hasher", - "parking_lot 0.12.3", + "parking_lot", "pin-project", "rand 0.8.5", "static_assertions", diff --git a/Cargo.toml b/Cargo.toml index 03116b3db18..cfb4881abbe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -147,10 +147,6 @@ ethereum_hashing = "0.7.0" ethereum_serde_utils = "0.8.0" ethereum_ssz = "0.9.0" ethereum_ssz_derive = "0.9.0" -ethers-core = "1" -ethers-middleware = { version = "1", default-features = false } -ethers-providers = { version = "1", default-features = false } -ethers-signers = { version = "1", default-features = false } execution_layer = { path = "beacon_node/execution_layer" } exit-future = "0.2" filesystem = { path = "common/filesystem" } diff --git a/testing/execution_engine_integration/Cargo.toml b/testing/execution_engine_integration/Cargo.toml index eef13cfc738..74bf43e3ae0 100644 --- a/testing/execution_engine_integration/Cargo.toml +++ b/testing/execution_engine_integration/Cargo.toml @@ -7,12 +7,13 @@ edition = { workspace = true } portable = ["types/portable"] [dependencies] +alloy-network = "1.0" 
+alloy-primitives = { workspace = true } +alloy-provider = "1.0" +alloy-rpc-types-eth = { workspace = true } +alloy-signer-local = "1.0" async-channel = { workspace = true } deposit_contract = { workspace = true } -ethers-core = { workspace = true } -ethers-middleware = { workspace = true } -ethers-providers = { workspace = true } -ethers-signers = { workspace = true } execution_layer = { workspace = true } fork_choice = { workspace = true } futures = { workspace = true } diff --git a/testing/execution_engine_integration/src/execution_engine.rs b/testing/execution_engine_integration/src/execution_engine.rs index ed4ee4682f4..3bb8585e448 100644 --- a/testing/execution_engine_integration/src/execution_engine.rs +++ b/testing/execution_engine_integration/src/execution_engine.rs @@ -1,6 +1,7 @@ -use ethers_providers::{Http, Provider}; +use alloy_provider::ProviderBuilder; use execution_layer::DEFAULT_JWT_FILE; use network_utils::unused_port::unused_tcp4_port; +use reqwest::Url; use sensitive_url::SensitiveUrl; use std::path::PathBuf; use std::process::Child; @@ -34,7 +35,7 @@ pub struct ExecutionEngine { http_port: u16, http_auth_port: u16, child: Child, - pub provider: Provider, + pub provider: Box, } impl Drop for ExecutionEngine { @@ -53,8 +54,9 @@ impl ExecutionEngine { let http_port = unused_tcp4_port().unwrap(); let http_auth_port = unused_tcp4_port().unwrap(); let child = E::start_client(&datadir, http_port, http_auth_port, jwt_secret_path); - let provider = Provider::::try_from(format!("http://localhost:{}", http_port)) - .expect("failed to instantiate ethers provider"); + let provider = Box::new(ProviderBuilder::new().connect_http( + Url::parse(&format!("http://localhost:{}", http_port)).expect("failed to parse URL"), + )); Self { engine, datadir, diff --git a/testing/execution_engine_integration/src/test_rig.rs b/testing/execution_engine_integration/src/test_rig.rs index 9e45a788704..57501c6ee2c 100644 --- 
a/testing/execution_engine_integration/src/test_rig.rs +++ b/testing/execution_engine_integration/src/test_rig.rs @@ -2,9 +2,10 @@ use crate::execution_engine::{ ACCOUNT1, ACCOUNT2, ExecutionEngine, GenericExecutionEngine, KEYSTORE_PASSWORD, PRIVATE_KEYS, }; use crate::transactions::transactions; -use ethers_middleware::SignerMiddleware; -use ethers_providers::Middleware; -use ethers_signers::LocalWallet; +use alloy_network::{EthereumWallet, TransactionBuilder}; +use alloy_primitives::Address as AlloyAddress; +use alloy_provider::{Provider, ProviderBuilder}; +use alloy_signer_local::PrivateKeySigner; use execution_layer::test_utils::DEFAULT_GAS_LIMIT; use execution_layer::{ BlockProposalContentsType, BuilderParams, ChainHealth, ExecutionLayer, PayloadAttributes, @@ -202,12 +203,13 @@ impl TestRig { self.wait_until_synced().await; // Create a local signer in case we need to sign transactions locally - let wallet1: LocalWallet = PRIVATE_KEYS[0].parse().expect("Invalid private key"); - let signer = SignerMiddleware::new(&self.ee_a.execution_engine.provider, wallet1); + let private_key_signer: PrivateKeySigner = + PRIVATE_KEYS[0].parse().expect("Invalid private key"); + let wallet = EthereumWallet::from(private_key_signer); // We hardcode the accounts here since some EEs start with a default unlocked account - let account1 = ethers_core::types::Address::from_slice(&hex::decode(ACCOUNT1).unwrap()); - let account2 = ethers_core::types::Address::from_slice(&hex::decode(ACCOUNT2).unwrap()); + let account1 = AlloyAddress::from_slice(&hex::decode(ACCOUNT1).unwrap()); + let account2 = AlloyAddress::from_slice(&hex::decode(ACCOUNT2).unwrap()); /* * Read the terminal block hash from both pairs, check it's equal. 
@@ -237,11 +239,18 @@ impl TestRig { if self.use_local_signing { // Sign locally with the Signer middleware - for (i, tx) in txs.clone().into_iter().enumerate() { + for (i, mut tx) in txs.clone().into_iter().enumerate() { // The local signer uses eth_sendRawTransaction, so we need to manually set the nonce - let mut tx = tx.clone(); - tx.set_nonce(i as u64); - let pending_tx = signer.send_transaction(tx, None).await.unwrap(); + tx = tx.with_nonce(i as u64); + let wallet_provider = ProviderBuilder::new().wallet(wallet.clone()).connect_http( + self.ee_a + .execution_engine + .http_url() + .to_string() + .parse() + .unwrap(), + ); + let pending_tx = wallet_provider.send_transaction(tx).await.unwrap(); pending_txs.push(pending_tx); } } else { @@ -261,7 +270,7 @@ impl TestRig { .ee_a .execution_engine .provider - .send_transaction(tx, None) + .send_transaction(tx) .await .unwrap(); pending_txs.push(pending_tx); @@ -446,11 +455,10 @@ impl TestRig { // Verify that all submitted txs were successful for pending_tx in pending_txs { - let tx_receipt = pending_tx.await.unwrap().unwrap(); - assert_eq!( - tx_receipt.status, - Some(1.into()), - "Tx index {} has invalid status ", + let tx_receipt = pending_tx.get_receipt().await.unwrap(); + assert!( + tx_receipt.status(), + "Tx index {:?} has invalid status ", tx_receipt.transaction_index ); } diff --git a/testing/execution_engine_integration/src/transactions.rs b/testing/execution_engine_integration/src/transactions.rs index b6111426b67..fe36a1bf67f 100644 --- a/testing/execution_engine_integration/src/transactions.rs +++ b/testing/execution_engine_integration/src/transactions.rs @@ -1,8 +1,7 @@ +use alloy_network::TransactionBuilder; +use alloy_primitives::{Address, U256}; +use alloy_rpc_types_eth::{AccessList, TransactionRequest}; use deposit_contract::{BYTECODE, CONTRACT_DEPLOY_GAS, DEPOSIT_GAS, encode_eth1_tx_data}; -use ethers_core::types::{ - Address, Bytes, Eip1559TransactionRequest, TransactionRequest, U256, - 
transaction::{eip2718::TypedTransaction, eip2930::AccessList}, -}; use types::{DepositData, EthSpec, FixedBytesExtended, Hash256, Keypair, Signature}; /// Hardcoded deposit contract address based on sender address and nonce @@ -21,7 +20,7 @@ pub enum Transaction { } /// Get a list of transactions to publish to the execution layer. -pub fn transactions(account1: Address, account2: Address) -> Vec { +pub fn transactions(account1: Address, account2: Address) -> Vec { vec![ Transaction::Transfer(account1, account2).transaction::(), Transaction::TransferLegacy(account1, account2).transaction::(), @@ -29,7 +28,7 @@ pub fn transactions(account1: Address, account2: Address) -> Vec(), Transaction::DepositDepositContract { sender: account1, - deposit_contract_address: ethers_core::types::Address::from_slice( + deposit_contract_address: Address::from_slice( &hex::decode(DEPOSIT_CONTRACT_ADDRESS).unwrap(), ), } @@ -38,33 +37,36 @@ pub fn transactions(account1: Address, account2: Address) -> Vec(&self) -> TypedTransaction { + pub fn transaction(&self) -> TransactionRequest { match &self { - Self::TransferLegacy(from, to) => TransactionRequest::new() + Self::TransferLegacy(from, to) => TransactionRequest::default() .from(*from) .to(*to) - .value(1) - .into(), - Self::Transfer(from, to) => Eip1559TransactionRequest::new() + .value(U256::from(1)) + .with_gas_price(1_000_000_000u128), // 1 gwei + Self::Transfer(from, to) => TransactionRequest::default() .from(*from) .to(*to) - .value(1) - .into(), - Self::TransferAccessList(from, to) => TransactionRequest::new() + .value(U256::from(1)) + .with_max_fee_per_gas(2_000_000_000u128) + .with_max_priority_fee_per_gas(1_000_000_000u128), + Self::TransferAccessList(from, to) => TransactionRequest::default() .from(*from) .to(*to) - .value(1) + .value(U256::from(1)) .with_access_list(AccessList::default()) - .into(), + .with_gas_price(1_000_000_000u128), // 1 gwei Self::DeployDepositContract(addr) => { let mut bytecode = 
String::from_utf8(BYTECODE.to_vec()).unwrap(); bytecode.retain(|c| c.is_ascii_hexdigit()); let bytecode = hex::decode(&bytecode[1..]).unwrap(); - TransactionRequest::new() + let mut req = TransactionRequest::default() .from(*addr) - .data(Bytes::from(bytecode)) - .gas(CONTRACT_DEPLOY_GAS) - .into() + .with_input(bytecode) + .with_gas_limit(CONTRACT_DEPLOY_GAS.try_into().unwrap()) + .with_gas_price(1_000_000_000u128); // 1 gwei + req.set_create(); + req } Self::DepositDepositContract { sender, @@ -80,13 +82,13 @@ impl Transaction { signature: Signature::empty().into(), }; deposit.signature = deposit.create_signature(&keypair.sk, &E::default_spec()); - TransactionRequest::new() + TransactionRequest::default() .from(*sender) .to(*deposit_contract_address) - .data(Bytes::from(encode_eth1_tx_data(&deposit).unwrap())) - .gas(DEPOSIT_GAS) - .value(U256::from(amount) * U256::exp10(9)) - .into() + .with_input(encode_eth1_tx_data(&deposit).unwrap()) + .with_gas_limit(DEPOSIT_GAS.try_into().unwrap()) + .value(U256::from(amount) * U256::from(10).pow(U256::from(9))) + .with_gas_price(1_000_000_000u128) // 1 gwei } } } From d54dc685a3cbf03e2d5b5f153b10124c514d51e2 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Wed, 12 Nov 2025 13:26:20 +0800 Subject: [PATCH 29/74] Add `optimistic_sync` metric (#8059) * #7974 Display `0` if optimistic sync and `1` for normal sync Co-Authored-By: Tan Chee Keong Co-Authored-By: chonghe <44791194+chong-he@users.noreply.github.com> --- beacon_node/client/src/metrics.rs | 7 +++++++ beacon_node/client/src/notifier.rs | 6 +++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/beacon_node/client/src/metrics.rs b/beacon_node/client/src/metrics.rs index 605a7346886..6ff3eb6a70f 100644 --- a/beacon_node/client/src/metrics.rs +++ b/beacon_node/client/src/metrics.rs @@ -15,6 +15,13 @@ pub static IS_SYNCED: LazyLock> = LazyLock::new(|| { ) }); +pub static IS_OPTIMISTIC_SYNC: LazyLock> = LazyLock::new(|| 
{ + try_create_int_gauge( + "optimistic_sync", + "Metric to check if the beacon chain is in optimistic sync mode. 0 if synced and 1 if optimistic sync", + ) +}); + pub static NOTIFIER_HEAD_SLOT: LazyLock> = LazyLock::new(|| { try_create_int_gauge( "notifier_head_slot", diff --git a/beacon_node/client/src/notifier.rs b/beacon_node/client/src/notifier.rs index b1cf1bd7f55..52a3b92cb60 100644 --- a/beacon_node/client/src/notifier.rs +++ b/beacon_node/client/src/notifier.rs @@ -369,8 +369,12 @@ pub fn spawn_notifier( let block_hash = match beacon_chain.canonical_head.head_execution_status() { Ok(ExecutionStatus::Irrelevant(_)) => "n/a".to_string(), - Ok(ExecutionStatus::Valid(hash)) => format!("{} (verified)", hash), + Ok(ExecutionStatus::Valid(hash)) => { + metrics::set_gauge(&metrics::IS_OPTIMISTIC_SYNC, 0); + format!("{} (verified)", hash) + } Ok(ExecutionStatus::Optimistic(hash)) => { + metrics::set_gauge(&metrics::IS_OPTIMISTIC_SYNC, 1); warn!( info = "chain not fully verified, \ block and attestation production disabled until execution engine syncs", From b5260db5e68d0b6d0637e5f86d9435503a46d458 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Wed, 12 Nov 2025 15:01:52 +0800 Subject: [PATCH 30/74] Add extra data in `/eth/v1/debug/fork_choice` (#7845) * #7829 Co-Authored-By: Tan Chee Keong Co-Authored-By: chonghe <44791194+chong-he@users.noreply.github.com> --- beacon_node/http_api/src/lib.rs | 32 ++++++++++++++++++++++++++--- beacon_node/http_api/tests/tests.rs | 26 +++++++++++++++++++++++ common/eth2/src/types.rs | 15 ++++++++++++++ 3 files changed, 70 insertions(+), 3 deletions(-) diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index e0fb39c42cf..e8fb149bfd0 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -48,9 +48,9 @@ use bytes::Bytes; use directory::DEFAULT_ROOT_DIR; use eth2::types::{ self as api_types, BroadcastValidation, ContextDeserialize, 
EndpointVersion, ForkChoice, - ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, StateId as CoreStateId, - ValidatorBalancesRequestBody, ValidatorId, ValidatorIdentitiesRequestBody, ValidatorStatus, - ValidatorsRequestBody, + ForkChoiceExtraData, ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, + StateId as CoreStateId, ValidatorBalancesRequestBody, ValidatorId, + ValidatorIdentitiesRequestBody, ValidatorStatus, ValidatorsRequestBody, }; use eth2::{CONSENSUS_VERSION_HEADER, CONTENT_TYPE_HEADER, SSZ_CONTENT_TYPE_HEADER}; use health_metrics::observe::Observe; @@ -3033,6 +3033,32 @@ pub fn serve( .execution_status .block_hash() .map(|block_hash| block_hash.into_root()), + extra_data: ForkChoiceExtraData { + target_root: node.target_root, + justified_root: node.justified_checkpoint.root, + finalized_root: node.finalized_checkpoint.root, + unrealized_justified_root: node + .unrealized_justified_checkpoint + .map(|checkpoint| checkpoint.root), + unrealized_finalized_root: node + .unrealized_finalized_checkpoint + .map(|checkpoint| checkpoint.root), + unrealized_justified_epoch: node + .unrealized_justified_checkpoint + .map(|checkpoint| checkpoint.epoch), + unrealized_finalized_epoch: node + .unrealized_finalized_checkpoint + .map(|checkpoint| checkpoint.epoch), + execution_status: node.execution_status.to_string(), + best_child: node + .best_child + .and_then(|index| proto_array.nodes.get(index)) + .map(|child| child.root), + best_descendant: node + .best_descendant + .and_then(|index| proto_array.nodes.get(index)) + .map(|descendant| descendant.root), + }, } }) .collect::>(); diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs index b3486da5ad2..8d99e696cf7 100644 --- a/beacon_node/http_api/tests/tests.rs +++ b/beacon_node/http_api/tests/tests.rs @@ -3088,6 +3088,32 @@ impl ApiTester { .execution_status .block_hash() .map(|block_hash| block_hash.into_root()), + extra_data: ForkChoiceExtraData { + target_root: 
node.target_root, + justified_root: node.justified_checkpoint.root, + finalized_root: node.finalized_checkpoint.root, + unrealized_justified_root: node + .unrealized_justified_checkpoint + .map(|checkpoint| checkpoint.root), + unrealized_finalized_root: node + .unrealized_finalized_checkpoint + .map(|checkpoint| checkpoint.root), + unrealized_justified_epoch: node + .unrealized_justified_checkpoint + .map(|checkpoint| checkpoint.epoch), + unrealized_finalized_epoch: node + .unrealized_finalized_checkpoint + .map(|checkpoint| checkpoint.epoch), + execution_status: node.execution_status.to_string(), + best_child: node + .best_child + .and_then(|index| expected_proto_array.nodes.get(index)) + .map(|child| child.root), + best_descendant: node + .best_descendant + .and_then(|index| expected_proto_array.nodes.get(index)) + .map(|descendant| descendant.root), + }, } }) .collect(); diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs index c3f9c305e00..6aad00301a6 100644 --- a/common/eth2/src/types.rs +++ b/common/eth2/src/types.rs @@ -1520,6 +1520,21 @@ pub struct ForkChoiceNode { pub weight: u64, pub validity: Option, pub execution_block_hash: Option, + pub extra_data: ForkChoiceExtraData, +} + +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)] +pub struct ForkChoiceExtraData { + pub target_root: Hash256, + pub justified_root: Hash256, + pub finalized_root: Hash256, + pub unrealized_justified_root: Option, + pub unrealized_finalized_root: Option, + pub unrealized_justified_epoch: Option, + pub unrealized_finalized_epoch: Option, + pub execution_status: String, + pub best_child: Option, + pub best_descendant: Option, } #[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)] From e2823636691442413776feaa223aca5a434333d9 Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Mon, 17 Nov 2025 12:27:57 +1100 Subject: [PATCH 31/74] Gracefully handle deleting states prior to anchor_slot (#8409) Fix an issue detected by @jimmygchen that 
occurs when checkpoint sync is aborted midway and then later restarted. The characteristic error is something like: > Nov 13 00:51:35.832 ERROR Database write failed error: Hdiff(LessThanStart(Slot(1728288), Slot(1728320))), action: "reverting blob DB changes" Nov 13 00:51:35.833 WARN Hot DB pruning failed error: DBError(HotColdDBError(Rollback)) This issue has existed since v7.1.0. Delete snapshot/diff in the case where `hot_storage_strategy` fails. Co-Authored-By: Michael Sproul --- beacon_node/store/src/hot_cold_store.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/beacon_node/store/src/hot_cold_store.rs b/beacon_node/store/src/hot_cold_store.rs index a0a75dbb0d4..c27bf195d5c 100644 --- a/beacon_node/store/src/hot_cold_store.rs +++ b/beacon_node/store/src/hot_cold_store.rs @@ -1308,8 +1308,13 @@ impl, Cold: ItemStore> HotColdDB state_root.as_slice().to_vec(), )); - if let Some(slot) = slot { - match self.hot_storage_strategy(slot)? { + // NOTE: `hot_storage_strategy` can error if there are states in the database + // prior to the `anchor_slot`. This can happen if checkpoint sync has been + // botched and left some states in the database prior to completing. + if let Some(slot) = slot + && let Ok(strategy) = self.hot_storage_strategy(slot) + { + match strategy { StorageStrategy::Snapshot => { // Full state stored in this position key_value_batch.push(KeyValueStoreOp::DeleteKey( From d59e340d3b61a31c326d9e0c28a0c6d65d58dc1a Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Wed, 19 Nov 2025 11:51:41 +1100 Subject: [PATCH 32/74] Add nightly tests workflow to test prior forks (#8319) Once #8271 is merged, CI will only cover tests for `RECENT_FORKS` (prev, current, next) To make sure functionalities aren't broken for prior forks, we run tests for these forks nightly. They can also be manually triggered. 
Tested via manual trigger here: https://github.com/jimmygchen/lighthouse/actions/runs/18896690117 image Co-Authored-By: Jimmy Chen Co-Authored-By: Jimmy Chen --- .github/workflows/nightly-tests.yml | 135 ++++++++++++++++++++++++++++ 1 file changed, 135 insertions(+) create mode 100644 .github/workflows/nightly-tests.yml diff --git a/.github/workflows/nightly-tests.yml b/.github/workflows/nightly-tests.yml new file mode 100644 index 00000000000..be52c5b84d3 --- /dev/null +++ b/.github/workflows/nightly-tests.yml @@ -0,0 +1,135 @@ +# We only run tests on `RECENT_FORKS` on CI. To make sure we don't break prior forks, we run nightly tests to cover all prior forks. +name: nightly-tests + +on: + schedule: + # Run at 8:30 AM UTC every day + - cron: '30 8 * * *' + workflow_dispatch: # Allow manual triggering + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + # Deny warnings in CI + # Disable debug info (see https://github.com/sigp/lighthouse/issues/4005) + RUSTFLAGS: "-D warnings -C debuginfo=0" + # Prevent Github API rate limiting. + LIGHTHOUSE_GITHUB_TOKEN: ${{ secrets.LIGHTHOUSE_GITHUB_TOKEN }} + # Disable incremental compilation + CARGO_INCREMENTAL: 0 + # Enable portable to prevent issues with caching `blst` for the wrong CPU type + TEST_FEATURES: portable + +jobs: + setup-matrix: + name: setup-matrix + runs-on: ubuntu-latest + outputs: + forks: ${{ steps.set-matrix.outputs.forks }} + steps: + - name: Set matrix + id: set-matrix + run: | + # All prior forks to cover in nightly tests. This list should be updated when we remove a fork from `RECENT_FORKS`. 
+ echo 'forks=["phase0", "altair", "bellatrix", "capella", "deneb"]' >> $GITHUB_OUTPUT + + beacon-chain-tests: + name: beacon-chain-tests + needs: setup-matrix + runs-on: 'ubuntu-latest' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + strategy: + matrix: + fork: ${{ fromJson(needs.setup-matrix.outputs.forks) }} + fail-fast: false + steps: + - uses: actions/checkout@v5 + - name: Get latest version of stable Rust + uses: moonrepo/setup-rust@v1 + with: + channel: stable + cache-target: release + bins: cargo-nextest + - name: Run beacon_chain tests for ${{ matrix.fork }} + run: make test-beacon-chain-${{ matrix.fork }} + timeout-minutes: 60 + + http-api-tests: + name: http-api-tests + needs: setup-matrix + runs-on: 'ubuntu-latest' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + strategy: + matrix: + fork: ${{ fromJson(needs.setup-matrix.outputs.forks) }} + fail-fast: false + steps: + - uses: actions/checkout@v5 + - name: Get latest version of stable Rust + uses: moonrepo/setup-rust@v1 + with: + channel: stable + cache-target: release + bins: cargo-nextest + - name: Run http_api tests for ${{ matrix.fork }} + run: make test-http-api-${{ matrix.fork }} + timeout-minutes: 60 + + op-pool-tests: + name: op-pool-tests + needs: setup-matrix + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + strategy: + matrix: + fork: ${{ fromJson(needs.setup-matrix.outputs.forks) }} + fail-fast: false + steps: + - uses: actions/checkout@v5 + - name: Get latest version of stable Rust + uses: moonrepo/setup-rust@v1 + with: + channel: stable + cache-target: release + bins: cargo-nextest + - name: Run operation_pool tests for ${{ matrix.fork }} + run: make test-op-pool-${{ matrix.fork }} + timeout-minutes: 60 + + network-tests: + name: network-tests + needs: setup-matrix + runs-on: ubuntu-latest + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + strategy: + matrix: + fork: ${{ fromJson(needs.setup-matrix.outputs.forks) }} + fail-fast: false + steps: + - 
uses: actions/checkout@v5 + - name: Get latest version of stable Rust + uses: moonrepo/setup-rust@v1 + with: + channel: stable + cache-target: release + bins: cargo-nextest + - name: Create CI logger dir + run: mkdir ${{ runner.temp }}/network_test_logs + - name: Run network tests for ${{ matrix.fork }} + run: make test-network-${{ matrix.fork }} + timeout-minutes: 60 + env: + TEST_FEATURES: portable + CI_LOGGER_DIR: ${{ runner.temp }}/network_test_logs + - name: Upload logs + if: always() + uses: actions/upload-artifact@v4 + with: + name: network_test_logs_${{ matrix.fork }} + path: ${{ runner.temp }}/network_test_logs From 2ba8a8e6aebdfccef520ce49ec7703643961cbe8 Mon Sep 17 00:00:00 2001 From: Age Manning Date: Mon, 24 Nov 2025 16:20:01 +1100 Subject: [PATCH 33/74] Cargo Update (#8443) Co-Authored-By: Age Manning --- Cargo.lock | 2073 ++++++++--------- .../lighthouse_network/src/service/utils.rs | 2 +- .../lighthouse_network/tests/common.rs | 2 +- validator_client/graffiti_file/src/lib.rs | 2 +- 4 files changed, 1020 insertions(+), 1059 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8a2b9faed39..0f3f57973eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -37,7 +37,7 @@ dependencies = [ "eth2_keystore", "eth2_wallet", "filesystem", - "rand 0.9.0", + "rand 0.9.2", "regex", "rpassword", "serde", @@ -48,20 +48,11 @@ dependencies = [ "zeroize", ] -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aead" @@ -113,21 +104,21 @@ dependencies = [ [[package]] name = "ahash" -version = 
"0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy 0.7.35", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] @@ -140,9 +131,9 @@ checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "alloy-chains" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6068f356948cd84b5ad9ac30c50478e433847f14a50714d2b68f15d052724049" +checksum = "4bc32535569185cbcb6ad5fa64d989a47bccb9a08e27284b1f2a3ccf16e6d010" dependencies = [ "alloy-primitives", "num_enum", @@ -172,7 +163,7 @@ dependencies = [ "serde", "serde_json", "serde_with", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -215,37 +206,39 @@ dependencies = [ "alloy-rlp", "crc", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "alloy-eip2930" -version = "0.2.0" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbe3e16484669964c26ac48390245d84c410b1a5f968976076c17184725ef235" +checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" dependencies = [ "alloy-primitives", "alloy-rlp", + "borsh", "serde", ] [[package]] name = "alloy-eip7702" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d4769c6ffddca380b0070d71c8b7f30bed375543fe76bb2f74ec0acf4b7cd16" +checksum = 
"2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" dependencies = [ "alloy-primitives", "alloy-rlp", + "borsh", "serde", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "alloy-eips" -version = "1.0.42" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07d9a64522a0db6ebcc4ff9c904e329e77dd737c2c25d30f1bdc32ca6c6ce334" +checksum = "7e867b5fd52ed0372a95016f3a37cbff95a9d5409230fbaef2d8ea00e8618098" dependencies = [ "alloy-eip2124", "alloy-eip2930", @@ -254,13 +247,14 @@ dependencies = [ "alloy-rlp", "alloy-serde", "auto_impl", + "borsh", "c-kzg", "derive_more 2.0.1", "either", "serde", "serde_with", - "sha2 0.10.8", - "thiserror 2.0.12", + "sha2 0.10.9", + "thiserror 2.0.17", ] [[package]] @@ -277,16 +271,16 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b67c5a702121e618217f7a86f314918acb2622276d0273490e2d4534490bc0" +checksum = "dcab4c51fb1273e3b0f59078e0cdf8aa99f697925b09f0d2055c18be46b4d48c" dependencies = [ "alloy-primitives", "alloy-sol-types", - "http 1.3.0", + "http 1.3.1", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", ] @@ -313,7 +307,7 @@ dependencies = [ "futures-utils-wasm", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -342,16 +336,16 @@ dependencies = [ "const-hex", "derive_more 2.0.1", "foldhash 0.2.0", - "getrandom 0.3.1", + "getrandom 0.3.4", "hashbrown 0.16.0", - "indexmap 2.8.0", + "indexmap 2.12.0", "itoa", "k256", "keccak-asm", "paste", "proptest", "proptest-derive", - "rand 0.9.0", + "rand 0.9.2", "ruint", "rustc-hash 2.1.1", "serde", @@ -388,10 +382,10 @@ dependencies = [ "lru 0.13.0", "parking_lot", "pin-project", - "reqwest 0.12.15", + "reqwest 0.12.24", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", "url", @@ -417,14 +411,14 @@ 
checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "alloy-rpc-client" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45f802228273056528dfd6cc8845cc91a7c7e0c6fc1a66d19e8673743dacdc7e" +checksum = "e7c2630fde9ff6033a780635e1af6ef40e92d74a9cacb8af3defc1b15cfebca5" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -432,7 +426,7 @@ dependencies = [ "alloy-transport-http", "futures", "pin-project", - "reqwest 0.12.15", + "reqwest 0.12.24", "serde", "serde_json", "tokio", @@ -472,14 +466,14 @@ dependencies = [ "serde", "serde_json", "serde_with", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] name = "alloy-serde" -version = "1.0.42" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "596cfa360922ba9af901cc7370c68640e4f72adb6df0ab064de32f21fec498d7" +checksum = "01e856112bfa0d9adc85bd7c13db03fad0e71d1d6fb4c2010e475b6718108236" dependencies = [ "alloy-primitives", "serde", @@ -488,9 +482,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf0b42ffbf558badfecf1dde0c3c5ed91f29bb7e97876d0bed008c3d5d67171" +checksum = "66a4f629da632d5279bbc5731634f0f5c9484ad9c4cad0cd974d9669dc1f46d6" dependencies = [ "alloy-primitives", "async-trait", @@ -498,7 +492,7 @@ dependencies = [ "either", "elliptic-curve", "k256", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -514,7 +508,7 @@ dependencies = [ "async-trait", "k256", "rand 0.8.5", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -528,7 +522,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -540,11 +534,11 @@ dependencies = [ "alloy-sol-macro-input", "const-hex", "heck 
0.5.0", - "indexmap 2.8.0", + "indexmap 2.12.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "syn-solidity", "tiny-keccak", ] @@ -561,7 +555,7 @@ dependencies = [ "macro-string", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "syn-solidity", ] @@ -589,9 +583,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71b3deee699d6f271eab587624a9fa84d02d0755db7a95a043d52a6488d16ebe" +checksum = "fe215a2f9b51d5f1aa5c8cf22c8be8cdb354934de09c9a4e37aefb79b77552fd" dependencies = [ "alloy-json-rpc", "auto_impl", @@ -602,7 +596,7 @@ dependencies = [ "parking_lot", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tower 0.5.2", "tracing", @@ -612,13 +606,13 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1720bd2ba8fe7e65138aca43bb0f680e4e0bcbd3ca39bf9d3035c9d7d2757f24" +checksum = "dc1b37b1a30d23deb3a8746e882c70b384c574d355bc2bbea9ea918b0c31366e" dependencies = [ "alloy-json-rpc", "alloy-transport", - "reqwest 0.12.15", + "reqwest 0.12.24", "serde_json", "tower 0.5.2", "tracing", @@ -643,22 +637,16 @@ dependencies = [ [[package]] name = "alloy-tx-macros" -version = "1.0.42" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab54221eccefa254ce9f65b079c097b1796e48c21c7ce358230f8988d75392fb" +checksum = "7ccf423f6de62e8ce1d6c7a11fb7508ae3536d02e0d68aaeb05c8669337d0937" dependencies = [ "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = 
"0.1.5" @@ -676,9 +664,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" dependencies = [ "anstyle", "anstyle-parse", @@ -691,50 +679,50 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.7" +version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", - "once_cell", - "windows-sys 0.59.0", + "once_cell_polyfill", + "windows-sys 0.61.2", ] [[package]] name = "anyhow" -version = "1.0.97" +version = "1.0.100" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "arbitrary" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" dependencies = [ "derive_arbitrary", ] @@ -839,7 +827,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -877,7 +865,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -976,7 +964,7 @@ dependencies = [ "nom", "num-traits", "rusticata-macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -988,7 +976,7 @@ checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "synstructure", ] @@ -1000,7 +988,7 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -1032,9 +1020,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" dependencies = [ "concurrent-queue", "event-listener-strategy", @@ -1044,32 +1032,20 @@ dependencies = [ [[package]] name = "async-io" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" dependencies = [ - "async-lock", + "autocfg", "cfg-if", "concurrent-queue", "futures-io", "futures-lite", "parking", "polling", - "rustix 0.38.44", + "rustix 1.1.2", "slab", - "tracing", - "windows-sys 0.59.0", -] - -[[package]] -name = "async-lock" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" -dependencies = [ - "event-listener 5.4.0", - "event-listener-strategy", - "pin-project-lite", + "windows-sys 0.61.2", ] [[package]] @@ -1091,18 +1067,18 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "async-trait" -version = "0.1.87" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d556ec1359574147ec0c4fc5eb525f3f23263a592b1a9c07e0a75b427de55c97" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -1131,27 +1107,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e2cdb6d5ed835199484bb92bb8b3edd526effe995c61732580439c1a67e2e9" dependencies = [ "base64 0.22.1", - "http 1.3.0", + "http 1.3.1", "log", "url", ] [[package]] name = "auto_impl" -version = "1.2.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e12882f59de5360c748c4cbf569a042d5fb0eb515f7bea9c1f470b47f6ffbd73" +checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "autocfg" -version = "1.4.0" +version = "1.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "axum" @@ -1163,7 +1139,7 @@ dependencies = [ "axum-core", "bytes", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", "itoa", @@ -1189,7 +1165,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", "mime", @@ -1200,21 +1176,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "backtrace" -version = "0.3.74" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] - [[package]] name = "base-x" version = "0.2.11" @@ -1227,6 +1188,16 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base256emoji" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c" +dependencies = [ + "const-str", + "match-lookup", +] + [[package]] name = "base64" version = "0.13.1" @@ -1247,9 +1218,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb97d56060ee67d285efb8001fec9d2a4c710c32efd2e14b5cbb5ba71930fc2d" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "beacon_chain" @@ -1288,7 +1259,7 @@ dependencies = [ "operation_pool", "parking_lot", "proto_array", - "rand 
0.9.0", + "rand 0.9.2", "rayon", "safe_arith", "sensitive_url", @@ -1330,7 +1301,7 @@ dependencies = [ "genesis", "hex", "http_api", - "hyper 1.6.0", + "hyper 1.8.1", "lighthouse_network", "monitoring_api", "network_utils", @@ -1402,7 +1373,7 @@ version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "cexpr", "clang-sys", "itertools 0.12.1", @@ -1415,7 +1386,7 @@ dependencies = [ "regex", "rustc-hash 1.1.0", "shlex", - "syn 2.0.100", + "syn 2.0.110", "which", ] @@ -1458,9 +1429,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "bitvec" @@ -1501,6 +1472,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = "block2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" +dependencies = [ + "objc2", +] + [[package]] name = "bls" version = "0.2.0" @@ -1513,7 +1493,7 @@ dependencies = [ "ethereum_ssz", "fixed_bytes", "hex", - "rand 0.9.0", + "rand 0.9.2", "safe_arith", "serde", "tree_hash", @@ -1570,6 +1550,29 @@ dependencies = [ "types", ] +[[package]] +name = "borsh" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad8646f98db542e39fc66e68a20b2144f6a732636df7c2354e74645faaa433ce" +dependencies = [ + "borsh-derive", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fdd1d3c0c2f5833f22386f252fe8ed005c7f59fdcddeef025c01b4c3b9fd9ac3" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.110", +] + [[package]] name = "bs58" version = "0.4.0" @@ -1602,9 +1605,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "byte-slice-cast" @@ -1620,9 +1623,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" dependencies = [ "serde", ] @@ -1664,11 +1667,11 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.9" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -1688,10 +1691,10 @@ checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", - "semver 1.0.26", + "semver 1.0.27", "serde", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -1702,10 +1705,11 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.16" +version = "1.2.46" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" +checksum = 
"b97463e1064cb1b1c1384ad0a0b9c8abd0988e2a91f52606c80ef14aadb63e36" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", @@ -1722,9 +1726,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" [[package]] name = "cfg_aliases" @@ -1758,11 +1762,10 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.40" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", @@ -1831,9 +1834,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.32" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6088f3ae8c3608d19260cd7445411865a485688711b78b5be70d78cd96136f83" +checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8" dependencies = [ "clap_builder", "clap_derive", @@ -1841,9 +1844,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.32" +version = "4.5.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22a7ef7f676155edfb82daa97f99441f3ebf4a58d5e32f295a56259f1b6facc8" +checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00" dependencies = [ "anstream", "anstyle", @@ -1854,21 +1857,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" 
dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "clap_lex" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" [[package]] name = "clap_utils" @@ -1910,7 +1913,7 @@ dependencies = [ "monitoring_api", "network", "operation_pool", - "rand 0.9.0", + "rand 0.9.2", "sensitive_url", "serde", "serde_json", @@ -1940,9 +1943,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "colored" @@ -2023,15 +2026,14 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.14.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0485bab839b018a8f1723fc5391819fea5f8f0f32288ef8a735fd096b6160c" +checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" dependencies = [ "cfg-if", "cpufeatures", - "hex", "proptest", - "serde", + "serde_core", ] [[package]] @@ -2040,11 +2042,17 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "const-str" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" + [[package]] name = "const_format" -version = "0.2.34" +version = "0.2.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "126f97965c8ad46d6d9163268ff28432e8f6a1196a55578867832e3049df63dd" +checksum = 
"7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" dependencies = [ "const_format_proc_macros", ] @@ -2139,9 +2147,9 @@ dependencies = [ [[package]] name = "crc" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" +checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" dependencies = [ "crc-catalog", ] @@ -2154,9 +2162,9 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.4.2" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] @@ -2239,9 +2247,9 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] name = "crypto-bigint" @@ -2257,9 +2265,9 @@ dependencies = [ [[package]] name = "crypto-common" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -2296,12 +2304,13 @@ dependencies = [ [[package]] name = "ctrlc" -version = "3.4.5" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3" +checksum = 
"73736a89c4aff73035ba2ed2e565061954da00d4970fc9ac25dcc85a2a20d790" dependencies = [ - "nix 0.29.0", - "windows-sys 0.59.0", + "dispatch2", + "nix 0.30.1", + "windows-sys 0.61.2", ] [[package]] @@ -2328,7 +2337,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2343,12 +2352,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core 0.20.10", - "darling_macro 0.20.10", + "darling_core 0.20.11", + "darling_macro 0.20.11", ] [[package]] @@ -2377,16 +2386,16 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 0.11.1", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2401,7 +2410,7 @@ dependencies = [ "quote", "serde", "strsim 0.11.1", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2417,13 +2426,13 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core 0.20.10", + "darling_core 0.20.11", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2434,7 +2443,7 @@ checksum = 
"d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ "darling_core 0.21.3", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2473,15 +2482,15 @@ dependencies = [ [[package]] name = "data-encoding" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "575f75dfd25738df5b91b8e43e14d44bda14637a58fae779fd2b064f8bf3e010" +checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" [[package]] name = "data-encoding-macro" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f9724adfcf41f45bf652b3995837669d73c4d49a1b5ac1ff82905ac7d9b5558" +checksum = "47ce6c96ea0102f01122a185683611bd5ac8d99e62bc59dd12e6bda344ee673d" dependencies = [ "data-encoding", "data-encoding-macro-internal", @@ -2489,12 +2498,12 @@ dependencies = [ [[package]] name = "data-encoding-macro-internal" -version = "0.1.15" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18e4fdb82bd54a12e42fb58a800dcae6b9e13982238ce2296dc3570b92148e1f" +checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" dependencies = [ "data-encoding", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2522,9 +2531,9 @@ checksum = "b72465f46d518f6015d9cf07f7f3013a95dd6b9c2747c3d65ae0cce43929d14f" [[package]] name = "delay_map" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df941644b671f05f59433e481ba0d31ac10e3667de725236a4c0d587c496fba1" +checksum = "88e365f083a5cb5972d50ce8b1b2c9f125dc5ec0f50c0248cfb568ae59efcf0b" dependencies = [ "futures", "tokio", @@ -2549,9 +2558,9 @@ dependencies = [ [[package]] name = "der" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" +checksum = 
"e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" dependencies = [ "const-oid", "zeroize", @@ -2573,12 +2582,12 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.11" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" dependencies = [ "powerfmt", - "serde", + "serde_core", ] [[package]] @@ -2594,26 +2603,26 @@ dependencies = [ [[package]] name = "derive_arbitrary" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "derive_more" -version = "0.99.19" +version = "0.99.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3da29a38df43d6f156149c9b43ded5e018ddff2a855cf2cfd62e8cd7d079c69f" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" dependencies = [ "convert_case", "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2633,7 +2642,7 @@ checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "unicode-xid", ] @@ -2689,9 +2698,9 @@ dependencies = [ [[package]] name = "discv5" -version = "0.10.1" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20b702c8491b3325866a4935d0b5101e49144d74540384243b6293794aad6fa" +checksum = "f170f4f6ed0e1df52bf43b403899f0081917ecf1500bfe312505cc3b515a8899" dependencies = [ "aes 0.8.4", "aes-gcm", @@ -2713,13 +2722,25 @@ dependencies = [ "parking_lot", "rand 0.8.5", "smallvec", 
- "socket2", + "socket2 0.5.10", "tokio", "tracing", "uint 0.10.0", "zeroize", ] +[[package]] +name = "dispatch2" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +dependencies = [ + "bitflags 2.10.0", + "block2", + "libc", + "objc2", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -2728,7 +2749,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2800,15 +2821,15 @@ dependencies = [ [[package]] name = "ed25519-dalek" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" dependencies = [ "curve25519-dalek", "ed25519", "rand_core 0.6.4", "serde", - "sha2 0.10.8", + "sha2 0.10.9", "subtle", "zeroize", ] @@ -2822,7 +2843,7 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -2873,7 +2894,7 @@ dependencies = [ "itertools 0.14.0", "serde", "serde_json", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] @@ -2936,7 +2957,7 @@ dependencies = [ "ekzg-bls12-381", "ekzg-maybe-rayon", "ekzg-polynomial", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] @@ -3040,27 +3061,27 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "enum-ordinalize" -version = "4.3.0" +version = "4.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea0dcfa4e54eeb516fe454635a95753ddd39acda650ce703031c6973e315dd5" +checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" dependencies = [ "enum-ordinalize-derive", ] [[package]] name = "enum-ordinalize-derive" -version = "4.3.1" +version = 
"4.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" +checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -3103,12 +3124,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.10" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -3130,7 +3151,7 @@ dependencies = [ "multiaddr", "pretty_reqwest_error", "proto_array", - "rand 0.9.0", + "rand 0.9.2", "reqwest 0.11.27", "reqwest-eventsource", "sensitive_url", @@ -3186,7 +3207,7 @@ dependencies = [ "hex", "hmac 0.11.0", "pbkdf2 0.8.0", - "rand 0.9.0", + "rand 0.9.2", "scrypt", "serde", "serde_json", @@ -3227,7 +3248,7 @@ dependencies = [ "eth2_key_derivation", "eth2_keystore", "hex", - "rand 0.9.0", + "rand 0.9.2", "serde", "serde_json", "serde_repr", @@ -3253,7 +3274,7 @@ checksum = "c853bd72c9e5787f8aafc3df2907c2ed03cff3150c3acd94e2e53a98ab70a8ab" dependencies = [ "cpufeatures", "ring", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] @@ -3271,9 +3292,9 @@ dependencies = [ [[package]] name = "ethereum_ssz" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca8ba45b63c389c6e115b095ca16381534fdcc03cf58176a3f8554db2dbe19b" +checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" dependencies = [ "alloy-primitives", "arbitrary", @@ -3287,14 +3308,14 @@ dependencies = [ [[package]] name = "ethereum_ssz_derive" -version = "0.9.0" +version = "0.9.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd55d08012b4e0dfcc92b8d6081234df65f2986ad34cc76eeed69c5e2ce7506" +checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" dependencies = [ - "darling 0.20.10", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -3305,9 +3326,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" -version = "5.4.0" +version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" dependencies = [ "concurrent-queue", "parking", @@ -3316,11 +3337,11 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ - "event-listener 5.4.0", + "event-listener 5.4.1", "pin-project-lite", ] @@ -3389,7 +3410,7 @@ dependencies = [ "metrics", "parking_lot", "pretty_reqwest_error", - "rand 0.9.0", + "rand 0.9.2", "reqwest 0.11.27", "sensitive_url", "serde", @@ -3504,6 +3525,12 @@ dependencies = [ "windows-acl", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844" + [[package]] name = "fixed-hash" version = "0.8.0" @@ -3526,9 +3553,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.1.0" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc" +checksum = 
"bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" dependencies = [ "crc32fast", "libz-sys", @@ -3543,9 +3570,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "foldhash" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" [[package]] name = "foldhash" @@ -3588,9 +3615,9 @@ dependencies = [ [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -3678,9 +3705,9 @@ checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" dependencies = [ "futures-core", "pin-project-lite", @@ -3694,7 +3721,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -3704,7 +3731,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" dependencies = [ "futures-io", - "rustls 0.23.23", + "rustls 0.23.35", "rustls-pki-types", ] @@ -3750,19 +3777,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" -[[package]] 
-name = "generator" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd" -dependencies = [ - "cfg-if", - "libc", - "log", - "rustversion", - "windows 0.58.0", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -3791,27 +3805,29 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.1" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", + "js-sys", "libc", - "wasi 0.13.3+wasi-0.2.2", - "windows-targets 0.52.6", + "r-efi", + "wasip2", + "wasm-bindgen", ] [[package]] @@ -3824,17 +3840,11 @@ dependencies = [ "polyval", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "glob" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "graffiti_file" @@ -3863,9 +3873,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.26" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" dependencies = [ "bytes", "fnv", @@ -3873,7 +3883,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.8.0", + "indexmap 2.12.0", "slab", "tokio", "tokio-util", @@ -3882,17 +3892,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.8" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5017294ff4bb30944501348f6f8e42e6ad28f42c8bbef7a74029aff064a4e3c2" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.3.0", - "indexmap 2.8.0", + "http 1.3.1", + "indexmap 2.12.0", "slab", "tokio", "tokio-util", @@ -3901,12 +3911,13 @@ dependencies = [ [[package]] name = "half" -version = "2.4.1" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" dependencies = [ "cfg-if", "crunchy", + "zerocopy", ] [[package]] @@ -3942,13 +3953,13 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", - "foldhash 0.1.4", + "foldhash 0.1.5", ] [[package]] @@ -3985,7 +3996,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ - "hashbrown 0.15.2", + "hashbrown 0.15.5", ] [[package]] @@ -4055,24 +4066,15 @@ checksum = 
"d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hermit-abi" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" - -[[package]] -name = "hermit-abi" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" -dependencies = [ - "serde", -] [[package]] name = "hex-conservative" @@ -4105,10 +4107,10 @@ dependencies = [ "idna", "ipnet", "once_cell", - "rand 0.9.0", + "rand 0.9.2", "ring", - "socket2", - "thiserror 2.0.12", + "socket2 0.5.10", + "thiserror 2.0.17", "tinyvec", "tokio", "tracing", @@ -4128,10 +4130,10 @@ dependencies = [ "moka", "once_cell", "parking_lot", - "rand 0.9.0", + "rand 0.9.2", "resolv-conf", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", ] @@ -4166,22 +4168,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "hostname" -version = "0.3.1" +version = "0.5.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" dependencies = [ - "libc", - "match_cfg", - "winapi", + "windows-sys 0.61.2", ] [[package]] @@ -4197,9 +4188,9 @@ dependencies = [ [[package]] name = "http" -version = "1.3.0" +version = 
"1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a761d192fbf18bdef69f5ceedd0d1333afcbda0ee23840373b8317570d23c65" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" dependencies = [ "bytes", "fnv", @@ -4224,7 +4215,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.3.0", + "http 1.3.1", ] [[package]] @@ -4235,7 +4226,7 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "pin-project-lite", ] @@ -4269,7 +4260,7 @@ dependencies = [ "operation_pool", "parking_lot", "proto_array", - "rand 0.9.0", + "rand 0.9.2", "safe_arith", "sensitive_url", "serde", @@ -4326,9 +4317,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" +checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" [[package]] name = "hyper" @@ -4340,14 +4331,14 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.3.26", + "h2 0.3.27", "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.10", "tokio", "tower-service", "tracing", @@ -4356,20 +4347,22 @@ dependencies = [ [[package]] name = "hyper" -version = "1.6.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", - "h2 0.4.8", - 
"http 1.3.0", + "futures-core", + "h2 0.4.12", + "http 1.3.1", "http-body 1.0.1", "httparse", "httpdate", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -4395,7 +4388,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", "pin-project-lite", "tokio", @@ -4423,7 +4416,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", "native-tls", "tokio", @@ -4433,18 +4426,23 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", - "hyper 1.6.0", + "hyper 1.8.1", + "ipnet", + "libc", + "percent-encoding", "pin-project-lite", - "socket2", + "socket2 0.6.1", "tokio", "tower-service", "tracing", @@ -4452,16 +4450,17 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.61" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", + "log", "wasm-bindgen", - "windows-core 0.52.0", + "windows-core 0.62.2", ] [[package]] @@ -4475,21 +4474,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", + "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", @@ -4498,99 +4498,61 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = 
"7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" dependencies = [ - "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", - "stable_deref_trait", - "tinystr", + "icu_locale_core", "writeable", "yoke", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.100", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -4599,9 +4561,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = 
"3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -4610,9 +4572,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -4648,7 +4610,7 @@ dependencies = [ "rtnetlink", "system-configuration 0.6.1", "tokio", - "windows 0.53.0", + "windows", ] [[package]] @@ -4661,12 +4623,12 @@ dependencies = [ "attohttpc", "bytes", "futures", - "http 1.3.0", + "http 1.3.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", "log", - "rand 0.9.0", + "rand 0.9.2", "tokio", "url", "xmltree", @@ -4689,7 +4651,7 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -4705,14 +4667,15 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.8.0" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" dependencies = [ "arbitrary", "equivalent", - "hashbrown 0.15.2", + "hashbrown 0.16.0", "serde", + "serde_core", ] [[package]] @@ -4727,7 +4690,7 @@ dependencies = [ "lockfile", "metrics", "parking_lot", - "rand 0.9.0", + "rand 0.9.2", "reqwest 0.11.27", "serde", "serde_json", @@ -4785,8 +4748,8 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2", - "widestring 1.1.0", + "socket2 0.5.10", + "widestring 1.2.1", "windows-sys 0.48.0", "winreg", ] @@ -4797,22 
+4760,32 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "iri-string" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is-terminal" -version = "0.4.16" +version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ - "hermit-abi 0.5.0", + "hermit-abi 0.5.2", "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.1" +version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" [[package]] name = "itertools" @@ -4858,18 +4831,19 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jobserver" -version = "0.1.32" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ + "getrandom 0.3.4", "libc", ] [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" dependencies = [ "once_cell", "wasm-bindgen", @@ -4901,7 +4875,7 @@ dependencies = [ 
"elliptic-curve", "once_cell", "serdect", - "sha2 0.10.8", + "sha2 0.10.9", "signature", ] @@ -5032,25 +5006,25 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.175" +version = "0.2.177" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" +checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "libloading" -version = "0.8.6" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-link", ] [[package]] name = "libm" -version = "0.2.11" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" +checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libmdbx" @@ -5059,7 +5033,7 @@ source = "git+https://github.com/sigp/libmdbx-rs?rev=e6ff4b9377c1619bcf0bfdf52be dependencies = [ "bitflags 1.3.2", "byteorder", - "derive_more 0.99.19", + "derive_more 0.99.20", "indexmap 1.9.3", "libc", "mdbx-sys", @@ -5077,7 +5051,7 @@ dependencies = [ "either", "futures", "futures-timer", - "getrandom 0.2.15", + "getrandom 0.2.16", "libp2p-allow-block-list", "libp2p-connection-limits", "libp2p-core", @@ -5096,7 +5070,7 @@ dependencies = [ "multiaddr", "pin-project", "rw-stream-sink", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -5140,7 +5114,7 @@ dependencies = [ "quick-protobuf", "rand 0.8.5", "rw-stream-sink", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "unsigned-varint 0.8.0", "web-time", @@ -5167,7 +5141,7 @@ name = "libp2p-gossipsub" version = "0.50.0" source = 
"git+https://github.com/sigp/rust-libp2p.git?rev=5acdf89a65d64098f9346efa5769e57bcd19dea9#5acdf89a65d64098f9346efa5769e57bcd19dea9" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "asynchronous-codec", "base64 0.22.1", "byteorder", @@ -5176,7 +5150,7 @@ dependencies = [ "fnv", "futures", "futures-timer", - "getrandom 0.2.15", + "getrandom 0.2.16", "hashlink 0.10.0", "hex_fmt", "libp2p-core", @@ -5187,7 +5161,7 @@ dependencies = [ "quick-protobuf-codec", "rand 0.8.5", "regex", - "sha2 0.10.8", + "sha2 0.10.9", "tracing", "web-time", ] @@ -5209,7 +5183,7 @@ dependencies = [ "quick-protobuf", "quick-protobuf-codec", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", ] @@ -5227,8 +5201,8 @@ dependencies = [ "multihash", "quick-protobuf", "rand 0.8.5", - "sha2 0.10.8", - "thiserror 2.0.12", + "sha2 0.10.9", + "thiserror 2.0.17", "tracing", "zeroize", ] @@ -5247,7 +5221,7 @@ dependencies = [ "libp2p-swarm", "rand 0.8.5", "smallvec", - "socket2", + "socket2 0.5.10", "tokio", "tracing", ] @@ -5270,9 +5244,9 @@ dependencies = [ [[package]] name = "libp2p-mplex" -version = "0.43.0" +version = "0.43.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8aaa6fee3722e355443058472fc4705d78681bc2d8e447a0bdeb3fecf40cd197" +checksum = "95a4019ba30c4e42b776113e9778071691fe3f34bf23b6b3bf0dfcf29d801f3d" dependencies = [ "asynchronous-codec", "bytes", @@ -5304,7 +5278,7 @@ dependencies = [ "rand 0.8.5", "snow", "static_assertions", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "x25519-dalek", "zeroize", @@ -5341,9 +5315,9 @@ dependencies = [ "quinn", "rand 0.8.5", "ring", - "rustls 0.23.23", - "socket2", - "thiserror 2.0.12", + "rustls 0.23.35", + "socket2 0.5.10", + "thiserror 2.0.17", "tokio", "tracing", ] @@ -5378,7 +5352,7 @@ checksum = "dd297cf53f0cb3dee4d2620bb319ae47ef27c702684309f682bdb7e55a18ae9c" dependencies = [ "heck 0.5.0", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -5392,7 +5366,7 @@ 
dependencies = [ "if-watch", "libc", "libp2p-core", - "socket2", + "socket2 0.5.10", "tokio", "tracing", ] @@ -5409,9 +5383,9 @@ dependencies = [ "libp2p-identity", "rcgen", "ring", - "rustls 0.23.23", - "rustls-webpki 0.103.4", - "thiserror 2.0.12", + "rustls 0.23.35", + "rustls-webpki 0.103.8", + "thiserror 2.0.17", "x509-parser", "yasna", ] @@ -5440,19 +5414,19 @@ dependencies = [ "either", "futures", "libp2p-core", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "yamux 0.12.1", - "yamux 0.13.4", + "yamux 0.13.8", ] [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "libc", ] @@ -5469,9 +5443,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.21" +version = "1.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" +checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" dependencies = [ "cc", "pkg-config", @@ -5563,7 +5537,7 @@ dependencies = [ "prometheus-client", "quickcheck", "quickcheck_macros", - "rand 0.9.0", + "rand 0.9.2", "regex", "serde", "sha2 0.9.9", @@ -5633,15 +5607,15 @@ checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" -version = "0.9.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db9c683daf087dc577b7506e9695b3d556a9f3849903fa28186283afd6809e9" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "lmdb-rkv" @@ -5666,23 +5640,22 @@ dependencies = [ [[package]] name = "local-ip-address" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3669cf5561f8d27e8fc84cc15e58350e70f557d4d65f70e3154e54cd2f8e1782" +checksum = "656b3b27f8893f7bbf9485148ff9a65f019e3f33bd5cdc87c83cab16b3fd9ec8" dependencies = [ "libc", "neli", - "thiserror 1.0.69", + "thiserror 2.0.17", "windows-sys 0.59.0", ] [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] @@ -5696,9 +5669,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.26" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" [[package]] name = "logging" @@ -5730,26 +5703,13 @@ dependencies = [ "thiserror 1.0.69", ] -[[package]] -name = "loom" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" -dependencies = [ - "cfg-if", - "generator", - "scoped-tls", - "tracing", - "tracing-subscriber", -] - [[package]] name = "lru" version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.15.2", + "hashbrown 0.15.5", ] [[package]] @@ -5758,9 +5718,15 @@ version = "0.13.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "227748d55f2f0ab4735d87fd623798cb6b664512fe979705f829c9f81c934465" dependencies = [ - "hashbrown 0.15.2", + "hashbrown 0.15.5", ] +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + [[package]] name = "lru_cache" version = "0.1.0" @@ -5771,9 +5737,9 @@ dependencies = [ [[package]] name = "mach2" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" +checksum = "d640282b302c0bb0a2a8e0233ead9035e3bed871f0b7e81fe4a1ec829765db44" dependencies = [ "libc", ] @@ -5786,7 +5752,7 @@ checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -5807,10 +5773,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] -name = "match_cfg" -version = "0.1.0" +name = "match-lookup" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" +checksum = "1265724d8cb29dbbc2b0f06fffb8bf1a8c0cf73a78eede9ba73a4a66c52a981e" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] [[package]] name = "matchers" @@ -5852,9 +5823,9 @@ checksum = "33746aadcb41349ec291e7f2f0a3aa6834d1d7c58066fb4b01f68efc4c4b7631" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "memoffset" @@ -5954,22 
+5925,23 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.8.5" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", + "simd-adler32", ] [[package]] name = "mio" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi", + "windows-sys 0.61.2", ] [[package]] @@ -6001,7 +5973,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6013,7 +5985,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6026,13 +5998,13 @@ dependencies = [ "bytes", "colored", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-util", "log", - "rand 0.9.0", + "rand 0.9.2", "regex", "serde_json", "serde_urlencoded", @@ -6042,21 +6014,20 @@ dependencies = [ [[package]] name = "moka" -version = "0.12.10" +version = "0.12.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926" +checksum = "8261cd88c312e0004c1d51baad2980c66528dfdb2bee62003e643a4d8f86b077" dependencies = [ "crossbeam-channel", "crossbeam-epoch", "crossbeam-utils", - "loom", + "equivalent", "parking_lot", "portable-atomic", "rustc_version 0.4.1", "smallvec", "tagptr", - "thiserror 1.0.69", - "uuid 1.15.1", + "uuid 1.18.1", ] [[package]] @@ -6105,11 +6076,12 @@ dependencies = [ 
[[package]] name = "multibase" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b3539ec3c1f04ac9748a260728e855f261b4977f5c3406612c884564f329404" +checksum = "8694bb4835f452b0e3bb06dbebb1d6fc5385b6ca1caf2e55fd165c042390ec77" dependencies = [ "base-x", + "base256emoji", "data-encoding", "data-encoding-macro", ] @@ -6228,7 +6200,7 @@ dependencies = [ "log", "netlink-packet-core", "netlink-sys", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6279,7 +6251,7 @@ dependencies = [ "operation_pool", "parking_lot", "rand 0.8.5", - "rand 0.9.0", + "rand 0.9.2", "rand_chacha 0.3.1", "rand_chacha 0.9.0", "serde_json", @@ -6335,11 +6307,11 @@ dependencies = [ [[package]] name = "nix" -version = "0.29.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "cfg-if", "cfg_aliases", "libc", @@ -6389,11 +6361,11 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.50.1" +version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] @@ -6408,11 +6380,10 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" dependencies = [ - "byteorder", "lazy_static", "libm", "num-integer", @@ -6462,11 +6433,11 @@ dependencies = [ [[package]] name = 
"num_cpus" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi 0.5.2", "libc", ] @@ -6488,7 +6459,7 @@ checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6506,14 +6477,20 @@ dependencies = [ ] [[package]] -name = "object" -version = "0.36.7" +name = "objc2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" dependencies = [ - "memchr", + "objc2-encode", ] +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + [[package]] name = "oid-registry" version = "0.8.1" @@ -6525,14 +6502,20 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.21.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde51589ab56b20a6f686b2c68f7a0bd6add753d697abf720d63f8db3ab7b1ad" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" dependencies = [ "critical-section", "portable-atomic", ] +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + [[package]] name = "oneshot_broadcast" version = "0.1.0" @@ -6554,11 +6537,11 @@ checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssl" -version = "0.10.72" 
+version = "0.10.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "cfg-if", "foreign-types", "libc", @@ -6575,7 +6558,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6586,18 +6569,18 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-src" -version = "300.4.2+3.4.1" +version = "300.5.4+3.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168ce4e058f975fe43e89d9ccf78ca668601887ae736090aacc23ae353c298e2" +checksum = "a507b3792995dae9b0df8a1c1e3771e8418b7c2d9f0baeba32e6fe8b06c7cb72" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.107" +version = "0.9.111" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" dependencies = [ "cc", "libc", @@ -6616,7 +6599,7 @@ dependencies = [ "futures-sink", "js-sys", "pin-project-lite", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", ] @@ -6628,9 +6611,9 @@ checksum = "50f6639e842a97dbea8886e3439710ae463120091e2e064518ba8e716e6ac36d" dependencies = [ "async-trait", "bytes", - "http 1.3.0", + "http 1.3.1", "opentelemetry", - "reqwest 0.12.15", + "reqwest 0.12.24", ] [[package]] @@ -6639,14 +6622,14 @@ version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbee664a43e07615731afc539ca60c6d9f1a9425e25ca09c57bc36c87c55852b" dependencies = [ - "http 1.3.0", + "http 1.3.1", "opentelemetry", "opentelemetry-http", "opentelemetry-proto", "opentelemetry_sdk", 
"prost", - "reqwest 0.12.15", - "thiserror 2.0.12", + "reqwest 0.12.24", + "thiserror 2.0.17", "tokio", "tonic 0.13.1", "tracing", @@ -6675,9 +6658,9 @@ dependencies = [ "futures-util", "opentelemetry", "percent-encoding", - "rand 0.9.0", + "rand 0.9.2", "serde_json", - "thiserror 2.0.12", + "thiserror 2.0.17", ] [[package]] @@ -6693,7 +6676,7 @@ dependencies = [ "maplit", "metrics", "parking_lot", - "rand 0.9.0", + "rand 0.9.2", "rayon", "serde", "state_processing", @@ -6713,9 +6696,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.7.4" +version = "3.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9fde3d0718baf5bc92f577d652001da0f8d54cd03a7974e118d04fc888dc23d" +checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa" dependencies = [ "arrayvec", "bitvec", @@ -6729,14 +6712,14 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.7.4" +version = "3.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581c837bb6b9541ce7faa9377c20616e4fb7650f6b0f68bc93c827ee504fb7b3" +checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6747,9 +6730,9 @@ checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.12.3" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", "parking_lot_core", @@ -6757,15 +6740,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -6803,33 +6786,32 @@ dependencies = [ "digest 0.10.7", "hmac 0.12.1", "password-hash", - "sha2 0.10.8", + "sha2 0.10.9", ] [[package]] name = "pem" -version = "3.0.5" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" dependencies = [ "base64 0.22.1", - "serde", + "serde_core", ] [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.7.15" +version = "2.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" +checksum = "989e7521a040efde50c3ab6bbadafbe15ab6dc042686926be59ac35d74607df4" dependencies = [ "memchr", - "thiserror 2.0.12", "ucd-trie", ] @@ -6850,7 +6832,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -6917,17 +6899,16 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.4" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ "cfg-if", "concurrent-queue", - "hermit-abi 0.4.0", + "hermit-abi 
0.5.2", "pin-project-lite", - "rustix 0.38.44", - "tracing", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.61.2", ] [[package]] @@ -6955,9 +6936,18 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.11.0" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "potential_utf" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] [[package]] name = "powerfmt" @@ -6971,7 +6961,7 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ - "zerocopy 0.8.23", + "zerocopy", ] [[package]] @@ -7010,12 +7000,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.30" +version = "0.2.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1ccf34da56fc294e7d4ccf69a85992b7dfb826b7cf57bac6a70bba3494cc08a" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -7031,9 +7021,9 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ "toml_edit", ] @@ -7057,14 +7047,14 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "proc-macro2" -version = "1.0.94" +version = "1.0.103" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] @@ -7118,23 +7108,22 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "proptest" -version = "1.6.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" +checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.9.0", - "lazy_static", + "bitflags 2.10.0", "num-traits", - "rand 0.8.5", - "rand_chacha 0.3.1", - "rand_xorshift 0.3.0", + "rand 0.9.2", + "rand_chacha 0.9.0", + "rand_xorshift 0.4.0", "regex-syntax", "rusty-fork", "tempfile", @@ -7149,7 +7138,7 @@ checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -7172,7 +7161,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -7205,7 +7194,7 @@ checksum = "5e617cc9058daa5e1fe5a0d23ed745773a5ee354111dad1ec0235b0cc16b6730" dependencies = [ "cfg-if", "darwin-libproc", - "derive_more 0.99.19", + "derive_more 0.99.20", "glob", "mach2", "nix 0.24.3", @@ -7256,49 +7245,52 @@ dependencies = [ [[package]] name = "quickcheck_macros" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9" +checksum = "f71ee38b42f8459a88d3362be6f9b841ad2d5421844f61eb1c59c11bff3ac14a" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.110", ] [[package]] name = "quinn" 
-version = "0.11.6" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" dependencies = [ "bytes", + "cfg_aliases", "futures-io", "pin-project-lite", "quinn-proto", "quinn-udp", "rustc-hash 2.1.1", - "rustls 0.23.23", - "socket2", - "thiserror 2.0.12", + "rustls 0.23.35", + "socket2 0.6.1", + "thiserror 2.0.17", "tokio", "tracing", + "web-time", ] [[package]] name = "quinn-proto" -version = "0.11.9" +version = "0.11.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" dependencies = [ "bytes", - "getrandom 0.2.15", - "rand 0.8.5", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", "ring", "rustc-hash 2.1.1", - "rustls 0.23.23", + "rustls 0.23.35", "rustls-pki-types", "slab", - "thiserror 2.0.12", + "thiserror 2.0.17", "tinyvec", "tracing", "web-time", @@ -7306,27 +7298,33 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.10" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e46f3055866785f6b92bc6164b76be02ca8f2eb4b002c0354b28cf4c119e5944" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2", + "socket2 0.6.1", "tracing", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "quote" -version = "1.0.39" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "r2d2" version = "0.8.10" @@ -7368,14 +7366,13 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", "serde", - "zerocopy 0.8.23", ] [[package]] @@ -7404,7 +7401,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -7413,7 +7410,7 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.1", + "getrandom 0.3.4", "serde", ] @@ -7437,9 +7434,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -7447,9 +7444,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -7470,20 +7467,20 @@ dependencies = [ [[package]] name = "redb" -version = "2.4.0" +version = "2.6.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0a72cd7140de9fc3e318823b883abf819c20d478ec89ce880466dc2ef263c6" +checksum = "8eca1e9d98d5a7e9002d0013e18d5a9b000aee942eb134883a82f06ebffb6c01" dependencies = [ "libc", ] [[package]] name = "redox_syscall" -version = "0.5.10" +version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", ] [[package]] @@ -7492,7 +7489,7 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", "thiserror 1.0.69", ] @@ -7514,14 +7511,14 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -7531,9 +7528,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", @@ -7542,9 +7539,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" [[package]] name = "reqwest" @@ -7557,7 +7554,7 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.3.26", + "h2 0.3.27", "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", @@ -7594,30 +7591,27 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", "futures-core", "futures-util", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-tls 0.6.0", "hyper-util", - "ipnet", "js-sys", "log", - "mime", "native-tls", - "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile 2.2.0", + "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", @@ -7625,12 +7619,12 @@ dependencies = [ "tokio", "tokio-native-tls", "tower 0.5.2", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "windows-registry", ] [[package]] @@ -7651,13 +7645,9 @@ dependencies = [ [[package]] name = "resolv-conf" -version = "0.7.0" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" -dependencies = [ - "hostname", - "quick-error", -] +checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" [[package]] name = "rfc6979" @@ -7677,7 +7667,7 @@ checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom 0.2.16", "libc", "untrusted", "windows-sys 0.52.0", @@ -7751,7 +7741,7 @@ dependencies = [ 
"primitive-types", "proptest", "rand 0.8.5", - "rand 0.9.0", + "rand 0.9.2", "rlp", "ruint-macro", "serde_core", @@ -7796,12 +7786,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - [[package]] name = "rustc-hash" version = "1.1.0" @@ -7835,7 +7819,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ - "semver 1.0.26", + "semver 1.0.27", ] [[package]] @@ -7867,7 +7851,7 @@ version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "errno", "libc", "linux-raw-sys 0.4.15", @@ -7876,15 +7860,15 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.2" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7178faa4b75a30e269c71e61c353ce2748cf3d76f0c44c393f4e60abf49b825" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "errno", "libc", - "linux-raw-sys 0.9.2", - "windows-sys 0.59.0", + "linux-raw-sys 0.11.0", + "windows-sys 0.61.2", ] [[package]] @@ -7915,29 +7899,29 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.23" +version = "0.23.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" +checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ "log", "once_cell", "ring", "rustls-pki-types", - "rustls-webpki 0.102.8", + "rustls-webpki 0.103.8", "subtle", "zeroize", ] [[package]] name = "rustls-native-certs" 
-version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework 3.3.0", + "security-framework 3.5.1", ] [[package]] @@ -7960,9 +7944,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +checksum = "94182ad936a0c91c324cd46c6511b9510ed16af436d7b5bab34beab0afd55f7a" dependencies = [ "web-time", "zeroize", @@ -7991,9 +7975,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.103.4" +version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ "ring", "rustls-pki-types", @@ -8002,15 +7986,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.20" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "rusty-fork" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" dependencies = [ "fnv", "quick-error", @@ -8061,11 +8045,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.27" +version = "0.1.28" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -8177,7 +8161,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "core-foundation 0.9.4", "core-foundation-sys", "libc", @@ -8186,11 +8170,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.3.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80fb1d92c5028aa318b4b8bd7302a5bfcf48be96a37fc6fc790f806b0004ee0c" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "core-foundation 0.10.1", "core-foundation-sys", "libc", @@ -8199,9 +8183,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.14.0" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -8218,11 +8202,12 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" dependencies = [ "serde", + "serde_core", ] [[package]] @@ -8281,7 +8266,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 
2.0.110", ] [[package]] @@ -8305,7 +8290,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -8322,15 +8307,15 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.15.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" +checksum = "10574371d41b0d9b2cff89418eda27da52bcaff2cc8741db26382a77c29131f1" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.8.0", + "indexmap 2.12.0", "schemars 0.9.0", "schemars 1.1.0", "serde_core", @@ -8341,14 +8326,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.15.1" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" +checksum = "08a72d8216842fdd57820dc78d840bef99248e35fb2554ff923319e60f2d686b" dependencies = [ "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -8357,7 +8342,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.8.0", + "indexmap 2.12.0", "itoa", "ryu", "serde", @@ -8400,9 +8385,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -8446,9 +8431,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" dependencies = [ "libc", ] @@ -8479,6 +8464,12 @@ dependencies = [ "validator_metrics", ] +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + [[package]] name = "similar" version = "2.7.0" @@ -8493,7 +8484,7 @@ checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ "num-bigint", "num-traits", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -8520,12 +8511,9 @@ dependencies = [ [[package]] name = "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "slasher" @@ -8545,7 +8533,7 @@ dependencies = [ "maplit", "metrics", "parking_lot", - "rand 0.9.0", + "rand 0.9.2", "rayon", "redb", "safe_arith", @@ -8606,9 +8594,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.14.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" dependencies = [ "arbitrary", "serde", @@ -8633,20 +8621,30 @@ dependencies = [ "rand_core 0.6.4", "ring", "rustc_version 0.4.1", - "sha2 0.10.8", + "sha2 0.10.9", "subtle", ] [[package]] name = "socket2" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", ] +[[package]] +name = "socket2" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + [[package]] name = "spin" version = "0.9.8" @@ -8682,9 +8680,9 @@ dependencies = [ [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "state_processing" @@ -8702,7 +8700,7 @@ dependencies = [ "itertools 0.10.5", "merkle_proof", "metrics", - "rand 0.9.0", + "rand 0.9.2", "rayon", "safe_arith", "smallvec", @@ -8748,7 +8746,7 @@ dependencies = [ "lru 0.12.5", "metrics", "parking_lot", - "rand 0.9.0", + "rand 0.9.2", "redb", "safe_arith", "serde", @@ -8817,7 +8815,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -8832,12 +8830,12 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b986e4a629907f20a2c2a639a75bc22a8b5d99b444e0d83c395f4cb309022bf" dependencies = [ - "darling 0.20.10", + "darling 0.20.11", "itertools 0.13.0", "proc-macro2", "quote", "smallvec", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -8863,9 +8861,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.100" +version = "2.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" dependencies = [ "proc-macro2", 
"quote", @@ -8881,7 +8879,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -8901,13 +8899,13 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -8942,7 +8940,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ - "bitflags 2.9.0", + "bitflags 2.10.0", "core-foundation 0.9.4", "system-configuration-sys 0.6.0", ] @@ -9014,26 +9012,25 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.18.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c317e0a526ee6120d8dabad239c8dadca62b24b6f168914bbbc8e2fb1f0e567" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ - "cfg-if", "fastrand", - "getrandom 0.3.1", + "getrandom 0.3.4", "once_cell", - "rustix 1.0.2", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.61.2", ] [[package]] name = "terminal_size" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed" +checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0" dependencies = [ - "rustix 1.0.2", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.60.2", ] [[package]] @@ -9061,11 +9058,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.17", ] [[package]] @@ -9076,28 +9073,27 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "thread_local" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", - "once_cell", ] [[package]] @@ -9111,9 +9107,9 @@ dependencies = [ [[package]] name = "tikv-jemalloc-ctl" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f21f216790c8df74ce3ab25b534e0718da5a1916719771d3fec23315c99e468b" +checksum = "661f1f6a57b3a36dc9174a2c10f19513b4866816e13425d3e418b11cc37bc24c" dependencies = [ "libc", "paste", @@ -9122,9 +9118,9 @@ dependencies = [ [[package]] name = "tikv-jemalloc-sys" -version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d" +checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" dependencies = [ "cc", "libc", @@ -9132,9 +9128,9 
@@ dependencies = [ [[package]] name = "tikv-jemallocator" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865" +checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" dependencies = [ "libc", "tikv-jemalloc-sys", @@ -9142,9 +9138,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.39" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad298b01a40a23aac4580b67e3dbedb7cc8402f3592d7f49469de2ea4aecdd8" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -9157,15 +9153,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765c97a5b985b7c11d7bc27fa927dc4fe6af3a6dfb021d28deb60d3bf51e76ef" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.20" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8093bc3e81c3bc5f7879de09619d06c9a5a5e45ca44dfeeb7225bae38005c5c" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -9194,7 +9190,7 @@ dependencies = [ "pbkdf2 0.11.0", "rand 0.8.5", "rustc-hash 1.1.0", - "sha2 0.10.8", + "sha2 0.10.9", "thiserror 1.0.69", "unicode-normalization", "wasm-bindgen", @@ -9212,9 +9208,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", @@ -9232,9 +9228,9 @@ dependencies = [ 
[[package]] name = "tinyvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -9247,32 +9243,31 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.44.0" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9975ea0f48b5aa3972bf2d888c238182458437cc2a19374b81b25cdf1023fb3a" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.6.1", "tokio-macros", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -9308,11 +9303,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ - "rustls 0.23.23", + "rustls 0.23.35", "tokio", ] @@ -9330,9 +9325,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.13" +version = "0.7.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +checksum 
= "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" dependencies = [ "bytes", "futures-core", @@ -9345,18 +9340,31 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" +dependencies = [ + "serde_core", +] [[package]] name = "toml_edit" -version = "0.22.24" +version = "0.23.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474" +checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d" dependencies = [ - "indexmap 2.8.0", + "indexmap 2.12.0", "toml_datetime", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" +dependencies = [ "winnow", ] @@ -9371,17 +9379,17 @@ dependencies = [ "axum", "base64 0.22.1", "bytes", - "h2 0.4.8", - "http 1.3.0", + "h2 0.4.12", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-timeout", "hyper-util", "percent-encoding", "pin-project", "prost", - "socket2", + "socket2 0.5.10", "tokio", "tokio-stream", "tower 0.4.13", @@ -9399,10 +9407,10 @@ dependencies = [ "async-trait", "base64 0.22.1", "bytes", - "http 1.3.0", + "http 1.3.1", "http-body 1.0.1", "http-body-util", - "hyper 1.6.0", + "hyper 1.8.1", "hyper-timeout", "hyper-util", "percent-encoding", @@ -9410,7 +9418,7 @@ dependencies = [ "prost", "rustls-native-certs", "tokio", - "tokio-rustls 0.26.2", + "tokio-rustls 0.26.4", "tokio-stream", "tower 0.5.2", "tower-layer", @@ -9446,7 +9454,7 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies 
= [ "futures-core", "futures-util", - "indexmap 2.8.0", + "indexmap 2.12.0", "pin-project-lite", "slab", "sync_wrapper 1.0.2", @@ -9457,6 +9465,24 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.10.0", + "bytes", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -9495,20 +9521,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", "valuable", @@ -9593,10 +9619,10 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bee2ea1551f90040ab0e34b6fb7f2fa3bad8acc925837ac654f2c78a13e3089" dependencies = [ - "darling 0.20.10", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -9611,9 +9637,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" +checksum = 
"dd69c5aa8f924c7519d6372789a74eac5b94fb0f8fcf0d4a97eb0bfc3e785f39" dependencies = [ "serde", "stable_deref_trait", @@ -9627,9 +9653,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "types" @@ -9660,7 +9686,7 @@ dependencies = [ "milhouse", "parking_lot", "paste", - "rand 0.9.0", + "rand 0.9.2", "rand_xorshift 0.4.0", "rayon", "regex", @@ -9733,15 +9759,15 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "unicode-normalization" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +checksum = "5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" dependencies = [ "tinyvec", ] @@ -9793,21 +9819,16 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", "percent-encoding", + "serde", ] -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -9826,17 +9847,19 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "serde", ] [[package]] name = "uuid" -version = "1.15.1" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0f540e3240398cce6128b64ba83fdbdd86129c16a3aa1a3a252efd66eb3d587" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ - "getrandom 0.3.1", + "getrandom 0.3.4", + "js-sys", + "wasm-bindgen", ] [[package]] @@ -9854,7 +9877,7 @@ dependencies = [ "eth2", "fdlimit", "graffiti_file", - "hyper 1.6.0", + "hyper 1.8.1", "initialized_validators", "lighthouse_validator_store", "metrics", @@ -9886,7 +9909,7 @@ dependencies = [ "filesystem", "hex", "lockfile", - "rand 0.9.0", + "rand 0.9.2", "tempfile", "tree_hash", "types", @@ -9916,7 +9939,7 @@ dependencies = [ "lighthouse_version", "logging", "parking_lot", - "rand 0.9.0", + "rand 0.9.2", "sensitive_url", "serde", "serde_json", @@ -10140,50 +10163,37 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.13.3+wasi-0.2.2" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +checksum = 
"0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt", + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.100", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" dependencies = [ "cfg-if", "js-sys", @@ -10194,9 +10204,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -10204,22 +10214,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" dependencies = [ + "bumpalo", "proc-macro2", "quote", - "syn 2.0.100", - 
"wasm-bindgen-backend", + "syn 2.0.110", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" dependencies = [ "unicode-ident", ] @@ -10253,9 +10263,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" dependencies = [ "js-sys", "wasm-bindgen", @@ -10327,9 +10337,9 @@ checksum = "c168940144dd21fd8046987c16a46a33d5fc84eec29ef9dcddc2ac9e31526b7c" [[package]] name = "widestring" -version = "1.1.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7219d36b6eac893fa81e84ebe06485e7dcbb616177469b142df14f1f4deb1311" +checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" [[package]] name = "winapi" @@ -10349,11 +10359,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -10372,16 +10382,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows" -version = "0.58.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" -dependencies = [ - "windows-core 0.58.0", - 
"windows-targets 0.52.6", -] - [[package]] name = "windows-acl" version = "0.3.0" @@ -10394,15 +10394,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "windows-core" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-core" version = "0.53.0" @@ -10415,55 +10406,44 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.58.0" +version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", - "windows-result 0.2.0", - "windows-strings 0.1.0", - "windows-targets 0.52.6", + "windows-link", + "windows-result 0.4.1", + "windows-strings", ] [[package]] name = "windows-implement" -version = "0.58.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "windows-interface" -version = "0.58.0" +version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] name = "windows-link" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" - -[[package]] -name = "windows-registry" -version = 
"0.4.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" -dependencies = [ - "windows-result 0.3.1", - "windows-strings 0.3.1", - "windows-targets 0.53.2", -] +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-result" @@ -10476,37 +10456,18 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-result" -version = "0.3.1" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06374efe858fab7e4f881500e6e86ec8bc28f9462c47e5a9941a0142ad86b189" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" -dependencies = [ - "windows-result 0.2.0", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-strings" -version = "0.3.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ "windows-link", ] @@ -10547,6 +10508,24 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = 
"0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -10595,18 +10574,19 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.2" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", ] [[package]] @@ -10629,9 +10609,9 @@ checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" @@ -10653,9 +10633,9 @@ checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = 
"b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" @@ -10677,9 +10657,9 @@ checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -10689,9 +10669,9 @@ checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" [[package]] name = "windows_i686_msvc" @@ -10713,9 +10693,9 @@ checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" @@ -10737,9 +10717,9 @@ checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" @@ -10761,9 +10741,9 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" 
[[package]] name = "windows_x86_64_gnullvm" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" @@ -10785,15 +10765,15 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winnow" -version = "0.7.3" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] @@ -10809,13 +10789,10 @@ dependencies = [ ] [[package]] -name = "wit-bindgen-rt" -version = "0.33.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" -dependencies = [ - "bitflags 2.9.0", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "workspace_members" @@ -10825,17 +10802,11 @@ dependencies = [ "quote", ] -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = 
"9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "wyz" @@ -10871,7 +10842,7 @@ dependencies = [ "nom", "oid-registry", "rusticata-macros", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -10891,9 +10862,9 @@ dependencies = [ [[package]] name = "xml-rs" -version = "0.8.25" +version = "0.8.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b940ebc25896e71dd073bad2dbaa2abfe97b0a391415e22ad1326d9c54e3c4" +checksum = "3ae8337f8a065cfc972643663ea4279e04e7256de865aa66fe25cec5fb912d3f" [[package]] name = "xmltree" @@ -10932,16 +10903,16 @@ dependencies = [ [[package]] name = "yamux" -version = "0.13.4" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17610762a1207ee816c6fadc29220904753648aba0a9ed61c7b8336e80a559c4" +checksum = "deab71f2e20691b4728b349c6cee8fc7223880fa67b6b4f92225ec32225447e5" dependencies = [ "futures", "log", "nohash-hasher", "parking_lot", "pin-project", - "rand 0.8.5", + "rand 0.9.2", "static_assertions", "web-time", ] @@ -10957,11 +10928,10 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -10969,54 +10939,34 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "synstructure", ] [[package]] name = "zerocopy" -version = "0.7.35" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "zerocopy-derive 0.7.35", -] - -[[package]] -name = "zerocopy" -version = "0.8.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd97444d05a4328b90e75e503a34bad781f14e28a823ad3557f0750df1ebcbc6" -dependencies = [ - "zerocopy-derive 0.8.23", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.100", + "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.23" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -11036,15 +10986,15 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", "synstructure", ] [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" dependencies = [ "serde", "zeroize_derive", @@ -11058,14 +11008,25 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", ] [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -11074,13 +11035,13 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.100", + "syn 2.0.110", ] [[package]] @@ -11118,7 +11079,7 @@ version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" dependencies = [ - "zstd-safe 7.2.3", + "zstd-safe 7.2.4", ] [[package]] @@ -11133,18 +11094,18 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "7.2.3" +version = "7.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3051792fbdc2e1e143244dc28c60f73d8470e93f3f9cbd0ead44da5ed802722" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.14+zstd.1.5.7" +version = "2.0.16+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb060d4926e4ac3a3ad15d864e99ceb5f343c6b34f5bd6d81ae6ed417311be5" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" dependencies = [ "cc", "pkg-config", diff --git a/beacon_node/lighthouse_network/src/service/utils.rs 
b/beacon_node/lighthouse_network/src/service/utils.rs index a0026837e37..63f22be5e2c 100644 --- a/beacon_node/lighthouse_network/src/service/utils.rs +++ b/beacon_node/lighthouse_network/src/service/utils.rs @@ -41,7 +41,7 @@ pub fn build_transport( quic_support: bool, ) -> std::io::Result { // mplex config - let mut mplex_config = libp2p_mplex::MplexConfig::new(); + let mut mplex_config = libp2p_mplex::Config::new(); mplex_config.set_max_buffer_size(256); mplex_config.set_max_buffer_behaviour(libp2p_mplex::MaxBufferBehaviour::Block); diff --git a/beacon_node/lighthouse_network/tests/common.rs b/beacon_node/lighthouse_network/tests/common.rs index 9e8b243698b..d04f1211cf9 100644 --- a/beacon_node/lighthouse_network/tests/common.rs +++ b/beacon_node/lighthouse_network/tests/common.rs @@ -109,7 +109,7 @@ pub fn build_config( config.set_ipv4_listening_address(std::net::Ipv4Addr::UNSPECIFIED, port, port, port); config.enr_address = (Some(std::net::Ipv4Addr::LOCALHOST), None); config.boot_nodes_enr.append(&mut boot_nodes); - config.network_dir = path.into_path(); + config.network_dir = path.keep(); config.disable_peer_scoring = disable_peer_scoring; config.inbound_rate_limiter_config = inbound_rate_limiter; Arc::new(config) diff --git a/validator_client/graffiti_file/src/lib.rs b/validator_client/graffiti_file/src/lib.rs index 8b5637d09ed..8e40ef907dd 100644 --- a/validator_client/graffiti_file/src/lib.rs +++ b/validator_client/graffiti_file/src/lib.rs @@ -154,7 +154,7 @@ mod tests { let pk5 = PublicKeyBytes::deserialize(&hex::decode(&PK5[2..]).unwrap()).unwrap(); let pk6 = PublicKeyBytes::deserialize(&hex::decode(&PK6[2..]).unwrap()).unwrap(); - let file_name = temp.into_path().join("graffiti.txt"); + let file_name = temp.keep().join("graffiti.txt"); let file = File::create(&file_name).unwrap(); let mut graffiti_file = LineWriter::new(file); From 0d0232e8fc08c5f8bdf6a3d772f839fd405440f5 Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Mon, 24 Nov 2025 16:25:46 +1100 
Subject: [PATCH 34/74] Optimise out block header calculation (#8446) This is a `tracing`-driven optimisation. While investigating why Lighthouse is slow to send `newPayload`, I found a suspicious 13ms of computation on the hot path in `gossip_block_into_execution_pending_block_slashable`: headercalc Looking at the current implementation we can see that the _only_ thing that happens prior to calling into `from_gossip_verified_block` is the calculation of a `header`. We first call `SignatureVerifiedBlock::from_gossip_verified_block_check_slashable`: https://github.com/sigp/lighthouse/blob/261322c3e3ee467c9454fa160a00866439cbc62f/beacon_node/beacon_chain/src/block_verification.rs#L1075-L1076 Which is where the `header` is calculated prior to calling `from_gossip_verified_block`: https://github.com/sigp/lighthouse/blob/261322c3e3ee467c9454fa160a00866439cbc62f/beacon_node/beacon_chain/src/block_verification.rs#L1224-L1226 Notice that the `header` is _only_ used in the case of an error, yet we spend time computing it every time! This PR moves the calculation of the header (which involves hashing the whole beacon block, including the execution payload), into the error case. We take a cheap clone of the `Arc`'d beacon block on the hot path, and use this for calculating the header _only_ in the case an error actually occurs. 
This shaves 10-20ms off our pre-newPayload delays, and 10-20ms off every block processing :tada: Co-Authored-By: Michael Sproul --- .../beacon_chain/src/block_verification.rs | 20 +++++++++++-------- consensus/types/src/signed_beacon_block.rs | 2 ++ 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/beacon_node/beacon_chain/src/block_verification.rs b/beacon_node/beacon_chain/src/block_verification.rs index 237826281cc..374f1e2b360 100644 --- a/beacon_node/beacon_chain/src/block_verification.rs +++ b/beacon_node/beacon_chain/src/block_verification.rs @@ -1164,9 +1164,9 @@ impl SignatureVerifiedBlock { block_root: Hash256, chain: &BeaconChain, ) -> Result> { - let header = block.signed_block_header(); + let arc_block = block.block_cloned(); Self::new(block, block_root, chain) - .map_err(|e| BlockSlashInfo::from_early_error_block(header, e)) + .map_err(|e| BlockSlashInfo::from_early_error_block(arc_block.signed_block_header(), e)) } /// Finishes signature verification on the provided `GossipVerifedBlock`. Does not re-verify @@ -1221,9 +1221,13 @@ impl SignatureVerifiedBlock { from: GossipVerifiedBlock, chain: &BeaconChain, ) -> Result> { - let header = from.block.signed_block_header(); - Self::from_gossip_verified_block(from, chain) - .map_err(|e| BlockSlashInfo::from_early_error_block(header, e)) + let block = from.block.clone(); + Self::from_gossip_verified_block(from, chain).map_err(|e| { + // Lazily create the header from the block in case of error. Computing the header + // involves some hashing and takes ~13ms which we DO NOT want to do on the hot path of + // block processing (prior to sending newPayload pre-Gloas). 
+ BlockSlashInfo::from_early_error_block(block.signed_block_header(), e) + }) } pub fn block_root(&self) -> Hash256 { @@ -1248,12 +1252,12 @@ impl IntoExecutionPendingBlock for SignatureVerifiedBloc chain: &Arc>, notify_execution_layer: NotifyExecutionLayer, ) -> Result, BlockSlashInfo> { - let header = self.block.signed_block_header(); + let arc_block = self.block.block_cloned(); let (parent, block) = if let Some(parent) = self.parent { (parent, self.block) } else { load_parent(self.block, chain) - .map_err(|e| BlockSlashInfo::SignatureValid(header.clone(), e))? + .map_err(|e| BlockSlashInfo::SignatureValid(arc_block.signed_block_header(), e))? }; ExecutionPendingBlock::from_signature_verified_components( @@ -1264,7 +1268,7 @@ impl IntoExecutionPendingBlock for SignatureVerifiedBloc chain, notify_execution_layer, ) - .map_err(|e| BlockSlashInfo::SignatureValid(header, e)) + .map_err(|e| BlockSlashInfo::SignatureValid(arc_block.signed_block_header(), e)) } fn block(&self) -> &SignedBeaconBlock { diff --git a/consensus/types/src/signed_beacon_block.rs b/consensus/types/src/signed_beacon_block.rs index 35d2faac483..7b04cc57711 100644 --- a/consensus/types/src/signed_beacon_block.rs +++ b/consensus/types/src/signed_beacon_block.rs @@ -8,6 +8,7 @@ use ssz_derive::{Decode, Encode}; use std::fmt; use superstruct::superstruct; use test_random_derive::TestRandom; +use tracing::instrument; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; @@ -253,6 +254,7 @@ impl> SignedBeaconBlock } /// Produce a signed beacon block header corresponding to this block. 
+ #[instrument(level = "debug", skip_all)] pub fn signed_block_header(&self) -> SignedBeaconBlockHeader { SignedBeaconBlockHeader { message: self.message().block_header(), From 03832b0ad2ffc49a6e3084d35465c8bf5d542a75 Mon Sep 17 00:00:00 2001 From: kevaundray Date: Mon, 24 Nov 2025 02:40:20 -0300 Subject: [PATCH 35/74] chore: Add Dockerfile.dev for local development (#8295) Currently whenever we build the `Dockerfile` file for local development using kurtosis , it recompiles everything on my laptop, even if no changes are made. This takes about 120 seconds on my laptop (might be faster on others). Conservatively, I created a new Dockerfile.dev, so that the original file is kept the same, even though its pretty similar. This uses `--mount-type=cache` saving the target and registry folder across builds. **Usage** ```sh docker build -f Dockerfile.dev -t lighthouse:dev . ``` Co-Authored-By: Kevaundray Wedderburn --- Dockerfile.dev | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 Dockerfile.dev diff --git a/Dockerfile.dev b/Dockerfile.dev new file mode 100644 index 00000000000..50bf1e5898e --- /dev/null +++ b/Dockerfile.dev @@ -0,0 +1,25 @@ +FROM rust:1.88.0-bullseye AS builder +RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev +WORKDIR /lighthouse + +ARG FEATURES +ARG PROFILE=release +ARG CARGO_USE_GIT_CLI=true +ENV FEATURES=$FEATURES +ENV PROFILE=$PROFILE +ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_USE_GIT_CLI +ENV CARGO_INCREMENTAL=1 + +COPY . . +# Persist the registry and target file across builds. 
See: https://docs.docker.com/build/cache/optimize/#use-cache-mounts +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/lighthouse/target \ + make + +FROM ubuntu:22.04 +RUN apt-get update && apt-get -y upgrade && apt-get install -y --no-install-recommends \ + libssl-dev \ + ca-certificates \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* +COPY --from=builder /usr/local/cargo/bin/lighthouse /usr/local/bin/lighthouse \ No newline at end of file From bdfade8e3de8e1d70776b52d348a54ca7cc248cb Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Tue, 25 Nov 2025 09:39:40 +1100 Subject: [PATCH 36/74] Consolidate reqwest versions (#8452) Update `reqwest` to 0.12 so we only depend on a single version. This should slightly improve compile times and reduce binary bloat. Co-Authored-By: Michael Sproul --- Cargo.lock | 226 +++++---------------- Cargo.toml | 2 +- beacon_node/http_api/src/lib.rs | 4 +- beacon_node/http_api/src/publish_blocks.rs | 12 +- common/eth2/Cargo.toml | 2 +- common/warp_utils/src/lib.rs | 1 + common/warp_utils/src/status_code.rs | 9 + 7 files changed, 72 insertions(+), 184 deletions(-) create mode 100644 common/warp_utils/src/status_code.rs diff --git a/Cargo.lock b/Cargo.lock index 0f3f57973eb..a1ad2ab5ba7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -382,7 +382,7 @@ dependencies = [ "lru 0.13.0", "parking_lot", "pin-project", - "reqwest 0.12.24", + "reqwest", "serde", "serde_json", "thiserror 2.0.17", @@ -426,7 +426,7 @@ dependencies = [ "alloy-transport-http", "futures", "pin-project", - "reqwest 0.12.24", + "reqwest", "serde", "serde_json", "tokio", @@ -612,7 +612,7 @@ checksum = "dc1b37b1a30d23deb3a8746e882c70b384c574d355bc2bbea9ea918b0c31366e" dependencies = [ "alloy-json-rpc", "alloy-transport", - "reqwest 0.12.24", + "reqwest", "serde_json", "tower 0.5.2", "tracing", @@ -1150,7 +1150,7 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "sync_wrapper 1.0.2", + "sync_wrapper", "tower 0.5.2", 
"tower-layer", "tower-service", @@ -1171,7 +1171,7 @@ dependencies = [ "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.2", + "sync_wrapper", "tower-layer", "tower-service", ] @@ -1596,7 +1596,7 @@ dependencies = [ "ethereum_ssz", "lighthouse_version", "mockito", - "reqwest 0.11.27", + "reqwest", "sensitive_url", "serde", "serde_json", @@ -2549,7 +2549,7 @@ dependencies = [ "alloy-primitives", "ethereum_ssz", "hex", - "reqwest 0.11.27", + "reqwest", "serde_json", "sha2 0.9.9", "tree_hash", @@ -3152,7 +3152,7 @@ dependencies = [ "pretty_reqwest_error", "proto_array", "rand 0.9.2", - "reqwest 0.11.27", + "reqwest", "reqwest-eventsource", "sensitive_url", "serde", @@ -3229,7 +3229,7 @@ dependencies = [ "ethereum_ssz", "kzg", "pretty_reqwest_error", - "reqwest 0.11.27", + "reqwest", "sensitive_url", "serde_yaml", "sha2 0.9.9", @@ -3373,7 +3373,7 @@ dependencies = [ "hex", "logging", "network_utils", - "reqwest 0.11.27", + "reqwest", "sensitive_url", "serde_json", "task_executor", @@ -3411,7 +3411,7 @@ dependencies = [ "parking_lot", "pretty_reqwest_error", "rand 0.9.2", - "reqwest 0.11.27", + "reqwest", "sensitive_url", "serde", "serde_json", @@ -4292,7 +4292,7 @@ dependencies = [ "malloc_utils", "metrics", "network_utils", - "reqwest 0.11.27", + "reqwest", "serde", "slot_clock", "store", @@ -4370,16 +4370,19 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.2" +version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "futures-util", - "http 0.2.12", - "hyper 0.14.32", - "rustls 0.21.12", + "http 1.3.1", + "hyper 1.8.1", + "hyper-util", + "rustls 0.23.35", + "rustls-pki-types", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls 0.26.4", + "tower-service", + "webpki-roots", ] [[package]] @@ -4395,19 +4398,6 @@ dependencies = [ 
"tower-service", ] -[[package]] -name = "hyper-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" -dependencies = [ - "bytes", - "hyper 0.14.32", - "native-tls", - "tokio", - "tokio-native-tls", -] - [[package]] name = "hyper-tls" version = "0.6.0" @@ -4608,7 +4598,7 @@ dependencies = [ "netlink-proto", "netlink-sys", "rtnetlink", - "system-configuration 0.6.1", + "system-configuration", "tokio", "windows", ] @@ -4691,7 +4681,7 @@ dependencies = [ "metrics", "parking_lot", "rand 0.9.2", - "reqwest 0.11.27", + "reqwest", "serde", "serde_json", "signing_method", @@ -6039,7 +6029,7 @@ dependencies = [ "lighthouse_version", "metrics", "regex", - "reqwest 0.11.27", + "reqwest", "sensitive_url", "serde", "serde_json", @@ -6613,7 +6603,7 @@ dependencies = [ "bytes", "http 1.3.1", "opentelemetry", - "reqwest 0.12.24", + "reqwest", ] [[package]] @@ -6628,7 +6618,7 @@ dependencies = [ "opentelemetry-proto", "opentelemetry_sdk", "prost", - "reqwest 0.12.24", + "reqwest", "thiserror 2.0.17", "tokio", "tonic 0.13.1", @@ -6994,7 +6984,7 @@ dependencies = [ name = "pretty_reqwest_error" version = "0.1.0" dependencies = [ - "reqwest 0.11.27", + "reqwest", "sensitive_url", ] @@ -7543,52 +7533,6 @@ version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2 0.3.27", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", - "hyper-rustls", - "hyper-tls 0.5.0", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - "percent-encoding", - 
"pin-project-lite", - "rustls 0.21.12", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 0.1.2", - "system-configuration 0.5.1", - "tokio", - "tokio-native-tls", - "tokio-rustls 0.24.1", - "tokio-util", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-streams", - "web-sys", - "webpki-roots", - "winreg", -] - [[package]] name = "reqwest" version = "0.12.24" @@ -7604,34 +7548,41 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.8.1", - "hyper-tls 0.6.0", + "hyper-rustls", + "hyper-tls", "hyper-util", "js-sys", "log", "native-tls", "percent-encoding", "pin-project-lite", + "quinn", + "rustls 0.23.35", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.2", + "sync_wrapper", "tokio", "tokio-native-tls", + "tokio-rustls 0.26.4", + "tokio-util", "tower 0.5.2", "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams", "web-sys", + "webpki-roots", ] [[package]] name = "reqwest-eventsource" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f529a5ff327743addc322af460761dff5b50e0c826b9e6ac44c3195c50bb2026" +checksum = "632c55746dbb44275691640e7b40c907c16a2dc1a5842aa98aaec90da6ec6bde" dependencies = [ "eventsource-stream", "futures-core", @@ -7639,7 +7590,7 @@ dependencies = [ "mime", "nom", "pin-project-lite", - "reqwest 0.11.27", + "reqwest", "thiserror 1.0.69", ] @@ -7871,18 +7822,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "rustls" -version = "0.21.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" -dependencies = [ - "log", - "ring", - "rustls-webpki 0.101.7", - "sct", -] - [[package]] name = "rustls" version = "0.22.4" @@ -7924,15 +7863,6 @@ dependencies = [ "security-framework 3.5.1", ] -[[package]] -name = "rustls-pemfile" 
-version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - [[package]] name = "rustls-pemfile" version = "2.2.0" @@ -7952,16 +7882,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "rustls-webpki" version = "0.102.8" @@ -8109,16 +8029,6 @@ dependencies = [ "sha2 0.9.9", ] -[[package]] -name = "sct" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "sec1" version = "0.7.3" @@ -8456,7 +8366,7 @@ dependencies = [ "ethereum_serde_utils", "lockfile", "parking_lot", - "reqwest 0.11.27", + "reqwest", "serde", "task_executor", "types", @@ -8882,12 +8792,6 @@ dependencies = [ "syn 2.0.110", ] -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - [[package]] name = "sync_wrapper" version = "1.0.2" @@ -8923,17 +8827,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation 0.9.4", - "system-configuration-sys 0.5.0", -] - [[package]] name = "system-configuration" version = "0.6.1" @@ -8942,17 +8835,7 @@ checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ "bitflags 2.10.0", 
"core-foundation 0.9.4", - "system-configuration-sys 0.6.0", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", + "system-configuration-sys", ] [[package]] @@ -9280,16 +9163,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls 0.21.12", - "tokio", -] - [[package]] name = "tokio-rustls" version = "0.25.0" @@ -9457,7 +9330,7 @@ dependencies = [ "indexmap 2.12.0", "pin-project-lite", "slab", - "sync_wrapper 1.0.2", + "sync_wrapper", "tokio", "tokio-util", "tower-layer", @@ -9883,7 +9756,7 @@ dependencies = [ "metrics", "monitoring_api", "parking_lot", - "reqwest 0.11.27", + "reqwest", "sensitive_url", "serde", "slashing_protection", @@ -10133,7 +10006,7 @@ dependencies = [ "mime_guess", "percent-encoding", "pin-project", - "rustls-pemfile 2.2.0", + "rustls-pemfile", "scoped-tls", "serde", "serde_json", @@ -10296,7 +10169,7 @@ dependencies = [ "lighthouse_validator_store", "logging", "parking_lot", - "reqwest 0.11.27", + "reqwest", "serde", "serde_json", "serde_yaml", @@ -10313,9 +10186,12 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.4" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e" +dependencies = [ + "rustls-pki-types", +] [[package]] name = "which" diff --git a/Cargo.toml b/Cargo.toml index 0a40e9e6963..713fbf25d8c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -209,7 +209,7 @@ r2d2 = "0.8" rand = "0.9.0" rayon = "1.7" regex = "1" 
-reqwest = { version = "0.11", default-features = false, features = [ +reqwest = { version = "0.12", default-features = false, features = [ "blocking", "json", "stream", diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index e8fb149bfd0..6389b34961a 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -46,6 +46,7 @@ pub use block_id::BlockId; use builder_states::get_next_withdrawals; use bytes::Bytes; use directory::DEFAULT_ROOT_DIR; +use eth2::StatusCode; use eth2::types::{ self as api_types, BroadcastValidation, ContextDeserialize, EndpointVersion, ForkChoice, ForkChoiceExtraData, ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, @@ -103,7 +104,6 @@ use version::{ unsupported_version_rejection, }; use warp::Reply; -use warp::http::StatusCode; use warp::hyper::Body; use warp::sse::Event; use warp::{Filter, Rejection, http::Response}; @@ -4097,7 +4097,7 @@ pub fn serve( convert_rejection(rx.await.unwrap_or_else(|_| { Ok(warp::reply::with_status( warp::reply::json(&"No response from channel"), - eth2::StatusCode::INTERNAL_SERVER_ERROR, + warp::http::StatusCode::INTERNAL_SERVER_ERROR, ) .into_response()) })) diff --git a/beacon_node/http_api/src/publish_blocks.rs b/beacon_node/http_api/src/publish_blocks.rs index bfe41c8706c..9671a72da26 100644 --- a/beacon_node/http_api/src/publish_blocks.rs +++ b/beacon_node/http_api/src/publish_blocks.rs @@ -9,9 +9,12 @@ use beacon_chain::{ AvailabilityProcessingStatus, BeaconChain, BeaconChainError, BeaconChainTypes, BlockError, IntoGossipVerifiedBlock, NotifyExecutionLayer, build_blob_data_column_sidecars, }; -use eth2::types::{ - BlobsBundle, BroadcastValidation, ErrorMessage, ExecutionPayloadAndBlobs, FullPayloadContents, - PublishBlockRequest, SignedBlockContents, +use eth2::{ + StatusCode, + types::{ + BlobsBundle, BroadcastValidation, ErrorMessage, ExecutionPayloadAndBlobs, + FullPayloadContents, PublishBlockRequest, SignedBlockContents, + }, }; use 
execution_layer::{ProvenancedPayload, SubmitBlindedBlockResponse}; use futures::TryFutureExt; @@ -32,7 +35,6 @@ use types::{ DataColumnSubnetId, EthSpec, ExecPayload, ExecutionBlockHash, ForkName, FullPayload, FullPayloadBellatrix, Hash256, KzgProofs, SignedBeaconBlock, SignedBlindedBeaconBlock, }; -use warp::http::StatusCode; use warp::{Rejection, Reply, reply::Response}; pub type UnverifiedBlobs = Option<( @@ -302,7 +304,7 @@ pub async fn publish_block>( message: "duplicate block".to_string(), stacktraces: vec![], }), - duplicate_status_code, + warp_utils::status_code::convert(duplicate_status_code)?, ) .into_response()) } diff --git a/common/eth2/Cargo.toml b/common/eth2/Cargo.toml index 906fcd7b5c3..7a75bdc80a1 100644 --- a/common/eth2/Cargo.toml +++ b/common/eth2/Cargo.toml @@ -26,7 +26,7 @@ pretty_reqwest_error = { workspace = true } proto_array = { workspace = true } rand = { workspace = true } reqwest = { workspace = true } -reqwest-eventsource = "0.5.0" +reqwest-eventsource = "0.6.0" sensitive_url = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } diff --git a/common/warp_utils/src/lib.rs b/common/warp_utils/src/lib.rs index c10adbac0df..1c77d4d84b8 100644 --- a/common/warp_utils/src/lib.rs +++ b/common/warp_utils/src/lib.rs @@ -5,5 +5,6 @@ pub mod cors; pub mod json; pub mod query; pub mod reject; +pub mod status_code; pub mod task; pub mod uor; diff --git a/common/warp_utils/src/status_code.rs b/common/warp_utils/src/status_code.rs new file mode 100644 index 00000000000..1b052973599 --- /dev/null +++ b/common/warp_utils/src/status_code.rs @@ -0,0 +1,9 @@ +use eth2::StatusCode; +use warp::Rejection; + +/// Convert from a "new" `http::StatusCode` to a `warp` compatible one. 
+pub fn convert(code: StatusCode) -> Result { + code.as_u16().try_into().map_err(|e| { + crate::reject::custom_server_error(format!("bad status code {code:?} - {e:?}")) + }) +} From d6cec0ba50e60fbf5ff0f7fbde0352b07fbde399 Mon Sep 17 00:00:00 2001 From: Lion - dapplion <35266934+dapplion@users.noreply.github.com> Date: Tue, 25 Nov 2025 00:45:25 -0300 Subject: [PATCH 37/74] Dockerfile with cargo artifacts caching (#8455) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When developing locally with kurtosis a typical dev workflow is: loop: - Build local lighthouse docker image - Run kurtosis - Observe bug - Fix code The docker build step would download and build all crates. Docker docs suggests an optimization to cache build artifacts, see https://docs.docker.com/build/cache/optimize/#use-cache-mounts I have tested and it's like building Lighthouse outside of a docker environment 🤤 The docker build time after changing one line in the top beacon_node crate is 50 seconds on my local machine ❤️ The release path is un-affected. Do you have worries this can affect the output of the release binaries? This is too good of an improvement to keep it in a separate Dockerfile. Co-Authored-By: dapplion <35266934+dapplion@users.noreply.github.com> --- Dockerfile | 10 ++++++++-- Dockerfile.dev | 25 ------------------------- 2 files changed, 8 insertions(+), 27 deletions(-) delete mode 100644 Dockerfile.dev diff --git a/Dockerfile b/Dockerfile index f925836e48e..8cc20ab000f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,19 @@ FROM rust:1.88.0-bullseye AS builder RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev -COPY . lighthouse ARG FEATURES ARG PROFILE=release ARG CARGO_USE_GIT_CLI=true ENV FEATURES=$FEATURES ENV PROFILE=$PROFILE ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_USE_GIT_CLI -RUN cd lighthouse && make +ENV CARGO_INCREMENTAL=1 + +WORKDIR /lighthouse +COPY . . 
+# Persist the registry and target file across builds. See: https://docs.docker.com/build/cache/optimize/#use-cache-mounts +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/lighthouse/target \ + make FROM ubuntu:22.04 RUN apt-get update && apt-get -y upgrade && apt-get install -y --no-install-recommends \ diff --git a/Dockerfile.dev b/Dockerfile.dev deleted file mode 100644 index 50bf1e5898e..00000000000 --- a/Dockerfile.dev +++ /dev/null @@ -1,25 +0,0 @@ -FROM rust:1.88.0-bullseye AS builder -RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev -WORKDIR /lighthouse - -ARG FEATURES -ARG PROFILE=release -ARG CARGO_USE_GIT_CLI=true -ENV FEATURES=$FEATURES -ENV PROFILE=$PROFILE -ENV CARGO_NET_GIT_FETCH_WITH_CLI=$CARGO_USE_GIT_CLI -ENV CARGO_INCREMENTAL=1 - -COPY . . -# Persist the registry and target file across builds. See: https://docs.docker.com/build/cache/optimize/#use-cache-mounts -RUN --mount=type=cache,target=/usr/local/cargo/registry \ - --mount=type=cache,target=/lighthouse/target \ - make - -FROM ubuntu:22.04 -RUN apt-get update && apt-get -y upgrade && apt-get install -y --no-install-recommends \ - libssl-dev \ - ca-certificates \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* -COPY --from=builder /usr/local/cargo/bin/lighthouse /usr/local/bin/lighthouse \ No newline at end of file From e21a433748251ea719f93b5aaf21b39a900f19be Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Thu, 27 Nov 2025 10:00:21 +1100 Subject: [PATCH 38/74] Allow manual checkpoint sync without blobs (#8470) Since merging this PR, we don't need `--checkpoint-blobs`, even prior to Fulu: - https://github.com/sigp/lighthouse/pull/8417 This PR removes the mandatory check for blobs prior to Fulu, enabling simpler manual checkpoint sync. 
Co-Authored-By: Michael Sproul Co-Authored-By: Jimmy Chen --- beacon_node/beacon_chain/tests/store_tests.rs | 29 ++++++++++++++----- beacon_node/client/src/builder.rs | 11 ++----- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index cf175a56d74..0733d901fc3 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -2705,7 +2705,7 @@ async fn weak_subjectivity_sync_easy() { let num_initial_slots = E::slots_per_epoch() * 11; let checkpoint_slot = Slot::new(E::slots_per_epoch() * 9); let slots = (1..num_initial_slots).map(Slot::new).collect(); - weak_subjectivity_sync_test(slots, checkpoint_slot, None).await + weak_subjectivity_sync_test(slots, checkpoint_slot, None, true).await } #[tokio::test] @@ -2713,7 +2713,7 @@ async fn weak_subjectivity_sync_single_block_batches() { let num_initial_slots = E::slots_per_epoch() * 11; let checkpoint_slot = Slot::new(E::slots_per_epoch() * 9); let slots = (1..num_initial_slots).map(Slot::new).collect(); - weak_subjectivity_sync_test(slots, checkpoint_slot, Some(1)).await + weak_subjectivity_sync_test(slots, checkpoint_slot, Some(1), true).await } #[tokio::test] @@ -2727,7 +2727,7 @@ async fn weak_subjectivity_sync_unaligned_advanced_checkpoint() { slot <= checkpoint_slot - 3 || slot > checkpoint_slot }) .collect(); - weak_subjectivity_sync_test(slots, checkpoint_slot, None).await + weak_subjectivity_sync_test(slots, checkpoint_slot, None, true).await } #[tokio::test] @@ -2741,7 +2741,7 @@ async fn weak_subjectivity_sync_unaligned_unadvanced_checkpoint() { slot <= checkpoint_slot || slot > checkpoint_slot + 3 }) .collect(); - weak_subjectivity_sync_test(slots, checkpoint_slot, None).await + weak_subjectivity_sync_test(slots, checkpoint_slot, None, true).await } // Regression test for https://github.com/sigp/lighthouse/issues/4817 @@ -2753,7 +2753,7 @@ async fn 
weak_subjectivity_sync_skips_at_genesis() { let end_slot = E::slots_per_epoch() * 4; let slots = (start_slot..end_slot).map(Slot::new).collect(); let checkpoint_slot = Slot::new(E::slots_per_epoch() * 2); - weak_subjectivity_sync_test(slots, checkpoint_slot, None).await + weak_subjectivity_sync_test(slots, checkpoint_slot, None, true).await } // Checkpoint sync from the genesis state. @@ -2766,13 +2766,24 @@ async fn weak_subjectivity_sync_from_genesis() { let end_slot = E::slots_per_epoch() * 2; let slots = (start_slot..end_slot).map(Slot::new).collect(); let checkpoint_slot = Slot::new(0); - weak_subjectivity_sync_test(slots, checkpoint_slot, None).await + weak_subjectivity_sync_test(slots, checkpoint_slot, None, true).await +} + +// Test checkpoint sync without providing blobs - backfill should fetch them. +#[tokio::test] +async fn weak_subjectivity_sync_without_blobs() { + let start_slot = 4; + let end_slot = E::slots_per_epoch() * 4; + let slots = (start_slot..end_slot).map(Slot::new).collect(); + let checkpoint_slot = Slot::new(E::slots_per_epoch() * 2); + weak_subjectivity_sync_test(slots, checkpoint_slot, None, false).await } async fn weak_subjectivity_sync_test( slots: Vec, checkpoint_slot: Slot, backfill_batch_size: Option, + provide_blobs: bool, ) { // Build an initial chain on one harness, representing a synced node with full history. 
let num_final_blocks = E::slots_per_epoch() * 2; @@ -2874,7 +2885,11 @@ async fn weak_subjectivity_sync_test( .weak_subjectivity_state( wss_state, wss_block.clone(), - wss_blobs_opt.clone(), + if provide_blobs { + wss_blobs_opt.clone() + } else { + None + }, genesis_state, ) .unwrap() diff --git a/beacon_node/client/src/builder.rs b/beacon_node/client/src/builder.rs index 380e0c114a4..bac61fc7356 100644 --- a/beacon_node/client/src/builder.rs +++ b/beacon_node/client/src/builder.rs @@ -354,15 +354,10 @@ where let anchor_block = SignedBeaconBlock::from_ssz_bytes(&anchor_block_bytes, &spec) .map_err(|e| format!("Unable to parse weak subj block SSZ: {:?}", e))?; - // `BlobSidecar` is no longer used from Fulu onwards (superseded by `DataColumnSidecar`), - // which will be fetched via rpc instead (unimplemented). - let is_before_fulu = !spec - .fork_name_at_slot::(anchor_block.slot()) - .fulu_enabled(); - let anchor_blobs = if is_before_fulu && anchor_block.message().body().has_blobs() { + // Providing blobs is optional now and not providing them is recommended. + // Backfill can handle downloading the blobs or columns for the checkpoint block. 
+ let anchor_blobs = if let Some(anchor_blobs_bytes) = anchor_blobs_bytes { let max_blobs_len = spec.max_blobs_per_block(anchor_block.epoch()) as usize; - let anchor_blobs_bytes = anchor_blobs_bytes - .ok_or("Blobs for checkpoint must be provided using --checkpoint-blobs")?; Some( BlobSidecarList::from_ssz_bytes(&anchor_blobs_bytes, max_blobs_len) .map_err(|e| format!("Unable to parse weak subj blobs SSZ: {e:?}"))?, From 4494b0a68443711da1f16de62f11566627b64020 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Thu, 27 Nov 2025 13:21:44 +0800 Subject: [PATCH 39/74] Update docs on Siren port and other small updates (#8399) Co-Authored-By: Tan Chee Keong --- book/src/advanced_blobs.md | 2 +- book/src/advanced_checkpoint_sync.md | 2 +- book/src/ui_installation.md | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/book/src/advanced_blobs.md b/book/src/advanced_blobs.md index 6d9ecdf72f8..e06bdb9fb9a 100644 --- a/book/src/advanced_blobs.md +++ b/book/src/advanced_blobs.md @@ -17,7 +17,7 @@ While both `--supernode` and `--semi-supernode` can serve blobs, a supernode wil Combining `--prune-blobs false` and `--supernode` (or `--semi-supernode`) implies that no data columns will be pruned, and the node will be able to serve blobs since using the flag. -If you want historical blob data beyond the data availability period (18 days), you can backfill blobs or data columns with the experimental flag `--complete-blob-backfill`. However, do note that this is an experimental feature and it only works when the flag is present during a fresh checkpoint sync when the database is initialised. The flag will have no effect if the node is already running (with an existing database). During blob backfill, the feature may cause some issues, e.g., the node may block most of its peers. 
+If you want historical blob data beyond the data availability period (18 days), you can backfill blobs or data columns with the experimental flag `--complete-blob-backfill`. However, do note that this is an experimental feature and it only works when the flag is present during a fresh checkpoint sync when the database is initialised. The flag will not backfill blobs if the node is already running (with an existing database). During blob backfill, the feature may cause some issues, e.g., the node may block most of its peers. **⚠️ The following section on Blobs is archived and not maintained as blobs are stored in the form of data columns after the Fulu fork ⚠️** diff --git a/book/src/advanced_checkpoint_sync.md b/book/src/advanced_checkpoint_sync.md index 9cc18dda8c3..7c30598928b 100644 --- a/book/src/advanced_checkpoint_sync.md +++ b/book/src/advanced_checkpoint_sync.md @@ -160,7 +160,7 @@ curl -H "Accept: application/octet-stream" "http://localhost:5052/eth/v1/beacon/ where `$SLOT` is the slot number. A slot which is an epoch boundary slot (i.e., first slot of an epoch) should always be used for manual checkpoint sync. -If the block contains blobs, all state, block and blobs must be provided and must point to the same slot. The +If the block contains blobs, all state, block and blobs must be provided and must point to the same slot (only applies for slots before Fulu). The state may be from the same slot as the block (unadvanced), or advanced to an epoch boundary, in which case it will be assumed to be finalized at that epoch. diff --git a/book/src/ui_installation.md b/book/src/ui_installation.md index 5a785650049..82f5d755bcb 100644 --- a/book/src/ui_installation.md +++ b/book/src/ui_installation.md @@ -138,13 +138,13 @@ Navigate to the backend directory `cd backend`. Install all required Node packag After initializing the backend, return to the root directory. Install all frontend dependencies by executing `yarn`. Build the frontend using `yarn build`. 
Start the frontend production server with `yarn start`. -This will allow you to access siren at `http://localhost:3000` by default. +This will allow you to access siren at `http://localhost:3300` by default. ## Advanced configuration ### About self-signed SSL certificates -By default, internally, Siren is running on port 80 (plain, behind nginx), port 3000 (plain, direct) and port 443 (with SSL, behind nginx)). Siren will generate and use a self-signed certificate on startup. This will generate a security warning when you try to access the interface. We recommend to only disable SSL if you would access Siren over a local LAN or otherwise highly trusted or encrypted network (i.e. VPN). +By default, internally, Siren is running on port 80 (plain, behind nginx), port 3300 (plain, direct) and port 443 (with SSL, behind nginx)). Siren will generate and use a self-signed certificate on startup. This will generate a security warning when you try to access the interface. We recommend to only disable SSL if you would access Siren over a local LAN or otherwise highly trusted or encrypted network (i.e. VPN). #### Generating persistent SSL certificates and installing them to your system From 070e3957147dae9eaa6e1a4d0030467bb835161a Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Thu, 27 Nov 2025 16:53:55 +1100 Subject: [PATCH 40/74] Remove quickcheck in favour of proptest (#8471) Consolidate our property-testing around `proptest`. This PR was written with Copilot and manually tweaked. 
Co-Authored-By: Michael Sproul Co-Authored-By: Michael Sproul --- Cargo.lock | 38 +- Cargo.toml | 3 +- beacon_node/lighthouse_network/Cargo.toml | 3 +- .../src/peer_manager/mod.rs | 326 ++++++++++-------- consensus/merkle_proof/Cargo.toml | 3 +- consensus/merkle_proof/src/lib.rs | 90 +++-- 6 files changed, 244 insertions(+), 219 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1ad2ab5ba7..6cc99a659fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3084,16 +3084,6 @@ dependencies = [ "syn 2.0.110", ] -[[package]] -name = "env_logger" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" -dependencies = [ - "log", - "regex", -] - [[package]] name = "environment" version = "0.1.2" @@ -5525,8 +5515,7 @@ dependencies = [ "network_utils", "parking_lot", "prometheus-client", - "quickcheck", - "quickcheck_macros", + "proptest", "rand 0.9.2", "regex", "serde", @@ -5833,8 +5822,7 @@ dependencies = [ "alloy-primitives", "ethereum_hashing", "fixed_bytes", - "quickcheck", - "quickcheck_macros", + "proptest", "safe_arith", ] @@ -7222,28 +7210,6 @@ dependencies = [ "unsigned-varint 0.8.0", ] -[[package]] -name = "quickcheck" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" -dependencies = [ - "env_logger", - "log", - "rand 0.8.5", -] - -[[package]] -name = "quickcheck_macros" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f71ee38b42f8459a88d3362be6f9b841ad2d5421844f61eb1c59c11bff3ac14a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.110", -] - [[package]] name = "quinn" version = "0.11.9" diff --git a/Cargo.toml b/Cargo.toml index 713fbf25d8c..f4ce67d0bf8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -201,9 +201,8 @@ parking_lot = "0.12" paste = "1" pretty_reqwest_error = { path = 
"common/pretty_reqwest_error" } prometheus = { version = "0.13", default-features = false } +proptest = "1" proto_array = { path = "consensus/proto_array" } -quickcheck = "1" -quickcheck_macros = "1" quote = "1" r2d2 = "0.8" rand = "0.9.0" diff --git a/beacon_node/lighthouse_network/Cargo.toml b/beacon_node/lighthouse_network/Cargo.toml index 035452e4b2f..6963b7c2dd2 100644 --- a/beacon_node/lighthouse_network/Cargo.toml +++ b/beacon_node/lighthouse_network/Cargo.toml @@ -72,6 +72,5 @@ features = [ [dev-dependencies] async-channel = { workspace = true } logging = { workspace = true } -quickcheck = { workspace = true } -quickcheck_macros = { workspace = true } +proptest = { workspace = true } tempfile = { workspace = true } diff --git a/beacon_node/lighthouse_network/src/peer_manager/mod.rs b/beacon_node/lighthouse_network/src/peer_manager/mod.rs index ad16bb0421c..dfa8b374e9c 100644 --- a/beacon_node/lighthouse_network/src/peer_manager/mod.rs +++ b/beacon_node/lighthouse_network/src/peer_manager/mod.rs @@ -2975,8 +2975,7 @@ mod tests { use crate::peer_manager::tests::build_peer_manager_with_trusted_peers; use crate::rpc::{MetaData, MetaDataV3}; use libp2p::PeerId; - use quickcheck::{Arbitrary, Gen, TestResult}; - use quickcheck_macros::quickcheck; + use proptest::prelude::*; use std::collections::HashSet; use tokio::runtime::Runtime; use types::{DataColumnSubnetId, Unsigned}; @@ -2994,159 +2993,202 @@ mod tests { custody_subnets: HashSet, } - impl Arbitrary for PeerCondition { - fn arbitrary(g: &mut Gen) -> Self { - let attestation_net_bitfield = { - let len = ::SubnetBitfieldLength::to_usize(); - let mut bitfield = Vec::with_capacity(len); - for _ in 0..len { - bitfield.push(bool::arbitrary(g)); - } - bitfield - }; - - let sync_committee_net_bitfield = { - let len = ::SyncCommitteeSubnetCount::to_usize(); - let mut bitfield = Vec::with_capacity(len); - for _ in 0..len { - bitfield.push(bool::arbitrary(g)); - } - bitfield - }; - - let spec = E::default_spec(); - 
let custody_subnets = { - let total_subnet_count = spec.data_column_sidecar_subnet_count; - let custody_subnet_count = u64::arbitrary(g) % (total_subnet_count + 1); // 0 to 128 - (spec.custody_requirement..total_subnet_count) - .filter(|_| bool::arbitrary(g)) - .map(DataColumnSubnetId::new) - .take(custody_subnet_count as usize) - .collect() - }; - - PeerCondition { - peer_id: PeerId::random(), - outgoing: bool::arbitrary(g), - attestation_net_bitfield, - sync_committee_net_bitfield, - score: f64::arbitrary(g), - trusted: bool::arbitrary(g), - gossipsub_score: f64::arbitrary(g), - custody_subnets, - } - } - } - - #[quickcheck] - fn prune_excess_peers(peer_conditions: Vec) -> TestResult { - let target_peer_count = DEFAULT_TARGET_PEERS; + fn peer_condition_strategy() -> impl Strategy { + let attestation_len = ::SubnetBitfieldLength::to_usize(); + let sync_committee_len = ::SyncCommitteeSubnetCount::to_usize(); let spec = E::default_spec(); - if peer_conditions.len() < target_peer_count { - return TestResult::discard(); - } - let trusted_peers: Vec<_> = peer_conditions - .iter() - .filter_map(|p| if p.trusted { Some(p.peer_id) } else { None }) - .collect(); - // If we have a high percentage of trusted peers, it is very difficult to reason about - // the expected results of the pruning. 
- if trusted_peers.len() > peer_conditions.len() / 3_usize { - return TestResult::discard(); - } - let rt = Runtime::new().unwrap(); - - rt.block_on(async move { - // Collect all the trusted peers - let mut peer_manager = - build_peer_manager_with_trusted_peers(trusted_peers, target_peer_count).await; + let total_subnet_count = spec.data_column_sidecar_subnet_count; + let custody_requirement = spec.custody_requirement; + + // Create the pool of available subnet IDs + let available_subnets: Vec = (custody_requirement..total_subnet_count).collect(); + let max_custody_subnets = available_subnets.len(); + + // Trusted peer probability constants - 1 in 5 peers should be trusted (20%) + const TRUSTED_PEER_WEIGHT_FALSE: u32 = 4; + const TRUSTED_PEER_WEIGHT_TRUE: u32 = 1; + + ( + proptest::collection::vec(any::(), attestation_len), + proptest::collection::vec(any::(), sync_committee_len), + any::(), + any::(), + any::(), + // Weight trusted peers to avoid test rejection due to too many trusted peers + prop_oneof![ + TRUSTED_PEER_WEIGHT_FALSE => Just(false), + TRUSTED_PEER_WEIGHT_TRUE => Just(true), + ], + 0..=max_custody_subnets, + ) + .prop_flat_map( + move |( + attestation_net_bitfield, + sync_committee_net_bitfield, + score, + outgoing, + gossipsub_score, + trusted, + custody_subnet_count, + )| { + // Use proptest's subsequence to select a random subset of subnets + let custody_subnets_strategy = proptest::sample::subsequence( + available_subnets.clone(), + custody_subnet_count, + ); - // Create peers based on the randomly generated conditions. 
- for condition in &peer_conditions { - let mut attnets = crate::types::EnrAttestationBitfield::::new(); - let mut syncnets = crate::types::EnrSyncCommitteeBitfield::::new(); + ( + Just(attestation_net_bitfield), + Just(sync_committee_net_bitfield), + Just(score), + Just(outgoing), + Just(gossipsub_score), + Just(trusted), + custody_subnets_strategy, + ) + }, + ) + .prop_map( + |( + attestation_net_bitfield, + sync_committee_net_bitfield, + score, + outgoing, + gossipsub_score, + trusted, + custody_subnets_vec, + )| { + let custody_subnets: HashSet = custody_subnets_vec + .into_iter() + .map(DataColumnSubnetId::new) + .collect(); + + PeerCondition { + peer_id: PeerId::random(), + outgoing, + attestation_net_bitfield, + sync_committee_net_bitfield, + score, + trusted, + gossipsub_score, + custody_subnets, + } + }, + ) + } - if condition.outgoing { - peer_manager.inject_connect_outgoing( - &condition.peer_id, - "/ip4/0.0.0.0".parse().unwrap(), - None, - ); - } else { - peer_manager.inject_connect_ingoing( - &condition.peer_id, - "/ip4/0.0.0.0".parse().unwrap(), - None, - ); - } + // Upper bound for testing peer pruning - we test with at least the target number + // and up to 50% more than the target to verify pruning behavior. + const MAX_TEST_PEERS: usize = 300; - for (i, value) in condition.attestation_net_bitfield.iter().enumerate() { - attnets.set(i, *value).unwrap(); - } + proptest! 
{ + #[test] + fn prune_excess_peers(peer_conditions in proptest::collection::vec(peer_condition_strategy(), DEFAULT_TARGET_PEERS..=MAX_TEST_PEERS)) { + let target_peer_count = DEFAULT_TARGET_PEERS; + let spec = E::default_spec(); - for (i, value) in condition.sync_committee_net_bitfield.iter().enumerate() { - syncnets.set(i, *value).unwrap(); - } + let trusted_peers: Vec<_> = peer_conditions + .iter() + .filter_map(|p| if p.trusted { Some(p.peer_id) } else { None }) + .collect(); + // If we have a high percentage of trusted peers, it is very difficult to reason about + // the expected results of the pruning. + prop_assume!(trusted_peers.len() <= peer_conditions.len() / 3_usize); + + let rt = Runtime::new().unwrap(); + + let result = rt.block_on(async move { + // Collect all the trusted peers + let mut peer_manager = + build_peer_manager_with_trusted_peers(trusted_peers, target_peer_count).await; + + // Create peers based on the randomly generated conditions. + for condition in &peer_conditions { + let mut attnets = crate::types::EnrAttestationBitfield::::new(); + let mut syncnets = crate::types::EnrSyncCommitteeBitfield::::new(); + + if condition.outgoing { + peer_manager.inject_connect_outgoing( + &condition.peer_id, + "/ip4/0.0.0.0".parse().unwrap(), + None, + ); + } else { + peer_manager.inject_connect_ingoing( + &condition.peer_id, + "/ip4/0.0.0.0".parse().unwrap(), + None, + ); + } - let subnets_per_custody_group = - spec.data_column_sidecar_subnet_count / spec.number_of_custody_groups; - let metadata = MetaDataV3 { - seq_number: 0, - attnets, - syncnets, - custody_group_count: condition.custody_subnets.len() as u64 - / subnets_per_custody_group, - }; + for (i, value) in condition.attestation_net_bitfield.iter().enumerate() { + attnets.set(i, *value).unwrap(); + } - let mut peer_db = peer_manager.network_globals.peers.write(); - let peer_info = peer_db.peer_info_mut(&condition.peer_id).unwrap(); - peer_info.set_meta_data(MetaData::V3(metadata)); - 
peer_info.set_gossipsub_score(condition.gossipsub_score); - peer_info.add_to_score(condition.score); - peer_info.set_custody_subnets(condition.custody_subnets.clone()); + for (i, value) in condition.sync_committee_net_bitfield.iter().enumerate() { + syncnets.set(i, *value).unwrap(); + } - for subnet in peer_info.long_lived_subnets() { - peer_db.add_subscription(&condition.peer_id, subnet); + let subnets_per_custody_group = + spec.data_column_sidecar_subnet_count / spec.number_of_custody_groups; + let metadata = MetaDataV3 { + seq_number: 0, + attnets, + syncnets, + custody_group_count: condition.custody_subnets.len() as u64 + / subnets_per_custody_group, + }; + + let mut peer_db = peer_manager.network_globals.peers.write(); + let peer_info = peer_db.peer_info_mut(&condition.peer_id).unwrap(); + peer_info.set_meta_data(MetaData::V3(metadata)); + peer_info.set_gossipsub_score(condition.gossipsub_score); + peer_info.add_to_score(condition.score); + peer_info.set_custody_subnets(condition.custody_subnets.clone()); + + for subnet in peer_info.long_lived_subnets() { + peer_db.add_subscription(&condition.peer_id, subnet); + } } - } - - // Perform the heartbeat. - peer_manager.heartbeat(); - // The minimum number of connected peers cannot be less than the target peer count - // or submitted peers. + // Perform the heartbeat. + peer_manager.heartbeat(); + + // The minimum number of connected peers cannot be less than the target peer count + // or submitted peers. + + let expected_peer_count = target_peer_count.min(peer_conditions.len()); + // Trusted peers could make this larger however. 
+ let no_of_trusted_peers = peer_conditions + .iter() + .filter(|condition| condition.trusted) + .count(); + let expected_peer_count = expected_peer_count.max(no_of_trusted_peers); + + let target_peer_condition = + peer_manager.network_globals.connected_or_dialing_peers() + == expected_peer_count; + + // It could be that we reach our target outbound limit and are unable to prune any + // extra, which violates the target_peer_condition. + let outbound_peers = peer_manager.network_globals.connected_outbound_only_peers(); + let hit_outbound_limit = outbound_peers == peer_manager.target_outbound_peers(); + + // No trusted peers should be disconnected + let trusted_peer_disconnected = peer_conditions.iter().any(|condition| { + condition.trusted + && !peer_manager + .network_globals + .peers + .read() + .is_connected(&condition.peer_id) + }); - let expected_peer_count = target_peer_count.min(peer_conditions.len()); - // Trusted peers could make this larger however. - let no_of_trusted_peers = peer_conditions - .iter() - .filter(|condition| condition.trusted) - .count(); - let expected_peer_count = expected_peer_count.max(no_of_trusted_peers); - - let target_peer_condition = - peer_manager.network_globals.connected_or_dialing_peers() - == expected_peer_count; - - // It could be that we reach our target outbound limit and are unable to prune any - // extra, which violates the target_peer_condition. 
- let outbound_peers = peer_manager.network_globals.connected_outbound_only_peers(); - let hit_outbound_limit = outbound_peers == peer_manager.target_outbound_peers(); - - // No trusted peers should be disconnected - let trusted_peer_disconnected = peer_conditions.iter().any(|condition| { - condition.trusted - && !peer_manager - .network_globals - .peers - .read() - .is_connected(&condition.peer_id) + (target_peer_condition || hit_outbound_limit) && !trusted_peer_disconnected }); - TestResult::from_bool( - (target_peer_condition || hit_outbound_limit) && !trusted_peer_disconnected, - ) - }) + prop_assert!(result); + } } } diff --git a/consensus/merkle_proof/Cargo.toml b/consensus/merkle_proof/Cargo.toml index d750c054060..5ba8a1b949e 100644 --- a/consensus/merkle_proof/Cargo.toml +++ b/consensus/merkle_proof/Cargo.toml @@ -14,5 +14,4 @@ fixed_bytes = { workspace = true } safe_arith = { workspace = true } [dev-dependencies] -quickcheck = { workspace = true } -quickcheck_macros = { workspace = true } +proptest = { workspace = true } diff --git a/consensus/merkle_proof/src/lib.rs b/consensus/merkle_proof/src/lib.rs index bf075ec15a5..494c73d05ce 100644 --- a/consensus/merkle_proof/src/lib.rs +++ b/consensus/merkle_proof/src/lib.rs @@ -413,50 +413,70 @@ impl From for MerkleTreeError { #[cfg(test)] mod tests { use super::*; - use quickcheck::TestResult; - use quickcheck_macros::quickcheck; - - /// Check that we can: - /// 1. Build a MerkleTree from arbitrary leaves and an arbitrary depth. - /// 2. Generate valid proofs for all of the leaves of this MerkleTree. 
- #[quickcheck] - fn quickcheck_create_and_verify(int_leaves: Vec, depth: usize) -> TestResult { - if depth > MAX_TREE_DEPTH || int_leaves.len() > 2usize.pow(depth as u32) { - return TestResult::discard(); - } - let leaves: Vec<_> = int_leaves.into_iter().map(H256::from_low_u64_be).collect(); - let merkle_tree = MerkleTree::create(&leaves, depth); - let merkle_root = merkle_tree.hash(); + use proptest::prelude::*; + + // Limit test depth to avoid generating huge trees. Depth 10 = 1024 max leaves. + const TEST_MAX_DEPTH: usize = 10; - let proofs_ok = (0..leaves.len()).all(|i| { - let (leaf, branch) = merkle_tree - .generate_proof(i, depth) - .expect("should generate proof"); - leaf == leaves[i] && verify_merkle_proof(leaf, &branch, depth, i, merkle_root) - }); + fn merkle_leaves_strategy(max_depth: usize) -> impl Strategy, usize)> { + (0..=max_depth).prop_flat_map(|depth| { + let max_leaves = 2usize.pow(depth as u32); + ( + proptest::collection::vec(any::(), 0..=max_leaves), + Just(depth), + ) + }) + } - TestResult::from_bool(proofs_ok) + fn merkle_leaves_strategy_min_depth( + max_depth: usize, + min_depth: usize, + ) -> impl Strategy, usize)> { + (min_depth..=max_depth).prop_flat_map(|depth| { + let max_leaves = 2usize.pow(depth as u32); + ( + proptest::collection::vec(any::(), 0..=max_leaves), + Just(depth), + ) + }) } - #[quickcheck] - fn quickcheck_push_leaf_and_verify(int_leaves: Vec, depth: usize) -> TestResult { - if depth == 0 || depth > MAX_TREE_DEPTH || int_leaves.len() > 2usize.pow(depth as u32) { - return TestResult::discard(); + proptest::proptest! { + /// Check that we can: + /// 1. Build a MerkleTree from arbitrary leaves and an arbitrary depth. + /// 2. Generate valid proofs for all of the leaves of this MerkleTree. 
+ #[test] + fn proptest_create_and_verify((int_leaves, depth) in merkle_leaves_strategy(TEST_MAX_DEPTH)) { + let leaves: Vec<_> = int_leaves.into_iter().map(H256::from_low_u64_be).collect(); + let merkle_tree = MerkleTree::create(&leaves, depth); + let merkle_root = merkle_tree.hash(); + + let proofs_ok = (0..leaves.len()).all(|i| { + let (leaf, branch) = merkle_tree + .generate_proof(i, depth) + .expect("should generate proof"); + leaf == leaves[i] && verify_merkle_proof(leaf, &branch, depth, i, merkle_root) + }); + + proptest::prop_assert!(proofs_ok); } - let leaves_iter = int_leaves.into_iter().map(H256::from_low_u64_be); - let mut merkle_tree = MerkleTree::create(&[], depth); + #[test] + fn proptest_push_leaf_and_verify((int_leaves, depth) in merkle_leaves_strategy_min_depth(TEST_MAX_DEPTH, 1)) { + let leaves_iter = int_leaves.into_iter().map(H256::from_low_u64_be); + let mut merkle_tree = MerkleTree::create(&[], depth); - let proofs_ok = leaves_iter.enumerate().all(|(i, leaf)| { - assert_eq!(merkle_tree.push_leaf(leaf, depth), Ok(())); - let (stored_leaf, branch) = merkle_tree - .generate_proof(i, depth) - .expect("should generate proof"); - stored_leaf == leaf && verify_merkle_proof(leaf, &branch, depth, i, merkle_tree.hash()) - }); + let proofs_ok = leaves_iter.enumerate().all(|(i, leaf)| { + assert_eq!(merkle_tree.push_leaf(leaf, depth), Ok(())); + let (stored_leaf, branch) = merkle_tree + .generate_proof(i, depth) + .expect("should generate proof"); + stored_leaf == leaf && verify_merkle_proof(leaf, &branch, depth, i, merkle_tree.hash()) + }); - TestResult::from_bool(proofs_ok) + proptest::prop_assert!(proofs_ok); + } } #[test] From e291955400fe9c80208525723c7d07a297e4ac11 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 27 Nov 2025 00:53:57 -0500 Subject: [PATCH 41/74] Integration tests ergonomics (#7836) Fixes #7785 - [x] Update all integration tests with >1 files to follow the `main` pattern. 
- [x] `crypto/eth2_key_derivation/tests` - [x] `crypto/eth2_keystore/tests` - [x] `crypto/eth2_wallet/tests` - [x] `slasher/tests` - [x] `common/eth2_interop_keypairs/tests` - [x] `beacon_node/lighthouse_network/tests` - [x] Set `debug_assertions` to false on `.vscode/settings.json`. - [x] Document how to make rust analyzer work on integration tests files. In `book/src/contributing_setup.md` --- Tracking a `rust-analyzer.toml` with settings like the one provided in `.vscode/settings.json` would be nicer. But this is not possible yet. For now, that config should be a good enough indicator for devs using editors different to VSCode. Co-Authored-By: Daniel Ramirez-Chiquillo Co-Authored-By: Michael Sproul --- .vscode/settings.json | 5 +++ beacon_node/lighthouse_network/Cargo.toml | 5 +++ beacon_node/lighthouse_network/tests/main.rs | 2 + .../lighthouse_network/tests/rpc_tests.rs | 5 +-- book/src/contributing_setup.md | 41 +++++++++++++++++++ common/eth2_interop_keypairs/Cargo.toml | 5 +++ common/eth2_interop_keypairs/tests/main.rs | 2 + crypto/eth2_key_derivation/Cargo.toml | 5 +++ crypto/eth2_key_derivation/tests/main.rs | 2 + crypto/eth2_keystore/Cargo.toml | 5 +++ crypto/eth2_keystore/tests/main.rs | 4 ++ crypto/eth2_wallet/Cargo.toml | 5 +++ crypto/eth2_wallet/tests/main.rs | 3 ++ slasher/Cargo.toml | 5 +++ slasher/tests/main.rs | 5 +++ wordlist.txt | 2 + 16 files changed, 98 insertions(+), 3 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 beacon_node/lighthouse_network/tests/main.rs create mode 100644 common/eth2_interop_keypairs/tests/main.rs create mode 100644 crypto/eth2_key_derivation/tests/main.rs create mode 100644 crypto/eth2_keystore/tests/main.rs create mode 100644 crypto/eth2_wallet/tests/main.rs create mode 100644 slasher/tests/main.rs diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000000..65447c4390a --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + 
"rust-analyzer.cargo.cfgs": [ + "!debug_assertions" + ] +} diff --git a/beacon_node/lighthouse_network/Cargo.toml b/beacon_node/lighthouse_network/Cargo.toml index 6963b7c2dd2..a6dd276c197 100644 --- a/beacon_node/lighthouse_network/Cargo.toml +++ b/beacon_node/lighthouse_network/Cargo.toml @@ -3,6 +3,7 @@ name = "lighthouse_network" version = "0.2.0" authors = ["Sigma Prime "] edition = { workspace = true } +autotests = false [features] libp2p-websocket = [] @@ -74,3 +75,7 @@ async-channel = { workspace = true } logging = { workspace = true } proptest = { workspace = true } tempfile = { workspace = true } + +[[test]] +name = "lighthouse_network_tests" +path = "tests/main.rs" diff --git a/beacon_node/lighthouse_network/tests/main.rs b/beacon_node/lighthouse_network/tests/main.rs new file mode 100644 index 00000000000..2ed0eabaff7 --- /dev/null +++ b/beacon_node/lighthouse_network/tests/main.rs @@ -0,0 +1,2 @@ +mod common; +mod rpc_tests; diff --git a/beacon_node/lighthouse_network/tests/rpc_tests.rs b/beacon_node/lighthouse_network/tests/rpc_tests.rs index 81d08764a5f..60e3e3da972 100644 --- a/beacon_node/lighthouse_network/tests/rpc_tests.rs +++ b/beacon_node/lighthouse_network/tests/rpc_tests.rs @@ -1,9 +1,8 @@ #![cfg(test)] -mod common; - +use crate::common; use crate::common::spec_with_all_forks_enabled; -use common::{Protocol, build_tracing_subscriber}; +use crate::common::{Protocol, build_tracing_subscriber}; use lighthouse_network::rpc::{RequestType, methods::*}; use lighthouse_network::service::api_types::AppRequestId; use lighthouse_network::{NetworkEvent, ReportSource, Response}; diff --git a/book/src/contributing_setup.md b/book/src/contributing_setup.md index b817faad879..958e8f71f6e 100644 --- a/book/src/contributing_setup.md +++ b/book/src/contributing_setup.md @@ -71,6 +71,47 @@ $ cargo nextest run -p safe_arith Summary [ 0.012s] 8 tests run: 8 passed, 0 skipped ``` +### Integration tests + +Due to the size and complexity of the test suite, 
Lighthouse uses a pattern that differs from how +[integration tests are usually defined](https://doc.rust-lang.org/rust-by-example/testing/integration_testing.html). +This pattern helps manage large test suites more effectively and ensures tests only run in release +mode to avoid stack overflow issues. + +#### The "main pattern" + +For packages with integration tests that require more than one file, Lighthouse uses the following +structure: + +- A `main.rs` file is defined at `package/tests/main.rs` that declares other test files as modules +- In `package/Cargo.toml`, integration tests are explicitly configured: + + ```toml + [package] + autotests = false + + [[test]] + name = "package_tests" + path = "tests/main.rs" + ``` + +#### Rust Analyzer configuration + +This pattern, combined with `#![cfg(not(debug_assertions))]` directives in test files (which +prevent tests from running in debug mode), causes Rust Analyzer to not provide IDE services like +autocomplete and error checking in integration test files by default. + +To enable IDE support for these test files, configure Rust Analyzer to disable debug assertions. 
+For VSCode users, this is already configured in the repository's `.vscode/settings.json` file: + +```json +{ + "rust-analyzer.cargo.cfgs": [ + "!debug_assertions" + ] +} +``` + ### test_logger The test_logger, located in `/common/logging/` can be used to create a `Logger` that by diff --git a/common/eth2_interop_keypairs/Cargo.toml b/common/eth2_interop_keypairs/Cargo.toml index c19b32014e1..309ff233e62 100644 --- a/common/eth2_interop_keypairs/Cargo.toml +++ b/common/eth2_interop_keypairs/Cargo.toml @@ -3,6 +3,7 @@ name = "eth2_interop_keypairs" version = "0.2.0" authors = ["Paul Hauner "] edition = { workspace = true } +autotests = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] @@ -15,3 +16,7 @@ serde_yaml = { workspace = true } [dev-dependencies] base64 = "0.13.0" + +[[test]] +name = "eth2_interop_keypairs_tests" +path = "tests/main.rs" diff --git a/common/eth2_interop_keypairs/tests/main.rs b/common/eth2_interop_keypairs/tests/main.rs new file mode 100644 index 00000000000..4ee50127f29 --- /dev/null +++ b/common/eth2_interop_keypairs/tests/main.rs @@ -0,0 +1,2 @@ +mod from_file; +mod generation; diff --git a/crypto/eth2_key_derivation/Cargo.toml b/crypto/eth2_key_derivation/Cargo.toml index a893a9360dc..b8976b8ccb3 100644 --- a/crypto/eth2_key_derivation/Cargo.toml +++ b/crypto/eth2_key_derivation/Cargo.toml @@ -3,6 +3,7 @@ name = "eth2_key_derivation" version = "0.1.0" authors = ["Paul Hauner "] edition = { workspace = true } +autotests = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] @@ -14,3 +15,7 @@ zeroize = { workspace = true } [dev-dependencies] hex = { workspace = true } + +[[test]] +name = "eth2_key_derivation_tests" +path = "tests/main.rs" diff --git a/crypto/eth2_key_derivation/tests/main.rs b/crypto/eth2_key_derivation/tests/main.rs new file mode 100644 index 00000000000..a239eaa6185 --- /dev/null +++ 
b/crypto/eth2_key_derivation/tests/main.rs @@ -0,0 +1,2 @@ +mod eip2333_vectors; +mod tests; diff --git a/crypto/eth2_keystore/Cargo.toml b/crypto/eth2_keystore/Cargo.toml index 61d2722efbd..290a10adc9a 100644 --- a/crypto/eth2_keystore/Cargo.toml +++ b/crypto/eth2_keystore/Cargo.toml @@ -3,6 +3,7 @@ name = "eth2_keystore" version = "0.1.0" authors = ["Pawan Dhananjay "] edition = { workspace = true } +autotests = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] @@ -24,3 +25,7 @@ zeroize = { workspace = true } [dev-dependencies] tempfile = { workspace = true } + +[[test]] +name = "eth2_keystore_tests" +path = "tests/main.rs" diff --git a/crypto/eth2_keystore/tests/main.rs b/crypto/eth2_keystore/tests/main.rs new file mode 100644 index 00000000000..79b31d5eda5 --- /dev/null +++ b/crypto/eth2_keystore/tests/main.rs @@ -0,0 +1,4 @@ +mod eip2335_vectors; +mod json; +mod params; +mod tests; diff --git a/crypto/eth2_wallet/Cargo.toml b/crypto/eth2_wallet/Cargo.toml index 5327bdc163b..0d454016a6b 100644 --- a/crypto/eth2_wallet/Cargo.toml +++ b/crypto/eth2_wallet/Cargo.toml @@ -3,6 +3,7 @@ name = "eth2_wallet" version = "0.1.0" authors = ["Paul Hauner "] edition = { workspace = true } +autotests = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] @@ -18,3 +19,7 @@ uuid = { workspace = true } [dev-dependencies] hex = { workspace = true } tempfile = { workspace = true } + +[[test]] +name = "eth2_wallet_tests" +path = "tests/main.rs" diff --git a/crypto/eth2_wallet/tests/main.rs b/crypto/eth2_wallet/tests/main.rs new file mode 100644 index 00000000000..d59ccff6392 --- /dev/null +++ b/crypto/eth2_wallet/tests/main.rs @@ -0,0 +1,3 @@ +mod eip2386_vectors; +mod json; +mod tests; diff --git a/slasher/Cargo.toml b/slasher/Cargo.toml index cca55bcef88..94d048ef72e 100644 --- a/slasher/Cargo.toml +++ b/slasher/Cargo.toml @@ -3,6 +3,7 @@ name = 
"slasher" version = "0.1.0" authors = ["Michael Sproul "] edition = { workspace = true } +autotests = false [features] default = ["lmdb"] @@ -43,3 +44,7 @@ types = { workspace = true } maplit = { workspace = true } rayon = { workspace = true } tempfile = { workspace = true } + +[[test]] +name = "slasher_tests" +path = "tests/main.rs" diff --git a/slasher/tests/main.rs b/slasher/tests/main.rs new file mode 100644 index 00000000000..fb78dcb917d --- /dev/null +++ b/slasher/tests/main.rs @@ -0,0 +1,5 @@ +mod attester_slashings; +mod backend; +mod proposer_slashings; +mod random; +mod wrap_around; diff --git a/wordlist.txt b/wordlist.txt index 6d6906f6a77..e0e1fe7d730 100644 --- a/wordlist.txt +++ b/wordlist.txt @@ -2,6 +2,7 @@ allocator APIs ARMv AUR +autocomplete Backends Backfilling Beaconcha @@ -110,6 +111,7 @@ Validator VC VCs VPN +VSCode WalletConnect Withdrawable WSL From 847fa3f034ec898e66da31cb1686ccaca7b39679 Mon Sep 17 00:00:00 2001 From: Mac L Date: Fri, 28 Nov 2025 01:53:46 +0400 Subject: [PATCH 42/74] Remove `context_deserialize` and import from crates.io (#8172) Use the recently published `context_deserialize` and remove it from Lighthouse Co-Authored-By: Mac L Co-Authored-By: Michael Sproul --- Cargo.lock | 46 +++---- Cargo.toml | 21 ++-- .../context_deserialize/Cargo.toml | 17 --- .../context_deserialize/src/impls/core.rs | 103 --------------- .../context_deserialize/src/impls/milhouse.rs | 45 ------- .../context_deserialize/src/impls/mod.rs | 7 -- .../context_deserialize/src/impls/ssz.rs | 51 -------- .../context_deserialize/src/lib.rs | 13 -- .../context_deserialize_derive/Cargo.toml | 16 --- .../context_deserialize_derive/src/lib.rs | 118 ------------------ .../tests/context_deserialize_derive.rs | 93 -------------- 11 files changed, 32 insertions(+), 498 deletions(-) delete mode 100644 consensus/context_deserialize/context_deserialize/Cargo.toml delete mode 100644 consensus/context_deserialize/context_deserialize/src/impls/core.rs delete mode 100644 
consensus/context_deserialize/context_deserialize/src/impls/milhouse.rs delete mode 100644 consensus/context_deserialize/context_deserialize/src/impls/mod.rs delete mode 100644 consensus/context_deserialize/context_deserialize/src/impls/ssz.rs delete mode 100644 consensus/context_deserialize/context_deserialize/src/lib.rs delete mode 100644 consensus/context_deserialize/context_deserialize_derive/Cargo.toml delete mode 100644 consensus/context_deserialize/context_deserialize_derive/src/lib.rs delete mode 100644 consensus/context_deserialize/context_deserialize_derive/tests/context_deserialize_derive.rs diff --git a/Cargo.lock b/Cargo.lock index 6cc99a659fa..e3730f132b3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2076,22 +2076,21 @@ checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" [[package]] name = "context_deserialize" -version = "0.1.0" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5f9ea0a0ae2de4943f5ca71590b6dbd0b952475f0a0cafb30a470cec78c8b9" dependencies = [ "context_deserialize_derive", - "milhouse", "serde", - "ssz_types", ] [[package]] name = "context_deserialize_derive" -version = "0.1.0" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c57b2db1e4e3ed804dcc49894a144b68fe6c754b8f545eb1dda7ad3c7dbe7e6" dependencies = [ - "context_deserialize", "quote", - "serde", - "serde_json", "syn 1.0.109", ] @@ -3258,9 +3257,9 @@ dependencies = [ [[package]] name = "ethereum_hashing" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c853bd72c9e5787f8aafc3df2907c2ed03cff3150c3acd94e2e53a98ab70a8ab" +checksum = "5aa93f58bb1eb3d1e556e4f408ef1dac130bad01ac37db4e7ade45de40d1c86a" dependencies = [ "cpufeatures", "ring", @@ -3282,12 +3281,13 @@ dependencies = [ [[package]] name = "ethereum_ssz" -version = "0.9.1" +version = "0.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" +checksum = "7e8cd8c4f47dfb947dbfe3cdf2945ae1da808dbedc592668658e827a12659ba1" dependencies = [ "alloy-primitives", "arbitrary", + "context_deserialize", "ethereum_serde_utils", "itertools 0.13.0", "serde", @@ -3298,9 +3298,9 @@ dependencies = [ [[package]] name = "ethereum_ssz_derive" -version = "0.9.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" +checksum = "78d247bc40823c365a62e572441a8f8b12df03f171713f06bc76180fcd56ab71" dependencies = [ "darling 0.20.11", "proc-macro2", @@ -5858,12 +5858,13 @@ dependencies = [ [[package]] name = "milhouse" -version = "0.7.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bdb104e38d3a8c5ffb7e9d2c43c522e6bcc34070edbadba565e722f0dee56c7" +checksum = "259dd9da2ae5e0278b95da0b7ecef9c18c309d0a2d9e6db57ed33b9e8910c5e7" dependencies = [ "alloy-primitives", "arbitrary", + "context_deserialize", "educe", "ethereum_hashing", "ethereum_ssz", @@ -8539,11 +8540,12 @@ dependencies = [ [[package]] name = "ssz_types" -version = "0.12.2" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "704671195db617afa3d919da8f220f2535f20d0fa8dad96a1c27a38a5f8f6e9c" +checksum = "1fc20a89bab2dabeee65e9c9eb96892dc222c23254b401e1319b85efd852fa31" dependencies = [ "arbitrary", + "context_deserialize", "ethereum_serde_utils", "ethereum_ssz", "itertools 0.14.0", @@ -9441,9 +9443,9 @@ dependencies = [ [[package]] name = "tree_hash" -version = "0.10.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee44f4cef85f88b4dea21c0b1f58320bdf35715cf56d840969487cff00613321" +checksum = "2db21caa355767db4fd6129876e5ae278a8699f4a6959b1e3e7aff610b532d52" dependencies = [ 
"alloy-primitives", "ethereum_hashing", @@ -9454,11 +9456,11 @@ dependencies = [ [[package]] name = "tree_hash_derive" -version = "0.10.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bee2ea1551f90040ab0e34b6fb7f2fa3bad8acc925837ac654f2c78a13e3089" +checksum = "711cc655fcbb48384a87dc2bf641b991a15c5ad9afc3caa0b1ab1df3b436f70f" dependencies = [ - "darling 0.20.11", + "darling 0.21.3", "proc-macro2", "quote", "syn 2.0.110", diff --git a/Cargo.toml b/Cargo.toml index f4ce67d0bf8..35504c22b76 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,8 +47,6 @@ members = [ "common/validator_dir", "common/warp_utils", "common/workspace_members", - "consensus/context_deserialize/context_deserialize", - "consensus/context_deserialize/context_deserialize_derive", "consensus/fixed_bytes", "consensus/fork_choice", "consensus/int_to_bytes", @@ -122,10 +120,7 @@ clap = { version = "4.5.4", features = ["derive", "cargo", "wrap_help"] } clap_utils = { path = "common/clap_utils" } compare_fields = "0.1" console-subscriber = "0.4" -context_deserialize = { path = "consensus/context_deserialize/context_deserialize", features = [ - "all", -] } -context_deserialize_derive = { path = "consensus/context_deserialize/context_deserialize_derive" } +context_deserialize = "0.2" criterion = "0.5" delay_map = "0.4" deposit_contract = { path = "common/deposit_contract" } @@ -143,10 +138,10 @@ eth2_key_derivation = { path = "crypto/eth2_key_derivation" } eth2_keystore = { path = "crypto/eth2_keystore" } eth2_network_config = { path = "common/eth2_network_config" } eth2_wallet = { path = "crypto/eth2_wallet" } -ethereum_hashing = "0.7.0" +ethereum_hashing = "0.8.0" ethereum_serde_utils = "0.8.0" -ethereum_ssz = "0.9.0" -ethereum_ssz_derive = "0.9.0" +ethereum_ssz = { version = "0.10.0", features = ["context_deserialize"] } +ethereum_ssz_derive = "0.10.0" execution_layer = { path = "beacon_node/execution_layer" } exit-future = "0.2" filesystem = { path = 
"common/filesystem" } @@ -183,7 +178,7 @@ malloc_utils = { path = "common/malloc_utils" } maplit = "1" merkle_proof = { path = "consensus/merkle_proof" } metrics = { path = "common/metrics" } -milhouse = { version = "0.7", default-features = false } +milhouse = { version = "0.9", default-features = false, features = ["context_deserialize"] } mockall = "0.13" mockall_double = "0.3" mockito = "1.5.0" @@ -232,7 +227,7 @@ slashing_protection = { path = "validator_client/slashing_protection" } slot_clock = { path = "common/slot_clock" } smallvec = { version = "1.11.2", features = ["arbitrary"] } snap = "1" -ssz_types = "0.12.2" +ssz_types = { version = "0.14.0", features = ["context_deserialize"] } state_processing = { path = "consensus/state_processing" } store = { path = "beacon_node/store" } strum = { version = "0.24", features = ["derive"] } @@ -257,8 +252,8 @@ tracing-core = "0.1" tracing-log = "0.2" tracing-opentelemetry = "0.31.0" tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } -tree_hash = "0.10.0" -tree_hash_derive = "0.10.0" +tree_hash = "0.12.0" +tree_hash_derive = "0.12.0" types = { path = "consensus/types" } url = "2" uuid = { version = "0.8", features = ["serde", "v4"] } diff --git a/consensus/context_deserialize/context_deserialize/Cargo.toml b/consensus/context_deserialize/context_deserialize/Cargo.toml deleted file mode 100644 index 0e4a97b9ae3..00000000000 --- a/consensus/context_deserialize/context_deserialize/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "context_deserialize" -version = "0.1.0" -edition = "2021" - -[features] -default = ["derive"] -derive = ["dep:context_deserialize_derive"] -milhouse = ["dep:milhouse"] -ssz = ["dep:ssz_types"] -all = ["derive", "milhouse", "ssz"] - -[dependencies] -context_deserialize_derive = { version = "0.1.0", path = "../context_deserialize_derive", optional = true } -milhouse = { workspace = true, optional = true } -serde = { workspace = true } -ssz_types = { workspace 
= true, optional = true } diff --git a/consensus/context_deserialize/context_deserialize/src/impls/core.rs b/consensus/context_deserialize/context_deserialize/src/impls/core.rs deleted file mode 100644 index 803619365f1..00000000000 --- a/consensus/context_deserialize/context_deserialize/src/impls/core.rs +++ /dev/null @@ -1,103 +0,0 @@ -use crate::ContextDeserialize; -use serde::de::{Deserialize, DeserializeSeed, Deserializer, SeqAccess, Visitor}; -use std::marker::PhantomData; -use std::sync::Arc; - -impl<'de, C, T> ContextDeserialize<'de, T> for Arc -where - C: ContextDeserialize<'de, T>, -{ - fn context_deserialize(deserializer: D, context: T) -> Result - where - D: Deserializer<'de>, - { - Ok(Arc::new(C::context_deserialize(deserializer, context)?)) - } -} - -impl<'de, T, C> ContextDeserialize<'de, C> for Vec -where - T: ContextDeserialize<'de, C>, - C: Clone, -{ - fn context_deserialize(deserializer: D, context: C) -> Result - where - D: Deserializer<'de>, - { - // Our Visitor, which owns one copy of the context T - struct ContextVisitor { - context: T, - _marker: PhantomData, - } - - impl<'de, C, T> Visitor<'de> for ContextVisitor - where - C: ContextDeserialize<'de, T>, - T: Clone, - { - type Value = Vec; - - fn expecting(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - fmt.write_str("a sequence of context‐deserialized elements") - } - - fn visit_seq(self, mut seq: A) -> Result, A::Error> - where - A: SeqAccess<'de>, - { - let mut out = Vec::with_capacity(seq.size_hint().unwrap_or(0)); - // for each element, we clone the context and hand it to the seed - while let Some(elem) = seq.next_element_seed(ContextSeed { - context: self.context.clone(), - _marker: PhantomData, - })? 
{ - out.push(elem); - } - Ok(out) - } - } - - // A little seed that hands the deserializer + context into C::context_deserialize - struct ContextSeed { - context: C, - _marker: PhantomData, - } - - impl<'de, T, C> DeserializeSeed<'de> for ContextSeed - where - T: ContextDeserialize<'de, C>, - C: Clone, - { - type Value = T; - - fn deserialize(self, deserializer: D) -> Result - where - D: Deserializer<'de>, - { - T::context_deserialize(deserializer, self.context) - } - } - - deserializer.deserialize_seq(ContextVisitor { - context, - _marker: PhantomData, - }) - } -} - -macro_rules! trivial_deserialize { - ($($t:ty),* $(,)?) => { - $( - impl<'de, T> ContextDeserialize<'de, T> for $t { - fn context_deserialize(deserializer: D, _context: T) -> Result - where - D: Deserializer<'de>, - { - <$t>::deserialize(deserializer) - } - } - )* - }; -} - -trivial_deserialize!(bool, u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64); diff --git a/consensus/context_deserialize/context_deserialize/src/impls/milhouse.rs b/consensus/context_deserialize/context_deserialize/src/impls/milhouse.rs deleted file mode 100644 index 3b86f067a3e..00000000000 --- a/consensus/context_deserialize/context_deserialize/src/impls/milhouse.rs +++ /dev/null @@ -1,45 +0,0 @@ -use crate::ContextDeserialize; -use milhouse::{List, Value, Vector}; -use serde::de::Deserializer; -use ssz_types::typenum::Unsigned; - -impl<'de, C, T, N> ContextDeserialize<'de, C> for List -where - T: ContextDeserialize<'de, C> + Value, - N: Unsigned, - C: Clone, -{ - fn context_deserialize(deserializer: D, context: C) -> Result - where - D: Deserializer<'de>, - { - // First deserialize as a Vec. - // This is not the most efficient implementation as it allocates a temporary Vec. In future - // we could write a more performant implementation using `List::builder()`. - let vec = Vec::::context_deserialize(deserializer, context)?; - - // Then convert to List, which will check the length. 
- List::new(vec) - .map_err(|e| serde::de::Error::custom(format!("Failed to create List: {:?}", e))) - } -} - -impl<'de, C, T, N> ContextDeserialize<'de, C> for Vector -where - T: ContextDeserialize<'de, C> + Value, - N: Unsigned, - C: Clone, -{ - fn context_deserialize(deserializer: D, context: C) -> Result - where - D: Deserializer<'de>, - { - // First deserialize as a List - let list = List::::context_deserialize(deserializer, context)?; - - // Then convert to Vector, which will check the length - Vector::try_from(list).map_err(|e| { - serde::de::Error::custom(format!("Failed to convert List to Vector: {:?}", e)) - }) - } -} diff --git a/consensus/context_deserialize/context_deserialize/src/impls/mod.rs b/consensus/context_deserialize/context_deserialize/src/impls/mod.rs deleted file mode 100644 index 0225c5e031f..00000000000 --- a/consensus/context_deserialize/context_deserialize/src/impls/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -mod core; - -#[cfg(feature = "milhouse")] -mod milhouse; - -#[cfg(feature = "ssz")] -mod ssz; diff --git a/consensus/context_deserialize/context_deserialize/src/impls/ssz.rs b/consensus/context_deserialize/context_deserialize/src/impls/ssz.rs deleted file mode 100644 index 26813a96fb7..00000000000 --- a/consensus/context_deserialize/context_deserialize/src/impls/ssz.rs +++ /dev/null @@ -1,51 +0,0 @@ -use crate::ContextDeserialize; -use serde::{ - de::{Deserializer, Error}, - Deserialize, -}; -use ssz_types::{ - length::{Fixed, Variable}, - typenum::Unsigned, - Bitfield, FixedVector, -}; - -impl<'de, C, T, N> ContextDeserialize<'de, C> for FixedVector -where - T: ContextDeserialize<'de, C>, - N: Unsigned, - C: Clone, -{ - fn context_deserialize(deserializer: D, context: C) -> Result - where - D: Deserializer<'de>, - { - let vec = Vec::::context_deserialize(deserializer, context)?; - FixedVector::new(vec).map_err(|e| D::Error::custom(format!("{:?}", e))) - } -} - -impl<'de, C, N> ContextDeserialize<'de, C> for Bitfield> -where - N: Unsigned 
+ Clone, -{ - fn context_deserialize(deserializer: D, _context: C) -> Result - where - D: Deserializer<'de>, - { - Bitfield::>::deserialize(deserializer) - .map_err(|e| D::Error::custom(format!("{:?}", e))) - } -} - -impl<'de, C, N> ContextDeserialize<'de, C> for Bitfield> -where - N: Unsigned + Clone, -{ - fn context_deserialize(deserializer: D, _context: C) -> Result - where - D: Deserializer<'de>, - { - Bitfield::>::deserialize(deserializer) - .map_err(|e| D::Error::custom(format!("{:?}", e))) - } -} diff --git a/consensus/context_deserialize/context_deserialize/src/lib.rs b/consensus/context_deserialize/context_deserialize/src/lib.rs deleted file mode 100644 index e5f2bfdba38..00000000000 --- a/consensus/context_deserialize/context_deserialize/src/lib.rs +++ /dev/null @@ -1,13 +0,0 @@ -mod impls; - -#[cfg(feature = "derive")] -pub use context_deserialize_derive::context_deserialize; - -use serde::de::Deserializer; - -/// General-purpose deserialization trait that accepts extra context `C`. 
-pub trait ContextDeserialize<'de, C>: Sized { - fn context_deserialize(deserializer: D, context: C) -> Result - where - D: Deserializer<'de>; -} diff --git a/consensus/context_deserialize/context_deserialize_derive/Cargo.toml b/consensus/context_deserialize/context_deserialize_derive/Cargo.toml deleted file mode 100644 index eedae30cdfe..00000000000 --- a/consensus/context_deserialize/context_deserialize_derive/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "context_deserialize_derive" -version = "0.1.0" -edition = "2021" - -[lib] -proc-macro = true - -[dependencies] -quote = { workspace = true } -syn = { workspace = true } - -[dev-dependencies] -context_deserialize = { path = "../context_deserialize" } -serde = { workspace = true } -serde_json = "1.0" diff --git a/consensus/context_deserialize/context_deserialize_derive/src/lib.rs b/consensus/context_deserialize/context_deserialize_derive/src/lib.rs deleted file mode 100644 index 0b73a43b0a4..00000000000 --- a/consensus/context_deserialize/context_deserialize_derive/src/lib.rs +++ /dev/null @@ -1,118 +0,0 @@ -extern crate proc_macro; -extern crate quote; -extern crate syn; - -use proc_macro::TokenStream; -use quote::quote; -use syn::{ - parse_macro_input, AttributeArgs, DeriveInput, GenericParam, LifetimeDef, Meta, NestedMeta, - WhereClause, -}; - -#[proc_macro_attribute] -pub fn context_deserialize(attr: TokenStream, item: TokenStream) -> TokenStream { - let args = parse_macro_input!(attr as AttributeArgs); - let input = parse_macro_input!(item as DeriveInput); - let ident = &input.ident; - - let mut ctx_types = Vec::new(); - let mut explicit_where: Option = None; - - for meta in args { - match meta { - NestedMeta::Meta(Meta::Path(p)) => { - ctx_types.push(p); - } - NestedMeta::Meta(Meta::NameValue(nv)) if nv.path.is_ident("bound") => { - if let syn::Lit::Str(lit_str) = &nv.lit { - let where_string = format!("where {}", lit_str.value()); - match syn::parse_str::(&where_string) { - Ok(where_clause) 
=> { - explicit_where = Some(where_clause); - } - Err(err) => { - return syn::Error::new_spanned( - lit_str, - format!("Invalid where clause '{}': {}", lit_str.value(), err), - ) - .to_compile_error() - .into(); - } - } - } else { - return syn::Error::new_spanned( - &nv, - "Expected a string literal for `bound` value", - ) - .to_compile_error() - .into(); - } - } - _ => { - return syn::Error::new_spanned( - &meta, - "Expected paths or `bound = \"...\"` in #[context_deserialize(...)]", - ) - .to_compile_error() - .into(); - } - } - } - - if ctx_types.is_empty() { - return quote! { - compile_error!("Usage: #[context_deserialize(Type1, Type2, ..., bound = \"...\")]"); - } - .into(); - } - - let original_generics = input.generics.clone(); - - // Clone and clean generics for impl use (remove default params) - let mut impl_generics = input.generics.clone(); - for param in impl_generics.params.iter_mut() { - if let GenericParam::Type(ty) = param { - ty.eq_token = None; - ty.default = None; - } - } - - // Ensure 'de lifetime exists in impl generics - let has_de = impl_generics - .lifetimes() - .any(|LifetimeDef { lifetime, .. }| lifetime.ident == "de"); - - if !has_de { - impl_generics.params.insert(0, syn::parse_quote! { 'de }); - } - - let (_, ty_generics, _) = original_generics.split_for_impl(); - let (impl_gens, _, _) = impl_generics.split_for_impl(); - - // Generate: no `'de` applied to the type name - let mut impls = quote! {}; - for ctx in ctx_types { - impls.extend(quote! { - impl #impl_gens context_deserialize::ContextDeserialize<'de, #ctx> - for #ident #ty_generics - #explicit_where - { - fn context_deserialize( - deserializer: D, - _context: #ctx, - ) -> Result - where - D: serde::de::Deserializer<'de>, - { - ::deserialize(deserializer) - } - } - }); - } - - quote! 
{ - #input - #impls - } - .into() -} diff --git a/consensus/context_deserialize/context_deserialize_derive/tests/context_deserialize_derive.rs b/consensus/context_deserialize/context_deserialize_derive/tests/context_deserialize_derive.rs deleted file mode 100644 index 8fb46da9c65..00000000000 --- a/consensus/context_deserialize/context_deserialize_derive/tests/context_deserialize_derive.rs +++ /dev/null @@ -1,93 +0,0 @@ -use context_deserialize::{context_deserialize, ContextDeserialize}; -use serde::{Deserialize, Serialize}; - -#[test] -fn test_context_deserialize_derive() { - type TestContext = (); - - #[context_deserialize(TestContext)] - #[derive(Debug, PartialEq, Serialize, Deserialize)] - struct Test { - field: String, - } - - let test = Test { - field: "test".to_string(), - }; - let serialized = serde_json::to_string(&test).unwrap(); - let deserialized = - Test::context_deserialize(&mut serde_json::Deserializer::from_str(&serialized), ()) - .unwrap(); - assert_eq!(test, deserialized); -} - -#[test] -fn test_context_deserialize_derive_multiple_types() { - #[allow(dead_code)] - struct TestContext1(u64); - #[allow(dead_code)] - struct TestContext2(String); - - // This will derive: - // - ContextDeserialize for Test - // - ContextDeserialize for Test - // by just leveraging the Deserialize impl - #[context_deserialize(TestContext1, TestContext2)] - #[derive(Debug, PartialEq, Serialize, Deserialize)] - struct Test { - field: String, - } - - let test = Test { - field: "test".to_string(), - }; - let serialized = serde_json::to_string(&test).unwrap(); - let deserialized = Test::context_deserialize( - &mut serde_json::Deserializer::from_str(&serialized), - TestContext1(1), - ) - .unwrap(); - assert_eq!(test, deserialized); - - let deserialized = Test::context_deserialize( - &mut serde_json::Deserializer::from_str(&serialized), - TestContext2("2".to_string()), - ) - .unwrap(); - - assert_eq!(test, deserialized); -} - -#[test] -fn test_context_deserialize_derive_bound() 
{ - use std::fmt::Debug; - - struct TestContext; - - #[derive(Debug, PartialEq, Serialize, Deserialize)] - struct Inner { - value: u64, - } - - #[context_deserialize( - TestContext, - bound = "T: Serialize + for<'a> Deserialize<'a> + Debug + PartialEq" - )] - #[derive(Debug, PartialEq, Serialize, Deserialize)] - struct Wrapper { - inner: T, - } - - let val = Wrapper { - inner: Inner { value: 42 }, - }; - - let serialized = serde_json::to_string(&val).unwrap(); - let deserialized = Wrapper::::context_deserialize( - &mut serde_json::Deserializer::from_str(&serialized), - TestContext, - ) - .unwrap(); - - assert_eq!(val, deserialized); -} From 713e4779129bd999202b09c91e96d4deda16b6d3 Mon Sep 17 00:00:00 2001 From: Moe Mahhouk Date: Fri, 28 Nov 2025 00:06:31 +0100 Subject: [PATCH 43/74] feat: Add reproducible builds release workflows and push images to DockerHub (#7614) This pull request introduces workflows and updates to ensure reproducible builds for the Lighthouse project. It adds two GitHub Actions workflows for building and testing reproducible Docker images and binaries, updates the `Makefile` to streamline reproducible build configurations, and modifies the `Dockerfile.reproducible` to align with the new build process. Additionally, it removes the `reproducible` profile from `Cargo.toml`. ### New GitHub Actions Workflows: * [`.github/workflows/docker-reproducible.yml`](diffhunk://#diff-222af23bee616920b04f5b92a83eb5106fce08abd885cd3a3b15b8beb5e789c3R1-R145): Adds a workflow to build and push reproducible multi-architecture Docker images for releases, including support for dry runs without pushing an image. ### Build Configuration Updates: * [`Makefile`](diffhunk://#diff-76ed074a9305c04054cdebb9e9aad2d818052b07091de1f20cad0bbac34ffb52L85-R143): Refactors reproducible build targets, centralizes environment variables for reproducibility, and updates Docker build arguments for `x86_64` and `aarch64` architectures. 
* [`Dockerfile.reproducible`](diffhunk://#diff-587298ff141278ce3be7c54a559f9f31472cc5b384e285e2105b3dee319ba31dL1-R24): Updates the base Rust image to version 1.86, removes hardcoded reproducibility settings, and delegates build logic to the `Makefile`. * Switch to using jemalloc-sys from Debian repos instead of building it from source. A Debian version is [reproducible](https://tests.reproducible-builds.org/debian/rb-pkg/trixie/amd64/jemalloc.html) which is [hard to achieve](https://github.com/NixOS/nixpkgs/issues/380852) if you build it from source. ### Profile Removal: * [`Cargo.toml`](diffhunk://#diff-2e9d962a08321605940b5a657135052fbcef87b5e360662bb527c96d9a615542L289-L295): Removes the `reproducible` profile, simplifying build configurations and relying on external tooling for reproducibility. Co-Authored-By: Moe Mahhouk Co-Authored-By: chonghe <44791194+chong-he@users.noreply.github.com> Co-Authored-By: Michael Sproul --- .github/workflows/docker-reproducible.yml | 176 ++++++++++++++++++++++ Cargo.toml | 7 - Dockerfile.reproducible | 32 +--- Makefile | 61 ++++++-- common/malloc_utils/Cargo.toml | 2 + testing/state_transition_vectors/Makefile | 2 +- 6 files changed, 231 insertions(+), 49 deletions(-) create mode 100644 .github/workflows/docker-reproducible.yml diff --git a/.github/workflows/docker-reproducible.yml b/.github/workflows/docker-reproducible.yml new file mode 100644 index 00000000000..f3479e9468d --- /dev/null +++ b/.github/workflows/docker-reproducible.yml @@ -0,0 +1,176 @@ +name: docker-reproducible + +on: + push: + branches: + - unstable + - stable + tags: + - v* + workflow_dispatch: # allows manual triggering for testing purposes and skips publishing an image + +env: + DOCKER_REPRODUCIBLE_IMAGE_NAME: >- + ${{ github.repository_owner }}/lighthouse-reproducible + DOCKER_PASSWORD: ${{ secrets.DH_KEY }} + DOCKER_USERNAME: ${{ secrets.DH_ORG }} + +jobs: + extract-version: + name: extract version + runs-on: ubuntu-22.04 + steps: + - name: Extract 
version + run: | + if [[ "${{ github.ref }}" == refs/tags/* ]]; then + # It's a tag (e.g., v1.2.3) + VERSION="${GITHUB_REF#refs/tags/}" + elif [[ "${{ github.ref }}" == refs/heads/stable ]]; then + # stable branch -> latest + VERSION="latest" + elif [[ "${{ github.ref }}" == refs/heads/unstable ]]; then + # unstable branch -> latest-unstable + VERSION="latest-unstable" + else + # For manual triggers from other branches and will not publish any image + VERSION="test-build" + fi + echo "VERSION=$VERSION" >> $GITHUB_OUTPUT + id: extract_version + outputs: + VERSION: ${{ steps.extract_version.outputs.VERSION }} + + verify-and-build: + name: verify reproducibility and build + needs: extract-version + strategy: + matrix: + arch: [amd64, arm64] + include: + - arch: amd64 + rust_target: x86_64-unknown-linux-gnu + rust_image: >- + rust:1.88-bullseye@sha256:8e3c421122bf4cd3b2a866af41a4dd52d87ad9e315fd2cb5100e87a7187a9816 + platform: linux/amd64 + runner: ubuntu-22.04 + - arch: arm64 + rust_target: aarch64-unknown-linux-gnu + rust_image: >- + rust:1.88-bullseye@sha256:8b22455a7ce2adb1355067638284ee99d21cc516fab63a96c4514beaf370aa94 + platform: linux/arm64 + runner: ubuntu-22.04-arm + runs-on: ${{ matrix.runner }} + steps: + - uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver: docker + + - name: Verify reproducible builds (${{ matrix.arch }}) + run: | + # Build first image + docker build -f Dockerfile.reproducible \ + --platform ${{ matrix.platform }} \ + --build-arg RUST_TARGET="${{ matrix.rust_target }}" \ + --build-arg RUST_IMAGE="${{ matrix.rust_image }}" \ + -t lighthouse-verify-1-${{ matrix.arch }} . 
+ + # Extract binary from first build + docker create --name extract-1-${{ matrix.arch }} lighthouse-verify-1-${{ matrix.arch }} + docker cp extract-1-${{ matrix.arch }}:/lighthouse ./lighthouse-1-${{ matrix.arch }} + docker rm extract-1-${{ matrix.arch }} + + # Clean state for second build + docker buildx prune -f + docker system prune -f + + # Build second image + docker build -f Dockerfile.reproducible \ + --platform ${{ matrix.platform }} \ + --build-arg RUST_TARGET="${{ matrix.rust_target }}" \ + --build-arg RUST_IMAGE="${{ matrix.rust_image }}" \ + -t lighthouse-verify-2-${{ matrix.arch }} . + + # Extract binary from second build + docker create --name extract-2-${{ matrix.arch }} lighthouse-verify-2-${{ matrix.arch }} + docker cp extract-2-${{ matrix.arch }}:/lighthouse ./lighthouse-2-${{ matrix.arch }} + docker rm extract-2-${{ matrix.arch }} + + # Compare binaries + echo "=== Comparing binaries ===" + echo "Build 1 SHA256: $(sha256sum lighthouse-1-${{ matrix.arch }})" + echo "Build 2 SHA256: $(sha256sum lighthouse-2-${{ matrix.arch }})" + + if cmp lighthouse-1-${{ matrix.arch }} lighthouse-2-${{ matrix.arch }}; then + echo "Reproducible build verified for ${{ matrix.arch }}" + else + echo "Reproducible build FAILED for ${{ matrix.arch }}" + echo "BLOCKING RELEASE: Builds are not reproducible!" 
+ echo "First 10 differences:" + cmp -l lighthouse-1-${{ matrix.arch }} lighthouse-2-${{ matrix.arch }} | head -10 + exit 1 + fi + + # Clean up verification artifacts but keep one image for publishing + rm -f lighthouse-*-${{ matrix.arch }} + docker rmi lighthouse-verify-1-${{ matrix.arch }} || true + + # Re-tag the second image for publishing (we verified it's identical to first) + VERSION=${{ needs.extract-version.outputs.VERSION }} + FINAL_TAG="${{ env.DOCKER_REPRODUCIBLE_IMAGE_NAME }}:${VERSION}-${{ matrix.arch }}" + docker tag lighthouse-verify-2-${{ matrix.arch }} "$FINAL_TAG" + + - name: Log in to Docker Hub + if: ${{ github.event_name != 'workflow_dispatch' }} + uses: docker/login-action@v3 + with: + username: ${{ env.DOCKER_USERNAME }} + password: ${{ env.DOCKER_PASSWORD }} + + - name: Push verified image (${{ matrix.arch }}) + if: ${{ github.event_name != 'workflow_dispatch' }} + run: | + VERSION=${{ needs.extract-version.outputs.VERSION }} + IMAGE_TAG="${{ env.DOCKER_REPRODUCIBLE_IMAGE_NAME }}:${VERSION}-${{ matrix.arch }}" + docker push "$IMAGE_TAG" + + - name: Clean up local images + run: | + docker rmi lighthouse-verify-2-${{ matrix.arch }} || true + VERSION=${{ needs.extract-version.outputs.VERSION }} + docker rmi "${{ env.DOCKER_REPRODUCIBLE_IMAGE_NAME }}:${VERSION}-${{ matrix.arch }}" || true + + - name: Upload verification artifacts (on failure) + if: failure() + uses: actions/upload-artifact@v4 + with: + name: verification-failure-${{ matrix.arch }} + path: | + lighthouse-*-${{ matrix.arch }} + + create-manifest: + name: create multi-arch manifest + runs-on: ubuntu-22.04 + needs: [extract-version, verify-and-build] + if: ${{ github.event_name != 'workflow_dispatch' }} + steps: + - name: Log in to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ env.DOCKER_USERNAME }} + password: ${{ env.DOCKER_PASSWORD }} + + - name: Create and push multi-arch manifest + run: | + IMAGE_NAME=${{ env.DOCKER_REPRODUCIBLE_IMAGE_NAME }} + VERSION=${{ 
needs.extract-version.outputs.VERSION }} + + # Create manifest for the version tag + docker manifest create \ + ${IMAGE_NAME}:${VERSION} \ + ${IMAGE_NAME}:${VERSION}-amd64 \ + ${IMAGE_NAME}:${VERSION}-arm64 + + docker manifest push ${IMAGE_NAME}:${VERSION} diff --git a/Cargo.toml b/Cargo.toml index 35504c22b76..6ccf429b6c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -279,13 +279,6 @@ lto = "fat" codegen-units = 1 incremental = false -[profile.reproducible] -inherits = "release" -debug = false -panic = "abort" -codegen-units = 1 -overflow-checks = true - [profile.release-debug] inherits = "release" debug = true diff --git a/Dockerfile.reproducible b/Dockerfile.reproducible index 24ba5a58a9b..903515373f8 100644 --- a/Dockerfile.reproducible +++ b/Dockerfile.reproducible @@ -3,42 +3,22 @@ ARG RUST_IMAGE="rust:1.88-bullseye@sha256:8e3c421122bf4cd3b2a866af41a4dd52d87ad9 FROM ${RUST_IMAGE} AS builder # Install specific version of the build dependencies -RUN apt-get update && apt-get install -y libclang-dev=1:11.0-51+nmu5 cmake=3.18.4-2+deb11u1 +RUN apt-get update && apt-get install -y libclang-dev=1:11.0-51+nmu5 cmake=3.18.4-2+deb11u1 libjemalloc-dev=5.2.1-3 -# Add target architecture argument with default value ARG RUST_TARGET="x86_64-unknown-linux-gnu" # Copy the project to the container -COPY . /app +COPY ./ /app WORKDIR /app -# Get the latest commit timestamp and set SOURCE_DATE_EPOCH (default it to 0 if not passed) -ARG SOURCE_DATE=0 - -# Set environment variables for reproducibility -ARG RUSTFLAGS="-C link-arg=-Wl,--build-id=none -C metadata='' --remap-path-prefix $(pwd)=." 
-ENV SOURCE_DATE_EPOCH=$SOURCE_DATE \ - CARGO_INCREMENTAL=0 \ - LC_ALL=C \ - TZ=UTC \ - RUSTFLAGS="${RUSTFLAGS}" - -# Set the default features if not provided -ARG FEATURES="gnosis,slasher-lmdb,slasher-mdbx,slasher-redb,jemalloc" - -# Set the default profile if not provided -ARG PROFILE="reproducible" - # Build the project with the reproducible settings -RUN cargo build --bin lighthouse \ - --features "${FEATURES}" \ - --profile "${PROFILE}" \ - --locked \ - --target "${RUST_TARGET}" +RUN make build-reproducible -RUN mv /app/target/${RUST_TARGET}/${PROFILE}/lighthouse /lighthouse +# Move the binary to a standard location +RUN mv /app/target/${RUST_TARGET}/release/lighthouse /lighthouse # Create a minimal final image with just the binary FROM gcr.io/distroless/cc-debian12:nonroot-6755e21ccd99ddead6edc8106ba03888cbeed41a COPY --from=builder /lighthouse /lighthouse + ENTRYPOINT [ "/lighthouse" ] diff --git a/Makefile b/Makefile index 2edc9f86328..a6891b682f7 100644 --- a/Makefile +++ b/Makefile @@ -81,36 +81,67 @@ build-lcli-aarch64: build-lcli-riscv64: cross build --bin lcli --target riscv64gc-unknown-linux-gnu --features "portable" --profile "$(CROSS_PROFILE)" --locked -# extracts the current source date for reproducible builds -SOURCE_DATE := $(shell git log -1 --pretty=%ct) - -# Default image for x86_64 +# Environment variables for reproducible builds +# Initialize RUSTFLAGS +RUST_BUILD_FLAGS = +# Remove build ID from the binary to ensure reproducibility across builds +RUST_BUILD_FLAGS += -C link-arg=-Wl,--build-id=none +# Remove metadata hash from symbol names to ensure reproducible builds +RUST_BUILD_FLAGS += -C metadata='' + +# Set timestamp from last git commit for reproducible builds +SOURCE_DATE ?= $(shell git log -1 --pretty=%ct) + +# Disable incremental compilation to avoid non-deterministic artifacts +CARGO_INCREMENTAL_VAL = 0 +# Set C locale for consistent string handling and sorting +LOCALE_VAL = C +# Set UTC timezone for consistent time handling across 
builds +TZ_VAL = UTC + +# Features for reproducible builds +FEATURES_REPRODUCIBLE = $(CROSS_FEATURES),jemalloc-unprefixed + +# Derive the architecture-specific library path from RUST_TARGET +JEMALLOC_LIB_ARCH = $(word 1,$(subst -, ,$(RUST_TARGET))) +JEMALLOC_OVERRIDE = /usr/lib/$(JEMALLOC_LIB_ARCH)-linux-gnu/libjemalloc.a + +# Default target architecture +RUST_TARGET ?= x86_64-unknown-linux-gnu + +# Default images for different architectures RUST_IMAGE_AMD64 ?= rust:1.88-bullseye@sha256:8e3c421122bf4cd3b2a866af41a4dd52d87ad9e315fd2cb5100e87a7187a9816 +RUST_IMAGE_ARM64 ?= rust:1.88-bullseye@sha256:8b22455a7ce2adb1355067638284ee99d21cc516fab63a96c4514beaf370aa94 -# Reproducible build for x86_64 -build-reproducible-x86_64: +.PHONY: build-reproducible +build-reproducible: ## Build the lighthouse binary into `target` directory with reproducible builds + SOURCE_DATE_EPOCH=$(SOURCE_DATE) \ + RUSTFLAGS="${RUST_BUILD_FLAGS} --remap-path-prefix $$(pwd)=." \ + CARGO_INCREMENTAL=${CARGO_INCREMENTAL_VAL} \ + LC_ALL=${LOCALE_VAL} \ + TZ=${TZ_VAL} \ + JEMALLOC_OVERRIDE=${JEMALLOC_OVERRIDE} \ + cargo build --bin lighthouse --features "$(FEATURES_REPRODUCIBLE)" --profile "$(PROFILE)" --locked --target $(RUST_TARGET) + +.PHONY: build-reproducible-x86_64 +build-reproducible-x86_64: ## Build reproducible x86_64 Docker image DOCKER_BUILDKIT=1 docker build \ --build-arg RUST_TARGET="x86_64-unknown-linux-gnu" \ --build-arg RUST_IMAGE=$(RUST_IMAGE_AMD64) \ - --build-arg SOURCE_DATE=$(SOURCE_DATE) \ -f Dockerfile.reproducible \ -t lighthouse:reproducible-amd64 . 
-# Default image for arm64 -RUST_IMAGE_ARM64 ?= rust:1.88-bullseye@sha256:8b22455a7ce2adb1355067638284ee99d21cc516fab63a96c4514beaf370aa94 - -# Reproducible build for aarch64 -build-reproducible-aarch64: +.PHONY: build-reproducible-aarch64 +build-reproducible-aarch64: ## Build reproducible aarch64 Docker image DOCKER_BUILDKIT=1 docker build \ --platform linux/arm64 \ --build-arg RUST_TARGET="aarch64-unknown-linux-gnu" \ --build-arg RUST_IMAGE=$(RUST_IMAGE_ARM64) \ - --build-arg SOURCE_DATE=$(SOURCE_DATE) \ -f Dockerfile.reproducible \ -t lighthouse:reproducible-arm64 . -# Build both architectures -build-reproducible-all: build-reproducible-x86_64 build-reproducible-aarch64 +.PHONY: build-reproducible-all +build-reproducible-all: build-reproducible-x86_64 build-reproducible-aarch64 ## Build both x86_64 and aarch64 reproducible Docker images # Create a `.tar.gz` containing a binary for a specific target. define tarball_release_binary diff --git a/common/malloc_utils/Cargo.toml b/common/malloc_utils/Cargo.toml index 39c7137d4cb..1052128852a 100644 --- a/common/malloc_utils/Cargo.toml +++ b/common/malloc_utils/Cargo.toml @@ -21,6 +21,8 @@ jemalloc-profiling = ["tikv-jemallocator/profiling"] # Force the use of system malloc (or glibc) rather than jemalloc. # This is a no-op on Windows where jemalloc is always disabled. 
sysmalloc = [] +# Enable jemalloc with unprefixed malloc (recommended for reproducible builds) +jemalloc-unprefixed = ["jemalloc", "tikv-jemallocator/unprefixed_malloc_on_supported_platforms"] [dependencies] libc = "0.2.79" diff --git a/testing/state_transition_vectors/Makefile b/testing/state_transition_vectors/Makefile index 437aa50b00a..c90810ad398 100644 --- a/testing/state_transition_vectors/Makefile +++ b/testing/state_transition_vectors/Makefile @@ -5,4 +5,4 @@ test: cargo test --release --features "$(TEST_FEATURES)" clean: - rm -r vectors/ + rm -rf vectors/ From 9394663155171f74c2f237b3de20c683bdb451ee Mon Sep 17 00:00:00 2001 From: sashass1315 Date: Fri, 28 Nov 2025 06:09:13 +0200 Subject: [PATCH 44/74] fix: compare bls changes in op-pool (#8465) Co-Authored-By: sashass1315 Co-Authored-By: Michael Sproul --- beacon_node/operation_pool/src/bls_to_execution_changes.rs | 2 +- beacon_node/operation_pool/src/lib.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/beacon_node/operation_pool/src/bls_to_execution_changes.rs b/beacon_node/operation_pool/src/bls_to_execution_changes.rs index cc8809c43e6..485f21b5c8b 100644 --- a/beacon_node/operation_pool/src/bls_to_execution_changes.rs +++ b/beacon_node/operation_pool/src/bls_to_execution_changes.rs @@ -19,7 +19,7 @@ pub enum ReceivedPreCapella { /// /// Using the LIFO queue for block production disincentivises spam on P2P at the Capella fork, /// and is less-relevant after that. -#[derive(Debug, Default)] +#[derive(Debug, Default, PartialEq, Eq)] pub struct BlsToExecutionChanges { /// Map from validator index to BLS to execution change. 
by_validator_index: HashMap>>, diff --git a/beacon_node/operation_pool/src/lib.rs b/beacon_node/operation_pool/src/lib.rs index 24e2cfbbb5d..e92d381bacc 100644 --- a/beacon_node/operation_pool/src/lib.rs +++ b/beacon_node/operation_pool/src/lib.rs @@ -782,6 +782,7 @@ impl PartialEq for OperationPool { && *self.attester_slashings.read() == *other.attester_slashings.read() && *self.proposer_slashings.read() == *other.proposer_slashings.read() && *self.voluntary_exits.read() == *other.voluntary_exits.read() + && *self.bls_to_execution_changes.read() == *other.bls_to_execution_changes.read() } } From 7cee5d60906b624e139b0f74ff84951a3fb20d2f Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Fri, 28 Nov 2025 15:30:49 +1100 Subject: [PATCH 45/74] Optimise pubkey cache initialisation during beacon node startup (#8451) Instrument beacon node startup and parallelise pubkey cache initialisation. I instrumented beacon node startup and noticed that pubkey cache takes a long time to initialise, mostly due to decompressing all the validator pubkeys. This PR uses rayon to parallelize the decompression on initial checkpoint sync. The pubkeys are stored uncompressed, so the decopression time is not a problem on subsequent restarts. On restarts, we still deserialize pubkeys, but the timing is quite minimal on Sepolia so I didn't investigate further. `validator_pubkey_cache_new` timing on Sepolia: * before: 109.64ms * with parallelization: 21ms on Hoodi: * before: times out with Kurtosis after 120s * with parallelization: 12.77s to import keys **UPDATE**: downloading checkpoint state + genesis state takes about 2 minutes on my laptop, so it seems like the BN managed to start the http server just before timing out (after the optimisation). 
image Co-Authored-By: Jimmy Chen --- .../src/validator_pubkey_cache.rs | 111 ++++++++++++++---- beacon_node/client/src/builder.rs | 5 +- 2 files changed, 94 insertions(+), 22 deletions(-) diff --git a/beacon_node/beacon_chain/src/validator_pubkey_cache.rs b/beacon_node/beacon_chain/src/validator_pubkey_cache.rs index 39d2c2c2d76..a346a649f02 100644 --- a/beacon_node/beacon_chain/src/validator_pubkey_cache.rs +++ b/beacon_node/beacon_chain/src/validator_pubkey_cache.rs @@ -1,12 +1,14 @@ use crate::errors::BeaconChainError; use crate::{BeaconChainTypes, BeaconStore}; use bls::PUBLIC_KEY_UNCOMPRESSED_BYTES_LEN; +use rayon::prelude::*; use smallvec::SmallVec; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; use std::collections::HashMap; use std::marker::PhantomData; use store::{DBColumn, Error as StoreError, StoreItem, StoreOp}; +use tracing::instrument; use types::{BeaconState, FixedBytesExtended, Hash256, PublicKey, PublicKeyBytes}; /// Provides a mapping of `validator_index -> validator_publickey`. @@ -28,6 +30,7 @@ impl ValidatorPubkeyCache { /// Create a new public key cache using the keys in `state.validators`. /// /// The new cache will be updated with the keys from `state` and immediately written to disk. + #[instrument(name = "validator_pubkey_cache_new", skip_all)] pub fn new( state: &BeaconState, store: BeaconStore, @@ -46,6 +49,7 @@ impl ValidatorPubkeyCache { } /// Load the pubkey cache from the given on-disk database. + #[instrument(name = "validator_pubkey_cache_load_from_store", skip_all)] pub fn load_from_store(store: BeaconStore) -> Result { let mut pubkeys = vec![]; let mut indices = HashMap::new(); @@ -77,6 +81,7 @@ impl ValidatorPubkeyCache { /// Does not delete any keys from `self` if they don't appear in `state`. /// /// NOTE: The caller *must* commit the returned I/O batch as part of the block import process. 
+ #[instrument(skip_all)] pub fn import_new_pubkeys( &mut self, state: &BeaconState, @@ -106,29 +111,58 @@ impl ValidatorPubkeyCache { self.indices.reserve(validator_keys.len()); let mut store_ops = Vec::with_capacity(validator_keys.len()); - for pubkey_bytes in validator_keys { - let i = self.pubkeys.len(); - if self.indices.contains_key(&pubkey_bytes) { - return Err(BeaconChainError::DuplicateValidatorPublicKey); + let is_initial_import = self.pubkeys.is_empty(); + + // Helper to insert a decompressed key + let mut insert_key = + |pubkey_bytes: PublicKeyBytes, pubkey: PublicKey| -> Result<(), BeaconChainError> { + let i = self.pubkeys.len(); + + if self.indices.contains_key(&pubkey_bytes) { + return Err(BeaconChainError::DuplicateValidatorPublicKey); + } + + // Stage the new validator key for writing to disk. + // It will be committed atomically when the block that introduced it is written to disk. + // Notably it is NOT written while the write lock on the cache is held. + // See: https://github.com/sigp/lighthouse/issues/2327 + store_ops.push(StoreOp::KeyValueOp( + DatabasePubkey::from_pubkey(&pubkey) + .as_kv_store_op(DatabasePubkey::key_for_index(i)), + )); + + self.pubkeys.push(pubkey); + self.pubkey_bytes.push(pubkey_bytes); + self.indices.insert(pubkey_bytes, i); + Ok(()) + }; + + if is_initial_import { + // On first startup, decompress keys in parallel for better performance + let validator_keys_vec: Vec = validator_keys.collect(); + + let decompressed: Vec<(PublicKeyBytes, PublicKey)> = validator_keys_vec + .into_par_iter() + .map(|pubkey_bytes| { + let pubkey = (&pubkey_bytes) + .try_into() + .map_err(BeaconChainError::InvalidValidatorPubkeyBytes)?; + Ok((pubkey_bytes, pubkey)) + }) + .collect::, BeaconChainError>>()?; + + for (pubkey_bytes, pubkey) in decompressed { + insert_key(pubkey_bytes, pubkey)?; + } + } else { + // Sequential path for incremental updates + for pubkey_bytes in validator_keys { + let pubkey = (&pubkey_bytes) + .try_into() + 
.map_err(BeaconChainError::InvalidValidatorPubkeyBytes)?; + insert_key(pubkey_bytes, pubkey)?; } - - let pubkey = (&pubkey_bytes) - .try_into() - .map_err(BeaconChainError::InvalidValidatorPubkeyBytes)?; - - // Stage the new validator key for writing to disk. - // It will be committed atomically when the block that introduced it is written to disk. - // Notably it is NOT written while the write lock on the cache is held. - // See: https://github.com/sigp/lighthouse/issues/2327 - store_ops.push(StoreOp::KeyValueOp( - DatabasePubkey::from_pubkey(&pubkey) - .as_kv_store_op(DatabasePubkey::key_for_index(i)), - )); - - self.pubkeys.push(pubkey); - self.pubkey_bytes.push(pubkey_bytes); - self.indices.insert(pubkey_bytes, i); } Ok(store_ops) @@ -324,4 +358,39 @@ mod test { let cache = ValidatorPubkeyCache::load_from_store(store).expect("should open cache"); check_cache_get(&cache, &keypairs[..]); } + + #[test] + fn parallel_import_maintains_order() { + // Test that parallel decompression on first startup maintains correct order and indices + let (state, keypairs) = get_state(100); + let store = get_store(); + + // Create cache from empty state (triggers parallel path) + let cache: ValidatorPubkeyCache = + ValidatorPubkeyCache::new(&state, store).expect("should create cache"); + + check_cache_get(&cache, &keypairs[..]); + } + + #[test] + fn incremental_import_maintains_order() { + // Test that incremental imports maintain correct order (triggers sequential path) + let store = get_store(); + + // Start with 50 validators + let (state1, keypairs1) = get_state(50); + let mut cache = + ValidatorPubkeyCache::new(&state1, store.clone()).expect("should create cache"); + check_cache_get(&cache, &keypairs1[..]); + + // Add 50 more validators + let (state2, keypairs2) = get_state(100); + let ops = cache + .import_new_pubkeys(&state2) + .expect("should import pubkeys"); + store.do_atomically_with_block_and_blobs_cache(ops).unwrap(); + + // Verify all 100 validators are correctly 
indexed + check_cache_get(&cache, &keypairs2[..]); + } } diff --git a/beacon_node/client/src/builder.rs b/beacon_node/client/src/builder.rs index bac61fc7356..c48021e45d4 100644 --- a/beacon_node/client/src/builder.rs +++ b/beacon_node/client/src/builder.rs @@ -42,7 +42,7 @@ use std::time::Duration; use std::time::{SystemTime, UNIX_EPOCH}; use store::database::interface::BeaconNodeBackend; use timer::spawn_timer; -use tracing::{debug, info, warn}; +use tracing::{debug, info, instrument, warn}; use types::data_column_custody_group::compute_ordered_custody_column_indices; use types::{ BeaconState, BlobSidecarList, ChainSpec, EthSpec, ExecutionBlockHash, Hash256, @@ -151,6 +151,7 @@ where /// Initializes the `BeaconChainBuilder`. The `build_beacon_chain` method will need to be /// called later in order to actually instantiate the `BeaconChain`. + #[instrument(skip_all)] pub async fn beacon_chain_builder( mut self, client_genesis: ClientGenesis, @@ -613,6 +614,7 @@ where /// /// If type inference errors are being raised, see the comment on the definition of `Self`. #[allow(clippy::type_complexity)] + #[instrument(name = "build_client", skip_all)] pub fn build( mut self, ) -> Result>, String> { @@ -813,6 +815,7 @@ where TColdStore: ItemStore + 'static, { /// Consumes the internal `BeaconChainBuilder`, attaching the resulting `BeaconChain` to self. 
+ #[instrument(skip_all)] pub fn build_beacon_chain(mut self) -> Result { let context = self .runtime_context From 64031b6cbb7d20d90bc72064b6778f840bbb1e4a Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Mon, 1 Dec 2025 11:19:49 +1100 Subject: [PATCH 46/74] Add tracing spans to validator client duty cycles (#8482) Co-Authored-By: Jimmy Chen Co-Authored-By: Jimmy Chen --- .../src/attestation_service.rs | 28 ++++++++++++++++- .../validator_services/src/block_service.rs | 11 ++++++- .../src/sync_committee_service.rs | 30 ++++++++++++++++--- 3 files changed, 63 insertions(+), 6 deletions(-) diff --git a/validator_client/validator_services/src/attestation_service.rs b/validator_client/validator_services/src/attestation_service.rs index da6e8f35886..a6ce67fae91 100644 --- a/validator_client/validator_services/src/attestation_service.rs +++ b/validator_client/validator_services/src/attestation_service.rs @@ -8,7 +8,7 @@ use std::ops::Deref; use std::sync::Arc; use task_executor::TaskExecutor; use tokio::time::{Duration, Instant, sleep, sleep_until}; -use tracing::{debug, error, info, trace, warn}; +use tracing::{Instrument, debug, error, info, info_span, instrument, trace, warn}; use tree_hash::TreeHash; use types::{Attestation, AttestationData, ChainSpec, CommitteeIndex, EthSpec, Slot}; use validator_store::{Error as ValidatorStoreError, ValidatorStore}; @@ -243,6 +243,11 @@ impl AttestationService AttestationService AttestationService AttestationService, Vec<_>) = join_all(signing_futures) + .instrument(info_span!( + "sign_attestations", + count = validator_duties.len() + )) .await .into_iter() .flatten() @@ -487,6 +498,10 @@ impl AttestationService(single_attestations, fork_name) .await }) + .instrument(info_span!( + "publish_attestations", + count = attestations.len() + )) .await { Ok(()) => info!( @@ -523,6 +538,7 @@ impl AttestationService AttestationService AttestationService AttestationService { diff --git a/validator_client/validator_services/src/block_service.rs 
b/validator_client/validator_services/src/block_service.rs index c111b1f22eb..5ffabd22ec4 100644 --- a/validator_client/validator_services/src/block_service.rs +++ b/validator_client/validator_services/src/block_service.rs @@ -11,7 +11,7 @@ use std::sync::Arc; use std::time::Duration; use task_executor::TaskExecutor; use tokio::sync::mpsc; -use tracing::{debug, error, info, trace, warn}; +use tracing::{Instrument, debug, error, info, info_span, instrument, trace, warn}; use types::{BlockType, ChainSpec, EthSpec, Graffiti, PublicKeyBytes, Slot}; use validator_store::{Error as ValidatorStoreError, SignedBlock, UnsignedBlock, ValidatorStore}; @@ -320,6 +320,7 @@ impl BlockService { } #[allow(clippy::too_many_arguments)] + #[instrument(skip_all, fields(%slot, ?validator_pubkey))] async fn sign_and_publish_block( &self, proposer_fallback: ProposerFallback, @@ -333,6 +334,7 @@ impl BlockService { let res = self .validator_store .sign_block(*validator_pubkey, unsigned_block, slot) + .instrument(info_span!("sign_block")) .await; let signed_block = match res { @@ -389,6 +391,11 @@ impl BlockService { Ok(()) } + #[instrument( + name = "block_proposal_duty_cycle", + skip_all, + fields(%slot, ?validator_pubkey) + )] async fn publish_block( self, slot: Slot, @@ -483,6 +490,7 @@ impl BlockService { Ok(()) } + #[instrument(skip_all)] async fn publish_signed_block_contents( &self, signed_block: &SignedBlock, @@ -518,6 +526,7 @@ impl BlockService { Ok::<_, BlockError>(()) } + #[instrument(skip_all, fields(%slot))] async fn get_validator_block( beacon_node: &BeaconNodeHttpClient, slot: Slot, diff --git a/validator_client/validator_services/src/sync_committee_service.rs b/validator_client/validator_services/src/sync_committee_service.rs index 02f9f24c8a1..5f6b1cb710f 100644 --- a/validator_client/validator_services/src/sync_committee_service.rs +++ b/validator_client/validator_services/src/sync_committee_service.rs @@ -11,7 +11,7 @@ use std::sync::Arc; use 
std::sync::atomic::{AtomicBool, Ordering}; use task_executor::TaskExecutor; use tokio::time::{Duration, Instant, sleep, sleep_until}; -use tracing::{debug, error, info, trace, warn}; +use tracing::{Instrument, debug, error, info, info_span, instrument, trace, warn}; use types::{ ChainSpec, EthSpec, Hash256, PublicKeyBytes, Slot, SyncCommitteeSubscription, SyncContributionData, SyncDuty, SyncSelectionProof, SyncSubnetId, @@ -208,7 +208,8 @@ impl SyncCommitteeService SyncCommitteeService SyncCommitteeService SyncCommitteeService SyncCommitteeService SyncCommitteeService SyncCommitteeService SyncCommitteeService SyncCommitteeService SyncCommitteeService Date: Mon, 1 Dec 2025 13:56:50 +0800 Subject: [PATCH 47/74] Refactor get_validator_blocks_v3 fallback (#8186) #7727 introduced a bug in the logging, where as long as the node failed the SSZ `get_validator_blocks_v3` endpoint, it would log as `Beacon node does not support...`. However, the failure can be due to other reasons, such as a timed out error as found by @jimmygchen: `WARN Beacon node does not support SSZ in block production, falling back to JSON slot: 5283379, error: HttpClient(url: https://ho-h-bn-cowl.spesi.io:15052/, kind: timeout, detail: operation timed out` This PR made the error log more generic, so there is less confusion. Additionally, suggested by @michaelsproul, this PR refactors the `get_validator_blocks_v3` calls by trying all beacon nodes using the SSZ endpoint first, and if all beacon node fails the SSZ endpoint, only then fallback to JSON. It changes the logic from: "SSZ -> JSON for primary beacon node, followed by SSZ -> JSON for second beacon node and so on" to "SSZ for all beacon nodes -> JSON for all beacon nodes" This has the advantage that if the primary beacon node is having issues and failed the SSZ, we avoid retrying the primary beacon node again on JSON (as it could be that the primary beacon node fail again); rather, we switch to the second beacon node. 
Co-Authored-By: Tan Chee Keong Co-Authored-By: chonghe <44791194+chong-he@users.noreply.github.com> --- .../validator_services/src/block_service.rs | 155 ++++++++---------- 1 file changed, 68 insertions(+), 87 deletions(-) diff --git a/validator_client/validator_services/src/block_service.rs b/validator_client/validator_services/src/block_service.rs index 5ffabd22ec4..8ec53d3f409 100644 --- a/validator_client/validator_services/src/block_service.rs +++ b/validator_client/validator_services/src/block_service.rs @@ -1,5 +1,4 @@ use beacon_node_fallback::{ApiTopic, BeaconNodeFallback, Error as FallbackError, Errors}; -use bls::SignatureBytes; use eth2::{BeaconNodeHttpClient, StatusCode}; use graffiti_file::{GraffitiFile, determine_graffiti}; use logging::crit; @@ -298,7 +297,7 @@ impl BlockService { self.inner.executor.spawn( async move { let result = service - .publish_block(slot, validator_pubkey, builder_boost_factor) + .get_validator_block_and_publish_block(slot, validator_pubkey, builder_boost_factor) .await; match result { @@ -396,7 +395,7 @@ impl BlockService { skip_all, fields(%slot, ?validator_pubkey) )] - async fn publish_block( + async fn get_validator_block_and_publish_block( self, slot: Slot, validator_pubkey: PublicKeyBytes, @@ -449,33 +448,80 @@ impl BlockService { info!(slot = slot.as_u64(), "Requesting unsigned block"); - // Request block from first responsive beacon node. + // Request an SSZ block from all beacon nodes in order, returning on the first successful response. + // If all nodes fail, run a second pass falling back to JSON. // - // Try the proposer nodes last, since it's likely that they don't have a + // Proposer nodes will always be tried last during each pass since it's likely that they don't have a // great view of attestations on the network. 
- let unsigned_block = proposer_fallback + let ssz_block_response = proposer_fallback .request_proposers_last(|beacon_node| async move { let _get_timer = validator_metrics::start_timer_vec( &validator_metrics::BLOCK_SERVICE_TIMES, &[validator_metrics::BEACON_BLOCK_HTTP_GET], ); - Self::get_validator_block( - &beacon_node, - slot, - randao_reveal_ref, - graffiti, - proposer_index, - builder_boost_factor, - ) - .await - .map_err(|e| { - BlockError::Recoverable(format!( - "Error from beacon node when producing block: {:?}", - e - )) - }) + beacon_node + .get_validator_blocks_v3_ssz::( + slot, + randao_reveal_ref, + graffiti.as_ref(), + builder_boost_factor, + ) + .await }) - .await?; + .await; + + let block_response = match ssz_block_response { + Ok((ssz_block_response, _metadata)) => ssz_block_response, + Err(e) => { + warn!( + slot = slot.as_u64(), + error = %e, + "SSZ block production failed, falling back to JSON" + ); + + proposer_fallback + .request_proposers_last(|beacon_node| async move { + let _get_timer = validator_metrics::start_timer_vec( + &validator_metrics::BLOCK_SERVICE_TIMES, + &[validator_metrics::BEACON_BLOCK_HTTP_GET], + ); + let (json_block_response, _metadata) = beacon_node + .get_validator_blocks_v3::( + slot, + randao_reveal_ref, + graffiti.as_ref(), + builder_boost_factor, + ) + .await + .map_err(|e| { + BlockError::Recoverable(format!( + "Error from beacon node when producing block: {:?}", + e + )) + })?; + + Ok(json_block_response.data) + }) + .await + .map_err(BlockError::from)? 
+ } + }; + + let (block_proposer, unsigned_block) = match block_response { + eth2::types::ProduceBlockV3Response::Full(block) => { + (block.block().proposer_index(), UnsignedBlock::Full(block)) + } + eth2::types::ProduceBlockV3Response::Blinded(block) => { + (block.proposer_index(), UnsignedBlock::Blinded(block)) + } + }; + + info!(slot = slot.as_u64(), "Received unsigned block"); + if proposer_index != Some(block_proposer) { + return Err(BlockError::Recoverable( + "Proposer index does not match block proposer. Beacon chain re-orged".to_string(), + )); + } self_ref .sign_and_publish_block( @@ -525,71 +571,6 @@ impl BlockService { } Ok::<_, BlockError>(()) } - - #[instrument(skip_all, fields(%slot))] - async fn get_validator_block( - beacon_node: &BeaconNodeHttpClient, - slot: Slot, - randao_reveal_ref: &SignatureBytes, - graffiti: Option, - proposer_index: Option, - builder_boost_factor: Option, - ) -> Result, BlockError> { - let block_response = match beacon_node - .get_validator_blocks_v3_ssz::( - slot, - randao_reveal_ref, - graffiti.as_ref(), - builder_boost_factor, - ) - .await - { - Ok((ssz_block_response, _)) => ssz_block_response, - Err(e) => { - warn!( - slot = slot.as_u64(), - error = %e, - "Beacon node does not support SSZ in block production, falling back to JSON" - ); - - let (json_block_response, _) = beacon_node - .get_validator_blocks_v3::( - slot, - randao_reveal_ref, - graffiti.as_ref(), - builder_boost_factor, - ) - .await - .map_err(|e| { - BlockError::Recoverable(format!( - "Error from beacon node when producing block: {:?}", - e - )) - })?; - - // Extract ProduceBlockV3Response (data field of the struct ForkVersionedResponse) - json_block_response.data - } - }; - - let (block_proposer, unsigned_block) = match block_response { - eth2::types::ProduceBlockV3Response::Full(block) => { - (block.block().proposer_index(), UnsignedBlock::Full(block)) - } - eth2::types::ProduceBlockV3Response::Blinded(block) => { - (block.proposer_index(), 
UnsignedBlock::Blinded(block)) - } - }; - - info!(slot = slot.as_u64(), "Received unsigned block"); - if proposer_index != Some(block_proposer) { - return Err(BlockError::Recoverable( - "Proposer index does not match block proposer. Beacon chain re-orged".to_string(), - )); - } - - Ok::<_, BlockError>(unsigned_block) - } } /// Wrapper for values we want to log about a block we signed, for easy extraction from the possible From f42b14ac589b29013cbb971bb8fa733bd13a6537 Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Tue, 2 Dec 2025 09:09:08 +1100 Subject: [PATCH 48/74] Update local testnet scripts for the fulu fork (#8489) * Remove `fulu-devnet-3` testing on CI * Delete `scripts/local_testnet/network_params_das.yaml` and consolidate it into the main `network_params.yaml` file we use on CI * Delete enclave before building image, so it doesn't cause slow image building. Co-Authored-By: Jimmy Chen --- .github/workflows/local-testnet.yml | 2 +- scripts/local_testnet/README.md | 2 +- scripts/local_testnet/network_params.yaml | 30 +++++++++++--- scripts/local_testnet/network_params_das.yaml | 41 ------------------- scripts/local_testnet/start_local_testnet.sh | 10 ++--- .../tests/checkpoint-sync-config-devnet.yaml | 24 ----------- 6 files changed, 31 insertions(+), 78 deletions(-) delete mode 100644 scripts/local_testnet/network_params_das.yaml delete mode 100644 scripts/tests/checkpoint-sync-config-devnet.yaml diff --git a/.github/workflows/local-testnet.yml b/.github/workflows/local-testnet.yml index c129c0ec95c..9992273e0a7 100644 --- a/.github/workflows/local-testnet.yml +++ b/.github/workflows/local-testnet.yml @@ -179,7 +179,7 @@ jobs: continue-on-error: true strategy: matrix: - network: [sepolia, devnet] + network: [sepolia] steps: - uses: actions/checkout@v5 diff --git a/scripts/local_testnet/README.md b/scripts/local_testnet/README.md index 9d9844c4c41..6260f910192 100644 --- a/scripts/local_testnet/README.md +++ b/scripts/local_testnet/README.md @@ -21,7 +21,7 @@ 
cd ./scripts/local_testnet ``` It will build a Lighthouse docker image from the root of the directory and will take an approximately 12 minutes to complete. Once built, the testing will be started automatically. You will see a list of services running and "Started!" at the end. -You can also select your own Lighthouse docker image to use by specifying it in `network_params.yml` under the `cl_image` key. +You can also select your own Lighthouse docker image to use by specifying it in `network_params.yaml` under the `cl_image` key. Full configuration reference for Kurtosis is specified [here](https://github.com/ethpandaops/ethereum-package?tab=readme-ov-file#configuration). To view all running services: diff --git a/scripts/local_testnet/network_params.yaml b/scripts/local_testnet/network_params.yaml index cdfacbced4b..a048674e630 100644 --- a/scripts/local_testnet/network_params.yaml +++ b/scripts/local_testnet/network_params.yaml @@ -1,19 +1,37 @@ # Full configuration reference [here](https://github.com/ethpandaops/ethereum-package?tab=readme-ov-file#configuration). 
participants: - - el_type: geth + - cl_type: lighthouse + cl_image: lighthouse:local + el_type: geth el_image: ethereum/client-go:latest - cl_type: lighthouse + supernode: true + cl_extra_params: + - --target-peers=3 + count: 2 + - cl_type: lighthouse cl_image: lighthouse:local + el_type: geth + el_image: ethereum/client-go:latest + supernode: false cl_extra_params: - --target-peers=3 - count: 4 + count: 2 network_params: - electra_fork_epoch: 0 - seconds_per_slot: 3 -global_log_level: debug + fulu_fork_epoch: 0 + seconds_per_slot: 6 snooper_enabled: false +global_log_level: debug additional_services: - dora - spamoor - prometheus_grafana - tempo +spamoor_params: + image: ethpandaops/spamoor:master + spammers: + - scenario: eoatx + config: + throughput: 200 + - scenario: blobs + config: + throughput: 20 \ No newline at end of file diff --git a/scripts/local_testnet/network_params_das.yaml b/scripts/local_testnet/network_params_das.yaml deleted file mode 100644 index e3bc5131531..00000000000 --- a/scripts/local_testnet/network_params_das.yaml +++ /dev/null @@ -1,41 +0,0 @@ -participants: - - cl_type: lighthouse - cl_image: lighthouse:local - el_type: geth - el_image: ethpandaops/geth:master - supernode: true - cl_extra_params: - # Note: useful for testing range sync (only produce block if the node is in sync to prevent forking) - - --sync-tolerance-epochs=0 - - --target-peers=3 - count: 2 - - cl_type: lighthouse - cl_image: lighthouse:local - el_type: geth - el_image: ethpandaops/geth:master - supernode: false - cl_extra_params: - # Note: useful for testing range sync (only produce block if the node is in sync to prevent forking) - - --sync-tolerance-epochs=0 - - --target-peers=3 - count: 2 -network_params: - electra_fork_epoch: 0 - fulu_fork_epoch: 1 - seconds_per_slot: 6 -snooper_enabled: false -global_log_level: debug -additional_services: - - dora - - spamoor - - prometheus_grafana - - tempo -spamoor_params: - image: ethpandaops/spamoor:master - spammers: - - 
scenario: eoatx - config: - throughput: 200 - - scenario: blobs - config: - throughput: 20 \ No newline at end of file diff --git a/scripts/local_testnet/start_local_testnet.sh b/scripts/local_testnet/start_local_testnet.sh index 442e6fd98d9..8d8b33526d3 100755 --- a/scripts/local_testnet/start_local_testnet.sh +++ b/scripts/local_testnet/start_local_testnet.sh @@ -78,6 +78,11 @@ if [ "$RUN_ASSERTOOR_TESTS" = true ]; then echo "Assertoor has been added to $NETWORK_PARAMS_FILE." fi +if [ "$KEEP_ENCLAVE" = false ]; then + # Stop local testnet + kurtosis enclave rm -f $ENCLAVE_NAME 2>/dev/null || true +fi + if [ "$BUILD_IMAGE" = true ]; then echo "Building Lighthouse Docker image." ROOT_DIR="$SCRIPT_DIR/../.." @@ -86,11 +91,6 @@ else echo "Not rebuilding Lighthouse Docker image." fi -if [ "$KEEP_ENCLAVE" = false ]; then - # Stop local testnet - kurtosis enclave rm -f $ENCLAVE_NAME 2>/dev/null || true -fi - kurtosis run --enclave $ENCLAVE_NAME github.com/ethpandaops/ethereum-package@$ETHEREUM_PKG_VERSION --args-file $NETWORK_PARAMS_FILE echo "Started!" diff --git a/scripts/tests/checkpoint-sync-config-devnet.yaml b/scripts/tests/checkpoint-sync-config-devnet.yaml deleted file mode 100644 index 2392011ed33..00000000000 --- a/scripts/tests/checkpoint-sync-config-devnet.yaml +++ /dev/null @@ -1,24 +0,0 @@ -# Kurtosis config file to checkpoint sync to a running devnet supported by ethPandaOps and `ethereum-package`. 
-participants: - - cl_type: lighthouse - cl_image: lighthouse:local - el_type: geth - el_image: ethpandaops/geth:master - cl_extra_params: - - --disable-backfill-rate-limiting - supernode: true - - cl_type: lighthouse - cl_image: lighthouse:local - el_type: geth - el_image: ethpandaops/geth:master - cl_extra_params: - - --disable-backfill-rate-limiting - supernode: false - -checkpoint_sync_enabled: true -checkpoint_sync_url: "https://checkpoint-sync.fusaka-devnet-3.ethpandaops.io" - -global_log_level: debug - -network_params: - network: fusaka-devnet-3 From 4fbe5174915a7d98998e83512ed1f1bacdf433b2 Mon Sep 17 00:00:00 2001 From: 0xMushow <105550256+0xMushow@users.noreply.github.com> Date: Tue, 2 Dec 2025 04:06:29 +0100 Subject: [PATCH 49/74] Fix data columns sorting when reconstructing blobs (#8510) Closes https://github.com/sigp/lighthouse/issues/8509 Co-Authored-By: Antoine James --- beacon_node/beacon_chain/src/beacon_chain.rs | 2 +- beacon_node/beacon_chain/src/kzg_utils.rs | 36 +++++++++++++++++--- beacon_node/http_api/src/block_id.rs | 2 +- 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 494346e7ff2..00c5ab415c1 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -1248,7 +1248,7 @@ impl BeaconChain { let num_required_columns = T::EthSpec::number_of_columns() / 2; let reconstruction_possible = columns.len() >= num_required_columns; if reconstruction_possible { - reconstruct_blobs(&self.kzg, &columns, None, &block, &self.spec) + reconstruct_blobs(&self.kzg, columns, None, &block, &self.spec) .map(Some) .map_err(Error::FailedToReconstructBlobs) } else { diff --git a/beacon_node/beacon_chain/src/kzg_utils.rs b/beacon_node/beacon_chain/src/kzg_utils.rs index 200774ebe46..334124419b9 100644 --- a/beacon_node/beacon_chain/src/kzg_utils.rs +++ b/beacon_node/beacon_chain/src/kzg_utils.rs @@ -308,12 
+308,14 @@ pub(crate) fn build_data_column_sidecars( /// and it will be slow if the node needs to reconstruct the blobs pub fn reconstruct_blobs( kzg: &Kzg, - data_columns: &[Arc>], + mut data_columns: Vec>>, blob_indices_opt: Option>, signed_block: &SignedBlindedBeaconBlock, spec: &ChainSpec, ) -> Result, String> { - // The data columns are from the database, so we assume their correctness. + // Sort data columns by index to ensure ascending order for KZG operations + data_columns.sort_unstable_by_key(|dc| dc.index); + let first_data_column = data_columns .first() .ok_or("data_columns should have at least one element".to_string())?; @@ -331,7 +333,7 @@ pub fn reconstruct_blobs( .map(|row_index| { let mut cells: Vec = vec![]; let mut cell_ids: Vec = vec![]; - for data_column in data_columns { + for data_column in &data_columns { let cell = data_column .column .get(row_index) @@ -463,6 +465,7 @@ mod test { test_reconstruct_data_columns(&kzg, &spec); test_reconstruct_data_columns_unordered(&kzg, &spec); test_reconstruct_blobs_from_data_columns(&kzg, &spec); + test_reconstruct_blobs_from_data_columns_unordered(&kzg, &spec); test_validate_data_columns(&kzg, &spec); } @@ -595,7 +598,7 @@ mod test { let blob_indices = vec![1, 2]; let reconstructed_blobs = reconstruct_blobs( kzg, - &column_sidecars.iter().as_slice()[0..column_sidecars.len() / 2], + column_sidecars[0..column_sidecars.len() / 2].to_vec(), Some(blob_indices.clone()), &signed_blinded_block, spec, @@ -613,6 +616,31 @@ mod test { } } + #[track_caller] + fn test_reconstruct_blobs_from_data_columns_unordered(kzg: &Kzg, spec: &ChainSpec) { + let num_of_blobs = 2; + let (signed_block, blobs, proofs) = + create_test_fulu_block_and_blobs::(num_of_blobs, spec); + let blob_refs = blobs.iter().collect::>(); + let column_sidecars = + blobs_to_data_column_sidecars(&blob_refs, proofs.to_vec(), &signed_block, kzg, spec) + .unwrap(); + + // Test reconstruction with columns in reverse order (non-ascending) + let mut 
subset_columns: Vec<_> =
+ column_sidecars.iter().as_slice()[0..column_sidecars.len() / 2].to_vec(); + subset_columns.reverse(); // This would fail without proper sorting in reconstruct_blobs + + let signed_blinded_block = signed_block.into(); + let reconstructed_blobs = + reconstruct_blobs(kzg, subset_columns, None, &signed_blinded_block, spec).unwrap(); + + for (i, original_blob) in blobs.iter().enumerate() { + let reconstructed_blob = &reconstructed_blobs.get(i).unwrap().blob; + assert_eq!(reconstructed_blob, original_blob, "{i}"); + } + } + fn get_kzg() -> Kzg { Kzg::new_from_trusted_setup(&get_trusted_setup()).expect("should create kzg") } diff --git a/beacon_node/http_api/src/block_id.rs b/beacon_node/http_api/src/block_id.rs index 778067c32bb..e088005f201 100644 --- a/beacon_node/http_api/src/block_id.rs +++ b/beacon_node/http_api/src/block_id.rs @@ -474,7 +474,7 @@ impl BlockId { ) .collect::, _>>()?; - reconstruct_blobs(&chain.kzg, &data_columns, blob_indices, block, &chain.spec).map_err( + reconstruct_blobs(&chain.kzg, data_columns, blob_indices, block, &chain.spec).map_err( |e| { warp_utils::reject::custom_server_error(format!( "Error reconstructing data columns: {e:?}" From 7ef9501ff66f0edb88b7680b4a24d65d3309b363 Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Tue, 2 Dec 2025 16:17:13 +1100 Subject: [PATCH 50/74] Instrument attestation signing. (#8508) We noticed attestation signing taking 2+ seconds on some of our hoodi nodes and the current traces don't provide enough details. This PR adds a few more spans to the `attestation_duty_cycle` code path in the VC. 
Before: image After: image Co-Authored-By: Jimmy Chen --- Cargo.lock | 1 + .../lighthouse_validator_store/src/lib.rs | 4 +- validator_client/signing_method/Cargo.toml | 1 + validator_client/signing_method/src/lib.rs | 2 + .../src/slashing_database.rs | 2 + .../src/attestation_service.rs | 133 +++++++++--------- 6 files changed, 77 insertions(+), 66 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e3730f132b3..7ddcad7239a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8336,6 +8336,7 @@ dependencies = [ "reqwest", "serde", "task_executor", + "tracing", "types", "url", "validator_metrics", diff --git a/validator_client/lighthouse_validator_store/src/lib.rs b/validator_client/lighthouse_validator_store/src/lib.rs index d10fecb32e4..dc8fb07b65f 100644 --- a/validator_client/lighthouse_validator_store/src/lib.rs +++ b/validator_client/lighthouse_validator_store/src/lib.rs @@ -15,7 +15,7 @@ use std::marker::PhantomData; use std::path::Path; use std::sync::Arc; use task_executor::TaskExecutor; -use tracing::{error, info, warn}; +use tracing::{error, info, instrument, warn}; use types::{ AbstractExecPayload, Address, AggregateAndProof, Attestation, BeaconBlock, BlindedPayload, ChainSpec, ContributionAndProof, Domain, Epoch, EthSpec, Fork, Graffiti, Hash256, @@ -242,6 +242,7 @@ impl LighthouseValidatorStore { /// Returns a `SigningMethod` for `validator_pubkey` *only if* that validator is considered safe /// by doppelganger protection. 
+ #[instrument(skip_all, level = "debug")] fn doppelganger_checked_signing_method( &self, validator_pubkey: PublicKeyBytes, @@ -745,6 +746,7 @@ impl ValidatorStore for LighthouseValidatorS } } + #[instrument(skip_all)] async fn sign_attestation( &self, validator_pubkey: PublicKeyBytes, diff --git a/validator_client/signing_method/Cargo.toml b/validator_client/signing_method/Cargo.toml index 3e1a48142f9..2defd25caaa 100644 --- a/validator_client/signing_method/Cargo.toml +++ b/validator_client/signing_method/Cargo.toml @@ -12,6 +12,7 @@ parking_lot = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } task_executor = { workspace = true } +tracing = { workspace = true } types = { workspace = true } url = { workspace = true } validator_metrics = { workspace = true } diff --git a/validator_client/signing_method/src/lib.rs b/validator_client/signing_method/src/lib.rs index c535415b1e9..7e0f2c02f7d 100644 --- a/validator_client/signing_method/src/lib.rs +++ b/validator_client/signing_method/src/lib.rs @@ -10,6 +10,7 @@ use reqwest::{Client, header::ACCEPT}; use std::path::PathBuf; use std::sync::Arc; use task_executor::TaskExecutor; +use tracing::instrument; use types::*; use url::Url; use web3signer::{ForkInfo, MessageType, SigningRequest, SigningResponse}; @@ -131,6 +132,7 @@ impl SigningMethod { } /// Return the signature of `signable_message`, with respect to the `signing_context`. 
+ #[instrument(skip_all, level = "debug")] pub async fn get_signature>( &self, signable_message: SignableMessage<'_, E, Payload>, diff --git a/validator_client/slashing_protection/src/slashing_database.rs b/validator_client/slashing_protection/src/slashing_database.rs index ce32299a511..00677212a3f 100644 --- a/validator_client/slashing_protection/src/slashing_database.rs +++ b/validator_client/slashing_protection/src/slashing_database.rs @@ -11,6 +11,7 @@ use rusqlite::{OptionalExtension, Transaction, TransactionBehavior, params}; use std::fs::File; use std::path::Path; use std::time::Duration; +use tracing::instrument; use types::{AttestationData, BeaconBlockHeader, Epoch, Hash256, PublicKeyBytes, SignedRoot, Slot}; type Pool = r2d2::Pool; @@ -639,6 +640,7 @@ impl SlashingDatabase { /// to prevent concurrent checks and inserts from resulting in slashable data being inserted. /// /// This is the safe, externally-callable interface for checking attestations. + #[instrument(skip_all, level = "debug")] pub fn check_and_insert_attestation( &self, validator_pubkey: &PublicKeyBytes, diff --git a/validator_client/validator_services/src/attestation_service.rs b/validator_client/validator_services/src/attestation_service.rs index a6ce67fae91..8211fb11f3e 100644 --- a/validator_client/validator_services/src/attestation_service.rs +++ b/validator_client/validator_services/src/attestation_service.rs @@ -8,7 +8,7 @@ use std::ops::Deref; use std::sync::Arc; use task_executor::TaskExecutor; use tokio::time::{Duration, Instant, sleep, sleep_until}; -use tracing::{Instrument, debug, error, info, info_span, instrument, trace, warn}; +use tracing::{Instrument, Span, debug, error, info, info_span, instrument, trace, warn}; use tree_hash::TreeHash; use types::{Attestation, AttestationData, ChainSpec, CommitteeIndex, EthSpec, Slot}; use validator_store::{Error as ValidatorStoreError, ValidatorStore}; @@ -369,79 +369,82 @@ impl AttestationService(attestation_data, &self.chain_spec) { - 
crit!( - validator = ?duty.pubkey, - duty_slot = %duty.slot, - attestation_slot = %attestation_data.slot, - duty_index = duty.committee_index, - attestation_index = attestation_data.index, - "Inconsistent validator duties during signing" - ); - return None; - } + let signing_futures = validator_duties.iter().map(|duty_and_proof| { + async move { + let duty = &duty_and_proof.duty; + let attestation_data = attestation_data_ref; - let mut attestation = match Attestation::empty_for_signing( - duty.committee_index, - duty.committee_length as usize, - attestation_data.slot, - attestation_data.beacon_block_root, - attestation_data.source, - attestation_data.target, - &self.chain_spec, - ) { - Ok(attestation) => attestation, - Err(err) => { + // Ensure that the attestation matches the duties. + if !duty.match_attestation_data::(attestation_data, &self.chain_spec) { crit!( validator = ?duty.pubkey, - ?duty, - ?err, - "Invalid validator duties during signing" + duty_slot = %duty.slot, + attestation_slot = %attestation_data.slot, + duty_index = duty.committee_index, + attestation_index = attestation_data.index, + "Inconsistent validator duties during signing" ); return None; } - }; - match self - .validator_store - .sign_attestation( - duty.pubkey, - duty.validator_committee_index as usize, - &mut attestation, - current_epoch, - ) - .await - { - Ok(()) => Some((attestation, duty.validator_index)), - Err(ValidatorStoreError::UnknownPubkey(pubkey)) => { - // A pubkey can be missing when a validator was recently - // removed via the API. 
- warn!( - info = "a validator may have recently been removed from this VC", - pubkey = ?pubkey, - validator = ?duty.pubkey, - committee_index = committee_index, - slot = slot.as_u64(), - "Missing pubkey for attestation" - ); - None - } - Err(e) => { - crit!( - error = ?e, - validator = ?duty.pubkey, - committee_index, - slot = slot.as_u64(), - "Failed to sign attestation" - ); - None + let mut attestation = match Attestation::empty_for_signing( + duty.committee_index, + duty.committee_length as usize, + attestation_data.slot, + attestation_data.beacon_block_root, + attestation_data.source, + attestation_data.target, + &self.chain_spec, + ) { + Ok(attestation) => attestation, + Err(err) => { + crit!( + validator = ?duty.pubkey, + ?duty, + ?err, + "Invalid validator duties during signing" + ); + return None; + } + }; + + match self + .validator_store + .sign_attestation( + duty.pubkey, + duty.validator_committee_index as usize, + &mut attestation, + current_epoch, + ) + .await + { + Ok(()) => Some((attestation, duty.validator_index)), + Err(ValidatorStoreError::UnknownPubkey(pubkey)) => { + // A pubkey can be missing when a validator was recently + // removed via the API. + warn!( + info = "a validator may have recently been removed from this VC", + pubkey = ?pubkey, + validator = ?duty.pubkey, + committee_index = committee_index, + slot = slot.as_u64(), + "Missing pubkey for attestation" + ); + None + } + Err(e) => { + crit!( + error = ?e, + validator = ?duty.pubkey, + committee_index, + slot = slot.as_u64(), + "Failed to sign attestation" + ); + None + } } } + .instrument(Span::current()) }); // Execute all the futures in parallel, collecting any successful results. 
From 0bccc7090c9dc27b57e860f7ab9aaeb83eb305e1 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Wed, 3 Dec 2025 09:45:47 +0800 Subject: [PATCH 51/74] Always use committee index 0 when getting attestation data (#8171) * #8046 Split the function `publish_attestations_and_aggregates` into `publish_attestations` and `handle_aggregates`, so that for attestations, only 1 task is spawned. Co-Authored-By: Tan Chee Keong Co-Authored-By: chonghe <44791194+chong-he@users.noreply.github.com> Co-Authored-By: Michael Sproul Co-Authored-By: Michael Sproul --- .../src/attestation_service.rs | 238 +++++++++--------- 1 file changed, 125 insertions(+), 113 deletions(-) diff --git a/validator_client/validator_services/src/attestation_service.rs b/validator_client/validator_services/src/attestation_service.rs index 8211fb11f3e..b2b8bc81e22 100644 --- a/validator_client/validator_services/src/attestation_service.rs +++ b/validator_client/validator_services/src/attestation_service.rs @@ -180,8 +180,9 @@ impl AttestationService Result<(), String> { let slot = self.slot_clock.now().ok_or("Failed to read slot clock")?; let duration_to_next_slot = self @@ -189,6 +190,53 @@ impl AttestationService = self.duties_service.attesters(slot).into_iter().collect(); + let attestation_service = self.clone(); + + let attestation_data_handle = self + .inner + .executor + .spawn_handle( + async move { + let attestation_data = attestation_service + .beacon_nodes + .first_success(|beacon_node| async move { + let _timer = validator_metrics::start_timer_vec( + &validator_metrics::ATTESTATION_SERVICE_TIMES, + &[validator_metrics::ATTESTATIONS_HTTP_GET], + ); + beacon_node + .get_validator_attestation_data(slot, 0) + .await + .map_err(|e| format!("Failed to produce attestation data: {:?}", e)) + .map(|result| result.data) + }) + .await + .map_err(|e| e.to_string())?; + + attestation_service + .sign_and_publish_attestations( + slot, + &attestation_duties, + 
attestation_data.clone(), + ) + .await + .map_err(|e| { + crit!( + error = format!("{:?}", e), + slot = slot.as_u64(), + "Error during attestation routine" + ); + e + })?; + Ok::(attestation_data) + }, + "unaggregated attestation production", + ) + .ok_or("Failed to spawn attestation data task")?; + // If a validator needs to publish an aggregate attestation, they must do so at 2/3 // through the slot. This delay triggers at this time let aggregate_production_instant = Instant::now() @@ -196,7 +244,7 @@ impl AttestationService> = self + let aggregate_duties_by_committee_index: HashMap> = self .duties_service .attesters(slot) .into_iter() @@ -207,24 +255,45 @@ impl AttestationService data, + Ok(Some(Err(err))) => { + error!(?err, "Attestation production failed"); + return; + } + Ok(None) | Err(_) => { + info!("Aborting attestation production due to shutdown"); + return; + } + }; + + // For each committee index for this slot: + // Create and publish `SignedAggregateAndProof` for all aggregating validators. + aggregate_duties_by_committee_index.into_iter().for_each( + |(committee_index, validator_duties)| { + let attestation_service = attestation_service_clone.clone(); + let attestation_data = attestation_data.clone(); + executor.spawn_ignoring_error( + attestation_service.handle_aggregates( + slot, + committee_index, + validator_duties, + aggregate_production_instant, + attestation_data, + ), + "aggregate publish", + ); + }, + ) + }, + "attestation and aggregate publish", + ); // Schedule pruning of the slashing protection database once all unaggregated // attestations have (hopefully) been signed, i.e. 
at the same time as aggregate @@ -234,114 +303,76 @@ impl AttestationService, aggregate_production_instant: Instant, + attestation_data: AttestationData, ) -> Result<(), ()> { - let attestations_timer = validator_metrics::start_timer_vec( - &validator_metrics::ATTESTATION_SERVICE_TIMES, - &[validator_metrics::ATTESTATIONS], - ); - - // There's not need to produce `Attestation` or `SignedAggregateAndProof` if we do not have + // There's not need to produce `SignedAggregateAndProof` if we do not have // any validators for the given `slot` and `committee_index`. if validator_duties.is_empty() { return Ok(()); } - // Step 1. - // - // Download, sign and publish an `Attestation` for each validator. - let attestation_opt = self - .produce_and_publish_attestations(slot, committee_index, &validator_duties) - .await - .map_err(move |e| { - crit!( - error = format!("{:?}", e), - committee_index, - slot = slot.as_u64(), - "Error during attestation routine" - ) - })?; + // Wait until the `aggregation_production_instant` (2/3rds + // of the way though the slot). As verified in the + // `delay_triggers_when_in_the_past` test, this code will still run + // even if the instant has already elapsed. + sleep_until(aggregate_production_instant).await; - drop(attestations_timer); - - // Step 2. - // - // If an attestation was produced, make an aggregate. - if let Some(attestation_data) = attestation_opt { - // First, wait until the `aggregation_production_instant` (2/3rds - // of the way though the slot). As verified in the - // `delay_triggers_when_in_the_past` test, this code will still run - // even if the instant has already elapsed. - sleep_until(aggregate_production_instant).await; - - // Start the metrics timer *after* we've done the delay. 
- let _aggregates_timer = validator_metrics::start_timer_vec( - &validator_metrics::ATTESTATION_SERVICE_TIMES, - &[validator_metrics::AGGREGATES], - ); - - // Then download, sign and publish a `SignedAggregateAndProof` for each - // validator that is elected to aggregate for this `slot` and - // `committee_index`. - self.produce_and_publish_aggregates( - &attestation_data, - committee_index, - &validator_duties, - ) + // Start the metrics timer *after* we've done the delay. + let _aggregates_timer = validator_metrics::start_timer_vec( + &validator_metrics::ATTESTATION_SERVICE_TIMES, + &[validator_metrics::AGGREGATES], + ); + + // Download, sign and publish a `SignedAggregateAndProof` for each + // validator that is elected to aggregate for this `slot` and + // `committee_index`. + self.produce_and_publish_aggregates(&attestation_data, committee_index, &validator_duties) .await .map_err(move |e| { crit!( error = format!("{:?}", e), committee_index, slot = slot.as_u64(), - "Error during attestation routine" + "Error during aggregate attestation routine" ) })?; - } Ok(()) } - /// Performs the first step of the attesting process: downloading `Attestation` objects, - /// signing them and returning them to the validator. + /// Performs the main steps of the attesting process: signing and publishing to the BN. /// - /// https://github.com/ethereum/eth2.0-specs/blob/v0.12.1/specs/phase0/validator.md#attesting + /// https://github.com/ethereum/consensus-specs/blob/master/specs/phase0/validator.md#attesting /// /// ## Detail /// /// The given `validator_duties` should already be filtered to only contain those that match - /// `slot` and `committee_index`. Critical errors will be logged if this is not the case. - /// - /// Only one `Attestation` is downloaded from the BN. It is then cloned and signed by each - /// validator and the list of individually-signed `Attestation` objects is returned to the BN. 
- #[instrument(skip_all, fields(%slot, %committee_index))] - async fn produce_and_publish_attestations( + /// `slot`. Critical errors will be logged if this is not the case. + #[instrument(skip_all, fields(%slot, %attestation_data.beacon_block_root))] + async fn sign_and_publish_attestations( &self, slot: Slot, - committee_index: CommitteeIndex, validator_duties: &[DutyAndProof], - ) -> Result, String> { + attestation_data: AttestationData, + ) -> Result<(), String> { + let _attestations_timer = validator_metrics::start_timer_vec( + &validator_metrics::ATTESTATION_SERVICE_TIMES, + &[validator_metrics::ATTESTATIONS], + ); + if validator_duties.is_empty() { - return Ok(None); + return Ok(()); } let current_epoch = self @@ -350,23 +381,6 @@ impl AttestationService AttestationService AttestationService AttestationService AttestationService Date: Wed, 3 Dec 2025 15:06:28 +1100 Subject: [PATCH 52/74] Move deposit contract artifacts to /target (#8518) Alternative to: - https://github.com/sigp/lighthouse/pull/8488 Refactors deposit_contract crate to comply with Rust build conventions by placing generated artifacts in the build output directory. Co-Authored-By: Michael Sproul --- common/deposit_contract/build.rs | 9 ++++----- common/deposit_contract/src/lib.rs | 22 ++++++++++++++++------ 2 files changed, 20 insertions(+), 11 deletions(-) diff --git a/common/deposit_contract/build.rs b/common/deposit_contract/build.rs index cae1d480c81..2061d13c243 100644 --- a/common/deposit_contract/build.rs +++ b/common/deposit_contract/build.rs @@ -153,14 +153,13 @@ fn verify_checksum(bytes: &[u8], expected_checksum: &str) { /// Returns the directory that will be used to store the deposit contract ABI. 
fn abi_dir() -> PathBuf { - let base = env::var("CARGO_MANIFEST_DIR") - .expect("should know manifest dir") + let base = env::var("OUT_DIR") + .expect("should know out dir") .parse::() - .expect("should parse manifest dir as path") - .join("contracts"); + .expect("should parse out dir as path"); std::fs::create_dir_all(base.clone()) - .expect("should be able to create abi directory in manifest"); + .expect("should be able to create abi directory in out dir"); base } diff --git a/common/deposit_contract/src/lib.rs b/common/deposit_contract/src/lib.rs index 12c3bdaa894..e5f11bb89c0 100644 --- a/common/deposit_contract/src/lib.rs +++ b/common/deposit_contract/src/lib.rs @@ -44,15 +44,25 @@ impl From for Error { pub const CONTRACT_DEPLOY_GAS: usize = 4_000_000; pub const DEPOSIT_GAS: usize = 400_000; -pub const ABI: &[u8] = include_bytes!("../contracts/v0.12.1_validator_registration.json"); -pub const BYTECODE: &[u8] = include_bytes!("../contracts/v0.12.1_validator_registration.bytecode"); +pub const ABI: &[u8] = include_bytes!(concat!( + env!("OUT_DIR"), + "/v0.12.1_validator_registration.json" +)); +pub const BYTECODE: &[u8] = include_bytes!(concat!( + env!("OUT_DIR"), + "/v0.12.1_validator_registration.bytecode" +)); pub const DEPOSIT_DATA_LEN: usize = 420; // lol pub mod testnet { - pub const ABI: &[u8] = - include_bytes!("../contracts/v0.12.1_testnet_validator_registration.json"); - pub const BYTECODE: &[u8] = - include_bytes!("../contracts/v0.12.1_testnet_validator_registration.bytecode"); + pub const ABI: &[u8] = include_bytes!(concat!( + env!("OUT_DIR"), + "/v0.12.1_testnet_validator_registration.json" + )); + pub const BYTECODE: &[u8] = include_bytes!(concat!( + env!("OUT_DIR"), + "/v0.12.1_testnet_validator_registration.bytecode" + )); } pub fn encode_eth1_tx_data(deposit_data: &DepositData) -> Result, Error> { From 51d033602098d75da1e921025bb7665df07e5ae3 Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Thu, 4 Dec 2025 17:58:57 +1100 Subject: [PATCH 53/74] 
Move beacon state endpoints to a separate module. (#8529) Part of the http api refactor to move endpoint handlers to separate modules. This should improve code maintainability, incremental compilation time and rust analyzer performance. Co-Authored-By: Jimmy Chen --- beacon_node/http_api/src/beacon/mod.rs | 1 + beacon_node/http_api/src/beacon/states.rs | 787 ++++++++++++++++++++++ beacon_node/http_api/src/lib.rs | 705 ++----------------- beacon_node/http_api/src/utils.rs | 3 + 4 files changed, 830 insertions(+), 666 deletions(-) create mode 100644 beacon_node/http_api/src/beacon/mod.rs create mode 100644 beacon_node/http_api/src/beacon/states.rs create mode 100644 beacon_node/http_api/src/utils.rs diff --git a/beacon_node/http_api/src/beacon/mod.rs b/beacon_node/http_api/src/beacon/mod.rs new file mode 100644 index 00000000000..20394784ae7 --- /dev/null +++ b/beacon_node/http_api/src/beacon/mod.rs @@ -0,0 +1 @@ +pub mod states; diff --git a/beacon_node/http_api/src/beacon/states.rs b/beacon_node/http_api/src/beacon/states.rs new file mode 100644 index 00000000000..6d06bcc77d6 --- /dev/null +++ b/beacon_node/http_api/src/beacon/states.rs @@ -0,0 +1,787 @@ +use crate::StateId; +use crate::task_spawner::{Priority, TaskSpawner}; +use crate::utils::ResponseFilter; +use crate::validator::pubkey_to_validator_index; +use crate::version::{ + ResponseIncludesVersion, add_consensus_version_header, + execution_optimistic_finalized_beacon_response, +}; +use beacon_chain::{BeaconChain, BeaconChainError, BeaconChainTypes, WhenSlotSkipped}; +use eth2::types::{ + ValidatorBalancesRequestBody, ValidatorId, ValidatorIdentitiesRequestBody, + ValidatorsRequestBody, +}; +use std::sync::Arc; +use types::{ + AttestationShufflingId, CommitteeCache, Error as BeaconStateError, EthSpec, RelativeEpoch, +}; +use warp::filters::BoxedFilter; +use warp::{Filter, Reply}; +use warp_utils::query::multi_key_query; + +type BeaconStatesPath = BoxedFilter<( + StateId, + TaskSpawner<::EthSpec>, + Arc>, 
+)>; + +// GET beacon/states/{state_id}/pending_consolidations +pub fn get_beacon_state_pending_consolidations( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("pending_consolidations")) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_response_task(Priority::P1, move || { + let (data, execution_optimistic, finalized, fork_name) = state_id + .map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + let Ok(consolidations) = state.pending_consolidations() else { + return Err(warp_utils::reject::custom_bad_request( + "Pending consolidations not found".to_string(), + )); + }; + + Ok(( + consolidations.clone(), + execution_optimistic, + finalized, + state.fork_name_unchecked(), + )) + }, + )?; + + execution_optimistic_finalized_beacon_response( + ResponseIncludesVersion::Yes(fork_name), + execution_optimistic, + finalized, + data, + ) + .map(|res| warp::reply::json(&res).into_response()) + .map(|resp| add_consensus_version_header(resp, fork_name)) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/pending_partial_withdrawals +pub fn get_beacon_state_pending_partial_withdrawals( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("pending_partial_withdrawals")) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_response_task(Priority::P1, move || { + let (data, execution_optimistic, finalized, fork_name) = state_id + .map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + let Ok(withdrawals) = state.pending_partial_withdrawals() else { + return Err(warp_utils::reject::custom_bad_request( + "Pending withdrawals not found".to_string(), + )); + }; + + Ok(( + withdrawals.clone(), + 
execution_optimistic, + finalized, + state.fork_name_unchecked(), + )) + }, + )?; + + execution_optimistic_finalized_beacon_response( + ResponseIncludesVersion::Yes(fork_name), + execution_optimistic, + finalized, + data, + ) + .map(|res| warp::reply::json(&res).into_response()) + .map(|resp| add_consensus_version_header(resp, fork_name)) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/pending_deposits +pub fn get_beacon_state_pending_deposits( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("pending_deposits")) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_response_task(Priority::P1, move || { + let (data, execution_optimistic, finalized, fork_name) = state_id + .map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + let Ok(deposits) = state.pending_deposits() else { + return Err(warp_utils::reject::custom_bad_request( + "Pending deposits not found".to_string(), + )); + }; + + Ok(( + deposits.clone(), + execution_optimistic, + finalized, + state.fork_name_unchecked(), + )) + }, + )?; + + execution_optimistic_finalized_beacon_response( + ResponseIncludesVersion::Yes(fork_name), + execution_optimistic, + finalized, + data, + ) + .map(|res| warp::reply::json(&res).into_response()) + .map(|resp| add_consensus_version_header(resp, fork_name)) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/randao?epoch +pub fn get_beacon_state_randao( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("randao")) + .and(warp::query::()) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query: eth2::types::RandaoQuery| { + task_spawner.blocking_json_task(Priority::P1, move || { + let (randao, execution_optimistic, finalized) = state_id + 
.map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + let epoch = query.epoch.unwrap_or_else(|| state.current_epoch()); + let randao = *state.get_randao_mix(epoch).map_err(|e| { + warp_utils::reject::custom_bad_request(format!( + "epoch out of range: {e:?}" + )) + })?; + Ok((randao, execution_optimistic, finalized)) + }, + )?; + + Ok( + eth2::types::GenericResponse::from(eth2::types::RandaoMix { randao }) + .add_execution_optimistic_finalized(execution_optimistic, finalized), + ) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/sync_committees?epoch +pub fn get_beacon_state_sync_committees( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("sync_committees")) + .and(warp::query::()) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query: eth2::types::SyncCommitteesQuery| { + task_spawner.blocking_json_task(Priority::P1, move || { + let (sync_committee, execution_optimistic, finalized) = state_id + .map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + let current_epoch = state.current_epoch(); + let epoch = query.epoch.unwrap_or(current_epoch); + Ok(( + state + .get_built_sync_committee(epoch, &chain.spec) + .cloned() + .map_err(|e| match e { + BeaconStateError::SyncCommitteeNotKnown { .. 
} => { + warp_utils::reject::custom_bad_request(format!( + "state at epoch {} has no \ + sync committee for epoch {}", + current_epoch, epoch + )) + } + BeaconStateError::IncorrectStateVariant => { + warp_utils::reject::custom_bad_request(format!( + "state at epoch {} is not activated for Altair", + current_epoch, + )) + } + e => warp_utils::reject::beacon_state_error(e), + })?, + execution_optimistic, + finalized, + )) + }, + )?; + + let validators = chain + .validator_indices(sync_committee.pubkeys.iter()) + .map_err(warp_utils::reject::unhandled_error)?; + + let validator_aggregates = validators + .chunks_exact(T::EthSpec::sync_subcommittee_size()) + .map(|indices| eth2::types::SyncSubcommittee { + indices: indices.to_vec(), + }) + .collect(); + + let response = eth2::types::SyncCommitteeByValidatorIndices { + validators, + validator_aggregates, + }; + + Ok(eth2::types::GenericResponse::from(response) + .add_execution_optimistic_finalized(execution_optimistic, finalized)) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/committees?slot,index,epoch +pub fn get_beacon_state_committees( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("committees")) + .and(warp::query::()) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query: eth2::types::CommitteesQuery| { + task_spawner.blocking_json_task(Priority::P1, move || { + let (data, execution_optimistic, finalized) = state_id + .map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + let current_epoch = state.current_epoch(); + let epoch = query.epoch.unwrap_or(current_epoch); + + // Attempt to obtain the committee_cache from the beacon chain + let decision_slot = (epoch.saturating_sub(2u64)) + .end_slot(T::EthSpec::slots_per_epoch()); + // Find the decision block and skip to another method on any kind + // of failure + let shuffling_id = 
if let Ok(Some(shuffling_decision_block)) = + chain.block_root_at_slot(decision_slot, WhenSlotSkipped::Prev) + { + Some(AttestationShufflingId { + shuffling_epoch: epoch, + shuffling_decision_block, + }) + } else { + None + }; + + // Attempt to read from the chain cache if there exists a + // shuffling_id + let maybe_cached_shuffling = if let Some(shuffling_id) = + shuffling_id.as_ref() + { + chain + .shuffling_cache + .try_write_for(std::time::Duration::from_secs(1)) + .and_then(|mut cache_write| cache_write.get(shuffling_id)) + .and_then(|cache_item| cache_item.wait().ok()) + } else { + None + }; + + let committee_cache = + if let Some(shuffling) = maybe_cached_shuffling { + shuffling + } else { + let possibly_built_cache = + match RelativeEpoch::from_epoch(current_epoch, epoch) { + Ok(relative_epoch) + if state.committee_cache_is_initialized( + relative_epoch, + ) => + { + state.committee_cache(relative_epoch).cloned() + } + _ => CommitteeCache::initialized( + state, + epoch, + &chain.spec, + ), + } + .map_err( + |e| match e { + BeaconStateError::EpochOutOfBounds => { + let max_sprp = + T::EthSpec::slots_per_historical_root() + as u64; + let first_subsequent_restore_point_slot = + ((epoch.start_slot( + T::EthSpec::slots_per_epoch(), + ) / max_sprp) + + 1) + * max_sprp; + if epoch < current_epoch { + warp_utils::reject::custom_bad_request( + format!( + "epoch out of bounds, \ + try state at slot {}", + first_subsequent_restore_point_slot, + ), + ) + } else { + warp_utils::reject::custom_bad_request( + "epoch out of bounds, \ + too far in future" + .into(), + ) + } + } + _ => warp_utils::reject::unhandled_error( + BeaconChainError::from(e), + ), + }, + )?; + + // Attempt to write to the beacon cache (only if the cache + // size is not the default value). 
+ if chain.config.shuffling_cache_size + != beacon_chain::shuffling_cache::DEFAULT_CACHE_SIZE + && let Some(shuffling_id) = shuffling_id + && let Some(mut cache_write) = chain + .shuffling_cache + .try_write_for(std::time::Duration::from_secs(1)) + { + cache_write.insert_committee_cache( + shuffling_id, + &possibly_built_cache, + ); + } + + possibly_built_cache + }; + + // Use either the supplied slot or all slots in the epoch. + let slots = + query.slot.map(|slot| vec![slot]).unwrap_or_else(|| { + epoch.slot_iter(T::EthSpec::slots_per_epoch()).collect() + }); + + // Use either the supplied committee index or all available indices. + let indices = + query.index.map(|index| vec![index]).unwrap_or_else(|| { + (0..committee_cache.committees_per_slot()).collect() + }); + + let mut response = Vec::with_capacity(slots.len() * indices.len()); + + for slot in slots { + // It is not acceptable to query with a slot that is not within the + // specified epoch. + if slot.epoch(T::EthSpec::slots_per_epoch()) != epoch { + return Err(warp_utils::reject::custom_bad_request( + format!("{} is not in epoch {}", slot, epoch), + )); + } + + for &index in &indices { + let committee = committee_cache + .get_beacon_committee(slot, index) + .ok_or_else(|| { + warp_utils::reject::custom_bad_request(format!( + "committee index {} does not exist in epoch {}", + index, epoch + )) + })?; + + response.push(eth2::types::CommitteeData { + index, + slot, + validators: committee + .committee + .iter() + .map(|i| *i as u64) + .collect(), + }); + } + } + + Ok((response, execution_optimistic, finalized)) + }, + )?; + Ok(eth2::types::ExecutionOptimisticFinalizedResponse { + data, + execution_optimistic: Some(execution_optimistic), + finalized: Some(finalized), + }) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/validators/{validator_id} +pub fn get_beacon_state_validators_id( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + 
.and(warp::path("validators")) + .and(warp::path::param::().or_else(|_| async { + Err(warp_utils::reject::custom_bad_request( + "Invalid validator ID".to_string(), + )) + })) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + validator_id: ValidatorId| { + // Prioritise requests for validators at the head. These should be fast to service + // and could be required by the validator client. + let priority = if let StateId(eth2::types::StateId::Head) = state_id { + Priority::P0 + } else { + Priority::P1 + }; + task_spawner.blocking_json_task(priority, move || { + let (data, execution_optimistic, finalized) = state_id + .map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + let index_opt = match &validator_id { + ValidatorId::PublicKey(pubkey) => pubkey_to_validator_index( + &chain, state, pubkey, + ) + .map_err(|e| { + warp_utils::reject::custom_not_found(format!( + "unable to access pubkey cache: {e:?}", + )) + })?, + ValidatorId::Index(index) => Some(*index as usize), + }; + + Ok(( + index_opt + .and_then(|index| { + let validator = state.validators().get(index)?; + let balance = *state.balances().get(index)?; + let epoch = state.current_epoch(); + let far_future_epoch = chain.spec.far_future_epoch; + + Some(eth2::types::ValidatorData { + index: index as u64, + balance, + status: + eth2::types::ValidatorStatus::from_validator( + validator, + epoch, + far_future_epoch, + ), + validator: validator.clone(), + }) + }) + .ok_or_else(|| { + warp_utils::reject::custom_not_found(format!( + "unknown validator: {}", + validator_id + )) + })?, + execution_optimistic, + finalized, + )) + }, + )?; + + Ok(eth2::types::ExecutionOptimisticFinalizedResponse { + data, + execution_optimistic: Some(execution_optimistic), + finalized: Some(finalized), + }) + }) + }, + ) + .boxed() +} + +// POST beacon/states/{state_id}/validators +pub fn post_beacon_state_validators( + 
beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("validators")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query: ValidatorsRequestBody| { + // Prioritise requests for validators at the head. These should be fast to service + // and could be required by the validator client. + let priority = if let StateId(eth2::types::StateId::Head) = state_id { + Priority::P0 + } else { + Priority::P1 + }; + task_spawner.blocking_json_task(priority, move || { + crate::validators::get_beacon_state_validators( + state_id, + chain, + &query.ids, + &query.statuses, + ) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/validators?id,status +pub fn get_beacon_state_validators( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("validators")) + .and(warp::path::end()) + .and(multi_key_query::()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query_res: Result| { + // Prioritise requests for validators at the head. These should be fast to service + // and could be required by the validator client. 
+ let priority = if let StateId(eth2::types::StateId::Head) = state_id { + Priority::P0 + } else { + Priority::P1 + }; + task_spawner.blocking_json_task(priority, move || { + let query = query_res?; + crate::validators::get_beacon_state_validators( + state_id, + chain, + &query.id, + &query.status, + ) + }) + }, + ) + .boxed() +} + +// POST beacon/states/{state_id}/validator_identities +pub fn post_beacon_state_validator_identities( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("validator_identities")) + .and(warp::path::end()) + .and(warp_utils::json::json_no_body()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query: ValidatorIdentitiesRequestBody| { + // Prioritise requests for validators at the head. These should be fast to service + // and could be required by the validator client. + let priority = if let StateId(eth2::types::StateId::Head) = state_id { + Priority::P0 + } else { + Priority::P1 + }; + task_spawner.blocking_json_task(priority, move || { + crate::validators::get_beacon_state_validator_identities( + state_id, + chain, + Some(&query.ids), + ) + }) + }, + ) + .boxed() +} + +// POST beacon/states/{state_id}/validator_balances +pub fn post_beacon_state_validator_balances( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("validator_balances")) + .and(warp::path::end()) + .and(warp_utils::json::json_no_body()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query: ValidatorBalancesRequestBody| { + task_spawner.blocking_json_task(Priority::P1, move || { + crate::validators::get_beacon_state_validator_balances( + state_id, + chain, + Some(&query.ids), + ) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/validator_balances?id +pub fn get_beacon_state_validator_balances( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + 
.clone() + .and(warp::path("validator_balances")) + .and(warp::path::end()) + .and(multi_key_query::()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>, + query_res: Result| { + task_spawner.blocking_json_task(Priority::P1, move || { + let query = query_res?; + crate::validators::get_beacon_state_validator_balances( + state_id, + chain, + query.id.as_deref(), + ) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/finality_checkpoints +pub fn get_beacon_state_finality_checkpoints( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("finality_checkpoints")) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P1, move || { + let (data, execution_optimistic, finalized) = state_id + .map_state_and_execution_optimistic_and_finalized( + &chain, + |state, execution_optimistic, finalized| { + Ok(( + eth2::types::FinalityCheckpointsData { + previous_justified: state.previous_justified_checkpoint(), + current_justified: state.current_justified_checkpoint(), + finalized: state.finalized_checkpoint(), + }, + execution_optimistic, + finalized, + )) + }, + )?; + + Ok(eth2::types::ExecutionOptimisticFinalizedResponse { + data, + execution_optimistic: Some(execution_optimistic), + finalized: Some(finalized), + }) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/fork +pub fn get_beacon_state_fork( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .clone() + .and(warp::path("fork")) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P1, move || { + let (fork, execution_optimistic, finalized) = + state_id.fork_and_execution_optimistic_and_finalized(&chain)?; + Ok(eth2::types::ExecutionOptimisticFinalizedResponse { + data: fork, + execution_optimistic: 
Some(execution_optimistic), + finalized: Some(finalized), + }) + }) + }, + ) + .boxed() +} + +// GET beacon/states/{state_id}/root +pub fn get_beacon_state_root( + beacon_states_path: BeaconStatesPath, +) -> ResponseFilter { + beacon_states_path + .and(warp::path("root")) + .and(warp::path::end()) + .then( + |state_id: StateId, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P1, move || { + let (root, execution_optimistic, finalized) = state_id.root(&chain)?; + Ok(eth2::types::GenericResponse::from( + eth2::types::RootData::from(root), + )) + .map(|resp| { + resp.add_execution_optimistic_finalized(execution_optimistic, finalized) + }) + }) + }, + ) + .boxed() +} diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index 6389b34961a..ccd0698161e 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -8,6 +8,7 @@ mod aggregate_attestation; mod attestation_performance; mod attester_duties; +mod beacon; mod block_id; mod block_packing_efficiency; mod block_rewards; @@ -29,13 +30,16 @@ mod sync_committees; mod task_spawner; pub mod test_utils; mod ui; +mod utils; mod validator; mod validator_inclusion; mod validators; mod version; + use crate::light_client::{get_light_client_bootstrap, get_light_client_updates}; use crate::produce_block::{produce_blinded_block_v2, produce_block_v2, produce_block_v3}; use crate::version::beacon_response; +use beacon::states; use beacon_chain::{ AttestationError as AttnError, BeaconChain, BeaconChainError, BeaconChainTypes, WhenSlotSkipped, attestation_verification::VerifiedAttestation, @@ -50,8 +54,7 @@ use eth2::StatusCode; use eth2::types::{ self as api_types, BroadcastValidation, ContextDeserialize, EndpointVersion, ForkChoice, ForkChoiceExtraData, ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, - StateId as CoreStateId, ValidatorBalancesRequestBody, ValidatorId, - ValidatorIdentitiesRequestBody, ValidatorStatus, 
ValidatorsRequestBody, + StateId as CoreStateId, ValidatorId, ValidatorStatus, }; use eth2::{CONSENSUS_VERSION_HEADER, CONTENT_TYPE_HEADER, SSZ_CONTENT_TYPE_HEADER}; use health_metrics::observe::Observe; @@ -90,14 +93,12 @@ use tokio_stream::{ }; use tracing::{debug, error, info, warn}; use types::{ - Attestation, AttestationData, AttestationShufflingId, AttesterSlashing, BeaconStateError, - ChainSpec, Checkpoint, CommitteeCache, ConfigAndPreset, Epoch, EthSpec, ForkName, Hash256, - ProposerPreparationData, ProposerSlashing, RelativeEpoch, SignedAggregateAndProof, - SignedBlindedBeaconBlock, SignedBlsToExecutionChange, SignedContributionAndProof, - SignedValidatorRegistrationData, SignedVoluntaryExit, SingleAttestation, Slot, - SyncCommitteeMessage, SyncContributionData, + Attestation, AttestationData, AttesterSlashing, BeaconStateError, ChainSpec, Checkpoint, + ConfigAndPreset, Epoch, EthSpec, ForkName, Hash256, ProposerPreparationData, ProposerSlashing, + SignedAggregateAndProof, SignedBlindedBeaconBlock, SignedBlsToExecutionChange, + SignedContributionAndProof, SignedValidatorRegistrationData, SignedVoluntaryExit, + SingleAttestation, Slot, SyncCommitteeMessage, SyncContributionData, }; -use validator::pubkey_to_validator_index; use version::{ ResponseIncludesVersion, V1, V2, V3, add_consensus_version_header, add_ssz_content_type_header, execution_optimistic_finalized_beacon_response, inconsistent_fork_rejection, @@ -583,693 +584,65 @@ pub fn serve( )) })) .and(task_spawner_filter.clone()) - .and(chain_filter.clone()); + .and(chain_filter.clone()) + .boxed(); // GET beacon/states/{state_id}/root - let get_beacon_state_root = beacon_states_path - .clone() - .and(warp::path("root")) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P1, move || { - let (root, execution_optimistic, finalized) = state_id.root(&chain)?; - 
Ok(api_types::GenericResponse::from(api_types::RootData::from( - root, - ))) - .map(|resp| { - resp.add_execution_optimistic_finalized(execution_optimistic, finalized) - }) - }) - }, - ); + let get_beacon_state_root = states::get_beacon_state_root(beacon_states_path.clone()); // GET beacon/states/{state_id}/fork - let get_beacon_state_fork = beacon_states_path - .clone() - .and(warp::path("fork")) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P1, move || { - let (fork, execution_optimistic, finalized) = - state_id.fork_and_execution_optimistic_and_finalized(&chain)?; - Ok(api_types::ExecutionOptimisticFinalizedResponse { - data: fork, - execution_optimistic: Some(execution_optimistic), - finalized: Some(finalized), - }) - }) - }, - ); + let get_beacon_state_fork = states::get_beacon_state_fork(beacon_states_path.clone()); // GET beacon/states/{state_id}/finality_checkpoints - let get_beacon_state_finality_checkpoints = beacon_states_path - .clone() - .and(warp::path("finality_checkpoints")) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P1, move || { - let (data, execution_optimistic, finalized) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - Ok(( - api_types::FinalityCheckpointsData { - previous_justified: state.previous_justified_checkpoint(), - current_justified: state.current_justified_checkpoint(), - finalized: state.finalized_checkpoint(), - }, - execution_optimistic, - finalized, - )) - }, - )?; - - Ok(api_types::ExecutionOptimisticFinalizedResponse { - data, - execution_optimistic: Some(execution_optimistic), - finalized: Some(finalized), - }) - }) - }, - ); + let get_beacon_state_finality_checkpoints = + states::get_beacon_state_finality_checkpoints(beacon_states_path.clone()); // GET 
beacon/states/{state_id}/validator_balances?id - let get_beacon_state_validator_balances = beacon_states_path - .clone() - .and(warp::path("validator_balances")) - .and(warp::path::end()) - .and(multi_key_query::()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query_res: Result| { - task_spawner.blocking_json_task(Priority::P1, move || { - let query = query_res?; - crate::validators::get_beacon_state_validator_balances( - state_id, - chain, - query.id.as_deref(), - ) - }) - }, - ); + let get_beacon_state_validator_balances = + states::get_beacon_state_validator_balances(beacon_states_path.clone()); // POST beacon/states/{state_id}/validator_balances - let post_beacon_state_validator_balances = beacon_states_path - .clone() - .and(warp::path("validator_balances")) - .and(warp::path::end()) - .and(warp_utils::json::json_no_body()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query: ValidatorBalancesRequestBody| { - task_spawner.blocking_json_task(Priority::P1, move || { - crate::validators::get_beacon_state_validator_balances( - state_id, - chain, - Some(&query.ids), - ) - }) - }, - ); + let post_beacon_state_validator_balances = + states::post_beacon_state_validator_balances(beacon_states_path.clone()); // POST beacon/states/{state_id}/validator_identities - let post_beacon_state_validator_identities = beacon_states_path - .clone() - .and(warp::path("validator_identities")) - .and(warp::path::end()) - .and(warp_utils::json::json_no_body()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query: ValidatorIdentitiesRequestBody| { - // Prioritise requests for validators at the head. These should be fast to service - // and could be required by the validator client. 
- let priority = if let StateId(eth2::types::StateId::Head) = state_id { - Priority::P0 - } else { - Priority::P1 - }; - task_spawner.blocking_json_task(priority, move || { - crate::validators::get_beacon_state_validator_identities( - state_id, - chain, - Some(&query.ids), - ) - }) - }, - ); + let post_beacon_state_validator_identities = + states::post_beacon_state_validator_identities(beacon_states_path.clone()); // GET beacon/states/{state_id}/validators?id,status - let get_beacon_state_validators = beacon_states_path - .clone() - .and(warp::path("validators")) - .and(warp::path::end()) - .and(multi_key_query::()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query_res: Result| { - // Prioritise requests for validators at the head. These should be fast to service - // and could be required by the validator client. - let priority = if let StateId(eth2::types::StateId::Head) = state_id { - Priority::P0 - } else { - Priority::P1 - }; - task_spawner.blocking_json_task(priority, move || { - let query = query_res?; - crate::validators::get_beacon_state_validators( - state_id, - chain, - &query.id, - &query.status, - ) - }) - }, - ); + let get_beacon_state_validators = + states::get_beacon_state_validators(beacon_states_path.clone()); // POST beacon/states/{state_id}/validators - let post_beacon_state_validators = beacon_states_path - .clone() - .and(warp::path("validators")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query: ValidatorsRequestBody| { - // Prioritise requests for validators at the head. These should be fast to service - // and could be required by the validator client. 
- let priority = if let StateId(eth2::types::StateId::Head) = state_id { - Priority::P0 - } else { - Priority::P1 - }; - task_spawner.blocking_json_task(priority, move || { - crate::validators::get_beacon_state_validators( - state_id, - chain, - &query.ids, - &query.statuses, - ) - }) - }, - ); + let post_beacon_state_validators = + states::post_beacon_state_validators(beacon_states_path.clone()); // GET beacon/states/{state_id}/validators/{validator_id} - let get_beacon_state_validators_id = beacon_states_path - .clone() - .and(warp::path("validators")) - .and(warp::path::param::().or_else(|_| async { - Err(warp_utils::reject::custom_bad_request( - "Invalid validator ID".to_string(), - )) - })) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - validator_id: ValidatorId| { - // Prioritise requests for validators at the head. These should be fast to service - // and could be required by the validator client. - let priority = if let StateId(eth2::types::StateId::Head) = state_id { - Priority::P0 - } else { - Priority::P1 - }; - task_spawner.blocking_json_task(priority, move || { - let (data, execution_optimistic, finalized) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - let index_opt = match &validator_id { - ValidatorId::PublicKey(pubkey) => pubkey_to_validator_index( - &chain, state, pubkey, - ) - .map_err(|e| { - warp_utils::reject::custom_not_found(format!( - "unable to access pubkey cache: {e:?}", - )) - })?, - ValidatorId::Index(index) => Some(*index as usize), - }; - - Ok(( - index_opt - .and_then(|index| { - let validator = state.validators().get(index)?; - let balance = *state.balances().get(index)?; - let epoch = state.current_epoch(); - let far_future_epoch = chain.spec.far_future_epoch; - - Some(api_types::ValidatorData { - index: index as u64, - balance, - status: api_types::ValidatorStatus::from_validator( - validator, - epoch, 
- far_future_epoch, - ), - validator: validator.clone(), - }) - }) - .ok_or_else(|| { - warp_utils::reject::custom_not_found(format!( - "unknown validator: {}", - validator_id - )) - })?, - execution_optimistic, - finalized, - )) - }, - )?; - - Ok(api_types::ExecutionOptimisticFinalizedResponse { - data, - execution_optimistic: Some(execution_optimistic), - finalized: Some(finalized), - }) - }) - }, - ); + let get_beacon_state_validators_id = + states::get_beacon_state_validators_id(beacon_states_path.clone()); // GET beacon/states/{state_id}/committees?slot,index,epoch - let get_beacon_state_committees = beacon_states_path - .clone() - .and(warp::path("committees")) - .and(warp::query::()) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query: api_types::CommitteesQuery| { - task_spawner.blocking_json_task(Priority::P1, move || { - let (data, execution_optimistic, finalized) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - let current_epoch = state.current_epoch(); - let epoch = query.epoch.unwrap_or(current_epoch); - - // Attempt to obtain the committee_cache from the beacon chain - let decision_slot = (epoch.saturating_sub(2u64)) - .end_slot(T::EthSpec::slots_per_epoch()); - // Find the decision block and skip to another method on any kind - // of failure - let shuffling_id = if let Ok(Some(shuffling_decision_block)) = - chain.block_root_at_slot(decision_slot, WhenSlotSkipped::Prev) - { - Some(AttestationShufflingId { - shuffling_epoch: epoch, - shuffling_decision_block, - }) - } else { - None - }; - - // Attempt to read from the chain cache if there exists a - // shuffling_id - let maybe_cached_shuffling = if let Some(shuffling_id) = - shuffling_id.as_ref() - { - chain - .shuffling_cache - .try_write_for(std::time::Duration::from_secs(1)) - .and_then(|mut cache_write| cache_write.get(shuffling_id)) - .and_then(|cache_item| 
cache_item.wait().ok()) - } else { - None - }; - - let committee_cache = - if let Some(shuffling) = maybe_cached_shuffling { - shuffling - } else { - let possibly_built_cache = - match RelativeEpoch::from_epoch(current_epoch, epoch) { - Ok(relative_epoch) - if state.committee_cache_is_initialized( - relative_epoch, - ) => - { - state.committee_cache(relative_epoch).cloned() - } - _ => CommitteeCache::initialized( - state, - epoch, - &chain.spec, - ), - } - .map_err( - |e| match e { - BeaconStateError::EpochOutOfBounds => { - let max_sprp = - T::EthSpec::slots_per_historical_root() - as u64; - let first_subsequent_restore_point_slot = - ((epoch.start_slot( - T::EthSpec::slots_per_epoch(), - ) / max_sprp) - + 1) - * max_sprp; - if epoch < current_epoch { - warp_utils::reject::custom_bad_request( - format!( - "epoch out of bounds, \ - try state at slot {}", - first_subsequent_restore_point_slot, - ), - ) - } else { - warp_utils::reject::custom_bad_request( - "epoch out of bounds, \ - too far in future" - .into(), - ) - } - } - _ => warp_utils::reject::unhandled_error( - BeaconChainError::from(e), - ), - }, - )?; - - // Attempt to write to the beacon cache (only if the cache - // size is not the default value). - if chain.config.shuffling_cache_size - != beacon_chain::shuffling_cache::DEFAULT_CACHE_SIZE - && let Some(shuffling_id) = shuffling_id - && let Some(mut cache_write) = chain - .shuffling_cache - .try_write_for(std::time::Duration::from_secs(1)) - { - cache_write.insert_committee_cache( - shuffling_id, - &possibly_built_cache, - ); - } - - possibly_built_cache - }; - - // Use either the supplied slot or all slots in the epoch. - let slots = - query.slot.map(|slot| vec![slot]).unwrap_or_else(|| { - epoch.slot_iter(T::EthSpec::slots_per_epoch()).collect() - }); - - // Use either the supplied committee index or all available indices. 
- let indices = - query.index.map(|index| vec![index]).unwrap_or_else(|| { - (0..committee_cache.committees_per_slot()).collect() - }); - - let mut response = Vec::with_capacity(slots.len() * indices.len()); - - for slot in slots { - // It is not acceptable to query with a slot that is not within the - // specified epoch. - if slot.epoch(T::EthSpec::slots_per_epoch()) != epoch { - return Err(warp_utils::reject::custom_bad_request( - format!("{} is not in epoch {}", slot, epoch), - )); - } - - for &index in &indices { - let committee = committee_cache - .get_beacon_committee(slot, index) - .ok_or_else(|| { - warp_utils::reject::custom_bad_request(format!( - "committee index {} does not exist in epoch {}", - index, epoch - )) - })?; - - response.push(api_types::CommitteeData { - index, - slot, - validators: committee - .committee - .iter() - .map(|i| *i as u64) - .collect(), - }); - } - } - - Ok((response, execution_optimistic, finalized)) - }, - )?; - Ok(api_types::ExecutionOptimisticFinalizedResponse { - data, - execution_optimistic: Some(execution_optimistic), - finalized: Some(finalized), - }) - }) - }, - ); + let get_beacon_state_committees = + states::get_beacon_state_committees(beacon_states_path.clone()); // GET beacon/states/{state_id}/sync_committees?epoch - let get_beacon_state_sync_committees = beacon_states_path - .clone() - .and(warp::path("sync_committees")) - .and(warp::query::()) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query: api_types::SyncCommitteesQuery| { - task_spawner.blocking_json_task(Priority::P1, move || { - let (sync_committee, execution_optimistic, finalized) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - let current_epoch = state.current_epoch(); - let epoch = query.epoch.unwrap_or(current_epoch); - Ok(( - state - .get_built_sync_committee(epoch, &chain.spec) - .cloned() - .map_err(|e| match e { - 
BeaconStateError::SyncCommitteeNotKnown { .. } => { - warp_utils::reject::custom_bad_request(format!( - "state at epoch {} has no \ - sync committee for epoch {}", - current_epoch, epoch - )) - } - BeaconStateError::IncorrectStateVariant => { - warp_utils::reject::custom_bad_request(format!( - "state at epoch {} is not activated for Altair", - current_epoch, - )) - } - e => warp_utils::reject::beacon_state_error(e), - })?, - execution_optimistic, - finalized, - )) - }, - )?; - - let validators = chain - .validator_indices(sync_committee.pubkeys.iter()) - .map_err(warp_utils::reject::unhandled_error)?; - - let validator_aggregates = validators - .chunks_exact(T::EthSpec::sync_subcommittee_size()) - .map(|indices| api_types::SyncSubcommittee { - indices: indices.to_vec(), - }) - .collect(); - - let response = api_types::SyncCommitteeByValidatorIndices { - validators, - validator_aggregates, - }; - - Ok(api_types::GenericResponse::from(response) - .add_execution_optimistic_finalized(execution_optimistic, finalized)) - }) - }, - ); + let get_beacon_state_sync_committees = + states::get_beacon_state_sync_committees(beacon_states_path.clone()); // GET beacon/states/{state_id}/randao?epoch - let get_beacon_state_randao = beacon_states_path - .clone() - .and(warp::path("randao")) - .and(warp::query::()) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>, - query: api_types::RandaoQuery| { - task_spawner.blocking_json_task(Priority::P1, move || { - let (randao, execution_optimistic, finalized) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - let epoch = query.epoch.unwrap_or_else(|| state.current_epoch()); - let randao = *state.get_randao_mix(epoch).map_err(|e| { - warp_utils::reject::custom_bad_request(format!( - "epoch out of range: {e:?}" - )) - })?; - Ok((randao, execution_optimistic, finalized)) - }, - )?; - - Ok( - 
api_types::GenericResponse::from(api_types::RandaoMix { randao }) - .add_execution_optimistic_finalized(execution_optimistic, finalized), - ) - }) - }, - ); + let get_beacon_state_randao = states::get_beacon_state_randao(beacon_states_path.clone()); // GET beacon/states/{state_id}/pending_deposits - let get_beacon_state_pending_deposits = beacon_states_path - .clone() - .and(warp::path("pending_deposits")) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_response_task(Priority::P1, move || { - let (data, execution_optimistic, finalized, fork_name) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - let Ok(deposits) = state.pending_deposits() else { - return Err(warp_utils::reject::custom_bad_request( - "Pending deposits not found".to_string(), - )); - }; - - Ok(( - deposits.clone(), - execution_optimistic, - finalized, - state.fork_name_unchecked(), - )) - }, - )?; - - execution_optimistic_finalized_beacon_response( - ResponseIncludesVersion::Yes(fork_name), - execution_optimistic, - finalized, - data, - ) - .map(|res| warp::reply::json(&res).into_response()) - .map(|resp| add_consensus_version_header(resp, fork_name)) - }) - }, - ); + let get_beacon_state_pending_deposits = + states::get_beacon_state_pending_deposits(beacon_states_path.clone()); // GET beacon/states/{state_id}/pending_partial_withdrawals - let get_beacon_state_pending_partial_withdrawals = beacon_states_path - .clone() - .and(warp::path("pending_partial_withdrawals")) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_response_task(Priority::P1, move || { - let (data, execution_optimistic, finalized, fork_name) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - let Ok(withdrawals) = 
state.pending_partial_withdrawals() else { - return Err(warp_utils::reject::custom_bad_request( - "Pending withdrawals not found".to_string(), - )); - }; - - Ok(( - withdrawals.clone(), - execution_optimistic, - finalized, - state.fork_name_unchecked(), - )) - }, - )?; - - execution_optimistic_finalized_beacon_response( - ResponseIncludesVersion::Yes(fork_name), - execution_optimistic, - finalized, - data, - ) - .map(|res| warp::reply::json(&res).into_response()) - .map(|resp| add_consensus_version_header(resp, fork_name)) - }) - }, - ); + let get_beacon_state_pending_partial_withdrawals = + states::get_beacon_state_pending_partial_withdrawals(beacon_states_path.clone()); // GET beacon/states/{state_id}/pending_consolidations - let get_beacon_state_pending_consolidations = beacon_states_path - .clone() - .and(warp::path("pending_consolidations")) - .and(warp::path::end()) - .then( - |state_id: StateId, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_response_task(Priority::P1, move || { - let (data, execution_optimistic, finalized, fork_name) = state_id - .map_state_and_execution_optimistic_and_finalized( - &chain, - |state, execution_optimistic, finalized| { - let Ok(consolidations) = state.pending_consolidations() else { - return Err(warp_utils::reject::custom_bad_request( - "Pending consolidations not found".to_string(), - )); - }; - - Ok(( - consolidations.clone(), - execution_optimistic, - finalized, - state.fork_name_unchecked(), - )) - }, - )?; - - execution_optimistic_finalized_beacon_response( - ResponseIncludesVersion::Yes(fork_name), - execution_optimistic, - finalized, - data, - ) - .map(|res| warp::reply::json(&res).into_response()) - .map(|resp| add_consensus_version_header(resp, fork_name)) - }) - }, - ); + let get_beacon_state_pending_consolidations = + states::get_beacon_state_pending_consolidations(beacon_states_path.clone()); // GET beacon/headers // diff --git a/beacon_node/http_api/src/utils.rs 
b/beacon_node/http_api/src/utils.rs new file mode 100644 index 00000000000..cf61fa481cb --- /dev/null +++ b/beacon_node/http_api/src/utils.rs @@ -0,0 +1,3 @@ +use warp::filters::BoxedFilter; + +pub type ResponseFilter = BoxedFilter<(warp::reply::Response,)>; From 4e958a92d333c8f8e816db645508cb9b2082309d Mon Sep 17 00:00:00 2001 From: Mac L Date: Thu, 4 Dec 2025 13:28:52 +0400 Subject: [PATCH 54/74] Refactor `consensus/types` (#7827) Organize and categorize `consensus/types` into modules based on their relation to key consensus structures/concepts. This is a precursor to a sensible public interface. While this refactor is very opinionated, I am open to suggestions on module names, or type groupings if my current ones are inappropriate. Co-Authored-By: Mac L --- Cargo.lock | 2 + Cargo.toml | 2 +- .../beacon_chain/src/attester_cache.rs | 2 +- beacon_node/beacon_chain/src/beacon_chain.rs | 1 + .../overflow_lru_cache.rs | 4 +- .../src/sync_committee_verification.rs | 2 +- beacon_node/beacon_chain/src/test_utils.rs | 1 + .../src/test_utils/mock_builder.rs | 7 +- beacon_node/http_api/src/block_id.rs | 4 +- beacon_node/http_api/src/light_client.rs | 3 +- beacon_node/http_api/src/produce_block.rs | 1 + beacon_node/http_api/src/version.rs | 12 +- .../lighthouse_network/src/rpc/codec.rs | 10 +- .../lighthouse_network/src/rpc/methods.rs | 4 +- .../lighthouse_network/tests/rpc_tests.rs | 4 +- .../src/network_beacon_processor/tests.rs | 5 +- .../src/sync/block_sidecar_coupling.rs | 3 +- beacon_node/network/src/sync/tests/lookups.rs | 2 +- common/eth2/Cargo.toml | 1 + .../eth2}/src/beacon_response.rs | 8 +- common/eth2/src/lib.rs | 7 +- common/eth2/src/lighthouse_vc/types.rs | 1 - common/eth2/src/types.rs | 7 +- consensus/types/Cargo.toml | 3 + .../{ => attestation}/aggregate_and_proof.rs | 19 +- .../src/{ => attestation}/attestation.rs | 29 +- .../src/{ => attestation}/attestation_data.rs | 11 +- .../src/{ => attestation}/attestation_duty.rs | 3 +- .../src/{ => 
attestation}/beacon_committee.rs | 2 +- .../types/src/{ => attestation}/checkpoint.rs | 8 +- .../{ => attestation}/indexed_attestation.rs | 21 +- consensus/types/src/attestation/mod.rs | 39 ++ .../{ => attestation}/participation_flags.rs | 6 +- .../{ => attestation}/pending_attestation.rs | 7 +- .../src/{ => attestation}/selection_proof.rs | 12 +- .../src/{ => attestation}/shuffling_id.rs | 9 +- .../signed_aggregate_and_proof.rs | 21 +- .../types/src/{ => attestation}/subnet_id.rs | 14 +- .../types/src/{ => block}/beacon_block.rs | 40 +- .../src/{ => block}/beacon_block_body.rs | 95 +++- .../src/{ => block}/beacon_block_header.rs | 11 +- consensus/types/src/block/mod.rs | 26 + .../src/{ => block}/signed_beacon_block.rs | 38 +- .../{ => block}/signed_beacon_block_header.rs | 14 +- .../types/src/{ => builder}/builder_bid.rs | 22 +- consensus/types/src/builder/mod.rs | 6 + .../consolidation_request.rs | 10 +- consensus/types/src/consolidation/mod.rs | 5 + .../pending_consolidation.rs | 6 +- .../src/{ => core}/application_domain.rs | 0 consensus/types/src/{ => core}/chain_spec.rs | 23 +- .../types/src/{ => core}/config_and_preset.rs | 15 +- consensus/types/src/{ => core}/consts.rs | 2 +- consensus/types/src/{ => core}/enr_fork_id.rs | 5 +- consensus/types/src/{ => core}/eth_spec.rs | 20 +- consensus/types/src/{ => core}/graffiti.rs | 11 +- consensus/types/src/core/mod.rs | 44 ++ .../types/src/{ => core}/non_zero_usize.rs | 0 consensus/types/src/{ => core}/preset.rs | 4 +- .../types/src/{ => core}/relative_epoch.rs | 3 +- .../types/src/{ => core}/signing_data.rs | 7 +- consensus/types/src/{ => core}/slot_data.rs | 2 +- consensus/types/src/{ => core}/slot_epoch.rs | 10 +- .../types/src/{ => core}/slot_epoch_macros.rs | 0 consensus/types/src/{ => core}/sqlite.rs | 3 +- .../types/src/{ => data}/blob_sidecar.rs | 28 +- .../{ => data}/data_column_custody_group.rs | 10 +- .../src/{ => data}/data_column_sidecar.rs | 21 +- .../src/{ => data}/data_column_subnet_id.rs | 25 +- 
consensus/types/src/data/mod.rs | 23 + consensus/types/src/{ => deposit}/deposit.rs | 8 +- .../types/src/{ => deposit}/deposit_data.rs | 11 +- .../src/{ => deposit}/deposit_message.rs | 11 +- .../src/{ => deposit}/deposit_request.rs | 8 +- .../{ => deposit}/deposit_tree_snapshot.rs | 5 +- consensus/types/src/deposit/mod.rs | 13 + .../src/{ => deposit}/pending_deposit.rs | 10 +- .../bls_to_execution_change.rs | 11 +- .../types/src/{ => execution}/eth1_data.rs | 7 +- .../{ => execution}/execution_block_hash.rs | 9 +- .../{ => execution}/execution_block_header.rs | 7 +- .../src/{ => execution}/execution_payload.rs | 26 +- .../execution_payload_header.rs | 27 +- .../src/{ => execution}/execution_requests.rs | 13 +- consensus/types/src/execution/mod.rs | 36 ++ .../types/src/{ => execution}/payload.rs | 122 ++-- .../signed_bls_to_execution_change.rs | 6 +- consensus/types/src/exit/mod.rs | 5 + .../src/{ => exit}/signed_voluntary_exit.rs | 6 +- .../types/src/{ => exit}/voluntary_exit.rs | 15 +- consensus/types/src/{ => fork}/fork.rs | 5 +- .../types/src/{ => fork}/fork_context.rs | 11 +- consensus/types/src/{ => fork}/fork_data.rs | 9 +- consensus/types/src/fork/fork_macros.rs | 60 ++ consensus/types/src/{ => fork}/fork_name.rs | 71 +-- .../types/src/fork/fork_version_decode.rs | 6 + consensus/types/src/fork/mod.rs | 15 + consensus/types/src/kzg_ext/consts.rs | 3 + consensus/types/src/kzg_ext/mod.rs | 27 + consensus/types/src/lib.rs | 418 +++++--------- consensus/types/src/light_client/consts.rs | 21 + consensus/types/src/light_client/error.rs | 41 ++ .../light_client_bootstrap.rs | 63 ++- .../light_client_finality_update.rs | 54 +- .../{ => light_client}/light_client_header.rs | 55 +- .../light_client_optimistic_update.rs | 31 +- .../{ => light_client}/light_client_update.rs | 152 ++--- consensus/types/src/light_client/mod.rs | 37 ++ consensus/types/src/runtime_fixed_vector.rs | 90 --- consensus/types/src/runtime_var_list.rs | 387 ------------- .../src/{ => 
slashing}/attester_slashing.rs | 14 +- consensus/types/src/slashing/mod.rs | 8 + .../src/{ => slashing}/proposer_slashing.rs | 7 +- .../types/src/{ => state}/activation_queue.rs | 6 +- .../src/{beacon_state => state}/balance.rs | 0 .../types/src/{ => state}/beacon_state.rs | 530 ++++++++++-------- .../committee_cache.rs | 38 +- .../types/src/{ => state}/epoch_cache.rs | 9 +- .../src/{beacon_state => state}/exit_cache.rs | 10 +- .../types/src/{ => state}/historical_batch.rs | 12 +- .../src/{ => state}/historical_summary.rs | 11 +- .../types/src/{beacon_state => state}/iter.rs | 7 +- consensus/types/src/state/mod.rs | 35 ++ .../progressive_balances_cache.rs | 14 +- .../{beacon_state => state}/pubkey_cache.rs | 2 +- .../slashings_cache.rs | 3 +- .../contribution_and_proof.rs | 15 +- consensus/types/src/sync_committee/mod.rs | 25 + .../signed_contribution_and_proof.rs | 15 +- .../{ => sync_committee}/sync_aggregate.rs | 14 +- .../sync_aggregator_selection_data.rs | 10 +- .../{ => sync_committee}/sync_committee.rs | 10 +- .../sync_committee_contribution.rs | 14 +- .../sync_committee_message.rs | 14 +- .../sync_committee_subscription.rs | 3 +- .../src/{ => sync_committee}/sync_duty.rs | 9 +- .../sync_selection_proof.rs | 20 +- .../{ => sync_committee}/sync_subnet_id.rs | 15 +- .../generate_deterministic_keypairs.rs | 5 +- .../generate_random_block_and_blobs.rs | 18 +- consensus/types/src/test_utils/mod.rs | 24 +- .../src/test_utils/test_random/address.rs | 4 +- .../test_random/aggregate_signature.rs | 6 +- .../src/test_utils/test_random/bitfield.rs | 8 +- .../src/test_utils/test_random/hash256.rs | 4 +- .../test_utils/test_random/kzg_commitment.rs | 4 +- .../src/test_utils/test_random/kzg_proof.rs | 7 +- .../types/src/test_utils/test_random/mod.rs | 15 + .../src/test_utils/test_random/public_key.rs | 6 +- .../test_random/public_key_bytes.rs | 6 +- .../src/test_utils/test_random/secret_key.rs | 6 +- .../src/test_utils/test_random/signature.rs | 6 +- 
.../test_utils/test_random/signature_bytes.rs | 6 +- .../{ => test_random}/test_random.rs | 22 +- .../src/test_utils/test_random/uint256.rs | 4 +- consensus/types/src/validator/mod.rs | 9 + .../proposer_preparation_data.rs | 3 +- .../types/src/{ => validator}/validator.rs | 16 +- .../validator_registration_data.rs | 4 +- .../{ => validator}/validator_subscription.rs | 3 +- consensus/types/src/withdrawal/mod.rs | 9 + .../pending_partial_withdrawal.rs | 6 +- .../types/src/{ => withdrawal}/withdrawal.rs | 12 +- .../withdrawal_credentials.rs | 7 +- .../{ => withdrawal}/withdrawal_request.rs | 7 +- .../tests.rs => tests/committee_cache.rs} | 11 +- .../beacon_state/tests.rs => tests/state.rs} | 18 +- 167 files changed, 2115 insertions(+), 1749 deletions(-) rename {consensus/types => common/eth2}/src/beacon_response.rs (97%) rename consensus/types/src/{ => attestation}/aggregate_and_proof.rs (93%) rename consensus/types/src/{ => attestation}/attestation.rs (97%) rename consensus/types/src/{ => attestation}/attestation_data.rs (87%) rename consensus/types/src/{ => attestation}/attestation_duty.rs (92%) rename consensus/types/src/{ => attestation}/beacon_committee.rs (92%) rename consensus/types/src/{ => attestation}/checkpoint.rs (88%) rename consensus/types/src/{ => attestation}/indexed_attestation.rs (96%) create mode 100644 consensus/types/src/attestation/mod.rs rename consensus/types/src/{ => attestation}/participation_flags.rs (96%) rename consensus/types/src/{ => attestation}/pending_attestation.rs (84%) rename consensus/types/src/{ => attestation}/selection_proof.rs (95%) rename consensus/types/src/{ => attestation}/shuffling_id.rs (93%) rename consensus/types/src/{ => attestation}/signed_aggregate_and_proof.rs (90%) rename consensus/types/src/{ => attestation}/subnet_id.rs (97%) rename consensus/types/src/{ => block}/beacon_block.rs (97%) rename consensus/types/src/{ => block}/beacon_block_body.rs (93%) rename consensus/types/src/{ => block}/beacon_block_header.rs 
(90%) create mode 100644 consensus/types/src/block/mod.rs rename consensus/types/src/{ => block}/signed_beacon_block.rs (95%) rename consensus/types/src/{ => block}/signed_beacon_block_header.rs (84%) rename consensus/types/src/{ => builder}/builder_bid.rs (93%) create mode 100644 consensus/types/src/builder/mod.rs rename consensus/types/src/{ => consolidation}/consolidation_request.rs (84%) create mode 100644 consensus/types/src/consolidation/mod.rs rename consensus/types/src/{ => consolidation}/pending_consolidation.rs (86%) rename consensus/types/src/{ => core}/application_domain.rs (100%) rename consensus/types/src/{ => core}/chain_spec.rs (99%) rename consensus/types/src/{ => core}/config_and_preset.rs (95%) rename consensus/types/src/{ => core}/consts.rs (94%) rename consensus/types/src/{ => core}/enr_fork_id.rs (95%) rename consensus/types/src/{ => core}/eth_spec.rs (98%) rename consensus/types/src/{ => core}/graffiti.rs (98%) create mode 100644 consensus/types/src/core/mod.rs rename consensus/types/src/{ => core}/non_zero_usize.rs (100%) rename consensus/types/src/{ => core}/preset.rs (99%) rename consensus/types/src/{ => core}/relative_epoch.rs (99%) rename consensus/types/src/{ => core}/signing_data.rs (85%) rename consensus/types/src/{ => core}/slot_data.rs (92%) rename consensus/types/src/{ => core}/slot_epoch.rs (98%) rename consensus/types/src/{ => core}/slot_epoch_macros.rs (100%) rename consensus/types/src/{ => core}/sqlite.rs (96%) rename consensus/types/src/{ => data}/blob_sidecar.rs (94%) rename consensus/types/src/{ => data}/data_column_custody_group.rs (98%) rename consensus/types/src/{ => data}/data_column_sidecar.rs (94%) rename consensus/types/src/{ => data}/data_column_subnet_id.rs (80%) create mode 100644 consensus/types/src/data/mod.rs rename consensus/types/src/{ => deposit}/deposit.rs (78%) rename consensus/types/src/{ => deposit}/deposit_data.rs (86%) rename consensus/types/src/{ => deposit}/deposit_message.rs (81%) rename 
consensus/types/src/{ => deposit}/deposit_request.rs (86%) rename consensus/types/src/{ => deposit}/deposit_tree_snapshot.rs (95%) create mode 100644 consensus/types/src/deposit/mod.rs rename consensus/types/src/{ => deposit}/pending_deposit.rs (78%) rename consensus/types/src/{ => execution}/bls_to_execution_change.rs (83%) rename consensus/types/src/{ => execution}/eth1_data.rs (86%) rename consensus/types/src/{ => execution}/execution_block_hash.rs (96%) rename consensus/types/src/{ => execution}/execution_block_header.rs (98%) rename consensus/types/src/{ => execution}/execution_payload.rs (92%) rename consensus/types/src/{ => execution}/execution_payload_header.rs (96%) rename consensus/types/src/{ => execution}/execution_requests.rs (93%) create mode 100644 consensus/types/src/execution/mod.rs rename consensus/types/src/{ => execution}/payload.rs (91%) rename consensus/types/src/{ => execution}/signed_bls_to_execution_change.rs (78%) create mode 100644 consensus/types/src/exit/mod.rs rename consensus/types/src/{ => exit}/signed_voluntary_exit.rs (84%) rename consensus/types/src/{ => exit}/voluntary_exit.rs (90%) rename consensus/types/src/{ => fork}/fork.rs (96%) rename consensus/types/src/{ => fork}/fork_context.rs (98%) rename consensus/types/src/{ => fork}/fork_data.rs (88%) create mode 100644 consensus/types/src/fork/fork_macros.rs rename consensus/types/src/{ => fork}/fork_name.rs (84%) create mode 100644 consensus/types/src/fork/fork_version_decode.rs create mode 100644 consensus/types/src/fork/mod.rs create mode 100644 consensus/types/src/kzg_ext/consts.rs create mode 100644 consensus/types/src/kzg_ext/mod.rs create mode 100644 consensus/types/src/light_client/consts.rs create mode 100644 consensus/types/src/light_client/error.rs rename consensus/types/src/{ => light_client}/light_client_bootstrap.rs (88%) rename consensus/types/src/{ => light_client}/light_client_finality_update.rs (89%) rename consensus/types/src/{ => 
light_client}/light_client_header.rs (91%) rename consensus/types/src/{ => light_client}/light_client_optimistic_update.rs (94%) rename consensus/types/src/{ => light_client}/light_client_update.rs (87%) create mode 100644 consensus/types/src/light_client/mod.rs delete mode 100644 consensus/types/src/runtime_fixed_vector.rs delete mode 100644 consensus/types/src/runtime_var_list.rs rename consensus/types/src/{ => slashing}/attester_slashing.rs (96%) create mode 100644 consensus/types/src/slashing/mod.rs rename consensus/types/src/{ => slashing}/proposer_slashing.rs (86%) rename consensus/types/src/{ => state}/activation_queue.rs (95%) rename consensus/types/src/{beacon_state => state}/balance.rs (100%) rename consensus/types/src/{ => state}/beacon_state.rs (88%) rename consensus/types/src/{beacon_state => state}/committee_cache.rs (93%) rename consensus/types/src/{ => state}/epoch_cache.rs (97%) rename consensus/types/src/{beacon_state => state}/exit_cache.rs (97%) rename consensus/types/src/{ => state}/historical_batch.rs (81%) rename consensus/types/src/{ => state}/historical_summary.rs (87%) rename consensus/types/src/{beacon_state => state}/iter.rs (96%) create mode 100644 consensus/types/src/state/mod.rs rename consensus/types/src/{beacon_state => state}/progressive_balances_cache.rs (98%) rename consensus/types/src/{beacon_state => state}/pubkey_cache.rs (98%) rename consensus/types/src/{beacon_state => state}/slashings_cache.rs (96%) rename consensus/types/src/{ => sync_committee}/contribution_and_proof.rs (88%) create mode 100644 consensus/types/src/sync_committee/mod.rs rename consensus/types/src/{ => sync_committee}/signed_contribution_and_proof.rs (87%) rename consensus/types/src/{ => sync_committee}/sync_aggregate.rs (91%) rename consensus/types/src/{ => sync_committee}/sync_aggregator_selection_data.rs (82%) rename consensus/types/src/{ => sync_committee}/sync_committee.rs (95%) rename consensus/types/src/{ => 
sync_committee}/sync_committee_contribution.rs (93%) rename consensus/types/src/{ => sync_committee}/sync_committee_message.rs (88%) rename consensus/types/src/{ => sync_committee}/sync_committee_subscription.rs (96%) rename consensus/types/src/{ => sync_committee}/sync_duty.rs (96%) rename consensus/types/src/{ => sync_committee}/sync_selection_proof.rs (92%) rename consensus/types/src/{ => sync_committee}/sync_subnet_id.rs (92%) create mode 100644 consensus/types/src/test_utils/test_random/mod.rs rename consensus/types/src/test_utils/{ => test_random}/test_random.rs (90%) create mode 100644 consensus/types/src/validator/mod.rs rename consensus/types/src/{ => validator}/proposer_preparation_data.rs (95%) rename consensus/types/src/{ => validator}/validator.rs (97%) rename consensus/types/src/{ => validator}/validator_registration_data.rs (93%) rename consensus/types/src/{ => validator}/validator_subscription.rs (93%) create mode 100644 consensus/types/src/withdrawal/mod.rs rename consensus/types/src/{ => withdrawal}/pending_partial_withdrawal.rs (85%) rename consensus/types/src/{ => withdrawal}/withdrawal.rs (73%) rename consensus/types/src/{ => withdrawal}/withdrawal_credentials.rs (91%) rename consensus/types/src/{ => withdrawal}/withdrawal_request.rs (87%) rename consensus/types/{src/beacon_state/committee_cache/tests.rs => tests/committee_cache.rs} (97%) rename consensus/types/{src/beacon_state/tests.rs => tests/state.rs} (97%) diff --git a/Cargo.lock b/Cargo.lock index 7ddcad7239a..481808b41f7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3125,6 +3125,7 @@ dependencies = [ name = "eth2" version = "0.1.0" dependencies = [ + "context_deserialize", "educe", "eip_3076", "either", @@ -8547,6 +8548,7 @@ checksum = "1fc20a89bab2dabeee65e9c9eb96892dc222c23254b401e1319b85efd852fa31" dependencies = [ "arbitrary", "context_deserialize", + "educe", "ethereum_serde_utils", "ethereum_ssz", "itertools 0.14.0", diff --git a/Cargo.toml b/Cargo.toml index 
6ccf429b6c6..21cf551c48d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -227,7 +227,7 @@ slashing_protection = { path = "validator_client/slashing_protection" } slot_clock = { path = "common/slot_clock" } smallvec = { version = "1.11.2", features = ["arbitrary"] } snap = "1" -ssz_types = { version = "0.14.0", features = ["context_deserialize"] } +ssz_types = { version = "0.14.0", features = ["context_deserialize", "runtime_types"] } state_processing = { path = "consensus/state_processing" } store = { path = "beacon_node/store" } strum = { version = "0.24", features = ["derive"] } diff --git a/beacon_node/beacon_chain/src/attester_cache.rs b/beacon_node/beacon_chain/src/attester_cache.rs index f879adfb498..beaa1e581cc 100644 --- a/beacon_node/beacon_chain/src/attester_cache.rs +++ b/beacon_node/beacon_chain/src/attester_cache.rs @@ -17,7 +17,7 @@ use std::ops::Range; use types::{ BeaconState, BeaconStateError, ChainSpec, Checkpoint, Epoch, EthSpec, FixedBytesExtended, Hash256, RelativeEpoch, Slot, - attestation::Error as AttestationError, + attestation::AttestationError, beacon_state::{ compute_committee_index_in_epoch, compute_committee_range_in_epoch, epoch_committee_count, }, diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 00c5ab415c1..adc400b1c17 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -74,6 +74,7 @@ use crate::{ AvailabilityPendingExecutedBlock, BeaconChainError, BeaconForkChoiceStore, BeaconSnapshot, CachedHead, metrics, }; +use eth2::beacon_response::ForkVersionedResponse; use eth2::types::{ EventKind, SseBlobSidecar, SseBlock, SseDataColumnSidecar, SseExtendedPayloadAttributes, }; diff --git a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs index c383c20f9fb..e7c536c0d85 100644 --- 
a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs +++ b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs @@ -12,6 +12,7 @@ use crate::{BeaconChainTypes, BlockProcessStatus}; use lighthouse_tracing::SPAN_PENDING_COMPONENTS; use lru::LruCache; use parking_lot::{MappedRwLockReadGuard, RwLock, RwLockReadGuard, RwLockWriteGuard}; +use ssz_types::{RuntimeFixedVector, RuntimeVariableList}; use std::cmp::Ordering; use std::num::NonZeroUsize; use std::sync::Arc; @@ -20,8 +21,7 @@ use types::beacon_block_body::KzgCommitments; use types::blob_sidecar::BlobIdentifier; use types::{ BlobSidecar, BlockImportSource, ChainSpec, ColumnIndex, DataColumnSidecar, - DataColumnSidecarList, Epoch, EthSpec, Hash256, RuntimeFixedVector, RuntimeVariableList, - SignedBeaconBlock, + DataColumnSidecarList, Epoch, EthSpec, Hash256, SignedBeaconBlock, }; #[derive(Clone)] diff --git a/beacon_node/beacon_chain/src/sync_committee_verification.rs b/beacon_node/beacon_chain/src/sync_committee_verification.rs index e72e9a6b21f..88b040a6e59 100644 --- a/beacon_node/beacon_chain/src/sync_committee_verification.rs +++ b/beacon_node/beacon_chain/src/sync_committee_verification.rs @@ -49,7 +49,7 @@ use tree_hash_derive::TreeHash; use types::ChainSpec; use types::consts::altair::SYNC_COMMITTEE_SUBNET_COUNT; use types::slot_data::SlotData; -use types::sync_committee::Error as SyncCommitteeError; +use types::sync_committee::SyncCommitteeError; use types::{ AggregateSignature, BeaconStateError, EthSpec, Hash256, SignedContributionAndProof, Slot, SyncCommitteeContribution, SyncCommitteeMessage, SyncSelectionProof, SyncSubnetId, diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index 05d67e4504a..759b7e9bd77 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -46,6 +46,7 @@ use rand::seq::SliceRandom; use rayon::prelude::*; use 
sensitive_url::SensitiveUrl; use slot_clock::{SlotClock, TestingSlotClock}; +use ssz_types::RuntimeVariableList; use state_processing::per_block_processing::compute_timestamp_at_slot; use state_processing::state_advance::complete_state_advance; use std::borrow::Cow; diff --git a/beacon_node/execution_layer/src/test_utils/mock_builder.rs b/beacon_node/execution_layer/src/test_utils/mock_builder.rs index 9add1369194..589b29193c1 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_builder.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_builder.rs @@ -1,6 +1,7 @@ use crate::test_utils::{DEFAULT_BUILDER_PAYLOAD_VALUE_WEI, DEFAULT_JWT_SECRET}; use crate::{Config, ExecutionLayer, PayloadAttributes, PayloadParameters}; use bytes::Bytes; +use eth2::beacon_response::ForkVersionedResponse; use eth2::types::PublishBlockRequest; use eth2::types::{ BlobsBundle, BlockId, BroadcastValidation, EndpointVersion, EventKind, EventTopic, @@ -31,9 +32,9 @@ use types::builder_bid::{ }; use types::{ Address, BeaconState, ChainSpec, Epoch, EthSpec, ExecPayload, ExecutionPayload, - ExecutionPayloadHeaderRefMut, ExecutionRequests, ForkName, ForkVersionDecode, - ForkVersionedResponse, Hash256, PublicKeyBytes, Signature, SignedBlindedBeaconBlock, - SignedRoot, SignedValidatorRegistrationData, Slot, Uint256, + ExecutionPayloadHeaderRefMut, ExecutionRequests, ForkName, ForkVersionDecode, Hash256, + PublicKeyBytes, Signature, SignedBlindedBeaconBlock, SignedRoot, + SignedValidatorRegistrationData, Slot, Uint256, }; use types::{ExecutionBlockHash, SecretKey}; use warp::reply::{self, Reply}; diff --git a/beacon_node/http_api/src/block_id.rs b/beacon_node/http_api/src/block_id.rs index e088005f201..64f54515607 100644 --- a/beacon_node/http_api/src/block_id.rs +++ b/beacon_node/http_api/src/block_id.rs @@ -2,6 +2,7 @@ use crate::version::inconsistent_fork_rejection; use crate::{ExecutionOptimistic, state_id::checkpoint_slot_and_execution_optimistic}; use 
beacon_chain::kzg_utils::reconstruct_blobs; use beacon_chain::{BeaconChain, BeaconChainError, BeaconChainTypes, WhenSlotSkipped}; +use eth2::beacon_response::{ExecutionOptimisticFinalizedMetadata, UnversionedResponse}; use eth2::types::BlockId as CoreBlockId; use eth2::types::DataColumnIndicesQuery; use eth2::types::{BlobIndicesQuery, BlobWrapper, BlobsVersionedHashesQuery}; @@ -10,8 +11,7 @@ use std::str::FromStr; use std::sync::Arc; use types::{ BlobSidecarList, DataColumnSidecarList, EthSpec, FixedBytesExtended, ForkName, Hash256, - SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, UnversionedResponse, - beacon_response::ExecutionOptimisticFinalizedMetadata, + SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, }; use warp::Rejection; diff --git a/beacon_node/http_api/src/light_client.rs b/beacon_node/http_api/src/light_client.rs index ca9b86990c3..86eef03218b 100644 --- a/beacon_node/http_api/src/light_client.rs +++ b/beacon_node/http_api/src/light_client.rs @@ -3,13 +3,14 @@ use crate::version::{ beacon_response, }; use beacon_chain::{BeaconChain, BeaconChainError, BeaconChainTypes}; +use eth2::beacon_response::BeaconResponse; use eth2::types::{ self as api_types, LightClientUpdate, LightClientUpdateResponseChunk, LightClientUpdateResponseChunkInner, LightClientUpdatesQuery, }; use ssz::Encode; use std::sync::Arc; -use types::{BeaconResponse, EthSpec, ForkName, Hash256, LightClientBootstrap}; +use types::{EthSpec, ForkName, Hash256, LightClientBootstrap}; use warp::{ Rejection, hyper::{Body, Response}, diff --git a/beacon_node/http_api/src/produce_block.rs b/beacon_node/http_api/src/produce_block.rs index 367e09969b4..472ec0b65e4 100644 --- a/beacon_node/http_api/src/produce_block.rs +++ b/beacon_node/http_api/src/produce_block.rs @@ -9,6 +9,7 @@ use crate::{ use beacon_chain::{ BeaconBlockResponseWrapper, BeaconChain, BeaconChainTypes, ProduceBlockVerification, }; +use eth2::beacon_response::ForkVersionedResponse; use eth2::types::{self as api_types, 
ProduceBlockV3Metadata, SkipRandaoVerification}; use lighthouse_tracing::{SPAN_PRODUCE_BLOCK_V2, SPAN_PRODUCE_BLOCK_V3}; use ssz::Encode; diff --git a/beacon_node/http_api/src/version.rs b/beacon_node/http_api/src/version.rs index 871a10e7d4a..371064c886b 100644 --- a/beacon_node/http_api/src/version.rs +++ b/beacon_node/http_api/src/version.rs @@ -1,16 +1,14 @@ use crate::api_types::EndpointVersion; +use eth2::beacon_response::{ + BeaconResponse, ExecutionOptimisticFinalizedBeaconResponse, + ExecutionOptimisticFinalizedMetadata, ForkVersionedResponse, UnversionedResponse, +}; use eth2::{ CONSENSUS_BLOCK_VALUE_HEADER, CONSENSUS_VERSION_HEADER, CONTENT_TYPE_HEADER, EXECUTION_PAYLOAD_BLINDED_HEADER, EXECUTION_PAYLOAD_VALUE_HEADER, SSZ_CONTENT_TYPE_HEADER, }; use serde::Serialize; -use types::{ - BeaconResponse, ForkName, ForkVersionedResponse, InconsistentFork, Uint256, - UnversionedResponse, - beacon_response::{ - ExecutionOptimisticFinalizedBeaconResponse, ExecutionOptimisticFinalizedMetadata, - }, -}; +use types::{ForkName, InconsistentFork, Uint256}; use warp::reply::{self, Reply, Response}; pub const V1: EndpointVersion = EndpointVersion(1); diff --git a/beacon_node/lighthouse_network/src/rpc/codec.rs b/beacon_node/lighthouse_network/src/rpc/codec.rs index 77d2a34e16e..5b3574d48ac 100644 --- a/beacon_node/lighthouse_network/src/rpc/codec.rs +++ b/beacon_node/lighthouse_network/src/rpc/codec.rs @@ -8,7 +8,7 @@ use libp2p::bytes::BytesMut; use snap::read::FrameDecoder; use snap::write::FrameEncoder; use ssz::{Decode, Encode}; -use ssz_types::VariableList; +use ssz_types::{RuntimeVariableList, VariableList}; use std::io::Cursor; use std::io::ErrorKind; use std::io::{Read, Write}; @@ -18,10 +18,10 @@ use tokio_util::codec::{Decoder, Encoder}; use types::{ BlobSidecar, ChainSpec, DataColumnSidecar, DataColumnsByRootIdentifier, EthSpec, ForkContext, ForkName, Hash256, LightClientBootstrap, LightClientFinalityUpdate, - LightClientOptimisticUpdate, LightClientUpdate, 
RuntimeVariableList, SignedBeaconBlock, - SignedBeaconBlockAltair, SignedBeaconBlockBase, SignedBeaconBlockBellatrix, - SignedBeaconBlockCapella, SignedBeaconBlockDeneb, SignedBeaconBlockElectra, - SignedBeaconBlockFulu, SignedBeaconBlockGloas, + LightClientOptimisticUpdate, LightClientUpdate, SignedBeaconBlock, SignedBeaconBlockAltair, + SignedBeaconBlockBase, SignedBeaconBlockBellatrix, SignedBeaconBlockCapella, + SignedBeaconBlockDeneb, SignedBeaconBlockElectra, SignedBeaconBlockFulu, + SignedBeaconBlockGloas, }; use unsigned_varint::codec::Uvi; diff --git a/beacon_node/lighthouse_network/src/rpc/methods.rs b/beacon_node/lighthouse_network/src/rpc/methods.rs index 9aab0799521..a9b4aa2fbad 100644 --- a/beacon_node/lighthouse_network/src/rpc/methods.rs +++ b/beacon_node/lighthouse_network/src/rpc/methods.rs @@ -5,7 +5,7 @@ use regex::bytes::Regex; use serde::Serialize; use ssz::Encode; use ssz_derive::{Decode, Encode}; -use ssz_types::{VariableList, typenum::U256}; +use ssz_types::{RuntimeVariableList, VariableList, typenum::U256}; use std::fmt::Display; use std::marker::PhantomData; use std::ops::Deref; @@ -17,7 +17,7 @@ use types::light_client_update::MAX_REQUEST_LIGHT_CLIENT_UPDATES; use types::{ ChainSpec, ColumnIndex, DataColumnSidecar, DataColumnsByRootIdentifier, Epoch, EthSpec, ForkContext, Hash256, LightClientBootstrap, LightClientFinalityUpdate, - LightClientOptimisticUpdate, LightClientUpdate, RuntimeVariableList, SignedBeaconBlock, Slot, + LightClientOptimisticUpdate, LightClientUpdate, SignedBeaconBlock, Slot, blob_sidecar::BlobSidecar, }; diff --git a/beacon_node/lighthouse_network/tests/rpc_tests.rs b/beacon_node/lighthouse_network/tests/rpc_tests.rs index 60e3e3da972..8613edf5f5e 100644 --- a/beacon_node/lighthouse_network/tests/rpc_tests.rs +++ b/beacon_node/lighthouse_network/tests/rpc_tests.rs @@ -7,7 +7,7 @@ use lighthouse_network::rpc::{RequestType, methods::*}; use lighthouse_network::service::api_types::AppRequestId; use 
lighthouse_network::{NetworkEvent, ReportSource, Response}; use ssz::Encode; -use ssz_types::VariableList; +use ssz_types::{RuntimeVariableList, VariableList}; use std::sync::Arc; use std::time::{Duration, Instant}; use tokio::runtime::Runtime; @@ -17,7 +17,7 @@ use types::{ BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockHeader, BlobSidecar, ChainSpec, DataColumnSidecar, DataColumnsByRootIdentifier, EmptyBlock, Epoch, EthSpec, FixedBytesExtended, ForkName, Hash256, KzgCommitment, KzgProof, MinimalEthSpec, - RuntimeVariableList, Signature, SignedBeaconBlock, SignedBeaconBlockHeader, Slot, + Signature, SignedBeaconBlock, SignedBeaconBlockHeader, Slot, }; type E = MinimalEthSpec; diff --git a/beacon_node/network/src/network_beacon_processor/tests.rs b/beacon_node/network/src/network_beacon_processor/tests.rs index d83059ad278..841a8679cfd 100644 --- a/beacon_node/network/src/network_beacon_processor/tests.rs +++ b/beacon_node/network/src/network_beacon_processor/tests.rs @@ -33,6 +33,7 @@ use lighthouse_network::{ }; use matches::assert_matches; use slot_clock::SlotClock; +use ssz_types::RuntimeVariableList; use std::collections::HashSet; use std::iter::Iterator; use std::sync::Arc; @@ -42,8 +43,8 @@ use types::blob_sidecar::{BlobIdentifier, FixedBlobSidecarList}; use types::{ AttesterSlashing, BlobSidecar, BlobSidecarList, ChainSpec, DataColumnSidecarList, DataColumnSubnetId, Epoch, EthSpec, Hash256, MainnetEthSpec, ProposerSlashing, - RuntimeVariableList, SignedAggregateAndProof, SignedBeaconBlock, SignedVoluntaryExit, - SingleAttestation, Slot, SubnetId, + SignedAggregateAndProof, SignedBeaconBlock, SignedVoluntaryExit, SingleAttestation, Slot, + SubnetId, }; type E = MainnetEthSpec; diff --git a/beacon_node/network/src/sync/block_sidecar_coupling.rs b/beacon_node/network/src/sync/block_sidecar_coupling.rs index 01929cbf906..ed9a11a03de 100644 --- a/beacon_node/network/src/sync/block_sidecar_coupling.rs +++ 
b/beacon_node/network/src/sync/block_sidecar_coupling.rs @@ -7,11 +7,12 @@ use lighthouse_network::{ BlobsByRangeRequestId, BlocksByRangeRequestId, DataColumnsByRangeRequestId, }, }; +use ssz_types::RuntimeVariableList; use std::{collections::HashMap, sync::Arc}; use tracing::{Span, debug}; use types::{ BlobSidecar, ChainSpec, ColumnIndex, DataColumnSidecar, DataColumnSidecarList, EthSpec, - Hash256, RuntimeVariableList, SignedBeaconBlock, + Hash256, SignedBeaconBlock, }; use crate::sync::network_context::MAX_COLUMN_RETRIES; diff --git a/beacon_node/network/src/sync/tests/lookups.rs b/beacon_node/network/src/sync/tests/lookups.rs index 63bcd176f52..ef52f896785 100644 --- a/beacon_node/network/src/sync/tests/lookups.rs +++ b/beacon_node/network/src/sync/tests/lookups.rs @@ -1929,8 +1929,8 @@ mod deneb_only { block_verification_types::{AsBlock, RpcBlock}, data_availability_checker::AvailabilityCheckError, }; + use ssz_types::RuntimeVariableList; use std::collections::VecDeque; - use types::RuntimeVariableList; struct DenebTester { rig: TestRig, diff --git a/common/eth2/Cargo.toml b/common/eth2/Cargo.toml index 7a75bdc80a1..f7e6cde2100 100644 --- a/common/eth2/Cargo.toml +++ b/common/eth2/Cargo.toml @@ -9,6 +9,7 @@ default = ["lighthouse"] lighthouse = [] [dependencies] +context_deserialize = { workspace = true } educe = { workspace = true } eip_3076 = { workspace = true } either = { workspace = true } diff --git a/consensus/types/src/beacon_response.rs b/common/eth2/src/beacon_response.rs similarity index 97% rename from consensus/types/src/beacon_response.rs rename to common/eth2/src/beacon_response.rs index fc59fc94329..d58734997ce 100644 --- a/consensus/types/src/beacon_response.rs +++ b/common/eth2/src/beacon_response.rs @@ -1,12 +1,8 @@ -use crate::{ContextDeserialize, ForkName}; +use context_deserialize::ContextDeserialize; use serde::de::DeserializeOwned; use serde::{Deserialize, Deserializer, Serialize}; use serde_json::value::Value; - -pub trait 
ForkVersionDecode: Sized { - /// SSZ decode with explicit fork variant. - fn from_ssz_bytes_by_fork(bytes: &[u8], fork_name: ForkName) -> Result; -} +use types::ForkName; /// The metadata of type M should be set to `EmptyMetadata` if you don't care about adding fields other than /// version. If you *do* care about adding other fields you can mix in any type that implements diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index bcd979daca6..4e832a11dfa 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -7,6 +7,7 @@ //! Eventually it would be ideal to publish this crate on crates.io, however we have some local //! dependencies preventing this presently. +pub mod beacon_response; pub mod error; #[cfg(feature = "lighthouse")] pub mod lighthouse; @@ -15,10 +16,14 @@ pub mod lighthouse_vc; pub mod mixin; pub mod types; +pub use beacon_response::{ + BeaconResponse, EmptyMetadata, ExecutionOptimisticFinalizedBeaconResponse, + ExecutionOptimisticFinalizedMetadata, ForkVersionedResponse, UnversionedResponse, +}; + pub use self::error::{Error, ok_or_error, success_or_error}; use self::mixin::{RequestAccept, ResponseOptional}; use self::types::*; -use ::types::beacon_response::ExecutionOptimisticFinalizedBeaconResponse; use educe::Educe; use futures::Stream; use futures_util::StreamExt; diff --git a/common/eth2/src/lighthouse_vc/types.rs b/common/eth2/src/lighthouse_vc/types.rs index 4407e30e436..8e1d90f8f94 100644 --- a/common/eth2/src/lighthouse_vc/types.rs +++ b/common/eth2/src/lighthouse_vc/types.rs @@ -2,7 +2,6 @@ pub use crate::lighthouse::Health; pub use crate::lighthouse_vc::std_types::*; pub use crate::types::{GenericResponse, VersionData}; use eth2_keystore::Keystore; -use graffiti::GraffitiString; use serde::{Deserialize, Serialize}; use std::path::PathBuf; pub use types::*; diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs index 6aad00301a6..cbdaa004d0d 100644 --- a/common/eth2/src/types.rs +++ b/common/eth2/src/types.rs 
@@ -19,10 +19,15 @@ use std::str::FromStr; use std::sync::Arc; use std::time::Duration; use test_random_derive::TestRandom; -use types::beacon_block_body::KzgCommitments; use types::test_utils::TestRandom; pub use types::*; +// TODO(mac): Temporary module and re-export hack to expose old `consensus/types` via `eth2/types`. +pub use crate::beacon_response::*; +pub mod beacon_response { + pub use crate::beacon_response::*; +} + #[cfg(feature = "lighthouse")] use crate::lighthouse::BlockReward; diff --git a/consensus/types/Cargo.toml b/consensus/types/Cargo.toml index 1f527c0de8a..559a1819480 100644 --- a/consensus/types/Cargo.toml +++ b/consensus/types/Cargo.toml @@ -73,6 +73,9 @@ paste = { workspace = true } state_processing = { workspace = true } tokio = { workspace = true } +[lints.clippy] +module_inception = "allow" + [[bench]] name = "benches" harness = false diff --git a/consensus/types/src/aggregate_and_proof.rs b/consensus/types/src/attestation/aggregate_and_proof.rs similarity index 93% rename from consensus/types/src/aggregate_and_proof.rs rename to consensus/types/src/attestation/aggregate_and_proof.rs index e76ba48bf47..4c6e775e56d 100644 --- a/consensus/types/src/aggregate_and_proof.rs +++ b/consensus/types/src/attestation/aggregate_and_proof.rs @@ -1,17 +1,20 @@ -use super::{AttestationBase, AttestationElectra, AttestationRef}; -use super::{ - ChainSpec, Domain, EthSpec, Fork, ForkName, Hash256, PublicKey, SecretKey, SelectionProof, - Signature, SignedRoot, -}; -use crate::Attestation; -use crate::context_deserialize; -use crate::test_utils::TestRandom; +use bls::{PublicKey, SecretKey, Signature}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + attestation::{ + Attestation, AttestationBase, AttestationElectra, AttestationRef, SelectionProof, + }, + core::{ChainSpec, 
Domain, EthSpec, Hash256, SignedRoot}, + fork::{Fork, ForkName}, + test_utils::TestRandom, +}; + #[superstruct( variants(Base, Electra), variant_attributes( diff --git a/consensus/types/src/attestation.rs b/consensus/types/src/attestation/attestation.rs similarity index 97% rename from consensus/types/src/attestation.rs rename to consensus/types/src/attestation/attestation.rs index 14305826589..693b5889f53 100644 --- a/consensus/types/src/attestation.rs +++ b/consensus/types/src/attestation/attestation.rs @@ -1,23 +1,28 @@ -use super::{ - AggregateSignature, AttestationData, BitList, ChainSpec, Domain, EthSpec, Fork, SecretKey, - Signature, SignedRoot, +use std::{ + collections::HashSet, + hash::{Hash, Hasher}, }; -use crate::slot_data::SlotData; -use crate::{ - Checkpoint, ContextDeserialize, ForkName, IndexedAttestationBase, IndexedAttestationElectra, -}; -use crate::{Hash256, Slot, test_utils::TestRandom}; -use crate::{IndexedAttestation, context_deserialize}; + +use bls::{AggregateSignature, SecretKey, Signature}; +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz_derive::{Decode, Encode}; -use ssz_types::BitVector; -use std::collections::HashSet; -use std::hash::{Hash, Hasher}; +use ssz_types::{BitList, BitVector}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + attestation::{ + AttestationData, Checkpoint, IndexedAttestation, IndexedAttestationBase, + IndexedAttestationElectra, + }, + core::{ChainSpec, Domain, EthSpec, Hash256, SignedRoot, Slot, SlotData}, + fork::{Fork, ForkName}, + test_utils::TestRandom, +}; + #[derive(Debug, PartialEq, Clone)] pub enum Error { SszTypesError(ssz_types::Error), diff --git a/consensus/types/src/attestation_data.rs b/consensus/types/src/attestation/attestation_data.rs similarity index 87% rename from consensus/types/src/attestation_data.rs rename to 
consensus/types/src/attestation/attestation_data.rs index a4643e54741..f3fceb9b70f 100644 --- a/consensus/types/src/attestation_data.rs +++ b/consensus/types/src/attestation/attestation_data.rs @@ -1,11 +1,16 @@ -use crate::slot_data::SlotData; -use crate::test_utils::TestRandom; -use crate::{Checkpoint, ForkName, Hash256, SignedRoot, Slot}; use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; + +use crate::{ + attestation::Checkpoint, + core::{Hash256, SignedRoot, Slot, SlotData}, + fork::ForkName, + test_utils::TestRandom, +}; + /// The data upon which an attestation is based. /// /// Spec v0.12.1 diff --git a/consensus/types/src/attestation_duty.rs b/consensus/types/src/attestation/attestation_duty.rs similarity index 92% rename from consensus/types/src/attestation_duty.rs rename to consensus/types/src/attestation/attestation_duty.rs index 70c7c5c170f..fe3da79a2b1 100644 --- a/consensus/types/src/attestation_duty.rs +++ b/consensus/types/src/attestation/attestation_duty.rs @@ -1,6 +1,7 @@ -use crate::*; use serde::{Deserialize, Serialize}; +use crate::{attestation::CommitteeIndex, core::Slot}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(Debug, PartialEq, Clone, Copy, Default, Serialize, Deserialize)] pub struct AttestationDuty { diff --git a/consensus/types/src/beacon_committee.rs b/consensus/types/src/attestation/beacon_committee.rs similarity index 92% rename from consensus/types/src/beacon_committee.rs rename to consensus/types/src/attestation/beacon_committee.rs index 04fe763a11b..2dba30bad3c 100644 --- a/consensus/types/src/beacon_committee.rs +++ b/consensus/types/src/attestation/beacon_committee.rs @@ -1,4 +1,4 @@ -use crate::*; +use crate::{attestation::CommitteeIndex, core::Slot}; #[derive(Default, Clone, Debug, PartialEq)] pub struct BeaconCommittee<'a> { diff --git 
a/consensus/types/src/checkpoint.rs b/consensus/types/src/attestation/checkpoint.rs similarity index 88% rename from consensus/types/src/checkpoint.rs rename to consensus/types/src/attestation/checkpoint.rs index 545af59985e..f5a95f0ad94 100644 --- a/consensus/types/src/checkpoint.rs +++ b/consensus/types/src/attestation/checkpoint.rs @@ -1,11 +1,15 @@ -use crate::test_utils::TestRandom; -use crate::{Epoch, ForkName, Hash256}; use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Epoch, Hash256}, + fork::ForkName, + test_utils::TestRandom, +}; + /// Casper FFG checkpoint, used in attestations. /// /// Spec v0.12.1 diff --git a/consensus/types/src/indexed_attestation.rs b/consensus/types/src/attestation/indexed_attestation.rs similarity index 96% rename from consensus/types/src/indexed_attestation.rs rename to consensus/types/src/attestation/indexed_attestation.rs index dc328842176..272b015d907 100644 --- a/consensus/types/src/indexed_attestation.rs +++ b/consensus/types/src/attestation/indexed_attestation.rs @@ -1,17 +1,21 @@ -use crate::context_deserialize; -use crate::{ - AggregateSignature, AttestationData, EthSpec, ForkName, VariableList, test_utils::TestRandom, +use std::{ + hash::{Hash, Hasher}, + slice::Iter, }; -use core::slice::Iter; + +use bls::AggregateSignature; +use context_deserialize::context_deserialize; use educe::Educe; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz_derive::{Decode, Encode}; -use std::hash::{Hash, Hasher}; +use ssz_types::VariableList; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{attestation::AttestationData, core::EthSpec, fork::ForkName, test_utils::TestRandom}; + /// Details an attestation that can be slashable. /// /// To be included in an `AttesterSlashing`. 
@@ -208,9 +212,10 @@ impl Hash for IndexedAttestation { #[cfg(test)] mod tests { use super::*; - use crate::MainnetEthSpec; - use crate::slot_epoch::Epoch; - use crate::test_utils::{SeedableRng, XorShiftRng}; + use crate::{ + core::{Epoch, MainnetEthSpec}, + test_utils::{SeedableRng, XorShiftRng}, + }; #[test] pub fn test_is_double_vote_true() { diff --git a/consensus/types/src/attestation/mod.rs b/consensus/types/src/attestation/mod.rs new file mode 100644 index 00000000000..2d2bf74e49a --- /dev/null +++ b/consensus/types/src/attestation/mod.rs @@ -0,0 +1,39 @@ +mod aggregate_and_proof; +mod attestation; +mod attestation_data; +mod attestation_duty; +mod beacon_committee; +mod checkpoint; +mod indexed_attestation; +mod participation_flags; +mod pending_attestation; +mod selection_proof; +mod shuffling_id; +mod signed_aggregate_and_proof; +mod subnet_id; + +pub use aggregate_and_proof::{ + AggregateAndProof, AggregateAndProofBase, AggregateAndProofElectra, AggregateAndProofRef, +}; +pub use attestation::{ + Attestation, AttestationBase, AttestationElectra, AttestationOnDisk, AttestationRef, + AttestationRefMut, AttestationRefOnDisk, Error as AttestationError, SingleAttestation, +}; +pub use attestation_data::AttestationData; +pub use attestation_duty::AttestationDuty; +pub use beacon_committee::{BeaconCommittee, OwnedBeaconCommittee}; +pub use checkpoint::Checkpoint; +pub use indexed_attestation::{ + IndexedAttestation, IndexedAttestationBase, IndexedAttestationElectra, IndexedAttestationRef, +}; +pub use participation_flags::ParticipationFlags; +pub use pending_attestation::PendingAttestation; +pub use selection_proof::SelectionProof; +pub use shuffling_id::AttestationShufflingId; +pub use signed_aggregate_and_proof::{ + SignedAggregateAndProof, SignedAggregateAndProofBase, SignedAggregateAndProofElectra, + SignedAggregateAndProofRefMut, +}; +pub use subnet_id::SubnetId; + +pub type CommitteeIndex = u64; diff --git a/consensus/types/src/participation_flags.rs 
b/consensus/types/src/attestation/participation_flags.rs similarity index 96% rename from consensus/types/src/participation_flags.rs rename to consensus/types/src/attestation/participation_flags.rs index e59efc51704..66831abfac0 100644 --- a/consensus/types/src/participation_flags.rs +++ b/consensus/types/src/attestation/participation_flags.rs @@ -1,10 +1,14 @@ -use crate::{Hash256, consts::altair::NUM_FLAG_INDICES, test_utils::TestRandom}; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz::{Decode, DecodeError, Encode}; use test_random_derive::TestRandom; use tree_hash::{PackedEncoding, TreeHash, TreeHashType}; +use crate::{ + core::{Hash256, consts::altair::NUM_FLAG_INDICES}, + test_utils::TestRandom, +}; + #[derive(Debug, Default, Clone, Copy, PartialEq, Deserialize, Serialize, TestRandom)] #[serde(transparent)] #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] diff --git a/consensus/types/src/pending_attestation.rs b/consensus/types/src/attestation/pending_attestation.rs similarity index 84% rename from consensus/types/src/pending_attestation.rs rename to consensus/types/src/attestation/pending_attestation.rs index 4a00a0495ac..84353ac1185 100644 --- a/consensus/types/src/pending_attestation.rs +++ b/consensus/types/src/attestation/pending_attestation.rs @@ -1,11 +1,12 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{AttestationData, BitList, EthSpec, ForkName}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; +use ssz_types::BitList; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{attestation::AttestationData, core::EthSpec, fork::ForkName, test_utils::TestRandom}; + /// An attestation that has been included in the state but not yet fully processed. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/selection_proof.rs b/consensus/types/src/attestation/selection_proof.rs similarity index 95% rename from consensus/types/src/selection_proof.rs rename to consensus/types/src/attestation/selection_proof.rs index aa8c0c5658e..b4c48d00780 100644 --- a/consensus/types/src/selection_proof.rs +++ b/consensus/types/src/attestation/selection_proof.rs @@ -1,11 +1,15 @@ -use crate::{ - ChainSpec, Domain, EthSpec, Fork, Hash256, PublicKey, SecretKey, Signature, SignedRoot, Slot, -}; +use std::cmp; + +use bls::{PublicKey, SecretKey, Signature}; use ethereum_hashing::hash; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz::Encode; -use std::cmp; + +use crate::{ + core::{ChainSpec, Domain, EthSpec, Hash256, SignedRoot, Slot}, + fork::Fork, +}; #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] diff --git a/consensus/types/src/shuffling_id.rs b/consensus/types/src/attestation/shuffling_id.rs similarity index 93% rename from consensus/types/src/shuffling_id.rs rename to consensus/types/src/attestation/shuffling_id.rs index df16f605ed1..25217288f69 100644 --- a/consensus/types/src/shuffling_id.rs +++ b/consensus/types/src/attestation/shuffling_id.rs @@ -1,7 +1,12 @@ -use crate::*; +use std::hash::Hash; + use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; -use std::hash::Hash; + +use crate::{ + core::{Epoch, EthSpec, Hash256, RelativeEpoch}, + state::{BeaconState, BeaconStateError}, +}; /// Can be used to key (ID) the shuffling in some chain, in some epoch. 
/// diff --git a/consensus/types/src/signed_aggregate_and_proof.rs b/consensus/types/src/attestation/signed_aggregate_and_proof.rs similarity index 90% rename from consensus/types/src/signed_aggregate_and_proof.rs rename to consensus/types/src/attestation/signed_aggregate_and_proof.rs index 758ac2734b7..48c3f4c567e 100644 --- a/consensus/types/src/signed_aggregate_and_proof.rs +++ b/consensus/types/src/attestation/signed_aggregate_and_proof.rs @@ -1,18 +1,21 @@ -use super::{ - AggregateAndProof, AggregateAndProofBase, AggregateAndProofElectra, AggregateAndProofRef, -}; -use super::{ - Attestation, AttestationRef, ChainSpec, Domain, EthSpec, Fork, ForkName, Hash256, SecretKey, - SelectionProof, Signature, SignedRoot, -}; -use crate::context_deserialize; -use crate::test_utils::TestRandom; +use bls::{SecretKey, Signature}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + attestation::{ + AggregateAndProof, AggregateAndProofBase, AggregateAndProofElectra, AggregateAndProofRef, + Attestation, AttestationRef, SelectionProof, + }, + core::{ChainSpec, Domain, EthSpec, Hash256, SignedRoot}, + fork::{Fork, ForkName}, + test_utils::TestRandom, +}; + /// A Validators signed aggregate proof to publish on the `beacon_aggregate_and_proof` /// gossipsub topic. /// diff --git a/consensus/types/src/subnet_id.rs b/consensus/types/src/attestation/subnet_id.rs similarity index 97% rename from consensus/types/src/subnet_id.rs rename to consensus/types/src/attestation/subnet_id.rs index 6ec8ca4a27f..9585d077b5c 100644 --- a/consensus/types/src/subnet_id.rs +++ b/consensus/types/src/attestation/subnet_id.rs @@ -1,11 +1,17 @@ //! Identifies each shard by an integer identifier. 
-use crate::SingleAttestation; -use crate::{AttestationRef, ChainSpec, CommitteeIndex, EthSpec, Slot}; +use std::{ + ops::{Deref, DerefMut}, + sync::LazyLock, +}; + use alloy_primitives::{U256, bytes::Buf}; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; -use std::ops::{Deref, DerefMut}; -use std::sync::LazyLock; + +use crate::{ + attestation::{AttestationRef, CommitteeIndex, SingleAttestation}, + core::{ChainSpec, EthSpec, Slot}, +}; const MAX_SUBNET_ID: usize = 64; diff --git a/consensus/types/src/beacon_block.rs b/consensus/types/src/block/beacon_block.rs similarity index 97% rename from consensus/types/src/beacon_block.rs rename to consensus/types/src/block/beacon_block.rs index 060709d6556..c2f361eb4b8 100644 --- a/consensus/types/src/beacon_block.rs +++ b/consensus/types/src/block/beacon_block.rs @@ -1,18 +1,39 @@ -use crate::attestation::AttestationBase; -use crate::test_utils::TestRandom; -use crate::*; +use std::{fmt, marker::PhantomData}; + +use bls::{AggregateSignature, PublicKeyBytes, SecretKey, Signature, SignatureBytes}; +use context_deserialize::ContextDeserialize; use educe::Educe; +use fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, DecodeError}; use ssz_derive::{Decode, Encode}; -use std::fmt; -use std::marker::PhantomData; +use ssz_types::{BitList, BitVector, FixedVector, VariableList, typenum::Unsigned}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; -use self::indexed_attestation::IndexedAttestationBase; +use crate::{ + attestation::{AttestationBase, AttestationData, IndexedAttestationBase}, + block::{ + BeaconBlockBodyAltair, BeaconBlockBodyBase, BeaconBlockBodyBellatrix, + BeaconBlockBodyCapella, BeaconBlockBodyDeneb, BeaconBlockBodyElectra, BeaconBlockBodyFulu, + BeaconBlockBodyGloas, BeaconBlockBodyRef, BeaconBlockBodyRefMut, BeaconBlockHeader, + SignedBeaconBlock, 
SignedBeaconBlockHeader, + }, + core::{ChainSpec, Domain, Epoch, EthSpec, Graffiti, Hash256, SignedRoot, Slot}, + deposit::{Deposit, DepositData}, + execution::{ + AbstractExecPayload, BlindedPayload, Eth1Data, ExecutionPayload, ExecutionRequests, + FullPayload, + }, + exit::{SignedVoluntaryExit, VoluntaryExit}, + fork::{Fork, ForkName, InconsistentFork, map_fork_name}, + slashing::{AttesterSlashingBase, ProposerSlashing}, + state::BeaconStateError, + sync_committee::SyncAggregate, + test_utils::TestRandom, +}; /// A block of the `BeaconChain`. #[superstruct( @@ -283,7 +304,7 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockRef<'a, E, Payl /// Extracts a reference to an execution payload from a block, returning an error if the block /// is pre-merge. - pub fn execution_payload(&self) -> Result, Error> { + pub fn execution_payload(&self) -> Result, BeaconStateError> { self.body().execution_payload() } } @@ -865,7 +886,10 @@ impl fmt::Display for BlockImportSource { #[cfg(test)] mod tests { use super::*; - use crate::test_utils::{SeedableRng, XorShiftRng, test_ssz_tree_hash_pair_with}; + use crate::{ + core::MainnetEthSpec, + test_utils::{SeedableRng, XorShiftRng, test_ssz_tree_hash_pair_with}, + }; use ssz::Encode; type BeaconBlock = super::BeaconBlock; diff --git a/consensus/types/src/beacon_block_body.rs b/consensus/types/src/block/beacon_block_body.rs similarity index 93% rename from consensus/types/src/beacon_block_body.rs rename to consensus/types/src/block/beacon_block_body.rs index ced8fea4a99..f85dd8909e1 100644 --- a/consensus/types/src/beacon_block_body.rs +++ b/consensus/types/src/block/beacon_block_body.rs @@ -1,18 +1,42 @@ -use crate::test_utils::TestRandom; -use crate::*; +use std::marker::PhantomData; + +use bls::Signature; +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use merkle_proof::{MerkleTree, MerkleTreeError}; use metastruct::metastruct; use serde::{Deserialize, Deserializer, Serialize}; 
use ssz_derive::{Decode, Encode}; -use std::marker::PhantomData; +use ssz_types::{FixedVector, VariableList}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash::{BYTES_PER_CHUNK, TreeHash}; use tree_hash_derive::TreeHash; -pub type KzgCommitments = - VariableList::MaxBlobCommitmentsPerBlock>; +use crate::{ + attestation::{AttestationBase, AttestationElectra, AttestationRef, AttestationRefMut}, + core::{EthSpec, Graffiti, Hash256}, + deposit::Deposit, + execution::{ + AbstractExecPayload, BlindedPayload, BlindedPayloadBellatrix, BlindedPayloadCapella, + BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadGloas, + Eth1Data, ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, + ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, + ExecutionPayloadGloas, ExecutionRequests, FullPayload, FullPayloadBellatrix, + FullPayloadCapella, FullPayloadDeneb, FullPayloadElectra, FullPayloadFulu, + FullPayloadGloas, SignedBlsToExecutionChange, + }, + exit::SignedVoluntaryExit, + fork::{ForkName, map_fork_name}, + kzg_ext::KzgCommitments, + light_client::consts::{EXECUTION_PAYLOAD_INDEX, EXECUTION_PAYLOAD_PROOF_LEN}, + slashing::{ + AttesterSlashingBase, AttesterSlashingElectra, AttesterSlashingRef, ProposerSlashing, + }, + state::BeaconStateError, + sync_committee::SyncAggregate, + test_utils::TestRandom, +}; /// The number of leaves (including padding) on the `BeaconBlockBody` Merkle tree. 
/// @@ -63,8 +87,14 @@ pub const BLOB_KZG_COMMITMENTS_INDEX: usize = 11; Fulu(metastruct(mappings(beacon_block_body_fulu_fields(groups(fields))))), Gloas(metastruct(mappings(beacon_block_body_gloas_fields(groups(fields))))), ), - cast_error(ty = "Error", expr = "Error::IncorrectStateVariant"), - partial_getter_error(ty = "Error", expr = "Error::IncorrectStateVariant") + cast_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), + partial_getter_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ) )] #[cfg_attr( feature = "arbitrary", @@ -147,7 +177,7 @@ pub struct BeaconBlockBody = FullPay } impl> BeaconBlockBody { - pub fn execution_payload(&self) -> Result, Error> { + pub fn execution_payload(&self) -> Result, BeaconStateError> { self.to_ref().execution_payload() } @@ -158,9 +188,9 @@ impl> BeaconBlockBody { } impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, Payload> { - pub fn execution_payload(&self) -> Result, Error> { + pub fn execution_payload(&self) -> Result, BeaconStateError> { match self { - Self::Base(_) | Self::Altair(_) => Err(Error::IncorrectStateVariant), + Self::Base(_) | Self::Altair(_) => Err(BeaconStateError::IncorrectStateVariant), Self::Bellatrix(body) => Ok(Payload::Ref::from(&body.execution_payload)), Self::Capella(body) => Ok(Payload::Ref::from(&body.execution_payload)), Self::Deneb(body) => Ok(Payload::Ref::from(&body.execution_payload)), @@ -216,7 +246,7 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, pub fn kzg_commitment_merkle_proof( &self, index: usize, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { let kzg_commitments_proof = self.kzg_commitments_merkle_proof()?; let proof = self.complete_kzg_commitment_merkle_proof(index, &kzg_commitments_proof)?; Ok(proof) @@ -228,10 +258,10 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, &self, index: usize, 
kzg_commitments_proof: &[Hash256], - ) -> Result, Error> { + ) -> Result, BeaconStateError> { match self { Self::Base(_) | Self::Altair(_) | Self::Bellatrix(_) | Self::Capella(_) => { - Err(Error::IncorrectStateVariant) + Err(BeaconStateError::IncorrectStateVariant) } Self::Deneb(_) | Self::Electra(_) | Self::Fulu(_) | Self::Gloas(_) => { // We compute the branches by generating 2 merkle trees: @@ -253,7 +283,7 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, let tree = MerkleTree::create(&blob_leaves, depth as usize); let (_, mut proof) = tree .generate_proof(index, depth as usize) - .map_err(Error::MerkleTreeError)?; + .map_err(BeaconStateError::MerkleTreeError)?; // Add the branch corresponding to the length mix-in. let length = blob_leaves.len(); @@ -261,7 +291,9 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, let mut length_bytes = [0; BYTES_PER_CHUNK]; length_bytes .get_mut(0..usize_len) - .ok_or(Error::MerkleTreeError(MerkleTreeError::PleaseNotifyTheDevs))? + .ok_or(BeaconStateError::MerkleTreeError( + MerkleTreeError::PleaseNotifyTheDevs, + ))? .copy_from_slice(&length.to_le_bytes()); let length_root = Hash256::from_slice(length_bytes.as_slice()); proof.push(length_root); @@ -279,32 +311,41 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, /// Produces the proof of inclusion for `self.blob_kzg_commitments`. pub fn kzg_commitments_merkle_proof( &self, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { let body_leaves = self.body_merkle_leaves(); let beacon_block_body_depth = body_leaves.len().next_power_of_two().ilog2() as usize; let tree = MerkleTree::create(&body_leaves, beacon_block_body_depth); let (_, proof) = tree .generate_proof(BLOB_KZG_COMMITMENTS_INDEX, beacon_block_body_depth) - .map_err(Error::MerkleTreeError)?; + .map_err(BeaconStateError::MerkleTreeError)?; Ok(FixedVector::new(proof)?) 
} - pub fn block_body_merkle_proof(&self, generalized_index: usize) -> Result, Error> { + pub fn block_body_merkle_proof( + &self, + generalized_index: usize, + ) -> Result, BeaconStateError> { let field_index = match generalized_index { - light_client_update::EXECUTION_PAYLOAD_INDEX => { + EXECUTION_PAYLOAD_INDEX => { // Execution payload is a top-level field, subtract off the generalized indices // for the internal nodes. Result should be 9, the field offset of the execution // payload in the `BeaconBlockBody`: // https://github.com/ethereum/consensus-specs/blob/dev/specs/deneb/beacon-chain.md#beaconblockbody generalized_index .checked_sub(NUM_BEACON_BLOCK_BODY_HASH_TREE_ROOT_LEAVES) - .ok_or(Error::GeneralizedIndexNotSupported(generalized_index))? + .ok_or(BeaconStateError::GeneralizedIndexNotSupported( + generalized_index, + ))? + } + _ => { + return Err(BeaconStateError::GeneralizedIndexNotSupported( + generalized_index, + )); } - _ => return Err(Error::GeneralizedIndexNotSupported(generalized_index)), }; let leaves = self.body_merkle_leaves(); - let depth = light_client_update::EXECUTION_PAYLOAD_PROOF_LEN; + let depth = EXECUTION_PAYLOAD_PROOF_LEN; let tree = merkle_proof::MerkleTree::create(&leaves, depth); let (_, proof) = tree.generate_proof(field_index, depth)?; @@ -1100,22 +1141,16 @@ impl<'de, E: EthSpec, Payload: AbstractExecPayload> ContextDeserialize<'de, F } } -/// Util method helpful for logging. 
-pub fn format_kzg_commitments(commitments: &[KzgCommitment]) -> String { - let commitment_strings: Vec = commitments.iter().map(|x| x.to_string()).collect(); - let commitments_joined = commitment_strings.join(", "); - let surrounded_commitments = format!("[{}]", commitments_joined); - surrounded_commitments -} - #[cfg(test)] mod tests { mod base { use super::super::*; + use crate::core::MainnetEthSpec; ssz_and_tree_hash_tests!(BeaconBlockBodyBase); } mod altair { use super::super::*; + use crate::core::MainnetEthSpec; ssz_and_tree_hash_tests!(BeaconBlockBodyAltair); } } diff --git a/consensus/types/src/beacon_block_header.rs b/consensus/types/src/block/beacon_block_header.rs similarity index 90% rename from consensus/types/src/beacon_block_header.rs rename to consensus/types/src/block/beacon_block_header.rs index e14a9fc8af7..06e1023d911 100644 --- a/consensus/types/src/beacon_block_header.rs +++ b/consensus/types/src/block/beacon_block_header.rs @@ -1,6 +1,4 @@ -use crate::test_utils::TestRandom; -use crate::*; - +use bls::SecretKey; use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; @@ -8,6 +6,13 @@ use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{ + block::SignedBeaconBlockHeader, + core::{ChainSpec, Domain, EthSpec, Hash256, SignedRoot, Slot}, + fork::{Fork, ForkName}, + test_utils::TestRandom, +}; + /// A header of a `BeaconBlock`. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/block/mod.rs b/consensus/types/src/block/mod.rs new file mode 100644 index 00000000000..81c8ffbd639 --- /dev/null +++ b/consensus/types/src/block/mod.rs @@ -0,0 +1,26 @@ +mod beacon_block; +mod beacon_block_body; +mod beacon_block_header; +mod signed_beacon_block; +mod signed_beacon_block_header; + +pub use beacon_block::{ + BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockCapella, + BeaconBlockDeneb, BeaconBlockElectra, BeaconBlockFulu, BeaconBlockGloas, BeaconBlockRef, + BeaconBlockRefMut, BlindedBeaconBlock, BlockImportSource, EmptyBlock, +}; +pub use beacon_block_body::{ + BLOB_KZG_COMMITMENTS_INDEX, BeaconBlockBody, BeaconBlockBodyAltair, BeaconBlockBodyBase, + BeaconBlockBodyBellatrix, BeaconBlockBodyCapella, BeaconBlockBodyDeneb, BeaconBlockBodyElectra, + BeaconBlockBodyFulu, BeaconBlockBodyGloas, BeaconBlockBodyRef, BeaconBlockBodyRefMut, + NUM_BEACON_BLOCK_BODY_HASH_TREE_ROOT_LEAVES, +}; +pub use beacon_block_header::BeaconBlockHeader; + +pub use signed_beacon_block::{ + SignedBeaconBlock, SignedBeaconBlockAltair, SignedBeaconBlockBase, SignedBeaconBlockBellatrix, + SignedBeaconBlockCapella, SignedBeaconBlockDeneb, SignedBeaconBlockElectra, + SignedBeaconBlockFulu, SignedBeaconBlockGloas, SignedBeaconBlockHash, SignedBlindedBeaconBlock, + ssz_tagged_signed_beacon_block, ssz_tagged_signed_beacon_block_arc, +}; +pub use signed_beacon_block_header::SignedBeaconBlockHeader; diff --git a/consensus/types/src/signed_beacon_block.rs b/consensus/types/src/block/signed_beacon_block.rs similarity index 95% rename from consensus/types/src/signed_beacon_block.rs rename to consensus/types/src/block/signed_beacon_block.rs index 7b04cc57711..e8927ee7659 100644 --- a/consensus/types/src/signed_beacon_block.rs +++ b/consensus/types/src/block/signed_beacon_block.rs @@ -1,17 +1,42 @@ -use crate::beacon_block_body::{BLOB_KZG_COMMITMENTS_INDEX, format_kzg_commitments}; -use 
crate::test_utils::TestRandom; -use crate::*; +use std::fmt; + +use bls::{PublicKey, Signature}; +use context_deserialize::ContextDeserialize; use educe::Educe; use merkle_proof::MerkleTree; use serde::{Deserialize, Deserializer, Serialize}; use ssz_derive::{Decode, Encode}; -use std::fmt; +use ssz_types::FixedVector; use superstruct::superstruct; use test_random_derive::TestRandom; use tracing::instrument; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{ + block::{ + BLOB_KZG_COMMITMENTS_INDEX, BeaconBlock, BeaconBlockAltair, BeaconBlockBase, + BeaconBlockBellatrix, BeaconBlockBodyBellatrix, BeaconBlockBodyCapella, + BeaconBlockBodyDeneb, BeaconBlockBodyElectra, BeaconBlockBodyFulu, BeaconBlockBodyGloas, + BeaconBlockCapella, BeaconBlockDeneb, BeaconBlockElectra, BeaconBlockFulu, + BeaconBlockGloas, BeaconBlockHeader, BeaconBlockRef, BeaconBlockRefMut, + SignedBeaconBlockHeader, + }, + core::{ChainSpec, Domain, Epoch, EthSpec, Hash256, SignedRoot, SigningData, Slot}, + execution::{ + AbstractExecPayload, BlindedPayload, BlindedPayloadBellatrix, BlindedPayloadCapella, + BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadGloas, + ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, + ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, + ExecutionPayloadGloas, FullPayload, FullPayloadBellatrix, FullPayloadCapella, + FullPayloadDeneb, FullPayloadElectra, FullPayloadFulu, FullPayloadGloas, + }, + fork::{Fork, ForkName, ForkVersionDecode, InconsistentFork, map_fork_name}, + kzg_ext::format_kzg_commitments, + state::BeaconStateError, + test_utils::TestRandom, +}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(PartialEq, Eq, Hash, Clone, Copy)] pub struct SignedBeaconBlockHash(Hash256); @@ -272,7 +297,7 @@ impl> SignedBeaconBlock SignedBeaconBlockHeader, FixedVector, ), - Error, + BeaconStateError, > { // Create the block body merkle tree let body_leaves = 
self.message().body().body_merkle_leaves(); @@ -282,7 +307,7 @@ impl> SignedBeaconBlock // Compute the KZG commitments inclusion proof let (_, proof) = body_merkle_tree .generate_proof(BLOB_KZG_COMMITMENTS_INDEX, beacon_block_body_depth) - .map_err(Error::MerkleTreeError)?; + .map_err(BeaconStateError::MerkleTreeError)?; let kzg_commitments_inclusion_proof = FixedVector::new(proof)?; let block_header = BeaconBlockHeader { @@ -919,6 +944,7 @@ pub mod ssz_tagged_signed_beacon_block_arc { #[cfg(test)] mod test { use super::*; + use crate::{block::EmptyBlock, core::MainnetEthSpec}; #[test] fn add_remove_payload_roundtrip() { diff --git a/consensus/types/src/signed_beacon_block_header.rs b/consensus/types/src/block/signed_beacon_block_header.rs similarity index 84% rename from consensus/types/src/signed_beacon_block_header.rs rename to consensus/types/src/block/signed_beacon_block_header.rs index 4a5ff2ec1a4..2fcd8a705f0 100644 --- a/consensus/types/src/signed_beacon_block_header.rs +++ b/consensus/types/src/block/signed_beacon_block_header.rs @@ -1,13 +1,17 @@ -use crate::context_deserialize; -use crate::{ - BeaconBlockHeader, ChainSpec, Domain, EthSpec, Fork, ForkName, Hash256, PublicKey, Signature, - SignedRoot, test_utils::TestRandom, -}; +use bls::{PublicKey, Signature}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + block::BeaconBlockHeader, + core::{ChainSpec, Domain, EthSpec, Hash256, SignedRoot}, + fork::{Fork, ForkName}, + test_utils::TestRandom, +}; + /// A signed header of a `BeaconBlock`. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/builder_bid.rs b/consensus/types/src/builder/builder_bid.rs similarity index 93% rename from consensus/types/src/builder_bid.rs rename to consensus/types/src/builder/builder_bid.rs index 3fb7af35ca1..be9bb281553 100644 --- a/consensus/types/src/builder_bid.rs +++ b/consensus/types/src/builder/builder_bid.rs @@ -1,13 +1,6 @@ -use crate::beacon_block_body::KzgCommitments; -use crate::{ - ChainSpec, ContextDeserialize, EthSpec, ExecutionPayloadHeaderBellatrix, - ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, - ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, - ExecutionPayloadHeaderRefMut, ExecutionRequests, ForkName, ForkVersionDecode, SignedRoot, - Uint256, test_utils::TestRandom, -}; use bls::PublicKeyBytes; use bls::Signature; +use context_deserialize::ContextDeserialize; use serde::{Deserialize, Deserializer, Serialize}; use ssz::Decode; use ssz_derive::{Decode, Encode}; @@ -15,6 +8,19 @@ use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{ChainSpec, EthSpec, SignedRoot, Uint256}, + execution::{ + ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, + ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, + ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, + ExecutionRequests, + }, + fork::{ForkName, ForkVersionDecode}, + kzg_ext::KzgCommitments, + test_utils::TestRandom, +}; + #[superstruct( variants(Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), variant_attributes( diff --git a/consensus/types/src/builder/mod.rs b/consensus/types/src/builder/mod.rs new file mode 100644 index 00000000000..88a8e6a01a3 --- /dev/null +++ b/consensus/types/src/builder/mod.rs @@ -0,0 +1,6 @@ +mod builder_bid; + +pub use builder_bid::{ + BuilderBid, BuilderBidBellatrix, BuilderBidCapella, BuilderBidDeneb, 
BuilderBidElectra, + BuilderBidFulu, BuilderBidGloas, SignedBuilderBid, +}; diff --git a/consensus/types/src/consolidation_request.rs b/consensus/types/src/consolidation/consolidation_request.rs similarity index 84% rename from consensus/types/src/consolidation_request.rs rename to consensus/types/src/consolidation/consolidation_request.rs index 2af3426b68f..3f09517a903 100644 --- a/consensus/types/src/consolidation_request.rs +++ b/consensus/types/src/consolidation/consolidation_request.rs @@ -1,11 +1,17 @@ -use crate::context_deserialize; -use crate::{Address, ForkName, PublicKeyBytes, SignedRoot, test_utils::TestRandom}; +use bls::PublicKeyBytes; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Address, SignedRoot}, + fork::ForkName, + test_utils::TestRandom, +}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/consolidation/mod.rs b/consensus/types/src/consolidation/mod.rs new file mode 100644 index 00000000000..a6a2f4a3317 --- /dev/null +++ b/consensus/types/src/consolidation/mod.rs @@ -0,0 +1,5 @@ +mod consolidation_request; +mod pending_consolidation; + +pub use consolidation_request::ConsolidationRequest; +pub use pending_consolidation::PendingConsolidation; diff --git a/consensus/types/src/pending_consolidation.rs b/consensus/types/src/consolidation/pending_consolidation.rs similarity index 86% rename from consensus/types/src/pending_consolidation.rs rename to consensus/types/src/consolidation/pending_consolidation.rs index 9fb8c3566db..fcd76e43b65 100644 --- a/consensus/types/src/pending_consolidation.rs +++ b/consensus/types/src/consolidation/pending_consolidation.rs @@ -1,11 +1,11 @@ -use crate::ForkName; -use 
crate::context_deserialize; -use crate::test_utils::TestRandom; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{fork::ForkName, test_utils::TestRandom}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/application_domain.rs b/consensus/types/src/core/application_domain.rs similarity index 100% rename from consensus/types/src/application_domain.rs rename to consensus/types/src/core/application_domain.rs diff --git a/consensus/types/src/chain_spec.rs b/consensus/types/src/core/chain_spec.rs similarity index 99% rename from consensus/types/src/chain_spec.rs rename to consensus/types/src/core/chain_spec.rs index a66080ada6f..c8052b502b5 100644 --- a/consensus/types/src/chain_spec.rs +++ b/consensus/types/src/core/chain_spec.rs @@ -1,19 +1,27 @@ -use crate::application_domain::{APPLICATION_DOMAIN_BUILDER, ApplicationDomain}; -use crate::blob_sidecar::BlobIdentifier; -use crate::data_column_sidecar::DataColumnsByRootIdentifier; -use crate::*; +use std::{fs::File, path::Path, time::Duration}; + use educe::Educe; use ethereum_hashing::hash; +use fixed_bytes::FixedBytesExtended; use int_to_bytes::int_to_bytes4; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_utils::quoted_u64::MaybeQuoted; use ssz::Encode; -use std::fs::File; -use std::path::Path; -use std::time::Duration; +use ssz_types::{RuntimeVariableList, VariableList}; use tree_hash::TreeHash; +use crate::{ + core::{ + APPLICATION_DOMAIN_BUILDER, Address, ApplicationDomain, EnrForkId, Epoch, EthSpec, + EthSpecId, Hash256, MainnetEthSpec, Slot, Uint256, + }, + data::{BlobIdentifier, DataColumnSubnetId, DataColumnsByRootIdentifier}, + 
execution::ExecutionBlockHash, + fork::{Fork, ForkData, ForkName}, + state::BeaconState, +}; + /// Each of the BLS signature domains. #[derive(Debug, PartialEq, Clone, Copy)] pub enum Domain { @@ -2581,6 +2589,7 @@ mod tests { #[cfg(test)] mod yaml_tests { use super::*; + use crate::core::MinimalEthSpec; use paste::paste; use std::sync::Arc; use tempfile::NamedTempFile; diff --git a/consensus/types/src/config_and_preset.rs b/consensus/types/src/core/config_and_preset.rs similarity index 95% rename from consensus/types/src/config_and_preset.rs rename to consensus/types/src/core/config_and_preset.rs index 16b09c9c088..08141c77311 100644 --- a/consensus/types/src/config_and_preset.rs +++ b/consensus/types/src/core/config_and_preset.rs @@ -1,13 +1,14 @@ -use crate::{ - AltairPreset, BasePreset, BellatrixPreset, CapellaPreset, ChainSpec, Config, DenebPreset, - ElectraPreset, EthSpec, FuluPreset, GloasPreset, consts::altair, consts::deneb, -}; use maplit::hashmap; use serde::{Deserialize, Serialize}; use serde_json::Value; use std::collections::HashMap; use superstruct::superstruct; +use crate::core::{ + AltairPreset, BasePreset, BellatrixPreset, CapellaPreset, ChainSpec, Config, DenebPreset, + ElectraPreset, EthSpec, FuluPreset, GloasPreset, consts, +}; + /// Fusion of a runtime-config with the compile-time preset values. /// /// Mostly useful for the API. 
@@ -131,11 +132,11 @@ pub fn get_extra_fields(spec: &ChainSpec) -> HashMap { "domain_sync_committee_selection_proof".to_uppercase() => u32_hex(spec.domain_sync_committee_selection_proof), "sync_committee_subnet_count".to_uppercase() => - altair::SYNC_COMMITTEE_SUBNET_COUNT.to_string().into(), + consts::altair::SYNC_COMMITTEE_SUBNET_COUNT.to_string().into(), "target_aggregators_per_sync_subcommittee".to_uppercase() => - altair::TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE.to_string().into(), + consts::altair::TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE.to_string().into(), // Deneb - "versioned_hash_version_kzg".to_uppercase() => deneb::VERSIONED_HASH_VERSION_KZG.to_string().into(), + "versioned_hash_version_kzg".to_uppercase() => consts::deneb::VERSIONED_HASH_VERSION_KZG.to_string().into(), // Electra "compounding_withdrawal_prefix".to_uppercase() => u8_hex(spec.compounding_withdrawal_prefix_byte), "unset_deposit_requests_start_index".to_uppercase() => spec.unset_deposit_requests_start_index.to_string().into(), diff --git a/consensus/types/src/consts.rs b/consensus/types/src/core/consts.rs similarity index 94% rename from consensus/types/src/consts.rs rename to consensus/types/src/core/consts.rs index c20d5fe8f33..b6d63c47a88 100644 --- a/consensus/types/src/consts.rs +++ b/consensus/types/src/core/consts.rs @@ -23,5 +23,5 @@ pub mod bellatrix { pub const INTERVALS_PER_SLOT: u64 = 3; } pub mod deneb { - pub use crate::VERSIONED_HASH_VERSION_KZG; + pub use kzg::VERSIONED_HASH_VERSION_KZG; } diff --git a/consensus/types/src/enr_fork_id.rs b/consensus/types/src/core/enr_fork_id.rs similarity index 95% rename from consensus/types/src/enr_fork_id.rs rename to consensus/types/src/core/enr_fork_id.rs index e22672aeb60..c3b400cd136 100644 --- a/consensus/types/src/enr_fork_id.rs +++ b/consensus/types/src/core/enr_fork_id.rs @@ -1,11 +1,10 @@ -use crate::Epoch; -use crate::test_utils::TestRandom; - use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use 
test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::Epoch, test_utils::TestRandom}; + /// Specifies a fork which allows nodes to identify each other on the network. This fork is used in /// a nodes local ENR. /// diff --git a/consensus/types/src/eth_spec.rs b/consensus/types/src/core/eth_spec.rs similarity index 98% rename from consensus/types/src/eth_spec.rs rename to consensus/types/src/core/eth_spec.rs index 47d32ad9e4d..11857e678c0 100644 --- a/consensus/types/src/eth_spec.rs +++ b/consensus/types/src/core/eth_spec.rs @@ -1,16 +1,22 @@ -use crate::*; +use std::{ + fmt::{self, Debug}, + str::FromStr, +}; use safe_arith::SafeArith; use serde::{Deserialize, Serialize}; use ssz_types::typenum::{ U0, U1, U2, U4, U8, U16, U17, U32, U64, U128, U256, U512, U625, U1024, U2048, U4096, U8192, U65536, U131072, U262144, U1048576, U16777216, U33554432, U134217728, U1073741824, - U1099511627776, UInt, bit::B0, + U1099511627776, UInt, Unsigned, bit::B0, +}; + +use crate::{ + core::{ChainSpec, Epoch}, + state::BeaconStateError, }; -use std::fmt::{self, Debug}; -use std::str::FromStr; -pub type U5000 = UInt, B0>, B0>; // 625 * 8 = 5000 +type U5000 = UInt, B0>, B0>; // 625 * 8 = 5000 const MAINNET: &str = "mainnet"; const MINIMAL: &str = "minimal"; @@ -182,7 +188,7 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq + fn get_committee_count_per_slot( active_validator_count: usize, spec: &ChainSpec, - ) -> Result { + ) -> Result { Self::get_committee_count_per_slot_with( active_validator_count, spec.max_committees_per_slot, @@ -194,7 +200,7 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq + active_validator_count: usize, max_committees_per_slot: usize, target_committee_size: usize, - ) -> Result { + ) -> Result { let slots_per_epoch = Self::SlotsPerEpoch::to_usize(); Ok(std::cmp::max( diff --git a/consensus/types/src/graffiti.rs b/consensus/types/src/core/graffiti.rs similarity index 98% 
rename from consensus/types/src/graffiti.rs rename to consensus/types/src/core/graffiti.rs index 31cc4187a67..d0e0e1b1a89 100644 --- a/consensus/types/src/graffiti.rs +++ b/consensus/types/src/core/graffiti.rs @@ -1,14 +1,13 @@ -use crate::{ - Hash256, - test_utils::{RngCore, TestRandom}, -}; +use std::{fmt, str::FromStr}; + +use rand::RngCore; use regex::bytes::Regex; use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error}; use ssz::{Decode, DecodeError, Encode}; -use std::fmt; -use std::str::FromStr; use tree_hash::{PackedEncoding, TreeHash}; +use crate::{core::Hash256, test_utils::TestRandom}; + pub const GRAFFITI_BYTES_LEN: usize = 32; /// The 32-byte `graffiti` field on a beacon block. diff --git a/consensus/types/src/core/mod.rs b/consensus/types/src/core/mod.rs new file mode 100644 index 00000000000..bb50bb18568 --- /dev/null +++ b/consensus/types/src/core/mod.rs @@ -0,0 +1,44 @@ +pub mod consts; + +mod application_domain; +mod chain_spec; +mod config_and_preset; +mod enr_fork_id; +mod eth_spec; +mod graffiti; +mod non_zero_usize; +mod preset; +mod relative_epoch; +mod signing_data; +mod slot_data; +#[macro_use] +mod slot_epoch_macros; +mod slot_epoch; +#[cfg(feature = "sqlite")] +mod sqlite; + +pub use application_domain::{APPLICATION_DOMAIN_BUILDER, ApplicationDomain}; +pub use chain_spec::{BlobParameters, BlobSchedule, ChainSpec, Config, Domain}; +pub use config_and_preset::{ + ConfigAndPreset, ConfigAndPresetDeneb, ConfigAndPresetElectra, ConfigAndPresetFulu, + ConfigAndPresetGloas, get_extra_fields, +}; +pub use enr_fork_id::EnrForkId; +pub use eth_spec::{EthSpec, EthSpecId, GNOSIS, GnosisEthSpec, MainnetEthSpec, MinimalEthSpec}; +pub use graffiti::{GRAFFITI_BYTES_LEN, Graffiti, GraffitiString}; +pub use non_zero_usize::new_non_zero_usize; +pub use preset::{ + AltairPreset, BasePreset, BellatrixPreset, CapellaPreset, DenebPreset, ElectraPreset, + FuluPreset, GloasPreset, +}; +pub use relative_epoch::{Error as RelativeEpochError, 
RelativeEpoch}; +pub use signing_data::{SignedRoot, SigningData}; +pub use slot_data::SlotData; +pub use slot_epoch::{Epoch, Slot}; + +pub type Hash256 = alloy_primitives::B256; +pub type Uint256 = alloy_primitives::U256; +pub type Hash64 = alloy_primitives::B64; +pub type Address = alloy_primitives::Address; +pub type VersionedHash = Hash256; +pub type MerkleProof = Vec; diff --git a/consensus/types/src/non_zero_usize.rs b/consensus/types/src/core/non_zero_usize.rs similarity index 100% rename from consensus/types/src/non_zero_usize.rs rename to consensus/types/src/core/non_zero_usize.rs diff --git a/consensus/types/src/preset.rs b/consensus/types/src/core/preset.rs similarity index 99% rename from consensus/types/src/preset.rs rename to consensus/types/src/core/preset.rs index ab54c0345f7..b436fafd3a0 100644 --- a/consensus/types/src/preset.rs +++ b/consensus/types/src/core/preset.rs @@ -1,5 +1,7 @@ -use crate::{ChainSpec, Epoch, EthSpec, Unsigned}; use serde::{Deserialize, Serialize}; +use ssz_types::typenum::Unsigned; + +use crate::core::{ChainSpec, Epoch, EthSpec}; /// Value-level representation of an Ethereum consensus "preset". 
/// diff --git a/consensus/types/src/relative_epoch.rs b/consensus/types/src/core/relative_epoch.rs similarity index 99% rename from consensus/types/src/relative_epoch.rs rename to consensus/types/src/core/relative_epoch.rs index 2fa0ae41bda..d1ee7ecc7c6 100644 --- a/consensus/types/src/relative_epoch.rs +++ b/consensus/types/src/core/relative_epoch.rs @@ -1,6 +1,7 @@ -use crate::*; use safe_arith::{ArithError, SafeArith}; +use crate::core::{Epoch, Slot}; + #[derive(Debug, PartialEq, Clone, Copy)] pub enum Error { EpochTooLow { base: Epoch, other: Epoch }, diff --git a/consensus/types/src/signing_data.rs b/consensus/types/src/core/signing_data.rs similarity index 85% rename from consensus/types/src/signing_data.rs rename to consensus/types/src/core/signing_data.rs index 69b7dabfe5a..907f03fac7b 100644 --- a/consensus/types/src/signing_data.rs +++ b/consensus/types/src/core/signing_data.rs @@ -1,13 +1,12 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ForkName, Hash256}; - +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{core::Hash256, fork::ForkName, test_utils::TestRandom}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom)] #[context_deserialize(ForkName)] diff --git a/consensus/types/src/slot_data.rs b/consensus/types/src/core/slot_data.rs similarity index 92% rename from consensus/types/src/slot_data.rs rename to consensus/types/src/core/slot_data.rs index 19775913b98..f0bd01814f2 100644 --- a/consensus/types/src/slot_data.rs +++ b/consensus/types/src/core/slot_data.rs @@ -1,4 +1,4 @@ -use crate::Slot; +use crate::core::Slot; /// A trait providing a `Slot` getter for messages that are related to a single slot. 
Useful in /// making parts of attestation and sync committee processing generic. diff --git a/consensus/types/src/slot_epoch.rs b/consensus/types/src/core/slot_epoch.rs similarity index 98% rename from consensus/types/src/slot_epoch.rs rename to consensus/types/src/core/slot_epoch.rs index 05af9c5232d..97457701b11 100644 --- a/consensus/types/src/slot_epoch.rs +++ b/consensus/types/src/core/slot_epoch.rs @@ -10,15 +10,17 @@ //! implement `Into`, however this would allow operations between `Slots` and `Epochs` which //! may lead to programming errors which are not detected by the compiler. -use crate::test_utils::TestRandom; -use crate::{ChainSpec, SignedRoot}; +use std::{fmt, hash::Hash}; use rand::RngCore; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz::{Decode, DecodeError, Encode}; -use std::fmt; -use std::hash::Hash; + +use crate::{ + core::{ChainSpec, SignedRoot}, + test_utils::TestRandom, +}; #[cfg(feature = "legacy-arith")] use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Rem, Sub, SubAssign}; diff --git a/consensus/types/src/slot_epoch_macros.rs b/consensus/types/src/core/slot_epoch_macros.rs similarity index 100% rename from consensus/types/src/slot_epoch_macros.rs rename to consensus/types/src/core/slot_epoch_macros.rs diff --git a/consensus/types/src/sqlite.rs b/consensus/types/src/core/sqlite.rs similarity index 96% rename from consensus/types/src/sqlite.rs rename to consensus/types/src/core/sqlite.rs index b6318dc4ce5..de892b4e98f 100644 --- a/consensus/types/src/sqlite.rs +++ b/consensus/types/src/core/sqlite.rs @@ -1,10 +1,11 @@ //! Implementations of SQLite compatibility traits. -use crate::{Epoch, Slot}; use rusqlite::{ Error, types::{FromSql, FromSqlError, ToSql, ToSqlOutput, ValueRef}, }; +use crate::core::{Epoch, Slot}; + macro_rules! 
impl_to_from_sql { ($type:ty) => { impl ToSql for $type { diff --git a/consensus/types/src/blob_sidecar.rs b/consensus/types/src/data/blob_sidecar.rs similarity index 94% rename from consensus/types/src/blob_sidecar.rs rename to consensus/types/src/data/blob_sidecar.rs index d2c7331a579..709e556933b 100644 --- a/consensus/types/src/blob_sidecar.rs +++ b/consensus/types/src/data/blob_sidecar.rs @@ -1,12 +1,7 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ - AbstractExecPayload, BeaconBlockHeader, BeaconStateError, Blob, ChainSpec, Epoch, EthSpec, - FixedVector, ForkName, Hash256, KzgProofs, RuntimeFixedVector, RuntimeVariableList, - SignedBeaconBlock, SignedBeaconBlockHeader, Slot, VariableList, - beacon_block_body::BLOB_KZG_COMMITMENTS_INDEX, -}; +use std::{fmt::Debug, hash::Hash, sync::Arc}; + use bls::Signature; +use context_deserialize::context_deserialize; use educe::Educe; use kzg::{BYTES_PER_BLOB, BYTES_PER_FIELD_ELEMENT, Blob as KzgBlob, Kzg, KzgCommitment, KzgProof}; use merkle_proof::{MerkleTreeError, merkle_root_from_branch, verify_merkle_proof}; @@ -15,13 +10,24 @@ use safe_arith::ArithError; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz_derive::{Decode, Encode}; -use std::fmt::Debug; -use std::hash::Hash; -use std::sync::Arc; +use ssz_types::{FixedVector, RuntimeFixedVector, RuntimeVariableList, VariableList}; use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{ + block::{ + BLOB_KZG_COMMITMENTS_INDEX, BeaconBlockHeader, SignedBeaconBlock, SignedBeaconBlockHeader, + }, + core::{ChainSpec, Epoch, EthSpec, Hash256, Slot}, + data::Blob, + execution::AbstractExecPayload, + fork::ForkName, + kzg_ext::KzgProofs, + state::BeaconStateError, + test_utils::TestRandom, +}; + /// Container of the data that identifies an individual blob. 
#[derive( Serialize, Deserialize, Encode, Decode, TreeHash, Copy, Clone, Debug, PartialEq, Eq, Hash, diff --git a/consensus/types/src/data_column_custody_group.rs b/consensus/types/src/data/data_column_custody_group.rs similarity index 98% rename from consensus/types/src/data_column_custody_group.rs rename to consensus/types/src/data/data_column_custody_group.rs index 7ecabab0abc..d96d13cfff6 100644 --- a/consensus/types/src/data_column_custody_group.rs +++ b/consensus/types/src/data/data_column_custody_group.rs @@ -1,8 +1,14 @@ -use crate::{ChainSpec, ColumnIndex, DataColumnSubnetId, EthSpec}; +use std::collections::HashSet; + use alloy_primitives::U256; use itertools::Itertools; use safe_arith::{ArithError, SafeArith}; -use std::collections::HashSet; + +use crate::{ + EthSpec, + core::ChainSpec, + data::{ColumnIndex, DataColumnSubnetId}, +}; pub type CustodyIndex = u64; diff --git a/consensus/types/src/data_column_sidecar.rs b/consensus/types/src/data/data_column_sidecar.rs similarity index 94% rename from consensus/types/src/data_column_sidecar.rs rename to consensus/types/src/data/data_column_sidecar.rs index 62ce4467dfa..71d821f83ef 100644 --- a/consensus/types/src/data_column_sidecar.rs +++ b/consensus/types/src/data/data_column_sidecar.rs @@ -1,13 +1,8 @@ -use crate::beacon_block_body::{BLOB_KZG_COMMITMENTS_INDEX, KzgCommitments}; -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ - BeaconBlockHeader, BeaconStateError, Epoch, EthSpec, ForkName, Hash256, - SignedBeaconBlockHeader, Slot, -}; +use std::sync::Arc; + use bls::Signature; +use context_deserialize::context_deserialize; use educe::Educe; -use kzg::Error as KzgError; use kzg::{KzgCommitment, KzgProof}; use merkle_proof::verify_merkle_proof; use safe_arith::ArithError; @@ -16,11 +11,19 @@ use ssz::Encode; use ssz_derive::{Decode, Encode}; use ssz_types::Error as SszError; use ssz_types::{FixedVector, VariableList}; -use std::sync::Arc; use test_random_derive::TestRandom; 
use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{ + block::{BLOB_KZG_COMMITMENTS_INDEX, BeaconBlockHeader, SignedBeaconBlockHeader}, + core::{Epoch, EthSpec, Hash256, Slot}, + fork::ForkName, + kzg_ext::{KzgCommitments, KzgError}, + state::BeaconStateError, + test_utils::TestRandom, +}; + pub type ColumnIndex = u64; pub type Cell = FixedVector::BytesPerCell>; pub type DataColumn = VariableList, ::MaxBlobCommitmentsPerBlock>; diff --git a/consensus/types/src/data_column_subnet_id.rs b/consensus/types/src/data/data_column_subnet_id.rs similarity index 80% rename from consensus/types/src/data_column_subnet_id.rs rename to consensus/types/src/data/data_column_subnet_id.rs index c6b8846c783..c30ebbba20e 100644 --- a/consensus/types/src/data_column_subnet_id.rs +++ b/consensus/types/src/data/data_column_subnet_id.rs @@ -1,10 +1,13 @@ //! Identifies each data column subnet by an integer identifier. -use crate::ChainSpec; -use crate::data_column_sidecar::ColumnIndex; -use safe_arith::{ArithError, SafeArith}; +use std::{ + fmt::{self, Display}, + ops::{Deref, DerefMut}, +}; + +use safe_arith::SafeArith; use serde::{Deserialize, Serialize}; -use std::fmt::{self, Display}; -use std::ops::{Deref, DerefMut}; + +use crate::{core::ChainSpec, data::ColumnIndex}; #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] @@ -69,15 +72,3 @@ impl From<&DataColumnSubnetId> for u64 { val.0 } } - -#[derive(Debug)] -pub enum Error { - ArithError(ArithError), - InvalidCustodySubnetCount(u64), -} - -impl From for Error { - fn from(e: ArithError) -> Self { - Error::ArithError(e) - } -} diff --git a/consensus/types/src/data/mod.rs b/consensus/types/src/data/mod.rs new file mode 100644 index 00000000000..10d062bada9 --- /dev/null +++ b/consensus/types/src/data/mod.rs @@ -0,0 +1,23 @@ +mod blob_sidecar; +mod data_column_custody_group; +mod data_column_sidecar; +mod data_column_subnet_id; + +pub use 
blob_sidecar::{ + BlobIdentifier, BlobSidecar, BlobSidecarError, BlobSidecarList, BlobsList, FixedBlobSidecarList, +}; +pub use data_column_custody_group::{ + CustodyIndex, DataColumnCustodyGroupError, compute_columns_for_custody_group, + compute_ordered_custody_column_indices, compute_subnets_for_node, + compute_subnets_from_custody_group, get_custody_groups, +}; +pub use data_column_sidecar::{ + Cell, ColumnIndex, DataColumn, DataColumnSidecar, DataColumnSidecarError, + DataColumnSidecarList, DataColumnsByRootIdentifier, +}; +pub use data_column_subnet_id::DataColumnSubnetId; + +use crate::core::EthSpec; +use ssz_types::FixedVector; + +pub type Blob = FixedVector::BytesPerBlob>; diff --git a/consensus/types/src/deposit.rs b/consensus/types/src/deposit/deposit.rs similarity index 78% rename from consensus/types/src/deposit.rs rename to consensus/types/src/deposit/deposit.rs index 724f3de2f07..67f8572defb 100644 --- a/consensus/types/src/deposit.rs +++ b/consensus/types/src/deposit/deposit.rs @@ -1,12 +1,12 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::*; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; -use ssz_types::typenum::U33; +use ssz_types::{FixedVector, typenum::U33}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::Hash256, deposit::DepositData, fork::ForkName, test_utils::TestRandom}; + pub const DEPOSIT_TREE_DEPTH: usize = 32; /// A deposit to potentially become a beacon chain validator. 
diff --git a/consensus/types/src/deposit_data.rs b/consensus/types/src/deposit/deposit_data.rs similarity index 86% rename from consensus/types/src/deposit_data.rs rename to consensus/types/src/deposit/deposit_data.rs index 3d9ae128088..51697f5d1a2 100644 --- a/consensus/types/src/deposit_data.rs +++ b/consensus/types/src/deposit/deposit_data.rs @@ -1,10 +1,17 @@ -use crate::test_utils::TestRandom; -use crate::*; +use bls::{PublicKeyBytes, SecretKey, SignatureBytes}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{ChainSpec, Hash256, SignedRoot}, + deposit::DepositMessage, + fork::ForkName, + test_utils::TestRandom, +}; + /// The data supplied by the user to the deposit contract. /// /// Spec v0.12.1 diff --git a/consensus/types/src/deposit_message.rs b/consensus/types/src/deposit/deposit_message.rs similarity index 81% rename from consensus/types/src/deposit_message.rs rename to consensus/types/src/deposit/deposit_message.rs index 9fe3b878858..4495a5c0236 100644 --- a/consensus/types/src/deposit_message.rs +++ b/consensus/types/src/deposit/deposit_message.rs @@ -1,11 +1,16 @@ -use crate::test_utils::TestRandom; -use crate::*; - +use bls::PublicKeyBytes; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Hash256, SignedRoot}, + fork::ForkName, + test_utils::TestRandom, +}; + /// The data supplied by the user to the deposit contract. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/deposit_request.rs b/consensus/types/src/deposit/deposit_request.rs similarity index 86% rename from consensus/types/src/deposit_request.rs rename to consensus/types/src/deposit/deposit_request.rs index 16acfb3b443..8d3c6e88bae 100644 --- a/consensus/types/src/deposit_request.rs +++ b/consensus/types/src/deposit/deposit_request.rs @@ -1,13 +1,13 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ForkName, Hash256, PublicKeyBytes}; -use bls::SignatureBytes; +use bls::{PublicKeyBytes, SignatureBytes}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::Hash256, fork::ForkName, test_utils::TestRandom}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/deposit_tree_snapshot.rs b/consensus/types/src/deposit/deposit_tree_snapshot.rs similarity index 95% rename from consensus/types/src/deposit_tree_snapshot.rs rename to consensus/types/src/deposit/deposit_tree_snapshot.rs index 400fca217da..24f41397a0a 100644 --- a/consensus/types/src/deposit_tree_snapshot.rs +++ b/consensus/types/src/deposit/deposit_tree_snapshot.rs @@ -1,10 +1,11 @@ -use crate::*; use ethereum_hashing::{ZERO_HASHES, hash32_concat}; +use fixed_bytes::FixedBytesExtended; use int_to_bytes::int_to_bytes32; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; -use test_utils::TestRandom; + +use crate::{core::Hash256, deposit::DEPOSIT_TREE_DEPTH, test_utils::TestRandom}; #[derive(Encode, Decode, Deserialize, Serialize, Clone, Debug, PartialEq, TestRandom)] pub struct FinalizedExecutionBlock { diff --git a/consensus/types/src/deposit/mod.rs 
b/consensus/types/src/deposit/mod.rs new file mode 100644 index 00000000000..ff80f65cdb3 --- /dev/null +++ b/consensus/types/src/deposit/mod.rs @@ -0,0 +1,13 @@ +mod deposit; +mod deposit_data; +mod deposit_message; +mod deposit_request; +mod deposit_tree_snapshot; +mod pending_deposit; + +pub use deposit::{DEPOSIT_TREE_DEPTH, Deposit}; +pub use deposit_data::DepositData; +pub use deposit_message::DepositMessage; +pub use deposit_request::DepositRequest; +pub use deposit_tree_snapshot::{DepositTreeSnapshot, FinalizedExecutionBlock}; +pub use pending_deposit::PendingDeposit; diff --git a/consensus/types/src/pending_deposit.rs b/consensus/types/src/deposit/pending_deposit.rs similarity index 78% rename from consensus/types/src/pending_deposit.rs rename to consensus/types/src/deposit/pending_deposit.rs index 4a921edd549..4c039af39cd 100644 --- a/consensus/types/src/pending_deposit.rs +++ b/consensus/types/src/deposit/pending_deposit.rs @@ -1,10 +1,16 @@ -use crate::test_utils::TestRandom; -use crate::*; +use bls::{PublicKeyBytes, SignatureBytes}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Hash256, Slot}, + fork::ForkName, + test_utils::TestRandom, +}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/bls_to_execution_change.rs b/consensus/types/src/execution/bls_to_execution_change.rs similarity index 83% rename from consensus/types/src/bls_to_execution_change.rs rename to consensus/types/src/execution/bls_to_execution_change.rs index 72d737ac714..de14f1b4c5d 100644 --- a/consensus/types/src/bls_to_execution_change.rs +++ b/consensus/types/src/execution/bls_to_execution_change.rs @@ -1,10 +1,17 @@ -use crate::test_utils::TestRandom; -use crate::*; +use 
bls::{PublicKeyBytes, SecretKey}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Address, ChainSpec, Domain, Hash256, SignedRoot}, + execution::SignedBlsToExecutionChange, + fork::ForkName, + test_utils::TestRandom, +}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/eth1_data.rs b/consensus/types/src/execution/eth1_data.rs similarity index 86% rename from consensus/types/src/eth1_data.rs rename to consensus/types/src/execution/eth1_data.rs index 800f3e25f94..89a4e634a66 100644 --- a/consensus/types/src/eth1_data.rs +++ b/consensus/types/src/execution/eth1_data.rs @@ -1,12 +1,11 @@ -use super::Hash256; -use crate::ForkName; -use crate::context_deserialize; -use crate::test_utils::TestRandom; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::Hash256, fork::ForkName, test_utils::TestRandom}; + /// Contains data obtained from the Eth1 chain. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/execution_block_hash.rs b/consensus/types/src/execution/execution_block_hash.rs similarity index 96% rename from consensus/types/src/execution_block_hash.rs rename to consensus/types/src/execution/execution_block_hash.rs index 31905d64dfa..91c019ce040 100644 --- a/consensus/types/src/execution_block_hash.rs +++ b/consensus/types/src/execution/execution_block_hash.rs @@ -1,10 +1,11 @@ -use crate::FixedBytesExtended; -use crate::Hash256; -use crate::test_utils::TestRandom; +use std::fmt; + +use fixed_bytes::FixedBytesExtended; use rand::RngCore; use serde::{Deserialize, Serialize}; use ssz::{Decode, DecodeError, Encode}; -use std::fmt; + +use crate::{core::Hash256, test_utils::TestRandom}; #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(Default, Clone, Copy, Serialize, Deserialize, Eq, PartialEq, Hash)] diff --git a/consensus/types/src/execution_block_header.rs b/consensus/types/src/execution/execution_block_header.rs similarity index 98% rename from consensus/types/src/execution_block_header.rs rename to consensus/types/src/execution/execution_block_header.rs index 02152adbf73..e596ba1831d 100644 --- a/consensus/types/src/execution_block_header.rs +++ b/consensus/types/src/execution/execution_block_header.rs @@ -17,10 +17,15 @@ // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. -use crate::{Address, EthSpec, ExecutionPayloadRef, Hash64, Hash256, Uint256}; use alloy_rlp::RlpEncodable; +use fixed_bytes::Uint256; use metastruct::metastruct; +use crate::{ + core::{Address, EthSpec, Hash64, Hash256}, + execution::ExecutionPayloadRef, +}; + /// Execution block header as used for RLP encoding and Keccak hashing. /// /// Credit to Reth for the type definition. 
diff --git a/consensus/types/src/execution_payload.rs b/consensus/types/src/execution/execution_payload.rs similarity index 92% rename from consensus/types/src/execution_payload.rs rename to consensus/types/src/execution/execution_payload.rs index 3548f67db2e..7973b7efdce 100644 --- a/consensus/types/src/execution_payload.rs +++ b/consensus/types/src/execution/execution_payload.rs @@ -1,19 +1,29 @@ -use crate::{test_utils::TestRandom, *}; +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; +use fixed_bytes::Uint256; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; +use ssz_types::{FixedVector, VariableList}; +use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Address, EthSpec, Hash256}, + execution::ExecutionBlockHash, + fork::{ForkName, ForkVersionDecode}, + state::BeaconStateError, + test_utils::TestRandom, + withdrawal::Withdrawals, +}; + pub type Transaction = VariableList; pub type Transactions = VariableList< Transaction<::MaxBytesPerTransaction>, ::MaxTransactionsPerPayload, >; -pub type Withdrawals = VariableList::MaxWithdrawalsPerPayload>; - #[superstruct( variants(Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), variant_attributes( @@ -38,8 +48,14 @@ pub type Withdrawals = VariableList::MaxWithdrawal arbitrary(bound = "E: EthSpec"), ), ), - cast_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant"), - partial_getter_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant"), + cast_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), + partial_getter_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), map_into(FullPayload, BlindedPayload), map_ref_into(ExecutionPayloadHeader) )] diff --git a/consensus/types/src/execution_payload_header.rs 
b/consensus/types/src/execution/execution_payload_header.rs similarity index 96% rename from consensus/types/src/execution_payload_header.rs rename to consensus/types/src/execution/execution_payload_header.rs index 241ecb4ce6e..bd91a6471b2 100644 --- a/consensus/types/src/execution_payload_header.rs +++ b/consensus/types/src/execution/execution_payload_header.rs @@ -1,12 +1,27 @@ -use crate::{test_utils::TestRandom, *}; +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; +use fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; +use ssz_types::{FixedVector, VariableList}; +use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{ + core::{Address, EthSpec, Hash256, Uint256}, + execution::{ + ExecutionBlockHash, ExecutionPayloadBellatrix, ExecutionPayloadCapella, + ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, + ExecutionPayloadGloas, ExecutionPayloadRef, Transactions, + }, + fork::ForkName, + state::BeaconStateError, + test_utils::TestRandom, +}; + #[superstruct( variants(Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), variant_attributes( @@ -35,8 +50,14 @@ use tree_hash_derive::TreeHash; derive(PartialEq, TreeHash, Debug), tree_hash(enum_behaviour = "transparent") ), - cast_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant"), - partial_getter_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant"), + cast_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), + partial_getter_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), map_ref_into(ExecutionPayloadHeader) )] #[cfg_attr( diff --git a/consensus/types/src/execution_requests.rs b/consensus/types/src/execution/execution_requests.rs similarity index 93% rename from 
consensus/types/src/execution_requests.rs rename to consensus/types/src/execution/execution_requests.rs index 67396af71d4..92d717778e3 100644 --- a/consensus/types/src/execution_requests.rs +++ b/consensus/types/src/execution/execution_requests.rs @@ -1,7 +1,5 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ConsolidationRequest, DepositRequest, EthSpec, ForkName, Hash256, WithdrawalRequest}; use alloy_primitives::Bytes; +use context_deserialize::context_deserialize; use educe::Educe; use ethereum_hashing::{DynamicContext, Sha256Context}; use serde::{Deserialize, Serialize}; @@ -11,6 +9,15 @@ use ssz_types::VariableList; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + consolidation::ConsolidationRequest, + core::{EthSpec, Hash256}, + deposit::DepositRequest, + fork::ForkName, + test_utils::TestRandom, + withdrawal::WithdrawalRequest, +}; + pub type DepositRequests = VariableList::MaxDepositRequestsPerPayload>; pub type WithdrawalRequests = diff --git a/consensus/types/src/execution/mod.rs b/consensus/types/src/execution/mod.rs new file mode 100644 index 00000000000..0708bc5d960 --- /dev/null +++ b/consensus/types/src/execution/mod.rs @@ -0,0 +1,36 @@ +mod eth1_data; +mod execution_block_hash; +mod execution_block_header; +#[macro_use] +mod execution_payload; +mod bls_to_execution_change; +mod execution_payload_header; +mod execution_requests; +mod payload; +mod signed_bls_to_execution_change; + +pub use bls_to_execution_change::BlsToExecutionChange; +pub use eth1_data::Eth1Data; +pub use execution_block_hash::ExecutionBlockHash; +pub use execution_block_header::{EncodableExecutionBlockHeader, ExecutionBlockHeader}; +pub use execution_payload::{ + ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, + ExecutionPayloadElectra, ExecutionPayloadFulu, ExecutionPayloadGloas, ExecutionPayloadRef, + Transaction, Transactions, +}; +pub use 
execution_payload_header::{ + ExecutionPayloadHeader, ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, + ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, + ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, +}; +pub use execution_requests::{ + ConsolidationRequests, DepositRequests, ExecutionRequests, RequestType, WithdrawalRequests, +}; +pub use payload::{ + AbstractExecPayload, BlindedPayload, BlindedPayloadBellatrix, BlindedPayloadCapella, + BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadGloas, + BlindedPayloadRef, BlockProductionVersion, BlockType, ExecPayload, FullPayload, + FullPayloadBellatrix, FullPayloadCapella, FullPayloadDeneb, FullPayloadElectra, + FullPayloadFulu, FullPayloadGloas, FullPayloadRef, OwnedExecPayload, +}; +pub use signed_bls_to_execution_change::SignedBlsToExecutionChange; diff --git a/consensus/types/src/payload.rs b/consensus/types/src/execution/payload.rs similarity index 91% rename from consensus/types/src/payload.rs rename to consensus/types/src/execution/payload.rs index 370c73ad0a6..c1cc6c4eb66 100644 --- a/consensus/types/src/payload.rs +++ b/consensus/types/src/execution/payload.rs @@ -1,16 +1,29 @@ -use crate::{test_utils::TestRandom, *}; use educe::Educe; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; -use std::borrow::Cow; -use std::fmt::Debug; -use std::hash::Hash; +use ssz_types::VariableList; +use std::{borrow::Cow, fmt::Debug, hash::Hash}; +use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{ + core::{Address, EthSpec, Hash256}, + execution::{ + ExecutionBlockHash, ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, + ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, + ExecutionPayloadGloas, 
ExecutionPayloadHeader, ExecutionPayloadHeaderBellatrix, + ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, + ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, ExecutionPayloadRef, Transactions, + }, + fork::ForkName, + state::BeaconStateError, + test_utils::TestRandom, +}; + #[derive(Debug, PartialEq)] pub enum BlockType { Blinded, @@ -38,8 +51,8 @@ pub trait ExecPayload: Debug + Clone + PartialEq + Hash + TreeHash + fn gas_limit(&self) -> u64; fn transactions(&self) -> Option<&Transactions>; /// fork-specific fields - fn withdrawals_root(&self) -> Result; - fn blob_gas_used(&self) -> Result; + fn withdrawals_root(&self) -> Result; + fn blob_gas_used(&self) -> Result; /// Is this a default payload with 0x0 roots for transactions and withdrawals? fn is_default_with_zero_roots(&self) -> bool; @@ -179,8 +192,14 @@ pub trait AbstractExecPayload: ), map_into(ExecutionPayload), map_ref_into(ExecutionPayloadRef), - cast_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant"), - partial_getter_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant") + cast_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), + partial_getter_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ) )] #[cfg_attr( feature = "arbitrary", @@ -311,9 +330,9 @@ impl ExecPayload for FullPayload { }) } - fn withdrawals_root(&self) -> Result { + fn withdrawals_root(&self) -> Result { match self { - FullPayload::Bellatrix(_) => Err(Error::IncorrectStateVariant), + FullPayload::Bellatrix(_) => Err(BeaconStateError::IncorrectStateVariant), FullPayload::Capella(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), FullPayload::Deneb(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), FullPayload::Electra(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), @@ -322,10 +341,10 @@ impl ExecPayload for FullPayload { } } 
- fn blob_gas_used(&self) -> Result { + fn blob_gas_used(&self) -> Result { match self { FullPayload::Bellatrix(_) | FullPayload::Capella(_) => { - Err(Error::IncorrectStateVariant) + Err(BeaconStateError::IncorrectStateVariant) } FullPayload::Deneb(inner) => Ok(inner.execution_payload.blob_gas_used), FullPayload::Electra(inner) => Ok(inner.execution_payload.blob_gas_used), @@ -354,9 +373,9 @@ impl FullPayload { }) } - pub fn default_at_fork(fork_name: ForkName) -> Result { + pub fn default_at_fork(fork_name: ForkName) -> Result { match fork_name { - ForkName::Base | ForkName::Altair => Err(Error::IncorrectStateVariant), + ForkName::Base | ForkName::Altair => Err(BeaconStateError::IncorrectStateVariant), ForkName::Bellatrix => Ok(FullPayloadBellatrix::default().into()), ForkName::Capella => Ok(FullPayloadCapella::default().into()), ForkName::Deneb => Ok(FullPayloadDeneb::default().into()), @@ -450,9 +469,9 @@ impl ExecPayload for FullPayloadRef<'_, E> { }) } - fn withdrawals_root(&self) -> Result { + fn withdrawals_root(&self) -> Result { match self { - FullPayloadRef::Bellatrix(_) => Err(Error::IncorrectStateVariant), + FullPayloadRef::Bellatrix(_) => Err(BeaconStateError::IncorrectStateVariant), FullPayloadRef::Capella(inner) => { Ok(inner.execution_payload.withdrawals.tree_hash_root()) } @@ -469,10 +488,10 @@ impl ExecPayload for FullPayloadRef<'_, E> { } } - fn blob_gas_used(&self) -> Result { + fn blob_gas_used(&self) -> Result { match self { FullPayloadRef::Bellatrix(_) | FullPayloadRef::Capella(_) => { - Err(Error::IncorrectStateVariant) + Err(BeaconStateError::IncorrectStateVariant) } FullPayloadRef::Deneb(inner) => Ok(inner.execution_payload.blob_gas_used), FullPayloadRef::Electra(inner) => Ok(inner.execution_payload.blob_gas_used), @@ -548,8 +567,14 @@ impl TryFrom> for FullPayload { tree_hash(enum_behaviour = "transparent"), ), map_into(ExecutionPayloadHeader), - cast_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant"), - 
partial_getter_error(ty = "Error", expr = "BeaconStateError::IncorrectStateVariant") + cast_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), + partial_getter_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ) )] #[cfg_attr( feature = "arbitrary", @@ -658,9 +683,9 @@ impl ExecPayload for BlindedPayload { None } - fn withdrawals_root(&self) -> Result { + fn withdrawals_root(&self) -> Result { match self { - BlindedPayload::Bellatrix(_) => Err(Error::IncorrectStateVariant), + BlindedPayload::Bellatrix(_) => Err(BeaconStateError::IncorrectStateVariant), BlindedPayload::Capella(inner) => Ok(inner.execution_payload_header.withdrawals_root), BlindedPayload::Deneb(inner) => Ok(inner.execution_payload_header.withdrawals_root), BlindedPayload::Electra(inner) => Ok(inner.execution_payload_header.withdrawals_root), @@ -669,10 +694,10 @@ impl ExecPayload for BlindedPayload { } } - fn blob_gas_used(&self) -> Result { + fn blob_gas_used(&self) -> Result { match self { BlindedPayload::Bellatrix(_) | BlindedPayload::Capella(_) => { - Err(Error::IncorrectStateVariant) + Err(BeaconStateError::IncorrectStateVariant) } BlindedPayload::Deneb(inner) => Ok(inner.execution_payload_header.blob_gas_used), BlindedPayload::Electra(inner) => Ok(inner.execution_payload_header.blob_gas_used), @@ -766,9 +791,9 @@ impl<'b, E: EthSpec> ExecPayload for BlindedPayloadRef<'b, E> { None } - fn withdrawals_root(&self) -> Result { + fn withdrawals_root(&self) -> Result { match self { - BlindedPayloadRef::Bellatrix(_) => Err(Error::IncorrectStateVariant), + BlindedPayloadRef::Bellatrix(_) => Err(BeaconStateError::IncorrectStateVariant), BlindedPayloadRef::Capella(inner) => { Ok(inner.execution_payload_header.withdrawals_root) } @@ -781,10 +806,10 @@ impl<'b, E: EthSpec> ExecPayload for BlindedPayloadRef<'b, E> { } } - fn blob_gas_used(&self) -> Result { + fn blob_gas_used(&self) -> Result { match self { 
BlindedPayloadRef::Bellatrix(_) | BlindedPayloadRef::Capella(_) => { - Err(Error::IncorrectStateVariant) + Err(BeaconStateError::IncorrectStateVariant) } BlindedPayloadRef::Deneb(inner) => Ok(inner.execution_payload_header.blob_gas_used), BlindedPayloadRef::Electra(inner) => Ok(inner.execution_payload_header.blob_gas_used), @@ -877,12 +902,12 @@ macro_rules! impl_exec_payload_common { f(self) } - fn withdrawals_root(&self) -> Result { + fn withdrawals_root(&self) -> Result { let g = $g; g(self) } - fn blob_gas_used(&self) -> Result { + fn blob_gas_used(&self) -> Result { let h = $h; h(self) } @@ -917,15 +942,16 @@ macro_rules! impl_exec_payload_for_fork { }, { |_| { None } }, { - let c: for<'a> fn(&'a $wrapper_type_header) -> Result = - |payload: &$wrapper_type_header| { - let wrapper_ref_type = BlindedPayloadRef::$fork_variant(&payload); - wrapper_ref_type.withdrawals_root() - }; + let c: for<'a> fn( + &'a $wrapper_type_header, + ) -> Result = |payload: &$wrapper_type_header| { + let wrapper_ref_type = BlindedPayloadRef::$fork_variant(&payload); + wrapper_ref_type.withdrawals_root() + }; c }, { - let c: for<'a> fn(&'a $wrapper_type_header) -> Result = + let c: for<'a> fn(&'a $wrapper_type_header) -> Result = |payload: &$wrapper_type_header| { let wrapper_ref_type = BlindedPayloadRef::$fork_variant(&payload); wrapper_ref_type.blob_gas_used() @@ -935,12 +961,12 @@ macro_rules! impl_exec_payload_for_fork { ); impl TryInto<$wrapper_type_header> for BlindedPayload { - type Error = Error; + type Error = BeaconStateError; fn try_into(self) -> Result<$wrapper_type_header, Self::Error> { match self { BlindedPayload::$fork_variant(payload) => Ok(payload), - _ => Err(Error::IncorrectStateVariant), + _ => Err(BeaconStateError::IncorrectStateVariant), } } } @@ -963,13 +989,13 @@ macro_rules! 
impl_exec_payload_for_fork { } impl TryFrom> for $wrapper_type_header { - type Error = Error; + type Error = BeaconStateError; fn try_from(header: ExecutionPayloadHeader) -> Result { match header { ExecutionPayloadHeader::$fork_variant(execution_payload_header) => { Ok(execution_payload_header.into()) } - _ => Err(Error::PayloadConversionLogicFlaw), + _ => Err(BeaconStateError::PayloadConversionLogicFlaw), } } } @@ -1004,7 +1030,7 @@ macro_rules! impl_exec_payload_for_fork { c }, { - let c: for<'a> fn(&'a $wrapper_type_full) -> Result = + let c: for<'a> fn(&'a $wrapper_type_full) -> Result = |payload: &$wrapper_type_full| { let wrapper_ref_type = FullPayloadRef::$fork_variant(&payload); wrapper_ref_type.withdrawals_root() @@ -1012,7 +1038,7 @@ macro_rules! impl_exec_payload_for_fork { c }, { - let c: for<'a> fn(&'a $wrapper_type_full) -> Result = + let c: for<'a> fn(&'a $wrapper_type_full) -> Result = |payload: &$wrapper_type_full| { let wrapper_ref_type = FullPayloadRef::$fork_variant(&payload); wrapper_ref_type.blob_gas_used() @@ -1039,26 +1065,26 @@ macro_rules! 
impl_exec_payload_for_fork { } impl TryFrom> for $wrapper_type_full { - type Error = Error; + type Error = BeaconStateError; fn try_from(_: ExecutionPayloadHeader) -> Result { - Err(Error::PayloadConversionLogicFlaw) + Err(BeaconStateError::PayloadConversionLogicFlaw) } } impl TryFrom<$wrapped_type_header> for $wrapper_type_full { - type Error = Error; + type Error = BeaconStateError; fn try_from(_: $wrapped_type_header) -> Result { - Err(Error::PayloadConversionLogicFlaw) + Err(BeaconStateError::PayloadConversionLogicFlaw) } } impl TryInto<$wrapper_type_full> for FullPayload { - type Error = Error; + type Error = BeaconStateError; fn try_into(self) -> Result<$wrapper_type_full, Self::Error> { match self { FullPayload::$fork_variant(payload) => Ok(payload), - _ => Err(Error::PayloadConversionLogicFlaw), + _ => Err(BeaconStateError::PayloadConversionLogicFlaw), } } } diff --git a/consensus/types/src/signed_bls_to_execution_change.rs b/consensus/types/src/execution/signed_bls_to_execution_change.rs similarity index 78% rename from consensus/types/src/signed_bls_to_execution_change.rs rename to consensus/types/src/execution/signed_bls_to_execution_change.rs index 910c4c7d7ef..535960fb3f9 100644 --- a/consensus/types/src/signed_bls_to_execution_change.rs +++ b/consensus/types/src/execution/signed_bls_to_execution_change.rs @@ -1,10 +1,12 @@ -use crate::test_utils::TestRandom; -use crate::*; +use bls::Signature; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{execution::BlsToExecutionChange, fork::ForkName, test_utils::TestRandom}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/exit/mod.rs b/consensus/types/src/exit/mod.rs new file mode 100644 index 
00000000000..cb066d1d7a0 --- /dev/null +++ b/consensus/types/src/exit/mod.rs @@ -0,0 +1,5 @@ +mod signed_voluntary_exit; +mod voluntary_exit; + +pub use signed_voluntary_exit::SignedVoluntaryExit; +pub use voluntary_exit::VoluntaryExit; diff --git a/consensus/types/src/signed_voluntary_exit.rs b/consensus/types/src/exit/signed_voluntary_exit.rs similarity index 84% rename from consensus/types/src/signed_voluntary_exit.rs rename to consensus/types/src/exit/signed_voluntary_exit.rs index 0beffa1e04a..b49401a7215 100644 --- a/consensus/types/src/signed_voluntary_exit.rs +++ b/consensus/types/src/exit/signed_voluntary_exit.rs @@ -1,12 +1,12 @@ -use crate::context_deserialize; -use crate::{ForkName, VoluntaryExit, test_utils::TestRandom}; use bls::Signature; - +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{exit::VoluntaryExit, fork::ForkName, test_utils::TestRandom}; + /// An exit voluntarily submitted a validator who wishes to withdraw. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/voluntary_exit.rs b/consensus/types/src/exit/voluntary_exit.rs similarity index 90% rename from consensus/types/src/voluntary_exit.rs rename to consensus/types/src/exit/voluntary_exit.rs index 42d792a814d..30c6a97c4d1 100644 --- a/consensus/types/src/voluntary_exit.rs +++ b/consensus/types/src/exit/voluntary_exit.rs @@ -1,14 +1,17 @@ -use crate::context_deserialize; -use crate::{ - ChainSpec, Domain, Epoch, ForkName, Hash256, SecretKey, SignedRoot, SignedVoluntaryExit, - test_utils::TestRandom, -}; - +use bls::SecretKey; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{ChainSpec, Domain, Epoch, Hash256, SignedRoot}, + exit::SignedVoluntaryExit, + fork::ForkName, + test_utils::TestRandom, +}; + /// An exit voluntarily submitted a validator who wishes to withdraw. /// /// Spec v0.12.1 diff --git a/consensus/types/src/fork.rs b/consensus/types/src/fork/fork.rs similarity index 96% rename from consensus/types/src/fork.rs rename to consensus/types/src/fork/fork.rs index 5c5bd7ffd18..371b11e05c5 100644 --- a/consensus/types/src/fork.rs +++ b/consensus/types/src/fork/fork.rs @@ -1,12 +1,11 @@ -use crate::test_utils::TestRandom; -use crate::{Epoch, ForkName}; use context_deserialize::context_deserialize; - use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::Epoch, fork::ForkName, test_utils::TestRandom}; + /// Specifies a fork of the `BeaconChain`, to prevent replay attacks. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/fork_context.rs b/consensus/types/src/fork/fork_context.rs similarity index 98% rename from consensus/types/src/fork_context.rs rename to consensus/types/src/fork/fork_context.rs index 66617326e13..aec72761241 100644 --- a/consensus/types/src/fork_context.rs +++ b/consensus/types/src/fork/fork_context.rs @@ -1,7 +1,11 @@ +use std::collections::BTreeMap; + use parking_lot::RwLock; -use crate::{ChainSpec, Epoch, EthSpec, ForkName, Hash256, Slot}; -use std::collections::BTreeMap; +use crate::{ + core::{ChainSpec, Epoch, EthSpec, Hash256, Slot}, + fork::ForkName, +}; /// Represents a hard fork in the consensus protocol. /// @@ -152,8 +156,7 @@ impl ForkContext { #[cfg(test)] mod tests { use super::*; - use crate::MainnetEthSpec; - use crate::chain_spec::{BlobParameters, BlobSchedule}; + use crate::core::{BlobParameters, BlobSchedule, MainnetEthSpec}; type E = MainnetEthSpec; diff --git a/consensus/types/src/fork_data.rs b/consensus/types/src/fork/fork_data.rs similarity index 88% rename from consensus/types/src/fork_data.rs rename to consensus/types/src/fork/fork_data.rs index 2d5e905efb9..1b9c8bad9ff 100644 --- a/consensus/types/src/fork_data.rs +++ b/consensus/types/src/fork/fork_data.rs @@ -1,12 +1,15 @@ -use crate::test_utils::TestRandom; -use crate::{ForkName, Hash256, SignedRoot}; use context_deserialize::context_deserialize; - use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Hash256, SignedRoot}, + fork::ForkName, + test_utils::TestRandom, +}; + /// Specifies a fork of the `BeaconChain`, to prevent replay attacks. 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/fork/fork_macros.rs b/consensus/types/src/fork/fork_macros.rs new file mode 100644 index 00000000000..0c7f382ffc5 --- /dev/null +++ b/consensus/types/src/fork/fork_macros.rs @@ -0,0 +1,60 @@ +/// Map a fork name into a fork-versioned superstruct type like `BeaconBlock`. +/// +/// The `$body` expression is where the magic happens. The macro allows us to achieve polymorphism +/// in the return type, which is not usually possible in Rust without trait objects. +/// +/// E.g. you could call `map_fork_name!(fork, BeaconBlock, serde_json::from_str(s))` to decode +/// different `BeaconBlock` variants depending on the value of `fork`. Note how the type of the body +/// will change between `BeaconBlockBase` and `BeaconBlockAltair` depending on which branch is +/// taken, the important thing is that they are re-unified by injecting them back into the +/// `BeaconBlock` parent enum. +/// +/// If you would also like to extract additional data alongside the superstruct type, use +/// the more flexible `map_fork_name_with` macro. +#[macro_export] +macro_rules! map_fork_name { + ($fork_name:expr, $t:tt, $body:expr) => { + $crate::map_fork_name_with!($fork_name, $t, { ($body, ()) }).0 + }; +} + +/// Map a fork name into a tuple of `(t, extra)` where `t` is a superstruct type. +#[macro_export] +macro_rules! 
map_fork_name_with { + ($fork_name:expr, $t:tt, $body:block) => { + match $fork_name { + $crate::fork::ForkName::Base => { + let (value, extra_data) = $body; + ($t::Base(value), extra_data) + } + $crate::fork::ForkName::Altair => { + let (value, extra_data) = $body; + ($t::Altair(value), extra_data) + } + $crate::fork::ForkName::Bellatrix => { + let (value, extra_data) = $body; + ($t::Bellatrix(value), extra_data) + } + $crate::fork::ForkName::Capella => { + let (value, extra_data) = $body; + ($t::Capella(value), extra_data) + } + $crate::fork::ForkName::Deneb => { + let (value, extra_data) = $body; + ($t::Deneb(value), extra_data) + } + $crate::fork::ForkName::Electra => { + let (value, extra_data) = $body; + ($t::Electra(value), extra_data) + } + $crate::fork::ForkName::Fulu => { + let (value, extra_data) = $body; + ($t::Fulu(value), extra_data) + } + $crate::fork::ForkName::Gloas => { + let (value, extra_data) = $body; + ($t::Gloas(value), extra_data) + } + } + }; +} diff --git a/consensus/types/src/fork_name.rs b/consensus/types/src/fork/fork_name.rs similarity index 84% rename from consensus/types/src/fork_name.rs rename to consensus/types/src/fork/fork_name.rs index 1d7bf3795b2..e9ec5fbe41e 100644 --- a/consensus/types/src/fork_name.rs +++ b/consensus/types/src/fork/fork_name.rs @@ -1,8 +1,12 @@ -use crate::{ChainSpec, Epoch}; +use std::{ + fmt::{self, Display, Formatter}, + str::FromStr, +}; + use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; -use std::fmt::{self, Display, Formatter}; -use std::str::FromStr; + +use crate::core::{ChainSpec, Epoch}; #[derive( Debug, Clone, Copy, Decode, Encode, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, @@ -243,67 +247,6 @@ impl ForkName { } } -/// Map a fork name into a fork-versioned superstruct type like `BeaconBlock`. -/// -/// The `$body` expression is where the magic happens. 
The macro allows us to achieve polymorphism -/// in the return type, which is not usually possible in Rust without trait objects. -/// -/// E.g. you could call `map_fork_name!(fork, BeaconBlock, serde_json::from_str(s))` to decode -/// different `BeaconBlock` variants depending on the value of `fork`. Note how the type of the body -/// will change between `BeaconBlockBase` and `BeaconBlockAltair` depending on which branch is -/// taken, the important thing is that they are re-unified by injecting them back into the -/// `BeaconBlock` parent enum. -/// -/// If you would also like to extract additional data alongside the superstruct type, use -/// the more flexible `map_fork_name_with` macro. -#[macro_export] -macro_rules! map_fork_name { - ($fork_name:expr, $t:tt, $body:expr) => { - map_fork_name_with!($fork_name, $t, { ($body, ()) }).0 - }; -} - -/// Map a fork name into a tuple of `(t, extra)` where `t` is a superstruct type. -#[macro_export] -macro_rules! map_fork_name_with { - ($fork_name:expr, $t:tt, $body:block) => { - match $fork_name { - ForkName::Base => { - let (value, extra_data) = $body; - ($t::Base(value), extra_data) - } - ForkName::Altair => { - let (value, extra_data) = $body; - ($t::Altair(value), extra_data) - } - ForkName::Bellatrix => { - let (value, extra_data) = $body; - ($t::Bellatrix(value), extra_data) - } - ForkName::Capella => { - let (value, extra_data) = $body; - ($t::Capella(value), extra_data) - } - ForkName::Deneb => { - let (value, extra_data) = $body; - ($t::Deneb(value), extra_data) - } - ForkName::Electra => { - let (value, extra_data) = $body; - ($t::Electra(value), extra_data) - } - ForkName::Fulu => { - let (value, extra_data) = $body; - ($t::Fulu(value), extra_data) - } - ForkName::Gloas => { - let (value, extra_data) = $body; - ($t::Gloas(value), extra_data) - } - } - }; -} - impl FromStr for ForkName { type Err = String; diff --git a/consensus/types/src/fork/fork_version_decode.rs 
b/consensus/types/src/fork/fork_version_decode.rs new file mode 100644 index 00000000000..4349efb21f9 --- /dev/null +++ b/consensus/types/src/fork/fork_version_decode.rs @@ -0,0 +1,6 @@ +use crate::fork::ForkName; + +pub trait ForkVersionDecode: Sized { + /// SSZ decode with explicit fork variant. + fn from_ssz_bytes_by_fork(bytes: &[u8], fork_name: ForkName) -> Result; +} diff --git a/consensus/types/src/fork/mod.rs b/consensus/types/src/fork/mod.rs new file mode 100644 index 00000000000..1ad1c7cb622 --- /dev/null +++ b/consensus/types/src/fork/mod.rs @@ -0,0 +1,15 @@ +mod fork; +mod fork_context; +mod fork_data; +mod fork_macros; +mod fork_name; +mod fork_version_decode; + +pub use crate::{map_fork_name, map_fork_name_with}; +pub use fork::Fork; +pub use fork_context::{ForkContext, HardFork}; +pub use fork_data::ForkData; +pub use fork_name::{ForkName, InconsistentFork}; +pub use fork_version_decode::ForkVersionDecode; + +pub type ForkVersion = [u8; 4]; diff --git a/consensus/types/src/kzg_ext/consts.rs b/consensus/types/src/kzg_ext/consts.rs new file mode 100644 index 00000000000..06c9f9c749e --- /dev/null +++ b/consensus/types/src/kzg_ext/consts.rs @@ -0,0 +1,3 @@ +pub use kzg::{ + BYTES_PER_BLOB, BYTES_PER_COMMITMENT, BYTES_PER_FIELD_ELEMENT, VERSIONED_HASH_VERSION_KZG, +}; diff --git a/consensus/types/src/kzg_ext/mod.rs b/consensus/types/src/kzg_ext/mod.rs new file mode 100644 index 00000000000..63533ec71f5 --- /dev/null +++ b/consensus/types/src/kzg_ext/mod.rs @@ -0,0 +1,27 @@ +pub mod consts; + +pub use kzg::{Blob as KzgBlob, Error as KzgError, Kzg, KzgCommitment, KzgProof}; + +use ssz_types::VariableList; + +use crate::core::EthSpec; + +// Note on List limit: +// - Deneb to Electra: `MaxBlobCommitmentsPerBlock` +// - Fulu: `MaxCellsPerBlock` +// We choose to use a single type (with the larger value from Fulu as `N`) instead of having to +// introduce a new type for Fulu. 
This is to avoid messy conversions and having to add extra types +// with no gains - as `N` does not impact serialisation at all, and only affects merkleization, +// which we don't current do on `KzgProofs` anyway. +pub type KzgProofs = VariableList::MaxCellsPerBlock>; + +pub type KzgCommitments = + VariableList::MaxBlobCommitmentsPerBlock>; + +/// Util method helpful for logging. +pub fn format_kzg_commitments(commitments: &[KzgCommitment]) -> String { + let commitment_strings: Vec = commitments.iter().map(|x| x.to_string()).collect(); + let commitments_joined = commitment_strings.join(", "); + let surrounded_commitments = format!("[{}]", commitments_joined); + surrounded_commitments +} diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index 8e83fed1d9a..a8a78f8cfbf 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -1,4 +1,4 @@ -//! Ethereum 2.0 types +//! Ethereum Consensus types // Clippy lint set up #![cfg_attr( not(test), @@ -12,287 +12,167 @@ #[macro_use] pub mod test_utils; -pub mod aggregate_and_proof; -pub mod application_domain; pub mod attestation; -pub mod attestation_data; -pub mod attestation_duty; -pub mod attester_slashing; -pub mod beacon_block; -pub mod beacon_block_body; -pub mod beacon_block_header; -pub mod beacon_committee; -pub mod beacon_response; -pub mod beacon_state; -pub mod bls_to_execution_change; -pub mod builder_bid; -pub mod chain_spec; -pub mod checkpoint; -pub mod consolidation_request; -pub mod consts; -pub mod contribution_and_proof; +pub mod block; +pub mod builder; +pub mod consolidation; +pub mod core; +pub mod data; pub mod deposit; -pub mod deposit_data; -pub mod deposit_message; -pub mod deposit_request; -pub mod deposit_tree_snapshot; -pub mod enr_fork_id; -pub mod eth1_data; -pub mod eth_spec; -pub mod execution_block_hash; -pub mod execution_payload; -pub mod execution_payload_header; +pub mod execution; +pub mod exit; pub mod fork; -pub mod fork_data; -pub mod fork_name; 
-pub mod graffiti; -pub mod historical_batch; -pub mod historical_summary; -pub mod indexed_attestation; -pub mod light_client_bootstrap; -pub mod light_client_finality_update; -pub mod light_client_optimistic_update; -pub mod light_client_update; -pub mod pending_attestation; -pub mod pending_consolidation; -pub mod pending_deposit; -pub mod pending_partial_withdrawal; -pub mod proposer_preparation_data; -pub mod proposer_slashing; -pub mod relative_epoch; -pub mod selection_proof; -pub mod shuffling_id; -pub mod signed_aggregate_and_proof; -pub mod signed_beacon_block; -pub mod signed_beacon_block_header; -pub mod signed_bls_to_execution_change; -pub mod signed_contribution_and_proof; -pub mod signed_voluntary_exit; -pub mod signing_data; -pub mod sync_committee_subscription; -pub mod sync_duty; -pub mod validator; -pub mod validator_subscription; -pub mod voluntary_exit; -pub mod withdrawal_credentials; -pub mod withdrawal_request; -#[macro_use] -pub mod slot_epoch_macros; -pub mod activation_queue; -pub mod config_and_preset; -pub mod execution_block_header; -pub mod execution_requests; -pub mod fork_context; -pub mod participation_flags; -pub mod payload; -pub mod preset; -pub mod slot_epoch; -pub mod subnet_id; -pub mod sync_aggregate; -pub mod sync_aggregator_selection_data; +pub mod kzg_ext; +pub mod light_client; +pub mod slashing; +pub mod state; pub mod sync_committee; -pub mod sync_committee_contribution; -pub mod sync_committee_message; -pub mod sync_selection_proof; -pub mod sync_subnet_id; -pub mod validator_registration_data; +pub mod validator; pub mod withdrawal; -pub mod epoch_cache; -pub mod slot_data; -#[cfg(feature = "sqlite")] -pub mod sqlite; +// Temporary root level exports to maintain backwards compatibility for Lighthouse. 
+pub use attestation::*; +pub use block::*; +pub use builder::*; +pub use consolidation::*; +pub use core::{consts, *}; +pub use data::*; +pub use deposit::*; +pub use execution::*; +pub use exit::*; +pub use fork::*; +pub use kzg_ext::*; +pub use light_client::*; +pub use slashing::*; +pub use state::*; +pub use sync_committee::*; +pub use validator::*; +pub use withdrawal::*; -pub mod blob_sidecar; -pub mod data_column_custody_group; -pub mod data_column_sidecar; -pub mod data_column_subnet_id; -pub mod light_client_header; -pub mod non_zero_usize; -pub mod runtime_fixed_vector; -pub mod runtime_var_list; +// Temporary facade modules to maintain backwards compatibility for Lighthouse. +pub mod eth_spec { + pub use crate::core::EthSpec; +} -pub use crate::activation_queue::ActivationQueue; -pub use crate::aggregate_and_proof::{ - AggregateAndProof, AggregateAndProofBase, AggregateAndProofElectra, AggregateAndProofRef, -}; -pub use crate::attestation::{ - Attestation, AttestationBase, AttestationElectra, AttestationRef, AttestationRefMut, - Error as AttestationError, SingleAttestation, -}; -pub use crate::attestation_data::AttestationData; -pub use crate::attestation_duty::AttestationDuty; -pub use crate::attester_slashing::{ - AttesterSlashing, AttesterSlashingBase, AttesterSlashingElectra, AttesterSlashingOnDisk, - AttesterSlashingRef, AttesterSlashingRefOnDisk, -}; -pub use crate::beacon_block::{ - BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockCapella, - BeaconBlockDeneb, BeaconBlockElectra, BeaconBlockFulu, BeaconBlockGloas, BeaconBlockRef, - BeaconBlockRefMut, BlindedBeaconBlock, BlockImportSource, EmptyBlock, -}; -pub use crate::beacon_block_body::{ - BeaconBlockBody, BeaconBlockBodyAltair, BeaconBlockBodyBase, BeaconBlockBodyBellatrix, - BeaconBlockBodyCapella, BeaconBlockBodyDeneb, BeaconBlockBodyElectra, BeaconBlockBodyFulu, - BeaconBlockBodyGloas, BeaconBlockBodyRef, BeaconBlockBodyRefMut, -}; -pub use 
crate::beacon_block_header::BeaconBlockHeader; -pub use crate::beacon_committee::{BeaconCommittee, OwnedBeaconCommittee}; -pub use crate::beacon_response::{ - BeaconResponse, ForkVersionDecode, ForkVersionedResponse, UnversionedResponse, -}; -pub use crate::beacon_state::{Error as BeaconStateError, *}; -pub use crate::blob_sidecar::{BlobIdentifier, BlobSidecar, BlobSidecarList, BlobsList}; -pub use crate::bls_to_execution_change::BlsToExecutionChange; -pub use crate::chain_spec::{ChainSpec, Config, Domain}; -pub use crate::checkpoint::Checkpoint; -pub use crate::config_and_preset::{ - ConfigAndPreset, ConfigAndPresetDeneb, ConfigAndPresetElectra, ConfigAndPresetFulu, - ConfigAndPresetGloas, -}; -pub use crate::consolidation_request::ConsolidationRequest; -pub use crate::contribution_and_proof::ContributionAndProof; -pub use crate::data_column_sidecar::{ - ColumnIndex, DataColumnSidecar, DataColumnSidecarList, DataColumnsByRootIdentifier, -}; -pub use crate::data_column_subnet_id::DataColumnSubnetId; -pub use crate::deposit::{DEPOSIT_TREE_DEPTH, Deposit}; -pub use crate::deposit_data::DepositData; -pub use crate::deposit_message::DepositMessage; -pub use crate::deposit_request::DepositRequest; -pub use crate::deposit_tree_snapshot::{DepositTreeSnapshot, FinalizedExecutionBlock}; -pub use crate::enr_fork_id::EnrForkId; -pub use crate::epoch_cache::{EpochCache, EpochCacheError, EpochCacheKey}; -pub use crate::eth_spec::EthSpecId; -pub use crate::eth1_data::Eth1Data; -pub use crate::execution_block_hash::ExecutionBlockHash; -pub use crate::execution_block_header::{EncodableExecutionBlockHeader, ExecutionBlockHeader}; -pub use crate::execution_payload::{ - ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, - ExecutionPayloadElectra, ExecutionPayloadFulu, ExecutionPayloadGloas, ExecutionPayloadRef, - Transaction, Transactions, Withdrawals, -}; -pub use crate::execution_payload_header::{ - ExecutionPayloadHeader, 
ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, - ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, - ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, -}; -pub use crate::execution_requests::{ExecutionRequests, RequestType}; -pub use crate::fork::Fork; -pub use crate::fork_context::ForkContext; -pub use crate::fork_data::ForkData; -pub use crate::fork_name::{ForkName, InconsistentFork}; -pub use crate::graffiti::{GRAFFITI_BYTES_LEN, Graffiti}; -pub use crate::historical_batch::HistoricalBatch; -pub use crate::indexed_attestation::{ - IndexedAttestation, IndexedAttestationBase, IndexedAttestationElectra, IndexedAttestationRef, -}; -pub use crate::light_client_bootstrap::{ - LightClientBootstrap, LightClientBootstrapAltair, LightClientBootstrapCapella, - LightClientBootstrapDeneb, LightClientBootstrapElectra, LightClientBootstrapFulu, - LightClientBootstrapGloas, -}; -pub use crate::light_client_finality_update::{ - LightClientFinalityUpdate, LightClientFinalityUpdateAltair, LightClientFinalityUpdateCapella, - LightClientFinalityUpdateDeneb, LightClientFinalityUpdateElectra, - LightClientFinalityUpdateFulu, LightClientFinalityUpdateGloas, -}; -pub use crate::light_client_header::{ - LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, - LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, -}; -pub use crate::light_client_optimistic_update::{ - LightClientOptimisticUpdate, LightClientOptimisticUpdateAltair, - LightClientOptimisticUpdateCapella, LightClientOptimisticUpdateDeneb, - LightClientOptimisticUpdateElectra, LightClientOptimisticUpdateFulu, - LightClientOptimisticUpdateGloas, -}; -pub use crate::light_client_update::{ - Error as LightClientUpdateError, LightClientUpdate, LightClientUpdateAltair, - LightClientUpdateCapella, LightClientUpdateDeneb, LightClientUpdateElectra, - LightClientUpdateFulu, 
LightClientUpdateGloas, MerkleProof, -}; -pub use crate::participation_flags::ParticipationFlags; -pub use crate::payload::{ - AbstractExecPayload, BlindedPayload, BlindedPayloadBellatrix, BlindedPayloadCapella, - BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadGloas, - BlindedPayloadRef, BlockType, ExecPayload, FullPayload, FullPayloadBellatrix, - FullPayloadCapella, FullPayloadDeneb, FullPayloadElectra, FullPayloadFulu, FullPayloadGloas, - FullPayloadRef, OwnedExecPayload, -}; -pub use crate::pending_attestation::PendingAttestation; -pub use crate::pending_consolidation::PendingConsolidation; -pub use crate::pending_deposit::PendingDeposit; -pub use crate::pending_partial_withdrawal::PendingPartialWithdrawal; -pub use crate::preset::{ - AltairPreset, BasePreset, BellatrixPreset, CapellaPreset, DenebPreset, ElectraPreset, - FuluPreset, GloasPreset, -}; -pub use crate::proposer_preparation_data::ProposerPreparationData; -pub use crate::proposer_slashing::ProposerSlashing; -pub use crate::relative_epoch::{Error as RelativeEpochError, RelativeEpoch}; -pub use crate::runtime_fixed_vector::RuntimeFixedVector; -pub use crate::runtime_var_list::RuntimeVariableList; -pub use crate::selection_proof::SelectionProof; -pub use crate::shuffling_id::AttestationShufflingId; -pub use crate::signed_aggregate_and_proof::{ - SignedAggregateAndProof, SignedAggregateAndProofBase, SignedAggregateAndProofElectra, -}; -pub use crate::signed_beacon_block::{ - SignedBeaconBlock, SignedBeaconBlockAltair, SignedBeaconBlockBase, SignedBeaconBlockBellatrix, - SignedBeaconBlockCapella, SignedBeaconBlockDeneb, SignedBeaconBlockElectra, - SignedBeaconBlockFulu, SignedBeaconBlockGloas, SignedBeaconBlockHash, SignedBlindedBeaconBlock, - ssz_tagged_signed_beacon_block, ssz_tagged_signed_beacon_block_arc, -}; -pub use crate::signed_beacon_block_header::SignedBeaconBlockHeader; -pub use crate::signed_bls_to_execution_change::SignedBlsToExecutionChange; -pub use 
crate::signed_contribution_and_proof::SignedContributionAndProof; -pub use crate::signed_voluntary_exit::SignedVoluntaryExit; -pub use crate::signing_data::{SignedRoot, SigningData}; -pub use crate::slot_epoch::{Epoch, Slot}; -pub use crate::subnet_id::SubnetId; -pub use crate::sync_aggregate::SyncAggregate; -pub use crate::sync_aggregator_selection_data::SyncAggregatorSelectionData; -pub use crate::sync_committee::SyncCommittee; -pub use crate::sync_committee_contribution::{SyncCommitteeContribution, SyncContributionData}; -pub use crate::sync_committee_message::SyncCommitteeMessage; -pub use crate::sync_committee_subscription::SyncCommitteeSubscription; -pub use crate::sync_duty::SyncDuty; -pub use crate::sync_selection_proof::SyncSelectionProof; -pub use crate::sync_subnet_id::SyncSubnetId; -pub use crate::validator::Validator; -pub use crate::validator_registration_data::*; -pub use crate::validator_subscription::ValidatorSubscription; -pub use crate::voluntary_exit::VoluntaryExit; -pub use crate::withdrawal::Withdrawal; -pub use crate::withdrawal_credentials::WithdrawalCredentials; -pub use crate::withdrawal_request::WithdrawalRequest; -pub use fixed_bytes::FixedBytesExtended; +pub mod chain_spec { + pub use crate::core::ChainSpec; +} + +pub mod beacon_block { + pub use crate::block::{BlindedBeaconBlock, BlockImportSource}; +} + +pub mod beacon_block_body { + pub use crate::kzg_ext::{KzgCommitments, format_kzg_commitments}; +} + +pub mod beacon_state { + pub use crate::state::{ + BeaconState, BeaconStateBase, CommitteeCache, compute_committee_index_in_epoch, + compute_committee_range_in_epoch, epoch_committee_count, + }; +} + +pub mod graffiti { + pub use crate::core::GraffitiString; +} + +pub mod indexed_attestation { + pub use crate::attestation::{IndexedAttestationBase, IndexedAttestationElectra}; +} + +pub mod historical_summary { + pub use crate::state::HistoricalSummary; +} + +pub mod participation_flags { + pub use 
crate::attestation::ParticipationFlags; +} + +pub mod epoch_cache { + pub use crate::state::{EpochCache, EpochCacheError, EpochCacheKey}; +} + +pub mod non_zero_usize { + pub use crate::core::new_non_zero_usize; +} + +pub mod data_column_sidecar { + pub use crate::data::{ + Cell, ColumnIndex, DataColumn, DataColumnSidecar, DataColumnSidecarError, + DataColumnSidecarList, + }; +} + +pub mod builder_bid { + pub use crate::builder::*; +} -pub type CommitteeIndex = u64; -pub type Hash256 = fixed_bytes::Hash256; -pub type Uint256 = fixed_bytes::Uint256; -pub type Address = fixed_bytes::Address; -pub type ForkVersion = [u8; 4]; -pub type BLSFieldElement = Uint256; -pub type Blob = FixedVector::BytesPerBlob>; -// Note on List limit: -// - Deneb to Electra: `MaxBlobCommitmentsPerBlock` -// - Fulu: `MaxCellsPerBlock` -// We choose to use a single type (with the larger value from Fulu as `N`) instead of having to -// introduce a new type for Fulu. This is to avoid messy conversions and having to add extra types -// with no gains - as `N` does not impact serialisation at all, and only affects merkleization, -// which we don't current do on `KzgProofs` anyway. 
-pub type KzgProofs = VariableList::MaxCellsPerBlock>; -pub type VersionedHash = Hash256; -pub type Hash64 = alloy_primitives::B64; +pub mod blob_sidecar { + pub use crate::data::{ + BlobIdentifier, BlobSidecar, BlobSidecarError, BlobsList, FixedBlobSidecarList, + }; +} + +pub mod payload { + pub use crate::execution::BlockProductionVersion; +} + +pub mod execution_requests { + pub use crate::execution::{ + ConsolidationRequests, DepositRequests, ExecutionRequests, RequestType, WithdrawalRequests, + }; +} + +pub mod data_column_custody_group { + pub use crate::data::{ + CustodyIndex, compute_columns_for_custody_group, compute_ordered_custody_column_indices, + compute_subnets_for_node, compute_subnets_from_custody_group, get_custody_groups, + }; +} + +pub mod sync_aggregate { + pub use crate::sync_committee::SyncAggregateError as Error; +} + +pub mod light_client_update { + pub use crate::light_client::consts::{ + CURRENT_SYNC_COMMITTEE_INDEX, CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA, FINALIZED_ROOT_INDEX, + FINALIZED_ROOT_INDEX_ELECTRA, MAX_REQUEST_LIGHT_CLIENT_UPDATES, NEXT_SYNC_COMMITTEE_INDEX, + NEXT_SYNC_COMMITTEE_INDEX_ELECTRA, + }; +} + +pub mod sync_committee_contribution { + pub use crate::sync_committee::{ + SyncCommitteeContributionError as Error, SyncContributionData, + }; +} + +pub mod slot_data { + pub use crate::core::SlotData; +} + +pub mod signed_aggregate_and_proof { + pub use crate::attestation::SignedAggregateAndProofRefMut; +} + +pub mod application_domain { + pub use crate::core::ApplicationDomain; +} + +// Temporary re-exports to maintain backwards compatibility for Lighthouse. 
+pub use crate::kzg_ext::consts::VERSIONED_HASH_VERSION_KZG; +pub use crate::light_client::LightClientError as LightClientUpdateError; +pub use crate::state::BeaconStateError as Error; pub use bls::{ - AggregatePublicKey, AggregateSignature, Keypair, PublicKey, PublicKeyBytes, SecretKey, - Signature, SignatureBytes, + AggregatePublicKey, AggregateSignature, Error as BlsError, Keypair, PUBLIC_KEY_BYTES_LEN, + PublicKey, PublicKeyBytes, SIGNATURE_BYTES_LEN, SecretKey, Signature, SignatureBytes, + get_withdrawal_credentials, }; pub use context_deserialize::{ContextDeserialize, context_deserialize}; -pub use kzg::{KzgCommitment, KzgProof, VERSIONED_HASH_VERSION_KZG}; +pub use fixed_bytes::FixedBytesExtended; pub use milhouse::{self, List, Vector}; pub use ssz_types::{BitList, BitVector, FixedVector, VariableList, typenum, typenum::Unsigned}; pub use superstruct::superstruct; diff --git a/consensus/types/src/light_client/consts.rs b/consensus/types/src/light_client/consts.rs new file mode 100644 index 00000000000..0092e75e873 --- /dev/null +++ b/consensus/types/src/light_client/consts.rs @@ -0,0 +1,21 @@ +pub const FINALIZED_ROOT_PROOF_LEN: usize = 6; +pub const CURRENT_SYNC_COMMITTEE_PROOF_LEN: usize = 5; +pub const NEXT_SYNC_COMMITTEE_PROOF_LEN: usize = 5; +pub const EXECUTION_PAYLOAD_PROOF_LEN: usize = 4; + +pub const FINALIZED_ROOT_PROOF_LEN_ELECTRA: usize = 7; +pub const NEXT_SYNC_COMMITTEE_PROOF_LEN_ELECTRA: usize = 6; +pub const CURRENT_SYNC_COMMITTEE_PROOF_LEN_ELECTRA: usize = 6; + +pub const FINALIZED_ROOT_INDEX: usize = 105; +pub const CURRENT_SYNC_COMMITTEE_INDEX: usize = 54; +pub const NEXT_SYNC_COMMITTEE_INDEX: usize = 55; +pub const EXECUTION_PAYLOAD_INDEX: usize = 25; + +pub const FINALIZED_ROOT_INDEX_ELECTRA: usize = 169; +pub const CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA: usize = 86; +pub const NEXT_SYNC_COMMITTEE_INDEX_ELECTRA: usize = 87; + +// Max light client updates by range request limits +// spec: 
https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/light-client/p2p-interface.md#configuration +pub const MAX_REQUEST_LIGHT_CLIENT_UPDATES: u64 = 128; diff --git a/consensus/types/src/light_client/error.rs b/consensus/types/src/light_client/error.rs new file mode 100644 index 00000000000..c492cfcbde3 --- /dev/null +++ b/consensus/types/src/light_client/error.rs @@ -0,0 +1,41 @@ +use safe_arith::ArithError; + +use crate::state::BeaconStateError; + +#[derive(Debug, PartialEq, Clone)] +pub enum LightClientError { + SszTypesError(ssz_types::Error), + MilhouseError(milhouse::Error), + BeaconStateError(BeaconStateError), + ArithError(ArithError), + AltairForkNotActive, + NotEnoughSyncCommitteeParticipants, + MismatchingPeriods, + InvalidFinalizedBlock, + BeaconBlockBodyError, + InconsistentFork, +} + +impl From for LightClientError { + fn from(e: ssz_types::Error) -> LightClientError { + LightClientError::SszTypesError(e) + } +} + +impl From for LightClientError { + fn from(e: BeaconStateError) -> LightClientError { + LightClientError::BeaconStateError(e) + } +} + +impl From for LightClientError { + fn from(e: ArithError) -> LightClientError { + LightClientError::ArithError(e) + } +} + +impl From for LightClientError { + fn from(e: milhouse::Error) -> LightClientError { + LightClientError::MilhouseError(e) + } +} diff --git a/consensus/types/src/light_client_bootstrap.rs b/consensus/types/src/light_client/light_client_bootstrap.rs similarity index 88% rename from consensus/types/src/light_client_bootstrap.rs rename to consensus/types/src/light_client/light_client_bootstrap.rs index 80d5bbacf9e..847b2a2a963 100644 --- a/consensus/types/src/light_client_bootstrap.rs +++ b/consensus/types/src/light_client/light_client_bootstrap.rs @@ -1,19 +1,30 @@ -use crate::context_deserialize; -use crate::{ - BeaconState, ChainSpec, ContextDeserialize, EthSpec, FixedVector, ForkName, Hash256, - LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, 
LightClientHeaderDeneb, - LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, - SignedBlindedBeaconBlock, Slot, SyncCommittee, light_client_update::*, test_utils::TestRandom, -}; +use std::sync::Arc; + +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; -use std::sync::Arc; +use ssz_types::FixedVector; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + block::SignedBlindedBeaconBlock, + core::{ChainSpec, EthSpec, Hash256, Slot}, + fork::ForkName, + light_client::{ + CurrentSyncCommitteeProofLen, CurrentSyncCommitteeProofLenElectra, LightClientError, + LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, + LightClientHeaderDeneb, LightClientHeaderElectra, LightClientHeaderFulu, + LightClientHeaderGloas, + }, + state::BeaconState, + sync_committee::SyncCommittee, + test_utils::TestRandom, +}; + /// A LightClientBootstrap is the initializer we send over to light_client nodes /// that are trying to generate their basic storage when booting up. #[superstruct( @@ -142,53 +153,53 @@ impl LightClientBootstrap { current_sync_committee: Arc>, current_sync_committee_branch: Vec, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { let light_client_bootstrap = match block .fork_name(chain_spec) - .map_err(|_| Error::InconsistentFork)? + .map_err(|_| LightClientError::InconsistentFork)? 
{ - ForkName::Base => return Err(Error::AltairForkNotActive), + ForkName::Base => return Err(LightClientError::AltairForkNotActive), ForkName::Altair | ForkName::Bellatrix => Self::Altair(LightClientBootstrapAltair { header: LightClientHeaderAltair::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Capella => Self::Capella(LightClientBootstrapCapella { header: LightClientHeaderCapella::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Deneb => Self::Deneb(LightClientBootstrapDeneb { header: LightClientHeaderDeneb::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Electra => Self::Electra(LightClientBootstrapElectra { header: LightClientHeaderElectra::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Fulu => Self::Fulu(LightClientBootstrapFulu { header: LightClientHeaderFulu::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Gloas => Self::Gloas(LightClientBootstrapGloas { header: LightClientHeaderGloas::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + 
.map_err(LightClientError::SszTypesError)?, }), }; @@ -199,56 +210,56 @@ impl LightClientBootstrap { beacon_state: &mut BeaconState, block: &SignedBlindedBeaconBlock, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { let current_sync_committee_branch = beacon_state.compute_current_sync_committee_proof()?; let current_sync_committee = beacon_state.current_sync_committee()?.clone(); let light_client_bootstrap = match block .fork_name(chain_spec) - .map_err(|_| Error::InconsistentFork)? + .map_err(|_| LightClientError::InconsistentFork)? { - ForkName::Base => return Err(Error::AltairForkNotActive), + ForkName::Base => return Err(LightClientError::AltairForkNotActive), ForkName::Altair | ForkName::Bellatrix => Self::Altair(LightClientBootstrapAltair { header: LightClientHeaderAltair::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Capella => Self::Capella(LightClientBootstrapCapella { header: LightClientHeaderCapella::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Deneb => Self::Deneb(LightClientBootstrapDeneb { header: LightClientHeaderDeneb::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Electra => Self::Electra(LightClientBootstrapElectra { header: LightClientHeaderElectra::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Fulu => 
Self::Fulu(LightClientBootstrapFulu { header: LightClientHeaderFulu::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), ForkName::Gloas => Self::Gloas(LightClientBootstrapGloas { header: LightClientHeaderGloas::block_to_light_client_header(block)?, current_sync_committee, current_sync_committee_branch: current_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, }), }; diff --git a/consensus/types/src/light_client_finality_update.rs b/consensus/types/src/light_client/light_client_finality_update.rs similarity index 89% rename from consensus/types/src/light_client_finality_update.rs rename to consensus/types/src/light_client/light_client_finality_update.rs index e58d7f4d72b..04374edcd96 100644 --- a/consensus/types/src/light_client_finality_update.rs +++ b/consensus/types/src/light_client/light_client_finality_update.rs @@ -1,21 +1,27 @@ -use super::{EthSpec, FixedVector, Hash256, LightClientHeader, Slot, SyncAggregate}; -use crate::ChainSpec; -use crate::context_deserialize; -use crate::{ - ContextDeserialize, ForkName, LightClientHeaderAltair, LightClientHeaderCapella, - LightClientHeaderDeneb, LightClientHeaderElectra, LightClientHeaderFulu, - LightClientHeaderGloas, SignedBlindedBeaconBlock, light_client_update::*, - test_utils::TestRandom, -}; +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::Decode; use ssz_derive::Encode; +use ssz_types::FixedVector; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + block::SignedBlindedBeaconBlock, + core::{ChainSpec, EthSpec, Hash256, Slot}, + fork::ForkName, + light_client::{ + FinalizedRootProofLen, 
FinalizedRootProofLenElectra, LightClientError, LightClientHeader, + LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, + LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, + }, + sync_committee::SyncAggregate, + test_utils::TestRandom, +}; + #[superstruct( variants(Altair, Capella, Deneb, Electra, Fulu, Gloas), variant_attributes( @@ -103,10 +109,10 @@ impl LightClientFinalityUpdate { sync_aggregate: SyncAggregate, signature_slot: Slot, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { let finality_update = match attested_block .fork_name(chain_spec) - .map_err(|_| Error::InconsistentFork)? + .map_err(|_| LightClientError::InconsistentFork)? { ForkName::Altair | ForkName::Bellatrix => { Self::Altair(LightClientFinalityUpdateAltair { @@ -116,7 +122,9 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderAltair::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate, signature_slot, }) @@ -128,7 +136,9 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderCapella::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -139,7 +149,9 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderDeneb::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -150,7 +162,9 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderElectra::block_to_light_client_header( 
finalized_block, )?, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -161,7 +175,9 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderFulu::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate, signature_slot, }), @@ -172,12 +188,14 @@ impl LightClientFinalityUpdate { finalized_header: LightClientHeaderGloas::block_to_light_client_header( finalized_block, )?, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate, signature_slot, }), - ForkName::Base => return Err(Error::AltairForkNotActive), + ForkName::Base => return Err(LightClientError::AltairForkNotActive), }; Ok(finality_update) diff --git a/consensus/types/src/light_client_header.rs b/consensus/types/src/light_client/light_client_header.rs similarity index 91% rename from consensus/types/src/light_client_header.rs rename to consensus/types/src/light_client/light_client_header.rs index 5820efcc91b..a7ecd3b7fb2 100644 --- a/consensus/types/src/light_client_header.rs +++ b/consensus/types/src/light_client/light_client_header.rs @@ -1,22 +1,27 @@ -use crate::ChainSpec; -use crate::context_deserialize; -use crate::{BeaconBlockBody, light_client_update::*}; -use crate::{BeaconBlockHeader, ExecutionPayloadHeader}; -use crate::{ContextDeserialize, ForkName}; -use crate::{ - EthSpec, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, - ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, - FixedVector, Hash256, SignedBlindedBeaconBlock, test_utils::TestRandom, -}; +use 
std::marker::PhantomData; + +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::Decode; use ssz_derive::{Decode, Encode}; -use std::marker::PhantomData; +use ssz_types::FixedVector; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + block::{BeaconBlockBody, BeaconBlockHeader, SignedBlindedBeaconBlock}, + core::{ChainSpec, EthSpec, Hash256}, + execution::{ + ExecutionPayloadHeader, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, + ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, + }, + fork::ForkName, + light_client::{ExecutionPayloadProofLen, LightClientError, consts::EXECUTION_PAYLOAD_INDEX}, + test_utils::TestRandom, +}; + #[superstruct( variants(Altair, Capella, Deneb, Electra, Fulu, Gloas), variant_attributes( @@ -85,12 +90,12 @@ impl LightClientHeader { pub fn block_to_light_client_header( block: &SignedBlindedBeaconBlock, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { let header = match block .fork_name(chain_spec) - .map_err(|_| Error::InconsistentFork)? + .map_err(|_| LightClientError::InconsistentFork)? 
{ - ForkName::Base => return Err(Error::AltairForkNotActive), + ForkName::Base => return Err(LightClientError::AltairForkNotActive), ForkName::Altair | ForkName::Bellatrix => LightClientHeader::Altair( LightClientHeaderAltair::block_to_light_client_header(block)?, ), @@ -163,7 +168,7 @@ impl LightClientHeader { impl LightClientHeaderAltair { pub fn block_to_light_client_header( block: &SignedBlindedBeaconBlock, - ) -> Result { + ) -> Result { Ok(LightClientHeaderAltair { beacon: block.message().block_header(), _phantom_data: PhantomData, @@ -183,7 +188,7 @@ impl Default for LightClientHeaderAltair { impl LightClientHeaderCapella { pub fn block_to_light_client_header( block: &SignedBlindedBeaconBlock, - ) -> Result { + ) -> Result { let payload = block .message() .execution_payload()? @@ -194,7 +199,7 @@ impl LightClientHeaderCapella { block .message() .body_capella() - .map_err(|_| Error::BeaconBlockBodyError)? + .map_err(|_| LightClientError::BeaconBlockBodyError)? .to_owned(), ); @@ -225,7 +230,7 @@ impl Default for LightClientHeaderCapella { impl LightClientHeaderDeneb { pub fn block_to_light_client_header( block: &SignedBlindedBeaconBlock, - ) -> Result { + ) -> Result { let header = block .message() .execution_payload()? @@ -236,7 +241,7 @@ impl LightClientHeaderDeneb { block .message() .body_deneb() - .map_err(|_| Error::BeaconBlockBodyError)? + .map_err(|_| LightClientError::BeaconBlockBodyError)? .to_owned(), ); @@ -267,7 +272,7 @@ impl Default for LightClientHeaderDeneb { impl LightClientHeaderElectra { pub fn block_to_light_client_header( block: &SignedBlindedBeaconBlock, - ) -> Result { + ) -> Result { let payload = block .message() .execution_payload()? @@ -278,7 +283,7 @@ impl LightClientHeaderElectra { block .message() .body_electra() - .map_err(|_| Error::BeaconBlockBodyError)? + .map_err(|_| LightClientError::BeaconBlockBodyError)? 
.to_owned(), ); @@ -309,7 +314,7 @@ impl Default for LightClientHeaderElectra { impl LightClientHeaderFulu { pub fn block_to_light_client_header( block: &SignedBlindedBeaconBlock, - ) -> Result { + ) -> Result { let payload = block .message() .execution_payload()? @@ -320,7 +325,7 @@ impl LightClientHeaderFulu { block .message() .body_fulu() - .map_err(|_| Error::BeaconBlockBodyError)? + .map_err(|_| LightClientError::BeaconBlockBodyError)? .to_owned(), ); @@ -351,7 +356,7 @@ impl Default for LightClientHeaderFulu { impl LightClientHeaderGloas { pub fn block_to_light_client_header( block: &SignedBlindedBeaconBlock, - ) -> Result { + ) -> Result { let payload = block .message() .execution_payload()? @@ -362,7 +367,7 @@ impl LightClientHeaderGloas { block .message() .body_gloas() - .map_err(|_| Error::BeaconBlockBodyError)? + .map_err(|_| LightClientError::BeaconBlockBodyError)? .to_owned(), ); diff --git a/consensus/types/src/light_client_optimistic_update.rs b/consensus/types/src/light_client/light_client_optimistic_update.rs similarity index 94% rename from consensus/types/src/light_client_optimistic_update.rs rename to consensus/types/src/light_client/light_client_optimistic_update.rs index ca9957331f8..9266ce647a4 100644 --- a/consensus/types/src/light_client_optimistic_update.rs +++ b/consensus/types/src/light_client/light_client_optimistic_update.rs @@ -1,21 +1,26 @@ -use super::{ContextDeserialize, EthSpec, ForkName, LightClientHeader, Slot, SyncAggregate}; -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ - ChainSpec, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, - LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, - SignedBlindedBeaconBlock, light_client_update::*, -}; +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; -use ssz_derive::Decode; -use 
ssz_derive::Encode; +use ssz_derive::{Decode, Encode}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash::Hash256; use tree_hash_derive::TreeHash; +use crate::{ + block::SignedBlindedBeaconBlock, + core::{ChainSpec, EthSpec, Slot}, + fork::ForkName, + light_client::{ + LightClientError, LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, + LightClientHeaderDeneb, LightClientHeaderElectra, LightClientHeaderFulu, + LightClientHeaderGloas, + }, + sync_committee::SyncAggregate, + test_utils::TestRandom, +}; + /// A LightClientOptimisticUpdate is the update we send on each slot, /// it is based off the current unfinalized epoch is verified only against BLS signature. #[superstruct( @@ -79,10 +84,10 @@ impl LightClientOptimisticUpdate { sync_aggregate: SyncAggregate, signature_slot: Slot, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { let optimistic_update = match attested_block .fork_name(chain_spec) - .map_err(|_| Error::InconsistentFork)? + .map_err(|_| LightClientError::InconsistentFork)? 
{ ForkName::Altair | ForkName::Bellatrix => { Self::Altair(LightClientOptimisticUpdateAltair { @@ -128,7 +133,7 @@ impl LightClientOptimisticUpdate { sync_aggregate, signature_slot, }), - ForkName::Base => return Err(Error::AltairForkNotActive), + ForkName::Base => return Err(LightClientError::AltairForkNotActive), }; Ok(optimistic_update) diff --git a/consensus/types/src/light_client_update.rs b/consensus/types/src/light_client/light_client_update.rs similarity index 87% rename from consensus/types/src/light_client_update.rs rename to consensus/types/src/light_client/light_client_update.rs index ede9436c50d..7fc2c36239b 100644 --- a/consensus/types/src/light_client_update.rs +++ b/consensus/types/src/light_client/light_client_update.rs @@ -1,12 +1,6 @@ -use super::{EthSpec, FixedVector, Hash256, Slot, SyncAggregate, SyncCommittee}; -use crate::LightClientHeader; -use crate::context_deserialize; -use crate::light_client_header::LightClientHeaderElectra; -use crate::{ - ChainSpec, ContextDeserialize, Epoch, ForkName, LightClientHeaderAltair, - LightClientHeaderCapella, LightClientHeaderDeneb, LightClientHeaderFulu, - LightClientHeaderGloas, SignedBlindedBeaconBlock, beacon_state, test_utils::TestRandom, -}; +use std::sync::Arc; + +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use safe_arith::ArithError; use safe_arith::SafeArith; @@ -14,20 +8,24 @@ use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::Decode; use ssz_derive::Encode; +use ssz_types::FixedVector; use ssz_types::typenum::{U4, U5, U6, U7}; -use std::sync::Arc; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; -pub const FINALIZED_ROOT_INDEX: usize = 105; -pub const CURRENT_SYNC_COMMITTEE_INDEX: usize = 54; -pub const NEXT_SYNC_COMMITTEE_INDEX: usize = 55; -pub const EXECUTION_PAYLOAD_INDEX: usize = 25; - -pub const FINALIZED_ROOT_INDEX_ELECTRA: usize = 169; -pub const 
CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA: usize = 86; -pub const NEXT_SYNC_COMMITTEE_INDEX_ELECTRA: usize = 87; +use crate::{ + block::SignedBlindedBeaconBlock, + core::{ChainSpec, Epoch, EthSpec, Hash256, Slot}, + fork::ForkName, + light_client::{ + LightClientError, LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, + LightClientHeaderDeneb, LightClientHeaderElectra, LightClientHeaderFulu, + LightClientHeaderGloas, + }, + sync_committee::{SyncAggregate, SyncCommittee}, + test_utils::TestRandom, +}; pub type FinalizedRootProofLen = U6; pub type CurrentSyncCommitteeProofLen = U5; @@ -38,64 +36,12 @@ pub type FinalizedRootProofLenElectra = U7; pub type CurrentSyncCommitteeProofLenElectra = U6; pub type NextSyncCommitteeProofLenElectra = U6; -pub const FINALIZED_ROOT_PROOF_LEN: usize = 6; -pub const CURRENT_SYNC_COMMITTEE_PROOF_LEN: usize = 5; -pub const NEXT_SYNC_COMMITTEE_PROOF_LEN: usize = 5; -pub const EXECUTION_PAYLOAD_PROOF_LEN: usize = 4; - -pub const FINALIZED_ROOT_PROOF_LEN_ELECTRA: usize = 7; -pub const NEXT_SYNC_COMMITTEE_PROOF_LEN_ELECTRA: usize = 6; -pub const CURRENT_SYNC_COMMITTEE_PROOF_LEN_ELECTRA: usize = 6; - -pub type MerkleProof = Vec; -// Max light client updates by range request limits -// spec: https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/light-client/p2p-interface.md#configuration -pub const MAX_REQUEST_LIGHT_CLIENT_UPDATES: u64 = 128; - type FinalityBranch = FixedVector; type FinalityBranchElectra = FixedVector; type NextSyncCommitteeBranch = FixedVector; type NextSyncCommitteeBranchElectra = FixedVector; -#[derive(Debug, PartialEq, Clone)] -pub enum Error { - SszTypesError(ssz_types::Error), - MilhouseError(milhouse::Error), - BeaconStateError(beacon_state::Error), - ArithError(ArithError), - AltairForkNotActive, - NotEnoughSyncCommitteeParticipants, - MismatchingPeriods, - InvalidFinalizedBlock, - BeaconBlockBodyError, - InconsistentFork, -} - -impl From for Error { - fn from(e: ssz_types::Error) -> Error 
{ - Error::SszTypesError(e) - } -} - -impl From for Error { - fn from(e: beacon_state::Error) -> Error { - Error::BeaconStateError(e) - } -} - -impl From for Error { - fn from(e: ArithError) -> Error { - Error::ArithError(e) - } -} - -impl From for Error { - fn from(e: milhouse::Error) -> Error { - Error::MilhouseError(e) - } -} - /// A LightClientUpdate is the update we request solely to either complete the bootstrapping process, /// or to sync up to the last committee period, we need to have one ready for each ALTAIR period /// we go over, note: there is no need to keep all of the updates from [ALTAIR_PERIOD, CURRENT_PERIOD]. @@ -238,12 +184,12 @@ impl LightClientUpdate { attested_block: &SignedBlindedBeaconBlock, finalized_block: Option<&SignedBlindedBeaconBlock>, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { let light_client_update = match attested_block .fork_name(chain_spec) - .map_err(|_| Error::InconsistentFork)? + .map_err(|_| LightClientError::InconsistentFork)? { - ForkName::Base => return Err(Error::AltairForkNotActive), + ForkName::Base => return Err(LightClientError::AltairForkNotActive), fork_name @ ForkName::Altair | fork_name @ ForkName::Bellatrix => { let attested_header = LightClientHeaderAltair::block_to_light_client_header(attested_block)?; @@ -263,9 +209,11 @@ impl LightClientUpdate { next_sync_committee, next_sync_committee_branch: next_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, finalized_header, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -289,9 +237,11 @@ impl LightClientUpdate { next_sync_committee, next_sync_committee_branch: next_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, 
finalized_header, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -315,9 +265,11 @@ impl LightClientUpdate { next_sync_committee, next_sync_committee_branch: next_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, finalized_header, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -341,9 +293,11 @@ impl LightClientUpdate { next_sync_committee, next_sync_committee_branch: next_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, finalized_header, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -367,9 +321,11 @@ impl LightClientUpdate { next_sync_committee, next_sync_committee_branch: next_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, finalized_header, - finality_branch: finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -393,9 +349,11 @@ impl LightClientUpdate { next_sync_committee, next_sync_committee_branch: next_sync_committee_branch .try_into() - .map_err(Error::SszTypesError)?, + .map_err(LightClientError::SszTypesError)?, finalized_header, - finality_branch: 
finality_branch.try_into().map_err(Error::SszTypesError)?, + finality_branch: finality_branch + .try_into() + .map_err(LightClientError::SszTypesError)?, sync_aggregate: sync_aggregate.clone(), signature_slot: block_slot, }) @@ -452,23 +410,32 @@ impl LightClientUpdate { fn attested_header_sync_committee_period( &self, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { compute_sync_committee_period_at_slot::(self.attested_header_slot(), chain_spec) - .map_err(Error::ArithError) + .map_err(LightClientError::ArithError) } - fn signature_slot_sync_committee_period(&self, chain_spec: &ChainSpec) -> Result { + fn signature_slot_sync_committee_period( + &self, + chain_spec: &ChainSpec, + ) -> Result { compute_sync_committee_period_at_slot::(*self.signature_slot(), chain_spec) - .map_err(Error::ArithError) + .map_err(LightClientError::ArithError) } - pub fn is_sync_committee_update(&self, chain_spec: &ChainSpec) -> Result { + pub fn is_sync_committee_update( + &self, + chain_spec: &ChainSpec, + ) -> Result { Ok(!self.is_next_sync_committee_branch_empty() && (self.attested_header_sync_committee_period(chain_spec)? == self.signature_slot_sync_committee_period(chain_spec)?)) } - pub fn has_sync_committee_finality(&self, chain_spec: &ChainSpec) -> Result { + pub fn has_sync_committee_finality( + &self, + chain_spec: &ChainSpec, + ) -> Result { Ok( compute_sync_committee_period_at_slot::(self.finalized_header_slot(), chain_spec)? 
== self.attested_header_sync_committee_period(chain_spec)?, @@ -482,7 +449,7 @@ impl LightClientUpdate { &self, new: &Self, chain_spec: &ChainSpec, - ) -> Result { + ) -> Result { // Compare super majority (> 2/3) sync committee participation let max_active_participants = new.sync_aggregate().sync_committee_bits.len(); @@ -606,6 +573,7 @@ fn compute_sync_committee_period_at_slot( #[cfg(test)] mod tests { use super::*; + use crate::light_client::consts::*; use ssz_types::typenum::Unsigned; // `ssz_tests!` can only be defined once per namespace diff --git a/consensus/types/src/light_client/mod.rs b/consensus/types/src/light_client/mod.rs new file mode 100644 index 00000000000..4e287c22942 --- /dev/null +++ b/consensus/types/src/light_client/mod.rs @@ -0,0 +1,37 @@ +mod error; +mod light_client_bootstrap; +mod light_client_finality_update; +mod light_client_header; +mod light_client_optimistic_update; +mod light_client_update; + +pub mod consts; + +pub use error::LightClientError; +pub use light_client_bootstrap::{ + LightClientBootstrap, LightClientBootstrapAltair, LightClientBootstrapCapella, + LightClientBootstrapDeneb, LightClientBootstrapElectra, LightClientBootstrapFulu, + LightClientBootstrapGloas, +}; +pub use light_client_finality_update::{ + LightClientFinalityUpdate, LightClientFinalityUpdateAltair, LightClientFinalityUpdateCapella, + LightClientFinalityUpdateDeneb, LightClientFinalityUpdateElectra, + LightClientFinalityUpdateFulu, LightClientFinalityUpdateGloas, +}; +pub use light_client_header::{ + LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, + LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, +}; +pub use light_client_optimistic_update::{ + LightClientOptimisticUpdate, LightClientOptimisticUpdateAltair, + LightClientOptimisticUpdateCapella, LightClientOptimisticUpdateDeneb, + LightClientOptimisticUpdateElectra, LightClientOptimisticUpdateFulu, + LightClientOptimisticUpdateGloas, +}; 
+pub use light_client_update::{ + CurrentSyncCommitteeProofLen, CurrentSyncCommitteeProofLenElectra, ExecutionPayloadProofLen, + FinalizedRootProofLen, FinalizedRootProofLenElectra, LightClientUpdate, + LightClientUpdateAltair, LightClientUpdateCapella, LightClientUpdateDeneb, + LightClientUpdateElectra, LightClientUpdateFulu, LightClientUpdateGloas, + NextSyncCommitteeProofLen, NextSyncCommitteeProofLenElectra, +}; diff --git a/consensus/types/src/runtime_fixed_vector.rs b/consensus/types/src/runtime_fixed_vector.rs deleted file mode 100644 index f562322a3df..00000000000 --- a/consensus/types/src/runtime_fixed_vector.rs +++ /dev/null @@ -1,90 +0,0 @@ -//! Emulates a fixed size array but with the length set at runtime. -//! -//! The length of the list cannot be changed once it is set. - -use std::fmt; -use std::fmt::Debug; - -#[derive(Clone)] -pub struct RuntimeFixedVector { - vec: Vec, - len: usize, -} - -impl Debug for RuntimeFixedVector { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?} (len={})", self.vec, self.len) - } -} - -impl RuntimeFixedVector { - pub fn new(vec: Vec) -> Self { - let len = vec.len(); - Self { vec, len } - } - - pub fn to_vec(&self) -> Vec { - self.vec.clone() - } - - pub fn as_slice(&self) -> &[T] { - self.vec.as_slice() - } - - #[allow(clippy::len_without_is_empty)] - pub fn len(&self) -> usize { - self.len - } - - pub fn into_vec(self) -> Vec { - self.vec - } - - pub fn default(max_len: usize) -> Self { - Self { - vec: vec![T::default(); max_len], - len: max_len, - } - } - - pub fn take(&mut self) -> Self { - let new = std::mem::take(&mut self.vec); - *self = Self::new(vec![T::default(); self.len]); - Self { - vec: new, - len: self.len, - } - } -} - -impl std::ops::Deref for RuntimeFixedVector { - type Target = [T]; - - fn deref(&self) -> &[T] { - &self.vec[..] - } -} - -impl std::ops::DerefMut for RuntimeFixedVector { - fn deref_mut(&mut self) -> &mut [T] { - &mut self.vec[..] 
- } -} - -impl IntoIterator for RuntimeFixedVector { - type Item = T; - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.vec.into_iter() - } -} - -impl<'a, T> IntoIterator for &'a RuntimeFixedVector { - type Item = &'a T; - type IntoIter = std::slice::Iter<'a, T>; - - fn into_iter(self) -> Self::IntoIter { - self.vec.iter() - } -} diff --git a/consensus/types/src/runtime_var_list.rs b/consensus/types/src/runtime_var_list.rs deleted file mode 100644 index e7b846029ef..00000000000 --- a/consensus/types/src/runtime_var_list.rs +++ /dev/null @@ -1,387 +0,0 @@ -use crate::ContextDeserialize; -use educe::Educe; -use serde::de::Error as DeError; -use serde::{Deserialize, Deserializer, Serialize}; -use ssz::Decode; -use ssz_types::Error; -use std::fmt; -use std::fmt::Debug; -use std::ops::{Deref, Index, IndexMut}; -use std::slice::SliceIndex; -use tree_hash::{Hash256, MerkleHasher, PackedEncoding, TreeHash, TreeHashType}; - -/// Emulates a SSZ `List`. -/// -/// An ordered, heap-allocated, variable-length, homogeneous collection of `T`, with no more than -/// `max_len` values. -/// -/// To ensure there are no inconsistent states, we do not allow any mutating operation if `max_len` is not set. -/// -/// ## Example -/// -/// ``` -/// use types::{RuntimeVariableList}; -/// -/// let base: Vec = vec![1, 2, 3, 4]; -/// -/// // Create a `RuntimeVariableList` from a `Vec` that has the expected length. -/// let exact: RuntimeVariableList<_> = RuntimeVariableList::new(base.clone(), 4).unwrap(); -/// assert_eq!(&exact[..], &[1, 2, 3, 4]); -/// -/// // Create a `RuntimeVariableList` from a `Vec` that is too long you'll get an error. -/// let err = RuntimeVariableList::new(base.clone(), 3).unwrap_err(); -/// assert_eq!(err, ssz_types::Error::OutOfBounds { i: 4, len: 3 }); -/// -/// // Create a `RuntimeVariableList` from a `Vec` that is shorter than the maximum. 
-/// let mut long: RuntimeVariableList<_> = RuntimeVariableList::new(base, 5).unwrap(); -/// assert_eq!(&long[..], &[1, 2, 3, 4]); -/// -/// // Push a value to if it does not exceed the maximum -/// long.push(5).unwrap(); -/// assert_eq!(&long[..], &[1, 2, 3, 4, 5]); -/// -/// // Push a value to if it _does_ exceed the maximum. -/// assert!(long.push(6).is_err()); -/// -/// ``` -#[derive(Clone, Serialize, Deserialize, Educe)] -#[educe(PartialEq, Eq, Hash(bound(T: std::hash::Hash)))] -#[serde(transparent)] -pub struct RuntimeVariableList { - vec: Vec, - #[serde(skip)] - max_len: usize, -} - -impl Debug for RuntimeVariableList { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{:?} (max_len={})", self.vec, self.max_len) - } -} - -impl RuntimeVariableList { - /// Returns `Ok` if the given `vec` equals the fixed length of `Self`. Otherwise returns - /// `Err(OutOfBounds { .. })`. - pub fn new(vec: Vec, max_len: usize) -> Result { - if vec.len() <= max_len { - Ok(Self { vec, max_len }) - } else { - Err(Error::OutOfBounds { - i: vec.len(), - len: max_len, - }) - } - } - - /// Create an empty list with the given `max_len`. - pub fn empty(max_len: usize) -> Self { - Self { - vec: vec![], - max_len, - } - } - - pub fn as_slice(&self) -> &[T] { - self.vec.as_slice() - } - - pub fn as_mut_slice(&mut self) -> &mut [T] { - self.vec.as_mut_slice() - } - - /// Returns the number of values presently in `self`. - pub fn len(&self) -> usize { - self.vec.len() - } - - /// True if `self` does not contain any values. - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Returns the type-level maximum length. - /// - /// Returns `None` if self is uninitialized with a max_len. - pub fn max_len(&self) -> usize { - self.max_len - } - - /// Appends `value` to the back of `self`. - /// - /// Returns `Err(())` when appending `value` would exceed the maximum length. 
- pub fn push(&mut self, value: T) -> Result<(), Error> { - if self.vec.len() < self.max_len { - self.vec.push(value); - Ok(()) - } else { - Err(Error::OutOfBounds { - i: self.vec.len().saturating_add(1), - len: self.max_len, - }) - } - } -} - -impl RuntimeVariableList { - pub fn from_ssz_bytes(bytes: &[u8], max_len: usize) -> Result { - let vec = if bytes.is_empty() { - vec![] - } else if ::is_ssz_fixed_len() { - let num_items = bytes - .len() - .checked_div(::ssz_fixed_len()) - .ok_or(ssz::DecodeError::ZeroLengthItem)?; - - if num_items > max_len { - return Err(ssz::DecodeError::BytesInvalid(format!( - "RuntimeVariableList of {} items exceeds maximum of {}", - num_items, max_len - ))); - } - - bytes.chunks(::ssz_fixed_len()).try_fold( - Vec::with_capacity(num_items), - |mut vec, chunk| { - vec.push(::from_ssz_bytes(chunk)?); - Ok(vec) - }, - )? - } else { - ssz::decode_list_of_variable_length_items(bytes, Some(max_len))? - }; - Ok(Self { vec, max_len }) - } -} - -impl From> for Vec { - fn from(list: RuntimeVariableList) -> Vec { - list.vec - } -} - -impl> Index for RuntimeVariableList { - type Output = I::Output; - - #[inline] - fn index(&self, index: I) -> &Self::Output { - Index::index(&self.vec, index) - } -} - -impl> IndexMut for RuntimeVariableList { - #[inline] - fn index_mut(&mut self, index: I) -> &mut Self::Output { - IndexMut::index_mut(&mut self.vec, index) - } -} - -impl Deref for RuntimeVariableList { - type Target = [T]; - - fn deref(&self) -> &[T] { - &self.vec[..] 
- } -} - -impl<'a, T> IntoIterator for &'a RuntimeVariableList { - type Item = &'a T; - type IntoIter = std::slice::Iter<'a, T>; - - fn into_iter(self) -> Self::IntoIter { - self.iter() - } -} - -impl IntoIterator for RuntimeVariableList { - type Item = T; - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.vec.into_iter() - } -} - -impl ssz::Encode for RuntimeVariableList -where - T: ssz::Encode, -{ - fn is_ssz_fixed_len() -> bool { - >::is_ssz_fixed_len() - } - - fn ssz_append(&self, buf: &mut Vec) { - self.vec.ssz_append(buf) - } - - fn ssz_fixed_len() -> usize { - >::ssz_fixed_len() - } - - fn ssz_bytes_len(&self) -> usize { - self.vec.ssz_bytes_len() - } -} - -impl<'de, C, T> ContextDeserialize<'de, (C, usize)> for RuntimeVariableList -where - T: ContextDeserialize<'de, C>, - C: Clone, -{ - fn context_deserialize(deserializer: D, context: (C, usize)) -> Result - where - D: Deserializer<'de>, - { - // first parse out a Vec using the Vec impl you already have - let vec: Vec = Vec::context_deserialize(deserializer, context.0)?; - let vec_len = vec.len(); - RuntimeVariableList::new(vec, context.1).map_err(|e| { - DeError::custom(format!( - "RuntimeVariableList length {} exceeds max_len {}: {e:?}", - vec_len, context.1, - )) - }) - } -} - -impl TreeHash for RuntimeVariableList { - fn tree_hash_type() -> tree_hash::TreeHashType { - tree_hash::TreeHashType::List - } - - fn tree_hash_packed_encoding(&self) -> PackedEncoding { - unreachable!("List should never be packed.") - } - - fn tree_hash_packing_factor() -> usize { - unreachable!("List should never be packed.") - } - - fn tree_hash_root(&self) -> Hash256 { - let root = runtime_vec_tree_hash_root::(&self.vec, self.max_len); - - tree_hash::mix_in_length(&root, self.len()) - } -} - -// We can delete this once the upstream `vec_tree_hash_root` is modified to use a runtime max len. 
-pub fn runtime_vec_tree_hash_root(vec: &[T], max_len: usize) -> Hash256 -where - T: TreeHash, -{ - match T::tree_hash_type() { - TreeHashType::Basic => { - let mut hasher = - MerkleHasher::with_leaves(max_len.div_ceil(T::tree_hash_packing_factor())); - - for item in vec { - hasher - .write(&item.tree_hash_packed_encoding()) - .expect("ssz_types variable vec should not contain more elements than max"); - } - - hasher - .finish() - .expect("ssz_types variable vec should not have a remaining buffer") - } - TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => { - let mut hasher = MerkleHasher::with_leaves(max_len); - - for item in vec { - hasher - .write(item.tree_hash_root().as_slice()) - .expect("ssz_types vec should not contain more elements than max"); - } - - hasher - .finish() - .expect("ssz_types vec should not have a remaining buffer") - } - } -} - -#[cfg(test)] -mod test { - use super::*; - use ssz::*; - use std::fmt::Debug; - - #[test] - fn new() { - let vec = vec![42; 5]; - let fixed: Result, _> = RuntimeVariableList::new(vec, 4); - assert!(fixed.is_err()); - - let vec = vec![42; 3]; - let fixed: Result, _> = RuntimeVariableList::new(vec, 4); - assert!(fixed.is_ok()); - - let vec = vec![42; 4]; - let fixed: Result, _> = RuntimeVariableList::new(vec, 4); - assert!(fixed.is_ok()); - } - - #[test] - fn indexing() { - let vec = vec![1, 2]; - - let mut fixed: RuntimeVariableList = - RuntimeVariableList::new(vec.clone(), 8192).unwrap(); - - assert_eq!(fixed[0], 1); - assert_eq!(&fixed[0..1], &vec[0..1]); - assert_eq!(fixed[..].len(), 2); - - fixed[1] = 3; - assert_eq!(fixed[1], 3); - } - - #[test] - fn length() { - // Too long. 
- let vec = vec![42; 5]; - let err = RuntimeVariableList::::new(vec.clone(), 4).unwrap_err(); - assert_eq!(err, Error::OutOfBounds { i: 5, len: 4 }); - - let vec = vec![42; 3]; - let fixed: RuntimeVariableList = RuntimeVariableList::new(vec.clone(), 4).unwrap(); - assert_eq!(&fixed[0..3], &vec[..]); - assert_eq!(&fixed[..], &vec![42, 42, 42][..]); - - let vec = vec![]; - let fixed: RuntimeVariableList = RuntimeVariableList::new(vec, 4).unwrap(); - assert_eq!(&fixed[..], &[] as &[u64]); - } - - #[test] - fn deref() { - let vec = vec![0, 2, 4, 6]; - let fixed: RuntimeVariableList = RuntimeVariableList::new(vec, 4).unwrap(); - - assert_eq!(fixed.first(), Some(&0)); - assert_eq!(fixed.get(3), Some(&6)); - assert_eq!(fixed.get(4), None); - } - - #[test] - fn encode() { - let vec: RuntimeVariableList = RuntimeVariableList::new(vec![0; 2], 2).unwrap(); - assert_eq!(vec.as_ssz_bytes(), vec![0, 0, 0, 0]); - assert_eq!( as Encode>::ssz_fixed_len(), 4); - } - - fn round_trip(item: RuntimeVariableList) { - let max_len = item.max_len(); - let encoded = &item.as_ssz_bytes(); - assert_eq!(item.ssz_bytes_len(), encoded.len()); - assert_eq!( - RuntimeVariableList::from_ssz_bytes(encoded, max_len), - Ok(item) - ); - } - - #[test] - fn u16_len_8() { - round_trip::(RuntimeVariableList::new(vec![42; 8], 8).unwrap()); - round_trip::(RuntimeVariableList::new(vec![0; 8], 8).unwrap()); - } -} diff --git a/consensus/types/src/attester_slashing.rs b/consensus/types/src/slashing/attester_slashing.rs similarity index 96% rename from consensus/types/src/attester_slashing.rs rename to consensus/types/src/slashing/attester_slashing.rs index 2bfb65653c6..5c214b35f74 100644 --- a/consensus/types/src/attester_slashing.rs +++ b/consensus/types/src/slashing/attester_slashing.rs @@ -1,9 +1,4 @@ -use crate::context_deserialize; -use crate::indexed_attestation::{ - IndexedAttestationBase, IndexedAttestationElectra, IndexedAttestationRef, -}; -use crate::{ContextDeserialize, ForkName}; -use 
crate::{EthSpec, test_utils::TestRandom}; +use context_deserialize::{ContextDeserialize, context_deserialize}; use educe::Educe; use rand::{Rng, RngCore}; use serde::{Deserialize, Deserializer, Serialize}; @@ -12,6 +7,13 @@ use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + attestation::{IndexedAttestationBase, IndexedAttestationElectra, IndexedAttestationRef}, + core::EthSpec, + fork::ForkName, + test_utils::TestRandom, +}; + #[superstruct( variants(Base, Electra), variant_attributes( diff --git a/consensus/types/src/slashing/mod.rs b/consensus/types/src/slashing/mod.rs new file mode 100644 index 00000000000..551b8e31377 --- /dev/null +++ b/consensus/types/src/slashing/mod.rs @@ -0,0 +1,8 @@ +mod attester_slashing; +mod proposer_slashing; + +pub use attester_slashing::{ + AttesterSlashing, AttesterSlashingBase, AttesterSlashingElectra, AttesterSlashingOnDisk, + AttesterSlashingRef, AttesterSlashingRefOnDisk, +}; +pub use proposer_slashing::ProposerSlashing; diff --git a/consensus/types/src/proposer_slashing.rs b/consensus/types/src/slashing/proposer_slashing.rs similarity index 86% rename from consensus/types/src/proposer_slashing.rs rename to consensus/types/src/slashing/proposer_slashing.rs index f4d914c1e59..697bd1a9aa5 100644 --- a/consensus/types/src/proposer_slashing.rs +++ b/consensus/types/src/slashing/proposer_slashing.rs @@ -1,12 +1,11 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ForkName, SignedBeaconBlockHeader}; - +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{block::SignedBeaconBlockHeader, fork::ForkName, test_utils::TestRandom}; + /// Two conflicting proposals from the same proposer (validator). 
/// /// Spec v0.12.1 diff --git a/consensus/types/src/activation_queue.rs b/consensus/types/src/state/activation_queue.rs similarity index 95% rename from consensus/types/src/activation_queue.rs rename to consensus/types/src/state/activation_queue.rs index dd3ce5f88cb..0d920a20cf0 100644 --- a/consensus/types/src/activation_queue.rs +++ b/consensus/types/src/state/activation_queue.rs @@ -1,6 +1,10 @@ -use crate::{ChainSpec, Epoch, Validator}; use std::collections::BTreeSet; +use crate::{ + core::{ChainSpec, Epoch}, + validator::Validator, +}; + /// Activation queue computed during epoch processing for use in the *next* epoch. #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(Debug, PartialEq, Eq, Default, Clone)] diff --git a/consensus/types/src/beacon_state/balance.rs b/consensus/types/src/state/balance.rs similarity index 100% rename from consensus/types/src/beacon_state/balance.rs rename to consensus/types/src/state/balance.rs diff --git a/consensus/types/src/beacon_state.rs b/consensus/types/src/state/beacon_state.rs similarity index 88% rename from consensus/types/src/beacon_state.rs rename to consensus/types/src/state/beacon_state.rs index d13e2235574..948899c98d3 100644 --- a/consensus/types/src/beacon_state.rs +++ b/consensus/types/src/state/beacon_state.rs @@ -1,49 +1,55 @@ -use self::committee_cache::get_active_validator_indices; -use crate::ContextDeserialize; -use crate::FixedBytesExtended; -use crate::historical_summary::HistoricalSummary; -use crate::test_utils::TestRandom; -use crate::*; +use std::{fmt, hash::Hash, mem, sync::Arc}; + +use bls::{AggregatePublicKey, PublicKeyBytes, Signature}; use compare_fields::CompareFields; +use context_deserialize::ContextDeserialize; use educe::Educe; use ethereum_hashing::hash; +use fixed_bytes::FixedBytesExtended; use int_to_bytes::{int_to_bytes4, int_to_bytes8}; use metastruct::{NumFields, metastruct}; -pub use pubkey_cache::PubkeyCache; +use milhouse::{List, Vector}; use 
safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, DecodeError, Encode, ssz_encode}; use ssz_derive::{Decode, Encode}; -use std::hash::Hash; -use std::{fmt, mem, sync::Arc}; +use ssz_types::{BitVector, FixedVector, typenum::Unsigned}; use superstruct::superstruct; use swap_or_not_shuffle::compute_shuffled_index; use test_random_derive::TestRandom; +use tracing::instrument; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; -pub use self::committee_cache::{ - CommitteeCache, compute_committee_index_in_epoch, compute_committee_range_in_epoch, - epoch_committee_count, +use crate::{ + attestation::{ + AttestationDuty, BeaconCommittee, Checkpoint, CommitteeIndex, ParticipationFlags, + PendingAttestation, + }, + block::{BeaconBlock, BeaconBlockHeader, SignedBeaconBlockHash}, + consolidation::PendingConsolidation, + core::{ChainSpec, Domain, Epoch, EthSpec, Hash256, RelativeEpoch, RelativeEpochError, Slot}, + deposit::PendingDeposit, + execution::{ + Eth1Data, ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, + ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, + ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, + }, + fork::{Fork, ForkName, ForkVersionDecode, InconsistentFork, map_fork_name}, + light_client::consts::{ + CURRENT_SYNC_COMMITTEE_INDEX, CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA, FINALIZED_ROOT_INDEX, + FINALIZED_ROOT_INDEX_ELECTRA, NEXT_SYNC_COMMITTEE_INDEX, NEXT_SYNC_COMMITTEE_INDEX_ELECTRA, + }, + state::{ + BlockRootsIter, CommitteeCache, EpochCache, EpochCacheError, ExitCache, HistoricalBatch, + HistoricalSummary, ProgressiveBalancesCache, PubkeyCache, SlashingsCache, + get_active_validator_indices, + }, + sync_committee::{SyncCommittee, SyncDuty}, + test_utils::TestRandom, + validator::Validator, + withdrawal::PendingPartialWithdrawal, }; -pub use crate::beacon_state::balance::Balance; -pub use 
crate::beacon_state::exit_cache::ExitCache; -pub use crate::beacon_state::progressive_balances_cache::*; -pub use crate::beacon_state::slashings_cache::SlashingsCache; -pub use eth_spec::*; -pub use iter::BlockRootsIter; -pub use milhouse::{List, Vector, interface::Interface}; -use tracing::instrument; - -#[macro_use] -mod committee_cache; -mod balance; -mod exit_cache; -mod iter; -mod progressive_balances_cache; -mod pubkey_cache; -mod slashings_cache; -mod tests; pub const CACHED_EPOCHS: usize = 3; const MAX_RANDOM_BYTE: u64 = (1 << 8) - 1; @@ -53,7 +59,7 @@ pub type Validators = List::ValidatorRegistryLimit> pub type Balances = List::ValidatorRegistryLimit>; #[derive(Debug, PartialEq, Clone)] -pub enum Error { +pub enum BeaconStateError { /// A state for a different hard-fork was required -- a severe logic error. IncorrectStateVariant, EpochOutOfBounds, @@ -197,7 +203,7 @@ enum AllowNextEpoch { } impl AllowNextEpoch { - fn upper_bound_of(self, current_epoch: Epoch) -> Result { + fn upper_bound_of(self, current_epoch: Epoch) -> Result { match self { AllowNextEpoch::True => Ok(current_epoch.safe_add(1)?), AllowNextEpoch::False => Ok(current_epoch), @@ -378,8 +384,14 @@ impl From for Hash256 { num_fields(all()), )) ), - cast_error(ty = "Error", expr = "Error::IncorrectStateVariant"), - partial_getter_error(ty = "Error", expr = "Error::IncorrectStateVariant"), + cast_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), + partial_getter_error( + ty = "BeaconStateError", + expr = "BeaconStateError::IncorrectStateVariant" + ), map_ref_mut_into(BeaconStateRef) )] #[cfg_attr( @@ -740,11 +752,11 @@ impl BeaconState { } /// Returns the `tree_hash_root` of the state. 
- pub fn canonical_root(&mut self) -> Result { + pub fn canonical_root(&mut self) -> Result { self.update_tree_hash_cache() } - pub fn historical_batch(&mut self) -> Result, Error> { + pub fn historical_batch(&mut self) -> Result, BeaconStateError> { // Updating before cloning makes the clone cheap and saves repeated hashing. self.block_roots_mut().apply_updates()?; self.state_roots_mut().apply_updates()?; @@ -758,7 +770,10 @@ impl BeaconState { /// This method ensures the state's pubkey cache is fully up-to-date before checking if the validator /// exists in the registry. If a validator pubkey exists in the validator registry, returns `Some(i)`, /// otherwise returns `None`. - pub fn get_validator_index(&mut self, pubkey: &PublicKeyBytes) -> Result, Error> { + pub fn get_validator_index( + &mut self, + pubkey: &PublicKeyBytes, + ) -> Result, BeaconStateError> { self.update_pubkey_cache()?; Ok(self.pubkey_cache().get(pubkey)) } @@ -783,7 +798,7 @@ impl BeaconState { /// The epoch following `self.current_epoch()`. /// /// Spec v0.12.1 - pub fn next_epoch(&self) -> Result { + pub fn next_epoch(&self) -> Result { Ok(self.current_epoch().safe_add(1)?) } @@ -792,7 +807,7 @@ impl BeaconState { /// Makes use of the committee cache and will fail if no cache exists for the slot's epoch. /// /// Spec v0.12.1 - pub fn get_committee_count_at_slot(&self, slot: Slot) -> Result { + pub fn get_committee_count_at_slot(&self, slot: Slot) -> Result { let cache = self.committee_cache_at_slot(slot)?; Ok(cache.committees_per_slot()) } @@ -800,7 +815,10 @@ impl BeaconState { /// Compute the number of committees in an entire epoch. 
/// /// Spec v0.12.1 - pub fn get_epoch_committee_count(&self, relative_epoch: RelativeEpoch) -> Result { + pub fn get_epoch_committee_count( + &self, + relative_epoch: RelativeEpoch, + ) -> Result { let cache = self.committee_cache(relative_epoch)?; Ok(cache.epoch_committee_count() as u64) } @@ -813,7 +831,7 @@ impl BeaconState { pub fn get_cached_active_validator_indices( &self, relative_epoch: RelativeEpoch, - ) -> Result<&[usize], Error> { + ) -> Result<&[usize], BeaconStateError> { let cache = self.committee_cache(relative_epoch)?; Ok(cache.active_validator_indices()) @@ -826,7 +844,7 @@ impl BeaconState { &self, epoch: Epoch, spec: &ChainSpec, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { if epoch >= self.compute_activation_exit_epoch(self.current_epoch(), spec)? { Err(BeaconStateError::EpochOutOfBounds) } else { @@ -839,7 +857,10 @@ impl BeaconState { /// Note: the indices are shuffled (i.e., not in ascending order). /// /// Returns an error if that epoch is not cached, or the cache is not initialized. - pub fn get_shuffling(&self, relative_epoch: RelativeEpoch) -> Result<&[usize], Error> { + pub fn get_shuffling( + &self, + relative_epoch: RelativeEpoch, + ) -> Result<&[usize], BeaconStateError> { let cache = self.committee_cache(relative_epoch)?; Ok(cache.shuffling()) @@ -854,14 +875,14 @@ impl BeaconState { &self, slot: Slot, index: CommitteeIndex, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { let epoch = slot.epoch(E::slots_per_epoch()); let relative_epoch = RelativeEpoch::from_epoch(self.current_epoch(), epoch)?; let cache = self.committee_cache(relative_epoch)?; cache .get_beacon_committee(slot, index) - .ok_or(Error::NoCommittee { slot, index }) + .ok_or(BeaconStateError::NoCommittee { slot, index }) } /// Get all of the Beacon committees at a given slot. 
@@ -872,7 +893,7 @@ impl BeaconState { pub fn get_beacon_committees_at_slot( &self, slot: Slot, - ) -> Result>, Error> { + ) -> Result>, BeaconStateError> { let cache = self.committee_cache_at_slot(slot)?; cache.get_beacon_committees_at_slot(slot) } @@ -885,7 +906,7 @@ impl BeaconState { pub fn get_beacon_committees_at_epoch( &self, relative_epoch: RelativeEpoch, - ) -> Result>, Error> { + ) -> Result>, BeaconStateError> { let cache = self.committee_cache(relative_epoch)?; cache.get_all_beacon_committees() } @@ -901,7 +922,7 @@ impl BeaconState { epoch: Epoch, block_root: Hash256, spec: &ChainSpec, - ) -> Result { + ) -> Result { let decision_slot = spec.proposer_shuffling_decision_slot::(epoch); if self.slot() <= decision_slot { Ok(block_root) @@ -917,7 +938,7 @@ impl BeaconState { &self, epoch: Epoch, head_block_root: Hash256, - ) -> Result { + ) -> Result { let decision_slot = epoch.saturating_sub(1u64).end_slot(E::slots_per_epoch()); if self.slot() <= decision_slot { Ok(head_block_root) @@ -937,11 +958,14 @@ impl BeaconState { &self, block_root: Hash256, spec: &ChainSpec, - ) -> Result { + ) -> Result { self.proposer_shuffling_decision_root_at_epoch(self.current_epoch(), block_root, spec) } - pub fn epoch_cache_decision_root(&self, block_root: Hash256) -> Result { + pub fn epoch_cache_decision_root( + &self, + block_root: Hash256, + ) -> Result { // Epoch cache decision root for the current epoch (N) is the block root at the end of epoch // N - 1. This is the same as the root that determines the next epoch attester shuffling. 
self.attester_shuffling_decision_root(block_root, RelativeEpoch::Next) @@ -958,7 +982,7 @@ impl BeaconState { &self, block_root: Hash256, relative_epoch: RelativeEpoch, - ) -> Result { + ) -> Result { let decision_slot = self.attester_shuffling_decision_slot(relative_epoch); if self.slot() == decision_slot { Ok(block_root) @@ -985,9 +1009,9 @@ impl BeaconState { indices: &[usize], seed: &[u8], spec: &ChainSpec, - ) -> Result { + ) -> Result { if indices.is_empty() { - return Err(Error::InsufficientValidators); + return Err(BeaconStateError::InsufficientValidators); } let max_effective_balance = spec.max_effective_balance_for_fork(self.fork_name_unchecked()); @@ -1005,10 +1029,10 @@ impl BeaconState { seed, spec.shuffle_round_count, ) - .ok_or(Error::UnableToShuffle)?; + .ok_or(BeaconStateError::UnableToShuffle)?; let candidate_index = *indices .get(shuffled_index) - .ok_or(Error::ShuffleIndexOutOfBounds(shuffled_index))?; + .ok_or(BeaconStateError::ShuffleIndexOutOfBounds(shuffled_index))?; let random_value = self.shuffling_random_value(i, seed)?; let effective_balance = self.get_effective_balance(candidate_index)?; if effective_balance.safe_mul(max_random_value)? @@ -1027,11 +1051,11 @@ impl BeaconState { seed: &[u8], indices: &[usize], spec: &ChainSpec, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { // Regardless of fork, we never support computing proposer indices for past epochs. let current_epoch = self.current_epoch(); if epoch < current_epoch { - return Err(Error::ComputeProposerIndicesPastEpoch { + return Err(BeaconStateError::ComputeProposerIndicesPastEpoch { current_epoch, request_epoch: epoch, }); @@ -1050,17 +1074,19 @@ impl BeaconState { if self.fork_name_unchecked().fulu_enabled() && epoch < current_epoch.safe_add(spec.min_seed_lookahead)? 
{ - return Err(Error::ComputeProposerIndicesInsufficientLookahead { - current_epoch, - request_epoch: epoch, - }); + return Err( + BeaconStateError::ComputeProposerIndicesInsufficientLookahead { + current_epoch, + request_epoch: epoch, + }, + ); } } else { // Pre-Fulu the situation is reversed, we *should not* compute proposer indices using // too much lookahead. To do so would make us vulnerable to changes in the proposer // indices caused by effective balance changes. if epoch >= current_epoch.safe_add(spec.min_seed_lookahead)? { - return Err(Error::ComputeProposerIndicesExcessiveLookahead { + return Err(BeaconStateError::ComputeProposerIndicesExcessiveLookahead { current_epoch, request_epoch: epoch, }); @@ -1083,7 +1109,7 @@ impl BeaconState { /// In Electra and later, the random value is a 16-bit integer stored in a `u64`. /// /// Prior to Electra, the random value is an 8-bit integer stored in a `u64`. - fn shuffling_random_value(&self, i: usize, seed: &[u8]) -> Result { + fn shuffling_random_value(&self, i: usize, seed: &[u8]) -> Result { if self.fork_name_unchecked().electra_enabled() { Self::shuffling_random_u16_electra(i, seed).map(u64::from) } else { @@ -1094,37 +1120,39 @@ impl BeaconState { /// Get a random byte from the given `seed`. /// /// Used by the proposer & sync committee selection functions. - fn shuffling_random_byte(i: usize, seed: &[u8]) -> Result { + fn shuffling_random_byte(i: usize, seed: &[u8]) -> Result { let mut preimage = seed.to_vec(); preimage.append(&mut int_to_bytes8(i.safe_div(32)? as u64)); let index = i.safe_rem(32)?; hash(&preimage) .get(index) .copied() - .ok_or(Error::ShuffleIndexOutOfBounds(index)) + .ok_or(BeaconStateError::ShuffleIndexOutOfBounds(index)) } /// Get two random bytes from the given `seed`. /// /// This is used in place of `shuffling_random_byte` from Electra onwards. 
- fn shuffling_random_u16_electra(i: usize, seed: &[u8]) -> Result { + fn shuffling_random_u16_electra(i: usize, seed: &[u8]) -> Result { let mut preimage = seed.to_vec(); preimage.append(&mut int_to_bytes8(i.safe_div(16)? as u64)); let offset = i.safe_rem(16)?.safe_mul(2)?; hash(&preimage) .get(offset..offset.safe_add(2)?) - .ok_or(Error::ShuffleIndexOutOfBounds(offset))? + .ok_or(BeaconStateError::ShuffleIndexOutOfBounds(offset))? .try_into() .map(u16::from_le_bytes) - .map_err(|_| Error::ShuffleIndexOutOfBounds(offset)) + .map_err(|_| BeaconStateError::ShuffleIndexOutOfBounds(offset)) } /// Convenience accessor for the `execution_payload_header` as an `ExecutionPayloadHeaderRef`. pub fn latest_execution_payload_header( &self, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { match self { - BeaconState::Base(_) | BeaconState::Altair(_) => Err(Error::IncorrectStateVariant), + BeaconState::Base(_) | BeaconState::Altair(_) => { + Err(BeaconStateError::IncorrectStateVariant) + } BeaconState::Bellatrix(state) => Ok(ExecutionPayloadHeaderRef::Bellatrix( &state.latest_execution_payload_header, )), @@ -1148,9 +1176,11 @@ impl BeaconState { pub fn latest_execution_payload_header_mut( &mut self, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { match self { - BeaconState::Base(_) | BeaconState::Altair(_) => Err(Error::IncorrectStateVariant), + BeaconState::Base(_) | BeaconState::Altair(_) => { + Err(BeaconStateError::IncorrectStateVariant) + } BeaconState::Bellatrix(state) => Ok(ExecutionPayloadHeaderRefMut::Bellatrix( &mut state.latest_execution_payload_header, )), @@ -1181,7 +1211,7 @@ impl BeaconState { index: CommitteeIndex, slot_signature: &Signature, spec: &ChainSpec, - ) -> Result { + ) -> Result { let committee = self.get_beacon_committee(slot, index)?; let modulo = std::cmp::max( 1, @@ -1192,7 +1222,7 @@ impl BeaconState { signature_hash .get(0..8) .and_then(|bytes| bytes.try_into().ok()) - .ok_or(Error::IsAggregatorOutOfBounds)?, + 
.ok_or(BeaconStateError::IsAggregatorOutOfBounds)?, ); Ok(signature_hash_int.safe_rem(modulo)? == 0) @@ -1201,13 +1231,17 @@ impl BeaconState { /// Returns the beacon proposer index for the `slot` in `self.current_epoch()`. /// /// Spec v1.6.0-alpha.1 - pub fn get_beacon_proposer_index(&self, slot: Slot, spec: &ChainSpec) -> Result { + pub fn get_beacon_proposer_index( + &self, + slot: Slot, + spec: &ChainSpec, + ) -> Result { // Proposer indices are only known for the current epoch, due to the dependence on the // effective balances of validators, which change at every epoch transition. let epoch = slot.epoch(E::slots_per_epoch()); // TODO(EIP-7917): Explore allowing this function to be called with a slot one epoch in the future. if epoch != self.current_epoch() { - return Err(Error::SlotOutOfBounds); + return Err(BeaconStateError::SlotOutOfBounds); } if let Ok(proposer_lookahead) = self.proposer_lookahead() { @@ -1215,7 +1249,7 @@ impl BeaconState { let index = slot.as_usize().safe_rem(E::slots_per_epoch() as usize)?; proposer_lookahead .get(index) - .ok_or(Error::ProposerLookaheadOutOfBounds { i: index }) + .ok_or(BeaconStateError::ProposerLookaheadOutOfBounds { i: index }) .map(|index| *index as usize) } else { // Pre-Fulu @@ -1233,7 +1267,7 @@ impl BeaconState { &self, epoch: Epoch, spec: &ChainSpec, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { // This isn't in the spec, but we remove the footgun that is requesting the current epoch // for a Fulu state. if let Ok(proposer_lookahead) = self.proposer_lookahead() @@ -1263,7 +1297,11 @@ impl BeaconState { /// Compute the seed to use for the beacon proposer selection at the given `slot`. 
/// /// Spec v0.12.1 - pub fn get_beacon_proposer_seed(&self, slot: Slot, spec: &ChainSpec) -> Result, Error> { + pub fn get_beacon_proposer_seed( + &self, + slot: Slot, + spec: &ChainSpec, + ) -> Result, BeaconStateError> { let epoch = slot.epoch(E::slots_per_epoch()); let mut preimage = self .get_seed(epoch, Domain::BeaconProposer, spec)? @@ -1278,7 +1316,7 @@ impl BeaconState { &self, epoch: Epoch, spec: &ChainSpec, - ) -> Result<&Arc>, Error> { + ) -> Result<&Arc>, BeaconStateError> { let sync_committee_period = epoch.sync_committee_period(spec)?; let current_sync_committee_period = self.current_epoch().sync_committee_period(spec)?; let next_sync_committee_period = current_sync_committee_period.safe_add(1)?; @@ -1288,7 +1326,7 @@ impl BeaconState { } else if sync_committee_period == next_sync_committee_period { self.next_sync_committee() } else { - Err(Error::SyncCommitteeNotKnown { + Err(BeaconStateError::SyncCommitteeNotKnown { current_epoch: self.current_epoch(), epoch, }) @@ -1299,7 +1337,7 @@ impl BeaconState { pub fn get_sync_committee_indices( &mut self, sync_committee: &SyncCommittee, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { self.update_pubkey_cache()?; sync_committee .pubkeys @@ -1307,13 +1345,16 @@ impl BeaconState { .map(|pubkey| { self.pubkey_cache() .get(pubkey) - .ok_or(Error::PubkeyCacheInconsistent) + .ok_or(BeaconStateError::PubkeyCacheInconsistent) }) .collect() } /// Compute the sync committee indices for the next sync committee. 
- fn get_next_sync_committee_indices(&self, spec: &ChainSpec) -> Result, Error> { + fn get_next_sync_committee_indices( + &self, + spec: &ChainSpec, + ) -> Result, BeaconStateError> { let epoch = self.current_epoch().safe_add(1)?; let active_validator_indices = self.get_active_validator_indices(epoch, spec)?; @@ -1336,10 +1377,10 @@ impl BeaconState { seed.as_slice(), spec.shuffle_round_count, ) - .ok_or(Error::UnableToShuffle)?; + .ok_or(BeaconStateError::UnableToShuffle)?; let candidate_index = *active_validator_indices .get(shuffled_index) - .ok_or(Error::ShuffleIndexOutOfBounds(shuffled_index))?; + .ok_or(BeaconStateError::ShuffleIndexOutOfBounds(shuffled_index))?; let random_value = self.shuffling_random_value(i, seed.as_slice())?; let effective_balance = self.get_validator(candidate_index)?.effective_balance; if effective_balance.safe_mul(max_random_value)? @@ -1353,7 +1394,10 @@ impl BeaconState { } /// Compute the next sync committee. - pub fn get_next_sync_committee(&self, spec: &ChainSpec) -> Result, Error> { + pub fn get_next_sync_committee( + &self, + spec: &ChainSpec, + ) -> Result, BeaconStateError> { let sync_committee_indices = self.get_next_sync_committee_indices(spec)?; let pubkeys = sync_committee_indices @@ -1362,7 +1406,7 @@ impl BeaconState { self.validators() .get(index) .map(|v| v.pubkey) - .ok_or(Error::UnknownValidator(index)) + .ok_or(BeaconStateError::UnknownValidator(index)) }) .collect::, _>>()?; let decompressed_pubkeys = pubkeys @@ -1386,7 +1430,7 @@ impl BeaconState { epoch: Epoch, validator_indices: &[u64], spec: &ChainSpec, - ) -> Result, Error>>, Error> { + ) -> Result, BeaconStateError>>, BeaconStateError> { let sync_committee = self.get_built_sync_committee(epoch, spec)?; Ok(validator_indices @@ -1421,7 +1465,7 @@ impl BeaconState { /// Safely obtains the index for latest block roots, given some `slot`. 
/// /// Spec v0.12.1 - fn get_latest_block_roots_index(&self, slot: Slot) -> Result { + fn get_latest_block_roots_index(&self, slot: Slot) -> Result { if slot < self.slot() && self.slot() <= slot.safe_add(self.block_roots().len() as u64)? { Ok(slot.as_usize().safe_rem(self.block_roots().len())?) } else { @@ -1441,7 +1485,7 @@ impl BeaconState { let i = self.get_latest_block_roots_index(slot)?; self.block_roots() .get(i) - .ok_or(Error::BlockRootsOutOfBounds(i)) + .ok_or(BeaconStateError::BlockRootsOutOfBounds(i)) } /// Return the block root at a recent `epoch`. @@ -1461,12 +1505,12 @@ impl BeaconState { *self .block_roots_mut() .get_mut(i) - .ok_or(Error::BlockRootsOutOfBounds(i))? = block_root; + .ok_or(BeaconStateError::BlockRootsOutOfBounds(i))? = block_root; Ok(()) } /// Fill `randao_mixes` with - pub fn fill_randao_mixes_with(&mut self, index_root: Hash256) -> Result<(), Error> { + pub fn fill_randao_mixes_with(&mut self, index_root: Hash256) -> Result<(), BeaconStateError> { *self.randao_mixes_mut() = Vector::from_elem(index_root)?; Ok(()) } @@ -1478,7 +1522,7 @@ impl BeaconState { &self, epoch: Epoch, allow_next_epoch: AllowNextEpoch, - ) -> Result { + ) -> Result { let current_epoch = self.current_epoch(); let len = E::EpochsPerHistoricalVector::to_u64(); @@ -1487,7 +1531,7 @@ impl BeaconState { { Ok(epoch.as_usize().safe_rem(len as usize)?) } else { - Err(Error::EpochOutOfBounds) + Err(BeaconStateError::EpochOutOfBounds) } } @@ -1503,7 +1547,11 @@ impl BeaconState { /// # Errors: /// /// See `Self::get_randao_mix`. - pub fn update_randao_mix(&mut self, epoch: Epoch, signature: &Signature) -> Result<(), Error> { + pub fn update_randao_mix( + &mut self, + epoch: Epoch, + signature: &Signature, + ) -> Result<(), BeaconStateError> { let i = epoch .as_usize() .safe_rem(E::EpochsPerHistoricalVector::to_usize())?; @@ -1513,36 +1561,36 @@ impl BeaconState { *self .randao_mixes_mut() .get_mut(i) - .ok_or(Error::RandaoMixesOutOfBounds(i))? 
= + .ok_or(BeaconStateError::RandaoMixesOutOfBounds(i))? = *self.get_randao_mix(epoch)? ^ signature_hash; Ok(()) } /// Return the randao mix at a recent ``epoch``. - pub fn get_randao_mix(&self, epoch: Epoch) -> Result<&Hash256, Error> { + pub fn get_randao_mix(&self, epoch: Epoch) -> Result<&Hash256, BeaconStateError> { let i = self.get_randao_mix_index(epoch, AllowNextEpoch::False)?; self.randao_mixes() .get(i) - .ok_or(Error::RandaoMixesOutOfBounds(i)) + .ok_or(BeaconStateError::RandaoMixesOutOfBounds(i)) } /// Set the randao mix at a recent ``epoch``. /// /// Spec v0.12.1 - pub fn set_randao_mix(&mut self, epoch: Epoch, mix: Hash256) -> Result<(), Error> { + pub fn set_randao_mix(&mut self, epoch: Epoch, mix: Hash256) -> Result<(), BeaconStateError> { let i = self.get_randao_mix_index(epoch, AllowNextEpoch::True)?; *self .randao_mixes_mut() .get_mut(i) - .ok_or(Error::RandaoMixesOutOfBounds(i))? = mix; + .ok_or(BeaconStateError::RandaoMixesOutOfBounds(i))? = mix; Ok(()) } /// Safely obtains the index for latest state roots, given some `slot`. /// /// Spec v0.12.1 - fn get_latest_state_roots_index(&self, slot: Slot) -> Result { + fn get_latest_state_roots_index(&self, slot: Slot) -> Result { if slot < self.slot() && self.slot() <= slot.safe_add(self.state_roots().len() as u64)? { Ok(slot.as_usize().safe_rem(self.state_roots().len())?) } else { @@ -1551,38 +1599,42 @@ impl BeaconState { } /// Gets the state root for some slot. - pub fn get_state_root(&self, slot: Slot) -> Result<&Hash256, Error> { + pub fn get_state_root(&self, slot: Slot) -> Result<&Hash256, BeaconStateError> { let i = self.get_latest_state_roots_index(slot)?; self.state_roots() .get(i) - .ok_or(Error::StateRootsOutOfBounds(i)) + .ok_or(BeaconStateError::StateRootsOutOfBounds(i)) } /// Gets the state root for the start slot of some epoch. 
- pub fn get_state_root_at_epoch_start(&self, epoch: Epoch) -> Result { + pub fn get_state_root_at_epoch_start(&self, epoch: Epoch) -> Result { self.get_state_root(epoch.start_slot(E::slots_per_epoch())) .copied() } /// Gets the oldest (earliest slot) state root. - pub fn get_oldest_state_root(&self) -> Result<&Hash256, Error> { + pub fn get_oldest_state_root(&self) -> Result<&Hash256, BeaconStateError> { let oldest_slot = self.slot().saturating_sub(self.state_roots().len()); self.get_state_root(oldest_slot) } /// Gets the oldest (earliest slot) block root. - pub fn get_oldest_block_root(&self) -> Result<&Hash256, Error> { + pub fn get_oldest_block_root(&self) -> Result<&Hash256, BeaconStateError> { let oldest_slot = self.slot().saturating_sub(self.block_roots().len()); self.get_block_root(oldest_slot) } /// Sets the latest state root for slot. - pub fn set_state_root(&mut self, slot: Slot, state_root: Hash256) -> Result<(), Error> { + pub fn set_state_root( + &mut self, + slot: Slot, + state_root: Hash256, + ) -> Result<(), BeaconStateError> { let i = self.get_latest_state_roots_index(slot)?; *self .state_roots_mut() .get_mut(i) - .ok_or(Error::StateRootsOutOfBounds(i))? = state_root; + .ok_or(BeaconStateError::StateRootsOutOfBounds(i))? = state_root; Ok(()) } @@ -1591,7 +1643,7 @@ impl BeaconState { &self, epoch: Epoch, allow_next_epoch: AllowNextEpoch, - ) -> Result { + ) -> Result { // We allow the slashings vector to be accessed at any cached epoch at or before // the current epoch, or the next epoch if `AllowNextEpoch::True` is passed. let current_epoch = self.current_epoch(); @@ -1602,7 +1654,7 @@ impl BeaconState { .as_usize() .safe_rem(E::EpochsPerSlashingsVector::to_usize())?) } else { - Err(Error::EpochOutOfBounds) + Err(BeaconStateError::EpochOutOfBounds) } } @@ -1612,21 +1664,21 @@ impl BeaconState { } /// Get the total slashed balances for some epoch. 
- pub fn get_slashings(&self, epoch: Epoch) -> Result { + pub fn get_slashings(&self, epoch: Epoch) -> Result { let i = self.get_slashings_index(epoch, AllowNextEpoch::False)?; self.slashings() .get(i) .copied() - .ok_or(Error::SlashingsOutOfBounds(i)) + .ok_or(BeaconStateError::SlashingsOutOfBounds(i)) } /// Set the total slashed balances for some epoch. - pub fn set_slashings(&mut self, epoch: Epoch, value: u64) -> Result<(), Error> { + pub fn set_slashings(&mut self, epoch: Epoch, value: u64) -> Result<(), BeaconStateError> { let i = self.get_slashings_index(epoch, AllowNextEpoch::True)?; *self .slashings_mut() .get_mut(i) - .ok_or(Error::SlashingsOutOfBounds(i))? = value; + .ok_or(BeaconStateError::SlashingsOutOfBounds(i))? = value; Ok(()) } @@ -1666,10 +1718,10 @@ impl BeaconState { &mut ExitCache, &mut EpochCache, ), - Error, + BeaconStateError, > { match self { - BeaconState::Base(_) => Err(Error::IncorrectStateVariant), + BeaconState::Base(_) => Err(BeaconStateError::IncorrectStateVariant), BeaconState::Altair(state) => Ok(( &mut state.validators, &mut state.balances, @@ -1744,18 +1796,21 @@ impl BeaconState { } /// Get the balance of a single validator. - pub fn get_balance(&self, validator_index: usize) -> Result { + pub fn get_balance(&self, validator_index: usize) -> Result { self.balances() .get(validator_index) - .ok_or(Error::BalancesOutOfBounds(validator_index)) + .ok_or(BeaconStateError::BalancesOutOfBounds(validator_index)) .copied() } /// Get a mutable reference to the balance of a single validator. - pub fn get_balance_mut(&mut self, validator_index: usize) -> Result<&mut u64, Error> { + pub fn get_balance_mut( + &mut self, + validator_index: usize, + ) -> Result<&mut u64, BeaconStateError> { self.balances_mut() .get_mut(validator_index) - .ok_or(Error::BalancesOutOfBounds(validator_index)) + .ok_or(BeaconStateError::BalancesOutOfBounds(validator_index)) } /// Generate a seed for the given `epoch`. 
@@ -1764,7 +1819,7 @@ impl BeaconState { epoch: Epoch, domain_type: Domain, spec: &ChainSpec, - ) -> Result { + ) -> Result { // Bypass the safe getter for RANDAO so we can gracefully handle the scenario where `epoch // == 0`. let mix = { @@ -1775,7 +1830,7 @@ impl BeaconState { let i_mod = i.as_usize().safe_rem(self.randao_mixes().len())?; self.randao_mixes() .get(i_mod) - .ok_or(Error::RandaoMixesOutOfBounds(i_mod))? + .ok_or(BeaconStateError::RandaoMixesOutOfBounds(i_mod))? }; let domain_bytes = int_to_bytes4(spec.get_domain_constant(domain_type)); let epoch_bytes = int_to_bytes8(epoch.as_u64()); @@ -1794,17 +1849,20 @@ impl BeaconState { } /// Safe indexer for the `validators` list. - pub fn get_validator(&self, validator_index: usize) -> Result<&Validator, Error> { + pub fn get_validator(&self, validator_index: usize) -> Result<&Validator, BeaconStateError> { self.validators() .get(validator_index) - .ok_or(Error::UnknownValidator(validator_index)) + .ok_or(BeaconStateError::UnknownValidator(validator_index)) } /// Safe mutator for the `validators` list. - pub fn get_validator_mut(&mut self, validator_index: usize) -> Result<&mut Validator, Error> { + pub fn get_validator_mut( + &mut self, + validator_index: usize, + ) -> Result<&mut Validator, BeaconStateError> { self.validators_mut() .get_mut(validator_index) - .ok_or(Error::UnknownValidator(validator_index)) + .ok_or(BeaconStateError::UnknownValidator(validator_index)) } /// Add a validator to the registry and return the validator index that was allocated for it. 
@@ -1814,7 +1872,7 @@ impl BeaconState { withdrawal_credentials: Hash256, amount: u64, spec: &ChainSpec, - ) -> Result { + ) -> Result { let index = self.validators().len(); let fork_name = self.fork_name_unchecked(); self.validators_mut().push(Validator::from_deposit( @@ -1846,7 +1904,7 @@ impl BeaconState { if pubkey_cache.len() == index { let success = pubkey_cache.insert(pubkey, index); if !success { - return Err(Error::PubkeyCacheInconsistent); + return Err(BeaconStateError::PubkeyCacheInconsistent); } } @@ -1857,14 +1915,14 @@ impl BeaconState { pub fn get_validator_cow( &mut self, validator_index: usize, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { self.validators_mut() .get_cow(validator_index) - .ok_or(Error::UnknownValidator(validator_index)) + .ok_or(BeaconStateError::UnknownValidator(validator_index)) } /// Return the effective balance for a validator with the given `validator_index`. - pub fn get_effective_balance(&self, validator_index: usize) -> Result { + pub fn get_effective_balance(&self, validator_index: usize) -> Result { self.get_validator(validator_index) .map(|v| v.effective_balance) } @@ -1872,20 +1930,27 @@ impl BeaconState { /// Get the inactivity score for a single validator. /// /// Will error if the state lacks an `inactivity_scores` field. - pub fn get_inactivity_score(&self, validator_index: usize) -> Result { + pub fn get_inactivity_score(&self, validator_index: usize) -> Result { self.inactivity_scores()? .get(validator_index) .copied() - .ok_or(Error::InactivityScoresOutOfBounds(validator_index)) + .ok_or(BeaconStateError::InactivityScoresOutOfBounds( + validator_index, + )) } /// Get a mutable reference to the inactivity score for a single validator. /// /// Will error if the state lacks an `inactivity_scores` field. 
- pub fn get_inactivity_score_mut(&mut self, validator_index: usize) -> Result<&mut u64, Error> { + pub fn get_inactivity_score_mut( + &mut self, + validator_index: usize, + ) -> Result<&mut u64, BeaconStateError> { self.inactivity_scores_mut()? .get_mut(validator_index) - .ok_or(Error::InactivityScoresOutOfBounds(validator_index)) + .ok_or(BeaconStateError::InactivityScoresOutOfBounds( + validator_index, + )) } /// Return the epoch at which an activation or exit triggered in ``epoch`` takes effect. @@ -1895,14 +1960,14 @@ impl BeaconState { &self, epoch: Epoch, spec: &ChainSpec, - ) -> Result { + ) -> Result { Ok(spec.compute_activation_exit_epoch(epoch)?) } /// Return the churn limit for the current epoch (number of validators who can leave per epoch). /// /// Uses the current epoch committee cache, and will error if it isn't initialized. - pub fn get_validator_churn_limit(&self, spec: &ChainSpec) -> Result { + pub fn get_validator_churn_limit(&self, spec: &ChainSpec) -> Result { Ok(std::cmp::max( spec.min_per_epoch_churn_limit, (self @@ -1915,7 +1980,7 @@ impl BeaconState { /// Return the activation churn limit for the current epoch (number of validators who can enter per epoch). /// /// Uses the current epoch committee cache, and will error if it isn't initialized. - pub fn get_activation_churn_limit(&self, spec: &ChainSpec) -> Result { + pub fn get_activation_churn_limit(&self, spec: &ChainSpec) -> Result { Ok(match self { BeaconState::Base(_) | BeaconState::Altair(_) @@ -1941,7 +2006,7 @@ impl BeaconState { &self, validator_index: usize, relative_epoch: RelativeEpoch, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { let cache = self.committee_cache(relative_epoch)?; Ok(cache.get_attestation_duties(validator_index)) @@ -1951,7 +2016,10 @@ impl BeaconState { /// /// This method should rarely be invoked because single-pass epoch processing keeps the total /// active balance cache up to date. 
- pub fn compute_total_active_balance_slow(&self, spec: &ChainSpec) -> Result { + pub fn compute_total_active_balance_slow( + &self, + spec: &ChainSpec, + ) -> Result { let current_epoch = self.current_epoch(); let mut total_active_balance = 0; @@ -1973,20 +2041,20 @@ impl BeaconState { /// the current committee cache is. /// /// Returns minimum `EFFECTIVE_BALANCE_INCREMENT`, to avoid div by 0. - pub fn get_total_active_balance(&self) -> Result { + pub fn get_total_active_balance(&self) -> Result { self.get_total_active_balance_at_epoch(self.current_epoch()) } /// Get the cached total active balance while checking that it is for the correct `epoch`. - pub fn get_total_active_balance_at_epoch(&self, epoch: Epoch) -> Result { + pub fn get_total_active_balance_at_epoch(&self, epoch: Epoch) -> Result { let (initialized_epoch, balance) = self .total_active_balance() - .ok_or(Error::TotalActiveBalanceCacheUninitialized)?; + .ok_or(BeaconStateError::TotalActiveBalanceCacheUninitialized)?; if initialized_epoch == epoch { Ok(balance) } else { - Err(Error::TotalActiveBalanceCacheInconsistent { + Err(BeaconStateError::TotalActiveBalanceCacheInconsistent { initialized_epoch, current_epoch: epoch, }) @@ -2006,7 +2074,10 @@ impl BeaconState { /// Build the total active balance cache for the current epoch if it is not already built. 
#[instrument(skip_all, level = "debug")] - pub fn build_total_active_balance_cache(&mut self, spec: &ChainSpec) -> Result<(), Error> { + pub fn build_total_active_balance_cache( + &mut self, + spec: &ChainSpec, + ) -> Result<(), BeaconStateError> { if self .get_total_active_balance_at_epoch(self.current_epoch()) .is_err() @@ -2020,7 +2091,7 @@ impl BeaconState { pub fn force_build_total_active_balance_cache( &mut self, spec: &ChainSpec, - ) -> Result<(), Error> { + ) -> Result<(), BeaconStateError> { let total_active_balance = self.compute_total_active_balance_slow(spec)?; *self.total_active_balance_mut() = Some((self.current_epoch(), total_active_balance)); Ok(()) @@ -2037,7 +2108,7 @@ impl BeaconState { epoch: Epoch, previous_epoch: Epoch, current_epoch: Epoch, - ) -> Result<&mut List, Error> { + ) -> Result<&mut List, BeaconStateError> { if epoch == current_epoch { match self { BeaconState::Base(_) => Err(BeaconStateError::IncorrectStateVariant), @@ -2067,7 +2138,7 @@ impl BeaconState { /// Build all caches (except the tree hash cache), if they need to be built. #[instrument(skip_all, level = "debug")] - pub fn build_caches(&mut self, spec: &ChainSpec) -> Result<(), Error> { + pub fn build_caches(&mut self, spec: &ChainSpec) -> Result<(), BeaconStateError> { self.build_all_committee_caches(spec)?; self.update_pubkey_cache()?; self.build_exit_cache(spec)?; @@ -2078,7 +2149,7 @@ impl BeaconState { /// Build all committee caches, if they need to be built. #[instrument(skip_all, level = "debug")] - pub fn build_all_committee_caches(&mut self, spec: &ChainSpec) -> Result<(), Error> { + pub fn build_all_committee_caches(&mut self, spec: &ChainSpec) -> Result<(), BeaconStateError> { self.build_committee_cache(RelativeEpoch::Previous, spec)?; self.build_committee_cache(RelativeEpoch::Current, spec)?; self.build_committee_cache(RelativeEpoch::Next, spec)?; @@ -2087,7 +2158,7 @@ impl BeaconState { /// Build the exit cache, if it needs to be built. 
#[instrument(skip_all, level = "debug")] - pub fn build_exit_cache(&mut self, spec: &ChainSpec) -> Result<(), Error> { + pub fn build_exit_cache(&mut self, spec: &ChainSpec) -> Result<(), BeaconStateError> { if self.exit_cache().check_initialized().is_err() { *self.exit_cache_mut() = ExitCache::new(self.validators(), spec)?; } @@ -2096,7 +2167,7 @@ impl BeaconState { /// Build the slashings cache if it needs to be built. #[instrument(skip_all, level = "debug")] - pub fn build_slashings_cache(&mut self) -> Result<(), Error> { + pub fn build_slashings_cache(&mut self) -> Result<(), BeaconStateError> { let latest_block_slot = self.latest_block_header().slot; if !self.slashings_cache().is_initialized(latest_block_slot) { *self.slashings_cache_mut() = SlashingsCache::new(latest_block_slot, self.validators()); @@ -2110,7 +2181,7 @@ impl BeaconState { } /// Drop all caches on the state. - pub fn drop_all_caches(&mut self) -> Result<(), Error> { + pub fn drop_all_caches(&mut self) -> Result<(), BeaconStateError> { self.drop_total_active_balance_cache(); self.drop_committee_cache(RelativeEpoch::Previous)?; self.drop_committee_cache(RelativeEpoch::Current)?; @@ -2138,7 +2209,7 @@ impl BeaconState { &mut self, relative_epoch: RelativeEpoch, spec: &ChainSpec, - ) -> Result<(), Error> { + ) -> Result<(), BeaconStateError> { let i = Self::committee_cache_index(relative_epoch); let is_initialized = self .committee_cache_at_index(i)? 
@@ -2159,7 +2230,7 @@ impl BeaconState { &mut self, relative_epoch: RelativeEpoch, spec: &ChainSpec, - ) -> Result<(), Error> { + ) -> Result<(), BeaconStateError> { let epoch = relative_epoch.into_epoch(self.current_epoch()); let i = Self::committee_cache_index(relative_epoch); @@ -2175,7 +2246,7 @@ impl BeaconState { &self, epoch: Epoch, spec: &ChainSpec, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { CommitteeCache::initialized(self, epoch, spec) } @@ -2185,7 +2256,7 @@ impl BeaconState { /// /// Note: this function will not build any new committee caches, nor will it update the total /// active balance cache. The total active balance cache must be updated separately. - pub fn advance_caches(&mut self) -> Result<(), Error> { + pub fn advance_caches(&mut self) -> Result<(), BeaconStateError> { self.committee_caches_mut().rotate_left(1); let next = Self::committee_cache_index(RelativeEpoch::Next); @@ -2204,27 +2275,33 @@ impl BeaconState { /// Get the committee cache for some `slot`. /// /// Return an error if the cache for the slot's epoch is not initialized. - fn committee_cache_at_slot(&self, slot: Slot) -> Result<&Arc, Error> { + fn committee_cache_at_slot( + &self, + slot: Slot, + ) -> Result<&Arc, BeaconStateError> { let epoch = slot.epoch(E::slots_per_epoch()); let relative_epoch = RelativeEpoch::from_epoch(self.current_epoch(), epoch)?; self.committee_cache(relative_epoch) } /// Get the committee cache at a given index. - fn committee_cache_at_index(&self, index: usize) -> Result<&Arc, Error> { + fn committee_cache_at_index( + &self, + index: usize, + ) -> Result<&Arc, BeaconStateError> { self.committee_caches() .get(index) - .ok_or(Error::CommitteeCachesOutOfBounds(index)) + .ok_or(BeaconStateError::CommitteeCachesOutOfBounds(index)) } /// Get a mutable reference to the committee cache at a given index. 
fn committee_cache_at_index_mut( &mut self, index: usize, - ) -> Result<&mut Arc, Error> { + ) -> Result<&mut Arc, BeaconStateError> { self.committee_caches_mut() .get_mut(index) - .ok_or(Error::CommitteeCachesOutOfBounds(index)) + .ok_or(BeaconStateError::CommitteeCachesOutOfBounds(index)) } /// Returns the cache for some `RelativeEpoch`. Returns an error if the cache has not been @@ -2232,19 +2309,24 @@ impl BeaconState { pub fn committee_cache( &self, relative_epoch: RelativeEpoch, - ) -> Result<&Arc, Error> { + ) -> Result<&Arc, BeaconStateError> { let i = Self::committee_cache_index(relative_epoch); let cache = self.committee_cache_at_index(i)?; if cache.is_initialized_at(relative_epoch.into_epoch(self.current_epoch())) { Ok(cache) } else { - Err(Error::CommitteeCacheUninitialized(Some(relative_epoch))) + Err(BeaconStateError::CommitteeCacheUninitialized(Some( + relative_epoch, + ))) } } /// Drops the cache, leaving it in an uninitialized state. - pub fn drop_committee_cache(&mut self, relative_epoch: RelativeEpoch) -> Result<(), Error> { + pub fn drop_committee_cache( + &mut self, + relative_epoch: RelativeEpoch, + ) -> Result<(), BeaconStateError> { *self.committee_cache_at_index_mut(Self::committee_cache_index(relative_epoch))? = Arc::new(CommitteeCache::default()); Ok(()) @@ -2255,7 +2337,7 @@ impl BeaconState { /// Adds all `pubkeys` from the `validators` which are not already in the cache. Will /// never re-add a pubkey. 
#[instrument(skip_all, level = "debug")] - pub fn update_pubkey_cache(&mut self) -> Result<(), Error> { + pub fn update_pubkey_cache(&mut self) -> Result<(), BeaconStateError> { let mut pubkey_cache = mem::take(self.pubkey_cache_mut()); let start_index = pubkey_cache.len(); @@ -2263,7 +2345,7 @@ impl BeaconState { let index = start_index.safe_add(i)?; let success = pubkey_cache.insert(validator.pubkey, index); if !success { - return Err(Error::PubkeyCacheInconsistent); + return Err(BeaconStateError::PubkeyCacheInconsistent); } } *self.pubkey_cache_mut() = pubkey_cache; @@ -2341,7 +2423,7 @@ impl BeaconState { /// /// Initialize the tree hash cache if it isn't already initialized. #[instrument(skip_all, level = "debug")] - pub fn update_tree_hash_cache<'a>(&'a mut self) -> Result { + pub fn update_tree_hash_cache<'a>(&'a mut self) -> Result { self.apply_pending_mutations()?; map_beacon_state_ref!(&'a _, self.to_ref(), |inner, cons| { let root = inner.tree_hash_root(); @@ -2353,7 +2435,7 @@ impl BeaconState { /// Compute the tree hash root of the validators using the tree hash cache. /// /// Initialize the tree hash cache if it isn't already initialized. - pub fn update_validators_tree_hash_cache(&mut self) -> Result { + pub fn update_validators_tree_hash_cache(&mut self) -> Result { self.validators_mut().apply_updates()?; Ok(self.validators().tree_hash_root()) } @@ -2364,7 +2446,7 @@ impl BeaconState { &self, previous_epoch: Epoch, val: &Validator, - ) -> Result { + ) -> Result { Ok(val.is_active_at(previous_epoch) || (val.slashed && previous_epoch.safe_add(Epoch::new(1))? < val.withdrawable_epoch)) } @@ -2388,7 +2470,7 @@ impl BeaconState { pub fn get_sync_committee_for_next_slot( &self, spec: &ChainSpec, - ) -> Result>, Error> { + ) -> Result>, BeaconStateError> { let next_slot_epoch = self .slot() .saturating_add(Slot::new(1)) @@ -2414,7 +2496,7 @@ impl BeaconState { // ******* Electra accessors ******* /// Return the churn limit for the current epoch. 
- pub fn get_balance_churn_limit(&self, spec: &ChainSpec) -> Result { + pub fn get_balance_churn_limit(&self, spec: &ChainSpec) -> Result { let total_active_balance = self.get_total_active_balance()?; let churn = std::cmp::max( spec.min_per_epoch_churn_limit_electra, @@ -2425,20 +2507,26 @@ impl BeaconState { } /// Return the churn limit for the current epoch dedicated to activations and exits. - pub fn get_activation_exit_churn_limit(&self, spec: &ChainSpec) -> Result { + pub fn get_activation_exit_churn_limit( + &self, + spec: &ChainSpec, + ) -> Result { Ok(std::cmp::min( spec.max_per_epoch_activation_exit_churn_limit, self.get_balance_churn_limit(spec)?, )) } - pub fn get_consolidation_churn_limit(&self, spec: &ChainSpec) -> Result { + pub fn get_consolidation_churn_limit(&self, spec: &ChainSpec) -> Result { self.get_balance_churn_limit(spec)? .safe_sub(self.get_activation_exit_churn_limit(spec)?) .map_err(Into::into) } - pub fn get_pending_balance_to_withdraw(&self, validator_index: usize) -> Result { + pub fn get_pending_balance_to_withdraw( + &self, + validator_index: usize, + ) -> Result { let mut pending_balance = 0; for withdrawal in self .pending_partial_withdrawals()? 
@@ -2456,11 +2544,11 @@ impl BeaconState { &mut self, validator_index: usize, spec: &ChainSpec, - ) -> Result<(), Error> { + ) -> Result<(), BeaconStateError> { let balance = self .balances_mut() .get_mut(validator_index) - .ok_or(Error::UnknownValidator(validator_index))?; + .ok_or(BeaconStateError::UnknownValidator(validator_index))?; if *balance > spec.min_activation_balance { let excess_balance = balance.safe_sub(spec.min_activation_balance)?; *balance = spec.min_activation_balance; @@ -2481,11 +2569,11 @@ impl BeaconState { &mut self, validator_index: usize, spec: &ChainSpec, - ) -> Result<(), Error> { + ) -> Result<(), BeaconStateError> { let validator = self .validators_mut() .get_mut(validator_index) - .ok_or(Error::UnknownValidator(validator_index))?; + .ok_or(BeaconStateError::UnknownValidator(validator_index))?; AsMut::<[u8; 32]>::as_mut(&mut validator.withdrawal_credentials)[0] = spec.compounding_withdrawal_prefix_byte; @@ -2497,7 +2585,7 @@ impl BeaconState { &mut self, exit_balance: u64, spec: &ChainSpec, - ) -> Result { + ) -> Result { let mut earliest_exit_epoch = std::cmp::max( self.earliest_exit_epoch()?, self.compute_activation_exit_epoch(self.current_epoch(), spec)?, @@ -2527,7 +2615,7 @@ impl BeaconState { | BeaconState::Altair(_) | BeaconState::Bellatrix(_) | BeaconState::Capella(_) - | BeaconState::Deneb(_) => Err(Error::IncorrectStateVariant), + | BeaconState::Deneb(_) => Err(BeaconStateError::IncorrectStateVariant), BeaconState::Electra(_) | BeaconState::Fulu(_) | BeaconState::Gloas(_) => { // Consume the balance and update state variables *self.exit_balance_to_consume_mut()? 
= @@ -2542,7 +2630,7 @@ impl BeaconState { &mut self, consolidation_balance: u64, spec: &ChainSpec, - ) -> Result { + ) -> Result { let mut earliest_consolidation_epoch = std::cmp::max( self.earliest_consolidation_epoch()?, self.compute_activation_exit_epoch(self.current_epoch(), spec)?, @@ -2574,7 +2662,7 @@ impl BeaconState { | BeaconState::Altair(_) | BeaconState::Bellatrix(_) | BeaconState::Capella(_) - | BeaconState::Deneb(_) => Err(Error::IncorrectStateVariant), + | BeaconState::Deneb(_) => Err(BeaconStateError::IncorrectStateVariant), BeaconState::Electra(_) | BeaconState::Fulu(_) | BeaconState::Gloas(_) => { // Consume the balance and update state variables. *self.consolidation_balance_to_consume_mut()? = @@ -2586,7 +2674,7 @@ impl BeaconState { } #[allow(clippy::arithmetic_side_effects)] - pub fn rebase_on(&mut self, base: &Self, spec: &ChainSpec) -> Result<(), Error> { + pub fn rebase_on(&mut self, base: &Self, spec: &ChainSpec) -> Result<(), BeaconStateError> { // Required for macros (which use type-hints internally). match (&mut *self, base) { @@ -2677,7 +2765,11 @@ impl BeaconState { Ok(()) } - pub fn rebase_caches_on(&mut self, base: &Self, spec: &ChainSpec) -> Result<(), Error> { + pub fn rebase_caches_on( + &mut self, + base: &Self, + spec: &ChainSpec, + ) -> Result<(), BeaconStateError> { // Use pubkey cache from `base` if it contains superior information (likely if our cache is // uninitialized). Be careful not to use a cache which has *more* validators than expected, // as other code expects `self.pubkey_cache().len() <= self.validators.len()`. 
@@ -2766,7 +2858,7 @@ impl BeaconState { } #[allow(clippy::arithmetic_side_effects)] - pub fn apply_pending_mutations(&mut self) -> Result<(), Error> { + pub fn apply_pending_mutations(&mut self) -> Result<(), BeaconStateError> { match self { Self::Base(inner) => { map_beacon_state_base_tree_list_fields!(inner, |_, x| { x.apply_updates() }) @@ -2796,43 +2888,43 @@ impl BeaconState { Ok(()) } - pub fn compute_current_sync_committee_proof(&self) -> Result, Error> { + pub fn compute_current_sync_committee_proof(&self) -> Result, BeaconStateError> { // Sync committees are top-level fields, subtract off the generalized indices // for the internal nodes. Result should be 22 or 23, the field offset of the committee // in the `BeaconState`: // https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/beacon-chain.md#beaconstate let field_gindex = if self.fork_name_unchecked().electra_enabled() { - light_client_update::CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA + CURRENT_SYNC_COMMITTEE_INDEX_ELECTRA } else { - light_client_update::CURRENT_SYNC_COMMITTEE_INDEX + CURRENT_SYNC_COMMITTEE_INDEX }; let field_index = field_gindex.safe_sub(self.num_fields_pow2())?; let leaves = self.get_beacon_state_leaves(); self.generate_proof(field_index, &leaves) } - pub fn compute_next_sync_committee_proof(&self) -> Result, Error> { + pub fn compute_next_sync_committee_proof(&self) -> Result, BeaconStateError> { // Sync committees are top-level fields, subtract off the generalized indices // for the internal nodes. 
Result should be 22 or 23, the field offset of the committee // in the `BeaconState`: // https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/beacon-chain.md#beaconstate let field_gindex = if self.fork_name_unchecked().electra_enabled() { - light_client_update::NEXT_SYNC_COMMITTEE_INDEX_ELECTRA + NEXT_SYNC_COMMITTEE_INDEX_ELECTRA } else { - light_client_update::NEXT_SYNC_COMMITTEE_INDEX + NEXT_SYNC_COMMITTEE_INDEX }; let field_index = field_gindex.safe_sub(self.num_fields_pow2())?; let leaves = self.get_beacon_state_leaves(); self.generate_proof(field_index, &leaves) } - pub fn compute_finalized_root_proof(&self) -> Result, Error> { + pub fn compute_finalized_root_proof(&self) -> Result, BeaconStateError> { // Finalized root is the right child of `finalized_checkpoint`, divide by two to get // the generalized index of `state.finalized_checkpoint`. let checkpoint_root_gindex = if self.fork_name_unchecked().electra_enabled() { - light_client_update::FINALIZED_ROOT_INDEX_ELECTRA + FINALIZED_ROOT_INDEX_ELECTRA } else { - light_client_update::FINALIZED_ROOT_INDEX + FINALIZED_ROOT_INDEX }; let checkpoint_gindex = checkpoint_root_gindex / 2; @@ -2855,9 +2947,9 @@ impl BeaconState { &self, field_index: usize, leaves: &[Hash256], - ) -> Result, Error> { + ) -> Result, BeaconStateError> { if field_index >= leaves.len() { - return Err(Error::IndexNotSupported(field_index)); + return Err(BeaconStateError::IndexNotSupported(field_index)); } let depth = self.num_fields_pow2().ilog2() as usize; @@ -2916,45 +3008,45 @@ impl BeaconState { } } -impl From for Error { - fn from(e: RelativeEpochError) -> Error { - Error::RelativeEpochError(e) +impl From for BeaconStateError { + fn from(e: RelativeEpochError) -> BeaconStateError { + BeaconStateError::RelativeEpochError(e) } } -impl From for Error { - fn from(e: ssz_types::Error) -> Error { - Error::SszTypesError(e) +impl From for BeaconStateError { + fn from(e: ssz_types::Error) -> BeaconStateError { + 
BeaconStateError::SszTypesError(e) } } -impl From for Error { - fn from(e: bls::Error) -> Error { - Error::BlsError(e) +impl From for BeaconStateError { + fn from(e: bls::Error) -> BeaconStateError { + BeaconStateError::BlsError(e) } } -impl From for Error { - fn from(e: tree_hash::Error) -> Error { - Error::TreeHashError(e) +impl From for BeaconStateError { + fn from(e: tree_hash::Error) -> BeaconStateError { + BeaconStateError::TreeHashError(e) } } -impl From for Error { - fn from(e: merkle_proof::MerkleTreeError) -> Error { - Error::MerkleTreeError(e) +impl From for BeaconStateError { + fn from(e: merkle_proof::MerkleTreeError) -> BeaconStateError { + BeaconStateError::MerkleTreeError(e) } } -impl From for Error { - fn from(e: ArithError) -> Error { - Error::ArithError(e) +impl From for BeaconStateError { + fn from(e: ArithError) -> BeaconStateError { + BeaconStateError::ArithError(e) } } -impl From for Error { - fn from(e: milhouse::Error) -> Self { - Self::MilhouseError(e) +impl From for BeaconStateError { + fn from(e: milhouse::Error) -> BeaconStateError { + BeaconStateError::MilhouseError(e) } } diff --git a/consensus/types/src/beacon_state/committee_cache.rs b/consensus/types/src/state/committee_cache.rs similarity index 93% rename from consensus/types/src/beacon_state/committee_cache.rs rename to consensus/types/src/state/committee_cache.rs index 408c269da5f..15f6a4cd376 100644 --- a/consensus/types/src/beacon_state/committee_cache.rs +++ b/consensus/types/src/state/committee_cache.rs @@ -1,17 +1,20 @@ #![allow(clippy::arithmetic_side_effects)] -use crate::*; -use core::num::NonZeroUsize; +use std::{num::NonZeroUsize, ops::Range, sync::Arc}; + use educe::Educe; use safe_arith::SafeArith; use serde::{Deserialize, Serialize}; use ssz::{Decode, DecodeError, Encode, four_byte_option_impl}; use ssz_derive::{Decode, Encode}; -use std::ops::Range; -use std::sync::Arc; use swap_or_not_shuffle::shuffle_list; -mod tests; +use crate::{ + 
attestation::{AttestationDuty, BeaconCommittee, CommitteeIndex}, + core::{ChainSpec, Domain, Epoch, EthSpec, Slot}, + state::{BeaconState, BeaconStateError}, + validator::Validator, +}; // Define "legacy" implementations of `Option`, `Option` which use four bytes // for encoding the union selector. @@ -66,7 +69,7 @@ impl CommitteeCache { state: &BeaconState, epoch: Epoch, spec: &ChainSpec, - ) -> Result, Error> { + ) -> Result, BeaconStateError> { // Check that the cache is being built for an in-range epoch. // // We allow caches to be constructed for historic epochs, per: @@ -77,23 +80,23 @@ impl CommitteeCache { .saturating_sub(1u64); if reqd_randao_epoch < state.min_randao_epoch() || epoch > state.current_epoch() + 1 { - return Err(Error::EpochOutOfBounds); + return Err(BeaconStateError::EpochOutOfBounds); } // May cause divide-by-zero errors. if E::slots_per_epoch() == 0 { - return Err(Error::ZeroSlotsPerEpoch); + return Err(BeaconStateError::ZeroSlotsPerEpoch); } // The use of `NonZeroUsize` reduces the maximum number of possible validators by one. if state.validators().len() == usize::MAX { - return Err(Error::TooManyValidators); + return Err(BeaconStateError::TooManyValidators); } let active_validator_indices = get_active_validator_indices(state.validators(), epoch); if active_validator_indices.is_empty() { - return Err(Error::InsufficientValidators); + return Err(BeaconStateError::InsufficientValidators); } let committees_per_slot = @@ -107,13 +110,14 @@ impl CommitteeCache { &seed[..], false, ) - .ok_or(Error::UnableToShuffle)?; + .ok_or(BeaconStateError::UnableToShuffle)?; let mut shuffling_positions = vec![<_>::default(); state.validators().len()]; for (i, &v) in shuffling.iter().enumerate() { *shuffling_positions .get_mut(v) - .ok_or(Error::ShuffleIndexOutOfBounds(v))? = NonZeroUsize::new(i + 1).into(); + .ok_or(BeaconStateError::ShuffleIndexOutOfBounds(v))? 
= + NonZeroUsize::new(i + 1).into(); } Ok(Arc::new(CommitteeCache { @@ -188,24 +192,24 @@ impl CommitteeCache { pub fn get_beacon_committees_at_slot( &self, slot: Slot, - ) -> Result>, Error> { + ) -> Result>, BeaconStateError> { if self.initialized_epoch.is_none() { - return Err(Error::CommitteeCacheUninitialized(None)); + return Err(BeaconStateError::CommitteeCacheUninitialized(None)); } (0..self.committees_per_slot()) .map(|index| { self.get_beacon_committee(slot, index) - .ok_or(Error::NoCommittee { slot, index }) + .ok_or(BeaconStateError::NoCommittee { slot, index }) }) .collect() } /// Returns all committees for `self.initialized_epoch`. - pub fn get_all_beacon_committees(&self) -> Result>, Error> { + pub fn get_all_beacon_committees(&self) -> Result>, BeaconStateError> { let initialized_epoch = self .initialized_epoch - .ok_or(Error::CommitteeCacheUninitialized(None))?; + .ok_or(BeaconStateError::CommitteeCacheUninitialized(None))?; initialized_epoch.slot_iter(self.slots_per_epoch).try_fold( Vec::with_capacity(self.epoch_committee_count()), diff --git a/consensus/types/src/epoch_cache.rs b/consensus/types/src/state/epoch_cache.rs similarity index 97% rename from consensus/types/src/epoch_cache.rs rename to consensus/types/src/state/epoch_cache.rs index 9956cb400a7..cdea0d143df 100644 --- a/consensus/types/src/epoch_cache.rs +++ b/consensus/types/src/state/epoch_cache.rs @@ -1,7 +1,12 @@ -use crate::{ActivationQueue, BeaconStateError, ChainSpec, Epoch, Hash256, Slot}; -use safe_arith::{ArithError, SafeArith}; use std::sync::Arc; +use safe_arith::{ArithError, SafeArith}; + +use crate::{ + core::{ChainSpec, Epoch, Hash256, Slot}, + state::{ActivationQueue, BeaconStateError}, +}; + /// Cache of values which are uniquely determined at the start of an epoch. 
/// /// The values are fixed with respect to the last block of the _prior_ epoch, which we refer diff --git a/consensus/types/src/beacon_state/exit_cache.rs b/consensus/types/src/state/exit_cache.rs similarity index 97% rename from consensus/types/src/beacon_state/exit_cache.rs rename to consensus/types/src/state/exit_cache.rs index 2828a6138c6..43809d1af0e 100644 --- a/consensus/types/src/beacon_state/exit_cache.rs +++ b/consensus/types/src/state/exit_cache.rs @@ -1,7 +1,13 @@ -use super::{BeaconStateError, ChainSpec, Epoch, Validator}; -use safe_arith::SafeArith; use std::cmp::Ordering; +use safe_arith::SafeArith; + +use crate::{ + core::{ChainSpec, Epoch}, + state::BeaconStateError, + validator::Validator, +}; + /// Map from exit epoch to the number of validators with that exit epoch. #[derive(Debug, Default, Clone, PartialEq)] pub struct ExitCache { diff --git a/consensus/types/src/historical_batch.rs b/consensus/types/src/state/historical_batch.rs similarity index 81% rename from consensus/types/src/historical_batch.rs rename to consensus/types/src/state/historical_batch.rs index 55377f24894..0167d64f62a 100644 --- a/consensus/types/src/historical_batch.rs +++ b/consensus/types/src/state/historical_batch.rs @@ -1,11 +1,16 @@ -use crate::test_utils::TestRandom; -use crate::*; - +use context_deserialize::context_deserialize; +use milhouse::Vector; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{EthSpec, Hash256}, + fork::ForkName, + test_utils::TestRandom, +}; + /// Historical block and state roots. 
/// /// Spec v0.12.1 @@ -26,6 +31,7 @@ pub struct HistoricalBatch { #[cfg(test)] mod tests { use super::*; + use crate::core::MainnetEthSpec; pub type FoundationHistoricalBatch = HistoricalBatch; diff --git a/consensus/types/src/historical_summary.rs b/consensus/types/src/state/historical_summary.rs similarity index 87% rename from consensus/types/src/historical_summary.rs rename to consensus/types/src/state/historical_summary.rs index dc147ad0428..f520e464837 100644 --- a/consensus/types/src/historical_summary.rs +++ b/consensus/types/src/state/historical_summary.rs @@ -1,13 +1,18 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{BeaconState, EthSpec, ForkName, Hash256}; use compare_fields::CompareFields; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use crate::{ + core::{EthSpec, Hash256}, + fork::ForkName, + state::BeaconState, + test_utils::TestRandom, +}; + /// `HistoricalSummary` matches the components of the phase0 `HistoricalBatch` /// making the two hash_tree_root-compatible. This struct is introduced into the beacon state /// in the Capella hard fork. diff --git a/consensus/types/src/beacon_state/iter.rs b/consensus/types/src/state/iter.rs similarity index 96% rename from consensus/types/src/beacon_state/iter.rs rename to consensus/types/src/state/iter.rs index d99c769e402..d761a6bd859 100644 --- a/consensus/types/src/beacon_state/iter.rs +++ b/consensus/types/src/state/iter.rs @@ -1,4 +1,7 @@ -use crate::*; +use crate::{ + core::{EthSpec, Hash256, Slot}, + state::{BeaconState, BeaconStateError}, +}; /// Returns an iterator across the past block roots of `state` in descending slot-order. 
/// @@ -28,7 +31,7 @@ impl<'a, E: EthSpec> BlockRootsIter<'a, E> { } impl Iterator for BlockRootsIter<'_, E> { - type Item = Result<(Slot, Hash256), Error>; + type Item = Result<(Slot, Hash256), BeaconStateError>; fn next(&mut self) -> Option { if self.prev > self.genesis_slot diff --git a/consensus/types/src/state/mod.rs b/consensus/types/src/state/mod.rs new file mode 100644 index 00000000000..309796d3592 --- /dev/null +++ b/consensus/types/src/state/mod.rs @@ -0,0 +1,35 @@ +mod activation_queue; +mod balance; +mod beacon_state; +#[macro_use] +mod committee_cache; +mod epoch_cache; +mod exit_cache; +mod historical_batch; +mod historical_summary; +mod iter; +mod progressive_balances_cache; +mod pubkey_cache; +mod slashings_cache; + +pub use activation_queue::ActivationQueue; +pub use balance::Balance; +pub use beacon_state::{ + BeaconState, BeaconStateAltair, BeaconStateBase, BeaconStateBellatrix, BeaconStateCapella, + BeaconStateDeneb, BeaconStateElectra, BeaconStateError, BeaconStateFulu, BeaconStateGloas, + BeaconStateHash, BeaconStateRef, CACHED_EPOCHS, +}; +pub use committee_cache::{ + CommitteeCache, compute_committee_index_in_epoch, compute_committee_range_in_epoch, + epoch_committee_count, get_active_validator_indices, +}; +pub use epoch_cache::{EpochCache, EpochCacheError, EpochCacheKey}; +pub use exit_cache::ExitCache; +pub use historical_batch::HistoricalBatch; +pub use historical_summary::HistoricalSummary; +pub use iter::BlockRootsIter; +pub use progressive_balances_cache::{ + EpochTotalBalances, ProgressiveBalancesCache, is_progressive_balances_enabled, +}; +pub use pubkey_cache::PubkeyCache; +pub use slashings_cache::SlashingsCache; diff --git a/consensus/types/src/beacon_state/progressive_balances_cache.rs b/consensus/types/src/state/progressive_balances_cache.rs similarity index 98% rename from consensus/types/src/beacon_state/progressive_balances_cache.rs rename to consensus/types/src/state/progressive_balances_cache.rs index 
67d1155dbf1..1e4c311f9a2 100644 --- a/consensus/types/src/beacon_state/progressive_balances_cache.rs +++ b/consensus/types/src/state/progressive_balances_cache.rs @@ -1,14 +1,16 @@ -use crate::beacon_state::balance::Balance; +#[cfg(feature = "arbitrary")] +use arbitrary::Arbitrary; +use safe_arith::SafeArith; + use crate::{ - BeaconState, BeaconStateError, ChainSpec, Epoch, EthSpec, ParticipationFlags, - consts::altair::{ + attestation::ParticipationFlags, + core::consts::altair::{ NUM_FLAG_INDICES, TIMELY_HEAD_FLAG_INDEX, TIMELY_SOURCE_FLAG_INDEX, TIMELY_TARGET_FLAG_INDEX, }, + core::{ChainSpec, Epoch, EthSpec}, + state::{Balance, BeaconState, BeaconStateError}, }; -#[cfg(feature = "arbitrary")] -use arbitrary::Arbitrary; -use safe_arith::SafeArith; /// This cache keeps track of the accumulated target attestation balance for the current & previous /// epochs. The cached values can be utilised by fork choice to calculate unrealized justification diff --git a/consensus/types/src/beacon_state/pubkey_cache.rs b/consensus/types/src/state/pubkey_cache.rs similarity index 98% rename from consensus/types/src/beacon_state/pubkey_cache.rs rename to consensus/types/src/state/pubkey_cache.rs index 85ed00340d7..e62fafb53a6 100644 --- a/consensus/types/src/beacon_state/pubkey_cache.rs +++ b/consensus/types/src/state/pubkey_cache.rs @@ -1,4 +1,4 @@ -use crate::*; +use bls::PublicKeyBytes; use rpds::HashTrieMapSync as HashTrieMap; type ValidatorIndex = usize; diff --git a/consensus/types/src/beacon_state/slashings_cache.rs b/consensus/types/src/state/slashings_cache.rs similarity index 96% rename from consensus/types/src/beacon_state/slashings_cache.rs rename to consensus/types/src/state/slashings_cache.rs index 6530f795e9f..b6ed583df89 100644 --- a/consensus/types/src/beacon_state/slashings_cache.rs +++ b/consensus/types/src/state/slashings_cache.rs @@ -1,8 +1,9 @@ -use crate::{BeaconStateError, Slot, Validator}; #[cfg(feature = "arbitrary")] use arbitrary::Arbitrary; use 
rpds::HashTrieSetSync as HashTrieSet; +use crate::{core::Slot, state::BeaconStateError, validator::Validator}; + /// Persistent (cheap to clone) cache of all slashed validator indices. #[cfg_attr(feature = "arbitrary", derive(Arbitrary))] #[derive(Debug, Default, Clone, PartialEq)] diff --git a/consensus/types/src/contribution_and_proof.rs b/consensus/types/src/sync_committee/contribution_and_proof.rs similarity index 88% rename from consensus/types/src/contribution_and_proof.rs rename to consensus/types/src/sync_committee/contribution_and_proof.rs index 4d70cd1f8a0..2a344b89dee 100644 --- a/consensus/types/src/contribution_and_proof.rs +++ b/consensus/types/src/sync_committee/contribution_and_proof.rs @@ -1,14 +1,17 @@ -use super::{ - ChainSpec, EthSpec, Fork, ForkName, Hash256, SecretKey, Signature, SignedRoot, - SyncCommitteeContribution, SyncSelectionProof, -}; -use crate::context_deserialize; -use crate::test_utils::TestRandom; +use bls::{SecretKey, Signature}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{ChainSpec, EthSpec, Hash256, SignedRoot}, + fork::{Fork, ForkName}, + sync_committee::{SyncCommitteeContribution, SyncSelectionProof}, + test_utils::TestRandom, +}; + /// A Validators aggregate sync committee contribution and selection proof. 
#[cfg_attr( feature = "arbitrary", diff --git a/consensus/types/src/sync_committee/mod.rs b/consensus/types/src/sync_committee/mod.rs new file mode 100644 index 00000000000..5a75975fe0a --- /dev/null +++ b/consensus/types/src/sync_committee/mod.rs @@ -0,0 +1,25 @@ +mod contribution_and_proof; +mod signed_contribution_and_proof; +mod sync_aggregate; +mod sync_aggregator_selection_data; +mod sync_committee; +mod sync_committee_contribution; +mod sync_committee_message; +mod sync_committee_subscription; +mod sync_duty; +mod sync_selection_proof; +mod sync_subnet_id; + +pub use contribution_and_proof::ContributionAndProof; +pub use signed_contribution_and_proof::SignedContributionAndProof; +pub use sync_aggregate::{Error as SyncAggregateError, SyncAggregate}; +pub use sync_aggregator_selection_data::SyncAggregatorSelectionData; +pub use sync_committee::{Error as SyncCommitteeError, SyncCommittee}; +pub use sync_committee_contribution::{ + Error as SyncCommitteeContributionError, SyncCommitteeContribution, SyncContributionData, +}; +pub use sync_committee_message::SyncCommitteeMessage; +pub use sync_committee_subscription::SyncCommitteeSubscription; +pub use sync_duty::SyncDuty; +pub use sync_selection_proof::SyncSelectionProof; +pub use sync_subnet_id::{SyncSubnetId, sync_subnet_id_to_string}; diff --git a/consensus/types/src/signed_contribution_and_proof.rs b/consensus/types/src/sync_committee/signed_contribution_and_proof.rs similarity index 87% rename from consensus/types/src/signed_contribution_and_proof.rs rename to consensus/types/src/sync_committee/signed_contribution_and_proof.rs index 51c453d32ff..0027003b9f3 100644 --- a/consensus/types/src/signed_contribution_and_proof.rs +++ b/consensus/types/src/sync_committee/signed_contribution_and_proof.rs @@ -1,14 +1,17 @@ -use super::{ - ChainSpec, ContributionAndProof, Domain, EthSpec, Fork, ForkName, Hash256, SecretKey, - Signature, SignedRoot, SyncCommitteeContribution, SyncSelectionProof, -}; -use 
crate::context_deserialize; -use crate::test_utils::TestRandom; +use bls::{SecretKey, Signature}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{ChainSpec, Domain, EthSpec, Hash256, SignedRoot}, + fork::{Fork, ForkName}, + sync_committee::{ContributionAndProof, SyncCommitteeContribution, SyncSelectionProof}, + test_utils::TestRandom, +}; + /// A Validators signed contribution proof to publish on the `sync_committee_contribution_and_proof` /// gossipsub topic. #[cfg_attr( diff --git a/consensus/types/src/sync_aggregate.rs b/consensus/types/src/sync_committee/sync_aggregate.rs similarity index 91% rename from consensus/types/src/sync_aggregate.rs rename to consensus/types/src/sync_committee/sync_aggregate.rs index ba6d840a526..e5848aa22ce 100644 --- a/consensus/types/src/sync_aggregate.rs +++ b/consensus/types/src/sync_committee/sync_aggregate.rs @@ -1,14 +1,20 @@ -use crate::consts::altair::SYNC_COMMITTEE_SUBNET_COUNT; -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{AggregateSignature, BitVector, EthSpec, ForkName, SyncCommitteeContribution}; +use bls::AggregateSignature; +use context_deserialize::context_deserialize; use educe::Educe; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; +use ssz_types::BitVector; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{EthSpec, consts::altair::SYNC_COMMITTEE_SUBNET_COUNT}, + fork::ForkName, + sync_committee::SyncCommitteeContribution, + test_utils::TestRandom, +}; + #[derive(Debug, PartialEq)] pub enum Error { SszTypesError(ssz_types::Error), diff --git a/consensus/types/src/sync_aggregator_selection_data.rs b/consensus/types/src/sync_committee/sync_aggregator_selection_data.rs similarity index 82% rename from 
consensus/types/src/sync_aggregator_selection_data.rs rename to consensus/types/src/sync_committee/sync_aggregator_selection_data.rs index a280369fea3..e905ca036b3 100644 --- a/consensus/types/src/sync_aggregator_selection_data.rs +++ b/consensus/types/src/sync_committee/sync_aggregator_selection_data.rs @@ -1,11 +1,15 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{ForkName, SignedRoot, Slot}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{SignedRoot, Slot}, + fork::ForkName, + test_utils::TestRandom, +}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Clone, Serialize, Deserialize, Hash, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/sync_committee.rs b/consensus/types/src/sync_committee/sync_committee.rs similarity index 95% rename from consensus/types/src/sync_committee.rs rename to consensus/types/src/sync_committee/sync_committee.rs index a9fde425540..54484118002 100644 --- a/consensus/types/src/sync_committee.rs +++ b/consensus/types/src/sync_committee/sync_committee.rs @@ -1,14 +1,16 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{EthSpec, FixedVector, ForkName, SyncSubnetId}; +use std::collections::HashMap; + use bls::PublicKeyBytes; +use context_deserialize::context_deserialize; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; -use std::collections::HashMap; +use ssz_types::FixedVector; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::EthSpec, fork::ForkName, sync_committee::SyncSubnetId, test_utils::TestRandom}; + #[derive(Debug, PartialEq)] pub enum Error { ArithError(ArithError), diff --git a/consensus/types/src/sync_committee_contribution.rs 
b/consensus/types/src/sync_committee/sync_committee_contribution.rs similarity index 93% rename from consensus/types/src/sync_committee_contribution.rs rename to consensus/types/src/sync_committee/sync_committee_contribution.rs index db22a3bdbc8..09376fbe5c0 100644 --- a/consensus/types/src/sync_committee_contribution.rs +++ b/consensus/types/src/sync_committee/sync_committee_contribution.rs @@ -1,12 +1,18 @@ -use super::{AggregateSignature, EthSpec, ForkName, SignedRoot}; -use crate::context_deserialize; -use crate::slot_data::SlotData; -use crate::{BitVector, Hash256, Slot, SyncCommitteeMessage, test_utils::TestRandom}; +use bls::AggregateSignature; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; +use ssz_types::BitVector; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{EthSpec, Hash256, SignedRoot, Slot, SlotData}, + fork::ForkName, + sync_committee::SyncCommitteeMessage, + test_utils::TestRandom, +}; + #[derive(Debug, PartialEq)] pub enum Error { SszTypesError(ssz_types::Error), diff --git a/consensus/types/src/sync_committee_message.rs b/consensus/types/src/sync_committee/sync_committee_message.rs similarity index 88% rename from consensus/types/src/sync_committee_message.rs rename to consensus/types/src/sync_committee/sync_committee_message.rs index d5bb7250bb4..ed42555c43f 100644 --- a/consensus/types/src/sync_committee_message.rs +++ b/consensus/types/src/sync_committee/sync_committee_message.rs @@ -1,14 +1,16 @@ -use crate::context_deserialize; -use crate::slot_data::SlotData; -use crate::test_utils::TestRandom; -use crate::{ - ChainSpec, Domain, EthSpec, Fork, ForkName, Hash256, SecretKey, Signature, SignedRoot, Slot, -}; +use bls::{SecretKey, Signature}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use 
crate::{ + core::{ChainSpec, Domain, EthSpec, Hash256, SignedRoot, Slot, SlotData}, + fork::{Fork, ForkName}, + test_utils::TestRandom, +}; + /// The data upon which a `SyncCommitteeContribution` is based. #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom)] diff --git a/consensus/types/src/sync_committee_subscription.rs b/consensus/types/src/sync_committee/sync_committee_subscription.rs similarity index 96% rename from consensus/types/src/sync_committee_subscription.rs rename to consensus/types/src/sync_committee/sync_committee_subscription.rs index 8e040279d73..6365b015dd2 100644 --- a/consensus/types/src/sync_committee_subscription.rs +++ b/consensus/types/src/sync_committee/sync_committee_subscription.rs @@ -1,7 +1,8 @@ -use crate::Epoch; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; +use crate::core::Epoch; + /// A sync committee subscription created when a validator subscribes to sync committee subnets to perform /// sync committee duties. 
#[derive(PartialEq, Debug, Serialize, Deserialize, Clone, Encode, Decode)] diff --git a/consensus/types/src/sync_duty.rs b/consensus/types/src/sync_committee/sync_duty.rs similarity index 96% rename from consensus/types/src/sync_duty.rs rename to consensus/types/src/sync_committee/sync_duty.rs index 59fbc960db5..773cc008f9f 100644 --- a/consensus/types/src/sync_duty.rs +++ b/consensus/types/src/sync_committee/sync_duty.rs @@ -1,8 +1,13 @@ -use crate::{EthSpec, SyncCommittee, SyncSubnetId}; +use std::collections::HashSet; + use bls::PublicKeyBytes; use safe_arith::ArithError; use serde::{Deserialize, Serialize}; -use std::collections::HashSet; + +use crate::{ + core::EthSpec, + sync_committee::{SyncCommittee, SyncSubnetId}, +}; #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct SyncDuty { diff --git a/consensus/types/src/sync_selection_proof.rs b/consensus/types/src/sync_committee/sync_selection_proof.rs similarity index 92% rename from consensus/types/src/sync_selection_proof.rs rename to consensus/types/src/sync_committee/sync_selection_proof.rs index b1e9e8186f5..7efc6c4c760 100644 --- a/consensus/types/src/sync_selection_proof.rs +++ b/consensus/types/src/sync_committee/sync_selection_proof.rs @@ -1,16 +1,20 @@ -use crate::consts::altair::{ - SYNC_COMMITTEE_SUBNET_COUNT, TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE, -}; -use crate::{ - ChainSpec, Domain, EthSpec, Fork, Hash256, PublicKey, SecretKey, Signature, SignedRoot, Slot, - SyncAggregatorSelectionData, -}; +use std::cmp; + +use bls::{PublicKey, SecretKey, Signature}; use ethereum_hashing::hash; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz_types::typenum::Unsigned; -use std::cmp; + +use crate::{ + core::{ + ChainSpec, Domain, EthSpec, Hash256, SignedRoot, Slot, + consts::altair::{SYNC_COMMITTEE_SUBNET_COUNT, TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE}, + }, + fork::Fork, + sync_committee::SyncAggregatorSelectionData, +}; 
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] diff --git a/consensus/types/src/sync_subnet_id.rs b/consensus/types/src/sync_committee/sync_subnet_id.rs similarity index 92% rename from consensus/types/src/sync_subnet_id.rs rename to consensus/types/src/sync_committee/sync_subnet_id.rs index 3d0d853fcaa..fb581461785 100644 --- a/consensus/types/src/sync_subnet_id.rs +++ b/consensus/types/src/sync_committee/sync_subnet_id.rs @@ -1,13 +1,16 @@ //! Identifies each sync committee subnet by an integer identifier. -use crate::EthSpec; -use crate::consts::altair::SYNC_COMMITTEE_SUBNET_COUNT; +use std::{ + collections::HashSet, + fmt::{self, Display}, + ops::{Deref, DerefMut}, + sync::LazyLock, +}; + use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz_types::typenum::Unsigned; -use std::collections::HashSet; -use std::fmt::{self, Display}; -use std::ops::{Deref, DerefMut}; -use std::sync::LazyLock; + +use crate::core::{EthSpec, consts::altair::SYNC_COMMITTEE_SUBNET_COUNT}; static SYNC_SUBNET_ID_TO_STRING: LazyLock> = LazyLock::new(|| { let mut v = Vec::with_capacity(SYNC_COMMITTEE_SUBNET_COUNT as usize); diff --git a/consensus/types/src/test_utils/generate_deterministic_keypairs.rs b/consensus/types/src/test_utils/generate_deterministic_keypairs.rs index f30afda257e..5ccd748c25c 100644 --- a/consensus/types/src/test_utils/generate_deterministic_keypairs.rs +++ b/consensus/types/src/test_utils/generate_deterministic_keypairs.rs @@ -1,7 +1,8 @@ -use crate::*; +use std::path::PathBuf; + +use bls::Keypair; use eth2_interop_keypairs::{keypair, keypairs_from_yaml_file}; use rayon::prelude::*; -use std::path::PathBuf; use tracing::debug; /// Generates `validator_count` keypairs where the secret key is derived solely from the index of diff --git a/consensus/types/src/test_utils/generate_random_block_and_blobs.rs 
b/consensus/types/src/test_utils/generate_random_block_and_blobs.rs index 8f4908291ee..cf7b5df891a 100644 --- a/consensus/types/src/test_utils/generate_random_block_and_blobs.rs +++ b/consensus/types/src/test_utils/generate_random_block_and_blobs.rs @@ -1,11 +1,16 @@ -use rand::Rng; - +use bls::Signature; use kzg::{KzgCommitment, KzgProof}; +use rand::Rng; -use crate::beacon_block_body::KzgCommitments; -use crate::*; - -use super::*; +use crate::{ + block::{BeaconBlock, SignedBeaconBlock}, + core::{EthSpec, MainnetEthSpec}, + data::{Blob, BlobSidecar, BlobsList}, + execution::FullPayload, + fork::{ForkName, map_fork_name}, + kzg_ext::{KzgCommitments, KzgProofs}, + test_utils::TestRandom, +}; type BlobsBundle = (KzgCommitments, KzgProofs, BlobsList); @@ -73,6 +78,7 @@ pub fn generate_blobs(n_blobs: usize) -> Result, Stri mod test { use super::*; use rand::rng; + use ssz_types::FixedVector; #[test] fn test_verify_blob_inclusion_proof() { diff --git a/consensus/types/src/test_utils/mod.rs b/consensus/types/src/test_utils/mod.rs index 37d58d43420..c4409b43924 100644 --- a/consensus/types/src/test_utils/mod.rs +++ b/consensus/types/src/test_utils/mod.rs @@ -1,17 +1,5 @@ #![allow(clippy::arithmetic_side_effects)] -use std::fmt::Debug; - -pub use rand::{RngCore, SeedableRng}; -pub use rand_xorshift::XorShiftRng; - -pub use generate_deterministic_keypairs::generate_deterministic_keypair; -pub use generate_deterministic_keypairs::generate_deterministic_keypairs; -pub use generate_deterministic_keypairs::load_keypairs_from_yaml; -use ssz::{Decode, Encode, ssz_encode}; -pub use test_random::{TestRandom, test_random_instance}; -use tree_hash::TreeHash; - #[macro_use] mod macros; mod generate_deterministic_keypairs; @@ -19,6 +7,18 @@ mod generate_deterministic_keypairs; mod generate_random_block_and_blobs; mod test_random; +pub use generate_deterministic_keypairs::generate_deterministic_keypair; +pub use generate_deterministic_keypairs::generate_deterministic_keypairs; +pub use 
generate_deterministic_keypairs::load_keypairs_from_yaml; +pub use test_random::{TestRandom, test_random_instance}; + +pub use rand::{RngCore, SeedableRng}; +pub use rand_xorshift::XorShiftRng; + +use ssz::{Decode, Encode, ssz_encode}; +use std::fmt::Debug; +use tree_hash::TreeHash; + pub fn test_ssz_tree_hash_pair(v1: &T, v2: &U) where T: TreeHash + Encode + Decode + Debug + PartialEq, diff --git a/consensus/types/src/test_utils/test_random/address.rs b/consensus/types/src/test_utils/test_random/address.rs index 421801ce53c..2f601cb91ec 100644 --- a/consensus/types/src/test_utils/test_random/address.rs +++ b/consensus/types/src/test_utils/test_random/address.rs @@ -1,7 +1,7 @@ -use super::*; +use crate::{core::Address, test_utils::TestRandom}; impl TestRandom for Address { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { let mut key_bytes = vec![0; 20]; rng.fill_bytes(&mut key_bytes); Address::from_slice(&key_bytes[..]) diff --git a/consensus/types/src/test_utils/test_random/aggregate_signature.rs b/consensus/types/src/test_utils/test_random/aggregate_signature.rs index 772f2844313..f9f3dd95677 100644 --- a/consensus/types/src/test_utils/test_random/aggregate_signature.rs +++ b/consensus/types/src/test_utils/test_random/aggregate_signature.rs @@ -1,7 +1,9 @@ -use super::*; +use bls::{AggregateSignature, Signature}; + +use crate::test_utils::TestRandom; impl TestRandom for AggregateSignature { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { let signature = Signature::random_for_test(rng); let mut aggregate_signature = AggregateSignature::infinity(); aggregate_signature.add_assign(&signature); diff --git a/consensus/types/src/test_utils/test_random/bitfield.rs b/consensus/types/src/test_utils/test_random/bitfield.rs index e335ac7fe8b..3bc0d37c620 100644 --- a/consensus/types/src/test_utils/test_random/bitfield.rs +++ 
b/consensus/types/src/test_utils/test_random/bitfield.rs @@ -1,8 +1,10 @@ -use super::*; use smallvec::smallvec; +use ssz_types::{BitList, BitVector, typenum::Unsigned}; + +use crate::test_utils::TestRandom; impl TestRandom for BitList { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { let initial_len = std::cmp::max(1, N::to_usize().div_ceil(8)); let mut raw_bytes = smallvec![0; initial_len]; rng.fill_bytes(&mut raw_bytes); @@ -23,7 +25,7 @@ impl TestRandom for BitList { } impl TestRandom for BitVector { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { let mut raw_bytes = smallvec![0; std::cmp::max(1, N::to_usize().div_ceil(8))]; rng.fill_bytes(&mut raw_bytes); // If N isn't divisible by 8 diff --git a/consensus/types/src/test_utils/test_random/hash256.rs b/consensus/types/src/test_utils/test_random/hash256.rs index 21d443c0e2a..4d7570fb55c 100644 --- a/consensus/types/src/test_utils/test_random/hash256.rs +++ b/consensus/types/src/test_utils/test_random/hash256.rs @@ -1,7 +1,7 @@ -use super::*; +use crate::{core::Hash256, test_utils::TestRandom}; impl TestRandom for Hash256 { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { let mut key_bytes = vec![0; 32]; rng.fill_bytes(&mut key_bytes); Hash256::from_slice(&key_bytes[..]) diff --git a/consensus/types/src/test_utils/test_random/kzg_commitment.rs b/consensus/types/src/test_utils/test_random/kzg_commitment.rs index a4030f2b6a3..31e316a1987 100644 --- a/consensus/types/src/test_utils/test_random/kzg_commitment.rs +++ b/consensus/types/src/test_utils/test_random/kzg_commitment.rs @@ -1,4 +1,6 @@ -use super::*; +use kzg::KzgCommitment; + +use crate::test_utils::TestRandom; impl TestRandom for KzgCommitment { fn random_for_test(rng: &mut impl rand::RngCore) -> Self { diff --git 
a/consensus/types/src/test_utils/test_random/kzg_proof.rs b/consensus/types/src/test_utils/test_random/kzg_proof.rs index 7e771ca5660..4465d5ab39d 100644 --- a/consensus/types/src/test_utils/test_random/kzg_proof.rs +++ b/consensus/types/src/test_utils/test_random/kzg_proof.rs @@ -1,8 +1,9 @@ -use super::*; -use kzg::BYTES_PER_COMMITMENT; +use kzg::{BYTES_PER_COMMITMENT, KzgProof}; + +use crate::test_utils::TestRandom; impl TestRandom for KzgProof { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { let mut bytes = [0; BYTES_PER_COMMITMENT]; rng.fill_bytes(&mut bytes); Self(bytes) diff --git a/consensus/types/src/test_utils/test_random/mod.rs b/consensus/types/src/test_utils/test_random/mod.rs new file mode 100644 index 00000000000..41812593fa7 --- /dev/null +++ b/consensus/types/src/test_utils/test_random/mod.rs @@ -0,0 +1,15 @@ +mod address; +mod aggregate_signature; +mod bitfield; +mod hash256; +mod kzg_commitment; +mod kzg_proof; +mod public_key; +mod public_key_bytes; +mod secret_key; +mod signature; +mod signature_bytes; +mod test_random; +mod uint256; + +pub use test_random::{TestRandom, test_random_instance}; diff --git a/consensus/types/src/test_utils/test_random/public_key.rs b/consensus/types/src/test_utils/test_random/public_key.rs index d33e9ac7043..9d287c23d73 100644 --- a/consensus/types/src/test_utils/test_random/public_key.rs +++ b/consensus/types/src/test_utils/test_random/public_key.rs @@ -1,7 +1,9 @@ -use super::*; +use bls::{PublicKey, SecretKey}; + +use crate::test_utils::TestRandom; impl TestRandom for PublicKey { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { SecretKey::random_for_test(rng).public_key() } } diff --git a/consensus/types/src/test_utils/test_random/public_key_bytes.rs b/consensus/types/src/test_utils/test_random/public_key_bytes.rs index 6e5cafc4f03..587c3baf8fb 100644 --- 
a/consensus/types/src/test_utils/test_random/public_key_bytes.rs +++ b/consensus/types/src/test_utils/test_random/public_key_bytes.rs @@ -1,9 +1,9 @@ -use bls::PUBLIC_KEY_BYTES_LEN; +use bls::{PUBLIC_KEY_BYTES_LEN, PublicKey, PublicKeyBytes}; -use super::*; +use crate::test_utils::TestRandom; impl TestRandom for PublicKeyBytes { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { //50-50 chance for signature to be "valid" or invalid if bool::random_for_test(rng) { //valid signature diff --git a/consensus/types/src/test_utils/test_random/secret_key.rs b/consensus/types/src/test_utils/test_random/secret_key.rs index da1614aa24e..a8295d968af 100644 --- a/consensus/types/src/test_utils/test_random/secret_key.rs +++ b/consensus/types/src/test_utils/test_random/secret_key.rs @@ -1,7 +1,9 @@ -use super::*; +use bls::SecretKey; + +use crate::test_utils::TestRandom; impl TestRandom for SecretKey { - fn random_for_test(_rng: &mut impl RngCore) -> Self { + fn random_for_test(_rng: &mut impl rand::RngCore) -> Self { // TODO: Not deterministic generation. Using `SecretKey::deserialize` results in // `BlstError(BLST_BAD_ENCODING)`, need to debug with blst source on what encoding expects. SecretKey::random() diff --git a/consensus/types/src/test_utils/test_random/signature.rs b/consensus/types/src/test_utils/test_random/signature.rs index 8bc0d711103..006aba9650a 100644 --- a/consensus/types/src/test_utils/test_random/signature.rs +++ b/consensus/types/src/test_utils/test_random/signature.rs @@ -1,7 +1,9 @@ -use super::*; +use bls::Signature; + +use crate::test_utils::TestRandom; impl TestRandom for Signature { - fn random_for_test(_rng: &mut impl RngCore) -> Self { + fn random_for_test(_rng: &mut impl rand::RngCore) -> Self { // TODO: `SecretKey::random_for_test` does not return a deterministic signature. 
Since this // signature will not pass verification we could just return the generator point or the // generator point multiplied by a random scalar if we want disctint signatures. diff --git a/consensus/types/src/test_utils/test_random/signature_bytes.rs b/consensus/types/src/test_utils/test_random/signature_bytes.rs index 2117a482321..6992e574679 100644 --- a/consensus/types/src/test_utils/test_random/signature_bytes.rs +++ b/consensus/types/src/test_utils/test_random/signature_bytes.rs @@ -1,9 +1,9 @@ -use bls::SIGNATURE_BYTES_LEN; +use bls::{SIGNATURE_BYTES_LEN, Signature, SignatureBytes}; -use super::*; +use crate::test_utils::TestRandom; impl TestRandom for SignatureBytes { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { //50-50 chance for signature to be "valid" or invalid if bool::random_for_test(rng) { //valid signature diff --git a/consensus/types/src/test_utils/test_random.rs b/consensus/types/src/test_utils/test_random/test_random.rs similarity index 90% rename from consensus/types/src/test_utils/test_random.rs rename to consensus/types/src/test_utils/test_random/test_random.rs index 7c8f86e14df..f31be97c038 100644 --- a/consensus/types/src/test_utils/test_random.rs +++ b/consensus/types/src/test_utils/test_random/test_random.rs @@ -1,23 +1,9 @@ -use crate::*; -use rand::RngCore; -use rand::SeedableRng; +use std::{marker::PhantomData, sync::Arc}; + +use rand::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use smallvec::{SmallVec, smallvec}; -use std::marker::PhantomData; -use std::sync::Arc; - -mod address; -mod aggregate_signature; -mod bitfield; -mod hash256; -mod kzg_commitment; -mod kzg_proof; -mod public_key; -mod public_key_bytes; -mod secret_key; -mod signature; -mod signature_bytes; -mod uint256; +use ssz_types::{VariableList, typenum::Unsigned}; pub fn test_random_instance() -> T { let mut rng = XorShiftRng::from_seed([0x42; 16]); diff --git 
a/consensus/types/src/test_utils/test_random/uint256.rs b/consensus/types/src/test_utils/test_random/uint256.rs index 30077f0e0f6..eccf4765955 100644 --- a/consensus/types/src/test_utils/test_random/uint256.rs +++ b/consensus/types/src/test_utils/test_random/uint256.rs @@ -1,7 +1,7 @@ -use super::*; +use crate::{core::Uint256, test_utils::TestRandom}; impl TestRandom for Uint256 { - fn random_for_test(rng: &mut impl RngCore) -> Self { + fn random_for_test(rng: &mut impl rand::RngCore) -> Self { let mut key_bytes = [0; 32]; rng.fill_bytes(&mut key_bytes); Self::from_le_slice(&key_bytes[..]) diff --git a/consensus/types/src/validator/mod.rs b/consensus/types/src/validator/mod.rs new file mode 100644 index 00000000000..8a67407821c --- /dev/null +++ b/consensus/types/src/validator/mod.rs @@ -0,0 +1,9 @@ +mod proposer_preparation_data; +mod validator; +mod validator_registration_data; +mod validator_subscription; + +pub use proposer_preparation_data::ProposerPreparationData; +pub use validator::{Validator, is_compounding_withdrawal_credential}; +pub use validator_registration_data::{SignedValidatorRegistrationData, ValidatorRegistrationData}; +pub use validator_subscription::ValidatorSubscription; diff --git a/consensus/types/src/proposer_preparation_data.rs b/consensus/types/src/validator/proposer_preparation_data.rs similarity index 95% rename from consensus/types/src/proposer_preparation_data.rs rename to consensus/types/src/validator/proposer_preparation_data.rs index 477fb3b9d15..8ef675de4fd 100644 --- a/consensus/types/src/proposer_preparation_data.rs +++ b/consensus/types/src/validator/proposer_preparation_data.rs @@ -1,6 +1,7 @@ -use crate::*; use serde::{Deserialize, Serialize}; +use crate::core::Address; + /// A proposer preparation, created when a validator prepares the beacon node for potential proposers /// by supplying information required when proposing blocks for the given validators. 
#[derive(PartialEq, Debug, Serialize, Deserialize, Clone)] diff --git a/consensus/types/src/validator.rs b/consensus/types/src/validator/validator.rs similarity index 97% rename from consensus/types/src/validator.rs rename to consensus/types/src/validator/validator.rs index dec8bba627f..7898ab9073a 100644 --- a/consensus/types/src/validator.rs +++ b/consensus/types/src/validator/validator.rs @@ -1,13 +1,19 @@ -use crate::context_deserialize; -use crate::{ - Address, BeaconState, ChainSpec, Checkpoint, Epoch, EthSpec, FixedBytesExtended, ForkName, - Hash256, PublicKeyBytes, test_utils::TestRandom, -}; +use bls::PublicKeyBytes; +use context_deserialize::context_deserialize; +use fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + attestation::Checkpoint, + core::{Address, ChainSpec, Epoch, EthSpec, Hash256}, + fork::ForkName, + state::BeaconState, + test_utils::TestRandom, +}; + /// Information about a `BeaconChain` validator. /// /// Spec v0.12.1 diff --git a/consensus/types/src/validator_registration_data.rs b/consensus/types/src/validator/validator_registration_data.rs similarity index 93% rename from consensus/types/src/validator_registration_data.rs rename to consensus/types/src/validator/validator_registration_data.rs index 345771074c5..a0a1df7dc54 100644 --- a/consensus/types/src/validator_registration_data.rs +++ b/consensus/types/src/validator/validator_registration_data.rs @@ -1,8 +1,10 @@ -use crate::*; +use bls::{PublicKeyBytes, Signature}; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use tree_hash_derive::TreeHash; +use crate::core::{Address, ChainSpec, SignedRoot}; + /// Validator registration, for use in interacting with servers implementing the builder API. 
#[derive(PartialEq, Debug, Serialize, Deserialize, Clone, Encode, Decode)] pub struct SignedValidatorRegistrationData { diff --git a/consensus/types/src/validator_subscription.rs b/consensus/types/src/validator/validator_subscription.rs similarity index 93% rename from consensus/types/src/validator_subscription.rs rename to consensus/types/src/validator/validator_subscription.rs index 62932638ec1..92fb200e10d 100644 --- a/consensus/types/src/validator_subscription.rs +++ b/consensus/types/src/validator/validator_subscription.rs @@ -1,7 +1,8 @@ -use crate::*; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; +use crate::{attestation::CommitteeIndex, core::Slot}; + /// A validator subscription, created when a validator subscribes to a slot to perform optional aggregation /// duties. #[derive(PartialEq, Debug, Serialize, Deserialize, Clone, Encode, Decode, Eq, PartialOrd, Ord)] diff --git a/consensus/types/src/withdrawal/mod.rs b/consensus/types/src/withdrawal/mod.rs new file mode 100644 index 00000000000..bac80d00bed --- /dev/null +++ b/consensus/types/src/withdrawal/mod.rs @@ -0,0 +1,9 @@ +mod pending_partial_withdrawal; +mod withdrawal; +mod withdrawal_credentials; +mod withdrawal_request; + +pub use pending_partial_withdrawal::PendingPartialWithdrawal; +pub use withdrawal::{Withdrawal, Withdrawals}; +pub use withdrawal_credentials::WithdrawalCredentials; +pub use withdrawal_request::WithdrawalRequest; diff --git a/consensus/types/src/pending_partial_withdrawal.rs b/consensus/types/src/withdrawal/pending_partial_withdrawal.rs similarity index 85% rename from consensus/types/src/pending_partial_withdrawal.rs rename to consensus/types/src/withdrawal/pending_partial_withdrawal.rs index e9b10f79b5f..cd866369a47 100644 --- a/consensus/types/src/pending_partial_withdrawal.rs +++ b/consensus/types/src/withdrawal/pending_partial_withdrawal.rs @@ -1,11 +1,11 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{Epoch, 
ForkName}; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::Epoch, fork::ForkName, test_utils::TestRandom}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/withdrawal.rs b/consensus/types/src/withdrawal/withdrawal.rs similarity index 73% rename from consensus/types/src/withdrawal.rs rename to consensus/types/src/withdrawal/withdrawal.rs index ef4a1f285d3..d75bd4f501f 100644 --- a/consensus/types/src/withdrawal.rs +++ b/consensus/types/src/withdrawal/withdrawal.rs @@ -1,10 +1,16 @@ -use crate::test_utils::TestRandom; -use crate::*; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; +use ssz_types::VariableList; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{ + core::{Address, EthSpec}, + fork::ForkName, + test_utils::TestRandom, +}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, @@ -21,6 +27,8 @@ pub struct Withdrawal { pub amount: u64, } +pub type Withdrawals = VariableList::MaxWithdrawalsPerPayload>; + #[cfg(test)] mod tests { use super::*; diff --git a/consensus/types/src/withdrawal_credentials.rs b/consensus/types/src/withdrawal/withdrawal_credentials.rs similarity index 91% rename from consensus/types/src/withdrawal_credentials.rs rename to consensus/types/src/withdrawal/withdrawal_credentials.rs index 52d51ed559c..b732222ca1b 100644 --- a/consensus/types/src/withdrawal_credentials.rs +++ b/consensus/types/src/withdrawal/withdrawal_credentials.rs @@ -1,5 +1,6 @@ -use crate::*; -use bls::get_withdrawal_credentials; +use 
bls::{PublicKey, get_withdrawal_credentials}; + +use crate::core::{Address, ChainSpec, Hash256}; pub struct WithdrawalCredentials(Hash256); @@ -27,7 +28,7 @@ impl From for Hash256 { #[cfg(test)] mod test { use super::*; - use crate::test_utils::generate_deterministic_keypair; + use crate::{EthSpec, MainnetEthSpec, test_utils::generate_deterministic_keypair}; use std::str::FromStr; #[test] diff --git a/consensus/types/src/withdrawal_request.rs b/consensus/types/src/withdrawal/withdrawal_request.rs similarity index 87% rename from consensus/types/src/withdrawal_request.rs rename to consensus/types/src/withdrawal/withdrawal_request.rs index c08921a68c4..98a40016f9f 100644 --- a/consensus/types/src/withdrawal_request.rs +++ b/consensus/types/src/withdrawal/withdrawal_request.rs @@ -1,12 +1,13 @@ -use crate::context_deserialize; -use crate::test_utils::TestRandom; -use crate::{Address, ForkName, PublicKeyBytes}; +use bls::PublicKeyBytes; +use context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz_derive::{Decode, Encode}; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use crate::{core::Address, fork::ForkName, test_utils::TestRandom}; + #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive( Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom, diff --git a/consensus/types/src/beacon_state/committee_cache/tests.rs b/consensus/types/tests/committee_cache.rs similarity index 97% rename from consensus/types/src/beacon_state/committee_cache/tests.rs rename to consensus/types/tests/committee_cache.rs index 1d2ca4ccdb7..751ef05d299 100644 --- a/consensus/types/src/beacon_state/committee_cache/tests.rs +++ b/consensus/types/tests/committee_cache.rs @@ -1,9 +1,14 @@ #![cfg(test)] -use crate::test_utils::*; -use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType}; -use beacon_chain::types::*; use std::sync::LazyLock; + +use 
beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType}; +use bls::Keypair; +use fixed_bytes::FixedBytesExtended; +use milhouse::Vector; use swap_or_not_shuffle::shuffle_list; +use types::*; + +use crate::test_utils::generate_deterministic_keypairs; pub const VALIDATOR_COUNT: usize = 16; diff --git a/consensus/types/src/beacon_state/tests.rs b/consensus/types/tests/state.rs similarity index 97% rename from consensus/types/src/beacon_state/tests.rs rename to consensus/types/tests/state.rs index e5b05a4a5bd..63ab3b8084b 100644 --- a/consensus/types/src/beacon_state/tests.rs +++ b/consensus/types/tests/state.rs @@ -1,15 +1,17 @@ #![cfg(test)] -use crate::test_utils::*; -use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType}; -use beacon_chain::types::{ - BeaconState, BeaconStateAltair, BeaconStateBase, BeaconStateError, ChainSpec, Domain, Epoch, - EthSpec, FixedBytesExtended, Hash256, Keypair, MainnetEthSpec, MinimalEthSpec, RelativeEpoch, - Slot, Vector, test_utils::TestRandom, -}; -use ssz::Encode; use std::ops::Mul; use std::sync::LazyLock; + +use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType}; +use bls::Keypair; +use fixed_bytes::FixedBytesExtended; +use milhouse::Vector; +use rand::SeedableRng; +use rand_xorshift::XorShiftRng; +use ssz::Encode; use swap_or_not_shuffle::compute_shuffled_index; +use types::test_utils::{TestRandom, generate_deterministic_keypairs}; +use types::*; pub const MAX_VALIDATOR_COUNT: usize = 129; pub const SLOT_OFFSET: Slot = Slot::new(1); From e27f31648fd4d4f3504c20f1fcfba55fb84b3ac5 Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Fri, 5 Dec 2025 21:08:38 +1100 Subject: [PATCH 55/74] Move validator http endpoints to a separate module (#8536) Continuation of: * #8529 Moving `/validator` endpoints out of `http_api` to a separation module. This should improve code maintainability, incremental compilation time and rust analyzer performance. 
This is a tedious but straight forward change, so we're going with a pair & insta-merge approach to avoid painful & slow async review. @michaelsproul and I paired on the first commit - I believe we are almost done, will pair with @pawanjay176 tomorrow to wrap it up and merge tomorrow. (cc @macladson ) Co-Authored-By: Jimmy Chen --- beacon_node/http_api/src/lib.rs | 1104 ++++--------------- beacon_node/http_api/src/publish_blocks.rs | 11 +- beacon_node/http_api/src/sync_committees.rs | 2 +- beacon_node/http_api/src/utils.rs | 86 ++ beacon_node/http_api/src/validator.rs | 22 - beacon_node/http_api/src/validator/mod.rs | 971 ++++++++++++++++ 6 files changed, 1265 insertions(+), 931 deletions(-) delete mode 100644 beacon_node/http_api/src/validator.rs create mode 100644 beacon_node/http_api/src/validator/mod.rs diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index ccd0698161e..38019338554 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -37,13 +37,14 @@ mod validators; mod version; use crate::light_client::{get_light_client_bootstrap, get_light_client_updates}; -use crate::produce_block::{produce_blinded_block_v2, produce_block_v2, produce_block_v3}; +use crate::utils::{AnyVersionFilter, EthV1Filter}; +use crate::validator::post_validator_liveness_epoch; +use crate::validator::*; use crate::version::beacon_response; use beacon::states; use beacon_chain::{ - AttestationError as AttnError, BeaconChain, BeaconChainError, BeaconChainTypes, - WhenSlotSkipped, attestation_verification::VerifiedAttestation, - observed_operations::ObservationOutcome, validator_monitor::timestamp_now, + BeaconChain, BeaconChainError, BeaconChainTypes, WhenSlotSkipped, + observed_operations::ObservationOutcome, }; use beacon_processor::BeaconProcessorSend; pub use block_id::BlockId; @@ -51,18 +52,20 @@ use builder_states::get_next_withdrawals; use bytes::Bytes; use directory::DEFAULT_ROOT_DIR; use eth2::StatusCode; +use 
eth2::lighthouse::sync_state::SyncState; use eth2::types::{ self as api_types, BroadcastValidation, ContextDeserialize, EndpointVersion, ForkChoice, - ForkChoiceExtraData, ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, - StateId as CoreStateId, ValidatorId, ValidatorStatus, + ForkChoiceExtraData, ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, ValidatorId, }; use eth2::{CONSENSUS_VERSION_HEADER, CONTENT_TYPE_HEADER, SSZ_CONTENT_TYPE_HEADER}; use health_metrics::observe::Observe; -use lighthouse_network::rpc::methods::MetaData; -use lighthouse_network::{Enr, NetworkGlobals, PeerId, PubsubMessage, types::SyncState}; +use lighthouse_network::Enr; +use lighthouse_network::NetworkGlobals; +use lighthouse_network::PeerId; +use lighthouse_network::PubsubMessage; use lighthouse_version::version_with_platform; use logging::{SSELoggingComponents, crit}; -use network::{NetworkMessage, NetworkSenders, ValidatorSubscriptionMessage}; +use network::{NetworkMessage, NetworkSenders}; use network_utils::enr_ext::EnrExt; use operation_pool::ReceivedPreCapella; use parking_lot::RwLock; @@ -83,24 +86,19 @@ use std::sync::Arc; use sysinfo::{System, SystemExt}; use system_health::{observe_nat, observe_system_health_bn}; use task_spawner::{Priority, TaskSpawner}; -use tokio::sync::{ - mpsc::{Sender, UnboundedSender}, - oneshot, -}; +use tokio::sync::mpsc::UnboundedSender; use tokio_stream::{ StreamExt, wrappers::{BroadcastStream, errors::BroadcastStreamRecvError}, }; -use tracing::{debug, error, info, warn}; +use tracing::{debug, info, warn}; use types::{ - Attestation, AttestationData, AttesterSlashing, BeaconStateError, ChainSpec, Checkpoint, - ConfigAndPreset, Epoch, EthSpec, ForkName, Hash256, ProposerPreparationData, ProposerSlashing, - SignedAggregateAndProof, SignedBlindedBeaconBlock, SignedBlsToExecutionChange, - SignedContributionAndProof, SignedValidatorRegistrationData, SignedVoluntaryExit, - SingleAttestation, Slot, SyncCommitteeMessage, 
SyncContributionData, + Attestation, AttestationData, AttesterSlashing, BeaconStateError, Checkpoint, ConfigAndPreset, + Epoch, EthSpec, ForkName, Hash256, ProposerSlashing, SignedBlindedBeaconBlock, + SignedBlsToExecutionChange, SignedVoluntaryExit, SingleAttestation, Slot, SyncCommitteeMessage, }; use version::{ - ResponseIncludesVersion, V1, V2, V3, add_consensus_version_header, add_ssz_content_type_header, + ResponseIncludesVersion, V1, V2, add_consensus_version_header, add_ssz_content_type_header, execution_optimistic_finalized_beacon_response, inconsistent_fork_rejection, unsupported_version_rejection, }; @@ -360,16 +358,18 @@ pub fn serve( } // Create a filter that extracts the endpoint version. - let any_version = warp::path(API_PREFIX).and(warp::path::param::().or_else( - |_| async move { - Err(warp_utils::reject::custom_bad_request( - "Invalid version identifier".to_string(), - )) - }, - )); + let any_version = warp::path(API_PREFIX) + .and( + warp::path::param::().or_else(|_| async move { + Err(warp_utils::reject::custom_bad_request( + "Invalid version identifier".to_string(), + )) + }), + ) + .boxed(); // Filter that enforces a single endpoint version and then discards the `EndpointVersion`. - let single_version = |reqd: EndpointVersion| { + fn single_version(any_version: AnyVersionFilter, reqd: EndpointVersion) -> EthV1Filter { any_version .and_then(move |version| async move { if version == reqd { @@ -379,10 +379,11 @@ pub fn serve( } }) .untuple_one() - }; + .boxed() + } - let eth_v1 = single_version(V1); - let eth_v2 = single_version(V2); + let eth_v1 = single_version(any_version.clone(), V1); + let eth_v2 = single_version(any_version.clone(), V2); // Create a `warp` filter that provides access to the network globals. let inner_network_globals = ctx.network_globals.clone(); @@ -403,34 +404,34 @@ pub fn serve( // Create a `warp` filter that provides access to the beacon chain. 
let inner_ctx = ctx.clone(); - let chain_filter = - warp::any() - .map(move || inner_ctx.chain.clone()) - .and_then(|chain| async move { - match chain { - Some(chain) => Ok(chain), - None => Err(warp_utils::reject::custom_not_found( - "Beacon chain genesis has not yet been observed.".to_string(), - )), - } - }); + let chain_filter = warp::any() + .map(move || inner_ctx.chain.clone()) + .and_then(|chain| async move { + match chain { + Some(chain) => Ok(chain), + None => Err(warp_utils::reject::custom_not_found( + "Beacon chain genesis has not yet been observed.".to_string(), + )), + } + }) + .boxed(); // Create a `warp` filter that provides access to the network sender channel. let network_tx = ctx .network_senders .as_ref() .map(|senders| senders.network_send()); - let network_tx_filter = - warp::any() - .map(move || network_tx.clone()) - .and_then(|network_tx| async move { - match network_tx { - Some(network_tx) => Ok(network_tx), - None => Err(warp_utils::reject::custom_not_found( - "The networking stack has not yet started (network_tx).".to_string(), - )), - } - }); + let network_tx_filter = warp::any() + .map(move || network_tx.clone()) + .and_then(|network_tx| async move { + match network_tx { + Some(network_tx) => Ok(network_tx), + None => Err(warp_utils::reject::custom_not_found( + "The networking stack has not yet started (network_tx).".to_string(), + )), + } + }) + .boxed(); // Create a `warp` filter that provides access to the network attestation subscription channel. let validator_subscriptions_tx = ctx @@ -447,7 +448,8 @@ pub fn serve( .to_string(), )), } - }); + }) + .boxed(); // Create a `warp` filter that rejects requests whilst the node is syncing. let not_while_syncing_filter = @@ -487,7 +489,8 @@ pub fn serve( SyncState::Stalled => Ok(()), } }, - ); + ) + .boxed(); // Create a `warp` filter that returns 404s if the light client server is disabled. 
let light_client_server_filter = @@ -540,8 +543,9 @@ pub fn serve( .beacon_processor_send .clone() .filter(|_| config.enable_beacon_processor); - let task_spawner_filter = - warp::any().map(move || TaskSpawner::new(beacon_processor_send.clone())); + let task_spawner_filter = warp::any() + .map(move || TaskSpawner::new(beacon_processor_send.clone())) + .boxed(); let duplicate_block_status_code = ctx.config.duplicate_block_status_code; @@ -553,6 +557,7 @@ pub fn serve( // GET beacon/genesis let get_beacon_genesis = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("genesis")) .and(warp::path::end()) @@ -576,6 +581,7 @@ pub fn serve( */ let beacon_states_path = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("states")) .and(warp::path::param::().or_else(|_| async { @@ -652,6 +658,7 @@ pub fn serve( // mechanism for arbitrary forwards block iteration, we only support iterating forwards along // the canonical chain. let get_beacon_headers = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("headers")) .and(warp::query::()) @@ -748,6 +755,7 @@ pub fn serve( // GET beacon/headers/{block_id} let get_beacon_headers_block_id = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("headers")) .and(warp::path::param::().or_else(|_| async { @@ -803,6 +811,7 @@ pub fn serve( // POST beacon/blocks let post_beacon_blocks = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blocks")) .and(warp::path::end()) @@ -839,6 +848,7 @@ pub fn serve( ); let post_beacon_blocks_ssz = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blocks")) .and(warp::path::end()) @@ -875,6 +885,7 @@ pub fn serve( ); let post_beacon_blocks_v2 = eth_v2 + .clone() .and(warp::path("beacon")) .and(warp::path("blocks")) .and(warp::query::()) @@ -914,6 +925,7 @@ pub fn serve( ); let post_beacon_blocks_v2_ssz = eth_v2 + .clone() .and(warp::path("beacon")) .and(warp::path("blocks")) .and(warp::query::()) @@ -957,6 +969,7 @@ pub fn serve( // POST 
beacon/blinded_blocks let post_beacon_blinded_blocks = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blinded_blocks")) .and(warp::path::end()) @@ -984,6 +997,7 @@ pub fn serve( // POST beacon/blocks let post_beacon_blinded_blocks_ssz = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blinded_blocks")) .and(warp::path::end()) @@ -1018,6 +1032,7 @@ pub fn serve( ); let post_beacon_blinded_blocks_v2 = eth_v2 + .clone() .and(warp::path("beacon")) .and(warp::path("blinded_blocks")) .and(warp::query::()) @@ -1057,6 +1072,7 @@ pub fn serve( ); let post_beacon_blinded_blocks_v2_ssz = eth_v2 + .clone() .and(warp::path("beacon")) .and(warp::path("blinded_blocks")) .and(warp::query::()) @@ -1099,6 +1115,7 @@ pub fn serve( }); let beacon_blocks_path_v1 = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blocks")) .and(block_id_or_err) @@ -1106,6 +1123,7 @@ pub fn serve( .and(chain_filter.clone()); let beacon_blocks_path_any = any_version + .clone() .and(warp::path("beacon")) .and(warp::path("blocks")) .and(block_id_or_err) @@ -1231,6 +1249,7 @@ pub fn serve( // GET beacon/blinded_blocks/{block_id} let get_beacon_blinded_block = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blinded_blocks")) .and(block_id_or_err) @@ -1283,6 +1302,7 @@ pub fn serve( // GET beacon/blob_sidecars/{block_id} let get_blob_sidecars = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blob_sidecars")) .and(block_id_or_err) @@ -1334,6 +1354,7 @@ pub fn serve( // GET beacon/blobs/{block_id} let get_blobs = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("blobs")) .and(block_id_or_err) @@ -1383,18 +1404,21 @@ pub fn serve( */ let beacon_pool_path = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("pool")) .and(task_spawner_filter.clone()) .and(chain_filter.clone()); let beacon_pool_path_v2 = eth_v2 + .clone() .and(warp::path("beacon")) .and(warp::path("pool")) .and(task_spawner_filter.clone()) 
.and(chain_filter.clone()); let beacon_pool_path_any = any_version + .clone() .and(warp::path("beacon")) .and(warp::path("pool")) .and(task_spawner_filter.clone()) @@ -1524,7 +1548,7 @@ pub fn serve( .register_api_attester_slashing(slashing.to_ref()); if let ObservationOutcome::New(slashing) = outcome { - publish_pubsub_message( + utils::publish_pubsub_message( &network_tx, PubsubMessage::AttesterSlashing(Box::new( slashing.clone().into_inner(), @@ -1615,7 +1639,7 @@ pub fn serve( .register_api_proposer_slashing(&slashing); if let ObservationOutcome::New(slashing) = outcome { - publish_pubsub_message( + utils::publish_pubsub_message( &network_tx, PubsubMessage::ProposerSlashing(Box::new( slashing.clone().into_inner(), @@ -1673,7 +1697,7 @@ pub fn serve( .register_api_voluntary_exit(&exit.message); if let ObservationOutcome::New(exit) = outcome { - publish_pubsub_message( + utils::publish_pubsub_message( &network_tx, PubsubMessage::VoluntaryExit(Box::new(exit.clone().into_inner())), )?; @@ -1770,7 +1794,7 @@ pub fn serve( ReceivedPreCapella::Yes }; if matches!(received_pre_capella, ReceivedPreCapella::No) { - publish_pubsub_message( + utils::publish_pubsub_message( &network_tx, PubsubMessage::BlsToExecutionChange(Box::new( verified_address_change.as_inner().clone(), @@ -1824,6 +1848,7 @@ pub fn serve( ); let beacon_rewards_path = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("rewards")) .and(task_spawner_filter.clone()) @@ -1854,6 +1879,7 @@ pub fn serve( */ let builder_states_path = eth_v1 + .clone() .and(warp::path("builder")) .and(warp::path("states")) .and(chain_filter.clone()); @@ -1908,6 +1934,7 @@ pub fn serve( */ let beacon_light_client_path = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("light_client")) .and(light_client_server_filter) @@ -2060,6 +2087,7 @@ pub fn serve( */ let beacon_rewards_path = eth_v1 + .clone() .and(warp::path("beacon")) .and(warp::path("rewards")) .and(task_spawner_filter.clone()) @@ -2144,10 +2172,11 
@@ pub fn serve( * config */ - let config_path = eth_v1.and(warp::path("config")); + let config_path = eth_v1.clone().and(warp::path("config")); // GET config/fork_schedule let get_config_fork_schedule = config_path + .clone() .and(warp::path("fork_schedule")) .and(warp::path::end()) .and(task_spawner_filter.clone()) @@ -2166,6 +2195,7 @@ pub fn serve( // GET config/spec let get_config_spec = config_path + .clone() .and(warp::path("spec")) .and(warp::path::end()) .and(task_spawner_filter.clone()) @@ -2205,6 +2235,7 @@ pub fn serve( // GET debug/beacon/data_column_sidecars/{block_id} let get_debug_data_column_sidecars = eth_v1 + .clone() .and(warp::path("debug")) .and(warp::path("beacon")) .and(warp::path("data_column_sidecars")) @@ -2254,6 +2285,7 @@ pub fn serve( // GET debug/beacon/states/{state_id} let get_debug_beacon_states = any_version + .clone() .and(warp::path("debug")) .and(warp::path("beacon")) .and(warp::path("states")) @@ -2329,6 +2361,7 @@ pub fn serve( // GET debug/beacon/heads let get_debug_beacon_heads = any_version + .clone() .and(warp::path("debug")) .and(warp::path("beacon")) .and(warp::path("heads")) @@ -2369,6 +2402,7 @@ pub fn serve( // GET debug/fork_choice let get_debug_fork_choice = eth_v1 + .clone() .and(warp::path("debug")) .and(warp::path("fork_choice")) .and(warp::path::end()) @@ -2450,6 +2484,7 @@ pub fn serve( // GET node/identity let get_node_identity = eth_v1 + .clone() .and(warp::path("node")) .and(warp::path("identity")) .and(warp::path::end()) @@ -2469,7 +2504,7 @@ pub fn serve( enr, p2p_addresses, discovery_addresses, - metadata: from_meta_data::( + metadata: utils::from_meta_data::( &network_globals.local_metadata, &chain.spec, ), @@ -2480,6 +2515,7 @@ pub fn serve( // GET node/version let get_node_version = eth_v1 + .clone() .and(warp::path("node")) .and(warp::path("version")) .and(warp::path::end()) @@ -2493,6 +2529,7 @@ pub fn serve( // GET node/syncing let get_node_syncing = eth_v1 + .clone() .and(warp::path("node")) 
.and(warp::path("syncing")) .and(warp::path::end()) @@ -2554,6 +2591,7 @@ pub fn serve( // GET node/health let get_node_health = eth_v1 + .clone() .and(warp::path("node")) .and(warp::path("health")) .and(warp::path::end()) @@ -2602,6 +2640,7 @@ pub fn serve( // GET node/peers/{peer_id} let get_node_peers_by_id = eth_v1 + .clone() .and(warp::path("node")) .and(warp::path("peers")) .and(warp::path::param::()) @@ -2656,6 +2695,7 @@ pub fn serve( // GET node/peers let get_node_peers = eth_v1 + .clone() .and(warp::path("node")) .and(warp::path("peers")) .and(warp::path::end()) @@ -2720,6 +2760,7 @@ pub fn serve( // GET node/peer_count let get_node_peer_count = eth_v1 + .clone() .and(warp::path("node")) .and(warp::path("peer_count")) .and(warp::path::end()) @@ -2763,804 +2804,124 @@ pub fn serve( */ // GET validator/duties/proposer/{epoch} - let get_validator_duties_proposer = eth_v1 - .and(warp::path("validator")) - .and(warp::path("duties")) - .and(warp::path("proposer")) - .and(warp::path::param::().or_else(|_| async { - Err(warp_utils::reject::custom_bad_request( - "Invalid epoch".to_string(), - )) - })) - .and(warp::path::end()) - .and(not_while_syncing_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |epoch: Epoch, - not_synced_filter: Result<(), Rejection>, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P0, move || { - not_synced_filter?; - proposer_duties::proposer_duties(epoch, &chain) - }) - }, - ); + let get_validator_duties_proposer = get_validator_duties_proposer( + eth_v1.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // GET validator/blocks/{slot} - let get_validator_blocks = any_version - .and(warp::path("validator")) - .and(warp::path("blocks")) - .and(warp::path::param::().or_else(|_| async { - Err(warp_utils::reject::custom_bad_request( - "Invalid slot".to_string(), - )) - })) - .and(warp::path::end()) 
- .and(warp::header::optional::("accept")) - .and(not_while_syncing_filter.clone()) - .and(warp::query::()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |endpoint_version: EndpointVersion, - slot: Slot, - accept_header: Option, - not_synced_filter: Result<(), Rejection>, - query: api_types::ValidatorBlocksQuery, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.spawn_async_with_rejection(Priority::P0, async move { - debug!(?slot, "Block production request from HTTP API"); - - not_synced_filter?; - - if endpoint_version == V3 { - produce_block_v3(accept_header, chain, slot, query).await - } else { - produce_block_v2(accept_header, chain, slot, query).await - } - }) - }, - ); + let get_validator_blocks = get_validator_blocks( + any_version.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // GET validator/blinded_blocks/{slot} - let get_validator_blinded_blocks = eth_v1 - .and(warp::path("validator")) - .and(warp::path("blinded_blocks")) - .and(warp::path::param::().or_else(|_| async { - Err(warp_utils::reject::custom_bad_request( - "Invalid slot".to_string(), - )) - })) - .and(warp::path::end()) - .and(not_while_syncing_filter.clone()) - .and(warp::query::()) - .and(warp::header::optional::("accept")) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |slot: Slot, - not_synced_filter: Result<(), Rejection>, - query: api_types::ValidatorBlocksQuery, - accept_header: Option, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.spawn_async_with_rejection(Priority::P0, async move { - not_synced_filter?; - produce_blinded_block_v2(accept_header, chain, slot, query).await - }) - }, - ); + let get_validator_blinded_blocks = get_validator_blinded_blocks( + eth_v1.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // GET validator/attestation_data?slot,committee_index - let 
get_validator_attestation_data = eth_v1 - .and(warp::path("validator")) - .and(warp::path("attestation_data")) - .and(warp::path::end()) - .and(warp::query::()) - .and(not_while_syncing_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |query: api_types::ValidatorAttestationDataQuery, - not_synced_filter: Result<(), Rejection>, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P0, move || { - not_synced_filter?; - - let current_slot = chain.slot().map_err(warp_utils::reject::unhandled_error)?; - - // allow a tolerance of one slot to account for clock skew - if query.slot > current_slot + 1 { - return Err(warp_utils::reject::custom_bad_request(format!( - "request slot {} is more than one slot past the current slot {}", - query.slot, current_slot - ))); - } - - chain - .produce_unaggregated_attestation(query.slot, query.committee_index) - .map(|attestation| attestation.data().clone()) - .map(api_types::GenericResponse::from) - .map_err(warp_utils::reject::unhandled_error) - }) - }, - ); + let get_validator_attestation_data = get_validator_attestation_data( + eth_v1.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // GET validator/aggregate_attestation?attestation_data_root,slot - let get_validator_aggregate_attestation = any_version - .and(warp::path("validator")) - .and(warp::path("aggregate_attestation")) - .and(warp::path::end()) - .and(warp::query::()) - .and(not_while_syncing_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |endpoint_version: EndpointVersion, - query: api_types::ValidatorAggregateAttestationQuery, - not_synced_filter: Result<(), Rejection>, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_response_task(Priority::P0, move || { - not_synced_filter?; - crate::aggregate_attestation::get_aggregate_attestation( - query.slot, - 
&query.attestation_data_root, - query.committee_index, - endpoint_version, - chain, - ) - }) - }, - ); + let get_validator_aggregate_attestation = get_validator_aggregate_attestation( + any_version.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // POST validator/duties/attester/{epoch} - let post_validator_duties_attester = eth_v1 - .and(warp::path("validator")) - .and(warp::path("duties")) - .and(warp::path("attester")) - .and(warp::path::param::().or_else(|_| async { - Err(warp_utils::reject::custom_bad_request( - "Invalid epoch".to_string(), - )) - })) - .and(warp::path::end()) - .and(not_while_syncing_filter.clone()) - .and(warp_utils::json::json()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |epoch: Epoch, - not_synced_filter: Result<(), Rejection>, - indices: api_types::ValidatorIndexData, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P0, move || { - not_synced_filter?; - attester_duties::attester_duties(epoch, &indices.0, &chain) - }) - }, - ); + let post_validator_duties_attester = post_validator_duties_attester( + eth_v1.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // POST validator/duties/sync/{epoch} - let post_validator_duties_sync = eth_v1 - .and(warp::path("validator")) - .and(warp::path("duties")) - .and(warp::path("sync")) - .and(warp::path::param::().or_else(|_| async { - Err(warp_utils::reject::custom_bad_request( - "Invalid epoch".to_string(), - )) - })) - .and(warp::path::end()) - .and(not_while_syncing_filter.clone()) - .and(warp_utils::json::json()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |epoch: Epoch, - not_synced_filter: Result<(), Rejection>, - indices: api_types::ValidatorIndexData, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P0, move || { - 
not_synced_filter?; - sync_committees::sync_committee_duties(epoch, &indices.0, &chain) - }) - }, - ); + let post_validator_duties_sync = post_validator_duties_sync( + eth_v1.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // GET validator/sync_committee_contribution - let get_validator_sync_committee_contribution = eth_v1 - .and(warp::path("validator")) - .and(warp::path("sync_committee_contribution")) - .and(warp::path::end()) - .and(warp::query::()) - .and(not_while_syncing_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |sync_committee_data: SyncContributionData, - not_synced_filter: Result<(), Rejection>, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P0, move || { - not_synced_filter?; - chain - .get_aggregated_sync_committee_contribution(&sync_committee_data) - .map_err(|e| { - warp_utils::reject::custom_bad_request(format!( - "unable to fetch sync contribution: {:?}", - e - )) - })? - .map(api_types::GenericResponse::from) - .ok_or_else(|| { - warp_utils::reject::custom_not_found( - "no matching sync contribution found".to_string(), - ) - }) - }) - }, - ); + let get_validator_sync_committee_contribution = get_validator_sync_committee_contribution( + eth_v1.clone().clone(), + chain_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // POST validator/aggregate_and_proofs - let post_validator_aggregate_and_proofs = any_version - .and(warp::path("validator")) - .and(warp::path("aggregate_and_proofs")) - .and(warp::path::end()) - .and(not_while_syncing_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .and(warp_utils::json::json()) - .and(network_tx_filter.clone()) - .then( - // V1 and V2 are identical except V2 has a consensus version header in the request. 
- // We only require this header for SSZ deserialization, which isn't supported for - // this endpoint presently. - |_endpoint_version: EndpointVersion, - not_synced_filter: Result<(), Rejection>, - task_spawner: TaskSpawner, - chain: Arc>, - aggregates: Vec>, - network_tx: UnboundedSender>| { - task_spawner.blocking_json_task(Priority::P0, move || { - not_synced_filter?; - let seen_timestamp = timestamp_now(); - let mut verified_aggregates = Vec::with_capacity(aggregates.len()); - let mut messages = Vec::with_capacity(aggregates.len()); - let mut failures = Vec::new(); - - // Verify that all messages in the post are valid before processing further - for (index, aggregate) in aggregates.iter().enumerate() { - match chain.verify_aggregated_attestation_for_gossip(aggregate) { - Ok(verified_aggregate) => { - messages.push(PubsubMessage::AggregateAndProofAttestation(Box::new( - verified_aggregate.aggregate().clone(), - ))); - - // Notify the validator monitor. - chain - .validator_monitor - .read() - .register_api_aggregated_attestation( - seen_timestamp, - verified_aggregate.aggregate(), - verified_aggregate.indexed_attestation(), - &chain.slot_clock, - ); - - verified_aggregates.push((index, verified_aggregate)); - } - // If we already know the attestation, don't broadcast it or attempt to - // further verify it. Return success. - // - // It's reasonably likely that two different validators produce - // identical aggregates, especially if they're using the same beacon - // node. - Err(AttnError::AttestationSupersetKnown(_)) => continue, - // If we've already seen this aggregator produce an aggregate, just - // skip this one. - // - // We're likely to see this with VCs that use fallback BNs. The first - // BN might time-out *after* publishing the aggregate and then the - // second BN will indicate it's already seen the aggregate. - // - // There's no actual error for the user or the network since the - // aggregate has been successfully published by some other node. 
- Err(AttnError::AggregatorAlreadyKnown(_)) => continue, - Err(e) => { - error!( - error = ?e, - request_index = index, - aggregator_index = aggregate.message().aggregator_index(), - attestation_index = aggregate.message().aggregate().committee_index(), - attestation_slot = %aggregate.message().aggregate().data().slot, - "Failure verifying aggregate and proofs" - ); - failures.push(api_types::Failure::new(index, format!("Verification: {:?}", e))); - } - } - } - - // Publish aggregate attestations to the libp2p network - if !messages.is_empty() { - publish_network_message(&network_tx, NetworkMessage::Publish { messages })?; - } - - // Import aggregate attestations - for (index, verified_aggregate) in verified_aggregates { - if let Err(e) = chain.apply_attestation_to_fork_choice(&verified_aggregate) { - error!( - error = ?e, - request_index = index, - aggregator_index = verified_aggregate.aggregate().message().aggregator_index(), - attestation_index = verified_aggregate.attestation().committee_index(), - attestation_slot = %verified_aggregate.attestation().data().slot, - "Failure applying verified aggregate attestation to fork choice" - ); - failures.push(api_types::Failure::new(index, format!("Fork choice: {:?}", e))); - } - if let Err(e) = chain.add_to_block_inclusion_pool(verified_aggregate) { - warn!( - error = ?e, - request_index = index, - "Could not add verified aggregate attestation to the inclusion pool" - ); - failures.push(api_types::Failure::new(index, format!("Op pool: {:?}", e))); - } - } - - if !failures.is_empty() { - Err(warp_utils::reject::indexed_bad_request("error processing aggregate and proofs".to_string(), - failures, - )) - } else { - Ok(()) - } - }) - }, - ); + let post_validator_aggregate_and_proofs = post_validator_aggregate_and_proofs( + any_version.clone().clone(), + chain_filter.clone(), + network_tx_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); - let post_validator_contribution_and_proofs = 
eth_v1 - .and(warp::path("validator")) - .and(warp::path("contribution_and_proofs")) - .and(warp::path::end()) - .and(not_while_syncing_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .and(warp_utils::json::json()) - .and(network_tx_filter.clone()) - .then( - |not_synced_filter: Result<(), Rejection>, - task_spawner: TaskSpawner, - chain: Arc>, - contributions: Vec>, - network_tx: UnboundedSender>| { - task_spawner.blocking_json_task(Priority::P0, move || { - not_synced_filter?; - sync_committees::process_signed_contribution_and_proofs( - contributions, - network_tx, - &chain, - )?; - Ok(api_types::GenericResponse::from(())) - }) - }, - ); + let post_validator_contribution_and_proofs = post_validator_contribution_and_proofs( + eth_v1.clone().clone(), + chain_filter.clone(), + network_tx_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // POST validator/beacon_committee_subscriptions - let post_validator_beacon_committee_subscriptions = eth_v1 - .and(warp::path("validator")) - .and(warp::path("beacon_committee_subscriptions")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .and(validator_subscription_tx_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |committee_subscriptions: Vec, - validator_subscription_tx: Sender, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P0, move || { - let subscriptions: std::collections::BTreeSet<_> = committee_subscriptions - .iter() - .map(|subscription| { - chain - .validator_monitor - .write() - .auto_register_local_validator(subscription.validator_index); - api_types::ValidatorSubscription { - attestation_committee_index: subscription.committee_index, - slot: subscription.slot, - committee_count_at_slot: subscription.committees_at_slot, - is_aggregator: subscription.is_aggregator, - } - }) - .collect(); - - let message = - 
ValidatorSubscriptionMessage::AttestationSubscribe { subscriptions }; - if let Err(e) = validator_subscription_tx.try_send(message) { - warn!( - info = "the host may be overloaded or resource-constrained", - error = ?e, - "Unable to process committee subscriptions" - ); - return Err(warp_utils::reject::custom_server_error( - "unable to queue subscription, host may be overloaded or shutting down" - .to_string(), - )); - } - Ok(()) - }) - }, + let post_validator_beacon_committee_subscriptions = + post_validator_beacon_committee_subscriptions( + eth_v1.clone().clone(), + chain_filter.clone(), + validator_subscription_tx_filter.clone(), + task_spawner_filter.clone(), ); // POST validator/prepare_beacon_proposer - let post_validator_prepare_beacon_proposer = eth_v1 - .and(warp::path("validator")) - .and(warp::path("prepare_beacon_proposer")) - .and(warp::path::end()) - .and(not_while_syncing_filter.clone()) - .and(network_tx_filter.clone()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .and(warp_utils::json::json()) - .then( - |not_synced_filter: Result<(), Rejection>, - network_tx: UnboundedSender>, - task_spawner: TaskSpawner, - chain: Arc>, - preparation_data: Vec| { - task_spawner.spawn_async_with_rejection(Priority::P0, async move { - not_synced_filter?; - let execution_layer = chain - .execution_layer - .as_ref() - .ok_or(BeaconChainError::ExecutionLayerMissing) - .map_err(warp_utils::reject::unhandled_error)?; - - let current_slot = chain - .slot_clock - .now_or_genesis() - .ok_or(BeaconChainError::UnableToReadSlot) - .map_err(warp_utils::reject::unhandled_error)?; - let current_epoch = current_slot.epoch(T::EthSpec::slots_per_epoch()); - - debug!( - count = preparation_data.len(), - "Received proposer preparation data" - ); - - execution_layer - .update_proposer_preparation( - current_epoch, - preparation_data.iter().map(|data| (data, &None)), - ) - .await; - - chain - .prepare_beacon_proposer(current_slot) - .await - .map_err(|e| { - 
warp_utils::reject::custom_bad_request(format!( - "error updating proposer preparations: {:?}", - e - )) - })?; - - if chain.spec.is_peer_das_scheduled() { - let (finalized_beacon_state, _, _) = - StateId(CoreStateId::Finalized).state(&chain)?; - let validators_and_balances = preparation_data - .iter() - .filter_map(|preparation| { - if let Ok(effective_balance) = finalized_beacon_state - .get_effective_balance(preparation.validator_index as usize) - { - Some((preparation.validator_index as usize, effective_balance)) - } else { - None - } - }) - .collect::>(); - - let current_slot = - chain.slot().map_err(warp_utils::reject::unhandled_error)?; - if let Some(cgc_change) = chain - .data_availability_checker - .custody_context() - .register_validators(validators_and_balances, current_slot, &chain.spec) - { - chain.update_data_column_custody_info(Some( - cgc_change - .effective_epoch - .start_slot(T::EthSpec::slots_per_epoch()), - )); - - network_tx.send(NetworkMessage::CustodyCountChanged { - new_custody_group_count: cgc_change.new_custody_group_count, - sampling_count: cgc_change.sampling_count, - }).unwrap_or_else(|e| { - debug!(error = %e, "Could not send message to the network service. 
\ - Likely shutdown") - }); - } - } - - Ok::<_, warp::reject::Rejection>(warp::reply::json(&()).into_response()) - }) - }, - ); + let post_validator_prepare_beacon_proposer = post_validator_prepare_beacon_proposer( + eth_v1.clone().clone(), + chain_filter.clone(), + network_tx_filter.clone(), + not_while_syncing_filter.clone(), + task_spawner_filter.clone(), + ); // POST validator/register_validator - let post_validator_register_validator = eth_v1 - .and(warp::path("validator")) - .and(warp::path("register_validator")) - .and(warp::path::end()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .and(warp_utils::json::json()) - .then( - |task_spawner: TaskSpawner, - chain: Arc>, - register_val_data: Vec| async { - let (tx, rx) = oneshot::channel(); - - let initial_result = task_spawner - .spawn_async_with_rejection_no_conversion(Priority::P0, async move { - let execution_layer = chain - .execution_layer - .as_ref() - .ok_or(BeaconChainError::ExecutionLayerMissing) - .map_err(warp_utils::reject::unhandled_error)?; - let current_slot = chain - .slot_clock - .now_or_genesis() - .ok_or(BeaconChainError::UnableToReadSlot) - .map_err(warp_utils::reject::unhandled_error)?; - let current_epoch = current_slot.epoch(T::EthSpec::slots_per_epoch()); - - debug!( - count = register_val_data.len(), - "Received register validator request" - ); - - let head_snapshot = chain.head_snapshot(); - let spec = &chain.spec; - - let (preparation_data, filtered_registration_data): ( - Vec<(ProposerPreparationData, Option)>, - Vec, - ) = register_val_data - .into_iter() - .filter_map(|register_data| { - chain - .validator_index(®ister_data.message.pubkey) - .ok() - .flatten() - .and_then(|validator_index| { - let validator = head_snapshot - .beacon_state - .get_validator(validator_index) - .ok()?; - let validator_status = ValidatorStatus::from_validator( - validator, - current_epoch, - spec.far_future_epoch, - ) - .superstatus(); - let is_active_or_pending = - 
matches!(validator_status, ValidatorStatus::Pending) - || matches!( - validator_status, - ValidatorStatus::Active - ); - - // Filter out validators who are not 'active' or 'pending'. - is_active_or_pending.then_some({ - ( - ( - ProposerPreparationData { - validator_index: validator_index as u64, - fee_recipient: register_data - .message - .fee_recipient, - }, - Some(register_data.message.gas_limit), - ), - register_data, - ) - }) - }) - }) - .unzip(); - - // Update the prepare beacon proposer cache based on this request. - execution_layer - .update_proposer_preparation( - current_epoch, - preparation_data.iter().map(|(data, limit)| (data, limit)), - ) - .await; - - // Call prepare beacon proposer blocking with the latest update in order to make - // sure we have a local payload to fall back to in the event of the blinded block - // flow failing. - chain - .prepare_beacon_proposer(current_slot) - .await - .map_err(|e| { - warp_utils::reject::custom_bad_request(format!( - "error updating proposer preparations: {:?}", - e - )) - })?; - - info!( - count = filtered_registration_data.len(), - "Forwarding register validator request to connected builder" - ); - - // It's a waste of a `BeaconProcessor` worker to just - // wait on a response from the builder (especially since - // they have frequent timeouts). Spawn a new task and - // send the response back to our original HTTP request - // task via a channel. - let builder_future = async move { - let arc_builder = chain - .execution_layer - .as_ref() - .ok_or(BeaconChainError::ExecutionLayerMissing) - .map_err(warp_utils::reject::unhandled_error)? 
- .builder(); - let builder = arc_builder - .as_ref() - .ok_or(BeaconChainError::BuilderMissing) - .map_err(warp_utils::reject::unhandled_error)?; - builder - .post_builder_validators(&filtered_registration_data) - .await - .map(|resp| warp::reply::json(&resp).into_response()) - .map_err(|e| { - warn!( - num_registrations = filtered_registration_data.len(), - error = ?e, - "Relay error when registering validator(s)" - ); - // Forward the HTTP status code if we are able to, otherwise fall back - // to a server error. - if let eth2::Error::ServerMessage(message) = e { - if message.code == StatusCode::BAD_REQUEST.as_u16() { - return warp_utils::reject::custom_bad_request( - message.message, - ); - } else { - // According to the spec this response should only be a 400 or 500, - // so we fall back to a 500 here. - return warp_utils::reject::custom_server_error( - message.message, - ); - } - } - warp_utils::reject::custom_server_error(format!("{e:?}")) - }) - }; - tokio::task::spawn(async move { tx.send(builder_future.await) }); - - // Just send a generic 200 OK from this closure. We'll - // ignore the `Ok` variant and form a proper response - // from what is sent back down the channel. - Ok(warp::reply::reply().into_response()) - }) - .await; - - if initial_result.is_err() { - return convert_rejection(initial_result).await; - } - - // Await a response from the builder without blocking a - // `BeaconProcessor` worker. 
- convert_rejection(rx.await.unwrap_or_else(|_| { - Ok(warp::reply::with_status( - warp::reply::json(&"No response from channel"), - warp::http::StatusCode::INTERNAL_SERVER_ERROR, - ) - .into_response()) - })) - .await - }, - ); + let post_validator_register_validator = post_validator_register_validator( + eth_v1.clone().clone(), + chain_filter.clone(), + task_spawner_filter.clone(), + ); // POST validator/sync_committee_subscriptions - let post_validator_sync_committee_subscriptions = eth_v1 - .and(warp::path("validator")) - .and(warp::path("sync_committee_subscriptions")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .and(validator_subscription_tx_filter) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |subscriptions: Vec, - validator_subscription_tx: Sender, - task_spawner: TaskSpawner, - chain: Arc>, - | { - task_spawner.blocking_json_task(Priority::P0, move || { - for subscription in subscriptions { - chain - .validator_monitor - .write() - .auto_register_local_validator(subscription.validator_index); - - let message = ValidatorSubscriptionMessage::SyncCommitteeSubscribe { - subscriptions: vec![subscription], - }; - if let Err(e) = validator_subscription_tx.try_send(message) { - warn!( - info = "the host may be overloaded or resource-constrained", - error = ?e, - "Unable to process sync subscriptions" - ); - return Err(warp_utils::reject::custom_server_error( - "unable to queue subscription, host may be overloaded or shutting down".to_string(), - )); - } - } - - Ok(()) - }) - }, - ); + let post_validator_sync_committee_subscriptions = post_validator_sync_committee_subscriptions( + eth_v1.clone().clone(), + chain_filter.clone(), + validator_subscription_tx_filter.clone(), + task_spawner_filter.clone(), + ); // POST validator/liveness/{epoch} - let post_validator_liveness_epoch = eth_v1 - .and(warp::path("validator")) - .and(warp::path("liveness")) - .and(warp::path::param::()) - .and(warp::path::end()) - 
.and(warp_utils::json::json()) - .and(task_spawner_filter.clone()) - .and(chain_filter.clone()) - .then( - |epoch: Epoch, - indices: api_types::ValidatorIndexData, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_json_task(Priority::P0, move || { - // Ensure the request is for either the current, previous or next epoch. - let current_epoch = - chain.epoch().map_err(warp_utils::reject::unhandled_error)?; - let prev_epoch = current_epoch.saturating_sub(Epoch::new(1)); - let next_epoch = current_epoch.saturating_add(Epoch::new(1)); - - if epoch < prev_epoch || epoch > next_epoch { - return Err(warp_utils::reject::custom_bad_request(format!( - "request epoch {} is more than one epoch from the current epoch {}", - epoch, current_epoch - ))); - } - - let liveness: Vec = indices - .0 - .iter() - .cloned() - .map(|index| { - let is_live = chain.validator_seen_at_epoch(index as usize, epoch); - api_types::StandardLivenessResponseData { index, is_live } - }) - .collect(); - - Ok(api_types::GenericResponse::from(liveness)) - }) - }, - ); + let post_validator_liveness_epoch = post_validator_liveness_epoch( + eth_v1.clone().clone(), + chain_filter.clone(), + task_spawner_filter.clone(), + ); // POST lighthouse/finalize let post_lighthouse_finalize = warp::path("lighthouse") @@ -3632,7 +2993,10 @@ pub fn serve( ); network_globals.add_trusted_peer(enr.clone()); - publish_network_message(&network_tx, NetworkMessage::ConnectTrustedPeer(enr))?; + utils::publish_network_message( + &network_tx, + NetworkMessage::ConnectTrustedPeer(enr), + )?; Ok(()) }) @@ -3663,7 +3027,7 @@ pub fn serve( ); network_globals.remove_trusted_peer(enr.clone()); - publish_network_message( + utils::publish_network_message( &network_tx, NetworkMessage::DisconnectTrustedPeer(enr), )?; @@ -4115,6 +3479,7 @@ pub fn serve( ); let get_events = eth_v1 + .clone() .and(warp::path("events")) .and(warp::path::end()) .and(multi_key_query::()) @@ -4436,70 +3801,3 @@ pub fn serve( Ok(http_server) } - 
-fn from_meta_data( - meta_data: &RwLock>, - spec: &ChainSpec, -) -> api_types::MetaData { - let meta_data = meta_data.read(); - let format_hex = |bytes: &[u8]| format!("0x{}", hex::encode(bytes)); - - let seq_number = *meta_data.seq_number(); - let attnets = format_hex(&meta_data.attnets().clone().into_bytes()); - let syncnets = format_hex( - &meta_data - .syncnets() - .cloned() - .unwrap_or_default() - .into_bytes(), - ); - - if spec.is_peer_das_scheduled() { - api_types::MetaData::V3(api_types::MetaDataV3 { - seq_number, - attnets, - syncnets, - custody_group_count: meta_data.custody_group_count().cloned().unwrap_or_default(), - }) - } else { - api_types::MetaData::V2(api_types::MetaDataV2 { - seq_number, - attnets, - syncnets, - }) - } -} - -/// Publish a message to the libp2p pubsub network. -fn publish_pubsub_message( - network_tx: &UnboundedSender>, - message: PubsubMessage, -) -> Result<(), warp::Rejection> { - publish_network_message( - network_tx, - NetworkMessage::Publish { - messages: vec![message], - }, - ) -} - -/// Publish a message to the libp2p pubsub network. -fn publish_pubsub_messages( - network_tx: &UnboundedSender>, - messages: Vec>, -) -> Result<(), warp::Rejection> { - publish_network_message(network_tx, NetworkMessage::Publish { messages }) -} - -/// Publish a message to the libp2p network. 
-fn publish_network_message( - network_tx: &UnboundedSender>, - message: NetworkMessage, -) -> Result<(), warp::Rejection> { - network_tx.send(message).map_err(|e| { - warp_utils::reject::custom_server_error(format!( - "unable to publish to network channel: {}", - e - )) - }) -} diff --git a/beacon_node/http_api/src/publish_blocks.rs b/beacon_node/http_api/src/publish_blocks.rs index 9671a72da26..b54c071eb80 100644 --- a/beacon_node/http_api/src/publish_blocks.rs +++ b/beacon_node/http_api/src/publish_blocks.rs @@ -138,9 +138,10 @@ pub async fn publish_block>( "Signed block published to network via HTTP API" ); - crate::publish_pubsub_message(&sender, PubsubMessage::BeaconBlock(block.clone())).map_err( - |_| BlockError::BeaconChainError(Box::new(BeaconChainError::UnableToPublish)), - )?; + crate::utils::publish_pubsub_message(&sender, PubsubMessage::BeaconBlock(block.clone())) + .map_err(|_| { + BlockError::BeaconChainError(Box::new(BeaconChainError::UnableToPublish)) + })?; Ok(()) }; @@ -492,7 +493,7 @@ fn publish_blob_sidecars( blob: &GossipVerifiedBlob, ) -> Result<(), BlockError> { let pubsub_message = PubsubMessage::BlobSidecar(Box::new((blob.index(), blob.clone_blob()))); - crate::publish_pubsub_message(sender_clone, pubsub_message) + crate::utils::publish_pubsub_message(sender_clone, pubsub_message) .map_err(|_| BlockError::BeaconChainError(Box::new(BeaconChainError::UnableToPublish))) } @@ -525,7 +526,7 @@ fn publish_column_sidecars( PubsubMessage::DataColumnSidecar(Box::new((subnet, data_col))) }) .collect::>(); - crate::publish_pubsub_messages(sender_clone, pubsub_messages) + crate::utils::publish_pubsub_messages(sender_clone, pubsub_messages) .map_err(|_| BlockError::BeaconChainError(Box::new(BeaconChainError::UnableToPublish))) } diff --git a/beacon_node/http_api/src/sync_committees.rs b/beacon_node/http_api/src/sync_committees.rs index edda0e60a61..b9fa24ad6a4 100644 --- a/beacon_node/http_api/src/sync_committees.rs +++ 
b/beacon_node/http_api/src/sync_committees.rs @@ -1,6 +1,6 @@ //! Handlers for sync committee endpoints. -use crate::publish_pubsub_message; +use crate::utils::publish_pubsub_message; use beacon_chain::sync_committee_verification::{ Error as SyncVerificationError, VerifiedSyncCommitteeMessage, }; diff --git a/beacon_node/http_api/src/utils.rs b/beacon_node/http_api/src/utils.rs index cf61fa481cb..a89780ba245 100644 --- a/beacon_node/http_api/src/utils.rs +++ b/beacon_node/http_api/src/utils.rs @@ -1,3 +1,89 @@ +use crate::task_spawner::TaskSpawner; +use beacon_chain::{BeaconChain, BeaconChainTypes}; +use eth2::types::EndpointVersion; +use lighthouse_network::PubsubMessage; +use lighthouse_network::rpc::methods::MetaData; +use network::{NetworkMessage, ValidatorSubscriptionMessage}; +use parking_lot::RwLock; +use std::sync::Arc; +use tokio::sync::mpsc::{Sender, UnboundedSender}; +use types::{ChainSpec, EthSpec}; +use warp::Rejection; use warp::filters::BoxedFilter; pub type ResponseFilter = BoxedFilter<(warp::reply::Response,)>; +pub type AnyVersionFilter = BoxedFilter<(EndpointVersion,)>; +pub type EthV1Filter = BoxedFilter<()>; +pub type ChainFilter = BoxedFilter<(Arc>,)>; +pub type NotWhileSyncingFilter = BoxedFilter<(Result<(), Rejection>,)>; +pub type TaskSpawnerFilter = BoxedFilter<(TaskSpawner<::EthSpec>,)>; +pub type ValidatorSubscriptionTxFilter = BoxedFilter<(Sender,)>; +pub type NetworkTxFilter = + BoxedFilter<(UnboundedSender::EthSpec>>,)>; + +pub fn from_meta_data( + meta_data: &RwLock>, + spec: &ChainSpec, +) -> eth2::types::MetaData { + let meta_data = meta_data.read(); + let format_hex = |bytes: &[u8]| format!("0x{}", hex::encode(bytes)); + + let seq_number = *meta_data.seq_number(); + let attnets = format_hex(&meta_data.attnets().clone().into_bytes()); + let syncnets = format_hex( + &meta_data + .syncnets() + .cloned() + .unwrap_or_default() + .into_bytes(), + ); + + if spec.is_peer_das_scheduled() { + 
eth2::types::MetaData::V3(eth2::types::MetaDataV3 { + seq_number, + attnets, + syncnets, + custody_group_count: meta_data.custody_group_count().cloned().unwrap_or_default(), + }) + } else { + eth2::types::MetaData::V2(eth2::types::MetaDataV2 { + seq_number, + attnets, + syncnets, + }) + } +} + +/// Publish a message to the libp2p pubsub network. +pub fn publish_pubsub_message( + network_tx: &UnboundedSender>, + message: PubsubMessage, +) -> Result<(), warp::Rejection> { + publish_network_message( + network_tx, + NetworkMessage::Publish { + messages: vec![message], + }, + ) +} + +/// Publish a message to the libp2p pubsub network. +pub fn publish_pubsub_messages( + network_tx: &UnboundedSender>, + messages: Vec>, +) -> Result<(), warp::Rejection> { + publish_network_message(network_tx, NetworkMessage::Publish { messages }) +} + +/// Publish a message to the libp2p network. +pub fn publish_network_message( + network_tx: &UnboundedSender>, + message: NetworkMessage, +) -> Result<(), warp::Rejection> { + network_tx.send(message).map_err(|e| { + warp_utils::reject::custom_server_error(format!( + "unable to publish to network channel: {}", + e + )) + }) +} diff --git a/beacon_node/http_api/src/validator.rs b/beacon_node/http_api/src/validator.rs deleted file mode 100644 index 25b0feb99e8..00000000000 --- a/beacon_node/http_api/src/validator.rs +++ /dev/null @@ -1,22 +0,0 @@ -use beacon_chain::{BeaconChain, BeaconChainError, BeaconChainTypes}; -use types::{BeaconState, PublicKeyBytes}; - -/// Uses the `chain.validator_pubkey_cache` to resolve a pubkey to a validator -/// index and then ensures that the validator exists in the given `state`. -pub fn pubkey_to_validator_index( - chain: &BeaconChain, - state: &BeaconState, - pubkey: &PublicKeyBytes, -) -> Result, Box> { - chain - .validator_index(pubkey) - .map_err(Box::new)? 
- .filter(|&index| { - state - .validators() - .get(index) - .is_some_and(|v| v.pubkey == *pubkey) - }) - .map(Result::Ok) - .transpose() -} diff --git a/beacon_node/http_api/src/validator/mod.rs b/beacon_node/http_api/src/validator/mod.rs new file mode 100644 index 00000000000..9cf1f1a33d0 --- /dev/null +++ b/beacon_node/http_api/src/validator/mod.rs @@ -0,0 +1,971 @@ +use crate::produce_block::{produce_blinded_block_v2, produce_block_v2, produce_block_v3}; +use crate::task_spawner::{Priority, TaskSpawner}; +use crate::utils::{ + AnyVersionFilter, ChainFilter, EthV1Filter, NetworkTxFilter, NotWhileSyncingFilter, + ResponseFilter, TaskSpawnerFilter, ValidatorSubscriptionTxFilter, publish_network_message, +}; +use crate::version::V3; +use crate::{StateId, attester_duties, proposer_duties, sync_committees}; +use beacon_chain::attestation_verification::VerifiedAttestation; +use beacon_chain::validator_monitor::timestamp_now; +use beacon_chain::{AttestationError, BeaconChain, BeaconChainError, BeaconChainTypes}; +use eth2::StatusCode; +use eth2::types::{ + Accept, BeaconCommitteeSubscription, EndpointVersion, Failure, GenericResponse, + StandardLivenessResponseData, StateId as CoreStateId, ValidatorAggregateAttestationQuery, + ValidatorAttestationDataQuery, ValidatorBlocksQuery, ValidatorIndexData, ValidatorStatus, +}; +use lighthouse_network::PubsubMessage; +use network::{NetworkMessage, ValidatorSubscriptionMessage}; +use slot_clock::SlotClock; +use std::sync::Arc; +use tokio::sync::mpsc::{Sender, UnboundedSender}; +use tokio::sync::oneshot; +use tracing::{debug, error, info, warn}; +use types::{ + BeaconState, Epoch, EthSpec, ProposerPreparationData, PublicKeyBytes, SignedAggregateAndProof, + SignedContributionAndProof, SignedValidatorRegistrationData, Slot, SyncContributionData, + ValidatorSubscription, +}; +use warp::{Filter, Rejection, Reply}; +use warp_utils::reject::convert_rejection; + +/// Uses the `chain.validator_pubkey_cache` to resolve a pubkey to a 
validator +/// index and then ensures that the validator exists in the given `state`. +pub fn pubkey_to_validator_index( + chain: &BeaconChain, + state: &BeaconState, + pubkey: &PublicKeyBytes, +) -> Result, Box> { + chain + .validator_index(pubkey) + .map_err(Box::new)? + .filter(|&index| { + state + .validators() + .get(index) + .is_some_and(|v| v.pubkey == *pubkey) + }) + .map(Result::Ok) + .transpose() +} + +// GET validator/sync_committee_contribution +pub fn get_validator_sync_committee_contribution( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("sync_committee_contribution")) + .and(warp::path::end()) + .and(warp::query::()) + .and(not_while_syncing_filter.clone()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |sync_committee_data: SyncContributionData, + not_synced_filter: Result<(), Rejection>, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P0, move || { + not_synced_filter?; + chain + .get_aggregated_sync_committee_contribution(&sync_committee_data) + .map_err(|e| { + warp_utils::reject::custom_bad_request(format!( + "unable to fetch sync contribution: {:?}", + e + )) + })? 
+ .map(GenericResponse::from) + .ok_or_else(|| { + warp_utils::reject::custom_not_found( + "no matching sync contribution found".to_string(), + ) + }) + }) + }, + ) + .boxed() +} + +// POST validator/duties/sync/{epoch} +pub fn post_validator_duties_sync( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("duties")) + .and(warp::path("sync")) + .and(warp::path::param::().or_else(|_| async { + Err(warp_utils::reject::custom_bad_request( + "Invalid epoch".to_string(), + )) + })) + .and(warp::path::end()) + .and(not_while_syncing_filter.clone()) + .and(warp_utils::json::json()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |epoch: Epoch, + not_synced_filter: Result<(), Rejection>, + indices: ValidatorIndexData, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P0, move || { + not_synced_filter?; + sync_committees::sync_committee_duties(epoch, &indices.0, &chain) + }) + }, + ) + .boxed() +} + +// POST validator/duties/attester/{epoch} +pub fn post_validator_duties_attester( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("duties")) + .and(warp::path("attester")) + .and(warp::path::param::().or_else(|_| async { + Err(warp_utils::reject::custom_bad_request( + "Invalid epoch".to_string(), + )) + })) + .and(warp::path::end()) + .and(not_while_syncing_filter.clone()) + .and(warp_utils::json::json()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |epoch: Epoch, + not_synced_filter: Result<(), Rejection>, + indices: ValidatorIndexData, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P0, 
move || { + not_synced_filter?; + attester_duties::attester_duties(epoch, &indices.0, &chain) + }) + }, + ) + .boxed() +} + +// GET validator/aggregate_attestation?attestation_data_root,slot +pub fn get_validator_aggregate_attestation( + any_version: AnyVersionFilter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + any_version + .and(warp::path("validator")) + .and(warp::path("aggregate_attestation")) + .and(warp::path::end()) + .and(warp::query::()) + .and(not_while_syncing_filter.clone()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |endpoint_version: EndpointVersion, + query: ValidatorAggregateAttestationQuery, + not_synced_filter: Result<(), Rejection>, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_response_task(Priority::P0, move || { + not_synced_filter?; + crate::aggregate_attestation::get_aggregate_attestation( + query.slot, + &query.attestation_data_root, + query.committee_index, + endpoint_version, + chain, + ) + }) + }, + ) + .boxed() +} + +// GET validator/attestation_data?slot,committee_index +pub fn get_validator_attestation_data( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("attestation_data")) + .and(warp::path::end()) + .and(warp::query::()) + .and(not_while_syncing_filter.clone()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |query: ValidatorAttestationDataQuery, + not_synced_filter: Result<(), Rejection>, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P0, move || { + not_synced_filter?; + + let current_slot = chain.slot().map_err(warp_utils::reject::unhandled_error)?; + + // allow a tolerance of one slot to account for clock skew + if 
query.slot > current_slot + 1 { + return Err(warp_utils::reject::custom_bad_request(format!( + "request slot {} is more than one slot past the current slot {}", + query.slot, current_slot + ))); + } + + chain + .produce_unaggregated_attestation(query.slot, query.committee_index) + .map(|attestation| attestation.data().clone()) + .map(GenericResponse::from) + .map_err(warp_utils::reject::unhandled_error) + }) + }, + ) + .boxed() +} + +// GET validator/blinded_blocks/{slot} +pub fn get_validator_blinded_blocks( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("blinded_blocks")) + .and(warp::path::param::().or_else(|_| async { + Err(warp_utils::reject::custom_bad_request( + "Invalid slot".to_string(), + )) + })) + .and(warp::path::end()) + .and(not_while_syncing_filter.clone()) + .and(warp::query::()) + .and(warp::header::optional::("accept")) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |slot: Slot, + not_synced_filter: Result<(), Rejection>, + query: ValidatorBlocksQuery, + accept_header: Option, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.spawn_async_with_rejection(Priority::P0, async move { + not_synced_filter?; + produce_blinded_block_v2(accept_header, chain, slot, query).await + }) + }, + ) + .boxed() +} + +// GET validator/blocks/{slot} +pub fn get_validator_blocks( + any_version: AnyVersionFilter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + any_version + .and(warp::path("validator")) + .and(warp::path("blocks")) + .and(warp::path::param::().or_else(|_| async { + Err(warp_utils::reject::custom_bad_request( + "Invalid slot".to_string(), + )) + })) + .and(warp::path::end()) + .and(warp::header::optional::("accept")) + 
.and(not_while_syncing_filter) + .and(warp::query::()) + .and(task_spawner_filter) + .and(chain_filter) + .then( + |endpoint_version: EndpointVersion, + slot: Slot, + accept_header: Option, + not_synced_filter: Result<(), Rejection>, + query: ValidatorBlocksQuery, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.spawn_async_with_rejection(Priority::P0, async move { + debug!(?slot, "Block production request from HTTP API"); + + not_synced_filter?; + + if endpoint_version == V3 { + produce_block_v3(accept_header, chain, slot, query).await + } else { + produce_block_v2(accept_header, chain, slot, query).await + } + }) + }, + ) + .boxed() +} + +// POST validator/liveness/{epoch} +pub fn post_validator_liveness_epoch( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("liveness")) + .and(warp::path::param::()) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |epoch: Epoch, + indices: ValidatorIndexData, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P0, move || { + // Ensure the request is for either the current, previous or next epoch. 
+ let current_epoch = + chain.epoch().map_err(warp_utils::reject::unhandled_error)?; + let prev_epoch = current_epoch.saturating_sub(Epoch::new(1)); + let next_epoch = current_epoch.saturating_add(Epoch::new(1)); + + if epoch < prev_epoch || epoch > next_epoch { + return Err(warp_utils::reject::custom_bad_request(format!( + "request epoch {} is more than one epoch from the current epoch {}", + epoch, current_epoch + ))); + } + + let liveness: Vec = indices + .0 + .iter() + .cloned() + .map(|index| { + let is_live = chain.validator_seen_at_epoch(index as usize, epoch); + StandardLivenessResponseData { index, is_live } + }) + .collect(); + + Ok(GenericResponse::from(liveness)) + }) + }, + ) + .boxed() +} + +// POST validator/sync_committee_subscriptions +pub fn post_validator_sync_committee_subscriptions( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + validator_subscription_tx_filter: ValidatorSubscriptionTxFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("sync_committee_subscriptions")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(validator_subscription_tx_filter) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |subscriptions: Vec, + validator_subscription_tx: Sender, + task_spawner: TaskSpawner, + chain: Arc>, + | { + task_spawner.blocking_json_task(Priority::P0, move || { + for subscription in subscriptions { + chain + .validator_monitor + .write() + .auto_register_local_validator(subscription.validator_index); + + let message = ValidatorSubscriptionMessage::SyncCommitteeSubscribe { + subscriptions: vec![subscription], + }; + if let Err(e) = validator_subscription_tx.try_send(message) { + warn!( + info = "the host may be overloaded or resource-constrained", + error = ?e, + "Unable to process sync subscriptions" + ); + return Err(warp_utils::reject::custom_server_error( + "unable to queue subscription, host may be overloaded or 
shutting down".to_string(), + )); + } + } + + Ok(()) + }) + }, + ).boxed() +} + +// POST validator/register_validator +pub fn post_validator_register_validator( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("register_validator")) + .and(warp::path::end()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .and(warp_utils::json::json()) + .then( + |task_spawner: TaskSpawner, + chain: Arc>, + register_val_data: Vec| async { + let (tx, rx) = oneshot::channel(); + + let initial_result = task_spawner + .spawn_async_with_rejection_no_conversion(Priority::P0, async move { + let execution_layer = chain + .execution_layer + .as_ref() + .ok_or(BeaconChainError::ExecutionLayerMissing) + .map_err(warp_utils::reject::unhandled_error)?; + let current_slot = chain + .slot_clock + .now_or_genesis() + .ok_or(BeaconChainError::UnableToReadSlot) + .map_err(warp_utils::reject::unhandled_error)?; + let current_epoch = current_slot.epoch(T::EthSpec::slots_per_epoch()); + + debug!( + count = register_val_data.len(), + "Received register validator request" + ); + + let head_snapshot = chain.head_snapshot(); + let spec = &chain.spec; + + let (preparation_data, filtered_registration_data): ( + Vec<(ProposerPreparationData, Option)>, + Vec, + ) = register_val_data + .into_iter() + .filter_map(|register_data| { + chain + .validator_index(®ister_data.message.pubkey) + .ok() + .flatten() + .and_then(|validator_index| { + let validator = head_snapshot + .beacon_state + .get_validator(validator_index) + .ok()?; + let validator_status = ValidatorStatus::from_validator( + validator, + current_epoch, + spec.far_future_epoch, + ) + .superstatus(); + let is_active_or_pending = + matches!(validator_status, ValidatorStatus::Pending) + || matches!( + validator_status, + ValidatorStatus::Active + ); + + // Filter out validators who are not 'active' or 'pending'. 
+ is_active_or_pending.then_some({ + ( + ( + ProposerPreparationData { + validator_index: validator_index as u64, + fee_recipient: register_data + .message + .fee_recipient, + }, + Some(register_data.message.gas_limit), + ), + register_data, + ) + }) + }) + }) + .unzip(); + + // Update the prepare beacon proposer cache based on this request. + execution_layer + .update_proposer_preparation( + current_epoch, + preparation_data.iter().map(|(data, limit)| (data, limit)), + ) + .await; + + // Call prepare beacon proposer blocking with the latest update in order to make + // sure we have a local payload to fall back to in the event of the blinded block + // flow failing. + chain + .prepare_beacon_proposer(current_slot) + .await + .map_err(|e| { + warp_utils::reject::custom_bad_request(format!( + "error updating proposer preparations: {:?}", + e + )) + })?; + + info!( + count = filtered_registration_data.len(), + "Forwarding register validator request to connected builder" + ); + + // It's a waste of a `BeaconProcessor` worker to just + // wait on a response from the builder (especially since + // they have frequent timeouts). Spawn a new task and + // send the response back to our original HTTP request + // task via a channel. + let builder_future = async move { + let arc_builder = chain + .execution_layer + .as_ref() + .ok_or(BeaconChainError::ExecutionLayerMissing) + .map_err(warp_utils::reject::unhandled_error)? + .builder(); + let builder = arc_builder + .as_ref() + .ok_or(BeaconChainError::BuilderMissing) + .map_err(warp_utils::reject::unhandled_error)?; + builder + .post_builder_validators(&filtered_registration_data) + .await + .map(|resp| warp::reply::json(&resp).into_response()) + .map_err(|e| { + warn!( + num_registrations = filtered_registration_data.len(), + error = ?e, + "Relay error when registering validator(s)" + ); + // Forward the HTTP status code if we are able to, otherwise fall back + // to a server error. 
+ if let eth2::Error::ServerMessage(message) = e { + if message.code == StatusCode::BAD_REQUEST.as_u16() { + return warp_utils::reject::custom_bad_request( + message.message, + ); + } else { + // According to the spec this response should only be a 400 or 500, + // so we fall back to a 500 here. + return warp_utils::reject::custom_server_error( + message.message, + ); + } + } + warp_utils::reject::custom_server_error(format!("{e:?}")) + }) + }; + tokio::task::spawn(async move { tx.send(builder_future.await) }); + + // Just send a generic 200 OK from this closure. We'll + // ignore the `Ok` variant and form a proper response + // from what is sent back down the channel. + Ok(warp::reply::reply().into_response()) + }) + .await; + + if initial_result.is_err() { + return convert_rejection(initial_result).await; + } + + // Await a response from the builder without blocking a + // `BeaconProcessor` worker. + convert_rejection(rx.await.unwrap_or_else(|_| { + Ok(warp::reply::with_status( + warp::reply::json(&"No response from channel"), + warp::http::StatusCode::INTERNAL_SERVER_ERROR, + ) + .into_response()) + })) + .await + }, + ) + .boxed() +} + +// POST validator/prepare_beacon_proposer +pub fn post_validator_prepare_beacon_proposer( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + network_tx_filter: NetworkTxFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("prepare_beacon_proposer")) + .and(warp::path::end()) + .and(not_while_syncing_filter.clone()) + .and(network_tx_filter.clone()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .and(warp_utils::json::json()) + .then( + |not_synced_filter: Result<(), Rejection>, + network_tx: UnboundedSender>, + task_spawner: TaskSpawner, + chain: Arc>, + preparation_data: Vec| { + task_spawner.spawn_async_with_rejection(Priority::P0, async move { + not_synced_filter?; + let 
execution_layer = chain + .execution_layer + .as_ref() + .ok_or(BeaconChainError::ExecutionLayerMissing) + .map_err(warp_utils::reject::unhandled_error)?; + + let current_slot = chain + .slot_clock + .now_or_genesis() + .ok_or(BeaconChainError::UnableToReadSlot) + .map_err(warp_utils::reject::unhandled_error)?; + let current_epoch = current_slot.epoch(T::EthSpec::slots_per_epoch()); + + debug!( + count = preparation_data.len(), + "Received proposer preparation data" + ); + + execution_layer + .update_proposer_preparation( + current_epoch, + preparation_data.iter().map(|data| (data, &None)), + ) + .await; + + chain + .prepare_beacon_proposer(current_slot) + .await + .map_err(|e| { + warp_utils::reject::custom_bad_request(format!( + "error updating proposer preparations: {:?}", + e + )) + })?; + + if chain.spec.is_peer_das_scheduled() { + let (finalized_beacon_state, _, _) = + StateId(CoreStateId::Finalized).state(&chain)?; + let validators_and_balances = preparation_data + .iter() + .filter_map(|preparation| { + if let Ok(effective_balance) = finalized_beacon_state + .get_effective_balance(preparation.validator_index as usize) + { + Some((preparation.validator_index as usize, effective_balance)) + } else { + None + } + }) + .collect::>(); + + let current_slot = + chain.slot().map_err(warp_utils::reject::unhandled_error)?; + if let Some(cgc_change) = chain + .data_availability_checker + .custody_context() + .register_validators(validators_and_balances, current_slot, &chain.spec) + { + chain.update_data_column_custody_info(Some( + cgc_change + .effective_epoch + .start_slot(T::EthSpec::slots_per_epoch()), + )); + + network_tx.send(NetworkMessage::CustodyCountChanged { + new_custody_group_count: cgc_change.new_custody_group_count, + sampling_count: cgc_change.sampling_count, + }).unwrap_or_else(|e| { + debug!(error = %e, "Could not send message to the network service. 
\ + Likely shutdown") + }); + } + } + + Ok::<_, warp::reject::Rejection>(warp::reply::json(&()).into_response()) + }) + }, + ) + .boxed() +} + +// POST validator/beacon_committee_subscriptions +pub fn post_validator_beacon_committee_subscriptions( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + validator_subscription_tx_filter: ValidatorSubscriptionTxFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("beacon_committee_subscriptions")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(validator_subscription_tx_filter.clone()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .then( + |committee_subscriptions: Vec, + validator_subscription_tx: Sender, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P0, move || { + let subscriptions: std::collections::BTreeSet<_> = committee_subscriptions + .iter() + .map(|subscription| { + chain + .validator_monitor + .write() + .auto_register_local_validator(subscription.validator_index); + ValidatorSubscription { + attestation_committee_index: subscription.committee_index, + slot: subscription.slot, + committee_count_at_slot: subscription.committees_at_slot, + is_aggregator: subscription.is_aggregator, + } + }) + .collect(); + + let message = + ValidatorSubscriptionMessage::AttestationSubscribe { subscriptions }; + if let Err(e) = validator_subscription_tx.try_send(message) { + warn!( + info = "the host may be overloaded or resource-constrained", + error = ?e, + "Unable to process committee subscriptions" + ); + return Err(warp_utils::reject::custom_server_error( + "unable to queue subscription, host may be overloaded or shutting down" + .to_string(), + )); + } + Ok(()) + }) + }, + ) + .boxed() +} + +pub fn post_validator_contribution_and_proofs( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + network_tx_filter: NetworkTxFilter, + not_while_syncing_filter: 
NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + .and(warp::path("contribution_and_proofs")) + .and(warp::path::end()) + .and(not_while_syncing_filter.clone()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .and(warp_utils::json::json()) + .and(network_tx_filter.clone()) + .then( + |not_synced_filter: Result<(), Rejection>, + task_spawner: TaskSpawner, + chain: Arc>, + contributions: Vec>, + network_tx: UnboundedSender>| { + task_spawner.blocking_json_task(Priority::P0, move || { + not_synced_filter?; + sync_committees::process_signed_contribution_and_proofs( + contributions, + network_tx, + &chain, + )?; + Ok(GenericResponse::from(())) + }) + }, + ) + .boxed() +} + +// POST validator/aggregate_and_proofs +pub fn post_validator_aggregate_and_proofs( + any_version: AnyVersionFilter, + chain_filter: ChainFilter, + network_tx_filter: NetworkTxFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + any_version + .and(warp::path("validator")) + .and(warp::path("aggregate_and_proofs")) + .and(warp::path::end()) + .and(not_while_syncing_filter.clone()) + .and(task_spawner_filter.clone()) + .and(chain_filter.clone()) + .and(warp_utils::json::json()) + .and(network_tx_filter.clone()) + .then( + // V1 and V2 are identical except V2 has a consensus version header in the request. + // We only require this header for SSZ deserialization, which isn't supported for + // this endpoint presently. 
+ |_endpoint_version: EndpointVersion, + not_synced_filter: Result<(), Rejection>, + task_spawner: TaskSpawner, + chain: Arc>, + aggregates: Vec>, + network_tx: UnboundedSender>| { + task_spawner.blocking_json_task(Priority::P0, move || { + not_synced_filter?; + let seen_timestamp = timestamp_now(); + let mut verified_aggregates = Vec::with_capacity(aggregates.len()); + let mut messages = Vec::with_capacity(aggregates.len()); + let mut failures = Vec::new(); + + // Verify that all messages in the post are valid before processing further + for (index, aggregate) in aggregates.iter().enumerate() { + match chain.verify_aggregated_attestation_for_gossip(aggregate) { + Ok(verified_aggregate) => { + messages.push(PubsubMessage::AggregateAndProofAttestation(Box::new( + verified_aggregate.aggregate().clone(), + ))); + + // Notify the validator monitor. + chain + .validator_monitor + .read() + .register_api_aggregated_attestation( + seen_timestamp, + verified_aggregate.aggregate(), + verified_aggregate.indexed_attestation(), + &chain.slot_clock, + ); + + verified_aggregates.push((index, verified_aggregate)); + } + // If we already know the attestation, don't broadcast it or attempt to + // further verify it. Return success. + // + // It's reasonably likely that two different validators produce + // identical aggregates, especially if they're using the same beacon + // node. + Err(AttestationError::AttestationSupersetKnown(_)) => continue, + // If we've already seen this aggregator produce an aggregate, just + // skip this one. + // + // We're likely to see this with VCs that use fallback BNs. The first + // BN might time-out *after* publishing the aggregate and then the + // second BN will indicate it's already seen the aggregate. + // + // There's no actual error for the user or the network since the + // aggregate has been successfully published by some other node. 
+ Err(AttestationError::AggregatorAlreadyKnown(_)) => continue, + Err(e) => { + error!( + error = ?e, + request_index = index, + aggregator_index = aggregate.message().aggregator_index(), + attestation_index = aggregate.message().aggregate().committee_index(), + attestation_slot = %aggregate.message().aggregate().data().slot, + "Failure verifying aggregate and proofs" + ); + failures.push(Failure::new(index, format!("Verification: {:?}", e))); + } + } + } + + // Publish aggregate attestations to the libp2p network + if !messages.is_empty() { + publish_network_message(&network_tx, NetworkMessage::Publish { messages })?; + } + + // Import aggregate attestations + for (index, verified_aggregate) in verified_aggregates { + if let Err(e) = chain.apply_attestation_to_fork_choice(&verified_aggregate) { + error!( + error = ?e, + request_index = index, + aggregator_index = verified_aggregate.aggregate().message().aggregator_index(), + attestation_index = verified_aggregate.attestation().committee_index(), + attestation_slot = %verified_aggregate.attestation().data().slot, + "Failure applying verified aggregate attestation to fork choice" + ); + failures.push(Failure::new(index, format!("Fork choice: {:?}", e))); + } + if let Err(e) = chain.add_to_block_inclusion_pool(verified_aggregate) { + warn!( + error = ?e, + request_index = index, + "Could not add verified aggregate attestation to the inclusion pool" + ); + failures.push(Failure::new(index, format!("Op pool: {:?}", e))); + } + } + + if !failures.is_empty() { + Err(warp_utils::reject::indexed_bad_request("error processing aggregate and proofs".to_string(), + failures, + )) + } else { + Ok(()) + } + }) + }, + ).boxed() +} + +// GET validator/duties/proposer/{epoch} +pub fn get_validator_duties_proposer( + eth_v1: EthV1Filter, + chain_filter: ChainFilter, + not_while_syncing_filter: NotWhileSyncingFilter, + task_spawner_filter: TaskSpawnerFilter, +) -> ResponseFilter { + eth_v1 + .and(warp::path("validator")) + 
.and(warp::path("duties")) + .and(warp::path("proposer")) + .and(warp::path::param::().or_else(|_| async { + Err(warp_utils::reject::custom_bad_request( + "Invalid epoch".to_string(), + )) + })) + .and(warp::path::end()) + .and(not_while_syncing_filter) + .and(task_spawner_filter) + .and(chain_filter) + .then( + |epoch: Epoch, + not_synced_filter: Result<(), Rejection>, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_json_task(Priority::P0, move || { + not_synced_filter?; + proposer_duties::proposer_duties(epoch, &chain) + }) + }, + ) + .boxed() +} From 2afa87879bd1b904e037a7ba67c001a5f1a162cc Mon Sep 17 00:00:00 2001 From: Jimmy Chen Date: Fri, 5 Dec 2025 21:59:42 +1100 Subject: [PATCH 56/74] Move beacon pool http api to its own separate module (#8543) Continuation of: * #8536 Moving `/beacon/pool` endpoints out of `http_api` to a separation module. This should improve code maintainability, incremental compilation time and rust analyzer performance. This is a tedious but straight forward change, so we're going with a pair & insta-merge approach to avoid painful & slow async review Co-Authored-By: Jimmy Chen --- beacon_node/http_api/src/beacon/mod.rs | 1 + beacon_node/http_api/src/beacon/pool.rs | 522 ++++++++++++++++++++++++ beacon_node/http_api/src/lib.rs | 459 ++------------------- beacon_node/http_api/src/utils.rs | 3 +- 4 files changed, 563 insertions(+), 422 deletions(-) create mode 100644 beacon_node/http_api/src/beacon/pool.rs diff --git a/beacon_node/http_api/src/beacon/mod.rs b/beacon_node/http_api/src/beacon/mod.rs index 20394784ae7..df5e6eee5cb 100644 --- a/beacon_node/http_api/src/beacon/mod.rs +++ b/beacon_node/http_api/src/beacon/mod.rs @@ -1 +1,2 @@ +pub mod pool; pub mod states; diff --git a/beacon_node/http_api/src/beacon/pool.rs b/beacon_node/http_api/src/beacon/pool.rs new file mode 100644 index 00000000000..059573c3175 --- /dev/null +++ b/beacon_node/http_api/src/beacon/pool.rs @@ -0,0 +1,522 @@ +use 
crate::task_spawner::{Priority, TaskSpawner}; +use crate::utils::{NetworkTxFilter, OptionalConsensusVersionHeaderFilter, ResponseFilter}; +use crate::version::{ + ResponseIncludesVersion, V1, V2, add_consensus_version_header, beacon_response, + unsupported_version_rejection, +}; +use crate::{sync_committees, utils}; +use beacon_chain::observed_operations::ObservationOutcome; +use beacon_chain::{BeaconChain, BeaconChainTypes}; +use eth2::types::{AttestationPoolQuery, EndpointVersion, Failure, GenericResponse}; +use lighthouse_network::PubsubMessage; +use network::NetworkMessage; +use operation_pool::ReceivedPreCapella; +use slot_clock::SlotClock; +use std::collections::HashSet; +use std::sync::Arc; +use tokio::sync::mpsc::UnboundedSender; +use tracing::{debug, info, warn}; +use types::{ + Attestation, AttestationData, AttesterSlashing, ForkName, ProposerSlashing, + SignedBlsToExecutionChange, SignedVoluntaryExit, SingleAttestation, SyncCommitteeMessage, +}; +use warp::filters::BoxedFilter; +use warp::{Filter, Reply}; +use warp_utils::reject::convert_rejection; + +pub type BeaconPoolPathFilter = BoxedFilter<( + TaskSpawner<::EthSpec>, + Arc>, +)>; +pub type BeaconPoolPathV2Filter = BoxedFilter<( + TaskSpawner<::EthSpec>, + Arc>, +)>; +pub type BeaconPoolPathAnyFilter = BoxedFilter<( + EndpointVersion, + TaskSpawner<::EthSpec>, + Arc>, +)>; + +/// POST beacon/pool/bls_to_execution_changes +pub fn post_beacon_pool_bls_to_execution_changes( + network_tx_filter: &NetworkTxFilter, + beacon_pool_path: &BeaconPoolPathFilter, +) -> ResponseFilter { + beacon_pool_path + .clone() + .and(warp::path("bls_to_execution_changes")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(network_tx_filter.clone()) + .then( + |task_spawner: TaskSpawner, + chain: Arc>, + address_changes: Vec, + network_tx: UnboundedSender>| { + task_spawner.blocking_json_task(Priority::P0, move || { + let mut failures = vec![]; + + for (index, address_change) in 
address_changes.into_iter().enumerate() { + let validator_index = address_change.message.validator_index; + + match chain.verify_bls_to_execution_change_for_http_api(address_change) { + Ok(ObservationOutcome::New(verified_address_change)) => { + let validator_index = + verified_address_change.as_inner().message.validator_index; + let address = verified_address_change + .as_inner() + .message + .to_execution_address; + + // New to P2P *and* op pool, gossip immediately if post-Capella. + let received_pre_capella = + if chain.current_slot_is_post_capella().unwrap_or(false) { + ReceivedPreCapella::No + } else { + ReceivedPreCapella::Yes + }; + if matches!(received_pre_capella, ReceivedPreCapella::No) { + utils::publish_pubsub_message( + &network_tx, + PubsubMessage::BlsToExecutionChange(Box::new( + verified_address_change.as_inner().clone(), + )), + )?; + } + + // Import to op pool (may return `false` if there's a race). + let imported = chain.import_bls_to_execution_change( + verified_address_change, + received_pre_capella, + ); + + info!( + %validator_index, + ?address, + published = + matches!(received_pre_capella, ReceivedPreCapella::No), + imported, + "Processed BLS to execution change" + ); + } + Ok(ObservationOutcome::AlreadyKnown) => { + debug!(%validator_index, "BLS to execution change already known"); + } + Err(e) => { + warn!( + validator_index, + reason = ?e, + source = "HTTP", + "Invalid BLS to execution change" + ); + failures.push(Failure::new(index, format!("invalid: {e:?}"))); + } + } + } + + if failures.is_empty() { + Ok(()) + } else { + Err(warp_utils::reject::indexed_bad_request( + "some BLS to execution changes failed to verify".into(), + failures, + )) + } + }) + }, + ) + .boxed() +} + +/// GET beacon/pool/bls_to_execution_changes +pub fn get_beacon_pool_bls_to_execution_changes( + beacon_pool_path: &BeaconPoolPathFilter, +) -> ResponseFilter { + beacon_pool_path + .clone() + .and(warp::path("bls_to_execution_changes")) + .and(warp::path::end()) + 
.then( + |task_spawner: TaskSpawner, chain: Arc>| { + task_spawner.blocking_json_task(Priority::P1, move || { + let address_changes = chain.op_pool.get_all_bls_to_execution_changes(); + Ok(GenericResponse::from(address_changes)) + }) + }, + ) + .boxed() +} + +/// POST beacon/pool/sync_committees +pub fn post_beacon_pool_sync_committees( + network_tx_filter: &NetworkTxFilter, + beacon_pool_path: &BeaconPoolPathFilter, +) -> ResponseFilter { + beacon_pool_path + .clone() + .and(warp::path("sync_committees")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(network_tx_filter.clone()) + .then( + |task_spawner: TaskSpawner, + chain: Arc>, + signatures: Vec, + network_tx: UnboundedSender>| { + task_spawner.blocking_json_task(Priority::P0, move || { + sync_committees::process_sync_committee_signatures( + signatures, network_tx, &chain, + )?; + Ok(GenericResponse::from(())) + }) + }, + ) + .boxed() +} + +/// GET beacon/pool/voluntary_exits +pub fn get_beacon_pool_voluntary_exits( + beacon_pool_path: &BeaconPoolPathFilter, +) -> ResponseFilter { + beacon_pool_path + .clone() + .and(warp::path("voluntary_exits")) + .and(warp::path::end()) + .then( + |task_spawner: TaskSpawner, chain: Arc>| { + task_spawner.blocking_json_task(Priority::P1, move || { + let attestations = chain.op_pool.get_all_voluntary_exits(); + Ok(GenericResponse::from(attestations)) + }) + }, + ) + .boxed() +} + +/// POST beacon/pool/voluntary_exits +pub fn post_beacon_pool_voluntary_exits( + network_tx_filter: &NetworkTxFilter, + beacon_pool_path: &BeaconPoolPathFilter, +) -> ResponseFilter { + beacon_pool_path + .clone() + .and(warp::path("voluntary_exits")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(network_tx_filter.clone()) + .then( + |task_spawner: TaskSpawner, + chain: Arc>, + exit: SignedVoluntaryExit, + network_tx: UnboundedSender>| { + task_spawner.blocking_json_task(Priority::P0, move || { + let outcome = chain + 
.verify_voluntary_exit_for_gossip(exit.clone()) + .map_err(|e| { + warp_utils::reject::object_invalid(format!( + "gossip verification failed: {:?}", + e + )) + })?; + + // Notify the validator monitor. + chain + .validator_monitor + .read() + .register_api_voluntary_exit(&exit.message); + + if let ObservationOutcome::New(exit) = outcome { + utils::publish_pubsub_message( + &network_tx, + PubsubMessage::VoluntaryExit(Box::new(exit.clone().into_inner())), + )?; + + chain.import_voluntary_exit(exit); + } + + Ok(()) + }) + }, + ) + .boxed() +} + +/// GET beacon/pool/proposer_slashings +pub fn get_beacon_pool_proposer_slashings( + beacon_pool_path: &BeaconPoolPathFilter, +) -> ResponseFilter { + beacon_pool_path + .clone() + .and(warp::path("proposer_slashings")) + .and(warp::path::end()) + .then( + |task_spawner: TaskSpawner, chain: Arc>| { + task_spawner.blocking_json_task(Priority::P1, move || { + let attestations = chain.op_pool.get_all_proposer_slashings(); + Ok(GenericResponse::from(attestations)) + }) + }, + ) + .boxed() +} + +/// POST beacon/pool/proposer_slashings +pub fn post_beacon_pool_proposer_slashings( + network_tx_filter: &NetworkTxFilter, + beacon_pool_path: &BeaconPoolPathFilter, +) -> ResponseFilter { + beacon_pool_path + .clone() + .and(warp::path("proposer_slashings")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(network_tx_filter.clone()) + .then( + |task_spawner: TaskSpawner, + chain: Arc>, + slashing: ProposerSlashing, + network_tx: UnboundedSender>| { + task_spawner.blocking_json_task(Priority::P0, move || { + let outcome = chain + .verify_proposer_slashing_for_gossip(slashing.clone()) + .map_err(|e| { + warp_utils::reject::object_invalid(format!( + "gossip verification failed: {:?}", + e + )) + })?; + + // Notify the validator monitor. 
+ chain + .validator_monitor + .read() + .register_api_proposer_slashing(&slashing); + + if let ObservationOutcome::New(slashing) = outcome { + utils::publish_pubsub_message( + &network_tx, + PubsubMessage::ProposerSlashing(Box::new( + slashing.clone().into_inner(), + )), + )?; + + chain.import_proposer_slashing(slashing); + } + + Ok(()) + }) + }, + ) + .boxed() +} + +/// GET beacon/pool/attester_slashings +pub fn get_beacon_pool_attester_slashings( + beacon_pool_path_any: &BeaconPoolPathAnyFilter, +) -> ResponseFilter { + beacon_pool_path_any + .clone() + .and(warp::path("attester_slashings")) + .and(warp::path::end()) + .then( + |endpoint_version: EndpointVersion, + task_spawner: TaskSpawner, + chain: Arc>| { + task_spawner.blocking_response_task(Priority::P1, move || { + let slashings = chain.op_pool.get_all_attester_slashings(); + + // Use the current slot to find the fork version, and convert all messages to the + // current fork's format. This is to ensure consistent message types matching + // `Eth-Consensus-Version`. 
+ let current_slot = + chain + .slot_clock + .now() + .ok_or(warp_utils::reject::custom_server_error( + "unable to read slot clock".to_string(), + ))?; + let fork_name = chain.spec.fork_name_at_slot::(current_slot); + let slashings = slashings + .into_iter() + .filter(|slashing| { + (fork_name.electra_enabled() + && matches!(slashing, AttesterSlashing::Electra(_))) + || (!fork_name.electra_enabled() + && matches!(slashing, AttesterSlashing::Base(_))) + }) + .collect::>(); + + let require_version = match endpoint_version { + V1 => ResponseIncludesVersion::No, + V2 => ResponseIncludesVersion::Yes(fork_name), + _ => return Err(unsupported_version_rejection(endpoint_version)), + }; + + let res = beacon_response(require_version, &slashings); + Ok(add_consensus_version_header( + warp::reply::json(&res).into_response(), + fork_name, + )) + }) + }, + ) + .boxed() +} + +// POST beacon/pool/attester_slashings +pub fn post_beacon_pool_attester_slashings( + network_tx_filter: &NetworkTxFilter, + beacon_pool_path_any: &BeaconPoolPathAnyFilter, +) -> ResponseFilter { + beacon_pool_path_any + .clone() + .and(warp::path("attester_slashings")) + .and(warp::path::end()) + .and(warp_utils::json::json()) + .and(network_tx_filter.clone()) + .then( + // V1 and V2 are identical except V2 has a consensus version header in the request. + // We only require this header for SSZ deserialization, which isn't supported for + // this endpoint presently. + |_endpoint_version: EndpointVersion, + task_spawner: TaskSpawner, + chain: Arc>, + slashing: AttesterSlashing, + network_tx: UnboundedSender>| { + task_spawner.blocking_json_task(Priority::P0, move || { + let outcome = chain + .verify_attester_slashing_for_gossip(slashing.clone()) + .map_err(|e| { + warp_utils::reject::object_invalid(format!( + "gossip verification failed: {:?}", + e + )) + })?; + + // Notify the validator monitor. 
+ chain + .validator_monitor + .read() + .register_api_attester_slashing(slashing.to_ref()); + + if let ObservationOutcome::New(slashing) = outcome { + utils::publish_pubsub_message( + &network_tx, + PubsubMessage::AttesterSlashing(Box::new( + slashing.clone().into_inner(), + )), + )?; + + chain.import_attester_slashing(slashing); + } + + Ok(()) + }) + }, + ) + .boxed() +} + +/// GET beacon/pool/attestations?committee_index,slot +pub fn get_beacon_pool_attestations( + beacon_pool_path_any: &BeaconPoolPathAnyFilter, +) -> ResponseFilter { + beacon_pool_path_any + .clone() + .and(warp::path("attestations")) + .and(warp::path::end()) + .and(warp::query::()) + .then( + |endpoint_version: EndpointVersion, + task_spawner: TaskSpawner, + chain: Arc>, + query: AttestationPoolQuery| { + task_spawner.blocking_response_task(Priority::P1, move || { + let query_filter = |data: &AttestationData, committee_indices: HashSet| { + query.slot.is_none_or(|slot| slot == data.slot) + && query + .committee_index + .is_none_or(|index| committee_indices.contains(&index)) + }; + + let mut attestations = chain.op_pool.get_filtered_attestations(query_filter); + attestations.extend( + chain + .naive_aggregation_pool + .read() + .iter() + .filter(|&att| { + query_filter(att.data(), att.get_committee_indices_map()) + }) + .cloned(), + ); + // Use the current slot to find the fork version, and convert all messages to the + // current fork's format. This is to ensure consistent message types matching + // `Eth-Consensus-Version`. 
+ let current_slot = + chain + .slot_clock + .now() + .ok_or(warp_utils::reject::custom_server_error( + "unable to read slot clock".to_string(), + ))?; + let fork_name = chain.spec.fork_name_at_slot::(current_slot); + let attestations = attestations + .into_iter() + .filter(|att| { + (fork_name.electra_enabled() && matches!(att, Attestation::Electra(_))) + || (!fork_name.electra_enabled() + && matches!(att, Attestation::Base(_))) + }) + .collect::>(); + + let require_version = match endpoint_version { + V1 => ResponseIncludesVersion::No, + V2 => ResponseIncludesVersion::Yes(fork_name), + _ => return Err(unsupported_version_rejection(endpoint_version)), + }; + + let res = beacon_response(require_version, &attestations); + Ok(add_consensus_version_header( + warp::reply::json(&res).into_response(), + fork_name, + )) + }) + }, + ) + .boxed() +} + +pub fn post_beacon_pool_attestations_v2( + network_tx_filter: &NetworkTxFilter, + optional_consensus_version_header_filter: OptionalConsensusVersionHeaderFilter, + beacon_pool_path_v2: &BeaconPoolPathV2Filter, +) -> ResponseFilter { + beacon_pool_path_v2 + .clone() + .and(warp::path("attestations")) + .and(warp::path::end()) + .and(warp_utils::json::json::>()) + .and(optional_consensus_version_header_filter) + .and(network_tx_filter.clone()) + .then( + |task_spawner: TaskSpawner, + chain: Arc>, + attestations: Vec, + _fork_name: Option, + network_tx: UnboundedSender>| async move { + let result = crate::publish_attestations::publish_attestations( + task_spawner, + chain, + attestations, + network_tx, + true, + ) + .await + .map(|()| warp::reply::json(&())); + convert_rejection(result).await + }, + ) + .boxed() +} diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index 38019338554..628b94a2a7c 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -36,16 +36,14 @@ mod validator_inclusion; mod validators; mod version; +use crate::beacon::pool::*; use 
crate::light_client::{get_light_client_bootstrap, get_light_client_updates}; use crate::utils::{AnyVersionFilter, EthV1Filter}; use crate::validator::post_validator_liveness_epoch; use crate::validator::*; use crate::version::beacon_response; use beacon::states; -use beacon_chain::{ - BeaconChain, BeaconChainError, BeaconChainTypes, WhenSlotSkipped, - observed_operations::ObservationOutcome, -}; +use beacon_chain::{BeaconChain, BeaconChainError, BeaconChainTypes, WhenSlotSkipped}; use beacon_processor::BeaconProcessorSend; pub use block_id::BlockId; use builder_states::get_next_withdrawals; @@ -62,12 +60,10 @@ use health_metrics::observe::Observe; use lighthouse_network::Enr; use lighthouse_network::NetworkGlobals; use lighthouse_network::PeerId; -use lighthouse_network::PubsubMessage; use lighthouse_version::version_with_platform; use logging::{SSELoggingComponents, crit}; use network::{NetworkMessage, NetworkSenders}; use network_utils::enr_ext::EnrExt; -use operation_pool::ReceivedPreCapella; use parking_lot::RwLock; pub use publish_blocks::{ ProvenancedBlock, publish_blinded_block, publish_block, reconstruct_block, @@ -76,7 +72,6 @@ use serde::{Deserialize, Serialize}; use slot_clock::SlotClock; use ssz::Encode; pub use state_id::StateId; -use std::collections::HashSet; use std::future::Future; use std::net::{IpAddr, Ipv4Addr, SocketAddr}; use std::path::PathBuf; @@ -93,9 +88,8 @@ use tokio_stream::{ }; use tracing::{debug, info, warn}; use types::{ - Attestation, AttestationData, AttesterSlashing, BeaconStateError, Checkpoint, ConfigAndPreset, - Epoch, EthSpec, ForkName, Hash256, ProposerSlashing, SignedBlindedBeaconBlock, - SignedBlsToExecutionChange, SignedVoluntaryExit, SingleAttestation, Slot, SyncCommitteeMessage, + BeaconStateError, Checkpoint, ConfigAndPreset, Epoch, EthSpec, ForkName, Hash256, + SignedBlindedBeaconBlock, Slot, }; use version::{ ResponseIncludesVersion, V1, V2, add_consensus_version_header, add_ssz_content_type_header, @@ -106,7 +100,7 
@@ use warp::Reply; use warp::hyper::Body; use warp::sse::Event; use warp::{Filter, Rejection, http::Response}; -use warp_utils::{query::multi_key_query, reject::convert_rejection, uor::UnifyingOrFilter}; +use warp_utils::{query::multi_key_query, uor::UnifyingOrFilter}; const API_PREFIX: &str = "eth"; @@ -804,10 +798,10 @@ pub fn serve( * beacon/blocks */ let consensus_version_header_filter = - warp::header::header::(CONSENSUS_VERSION_HEADER); + warp::header::header::(CONSENSUS_VERSION_HEADER).boxed(); let optional_consensus_version_header_filter = - warp::header::optional::(CONSENSUS_VERSION_HEADER); + warp::header::optional::(CONSENSUS_VERSION_HEADER).boxed(); // POST beacon/blocks let post_beacon_blocks = eth_v1 @@ -816,7 +810,7 @@ pub fn serve( .and(warp::path("blocks")) .and(warp::path::end()) .and(warp::body::json()) - .and(consensus_version_header_filter) + .and(consensus_version_header_filter.clone()) .and(task_spawner_filter.clone()) .and(chain_filter.clone()) .and(network_tx_filter.clone()) @@ -853,7 +847,7 @@ pub fn serve( .and(warp::path("blocks")) .and(warp::path::end()) .and(warp::body::bytes()) - .and(consensus_version_header_filter) + .and(consensus_version_header_filter.clone()) .and(task_spawner_filter.clone()) .and(chain_filter.clone()) .and(network_tx_filter.clone()) @@ -891,7 +885,7 @@ pub fn serve( .and(warp::query::()) .and(warp::path::end()) .and(warp::body::json()) - .and(consensus_version_header_filter) + .and(consensus_version_header_filter.clone()) .and(task_spawner_filter.clone()) .and(chain_filter.clone()) .and(network_tx_filter.clone()) @@ -931,7 +925,7 @@ pub fn serve( .and(warp::query::()) .and(warp::path::end()) .and(warp::body::bytes()) - .and(consensus_version_header_filter) + .and(consensus_version_header_filter.clone()) .and(task_spawner_filter.clone()) .and(chain_filter.clone()) .and(network_tx_filter.clone()) @@ -1408,444 +1402,67 @@ pub fn serve( .and(warp::path("beacon")) .and(warp::path("pool")) 
.and(task_spawner_filter.clone()) - .and(chain_filter.clone()); + .and(chain_filter.clone()) + .boxed(); let beacon_pool_path_v2 = eth_v2 .clone() .and(warp::path("beacon")) .and(warp::path("pool")) .and(task_spawner_filter.clone()) - .and(chain_filter.clone()); + .and(chain_filter.clone()) + .boxed(); let beacon_pool_path_any = any_version .clone() .and(warp::path("beacon")) .and(warp::path("pool")) .and(task_spawner_filter.clone()) - .and(chain_filter.clone()); + .and(chain_filter.clone()) + .boxed(); - let post_beacon_pool_attestations_v2 = beacon_pool_path_v2 - .clone() - .and(warp::path("attestations")) - .and(warp::path::end()) - .and(warp_utils::json::json::>()) - .and(optional_consensus_version_header_filter) - .and(network_tx_filter.clone()) - .then( - |task_spawner: TaskSpawner, - chain: Arc>, - attestations: Vec, - _fork_name: Option, - network_tx: UnboundedSender>| async move { - let result = crate::publish_attestations::publish_attestations( - task_spawner, - chain, - attestations, - network_tx, - true, - ) - .await - .map(|()| warp::reply::json(&())); - convert_rejection(result).await - }, - ); + let post_beacon_pool_attestations_v2 = post_beacon_pool_attestations_v2( + &network_tx_filter, + optional_consensus_version_header_filter, + &beacon_pool_path_v2, + ); // GET beacon/pool/attestations?committee_index,slot - let get_beacon_pool_attestations = beacon_pool_path_any - .clone() - .and(warp::path("attestations")) - .and(warp::path::end()) - .and(warp::query::()) - .then( - |endpoint_version: EndpointVersion, - task_spawner: TaskSpawner, - chain: Arc>, - query: api_types::AttestationPoolQuery| { - task_spawner.blocking_response_task(Priority::P1, move || { - let query_filter = |data: &AttestationData, committee_indices: HashSet| { - query.slot.is_none_or(|slot| slot == data.slot) - && query - .committee_index - .is_none_or(|index| committee_indices.contains(&index)) - }; - - let mut attestations = 
chain.op_pool.get_filtered_attestations(query_filter); - attestations.extend( - chain - .naive_aggregation_pool - .read() - .iter() - .filter(|&att| { - query_filter(att.data(), att.get_committee_indices_map()) - }) - .cloned(), - ); - // Use the current slot to find the fork version, and convert all messages to the - // current fork's format. This is to ensure consistent message types matching - // `Eth-Consensus-Version`. - let current_slot = - chain - .slot_clock - .now() - .ok_or(warp_utils::reject::custom_server_error( - "unable to read slot clock".to_string(), - ))?; - let fork_name = chain.spec.fork_name_at_slot::(current_slot); - let attestations = attestations - .into_iter() - .filter(|att| { - (fork_name.electra_enabled() && matches!(att, Attestation::Electra(_))) - || (!fork_name.electra_enabled() - && matches!(att, Attestation::Base(_))) - }) - .collect::>(); - - let require_version = match endpoint_version { - V1 => ResponseIncludesVersion::No, - V2 => ResponseIncludesVersion::Yes(fork_name), - _ => return Err(unsupported_version_rejection(endpoint_version)), - }; - - let res = beacon_response(require_version, &attestations); - Ok(add_consensus_version_header( - warp::reply::json(&res).into_response(), - fork_name, - )) - }) - }, - ); + let get_beacon_pool_attestations = get_beacon_pool_attestations(&beacon_pool_path_any); // POST beacon/pool/attester_slashings - let post_beacon_pool_attester_slashings = beacon_pool_path_any - .clone() - .and(warp::path("attester_slashings")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .and(network_tx_filter.clone()) - .then( - // V1 and V2 are identical except V2 has a consensus version header in the request. - // We only require this header for SSZ deserialization, which isn't supported for - // this endpoint presently. 
- |_endpoint_version: EndpointVersion, - task_spawner: TaskSpawner, - chain: Arc>, - slashing: AttesterSlashing, - network_tx: UnboundedSender>| { - task_spawner.blocking_json_task(Priority::P0, move || { - let outcome = chain - .verify_attester_slashing_for_gossip(slashing.clone()) - .map_err(|e| { - warp_utils::reject::object_invalid(format!( - "gossip verification failed: {:?}", - e - )) - })?; - - // Notify the validator monitor. - chain - .validator_monitor - .read() - .register_api_attester_slashing(slashing.to_ref()); - - if let ObservationOutcome::New(slashing) = outcome { - utils::publish_pubsub_message( - &network_tx, - PubsubMessage::AttesterSlashing(Box::new( - slashing.clone().into_inner(), - )), - )?; - - chain.import_attester_slashing(slashing); - } - - Ok(()) - }) - }, - ); + let post_beacon_pool_attester_slashings = + post_beacon_pool_attester_slashings(&network_tx_filter, &beacon_pool_path_any); // GET beacon/pool/attester_slashings let get_beacon_pool_attester_slashings = - beacon_pool_path_any - .clone() - .and(warp::path("attester_slashings")) - .and(warp::path::end()) - .then( - |endpoint_version: EndpointVersion, - task_spawner: TaskSpawner, - chain: Arc>| { - task_spawner.blocking_response_task(Priority::P1, move || { - let slashings = chain.op_pool.get_all_attester_slashings(); - - // Use the current slot to find the fork version, and convert all messages to the - // current fork's format. This is to ensure consistent message types matching - // `Eth-Consensus-Version`. 
- let current_slot = chain.slot_clock.now().ok_or( - warp_utils::reject::custom_server_error( - "unable to read slot clock".to_string(), - ), - )?; - let fork_name = chain.spec.fork_name_at_slot::(current_slot); - let slashings = slashings - .into_iter() - .filter(|slashing| { - (fork_name.electra_enabled() - && matches!(slashing, AttesterSlashing::Electra(_))) - || (!fork_name.electra_enabled() - && matches!(slashing, AttesterSlashing::Base(_))) - }) - .collect::>(); - - let require_version = match endpoint_version { - V1 => ResponseIncludesVersion::No, - V2 => ResponseIncludesVersion::Yes(fork_name), - _ => return Err(unsupported_version_rejection(endpoint_version)), - }; - - let res = beacon_response(require_version, &slashings); - Ok(add_consensus_version_header( - warp::reply::json(&res).into_response(), - fork_name, - )) - }) - }, - ); + get_beacon_pool_attester_slashings(&beacon_pool_path_any); // POST beacon/pool/proposer_slashings - let post_beacon_pool_proposer_slashings = beacon_pool_path - .clone() - .and(warp::path("proposer_slashings")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .and(network_tx_filter.clone()) - .then( - |task_spawner: TaskSpawner, - chain: Arc>, - slashing: ProposerSlashing, - network_tx: UnboundedSender>| { - task_spawner.blocking_json_task(Priority::P0, move || { - let outcome = chain - .verify_proposer_slashing_for_gossip(slashing.clone()) - .map_err(|e| { - warp_utils::reject::object_invalid(format!( - "gossip verification failed: {:?}", - e - )) - })?; - - // Notify the validator monitor. 
- chain - .validator_monitor - .read() - .register_api_proposer_slashing(&slashing); - - if let ObservationOutcome::New(slashing) = outcome { - utils::publish_pubsub_message( - &network_tx, - PubsubMessage::ProposerSlashing(Box::new( - slashing.clone().into_inner(), - )), - )?; - - chain.import_proposer_slashing(slashing); - } - - Ok(()) - }) - }, - ); + let post_beacon_pool_proposer_slashings = + post_beacon_pool_proposer_slashings(&network_tx_filter, &beacon_pool_path); // GET beacon/pool/proposer_slashings - let get_beacon_pool_proposer_slashings = beacon_pool_path - .clone() - .and(warp::path("proposer_slashings")) - .and(warp::path::end()) - .then( - |task_spawner: TaskSpawner, chain: Arc>| { - task_spawner.blocking_json_task(Priority::P1, move || { - let attestations = chain.op_pool.get_all_proposer_slashings(); - Ok(api_types::GenericResponse::from(attestations)) - }) - }, - ); + let get_beacon_pool_proposer_slashings = get_beacon_pool_proposer_slashings(&beacon_pool_path); // POST beacon/pool/voluntary_exits - let post_beacon_pool_voluntary_exits = beacon_pool_path - .clone() - .and(warp::path("voluntary_exits")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .and(network_tx_filter.clone()) - .then( - |task_spawner: TaskSpawner, - chain: Arc>, - exit: SignedVoluntaryExit, - network_tx: UnboundedSender>| { - task_spawner.blocking_json_task(Priority::P0, move || { - let outcome = chain - .verify_voluntary_exit_for_gossip(exit.clone()) - .map_err(|e| { - warp_utils::reject::object_invalid(format!( - "gossip verification failed: {:?}", - e - )) - })?; - - // Notify the validator monitor. 
- chain - .validator_monitor - .read() - .register_api_voluntary_exit(&exit.message); - - if let ObservationOutcome::New(exit) = outcome { - utils::publish_pubsub_message( - &network_tx, - PubsubMessage::VoluntaryExit(Box::new(exit.clone().into_inner())), - )?; - - chain.import_voluntary_exit(exit); - } - - Ok(()) - }) - }, - ); + let post_beacon_pool_voluntary_exits = + post_beacon_pool_voluntary_exits(&network_tx_filter, &beacon_pool_path); // GET beacon/pool/voluntary_exits - let get_beacon_pool_voluntary_exits = beacon_pool_path - .clone() - .and(warp::path("voluntary_exits")) - .and(warp::path::end()) - .then( - |task_spawner: TaskSpawner, chain: Arc>| { - task_spawner.blocking_json_task(Priority::P1, move || { - let attestations = chain.op_pool.get_all_voluntary_exits(); - Ok(api_types::GenericResponse::from(attestations)) - }) - }, - ); + let get_beacon_pool_voluntary_exits = get_beacon_pool_voluntary_exits(&beacon_pool_path); // POST beacon/pool/sync_committees - let post_beacon_pool_sync_committees = beacon_pool_path - .clone() - .and(warp::path("sync_committees")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .and(network_tx_filter.clone()) - .then( - |task_spawner: TaskSpawner, - chain: Arc>, - signatures: Vec, - network_tx: UnboundedSender>| { - task_spawner.blocking_json_task(Priority::P0, move || { - sync_committees::process_sync_committee_signatures( - signatures, network_tx, &chain, - )?; - Ok(api_types::GenericResponse::from(())) - }) - }, - ); + let post_beacon_pool_sync_committees = + post_beacon_pool_sync_committees(&network_tx_filter, &beacon_pool_path); // GET beacon/pool/bls_to_execution_changes - let get_beacon_pool_bls_to_execution_changes = beacon_pool_path - .clone() - .and(warp::path("bls_to_execution_changes")) - .and(warp::path::end()) - .then( - |task_spawner: TaskSpawner, chain: Arc>| { - task_spawner.blocking_json_task(Priority::P1, move || { - let address_changes = chain.op_pool.get_all_bls_to_execution_changes(); - 
Ok(api_types::GenericResponse::from(address_changes)) - }) - }, - ); + let get_beacon_pool_bls_to_execution_changes = + get_beacon_pool_bls_to_execution_changes(&beacon_pool_path); // POST beacon/pool/bls_to_execution_changes - let post_beacon_pool_bls_to_execution_changes = beacon_pool_path - .clone() - .and(warp::path("bls_to_execution_changes")) - .and(warp::path::end()) - .and(warp_utils::json::json()) - .and(network_tx_filter.clone()) - .then( - |task_spawner: TaskSpawner, - chain: Arc>, - address_changes: Vec, - network_tx: UnboundedSender>| { - task_spawner.blocking_json_task(Priority::P0, move || { - let mut failures = vec![]; - - for (index, address_change) in address_changes.into_iter().enumerate() { - let validator_index = address_change.message.validator_index; - - match chain.verify_bls_to_execution_change_for_http_api(address_change) { - Ok(ObservationOutcome::New(verified_address_change)) => { - let validator_index = - verified_address_change.as_inner().message.validator_index; - let address = verified_address_change - .as_inner() - .message - .to_execution_address; - - // New to P2P *and* op pool, gossip immediately if post-Capella. - let received_pre_capella = - if chain.current_slot_is_post_capella().unwrap_or(false) { - ReceivedPreCapella::No - } else { - ReceivedPreCapella::Yes - }; - if matches!(received_pre_capella, ReceivedPreCapella::No) { - utils::publish_pubsub_message( - &network_tx, - PubsubMessage::BlsToExecutionChange(Box::new( - verified_address_change.as_inner().clone(), - )), - )?; - } - - // Import to op pool (may return `false` if there's a race). 
- let imported = chain.import_bls_to_execution_change( - verified_address_change, - received_pre_capella, - ); - - info!( - %validator_index, - ?address, - published = - matches!(received_pre_capella, ReceivedPreCapella::No), - imported, - "Processed BLS to execution change" - ); - } - Ok(ObservationOutcome::AlreadyKnown) => { - debug!(%validator_index, "BLS to execution change already known"); - } - Err(e) => { - warn!( - validator_index, - reason = ?e, - source = "HTTP", - "Invalid BLS to execution change" - ); - failures.push(api_types::Failure::new( - index, - format!("invalid: {e:?}"), - )); - } - } - } - - if failures.is_empty() { - Ok(()) - } else { - Err(warp_utils::reject::indexed_bad_request( - "some BLS to execution changes failed to verify".into(), - failures, - )) - } - }) - }, - ); + let post_beacon_pool_bls_to_execution_changes = + post_beacon_pool_bls_to_execution_changes(&network_tx_filter, &beacon_pool_path); let beacon_rewards_path = eth_v1 .clone() diff --git a/beacon_node/http_api/src/utils.rs b/beacon_node/http_api/src/utils.rs index a89780ba245..f2b859ebe59 100644 --- a/beacon_node/http_api/src/utils.rs +++ b/beacon_node/http_api/src/utils.rs @@ -7,7 +7,7 @@ use network::{NetworkMessage, ValidatorSubscriptionMessage}; use parking_lot::RwLock; use std::sync::Arc; use tokio::sync::mpsc::{Sender, UnboundedSender}; -use types::{ChainSpec, EthSpec}; +use types::{ChainSpec, EthSpec, ForkName}; use warp::Rejection; use warp::filters::BoxedFilter; @@ -20,6 +20,7 @@ pub type TaskSpawnerFilter = BoxedFilter<(TaskSpawner< pub type ValidatorSubscriptionTxFilter = BoxedFilter<(Sender,)>; pub type NetworkTxFilter = BoxedFilter<(UnboundedSender::EthSpec>>,)>; +pub type OptionalConsensusVersionHeaderFilter = BoxedFilter<(Option,)>; pub fn from_meta_data( meta_data: &RwLock>, From 7bfcc0352090cd3abdf6fe8915d56d0610e39689 Mon Sep 17 00:00:00 2001 From: Mac L Date: Mon, 8 Dec 2025 09:37:23 +0400 Subject: [PATCH 57/74] Reduce `eth2` dependency space (#8524) 
Remove certain dependencies from `eth2`, and feature-gate others which are only used by certain endpoints. | Removed | Optional | Dev only | | -------- | -------- | -------- | | `either` `enr` `libp2p-identity` `multiaddr` | `protoarray` `eth2_keystore` `eip_3076` `zeroize` `reqwest-eventsource` `futures` `futures-util` | `rand` `test_random_derive` | This is done by adding an `events` feature which enables the events endpoint and its associated dependencies. The `lighthouse` feature also enables its associated dependencies making them optional. The networking-adjacent dependencies were removed by just having certain fields use a `String` instead of an explicit network type. This means the user should handle conversion at the call site instead. This is a bit spicy, but I believe `PeerId`, `Enr` and `Multiaddr` are easily converted to and from `String`s so I think it's fine and reduces our dependency space by a lot. The alternative is to feature gate these types behind a `network` feature instead. 
Co-Authored-By: Mac L --- Cargo.lock | 4 ---- beacon_node/beacon_chain/Cargo.toml | 2 +- beacon_node/execution_layer/Cargo.toml | 2 +- beacon_node/http_api/Cargo.toml | 2 +- beacon_node/http_api/src/lib.rs | 9 +++++--- beacon_node/http_api/tests/tests.rs | 18 +++++++++++---- beacon_node/lighthouse_network/Cargo.toml | 2 +- common/eth2/Cargo.toml | 27 ++++++++++------------- common/eth2/src/error.rs | 2 ++ common/eth2/src/lib.rs | 16 +++++++++----- common/eth2/src/types.rs | 17 ++++++++------ common/health_metrics/Cargo.toml | 2 +- common/monitoring_api/Cargo.toml | 2 +- validator_client/http_api/Cargo.toml | 2 +- 14 files changed, 61 insertions(+), 46 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 481808b41f7..f832485d5ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3128,17 +3128,13 @@ dependencies = [ "context_deserialize", "educe", "eip_3076", - "either", - "enr", "eth2_keystore", "ethereum_serde_utils", "ethereum_ssz", "ethereum_ssz_derive", "futures", "futures-util", - "libp2p-identity", "mediatype", - "multiaddr", "pretty_reqwest_error", "proto_array", "rand 0.9.2", diff --git a/beacon_node/beacon_chain/Cargo.toml b/beacon_node/beacon_chain/Cargo.toml index e889f53bb01..b42585c2a1e 100644 --- a/beacon_node/beacon_chain/Cargo.toml +++ b/beacon_node/beacon_chain/Cargo.toml @@ -19,7 +19,7 @@ alloy-primitives = { workspace = true } bitvec = { workspace = true } bls = { workspace = true } educe = { workspace = true } -eth2 = { workspace = true } +eth2 = { workspace = true, features = ["lighthouse"] } eth2_network_config = { workspace = true } ethereum_hashing = { workspace = true } ethereum_serde_utils = { workspace = true } diff --git a/beacon_node/execution_layer/Cargo.toml b/beacon_node/execution_layer/Cargo.toml index 43b2e1dd751..540f9dc0a09 100644 --- a/beacon_node/execution_layer/Cargo.toml +++ b/beacon_node/execution_layer/Cargo.toml @@ -12,7 +12,7 @@ alloy-rpc-types-eth = { workspace = true } arc-swap = "1.6.0" builder_client = { path = 
"../builder_client" } bytes = { workspace = true } -eth2 = { workspace = true } +eth2 = { workspace = true, features = ["events", "lighthouse"] } ethereum_serde_utils = { workspace = true } ethereum_ssz = { workspace = true } fixed_bytes = { workspace = true } diff --git a/beacon_node/http_api/Cargo.toml b/beacon_node/http_api/Cargo.toml index 7dd0d0223f4..3aa9c8351cb 100644 --- a/beacon_node/http_api/Cargo.toml +++ b/beacon_node/http_api/Cargo.toml @@ -12,7 +12,7 @@ bs58 = "0.4.0" bytes = { workspace = true } directory = { workspace = true } either = { workspace = true } -eth2 = { workspace = true } +eth2 = { workspace = true, features = ["lighthouse"] } ethereum_serde_utils = { workspace = true } ethereum_ssz = { workspace = true } execution_layer = { workspace = true } diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index 628b94a2a7c..4ed02f3cbf7 100644 --- a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -2118,9 +2118,12 @@ pub fn serve( let discovery_addresses = enr.multiaddr_p2p_udp(); Ok(api_types::GenericResponse::from(api_types::IdentityData { peer_id: network_globals.local_peer_id().to_base58(), - enr, - p2p_addresses, - discovery_addresses, + enr: enr.to_base64(), + p2p_addresses: p2p_addresses.iter().map(|a| a.to_string()).collect(), + discovery_addresses: discovery_addresses + .iter() + .map(|a| a.to_string()) + .collect(), metadata: utils::from_meta_data::( &network_globals.local_metadata, &chain.spec, diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs index 8d99e696cf7..a86cc4f4eff 100644 --- a/beacon_node/http_api/tests/tests.rs +++ b/beacon_node/http_api/tests/tests.rs @@ -2853,9 +2853,19 @@ impl ApiTester { let expected = IdentityData { peer_id: self.local_enr.peer_id().to_string(), - enr: self.local_enr.clone(), - p2p_addresses: self.local_enr.multiaddr_p2p_tcp(), - discovery_addresses: self.local_enr.multiaddr_p2p_udp(), + enr: 
self.local_enr.to_base64(), + p2p_addresses: self + .local_enr + .multiaddr_p2p_tcp() + .iter() + .map(|a| a.to_string()) + .collect(), + discovery_addresses: self + .local_enr + .multiaddr_p2p_udp() + .iter() + .map(|a| a.to_string()) + .collect(), metadata: MetaData::V2(MetaDataV2 { seq_number: 0, attnets: "0x0000000000000000".to_string(), @@ -2884,7 +2894,7 @@ impl ApiTester { pub async fn test_get_node_peers_by_id(self) -> Self { let result = self .client - .get_node_peers_by_id(self.external_peer_id) + .get_node_peers_by_id(&self.external_peer_id.to_string()) .await .unwrap() .data; diff --git a/beacon_node/lighthouse_network/Cargo.toml b/beacon_node/lighthouse_network/Cargo.toml index a6dd276c197..d2431cca045 100644 --- a/beacon_node/lighthouse_network/Cargo.toml +++ b/beacon_node/lighthouse_network/Cargo.toml @@ -17,7 +17,7 @@ directory = { workspace = true } dirs = { workspace = true } discv5 = { workspace = true } either = { workspace = true } -eth2 = { workspace = true } +eth2 = { workspace = true, features = ["lighthouse"] } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } fnv = { workspace = true } diff --git a/common/eth2/Cargo.toml b/common/eth2/Cargo.toml index f7e6cde2100..ba4bcd3649d 100644 --- a/common/eth2/Cargo.toml +++ b/common/eth2/Cargo.toml @@ -5,36 +5,33 @@ authors = ["Paul Hauner "] edition = { workspace = true } [features] -default = ["lighthouse"] -lighthouse = [] +default = [] +lighthouse = ["proto_array", "eth2_keystore", "eip_3076", "zeroize"] +events = ["reqwest-eventsource", "futures", "futures-util"] [dependencies] context_deserialize = { workspace = true } educe = { workspace = true } -eip_3076 = { workspace = true } -either = { workspace = true } -enr = { version = "0.13.0", features = ["ed25519"] } -eth2_keystore = { workspace = true } +eip_3076 = { workspace = true, optional = true } +eth2_keystore = { workspace = true, optional = true } ethereum_serde_utils = { workspace = true } ethereum_ssz = { 
workspace = true } ethereum_ssz_derive = { workspace = true } -futures = { workspace = true } -futures-util = "0.3.8" -libp2p-identity = { version = "0.2", features = ["peerid"] } +futures = { workspace = true, optional = true } +futures-util = { version = "0.3.8", optional = true } mediatype = "0.19.13" -multiaddr = "0.18.2" pretty_reqwest_error = { workspace = true } -proto_array = { workspace = true } -rand = { workspace = true } +proto_array = { workspace = true, optional = true } reqwest = { workspace = true } -reqwest-eventsource = "0.6.0" +reqwest-eventsource = { version = "0.6.0", optional = true } sensitive_url = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } ssz_types = { workspace = true } -test_random_derive = { path = "../../common/test_random_derive" } types = { workspace = true } -zeroize = { workspace = true } +zeroize = { workspace = true, optional = true } [dev-dependencies] +rand = { workspace = true } +test_random_derive = { path = "../../common/test_random_derive" } tokio = { workspace = true } diff --git a/common/eth2/src/error.rs b/common/eth2/src/error.rs index c1bacb4510b..1f21220b798 100644 --- a/common/eth2/src/error.rs +++ b/common/eth2/src/error.rs @@ -14,6 +14,7 @@ use std::{fmt, path::PathBuf}; pub enum Error { /// The `reqwest` client raised an error. HttpClient(PrettyReqwestError), + #[cfg(feature = "events")] /// The `reqwest_eventsource` client raised an error. SseClient(Box), /// The server returned an error message where the body was able to be parsed. 
@@ -91,6 +92,7 @@ impl Error { pub fn status(&self) -> Option { match self { Error::HttpClient(error) => error.inner().status(), + #[cfg(feature = "events")] Error::SseClient(error) => { if let reqwest_eventsource::Error::InvalidStatusCode(status, _) = error.as_ref() { Some(*status) diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index 4e832a11dfa..4e2109be04b 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -22,20 +22,23 @@ pub use beacon_response::{ }; pub use self::error::{Error, ok_or_error, success_or_error}; +pub use reqwest; +pub use reqwest::{StatusCode, Url}; +pub use sensitive_url::SensitiveUrl; + use self::mixin::{RequestAccept, ResponseOptional}; use self::types::*; use educe::Educe; +#[cfg(feature = "events")] use futures::Stream; +#[cfg(feature = "events")] use futures_util::StreamExt; -use libp2p_identity::PeerId; -pub use reqwest; use reqwest::{ Body, IntoUrl, RequestBuilder, Response, header::{HeaderMap, HeaderValue}, }; -pub use reqwest::{StatusCode, Url}; +#[cfg(feature = "events")] use reqwest_eventsource::{Event, EventSource}; -pub use sensitive_url::SensitiveUrl; use serde::{Serialize, de::DeserializeOwned}; use ssz::Encode; use std::fmt; @@ -1978,7 +1981,7 @@ impl BeaconNodeHttpClient { /// `GET node/peers/{peer_id}` pub async fn get_node_peers_by_id( &self, - peer_id: PeerId, + peer_id: &str, ) -> Result, Error> { let mut path = self.eth_path(V1)?; @@ -1986,7 +1989,7 @@ impl BeaconNodeHttpClient { .map_err(|()| Error::InvalidUrl(self.server.clone()))? .push("node") .push("peers") - .push(&peer_id.to_string()); + .push(peer_id); self.get(path).await } @@ -2761,6 +2764,7 @@ impl BeaconNodeHttpClient { } /// `GET events?topics` + #[cfg(feature = "events")] pub async fn get_events( &self, topic: &[EventTopic], diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs index cbdaa004d0d..5aa3de5e170 100644 --- a/common/eth2/src/types.rs +++ b/common/eth2/src/types.rs @@ -1,13 +1,13 @@ //! 
This module exposes a superset of the `types` crate. It adds additional types that are only //! required for the HTTP API. +pub use types::*; + use crate::{ CONSENSUS_BLOCK_VALUE_HEADER, CONSENSUS_VERSION_HEADER, EXECUTION_PAYLOAD_BLINDED_HEADER, EXECUTION_PAYLOAD_VALUE_HEADER, Error as ServerError, }; -use enr::{CombinedKey, Enr}; use mediatype::{MediaType, MediaTypeList, names}; -use multiaddr::Multiaddr; use reqwest::header::HeaderMap; use serde::{Deserialize, Deserializer, Serialize}; use serde_utils::quoted_u64::Quoted; @@ -18,9 +18,11 @@ use std::fmt::{self, Display}; use std::str::FromStr; use std::sync::Arc; use std::time::Duration; + +#[cfg(test)] use test_random_derive::TestRandom; +#[cfg(test)] use types::test_utils::TestRandom; -pub use types::*; // TODO(mac): Temporary module and re-export hack to expose old `consensus/types` via `eth2/types`. pub use crate::beacon_response::*; @@ -557,9 +559,9 @@ pub struct ChainHeadData { #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct IdentityData { pub peer_id: String, - pub enr: Enr, - pub p2p_addresses: Vec, - pub discovery_addresses: Vec, + pub enr: String, + pub p2p_addresses: Vec, + pub discovery_addresses: Vec, pub metadata: MetaData, } @@ -2208,7 +2210,8 @@ pub enum ContentType { Ssz, } -#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, Encode, Decode, TestRandom)] +#[cfg_attr(test, derive(TestRandom))] +#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, Encode, Decode)] #[serde(bound = "E: EthSpec")] pub struct BlobsBundle { pub commitments: KzgCommitments, diff --git a/common/health_metrics/Cargo.toml b/common/health_metrics/Cargo.toml index 20a8c6e4e48..816d4ec68cc 100644 --- a/common/health_metrics/Cargo.toml +++ b/common/health_metrics/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" edition = { workspace = true } [dependencies] -eth2 = { workspace = true } +eth2 = { workspace = true, features = ["lighthouse"] } metrics = { workspace = true } 
[target.'cfg(target_os = "linux")'.dependencies] diff --git a/common/monitoring_api/Cargo.toml b/common/monitoring_api/Cargo.toml index 9e2c36e2c76..e00b1f027b6 100644 --- a/common/monitoring_api/Cargo.toml +++ b/common/monitoring_api/Cargo.toml @@ -6,7 +6,7 @@ edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -eth2 = { workspace = true } +eth2 = { workspace = true, features = ["lighthouse"] } health_metrics = { workspace = true } lighthouse_version = { workspace = true } metrics = { workspace = true } diff --git a/validator_client/http_api/Cargo.toml b/validator_client/http_api/Cargo.toml index 588aa2ca931..bb624ea988c 100644 --- a/validator_client/http_api/Cargo.toml +++ b/validator_client/http_api/Cargo.toml @@ -16,7 +16,7 @@ deposit_contract = { workspace = true } directory = { workspace = true } dirs = { workspace = true } doppelganger_service = { workspace = true } -eth2 = { workspace = true } +eth2 = { workspace = true, features = ["lighthouse"] } eth2_keystore = { workspace = true } ethereum_serde_utils = { workspace = true } filesystem = { workspace = true } From 77d58437da08f8f7fee6ebeeca1b7c0ffc5ea674 Mon Sep 17 00:00:00 2001 From: Mac L Date: Tue, 9 Dec 2025 10:03:02 +0400 Subject: [PATCH 58/74] Clarify `alloy` dependencies (#8550) Previously, we had a pinned version of `alloy` to fix some crate compatibility issues we encountered during the migration away from `ethers`. Now that the migration is complete we should remove the pin. This also updates alloy crates to their latest versions. 
Co-Authored-By: Mac L --- Cargo.lock | 67 +++++++++---------- Cargo.toml | 13 ++-- common/deposit_contract/Cargo.toml | 4 +- .../execution_engine_integration/Cargo.toml | 6 +- 4 files changed, 47 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f832485d5ae..2bb7b6d81b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -142,9 +142,9 @@ dependencies = [ [[package]] name = "alloy-consensus" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3abecb92ba478a285fbf5689100dbafe4003ded4a09bf4b5ef62cca87cd4f79e" +checksum = "2e318e25fb719e747a7e8db1654170fc185024f3ed5b10f86c08d448a912f6e2" dependencies = [ "alloy-eips", "alloy-primitives", @@ -153,6 +153,7 @@ dependencies = [ "alloy-trie", "alloy-tx-macros", "auto_impl", + "borsh", "c-kzg", "derive_more 2.0.1", "either", @@ -168,9 +169,9 @@ dependencies = [ [[package]] name = "alloy-consensus-any" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e864d4f11d1fb8d3ac2fd8f3a15f1ee46d55ec6d116b342ed1b2cb737f25894" +checksum = "364380a845193a317bcb7a5398fc86cdb66c47ebe010771dde05f6869bf9e64a" dependencies = [ "alloy-consensus", "alloy-eips", @@ -191,8 +192,6 @@ dependencies = [ "alloy-sol-type-parser", "alloy-sol-types", "itoa", - "serde", - "serde_json", "winnow", ] @@ -236,9 +235,9 @@ dependencies = [ [[package]] name = "alloy-eips" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e867b5fd52ed0372a95016f3a37cbff95a9d5409230fbaef2d8ea00e8618098" +checksum = "a4c4d7c5839d9f3a467900c625416b24328450c65702eb3d8caff8813e4d1d33" dependencies = [ "alloy-eip2124", "alloy-eip2930", @@ -271,9 +270,9 @@ dependencies = [ [[package]] name = "alloy-json-rpc" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dcab4c51fb1273e3b0f59078e0cdf8aa99f697925b09f0d2055c18be46b4d48c" +checksum = "f72cf87cda808e593381fb9f005ffa4d2475552b7a6c5ac33d087bf77d82abd0" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -286,9 +285,9 @@ dependencies = [ [[package]] name = "alloy-network" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d6ed73d440bae8f27771b7cd507fa8f10f19ddf0b8f67e7622a52e0dbf798e" +checksum = "12aeb37b6f2e61b93b1c3d34d01ee720207c76fe447e2a2c217e433ac75b17f5" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -312,9 +311,9 @@ dependencies = [ [[package]] name = "alloy-network-primitives" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219dccd2cf753a43bd9b0fbb7771a16927ffdb56e43e3a15755bef1a74d614aa" +checksum = "abd29ace62872083e30929cd9b282d82723196d196db589f3ceda67edcc05552" dependencies = [ "alloy-consensus", "alloy-eips", @@ -355,9 +354,9 @@ dependencies = [ [[package]] name = "alloy-provider" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0ef8cbc2b68e2512acf04b2d296c05c98a661bc460462add6414528f4ff3d9b" +checksum = "9b710636d7126e08003b8217e24c09f0cca0b46d62f650a841736891b1ed1fc1" dependencies = [ "alloy-chains", "alloy-consensus", @@ -416,9 +415,9 @@ dependencies = [ [[package]] name = "alloy-rpc-client" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c2630fde9ff6033a780635e1af6ef40e92d74a9cacb8af3defc1b15cfebca5" +checksum = "d0882e72d2c1c0c79dcf4ab60a67472d3f009a949f774d4c17d0bdb669cfde05" dependencies = [ "alloy-json-rpc", "alloy-primitives", @@ -439,9 +438,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-any" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"425e14ee32eb8b7edd6a2247fe0ed640785e6eba75af27db27f1e6220c15ef0d" +checksum = "6a63fb40ed24e4c92505f488f9dd256e2afaed17faa1b7a221086ebba74f4122" dependencies = [ "alloy-consensus-any", "alloy-rpc-types-eth", @@ -450,9 +449,9 @@ dependencies = [ [[package]] name = "alloy-rpc-types-eth" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0185f68a0f8391ab996d335a887087d7ccdbc97952efab3516f6307d456ba2cd" +checksum = "9eae0c7c40da20684548cbc8577b6b7447f7bf4ddbac363df95e3da220e41e72" dependencies = [ "alloy-consensus", "alloy-consensus-any", @@ -471,9 +470,9 @@ dependencies = [ [[package]] name = "alloy-serde" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01e856112bfa0d9adc85bd7c13db03fad0e71d1d6fb4c2010e475b6718108236" +checksum = "c0df1987ed0ff2d0159d76b52e7ddfc4e4fbddacc54d2fbee765e0d14d7c01b5" dependencies = [ "alloy-primitives", "serde", @@ -482,9 +481,9 @@ dependencies = [ [[package]] name = "alloy-signer" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66a4f629da632d5279bbc5731634f0f5c9484ad9c4cad0cd974d9669dc1f46d6" +checksum = "6ff69deedee7232d7ce5330259025b868c5e6a52fa8dffda2c861fb3a5889b24" dependencies = [ "alloy-primitives", "async-trait", @@ -497,9 +496,9 @@ dependencies = [ [[package]] name = "alloy-signer-local" -version = "1.0.42" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "590dcaeb290cdce23155e68af4791d093afc3754b1a331198a25d2d44c5456e8" +checksum = "72cfe0be3ec5a8c1a46b2e5a7047ed41121d360d97f4405bb7c1c784880c86cb" dependencies = [ "alloy-consensus", "alloy-network", @@ -583,9 +582,9 @@ dependencies = [ [[package]] name = "alloy-transport" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fe215a2f9b51d5f1aa5c8cf22c8be8cdb354934de09c9a4e37aefb79b77552fd" +checksum = "be98b07210d24acf5b793c99b759e9a696e4a2e67593aec0487ae3b3e1a2478c" dependencies = [ "alloy-json-rpc", "auto_impl", @@ -606,9 +605,9 @@ dependencies = [ [[package]] name = "alloy-transport-http" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1b37b1a30d23deb3a8746e882c70b384c574d355bc2bbea9ea918b0c31366e" +checksum = "4198a1ee82e562cab85e7f3d5921aab725d9bd154b6ad5017f82df1695877c97" dependencies = [ "alloy-json-rpc", "alloy-transport", @@ -637,9 +636,9 @@ dependencies = [ [[package]] name = "alloy-tx-macros" -version = "1.1.1" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ccf423f6de62e8ce1d6c7a11fb7508ae3536d02e0d68aaeb05c8669337d0937" +checksum = "333544408503f42d7d3792bfc0f7218b643d968a03d2c0ed383ae558fb4a76d0" dependencies = [ "darling 0.21.3", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index 21cf551c48d..5296b5d9b7f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -95,10 +95,15 @@ version = "8.0.1" [workspace.dependencies] account_utils = { path = "common/account_utils" } -alloy-consensus = { version = "=1.0.42", default-features = false } -alloy-primitives = { version = "=1.4.1", default-features = false, features = ["rlp", "getrandom"] } -alloy-rlp = { version = "=0.3.12", default-features = false } -alloy-rpc-types-eth = { version = "=1.0.42", default-features = false, features = ["serde"] } +alloy-consensus = { version = "1", default-features = false } +alloy-dyn-abi = { version = "1", default-features = false } +alloy-json-abi = { version = "1", default-features = false } +alloy-network = { version = "1", default-features = false } +alloy-primitives = { version = "1", default-features = false, features = ["rlp", "getrandom"] } +alloy-provider = { version = "1", default-features = false, features = ["reqwest"] } +alloy-rlp = { version = "0.3", 
default-features = false } +alloy-rpc-types-eth = { version = "1", default-features = false, features = ["serde"] } +alloy-signer-local = { version = "1", default-features = false } anyhow = "1" arbitrary = { version = "1", features = ["derive"] } async-channel = "1.9.0" diff --git a/common/deposit_contract/Cargo.toml b/common/deposit_contract/Cargo.toml index dfaad43719d..53f1bc3e2b2 100644 --- a/common/deposit_contract/Cargo.toml +++ b/common/deposit_contract/Cargo.toml @@ -7,8 +7,8 @@ edition = { workspace = true } build = "build.rs" [dependencies] -alloy-dyn-abi = "1.4" -alloy-json-abi = "1.4" +alloy-dyn-abi = { workspace = true } +alloy-json-abi = { workspace = true } alloy-primitives = { workspace = true } ethereum_ssz = { workspace = true } serde_json = { workspace = true } diff --git a/testing/execution_engine_integration/Cargo.toml b/testing/execution_engine_integration/Cargo.toml index 74bf43e3ae0..78ed266fb25 100644 --- a/testing/execution_engine_integration/Cargo.toml +++ b/testing/execution_engine_integration/Cargo.toml @@ -7,11 +7,11 @@ edition = { workspace = true } portable = ["types/portable"] [dependencies] -alloy-network = "1.0" +alloy-network = { workspace = true } alloy-primitives = { workspace = true } -alloy-provider = "1.0" +alloy-provider = { workspace = true } alloy-rpc-types-eth = { workspace = true } -alloy-signer-local = "1.0" +alloy-signer-local = { workspace = true } async-channel = { workspace = true } deposit_contract = { workspace = true } execution_layer = { workspace = true } From f3fd1f210b2f4ed7d208f81f9a09e1edced3bb3d Mon Sep 17 00:00:00 2001 From: Mac L Date: Tue, 9 Dec 2025 11:13:41 +0400 Subject: [PATCH 59/74] Remove `consensus/types` re-exports (#8540) There are certain crates which we re-export within `types` which creates a fragmented DevEx, where there are various ways to import the same crates. 
```rust // consensus/types/src/lib.rs pub use bls::{ AggregatePublicKey, AggregateSignature, Error as BlsError, Keypair, PUBLIC_KEY_BYTES_LEN, PublicKey, PublicKeyBytes, SIGNATURE_BYTES_LEN, SecretKey, Signature, SignatureBytes, get_withdrawal_credentials, }; pub use context_deserialize::{ContextDeserialize, context_deserialize}; pub use fixed_bytes::FixedBytesExtended; pub use milhouse::{self, List, Vector}; pub use ssz_types::{BitList, BitVector, FixedVector, VariableList, typenum, typenum::Unsigned}; pub use superstruct::superstruct; ``` This PR removes these re-exports and makes it explicit that these types are imported from a non-`consensus/types` crate. Co-Authored-By: Mac L --- Cargo.lock | 64 +++++++++++++++++++ Cargo.toml | 1 + .../src/validator/slashing_protection.rs | 3 +- beacon_node/Cargo.toml | 1 + beacon_node/beacon_chain/Cargo.toml | 3 + .../beacon_chain/src/attester_cache.rs | 5 +- .../beacon_chain/src/beacon_block_streamer.rs | 6 +- beacon_node/beacon_chain/src/beacon_chain.rs | 2 + .../src/beacon_fork_choice_store.rs | 3 +- .../beacon_chain/src/beacon_proposer_cache.rs | 5 +- .../beacon_chain/src/block_times_cache.rs | 2 +- .../beacon_chain/src/block_verification.rs | 3 +- beacon_node/beacon_chain/src/builder.rs | 4 +- .../overflow_lru_cache.rs | 5 +- beacon_node/beacon_chain/src/errors.rs | 3 +- .../beacon_chain/src/graffiti_calculator.rs | 3 +- .../beacon_chain/src/historical_blocks.rs | 3 +- .../src/naive_aggregation_pool.rs | 7 +- .../beacon_chain/src/observed_aggregates.rs | 3 +- .../beacon_chain/src/observed_attesters.rs | 5 +- .../src/observed_block_producers.rs | 3 +- .../beacon_chain/src/observed_slashable.rs | 3 +- .../beacon_chain/src/shuffling_cache.rs | 1 + .../beacon_chain/src/single_attestation.rs | 4 +- .../src/sync_committee_verification.rs | 3 +- beacon_node/beacon_chain/src/test_utils.rs | 11 +++- .../beacon_chain/src/validator_monitor.rs | 6 +- .../src/validator_pubkey_cache.rs | 7 +- .../tests/attestation_production.rs | 5 +- 
.../tests/attestation_verification.rs | 8 ++- .../beacon_chain/tests/blob_verification.rs | 1 + .../beacon_chain/tests/block_verification.rs | 2 + .../beacon_chain/tests/column_verification.rs | 1 + .../beacon_chain/tests/op_verification.rs | 1 + beacon_node/beacon_chain/tests/rewards.rs | 3 +- .../beacon_chain/tests/schema_stability.rs | 3 +- beacon_node/beacon_chain/tests/store_tests.rs | 3 + .../tests/sync_committee_verification.rs | 7 +- beacon_node/beacon_chain/tests/tests.rs | 5 +- .../beacon_chain/tests/validator_monitor.rs | 3 +- beacon_node/builder_client/Cargo.toml | 2 + beacon_node/builder_client/src/lib.rs | 9 ++- beacon_node/execution_layer/Cargo.toml | 2 + beacon_node/execution_layer/src/engine_api.rs | 4 +- .../execution_layer/src/engine_api/http.rs | 8 ++- .../src/engine_api/json_structures.rs | 4 +- beacon_node/execution_layer/src/lib.rs | 3 +- .../test_utils/execution_block_generator.rs | 7 +- .../src/test_utils/mock_builder.rs | 9 +-- .../src/test_utils/mock_execution_layer.rs | 3 +- .../execution_layer/src/versioned_hashes.rs | 3 +- beacon_node/genesis/Cargo.toml | 1 + beacon_node/genesis/src/interop.rs | 6 +- beacon_node/http_api/Cargo.toml | 3 + beacon_node/http_api/src/block_id.rs | 5 +- beacon_node/http_api/src/lib.rs | 5 +- beacon_node/http_api/src/validator/mod.rs | 3 +- .../tests/broadcast_validation_tests.rs | 5 +- beacon_node/http_api/tests/fork_tests.rs | 6 +- .../http_api/tests/interactive_tests.rs | 5 +- beacon_node/http_api/tests/tests.rs | 8 ++- beacon_node/lighthouse_network/Cargo.toml | 3 + .../lighthouse_network/src/discovery/mod.rs | 3 +- .../src/peer_manager/mod.rs | 3 +- .../lighthouse_network/src/rpc/codec.rs | 7 +- .../lighthouse_network/src/rpc/protocol.rs | 3 +- .../lighthouse_network/src/types/mod.rs | 3 +- .../lighthouse_network/src/types/topics.rs | 3 +- .../lighthouse_network/tests/common.rs | 6 +- .../lighthouse_network/tests/rpc_tests.rs | 6 +- beacon_node/network/Cargo.toml | 2 + beacon_node/network/src/service.rs 
| 3 +- beacon_node/network/src/status.rs | 3 +- .../src/sync/block_lookups/parent_chain.rs | 3 +- beacon_node/operation_pool/Cargo.toml | 4 ++ beacon_node/operation_pool/src/attestation.rs | 3 +- .../operation_pool/src/attestation_storage.rs | 8 ++- beacon_node/operation_pool/src/lib.rs | 4 +- beacon_node/operation_pool/src/persistence.rs | 1 + .../operation_pool/src/reward_cache.rs | 5 +- beacon_node/src/config.rs | 3 +- beacon_node/store/Cargo.toml | 3 + beacon_node/store/src/chunked_vector.rs | 3 + beacon_node/store/src/config.rs | 2 +- .../store/src/database/leveldb_impl.rs | 3 +- beacon_node/store/src/errors.rs | 2 +- beacon_node/store/src/hdiff.rs | 3 +- beacon_node/store/src/hot_cold_store.rs | 2 + beacon_node/store/src/iter.rs | 4 +- beacon_node/store/src/partial_beacon_state.rs | 5 +- common/account_utils/Cargo.toml | 1 + .../src/validator_definitions.rs | 3 +- common/deposit_contract/Cargo.toml | 1 + common/deposit_contract/src/lib.rs | 9 ++- common/eip_3076/Cargo.toml | 2 + common/eip_3076/src/lib.rs | 5 +- common/eth2/Cargo.toml | 2 + common/eth2/src/lib.rs | 2 + common/eth2/src/lighthouse_vc/http_client.rs | 1 + common/eth2/src/lighthouse_vc/std_types.rs | 3 +- common/eth2/src/lighthouse_vc/types.rs | 1 + common/eth2/src/types.rs | 3 + common/eth2_network_config/Cargo.toml | 1 + common/eth2_network_config/src/lib.rs | 3 +- common/validator_dir/src/builder.rs | 4 +- common/validator_dir/src/validator_dir.rs | 3 +- common/validator_dir/tests/tests.rs | 3 +- consensus/fork_choice/Cargo.toml | 1 + consensus/fork_choice/src/fork_choice.rs | 3 +- consensus/fork_choice/tests/tests.rs | 7 +- consensus/proto_array/Cargo.toml | 1 + .../src/fork_choice_test_definition.rs | 5 +- .../fork_choice_test_definition/no_votes.rs | 2 +- consensus/proto_array/src/proto_array.rs | 5 +- .../src/proto_array_fork_choice.rs | 8 ++- consensus/state_processing/Cargo.toml | 3 + .../src/common/get_attesting_indices.rs | 5 +- .../src/common/slash_validator.rs | 1 + 
consensus/state_processing/src/epoch_cache.rs | 5 +- consensus/state_processing/src/genesis.rs | 1 + .../src/per_block_processing.rs | 1 + .../altair/sync_committee.rs | 6 +- .../process_operations.rs | 3 +- .../per_block_processing/signature_sets.rs | 15 +++-- .../src/per_block_processing/tests.rs | 3 + .../per_block_processing/verify_deposit.rs | 1 + .../altair/participation_flag_updates.rs | 2 +- .../epoch_processing_summary.rs | 5 +- .../src/per_epoch_processing/errors.rs | 3 +- .../historical_roots_update.rs | 2 +- .../justification_and_finalization_state.rs | 3 +- .../src/per_epoch_processing/resets.rs | 3 +- .../src/per_epoch_processing/single_pass.rs | 7 +- .../src/per_epoch_processing/slashings.rs | 3 +- .../src/per_slot_processing.rs | 1 + .../state_processing/src/state_advance.rs | 3 +- .../state_processing/src/upgrade/altair.rs | 3 +- .../state_processing/src/upgrade/capella.rs | 3 +- .../state_processing/src/upgrade/fulu.rs | 5 +- consensus/types/Cargo.toml | 1 + consensus/types/benches/benches.rs | 3 +- consensus/types/src/block/beacon_block.rs | 3 +- consensus/types/src/core/eth_spec.rs | 4 +- consensus/types/src/core/preset.rs | 2 +- consensus/types/src/deposit/deposit.rs | 3 +- consensus/types/src/lib.rs | 11 ---- .../src/light_client/light_client_update.rs | 4 +- consensus/types/src/state/beacon_state.rs | 3 +- consensus/types/src/state/iter.rs | 1 + .../sync_committee/sync_selection_proof.rs | 5 +- .../src/sync_committee/sync_subnet_id.rs | 2 +- .../src/test_utils/test_random/bitfield.rs | 3 +- .../src/test_utils/test_random/test_random.rs | 3 +- lcli/Cargo.toml | 1 + lcli/src/generate_bootnode_enr.rs | 3 +- lighthouse/tests/account_manager.rs | 2 +- lighthouse/tests/validator_manager.rs | 1 + slasher/Cargo.toml | 3 + slasher/src/attester_record.rs | 4 +- slasher/src/database.rs | 9 ++- slasher/src/test_utils.rs | 9 +-- testing/ef_tests/Cargo.toml | 3 + .../src/cases/merkle_proof_validity.rs | 5 +- testing/ef_tests/src/cases/ssz_generic.rs | 6 +- 
testing/ef_tests/tests/tests.rs | 1 + .../execution_engine_integration/Cargo.toml | 3 + .../src/test_rig.rs | 5 +- .../src/transactions.rs | 4 +- testing/simulator/Cargo.toml | 1 + testing/simulator/src/checks.rs | 3 +- testing/state_transition_vectors/Cargo.toml | 2 + testing/state_transition_vectors/src/main.rs | 8 +-- testing/web3signer_tests/Cargo.toml | 3 + testing/web3signer_tests/src/lib.rs | 3 + .../beacon_node_fallback/Cargo.toml | 1 + .../beacon_node_fallback/src/lib.rs | 3 +- .../doppelganger_service/Cargo.toml | 1 + .../doppelganger_service/src/lib.rs | 3 +- validator_client/http_api/Cargo.toml | 3 + validator_client/http_api/src/keystores.rs | 3 +- validator_client/http_api/src/lib.rs | 5 +- validator_client/http_api/src/remotekeys.rs | 3 +- validator_client/http_api/src/test_utils.rs | 1 + validator_client/http_api/src/tests.rs | 1 + .../http_api/src/tests/keystores.rs | 4 ++ .../initialized_validators/src/lib.rs | 3 +- .../lighthouse_validator_store/Cargo.toml | 1 + .../lighthouse_validator_store/src/lib.rs | 7 +- validator_client/signing_method/Cargo.toml | 1 + validator_client/signing_method/src/lib.rs | 1 + .../signing_method/src/web3signer.rs | 1 + .../slashing_protection/Cargo.toml | 2 + .../src/attestation_tests.rs | 3 +- .../src/bin/test_generator.rs | 3 +- .../slashing_protection/src/block_tests.rs | 3 +- .../src/extra_interchange_tests.rs | 2 +- .../src/interchange_test.rs | 4 +- .../slashing_protection/src/lib.rs | 5 +- .../src/slashing_database.rs | 3 +- .../slashing_protection/tests/migration.rs | 3 +- .../validator_services/src/block_service.rs | 3 +- .../validator_services/src/duties_service.rs | 3 +- .../validator_services/src/sync.rs | 3 +- .../src/sync_committee_service.rs | 5 +- validator_client/validator_store/Cargo.toml | 1 + validator_client/validator_store/src/lib.rs | 7 +- validator_manager/Cargo.toml | 1 + validator_manager/src/common.rs | 1 + validator_manager/src/create_validators.rs | 2 + 
validator_manager/src/delete_validators.rs | 2 +- validator_manager/src/exit_validators.rs | 3 +- validator_manager/src/list_validators.rs | 3 +- validator_manager/src/move_validators.rs | 3 +- 213 files changed, 556 insertions(+), 259 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2bb7b6d81b5..413596beeb1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -34,6 +34,7 @@ dependencies = [ name = "account_utils" version = "0.1.0" dependencies = [ + "bls", "eth2_keystore", "eth2_wallet", "filesystem", @@ -1237,6 +1238,7 @@ dependencies = [ "ethereum_ssz", "ethereum_ssz_derive", "execution_layer", + "fixed_bytes", "fork_choice", "futures", "genesis", @@ -1251,6 +1253,7 @@ dependencies = [ "maplit", "merkle_proof", "metrics", + "milhouse", "mockall", "mockall_double", "once_cell", @@ -1279,6 +1282,7 @@ dependencies = [ "tracing", "tree_hash", "tree_hash_derive", + "typenum", "types", "zstd 0.13.3", ] @@ -1289,6 +1293,7 @@ version = "8.0.1" dependencies = [ "account_utils", "beacon_chain", + "bls", "clap", "clap_utils", "client", @@ -1319,6 +1324,7 @@ dependencies = [ name = "beacon_node_fallback" version = "0.1.0" dependencies = [ + "bls", "clap", "eth2", "futures", @@ -1591,6 +1597,8 @@ dependencies = [ name = "builder_client" version = "0.1.0" dependencies = [ + "bls", + "context_deserialize", "eth2", "ethereum_ssz", "lighthouse_version", @@ -2545,6 +2553,7 @@ dependencies = [ "alloy-dyn-abi", "alloy-json-abi", "alloy-primitives", + "bls", "ethereum_ssz", "hex", "reqwest", @@ -2755,6 +2764,7 @@ name = "doppelganger_service" version = "0.1.0" dependencies = [ "beacon_node_fallback", + "bls", "environment", "eth2", "futures", @@ -2863,16 +2873,19 @@ dependencies = [ "hex", "kzg", "logging", + "milhouse", "rayon", "serde", "serde_json", "serde_repr", "serde_yaml", "snap", + "ssz_types", "state_processing", "swap_or_not_shuffle", "tree_hash", "tree_hash_derive", + "typenum", "types", ] @@ -2900,7 +2913,9 @@ name = "eip_3076" version = "0.1.0" dependencies = [ 
"arbitrary", + "bls", "ethereum_serde_utils", + "fixed_bytes", "serde", "serde_json", "tempfile", @@ -3124,6 +3139,7 @@ dependencies = [ name = "eth2" version = "0.1.0" dependencies = [ + "bls", "context_deserialize", "educe", "eip_3076", @@ -3143,6 +3159,7 @@ dependencies = [ "serde", "serde_json", "ssz_types", + "superstruct", "test_random_derive", "tokio", "types", @@ -3212,6 +3229,7 @@ dependencies = [ "discv5", "eth2_config", "ethereum_ssz", + "fixed_bytes", "kzg", "pretty_reqwest_error", "reqwest", @@ -3352,8 +3370,10 @@ dependencies = [ "alloy-rpc-types-eth", "alloy-signer-local", "async-channel 1.9.0", + "bls", "deposit_contract", "execution_layer", + "fixed_bytes", "fork_choice", "futures", "hex", @@ -3365,6 +3385,7 @@ dependencies = [ "task_executor", "tempfile", "tokio", + "typenum", "types", ] @@ -3377,6 +3398,7 @@ dependencies = [ "alloy-rlp", "alloy-rpc-types-eth", "arc-swap", + "bls", "builder_client", "bytes", "eth2", @@ -3415,6 +3437,7 @@ dependencies = [ "tree_hash", "tree_hash_derive", "triehash", + "typenum", "types", "warp", "zeroize", @@ -3588,6 +3611,7 @@ dependencies = [ "beacon_chain", "ethereum_ssz", "ethereum_ssz_derive", + "fixed_bytes", "logging", "metrics", "proto_array", @@ -3778,6 +3802,7 @@ dependencies = [ name = "genesis" version = "0.2.0" dependencies = [ + "bls", "ethereum_hashing", "ethereum_ssz", "int_to_bytes", @@ -4223,14 +4248,17 @@ version = "0.1.0" dependencies = [ "beacon_chain", "beacon_processor", + "bls", "bs58 0.4.0", "bytes", + "context_deserialize", "directory", "either", "eth2", "ethereum_serde_utils", "ethereum_ssz", "execution_layer", + "fixed_bytes", "futures", "genesis", "health_metrics", @@ -4937,6 +4965,7 @@ dependencies = [ "ethereum_hashing", "ethereum_ssz", "execution_layer", + "fixed_bytes", "hex", "lighthouse_network", "lighthouse_version", @@ -5486,6 +5515,7 @@ dependencies = [ "alloy-primitives", "alloy-rlp", "async-channel 1.9.0", + "bls", "bytes", "delay_map", "directory", @@ -5495,6 +5525,7 @@ 
dependencies = [ "eth2", "ethereum_ssz", "ethereum_ssz_derive", + "fixed_bytes", "fnv", "futures", "hex", @@ -5527,6 +5558,7 @@ dependencies = [ "tokio-util", "tracing", "tracing-subscriber", + "typenum", "types", "unsigned-varint 0.8.0", ] @@ -5541,6 +5573,7 @@ version = "0.1.0" dependencies = [ "account_utils", "beacon_node_fallback", + "bls", "doppelganger_service", "either", "environment", @@ -6208,6 +6241,7 @@ dependencies = [ "eth2_network_config", "ethereum_ssz", "execution_layer", + "fixed_bytes", "fnv", "futures", "genesis", @@ -6240,6 +6274,7 @@ dependencies = [ "tokio-stream", "tracing", "tracing-subscriber", + "typenum", "types", ] @@ -6644,9 +6679,11 @@ version = "0.2.0" dependencies = [ "beacon_chain", "bitvec", + "bls", "educe", "ethereum_ssz", "ethereum_ssz_derive", + "fixed_bytes", "itertools 0.10.5", "maplit", "metrics", @@ -6656,7 +6693,9 @@ dependencies = [ "serde", "state_processing", "store", + "superstruct", "tokio", + "typenum", "types", ] @@ -7154,6 +7193,7 @@ version = "0.2.0" dependencies = [ "ethereum_ssz", "ethereum_ssz_derive", + "fixed_bytes", "safe_arith", "serde", "serde_yaml", @@ -8325,6 +8365,7 @@ dependencies = [ name = "signing_method" version = "0.1.0" dependencies = [ + "bls", "eth2_keystore", "ethereum_serde_utils", "lockfile", @@ -8380,6 +8421,7 @@ dependencies = [ "tokio", "tracing", "tracing-subscriber", + "typenum", "types", ] @@ -8394,11 +8436,13 @@ name = "slasher" version = "0.1.0" dependencies = [ "bincode", + "bls", "byteorder", "educe", "ethereum_ssz", "ethereum_ssz_derive", "filesystem", + "fixed_bytes", "flate2", "libmdbx", "lmdb-rkv", @@ -8418,6 +8462,7 @@ dependencies = [ "tracing", "tree_hash", "tree_hash_derive", + "typenum", "types", ] @@ -8443,9 +8488,11 @@ name = "slashing_protection" version = "0.1.0" dependencies = [ "arbitrary", + "bls", "eip_3076", "ethereum_serde_utils", "filesystem", + "fixed_bytes", "r2d2", "r2d2_sqlite", "rayon", @@ -8571,11 +8618,13 @@ dependencies = [ "ethereum_hashing", 
"ethereum_ssz", "ethereum_ssz_derive", + "fixed_bytes", "int_to_bytes", "integer-sqrt", "itertools 0.10.5", "merkle_proof", "metrics", + "milhouse", "rand 0.9.2", "rayon", "safe_arith", @@ -8585,6 +8634,7 @@ dependencies = [ "tokio", "tracing", "tree_hash", + "typenum", "types", ] @@ -8593,7 +8643,9 @@ name = "state_transition_vectors" version = "0.1.0" dependencies = [ "beacon_chain", + "bls", "ethereum_ssz", + "fixed_bytes", "state_processing", "tokio", "types", @@ -8616,11 +8668,13 @@ dependencies = [ "directory", "ethereum_ssz", "ethereum_ssz_derive", + "fixed_bytes", "itertools 0.10.5", "leveldb", "logging", "lru 0.12.5", "metrics", + "milhouse", "parking_lot", "rand 0.9.2", "redb", @@ -8634,6 +8688,7 @@ dependencies = [ "tempfile", "tracing", "tracing-subscriber", + "typenum", "types", "xdelta3", "zstd 0.13.3", @@ -9546,6 +9601,7 @@ dependencies = [ "tracing", "tree_hash", "tree_hash_derive", + "typenum", ] [[package]] @@ -9769,6 +9825,7 @@ dependencies = [ "eth2_keystore", "ethereum_serde_utils", "filesystem", + "fixed_bytes", "futures", "graffiti_file", "health_metrics", @@ -9785,6 +9842,7 @@ dependencies = [ "signing_method", "slashing_protection", "slot_clock", + "ssz_types", "sysinfo", "system_health", "task_executor", @@ -9792,6 +9850,7 @@ dependencies = [ "tokio", "tokio-stream", "tracing", + "typenum", "types", "url", "validator_dir", @@ -9829,6 +9888,7 @@ version = "0.1.0" dependencies = [ "account_utils", "beacon_chain", + "bls", "clap", "clap_utils", "educe", @@ -9885,6 +9945,7 @@ dependencies = [ name = "validator_store" version = "0.1.0" dependencies = [ + "bls", "eth2", "slashing_protection", "types", @@ -10126,10 +10187,12 @@ version = "0.1.0" dependencies = [ "account_utils", "async-channel 1.9.0", + "bls", "environment", "eth2", "eth2_keystore", "eth2_network_config", + "fixed_bytes", "futures", "initialized_validators", "lighthouse_validator_store", @@ -10141,6 +10204,7 @@ dependencies = [ "serde_yaml", "slashing_protection", "slot_clock", + 
"ssz_types", "task_executor", "tempfile", "tokio", diff --git a/Cargo.toml b/Cargo.toml index 5296b5d9b7f..aea8fd1b8d7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -259,6 +259,7 @@ tracing-opentelemetry = "0.31.0" tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } tree_hash = "0.12.0" tree_hash_derive = "0.12.0" +typenum = "1" types = { path = "consensus/types" } url = "2" uuid = { version = "0.8", features = ["serde", "v4"] } diff --git a/account_manager/src/validator/slashing_protection.rs b/account_manager/src/validator/slashing_protection.rs index 18064b990f3..96098ccbbd1 100644 --- a/account_manager/src/validator/slashing_protection.rs +++ b/account_manager/src/validator/slashing_protection.rs @@ -1,3 +1,4 @@ +use bls::PublicKeyBytes; use clap::{Arg, ArgAction, ArgMatches, Command}; use environment::Environment; use slashing_protection::{ @@ -7,7 +8,7 @@ use slashing_protection::{ use std::fs::File; use std::path::PathBuf; use std::str::FromStr; -use types::{Epoch, EthSpec, PublicKeyBytes, Slot}; +use types::{Epoch, EthSpec, Slot}; pub const CMD: &str = "slashing-protection"; pub const IMPORT_CMD: &str = "import"; diff --git a/beacon_node/Cargo.toml b/beacon_node/Cargo.toml index fd013559785..5352814dd5d 100644 --- a/beacon_node/Cargo.toml +++ b/beacon_node/Cargo.toml @@ -20,6 +20,7 @@ testing = [] # Enables testing-only CLI flags [dependencies] account_utils = { workspace = true } beacon_chain = { workspace = true } +bls = { workspace = true } clap = { workspace = true } clap_utils = { workspace = true } client = { path = "client" } diff --git a/beacon_node/beacon_chain/Cargo.toml b/beacon_node/beacon_chain/Cargo.toml index b42585c2a1e..734cfdf32bb 100644 --- a/beacon_node/beacon_chain/Cargo.toml +++ b/beacon_node/beacon_chain/Cargo.toml @@ -26,6 +26,7 @@ ethereum_serde_utils = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } execution_layer = { workspace = true } +fixed_bytes = { 
workspace = true } fork_choice = { workspace = true } futures = { workspace = true } genesis = { workspace = true } @@ -39,6 +40,7 @@ logging = { workspace = true } lru = { workspace = true } merkle_proof = { workspace = true } metrics = { workspace = true } +milhouse = { workspace = true } once_cell = { workspace = true } oneshot_broadcast = { path = "../../common/oneshot_broadcast/" } operation_pool = { workspace = true } @@ -65,6 +67,7 @@ tokio-stream = { workspace = true } tracing = { workspace = true } tree_hash = { workspace = true } tree_hash_derive = { workspace = true } +typenum = { workspace = true } types = { workspace = true } zstd = { workspace = true } diff --git a/beacon_node/beacon_chain/src/attester_cache.rs b/beacon_node/beacon_chain/src/attester_cache.rs index beaa1e581cc..26a33898129 100644 --- a/beacon_node/beacon_chain/src/attester_cache.rs +++ b/beacon_node/beacon_chain/src/attester_cache.rs @@ -10,13 +10,14 @@ //! and penalties can be computed and the `state.current_justified_checkpoint` can be updated. 
use crate::{BeaconChain, BeaconChainError, BeaconChainTypes}; +use fixed_bytes::FixedBytesExtended; use parking_lot::RwLock; use state_processing::state_advance::{Error as StateAdvanceError, partial_state_advance}; use std::collections::HashMap; use std::ops::Range; use types::{ - BeaconState, BeaconStateError, ChainSpec, Checkpoint, Epoch, EthSpec, FixedBytesExtended, - Hash256, RelativeEpoch, Slot, + BeaconState, BeaconStateError, ChainSpec, Checkpoint, Epoch, EthSpec, Hash256, RelativeEpoch, + Slot, attestation::AttestationError, beacon_state::{ compute_committee_index_in_epoch, compute_committee_range_in_epoch, epoch_committee_count, diff --git a/beacon_node/beacon_chain/src/beacon_block_streamer.rs b/beacon_node/beacon_chain/src/beacon_block_streamer.rs index c816a0b29f3..7b3bb03e568 100644 --- a/beacon_node/beacon_chain/src/beacon_block_streamer.rs +++ b/beacon_node/beacon_chain/src/beacon_block_streamer.rs @@ -685,13 +685,13 @@ impl From for BeaconChainError { mod tests { use crate::beacon_block_streamer::{BeaconBlockStreamer, CheckCaches}; use crate::test_utils::{BeaconChainHarness, EphemeralHarnessType, test_spec}; + use bls::Keypair; use execution_layer::test_utils::Block; + use fixed_bytes::FixedBytesExtended; use std::sync::Arc; use std::sync::LazyLock; use tokio::sync::mpsc; - use types::{ - ChainSpec, Epoch, EthSpec, FixedBytesExtended, Hash256, Keypair, MinimalEthSpec, Slot, - }; + use types::{ChainSpec, Epoch, EthSpec, Hash256, MinimalEthSpec, Slot}; const VALIDATOR_COUNT: usize = 48; diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index adc400b1c17..25b2aa30cb3 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -74,6 +74,7 @@ use crate::{ AvailabilityPendingExecutedBlock, BeaconChainError, BeaconForkChoiceStore, BeaconSnapshot, CachedHead, metrics, }; +use bls::{PublicKey, PublicKeyBytes, Signature}; use 
eth2::beacon_response::ForkVersionedResponse; use eth2::types::{ EventKind, SseBlobSidecar, SseBlock, SseDataColumnSidecar, SseExtendedPayloadAttributes, @@ -82,6 +83,7 @@ use execution_layer::{ BlockProposalContents, BlockProposalContentsType, BuilderParams, ChainHealth, ExecutionLayer, FailedCondition, PayloadAttributes, PayloadStatus, }; +use fixed_bytes::FixedBytesExtended; use fork_choice::{ AttestationFromBlock, ExecutionStatus, ForkChoice, ForkchoiceUpdateParameters, InvalidationOperation, PayloadVerificationStatus, ResetPayloadStatuses, diff --git a/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs b/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs index 0c203009bbe..60487f9c469 100644 --- a/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs +++ b/beacon_node/beacon_chain/src/beacon_fork_choice_store.rs @@ -6,6 +6,7 @@ use crate::{BeaconSnapshot, metrics}; use educe::Educe; +use fixed_bytes::FixedBytesExtended; use fork_choice::ForkChoiceStore; use proto_array::JustifiedBalances; use safe_arith::ArithError; @@ -17,7 +18,7 @@ use store::{Error as StoreError, HotColdDB, ItemStore}; use superstruct::superstruct; use types::{ AbstractExecPayload, BeaconBlockRef, BeaconState, BeaconStateError, Checkpoint, Epoch, EthSpec, - FixedBytesExtended, Hash256, Slot, + Hash256, Slot, }; #[derive(Debug)] diff --git a/beacon_node/beacon_chain/src/beacon_proposer_cache.rs b/beacon_node/beacon_chain/src/beacon_proposer_cache.rs index bd6460eba7d..a923d657a86 100644 --- a/beacon_node/beacon_chain/src/beacon_proposer_cache.rs +++ b/beacon_node/beacon_chain/src/beacon_proposer_cache.rs @@ -18,10 +18,9 @@ use state_processing::state_advance::partial_state_advance; use std::num::NonZeroUsize; use std::sync::Arc; use tracing::instrument; +use typenum::Unsigned; use types::non_zero_usize::new_non_zero_usize; -use types::{ - BeaconState, BeaconStateError, ChainSpec, Epoch, EthSpec, Fork, Hash256, Slot, Unsigned, -}; +use types::{BeaconState, BeaconStateError, 
ChainSpec, Epoch, EthSpec, Fork, Hash256, Slot}; /// The number of sets of proposer indices that should be cached. const CACHE_SIZE: NonZeroUsize = new_non_zero_usize(16); diff --git a/beacon_node/beacon_chain/src/block_times_cache.rs b/beacon_node/beacon_chain/src/block_times_cache.rs index bd1adb7e407..e8d4c75dcee 100644 --- a/beacon_node/beacon_chain/src/block_times_cache.rs +++ b/beacon_node/beacon_chain/src/block_times_cache.rs @@ -294,7 +294,7 @@ impl BlockTimesCache { #[cfg(test)] mod test { use super::*; - use types::FixedBytesExtended; + use fixed_bytes::FixedBytesExtended; #[test] fn observed_time_uses_minimum() { diff --git a/beacon_node/beacon_chain/src/block_verification.rs b/beacon_node/beacon_chain/src/block_verification.rs index 374f1e2b360..bca8d2bc57b 100644 --- a/beacon_node/beacon_chain/src/block_verification.rs +++ b/beacon_node/beacon_chain/src/block_verification.rs @@ -66,6 +66,7 @@ use crate::{ beacon_chain::{BeaconForkChoice, ForkChoiceError}, metrics, }; +use bls::{PublicKey, PublicKeyBytes}; use educe::Educe; use eth2::types::{BlockGossip, EventKind}; use execution_layer::PayloadStatus; @@ -97,7 +98,7 @@ use tracing::{Instrument, Span, debug, debug_span, error, info_span, instrument} use types::{ BeaconBlockRef, BeaconState, BeaconStateError, BlobsList, ChainSpec, DataColumnSidecarList, Epoch, EthSpec, ExecutionBlockHash, FullPayload, Hash256, InconsistentFork, KzgProofs, - PublicKey, PublicKeyBytes, RelativeEpoch, SignedBeaconBlock, SignedBeaconBlockHeader, Slot, + RelativeEpoch, SignedBeaconBlock, SignedBeaconBlockHeader, Slot, data_column_sidecar::DataColumnSidecarError, }; diff --git a/beacon_node/beacon_chain/src/builder.rs b/beacon_node/beacon_chain/src/builder.rs index ef438b16e0f..58dbf1c35e8 100644 --- a/beacon_node/beacon_chain/src/builder.rs +++ b/beacon_node/beacon_chain/src/builder.rs @@ -21,7 +21,9 @@ use crate::validator_pubkey_cache::ValidatorPubkeyCache; use crate::{ BeaconChain, BeaconChainTypes, BeaconForkChoiceStore, 
BeaconSnapshot, ServerSentEventHandler, }; +use bls::Signature; use execution_layer::ExecutionLayer; +use fixed_bytes::FixedBytesExtended; use fork_choice::{ForkChoice, ResetPayloadStatuses}; use futures::channel::mpsc::Sender; use kzg::Kzg; @@ -43,7 +45,7 @@ use tracing::{debug, error, info}; use types::data_column_custody_group::CustodyIndex; use types::{ BeaconBlock, BeaconState, BlobSidecarList, ChainSpec, ColumnIndex, DataColumnSidecarList, - Epoch, EthSpec, FixedBytesExtended, Hash256, Signature, SignedBeaconBlock, Slot, + Epoch, EthSpec, Hash256, SignedBeaconBlock, Slot, }; /// An empty struct used to "witness" all the `BeaconChainTypes` traits. It has no user-facing diff --git a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs index e7c536c0d85..776fb50f619 100644 --- a/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs +++ b/beacon_node/beacon_chain/src/data_availability_checker/overflow_lru_cache.rs @@ -1261,15 +1261,14 @@ mod pending_components_tests { use crate::PayloadVerificationOutcome; use crate::block_verification_types::BlockImportData; use crate::test_utils::{NumBlobs, generate_rand_block_and_blobs, test_spec}; + use fixed_bytes::FixedBytesExtended; use fork_choice::PayloadVerificationStatus; use kzg::KzgCommitment; use rand::SeedableRng; use rand::rngs::StdRng; use state_processing::ConsensusContext; use types::test_utils::TestRandom; - use types::{ - BeaconState, FixedBytesExtended, ForkName, MainnetEthSpec, SignedBeaconBlock, Slot, - }; + use types::{BeaconState, ForkName, MainnetEthSpec, SignedBeaconBlock, Slot}; type E = MainnetEthSpec; diff --git a/beacon_node/beacon_chain/src/errors.rs b/beacon_node/beacon_chain/src/errors.rs index 9dc6e897fb1..8f615baab46 100644 --- a/beacon_node/beacon_chain/src/errors.rs +++ b/beacon_node/beacon_chain/src/errors.rs @@ -9,9 +9,11 @@ use crate::observed_aggregates::Error as 
ObservedAttestationsError; use crate::observed_attesters::Error as ObservedAttestersError; use crate::observed_block_producers::Error as ObservedBlockProducersError; use crate::observed_data_sidecars::Error as ObservedDataSidecarsError; +use bls::PublicKeyBytes; use execution_layer::PayloadStatus; use fork_choice::ExecutionStatus; use futures::channel::mpsc::TrySendError; +use milhouse::Error as MilhouseError; use operation_pool::OpPoolError; use safe_arith::ArithError; use ssz_types::Error as SszTypesError; @@ -28,7 +30,6 @@ use state_processing::{ }; use task_executor::ShutdownReason; use tokio::task::JoinError; -use types::milhouse::Error as MilhouseError; use types::*; macro_rules! easy_from_to { diff --git a/beacon_node/beacon_chain/src/graffiti_calculator.rs b/beacon_node/beacon_chain/src/graffiti_calculator.rs index e8110d14cdc..56808e0e67e 100644 --- a/beacon_node/beacon_chain/src/graffiti_calculator.rs +++ b/beacon_node/beacon_chain/src/graffiti_calculator.rs @@ -225,13 +225,14 @@ async fn engine_version_cache_refresh_service( mod tests { use crate::ChainConfig; use crate::test_utils::{BeaconChainHarness, EphemeralHarnessType, test_spec}; + use bls::Keypair; use execution_layer::EngineCapabilities; use execution_layer::test_utils::{DEFAULT_CLIENT_VERSION, DEFAULT_ENGINE_CAPABILITIES}; use std::sync::Arc; use std::sync::LazyLock; use std::time::Duration; use tracing::info; - use types::{ChainSpec, GRAFFITI_BYTES_LEN, Graffiti, Keypair, MinimalEthSpec}; + use types::{ChainSpec, GRAFFITI_BYTES_LEN, Graffiti, MinimalEthSpec}; const VALIDATOR_COUNT: usize = 48; /// A cached set of keys. 
diff --git a/beacon_node/beacon_chain/src/historical_blocks.rs b/beacon_node/beacon_chain/src/historical_blocks.rs index e4040eea6b0..91b0f12cbb3 100644 --- a/beacon_node/beacon_chain/src/historical_blocks.rs +++ b/beacon_node/beacon_chain/src/historical_blocks.rs @@ -1,5 +1,6 @@ use crate::data_availability_checker::{AvailableBlock, AvailableBlockData}; use crate::{BeaconChain, BeaconChainTypes, WhenSlotSkipped, metrics}; +use fixed_bytes::FixedBytesExtended; use itertools::Itertools; use state_processing::{ per_block_processing::ParallelSignatureSets, @@ -12,7 +13,7 @@ use store::metadata::DataColumnInfo; use store::{AnchorInfo, BlobInfo, DBColumn, Error as StoreError, KeyValueStore, KeyValueStoreOp}; use strum::IntoStaticStr; use tracing::{debug, instrument}; -use types::{FixedBytesExtended, Hash256, Slot}; +use types::{Hash256, Slot}; /// Use a longer timeout on the pubkey cache. /// diff --git a/beacon_node/beacon_chain/src/naive_aggregation_pool.rs b/beacon_node/beacon_chain/src/naive_aggregation_pool.rs index 4c4478d17e6..beefc2d678b 100644 --- a/beacon_node/beacon_chain/src/naive_aggregation_pool.rs +++ b/beacon_node/beacon_chain/src/naive_aggregation_pool.rs @@ -577,12 +577,11 @@ where #[cfg(test)] mod tests { use super::*; - use ssz_types::BitList; - use store::BitVector; + use fixed_bytes::FixedBytesExtended; + use ssz_types::{BitList, BitVector}; use tree_hash::TreeHash; use types::{ - Attestation, AttestationBase, AttestationElectra, FixedBytesExtended, Fork, Hash256, - SyncCommitteeMessage, + Attestation, AttestationBase, AttestationElectra, Fork, Hash256, SyncCommitteeMessage, test_utils::{generate_deterministic_keypair, test_random_instance}, }; diff --git a/beacon_node/beacon_chain/src/observed_aggregates.rs b/beacon_node/beacon_chain/src/observed_aggregates.rs index f6f62e1b73b..b2c5cb4b38a 100644 --- a/beacon_node/beacon_chain/src/observed_aggregates.rs +++ b/beacon_node/beacon_chain/src/observed_aggregates.rs @@ -473,7 +473,8 @@ where 
#[cfg(not(debug_assertions))] mod tests { use super::*; - use types::{AttestationBase, FixedBytesExtended, Hash256, test_utils::test_random_instance}; + use fixed_bytes::FixedBytesExtended; + use types::{AttestationBase, Hash256, test_utils::test_random_instance}; type E = types::MainnetEthSpec; diff --git a/beacon_node/beacon_chain/src/observed_attesters.rs b/beacon_node/beacon_chain/src/observed_attesters.rs index 34d68fe3ac0..d5433f49d1b 100644 --- a/beacon_node/beacon_chain/src/observed_attesters.rs +++ b/beacon_node/beacon_chain/src/observed_attesters.rs @@ -19,8 +19,9 @@ use bitvec::vec::BitVec; use std::collections::{HashMap, HashSet}; use std::hash::Hash; use std::marker::PhantomData; +use typenum::Unsigned; use types::slot_data::SlotData; -use types::{Epoch, EthSpec, Hash256, Slot, Unsigned}; +use types::{Epoch, EthSpec, Hash256, Slot}; /// The maximum capacity of the `AutoPruningEpochContainer`. /// @@ -619,7 +620,7 @@ impl SlotSubcommitteeIndex { #[cfg(test)] mod tests { use super::*; - use types::FixedBytesExtended; + use fixed_bytes::FixedBytesExtended; type E = types::MainnetEthSpec; diff --git a/beacon_node/beacon_chain/src/observed_block_producers.rs b/beacon_node/beacon_chain/src/observed_block_producers.rs index 096c8bff77d..b740735ac41 100644 --- a/beacon_node/beacon_chain/src/observed_block_producers.rs +++ b/beacon_node/beacon_chain/src/observed_block_producers.rs @@ -4,7 +4,8 @@ use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; use std::marker::PhantomData; -use types::{BeaconBlockRef, Epoch, EthSpec, Hash256, Slot, Unsigned}; +use typenum::Unsigned; +use types::{BeaconBlockRef, Epoch, EthSpec, Hash256, Slot}; #[derive(Debug, PartialEq)] pub enum Error { diff --git a/beacon_node/beacon_chain/src/observed_slashable.rs b/beacon_node/beacon_chain/src/observed_slashable.rs index 001a0d4a867..704d605436b 100644 --- a/beacon_node/beacon_chain/src/observed_slashable.rs +++ 
b/beacon_node/beacon_chain/src/observed_slashable.rs @@ -5,7 +5,8 @@ use crate::observed_block_producers::Error; use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; use std::marker::PhantomData; -use types::{EthSpec, Hash256, Slot, Unsigned}; +use typenum::Unsigned; +use types::{EthSpec, Hash256, Slot}; #[derive(Eq, Hash, PartialEq, Debug, Default)] pub struct ProposalKey { diff --git a/beacon_node/beacon_chain/src/shuffling_cache.rs b/beacon_node/beacon_chain/src/shuffling_cache.rs index 22921147a68..618d459754d 100644 --- a/beacon_node/beacon_chain/src/shuffling_cache.rs +++ b/beacon_node/beacon_chain/src/shuffling_cache.rs @@ -290,6 +290,7 @@ impl BlockShufflingIds { #[cfg(not(debug_assertions))] #[cfg(test)] mod test { + use fixed_bytes::FixedBytesExtended; use types::*; use crate::test_utils::EphemeralHarnessType; diff --git a/beacon_node/beacon_chain/src/single_attestation.rs b/beacon_node/beacon_chain/src/single_attestation.rs index 33a093687e5..955eb98e92a 100644 --- a/beacon_node/beacon_chain/src/single_attestation.rs +++ b/beacon_node/beacon_chain/src/single_attestation.rs @@ -1,7 +1,7 @@ use crate::attestation_verification::Error; +use ssz_types::{BitList, BitVector}; use types::{ - Attestation, AttestationBase, AttestationElectra, BitList, BitVector, EthSpec, ForkName, - SingleAttestation, + Attestation, AttestationBase, AttestationElectra, EthSpec, ForkName, SingleAttestation, }; pub fn single_attestation_to_attestation( diff --git a/beacon_node/beacon_chain/src/sync_committee_verification.rs b/beacon_node/beacon_chain/src/sync_committee_verification.rs index 88b040a6e59..e74e284e583 100644 --- a/beacon_node/beacon_chain/src/sync_committee_verification.rs +++ b/beacon_node/beacon_chain/src/sync_committee_verification.rs @@ -30,6 +30,7 @@ use crate::observed_attesters::SlotSubcommitteeIndex; use crate::{ BeaconChain, BeaconChainError, BeaconChainTypes, metrics, observed_aggregates::ObserveOutcome, }; +use 
bls::AggregateSignature; use bls::{PublicKeyBytes, verify_signature_sets}; use educe::Educe; use safe_arith::ArithError; @@ -51,7 +52,7 @@ use types::consts::altair::SYNC_COMMITTEE_SUBNET_COUNT; use types::slot_data::SlotData; use types::sync_committee::SyncCommitteeError; use types::{ - AggregateSignature, BeaconStateError, EthSpec, Hash256, SignedContributionAndProof, Slot, + BeaconStateError, EthSpec, Hash256, SignedContributionAndProof, Slot, SyncCommitteeContribution, SyncCommitteeMessage, SyncSelectionProof, SyncSubnetId, sync_committee_contribution::Error as ContributionError, }; diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index 759b7e9bd77..3651512b85a 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -20,6 +20,9 @@ pub use crate::{ validator_monitor::{ValidatorMonitor, ValidatorMonitorConfig}, }; use bls::get_withdrawal_credentials; +use bls::{ + AggregateSignature, Keypair, PublicKey, PublicKeyBytes, SecretKey, Signature, SignatureBytes, +}; use eth2::types::SignedBlockContentsTuple; use execution_layer::test_utils::generate_genesis_header; use execution_layer::{ @@ -30,6 +33,7 @@ use execution_layer::{ MockExecutionLayer, }, }; +use fixed_bytes::FixedBytesExtended; use futures::channel::mpsc::Receiver; pub use genesis::{DEFAULT_ETH1_BLOCK_HASH, InteropGenesisBuilder}; use int_to_bytes::int_to_bytes32; @@ -46,7 +50,7 @@ use rand::seq::SliceRandom; use rayon::prelude::*; use sensitive_url::SensitiveUrl; use slot_clock::{SlotClock, TestingSlotClock}; -use ssz_types::RuntimeVariableList; +use ssz_types::{RuntimeVariableList, VariableList}; use state_processing::per_block_processing::compute_timestamp_at_slot; use state_processing::state_advance::complete_state_advance; use std::borrow::Cow; @@ -61,12 +65,13 @@ use store::{HotColdDB, ItemStore, MemoryStore, config::StoreConfig}; use task_executor::TaskExecutor; use task_executor::{ShutdownReason, 
test_utils::TestRuntime}; use tree_hash::TreeHash; +use typenum::U4294967296; use types::data_column_custody_group::CustodyIndex; use types::indexed_attestation::IndexedAttestationBase; use types::payload::BlockProductionVersion; use types::test_utils::TestRandom; pub use types::test_utils::generate_deterministic_keypairs; -use types::{typenum::U4294967296, *}; +use types::*; // 4th September 2019 pub const HARNESS_GENESIS_TIME: u64 = 1_567_552_690; @@ -3276,7 +3281,7 @@ pub fn generate_rand_block_and_blobs( ) -> (SignedBeaconBlock>, Vec>) { let inner = map_fork_name!(fork_name, BeaconBlock, <_>::random_for_test(rng)); - let mut block = SignedBeaconBlock::from_block(inner, types::Signature::random_for_test(rng)); + let mut block = SignedBeaconBlock::from_block(inner, Signature::random_for_test(rng)); let mut blob_sidecars = vec![]; let bundle = match block { diff --git a/beacon_node/beacon_chain/src/validator_monitor.rs b/beacon_node/beacon_chain/src/validator_monitor.rs index ba06d5da4ec..2a76d65d328 100644 --- a/beacon_node/beacon_chain/src/validator_monitor.rs +++ b/beacon_node/beacon_chain/src/validator_monitor.rs @@ -4,6 +4,7 @@ use crate::beacon_proposer_cache::{BeaconProposerCache, TYPICAL_SLOTS_PER_EPOCH}; use crate::metrics; +use bls::PublicKeyBytes; use itertools::Itertools; use logging::crit; use parking_lot::{Mutex, RwLock}; @@ -28,9 +29,10 @@ use types::consts::altair::{ use types::{ Attestation, AttestationData, AttesterSlashingRef, BeaconBlockRef, BeaconState, BeaconStateError, ChainSpec, Epoch, EthSpec, Hash256, IndexedAttestation, - IndexedAttestationRef, ProposerSlashing, PublicKeyBytes, SignedAggregateAndProof, - SignedContributionAndProof, Slot, SyncCommitteeMessage, VoluntaryExit, + IndexedAttestationRef, ProposerSlashing, SignedAggregateAndProof, SignedContributionAndProof, + Slot, SyncCommitteeMessage, VoluntaryExit, }; + /// Used for Prometheus labels. 
/// /// We've used `total` for this value to align with Nimbus, as per: diff --git a/beacon_node/beacon_chain/src/validator_pubkey_cache.rs b/beacon_node/beacon_chain/src/validator_pubkey_cache.rs index a346a649f02..26ac02d91b4 100644 --- a/beacon_node/beacon_chain/src/validator_pubkey_cache.rs +++ b/beacon_node/beacon_chain/src/validator_pubkey_cache.rs @@ -1,6 +1,8 @@ use crate::errors::BeaconChainError; use crate::{BeaconChainTypes, BeaconStore}; use bls::PUBLIC_KEY_UNCOMPRESSED_BYTES_LEN; +use bls::{PublicKey, PublicKeyBytes}; +use fixed_bytes::FixedBytesExtended; use rayon::prelude::*; use smallvec::SmallVec; use ssz::{Decode, Encode}; @@ -9,7 +11,7 @@ use std::collections::HashMap; use std::marker::PhantomData; use store::{DBColumn, Error as StoreError, StoreItem, StoreOp}; use tracing::instrument; -use types::{BeaconState, FixedBytesExtended, Hash256, PublicKey, PublicKeyBytes}; +use types::{BeaconState, Hash256}; /// Provides a mapping of `validator_index -> validator_publickey`. 
/// @@ -244,10 +246,11 @@ impl DatabasePubkey { mod test { use super::*; use crate::test_utils::{BeaconChainHarness, EphemeralHarnessType}; + use bls::Keypair; use logging::create_test_tracing_subscriber; use std::sync::Arc; use store::HotColdDB; - use types::{EthSpec, Keypair, MainnetEthSpec}; + use types::{EthSpec, MainnetEthSpec}; type E = MainnetEthSpec; type T = EphemeralHarnessType; diff --git a/beacon_node/beacon_chain/tests/attestation_production.rs b/beacon_node/beacon_chain/tests/attestation_production.rs index 0acb23d5126..017c249d10b 100644 --- a/beacon_node/beacon_chain/tests/attestation_production.rs +++ b/beacon_node/beacon_chain/tests/attestation_production.rs @@ -4,11 +4,10 @@ use beacon_chain::attestation_simulator::produce_unaggregated_attestation; use beacon_chain::test_utils::{AttestationStrategy, BeaconChainHarness, BlockStrategy}; use beacon_chain::validator_monitor::UNAGGREGATED_ATTESTATION_LAG_SLOTS; use beacon_chain::{StateSkipConfig, WhenSlotSkipped, metrics}; +use bls::{AggregateSignature, Keypair}; use std::sync::{Arc, LazyLock}; use tree_hash::TreeHash; -use types::{ - AggregateSignature, Attestation, EthSpec, Keypair, MainnetEthSpec, RelativeEpoch, Slot, -}; +use types::{Attestation, EthSpec, MainnetEthSpec, RelativeEpoch, Slot}; pub const VALIDATOR_COUNT: usize = 16; diff --git a/beacon_node/beacon_chain/tests/attestation_verification.rs b/beacon_node/beacon_chain/tests/attestation_verification.rs index 706ffad3c1a..7984ea47081 100644 --- a/beacon_node/beacon_chain/tests/attestation_verification.rs +++ b/beacon_node/beacon_chain/tests/attestation_verification.rs @@ -13,15 +13,17 @@ use beacon_chain::{ single_attestation_to_attestation, test_spec, }, }; +use bls::{AggregateSignature, Keypair, SecretKey}; +use fixed_bytes::FixedBytesExtended; use genesis::{DEFAULT_ETH1_BLOCK_HASH, interop_genesis_state}; use int_to_bytes::int_to_bytes32; use state_processing::per_slot_processing; use std::sync::{Arc, LazyLock}; use tree_hash::TreeHash; 
+use typenum::Unsigned; use types::{ - Address, AggregateSignature, Attestation, AttestationRef, ChainSpec, Epoch, EthSpec, - FixedBytesExtended, ForkName, Hash256, Keypair, MainnetEthSpec, SecretKey, SelectionProof, - SignedAggregateAndProof, SingleAttestation, Slot, SubnetId, Unsigned, + Address, Attestation, AttestationRef, ChainSpec, Epoch, EthSpec, ForkName, Hash256, + MainnetEthSpec, SelectionProof, SignedAggregateAndProof, SingleAttestation, Slot, SubnetId, signed_aggregate_and_proof::SignedAggregateAndProofRefMut, test_utils::generate_deterministic_keypair, }; diff --git a/beacon_node/beacon_chain/tests/blob_verification.rs b/beacon_node/beacon_chain/tests/blob_verification.rs index c42a2828c01..d1a0d87adf1 100644 --- a/beacon_node/beacon_chain/tests/blob_verification.rs +++ b/beacon_node/beacon_chain/tests/blob_verification.rs @@ -7,6 +7,7 @@ use beacon_chain::{ AvailabilityProcessingStatus, BlockError, ChainConfig, InvalidSignature, NotifyExecutionLayer, block_verification_types::AsBlock, }; +use bls::{Keypair, Signature}; use logging::create_test_tracing_subscriber; use std::sync::{Arc, LazyLock}; use types::{blob_sidecar::FixedBlobSidecarList, *}; diff --git a/beacon_node/beacon_chain/tests/block_verification.rs b/beacon_node/beacon_chain/tests/block_verification.rs index 881885cef23..2644b74b28e 100644 --- a/beacon_node/beacon_chain/tests/block_verification.rs +++ b/beacon_node/beacon_chain/tests/block_verification.rs @@ -13,6 +13,8 @@ use beacon_chain::{ BeaconSnapshot, BlockError, ChainConfig, ChainSegmentResult, IntoExecutionPendingBlock, InvalidSignature, NotifyExecutionLayer, }; +use bls::{AggregateSignature, Keypair, Signature}; +use fixed_bytes::FixedBytesExtended; use logging::create_test_tracing_subscriber; use slasher::{Config as SlasherConfig, Slasher}; use state_processing::{ diff --git a/beacon_node/beacon_chain/tests/column_verification.rs b/beacon_node/beacon_chain/tests/column_verification.rs index 229ae1e1998..be9b3b2fa12 100644 --- 
a/beacon_node/beacon_chain/tests/column_verification.rs +++ b/beacon_node/beacon_chain/tests/column_verification.rs @@ -9,6 +9,7 @@ use beacon_chain::{ AvailabilityProcessingStatus, BlockError, ChainConfig, InvalidSignature, NotifyExecutionLayer, block_verification_types::AsBlock, }; +use bls::{Keypair, Signature}; use logging::create_test_tracing_subscriber; use std::sync::{Arc, LazyLock}; use types::*; diff --git a/beacon_node/beacon_chain/tests/op_verification.rs b/beacon_node/beacon_chain/tests/op_verification.rs index c18af0bde70..2f97f10745e 100644 --- a/beacon_node/beacon_chain/tests/op_verification.rs +++ b/beacon_node/beacon_chain/tests/op_verification.rs @@ -9,6 +9,7 @@ use beacon_chain::{ AttestationStrategy, BeaconChainHarness, BlockStrategy, DiskHarnessType, test_spec, }, }; +use bls::Keypair; use state_processing::per_block_processing::errors::{ AttesterSlashingInvalid, BlockOperationError, ExitInvalid, ProposerSlashingInvalid, }; diff --git a/beacon_node/beacon_chain/tests/rewards.rs b/beacon_node/beacon_chain/tests/rewards.rs index 0a5881e486b..ee9cf511ea5 100644 --- a/beacon_node/beacon_chain/tests/rewards.rs +++ b/beacon_node/beacon_chain/tests/rewards.rs @@ -7,8 +7,9 @@ use beacon_chain::test_utils::{ use beacon_chain::{ BlockError, ChainConfig, StateSkipConfig, WhenSlotSkipped, test_utils::{AttestationStrategy, BlockStrategy, RelativeSyncCommittee}, - types::{Epoch, EthSpec, Keypair, MinimalEthSpec}, + types::{Epoch, EthSpec, MinimalEthSpec}, }; +use bls::Keypair; use eth2::types::{StandardAttestationRewards, TotalAttestationRewards, ValidatorId}; use state_processing::{BlockReplayError, BlockReplayer}; use std::array::IntoIter; diff --git a/beacon_node/beacon_chain/tests/schema_stability.rs b/beacon_node/beacon_chain/tests/schema_stability.rs index 3b09921c15c..db7f7dbdbbd 100644 --- a/beacon_node/beacon_chain/tests/schema_stability.rs +++ b/beacon_node/beacon_chain/tests/schema_stability.rs @@ -4,6 +4,7 @@ use beacon_chain::{ 
persisted_custody::PersistedCustody, test_utils::{BeaconChainHarness, DiskHarnessType, test_spec}, }; +use bls::Keypair; use logging::create_test_tracing_subscriber; use operation_pool::PersistedOperationPool; use ssz::Encode; @@ -16,7 +17,7 @@ use store::{ }; use strum::IntoEnumIterator; use tempfile::{TempDir, tempdir}; -use types::{ChainSpec, Hash256, Keypair, MainnetEthSpec, Slot}; +use types::{ChainSpec, Hash256, MainnetEthSpec, Slot}; type E = MainnetEthSpec; type Store = Arc, BeaconNodeBackend>>; diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index 0733d901fc3..c1c53c014c1 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -25,11 +25,14 @@ use beacon_chain::{ historical_blocks::HistoricalBlockError, migrate::MigratorConfig, }; +use bls::{Keypair, Signature, SignatureBytes}; +use fixed_bytes::FixedBytesExtended; use logging::create_test_tracing_subscriber; use maplit::hashset; use rand::Rng; use rand::rngs::StdRng; use slot_clock::{SlotClock, TestingSlotClock}; +use ssz_types::VariableList; use state_processing::{BlockReplayer, state_advance::complete_state_advance}; use std::collections::HashMap; use std::collections::HashSet; diff --git a/beacon_node/beacon_chain/tests/sync_committee_verification.rs b/beacon_node/beacon_chain/tests/sync_committee_verification.rs index 9dd12410fbb..d2124c66415 100644 --- a/beacon_node/beacon_chain/tests/sync_committee_verification.rs +++ b/beacon_node/beacon_chain/tests/sync_committee_verification.rs @@ -2,6 +2,8 @@ use beacon_chain::sync_committee_verification::{Error as SyncCommitteeError, SyncCommitteeData}; use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType, RelativeSyncCommittee}; +use bls::{AggregateSignature, Keypair, SecretKey}; +use fixed_bytes::FixedBytesExtended; use int_to_bytes::int_to_bytes32; use safe_arith::SafeArith; use state_processing::{ @@ -11,10 +13,11 @@ use 
state_processing::{ use std::sync::LazyLock; use store::{SignedContributionAndProof, SyncCommitteeMessage}; use tree_hash::TreeHash; +use typenum::Unsigned; use types::consts::altair::SYNC_COMMITTEE_SUBNET_COUNT; use types::{ - AggregateSignature, Epoch, EthSpec, FixedBytesExtended, Hash256, Keypair, MainnetEthSpec, - SecretKey, Slot, SyncContributionData, SyncSelectionProof, SyncSubnetId, Unsigned, + Epoch, EthSpec, Hash256, MainnetEthSpec, Slot, SyncContributionData, SyncSelectionProof, + SyncSubnetId, }; pub type E = MainnetEthSpec; diff --git a/beacon_node/beacon_chain/tests/tests.rs b/beacon_node/beacon_chain/tests/tests.rs index ec0e607d00a..17d9c5f697f 100644 --- a/beacon_node/beacon_chain/tests/tests.rs +++ b/beacon_node/beacon_chain/tests/tests.rs @@ -8,13 +8,14 @@ use beacon_chain::{ OP_POOL_DB_KEY, }, }; +use bls::Keypair; use operation_pool::PersistedOperationPool; use state_processing::EpochProcessingError; use state_processing::{per_slot_processing, per_slot_processing::Error as SlotProcessingError}; use std::sync::LazyLock; use types::{ - BeaconState, BeaconStateError, BlockImportSource, Checkpoint, EthSpec, Hash256, Keypair, - MinimalEthSpec, RelativeEpoch, Slot, + BeaconState, BeaconStateError, BlockImportSource, Checkpoint, EthSpec, Hash256, MinimalEthSpec, + RelativeEpoch, Slot, }; type E = MinimalEthSpec; diff --git a/beacon_node/beacon_chain/tests/validator_monitor.rs b/beacon_node/beacon_chain/tests/validator_monitor.rs index 95732abeb5d..521fc4ac975 100644 --- a/beacon_node/beacon_chain/tests/validator_monitor.rs +++ b/beacon_node/beacon_chain/tests/validator_monitor.rs @@ -2,8 +2,9 @@ use beacon_chain::test_utils::{ AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType, }; use beacon_chain::validator_monitor::{MISSED_BLOCK_LAG_SLOTS, ValidatorMonitorConfig}; +use bls::{Keypair, PublicKeyBytes}; use std::sync::LazyLock; -use types::{Epoch, EthSpec, Hash256, Keypair, MainnetEthSpec, PublicKeyBytes, Slot}; +use 
types::{Epoch, EthSpec, Hash256, MainnetEthSpec, Slot}; // Should ideally be divisible by 3. pub const VALIDATOR_COUNT: usize = 48; diff --git a/beacon_node/builder_client/Cargo.toml b/beacon_node/builder_client/Cargo.toml index 9b1f86360df..09bf3f48b4e 100644 --- a/beacon_node/builder_client/Cargo.toml +++ b/beacon_node/builder_client/Cargo.toml @@ -5,6 +5,8 @@ edition = { workspace = true } authors = ["Sean Anderson "] [dependencies] +bls = { workspace = true } +context_deserialize = { workspace = true } eth2 = { workspace = true } ethereum_ssz = { workspace = true } lighthouse_version = { workspace = true } diff --git a/beacon_node/builder_client/src/lib.rs b/beacon_node/builder_client/src/lib.rs index b486e77083a..4fc6b3a379b 100644 --- a/beacon_node/builder_client/src/lib.rs +++ b/beacon_node/builder_client/src/lib.rs @@ -1,9 +1,11 @@ +use bls::PublicKeyBytes; +use context_deserialize::ContextDeserialize; pub use eth2::Error; use eth2::types::beacon_response::EmptyMetadata; use eth2::types::builder_bid::SignedBuilderBid; use eth2::types::{ - ContentType, ContextDeserialize, EthSpec, ExecutionBlockHash, ForkName, ForkVersionDecode, - ForkVersionedResponse, PublicKeyBytes, SignedValidatorRegistrationData, Slot, + ContentType, EthSpec, ExecutionBlockHash, ForkName, ForkVersionDecode, ForkVersionedResponse, + SignedValidatorRegistrationData, Slot, }; use eth2::types::{FullPayloadContents, SignedBlindedBeaconBlock}; use eth2::{ @@ -538,9 +540,10 @@ impl BuilderHttpClient { #[cfg(test)] mod tests { use super::*; + use bls::Signature; + use eth2::types::MainnetEthSpec; use eth2::types::builder_bid::{BuilderBid, BuilderBidFulu}; use eth2::types::test_utils::{SeedableRng, TestRandom, XorShiftRng}; - use eth2::types::{MainnetEthSpec, Signature}; use mockito::{Matcher, Server, ServerGuard}; type E = MainnetEthSpec; diff --git a/beacon_node/execution_layer/Cargo.toml b/beacon_node/execution_layer/Cargo.toml index 540f9dc0a09..c443e945743 100644 --- 
a/beacon_node/execution_layer/Cargo.toml +++ b/beacon_node/execution_layer/Cargo.toml @@ -10,6 +10,7 @@ alloy-primitives = { workspace = true } alloy-rlp = { workspace = true } alloy-rpc-types-eth = { workspace = true } arc-swap = "1.6.0" +bls = { workspace = true } builder_client = { path = "../builder_client" } bytes = { workspace = true } eth2 = { workspace = true, features = ["events", "lighthouse"] } @@ -48,6 +49,7 @@ tracing = { workspace = true } tree_hash = { workspace = true } tree_hash_derive = { workspace = true } triehash = "0.8.4" +typenum = { workspace = true } types = { workspace = true } warp = { workspace = true } zeroize = { workspace = true } diff --git a/beacon_node/execution_layer/src/engine_api.rs b/beacon_node/execution_layer/src/engine_api.rs index 98da7dbf2c7..f285640b21c 100644 --- a/beacon_node/execution_layer/src/engine_api.rs +++ b/beacon_node/execution_layer/src/engine_api.rs @@ -20,8 +20,8 @@ use strum::IntoStaticStr; use superstruct::superstruct; pub use types::{ Address, BeaconBlockRef, ConsolidationRequest, EthSpec, ExecutionBlockHash, ExecutionPayload, - ExecutionPayloadHeader, ExecutionPayloadRef, FixedVector, ForkName, Hash256, Transactions, - Uint256, VariableList, Withdrawal, Withdrawals, + ExecutionPayloadHeader, ExecutionPayloadRef, ForkName, Hash256, Transactions, Uint256, + Withdrawal, Withdrawals, }; use types::{ ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, diff --git a/beacon_node/execution_layer/src/engine_api/http.rs b/beacon_node/execution_layer/src/engine_api/http.rs index 8f7564ace6b..c421491f808 100644 --- a/beacon_node/execution_layer/src/engine_api/http.rs +++ b/beacon_node/execution_layer/src/engine_api/http.rs @@ -103,9 +103,10 @@ pub static LIGHTHOUSE_JSON_CLIENT_VERSION: LazyLock = /// Contains methods to convert arbitrary bytes to an ETH2 deposit contract object. 
pub mod deposit_log { + use bls::{PublicKeyBytes, SignatureBytes}; use ssz::Decode; use state_processing::per_block_processing::signature_sets::deposit_pubkey_signature_message; - use types::{ChainSpec, DepositData, Hash256, PublicKeyBytes, SignatureBytes}; + use types::{ChainSpec, DepositData, Hash256}; pub use eth2::lighthouse::DepositLog; @@ -1466,10 +1467,13 @@ mod test { use super::auth::JwtKey; use super::*; use crate::test_utils::{DEFAULT_JWT_SECRET, MockServer}; + use fixed_bytes::FixedBytesExtended; + use ssz_types::VariableList; use std::future::Future; use std::str::FromStr; use std::sync::Arc; - use types::{FixedBytesExtended, MainnetEthSpec, Unsigned}; + use typenum::Unsigned; + use types::MainnetEthSpec; struct Tester { server: MockServer, diff --git a/beacon_node/execution_layer/src/engine_api/json_structures.rs b/beacon_node/execution_layer/src/engine_api/json_structures.rs index cc46070325d..fc8eae015b9 100644 --- a/beacon_node/execution_layer/src/engine_api/json_structures.rs +++ b/beacon_node/execution_layer/src/engine_api/json_structures.rs @@ -1101,10 +1101,10 @@ impl TryFrom for ClientVersionV1 { #[cfg(test)] mod tests { + use bls::{PublicKeyBytes, SignatureBytes}; use ssz::Encode; use types::{ - ConsolidationRequest, DepositRequest, MainnetEthSpec, PublicKeyBytes, RequestType, - SignatureBytes, WithdrawalRequest, + ConsolidationRequest, DepositRequest, MainnetEthSpec, RequestType, WithdrawalRequest, }; use super::*; diff --git a/beacon_node/execution_layer/src/lib.rs b/beacon_node/execution_layer/src/lib.rs index c2a31c2699b..34b1832894e 100644 --- a/beacon_node/execution_layer/src/lib.rs +++ b/beacon_node/execution_layer/src/lib.rs @@ -9,6 +9,7 @@ use crate::payload_cache::PayloadCache; use arc_swap::ArcSwapOption; use auth::{Auth, JwtKey, strip_prefix}; pub use block_hash::calculate_execution_block_hash; +use bls::{PublicKeyBytes, Signature}; use builder_client::BuilderHttpClient; pub use engine_api::EngineCapabilities; use 
engine_api::Error as ApiError; @@ -55,7 +56,7 @@ use types::{ use types::{ BeaconStateError, BlindedPayload, ChainSpec, Epoch, ExecPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadElectra, ExecutionPayloadFulu, ExecutionPayloadGloas, - FullPayload, ProposerPreparationData, PublicKeyBytes, Signature, Slot, + FullPayload, ProposerPreparationData, Slot, }; mod block_hash; diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index 7e0033d732c..89d2994ce28 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -8,6 +8,7 @@ use crate::engines::ForkchoiceState; use alloy_consensus::TxEnvelope; use alloy_rpc_types_eth::Transaction as AlloyTransaction; use eth2::types::BlobsBundle; +use fixed_bytes::FixedBytesExtended; use kzg::{Kzg, KzgCommitment, KzgProof}; use parking_lot::Mutex; use rand::{Rng, SeedableRng, rngs::StdRng}; @@ -22,8 +23,8 @@ use tree_hash_derive::TreeHash; use types::{ Blob, ChainSpec, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, - ExecutionPayloadGloas, ExecutionPayloadHeader, FixedBytesExtended, ForkName, Hash256, - KzgProofs, Transaction, Transactions, Uint256, + ExecutionPayloadGloas, ExecutionPayloadHeader, ForkName, Hash256, KzgProofs, Transaction, + Transactions, Uint256, }; use super::DEFAULT_TERMINAL_BLOCK; @@ -41,7 +42,7 @@ pub enum Block { PoS(ExecutionPayload), } -pub fn mock_el_extra_data() -> types::VariableList { +pub fn mock_el_extra_data() -> VariableList { "block gen was here".as_bytes().to_vec().try_into().unwrap() } diff --git a/beacon_node/execution_layer/src/test_utils/mock_builder.rs b/beacon_node/execution_layer/src/test_utils/mock_builder.rs index 
589b29193c1..1d4f36b62c5 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_builder.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_builder.rs @@ -1,5 +1,6 @@ use crate::test_utils::{DEFAULT_BUILDER_PAYLOAD_VALUE_WEI, DEFAULT_JWT_SECRET}; use crate::{Config, ExecutionLayer, PayloadAttributes, PayloadParameters}; +use bls::{PublicKeyBytes, SecretKey, Signature}; use bytes::Bytes; use eth2::beacon_response::ForkVersionedResponse; use eth2::types::PublishBlockRequest; @@ -15,6 +16,7 @@ use fork_choice::ForkchoiceUpdateParameters; use parking_lot::RwLock; use sensitive_url::SensitiveUrl; use ssz::Encode; +use ssz_types::VariableList; use std::collections::HashMap; use std::fmt::Debug; use std::future::Future; @@ -26,6 +28,7 @@ use tempfile::NamedTempFile; use tokio_stream::StreamExt; use tracing::{debug, error, info, warn}; use tree_hash::TreeHash; +use types::ExecutionBlockHash; use types::builder_bid::{ BuilderBid, BuilderBidBellatrix, BuilderBidCapella, BuilderBidDeneb, BuilderBidElectra, BuilderBidFulu, BuilderBidGloas, SignedBuilderBid, @@ -33,10 +36,8 @@ use types::builder_bid::{ use types::{ Address, BeaconState, ChainSpec, Epoch, EthSpec, ExecPayload, ExecutionPayload, ExecutionPayloadHeaderRefMut, ExecutionRequests, ForkName, ForkVersionDecode, Hash256, - PublicKeyBytes, Signature, SignedBlindedBeaconBlock, SignedRoot, - SignedValidatorRegistrationData, Slot, Uint256, + SignedBlindedBeaconBlock, SignedRoot, SignedValidatorRegistrationData, Slot, Uint256, }; -use types::{ExecutionBlockHash, SecretKey}; use warp::reply::{self, Reply}; use warp::{Filter, Rejection}; @@ -72,7 +73,7 @@ impl Operation { } } -pub fn mock_builder_extra_data() -> types::VariableList { +pub fn mock_builder_extra_data() -> VariableList { "mock_builder".as_bytes().to_vec().try_into().unwrap() } diff --git a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs index 
73c998956ca..c69edb8f397 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_execution_layer.rs @@ -5,9 +5,10 @@ use crate::{ *, }; use alloy_primitives::B256 as H256; +use fixed_bytes::FixedBytesExtended; use kzg::Kzg; use tempfile::NamedTempFile; -use types::{FixedBytesExtended, MainnetEthSpec}; +use types::MainnetEthSpec; pub struct MockExecutionLayer { pub server: MockServer, diff --git a/beacon_node/execution_layer/src/versioned_hashes.rs b/beacon_node/execution_layer/src/versioned_hashes.rs index 97c3100de99..21cfd5a3223 100644 --- a/beacon_node/execution_layer/src/versioned_hashes.rs +++ b/beacon_node/execution_layer/src/versioned_hashes.rs @@ -1,6 +1,7 @@ use alloy_consensus::TxEnvelope; use alloy_rlp::Decodable; -use types::{EthSpec, ExecutionPayloadRef, Hash256, Unsigned, VersionedHash}; +use typenum::Unsigned; +use types::{EthSpec, ExecutionPayloadRef, Hash256, VersionedHash}; #[derive(Debug)] pub enum Error { diff --git a/beacon_node/genesis/Cargo.toml b/beacon_node/genesis/Cargo.toml index 8f6f3516fc5..124231a57e5 100644 --- a/beacon_node/genesis/Cargo.toml +++ b/beacon_node/genesis/Cargo.toml @@ -5,6 +5,7 @@ authors = ["Paul Hauner "] edition = { workspace = true } [dependencies] +bls = { workspace = true } ethereum_hashing = { workspace = true } ethereum_ssz = { workspace = true } int_to_bytes = { workspace = true } diff --git a/beacon_node/genesis/src/interop.rs b/beacon_node/genesis/src/interop.rs index dfa4daab9ae..349b8f19c8b 100644 --- a/beacon_node/genesis/src/interop.rs +++ b/beacon_node/genesis/src/interop.rs @@ -1,12 +1,10 @@ use crate::common::genesis_deposits; +use bls::{Keypair, PublicKey, Signature}; use ethereum_hashing::hash; use rayon::prelude::*; use ssz::Encode; use state_processing::initialize_beacon_state_from_eth1; -use types::{ - BeaconState, ChainSpec, DepositData, EthSpec, ExecutionPayloadHeader, Hash256, Keypair, - PublicKey, Signature, -}; +use 
types::{BeaconState, ChainSpec, DepositData, EthSpec, ExecutionPayloadHeader, Hash256}; pub const DEFAULT_ETH1_BLOCK_HASH: &[u8] = &[0x42; 32]; diff --git a/beacon_node/http_api/Cargo.toml b/beacon_node/http_api/Cargo.toml index 3aa9c8351cb..571dab10273 100644 --- a/beacon_node/http_api/Cargo.toml +++ b/beacon_node/http_api/Cargo.toml @@ -8,14 +8,17 @@ autotests = false # using a single test binary com [dependencies] beacon_chain = { workspace = true } beacon_processor = { workspace = true } +bls = { workspace = true } bs58 = "0.4.0" bytes = { workspace = true } +context_deserialize = { workspace = true } directory = { workspace = true } either = { workspace = true } eth2 = { workspace = true, features = ["lighthouse"] } ethereum_serde_utils = { workspace = true } ethereum_ssz = { workspace = true } execution_layer = { workspace = true } +fixed_bytes = { workspace = true } futures = { workspace = true } health_metrics = { workspace = true } hex = { workspace = true } diff --git a/beacon_node/http_api/src/block_id.rs b/beacon_node/http_api/src/block_id.rs index 64f54515607..ea8b47f91ef 100644 --- a/beacon_node/http_api/src/block_id.rs +++ b/beacon_node/http_api/src/block_id.rs @@ -6,12 +6,13 @@ use eth2::beacon_response::{ExecutionOptimisticFinalizedMetadata, UnversionedRes use eth2::types::BlockId as CoreBlockId; use eth2::types::DataColumnIndicesQuery; use eth2::types::{BlobIndicesQuery, BlobWrapper, BlobsVersionedHashesQuery}; +use fixed_bytes::FixedBytesExtended; use std::fmt; use std::str::FromStr; use std::sync::Arc; use types::{ - BlobSidecarList, DataColumnSidecarList, EthSpec, FixedBytesExtended, ForkName, Hash256, - SignedBeaconBlock, SignedBlindedBeaconBlock, Slot, + BlobSidecarList, DataColumnSidecarList, EthSpec, ForkName, Hash256, SignedBeaconBlock, + SignedBlindedBeaconBlock, Slot, }; use warp::Rejection; diff --git a/beacon_node/http_api/src/lib.rs b/beacon_node/http_api/src/lib.rs index 4ed02f3cbf7..58cd2a3bdbc 100644 --- 
a/beacon_node/http_api/src/lib.rs +++ b/beacon_node/http_api/src/lib.rs @@ -48,12 +48,13 @@ use beacon_processor::BeaconProcessorSend; pub use block_id::BlockId; use builder_states::get_next_withdrawals; use bytes::Bytes; +use context_deserialize::ContextDeserialize; use directory::DEFAULT_ROOT_DIR; use eth2::StatusCode; use eth2::lighthouse::sync_state::SyncState; use eth2::types::{ - self as api_types, BroadcastValidation, ContextDeserialize, EndpointVersion, ForkChoice, - ForkChoiceExtraData, ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, ValidatorId, + self as api_types, BroadcastValidation, EndpointVersion, ForkChoice, ForkChoiceExtraData, + ForkChoiceNode, LightClientUpdatesQuery, PublishBlockRequest, ValidatorId, }; use eth2::{CONSENSUS_VERSION_HEADER, CONTENT_TYPE_HEADER, SSZ_CONTENT_TYPE_HEADER}; use health_metrics::observe::Observe; diff --git a/beacon_node/http_api/src/validator/mod.rs b/beacon_node/http_api/src/validator/mod.rs index 9cf1f1a33d0..8baf7c52458 100644 --- a/beacon_node/http_api/src/validator/mod.rs +++ b/beacon_node/http_api/src/validator/mod.rs @@ -9,6 +9,7 @@ use crate::{StateId, attester_duties, proposer_duties, sync_committees}; use beacon_chain::attestation_verification::VerifiedAttestation; use beacon_chain::validator_monitor::timestamp_now; use beacon_chain::{AttestationError, BeaconChain, BeaconChainError, BeaconChainTypes}; +use bls::PublicKeyBytes; use eth2::StatusCode; use eth2::types::{ Accept, BeaconCommitteeSubscription, EndpointVersion, Failure, GenericResponse, @@ -23,7 +24,7 @@ use tokio::sync::mpsc::{Sender, UnboundedSender}; use tokio::sync::oneshot; use tracing::{debug, error, info, warn}; use types::{ - BeaconState, Epoch, EthSpec, ProposerPreparationData, PublicKeyBytes, SignedAggregateAndProof, + BeaconState, Epoch, EthSpec, ProposerPreparationData, SignedAggregateAndProof, SignedContributionAndProof, SignedValidatorRegistrationData, Slot, SyncContributionData, ValidatorSubscription, }; diff --git 
a/beacon_node/http_api/tests/broadcast_validation_tests.rs b/beacon_node/http_api/tests/broadcast_validation_tests.rs index 33f462fa5e2..357b78cf41c 100644 --- a/beacon_node/http_api/tests/broadcast_validation_tests.rs +++ b/beacon_node/http_api/tests/broadcast_validation_tests.rs @@ -6,13 +6,12 @@ use beacon_chain::{ }; use eth2::reqwest::{Response, StatusCode}; use eth2::types::{BroadcastValidation, PublishBlockRequest}; +use fixed_bytes::FixedBytesExtended; use http_api::test_utils::InteractiveTester; use http_api::{Config, ProvenancedBlock, publish_blinded_block, publish_block, reconstruct_block}; use std::collections::HashSet; use std::sync::Arc; -use types::{ - ColumnIndex, Epoch, EthSpec, FixedBytesExtended, ForkName, Hash256, MainnetEthSpec, Slot, -}; +use types::{ColumnIndex, Epoch, EthSpec, ForkName, Hash256, MainnetEthSpec, Slot}; use warp::Rejection; use warp_utils::reject::CustomBadRequest; diff --git a/beacon_node/http_api/tests/fork_tests.rs b/beacon_node/http_api/tests/fork_tests.rs index 50cf866b6a8..b96c8bd1122 100644 --- a/beacon_node/http_api/tests/fork_tests.rs +++ b/beacon_node/http_api/tests/fork_tests.rs @@ -4,13 +4,15 @@ use beacon_chain::{ StateSkipConfig, test_utils::{DEFAULT_ETH1_BLOCK_HASH, HARNESS_GENESIS_TIME, RelativeSyncCommittee}, }; +use bls::PublicKey; use eth2::types::{IndexedErrorMessage, StateId, SyncSubcommittee}; use execution_layer::test_utils::generate_genesis_header; +use fixed_bytes::FixedBytesExtended; use genesis::{InteropGenesisBuilder, bls_withdrawal_credentials}; use http_api::test_utils::*; use std::collections::HashSet; use types::{ - Address, ChainSpec, Epoch, EthSpec, FixedBytesExtended, Hash256, MinimalEthSpec, Slot, + Address, ChainSpec, Epoch, EthSpec, Hash256, MinimalEthSpec, Slot, test_utils::{generate_deterministic_keypair, generate_deterministic_keypairs}, }; @@ -392,7 +394,7 @@ async fn bls_to_execution_changes_update_all_around_capella_fork() { fn withdrawal_credentials_fn<'a>( index: usize, - _: &'a 
types::PublicKey, + _: &'a PublicKey, spec: &'a ChainSpec, ) -> Hash256 { // It is a bit inefficient to regenerate the whole keypair here, but this is a workaround. diff --git a/beacon_node/http_api/tests/interactive_tests.rs b/beacon_node/http_api/tests/interactive_tests.rs index 83cb70a7a3a..0119a7645c2 100644 --- a/beacon_node/http_api/tests/interactive_tests.rs +++ b/beacon_node/http_api/tests/interactive_tests.rs @@ -11,6 +11,7 @@ use beacon_processor::{Work, WorkEvent, work_reprocessing_queue::ReprocessQueueM use eth2::types::ProduceBlockV3Response; use eth2::types::{DepositContractData, StateId}; use execution_layer::{ForkchoiceState, PayloadAttributes}; +use fixed_bytes::FixedBytesExtended; use http_api::test_utils::InteractiveTester; use parking_lot::Mutex; use slot_clock::SlotClock; @@ -21,8 +22,8 @@ use std::collections::HashMap; use std::sync::Arc; use std::time::Duration; use types::{ - Address, Epoch, EthSpec, ExecPayload, ExecutionBlockHash, FixedBytesExtended, ForkName, - Hash256, MainnetEthSpec, MinimalEthSpec, ProposerPreparationData, Slot, Uint256, + Address, Epoch, EthSpec, ExecPayload, ExecutionBlockHash, ForkName, Hash256, MainnetEthSpec, + MinimalEthSpec, ProposerPreparationData, Slot, Uint256, }; type E = MainnetEthSpec; diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs index a86cc4f4eff..f8eba0ee2b7 100644 --- a/beacon_node/http_api/tests/tests.rs +++ b/beacon_node/http_api/tests/tests.rs @@ -6,6 +6,7 @@ use beacon_chain::{ AttestationStrategy, BeaconChainHarness, BlockStrategy, EphemeralHarnessType, test_spec, }, }; +use bls::{AggregateSignature, Keypair, PublicKeyBytes, Signature, SignatureBytes}; use eth2::{ BeaconNodeHttpClient, Error, Error::ServerMessage, @@ -21,6 +22,7 @@ use execution_layer::test_utils::{ DEFAULT_BUILDER_PAYLOAD_VALUE_WEI, DEFAULT_GAS_LIMIT, DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI, MockBuilder, Operation, mock_builder_extra_data, mock_el_extra_data, }; +use 
fixed_bytes::FixedBytesExtended; use futures::FutureExt; use futures::stream::{Stream, StreamExt}; use http_api::{ @@ -34,6 +36,7 @@ use operation_pool::attestation_storage::CheckpointKey; use proto_array::ExecutionStatus; use sensitive_url::SensitiveUrl; use slot_clock::SlotClock; +use ssz::BitList; use state_processing::per_block_processing::get_expected_withdrawals; use state_processing::per_slot_processing; use state_processing::state_advance::partial_state_advance; @@ -43,9 +46,8 @@ use tokio::time::Duration; use tree_hash::TreeHash; use types::application_domain::ApplicationDomain; use types::{ - AggregateSignature, BitList, Domain, EthSpec, ExecutionBlockHash, Hash256, Keypair, - MainnetEthSpec, RelativeEpoch, SelectionProof, SignedRoot, SingleAttestation, Slot, - attestation::AttestationBase, + Domain, EthSpec, ExecutionBlockHash, Hash256, MainnetEthSpec, RelativeEpoch, SelectionProof, + SignedRoot, SingleAttestation, Slot, attestation::AttestationBase, }; type E = MainnetEthSpec; diff --git a/beacon_node/lighthouse_network/Cargo.toml b/beacon_node/lighthouse_network/Cargo.toml index d2431cca045..efb6f27dc52 100644 --- a/beacon_node/lighthouse_network/Cargo.toml +++ b/beacon_node/lighthouse_network/Cargo.toml @@ -11,6 +11,7 @@ libp2p-websocket = [] [dependencies] alloy-primitives = { workspace = true } alloy-rlp = { workspace = true } +bls = { workspace = true } bytes = { workspace = true } delay_map = { workspace = true } directory = { workspace = true } @@ -20,6 +21,7 @@ either = { workspace = true } eth2 = { workspace = true, features = ["lighthouse"] } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } +fixed_bytes = { workspace = true } fnv = { workspace = true } futures = { workspace = true } gossipsub = { workspace = true } @@ -49,6 +51,7 @@ tokio = { workspace = true } tokio-util = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } +typenum = { workspace = true } types = { 
workspace = true } unsigned-varint = { version = "0.8", features = ["codec"] } diff --git a/beacon_node/lighthouse_network/src/discovery/mod.rs b/beacon_node/lighthouse_network/src/discovery/mod.rs index 3589882ae9b..a8c87523a54 100644 --- a/beacon_node/lighthouse_network/src/discovery/mod.rs +++ b/beacon_node/lighthouse_network/src/discovery/mod.rs @@ -1231,7 +1231,8 @@ mod tests { use super::*; use crate::rpc::methods::{MetaData, MetaDataV3}; use libp2p::identity::secp256k1; - use types::{BitVector, MinimalEthSpec, SubnetId}; + use ssz_types::BitVector; + use types::{MinimalEthSpec, SubnetId}; type E = MinimalEthSpec; diff --git a/beacon_node/lighthouse_network/src/peer_manager/mod.rs b/beacon_node/lighthouse_network/src/peer_manager/mod.rs index dfa8b374e9c..3cfe2b3c3b7 100644 --- a/beacon_node/lighthouse_network/src/peer_manager/mod.rs +++ b/beacon_node/lighthouse_network/src/peer_manager/mod.rs @@ -2978,7 +2978,8 @@ mod tests { use proptest::prelude::*; use std::collections::HashSet; use tokio::runtime::Runtime; - use types::{DataColumnSubnetId, Unsigned}; + use typenum::Unsigned; + use types::DataColumnSubnetId; use types::{EthSpec, MainnetEthSpec as E}; #[derive(Clone, Debug)] diff --git a/beacon_node/lighthouse_network/src/rpc/codec.rs b/beacon_node/lighthouse_network/src/rpc/codec.rs index 5b3574d48ac..48a29699c8f 100644 --- a/beacon_node/lighthouse_network/src/rpc/codec.rs +++ b/beacon_node/lighthouse_network/src/rpc/codec.rs @@ -908,11 +908,12 @@ mod tests { use super::*; use crate::rpc::protocol::*; use crate::types::{EnrAttestationBitfield, EnrSyncCommitteeBitfield}; + use bls::Signature; + use fixed_bytes::FixedBytesExtended; use types::{ BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockHeader, - DataColumnsByRootIdentifier, EmptyBlock, Epoch, FixedBytesExtended, FullPayload, - KzgCommitment, KzgProof, Signature, SignedBeaconBlockHeader, Slot, - blob_sidecar::BlobIdentifier, data_column_sidecar::Cell, + 
DataColumnsByRootIdentifier, EmptyBlock, Epoch, FullPayload, KzgCommitment, KzgProof, + SignedBeaconBlockHeader, Slot, blob_sidecar::BlobIdentifier, data_column_sidecar::Cell, }; type Spec = types::MainnetEthSpec; diff --git a/beacon_node/lighthouse_network/src/rpc/protocol.rs b/beacon_node/lighthouse_network/src/rpc/protocol.rs index 08085f3c271..366515d42f6 100644 --- a/beacon_node/lighthouse_network/src/rpc/protocol.rs +++ b/beacon_node/lighthouse_network/src/rpc/protocol.rs @@ -1,5 +1,6 @@ use super::methods::*; use crate::rpc::codec::SSZSnappyInboundCodec; +use bls::Signature; use futures::future::BoxFuture; use futures::prelude::{AsyncRead, AsyncWrite}; use futures::{FutureExt, StreamExt}; @@ -20,7 +21,7 @@ use types::{ EmptyBlock, Epoch, EthSpec, EthSpecId, ForkContext, ForkName, LightClientBootstrap, LightClientBootstrapAltair, LightClientFinalityUpdate, LightClientFinalityUpdateAltair, LightClientOptimisticUpdate, LightClientOptimisticUpdateAltair, LightClientUpdate, - MainnetEthSpec, MinimalEthSpec, Signature, SignedBeaconBlock, + MainnetEthSpec, MinimalEthSpec, SignedBeaconBlock, }; // Note: Hardcoding the `EthSpec` type for `SignedBeaconBlock` as min/max values is diff --git a/beacon_node/lighthouse_network/src/types/mod.rs b/beacon_node/lighthouse_network/src/types/mod.rs index 3f57406fc78..eea8782b2d5 100644 --- a/beacon_node/lighthouse_network/src/types/mod.rs +++ b/beacon_node/lighthouse_network/src/types/mod.rs @@ -3,7 +3,8 @@ mod pubsub; mod subnet; mod topics; -use types::{BitVector, EthSpec}; +use ssz_types::BitVector; +use types::EthSpec; pub type EnrAttestationBitfield = BitVector<::SubnetBitfieldLength>; pub type EnrSyncCommitteeBitfield = BitVector<::SyncCommitteeSubnetCount>; diff --git a/beacon_node/lighthouse_network/src/types/topics.rs b/beacon_node/lighthouse_network/src/types/topics.rs index cfdee907b9a..0c988f35c39 100644 --- a/beacon_node/lighthouse_network/src/types/topics.rs +++ b/beacon_node/lighthouse_network/src/types/topics.rs 
@@ -2,7 +2,8 @@ use gossipsub::{IdentTopic as Topic, TopicHash}; use serde::{Deserialize, Serialize}; use std::collections::HashSet; use strum::AsRefStr; -use types::{ChainSpec, DataColumnSubnetId, EthSpec, ForkName, SubnetId, SyncSubnetId, Unsigned}; +use typenum::Unsigned; +use types::{ChainSpec, DataColumnSubnetId, EthSpec, ForkName, SubnetId, SyncSubnetId}; use crate::Subnet; diff --git a/beacon_node/lighthouse_network/tests/common.rs b/beacon_node/lighthouse_network/tests/common.rs index d04f1211cf9..412ee5aca5a 100644 --- a/beacon_node/lighthouse_network/tests/common.rs +++ b/beacon_node/lighthouse_network/tests/common.rs @@ -1,4 +1,5 @@ #![cfg(test)] +use fixed_bytes::FixedBytesExtended; use lighthouse_network::Enr; use lighthouse_network::Multiaddr; use lighthouse_network::service::Network as LibP2PService; @@ -9,10 +10,7 @@ use std::sync::Weak; use tokio::runtime::Runtime; use tracing::{Instrument, debug, error, info_span}; use tracing_subscriber::EnvFilter; -use types::{ - ChainSpec, EnrForkId, Epoch, EthSpec, FixedBytesExtended, ForkContext, ForkName, Hash256, - MinimalEthSpec, -}; +use types::{ChainSpec, EnrForkId, Epoch, EthSpec, ForkContext, ForkName, Hash256, MinimalEthSpec}; type E = MinimalEthSpec; diff --git a/beacon_node/lighthouse_network/tests/rpc_tests.rs b/beacon_node/lighthouse_network/tests/rpc_tests.rs index 8613edf5f5e..599fcd242bf 100644 --- a/beacon_node/lighthouse_network/tests/rpc_tests.rs +++ b/beacon_node/lighthouse_network/tests/rpc_tests.rs @@ -3,6 +3,8 @@ use crate::common; use crate::common::spec_with_all_forks_enabled; use crate::common::{Protocol, build_tracing_subscriber}; +use bls::Signature; +use fixed_bytes::FixedBytesExtended; use lighthouse_network::rpc::{RequestType, methods::*}; use lighthouse_network::service::api_types::AppRequestId; use lighthouse_network::{NetworkEvent, ReportSource, Response}; @@ -16,8 +18,8 @@ use tracing::{Instrument, debug, error, info_span, warn}; use types::{ BeaconBlock, BeaconBlockAltair, 
BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockHeader, BlobSidecar, ChainSpec, DataColumnSidecar, DataColumnsByRootIdentifier, EmptyBlock, Epoch, - EthSpec, FixedBytesExtended, ForkName, Hash256, KzgCommitment, KzgProof, MinimalEthSpec, - Signature, SignedBeaconBlock, SignedBeaconBlockHeader, Slot, + EthSpec, ForkName, Hash256, KzgCommitment, KzgProof, MinimalEthSpec, SignedBeaconBlock, + SignedBeaconBlockHeader, Slot, }; type E = MinimalEthSpec; diff --git a/beacon_node/network/Cargo.toml b/beacon_node/network/Cargo.toml index b60c5e6dbff..bf261965760 100644 --- a/beacon_node/network/Cargo.toml +++ b/beacon_node/network/Cargo.toml @@ -22,6 +22,7 @@ delay_map = { workspace = true } educe = { workspace = true } ethereum_ssz = { workspace = true } execution_layer = { workspace = true } +fixed_bytes = { workspace = true } fnv = { workspace = true } futures = { workspace = true } hex = { workspace = true } @@ -45,6 +46,7 @@ tokio = { workspace = true } tokio-stream = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } +typenum = { workspace = true } types = { workspace = true } [dev-dependencies] diff --git a/beacon_node/network/src/service.rs b/beacon_node/network/src/service.rs index a416f5cb123..0869b442aec 100644 --- a/beacon_node/network/src/service.rs +++ b/beacon_node/network/src/service.rs @@ -37,8 +37,9 @@ use task_executor::ShutdownReason; use tokio::sync::mpsc; use tokio::time::Sleep; use tracing::{debug, error, info, trace, warn}; +use typenum::Unsigned; use types::{ - EthSpec, ForkContext, Slot, SubnetId, SyncCommitteeSubscription, SyncSubnetId, Unsigned, + EthSpec, ForkContext, Slot, SubnetId, SyncCommitteeSubscription, SyncSubnetId, ValidatorSubscription, }; diff --git a/beacon_node/network/src/status.rs b/beacon_node/network/src/status.rs index ebf5c1829e5..c571a40485c 100644 --- a/beacon_node/network/src/status.rs +++ b/beacon_node/network/src/status.rs @@ -1,5 +1,6 @@ use beacon_chain::{BeaconChain, 
BeaconChainTypes}; -use types::{EthSpec, FixedBytesExtended, Hash256}; +use fixed_bytes::FixedBytesExtended; +use types::{EthSpec, Hash256}; use lighthouse_network::rpc::{StatusMessage, methods::StatusMessageV2}; /// Trait to produce a `StatusMessage` representing the state of the given `beacon_chain`. diff --git a/beacon_node/network/src/sync/block_lookups/parent_chain.rs b/beacon_node/network/src/sync/block_lookups/parent_chain.rs index 551a0261f2c..5deea1dd94e 100644 --- a/beacon_node/network/src/sync/block_lookups/parent_chain.rs +++ b/beacon_node/network/src/sync/block_lookups/parent_chain.rs @@ -118,7 +118,8 @@ pub(crate) fn find_oldest_fork_ancestor( #[cfg(test)] mod tests { use super::{Node, compute_parent_chains, find_oldest_fork_ancestor}; - use types::{FixedBytesExtended, Hash256}; + use fixed_bytes::FixedBytesExtended; + use types::Hash256; fn h(n: u64) -> Hash256 { Hash256::from_low_u64_be(n) diff --git a/beacon_node/operation_pool/Cargo.toml b/beacon_node/operation_pool/Cargo.toml index eeddb53c23e..6fab7a752a4 100644 --- a/beacon_node/operation_pool/Cargo.toml +++ b/beacon_node/operation_pool/Cargo.toml @@ -9,9 +9,11 @@ portable = ["beacon_chain/portable"] [dependencies] bitvec = { workspace = true } +bls = { workspace = true } educe = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } +fixed_bytes = { workspace = true } itertools = { workspace = true } metrics = { workspace = true } parking_lot = { workspace = true } @@ -20,6 +22,8 @@ rayon = { workspace = true } serde = { workspace = true } state_processing = { workspace = true } store = { workspace = true } +superstruct = { workspace = true } +typenum = { workspace = true } types = { workspace = true } [dev-dependencies] diff --git a/beacon_node/operation_pool/src/attestation.rs b/beacon_node/operation_pool/src/attestation.rs index f28d8f278a0..897a7e5eccc 100644 --- a/beacon_node/operation_pool/src/attestation.rs +++ 
b/beacon_node/operation_pool/src/attestation.rs @@ -1,12 +1,13 @@ use crate::attestation_storage::{CompactAttestationRef, CompactIndexedAttestation}; use crate::max_cover::MaxCover; use crate::reward_cache::RewardCache; +use ssz::BitList; use state_processing::common::{ attesting_indices_base::get_attesting_indices, base, get_attestation_participation_flag_indices, }; use std::collections::HashMap; use types::{ - Attestation, BeaconState, BitList, ChainSpec, EthSpec, + Attestation, BeaconState, ChainSpec, EthSpec, beacon_state::BeaconStateBase, consts::altair::{PARTICIPATION_FLAG_WEIGHTS, PROPOSER_WEIGHT, WEIGHT_DENOMINATOR}, }; diff --git a/beacon_node/operation_pool/src/attestation_storage.rs b/beacon_node/operation_pool/src/attestation_storage.rs index 4f1b8b81fe4..9094c9cd4d4 100644 --- a/beacon_node/operation_pool/src/attestation_storage.rs +++ b/beacon_node/operation_pool/src/attestation_storage.rs @@ -1,11 +1,13 @@ use crate::AttestationStats; +use bls::AggregateSignature; use itertools::Itertools; +use ssz::{BitList, BitVector}; use std::collections::{BTreeMap, HashMap, HashSet}; +use superstruct::superstruct; +use typenum::Unsigned; use types::{ - AggregateSignature, Attestation, AttestationData, BeaconState, BitList, BitVector, Checkpoint, - Epoch, EthSpec, Hash256, Slot, Unsigned, + Attestation, AttestationData, BeaconState, Checkpoint, Epoch, EthSpec, Hash256, Slot, attestation::{AttestationBase, AttestationElectra}, - superstruct, }; #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)] diff --git a/beacon_node/operation_pool/src/lib.rs b/beacon_node/operation_pool/src/lib.rs index e92d381bacc..00361450a5b 100644 --- a/beacon_node/operation_pool/src/lib.rs +++ b/beacon_node/operation_pool/src/lib.rs @@ -35,12 +35,12 @@ use state_processing::{SigVerifiedOp, VerifyOperation}; use std::collections::{HashMap, HashSet, hash_map::Entry}; use std::marker::PhantomData; use std::ptr; +use typenum::Unsigned; use types::{ AbstractExecPayload, Attestation, 
AttestationData, AttesterSlashing, BeaconState, BeaconStateError, ChainSpec, Epoch, EthSpec, ProposerSlashing, SignedBeaconBlock, SignedBlsToExecutionChange, SignedVoluntaryExit, Slot, SyncAggregate, SyncCommitteeContribution, Validator, sync_aggregate::Error as SyncAggregateError, - typenum::Unsigned, }; type SyncContributions = RwLock>>>; @@ -793,6 +793,8 @@ mod release_tests { use beacon_chain::test_utils::{ BeaconChainHarness, EphemeralHarnessType, RelativeSyncCommittee, test_spec, }; + use bls::Keypair; + use fixed_bytes::FixedBytesExtended; use maplit::hashset; use state_processing::epoch_cache::initialize_epoch_cache; use state_processing::{VerifyOperation, common::get_attesting_indices_from_state}; diff --git a/beacon_node/operation_pool/src/persistence.rs b/beacon_node/operation_pool/src/persistence.rs index ee45c8dd053..241b5fec53c 100644 --- a/beacon_node/operation_pool/src/persistence.rs +++ b/beacon_node/operation_pool/src/persistence.rs @@ -11,6 +11,7 @@ use state_processing::SigVerifiedOp; use std::collections::HashSet; use std::mem; use store::{DBColumn, Error as StoreError, StoreItem}; +use superstruct::superstruct; use types::attestation::AttestationOnDisk; use types::*; diff --git a/beacon_node/operation_pool/src/reward_cache.rs b/beacon_node/operation_pool/src/reward_cache.rs index adedcb5e39e..1e3fc4cf2dc 100644 --- a/beacon_node/operation_pool/src/reward_cache.rs +++ b/beacon_node/operation_pool/src/reward_cache.rs @@ -1,8 +1,7 @@ use crate::OpPoolError; use bitvec::vec::BitVec; -use types::{ - BeaconState, BeaconStateError, Epoch, EthSpec, FixedBytesExtended, Hash256, ParticipationFlags, -}; +use fixed_bytes::FixedBytesExtended; +use types::{BeaconState, BeaconStateError, Epoch, EthSpec, Hash256, ParticipationFlags}; #[derive(Debug, PartialEq, Eq, Clone)] struct Initialization { diff --git a/beacon_node/src/config.rs b/beacon_node/src/config.rs index 0f169ffaad6..26dd3b6642e 100644 --- a/beacon_node/src/config.rs +++ 
b/beacon_node/src/config.rs @@ -6,6 +6,7 @@ use beacon_chain::chain_config::{ }; use beacon_chain::custody_context::NodeCustodyType; use beacon_chain::graffiti_calculator::GraffitiOrigin; +use bls::PublicKeyBytes; use clap::{ArgMatches, Id, parser::ValueSource}; use clap_utils::flags::DISABLE_MALLOC_TUNING_FLAG; use clap_utils::{parse_flag, parse_required}; @@ -29,7 +30,7 @@ use std::str::FromStr; use std::time::Duration; use tracing::{error, info, warn}; use types::graffiti::GraffitiString; -use types::{Checkpoint, Epoch, EthSpec, Hash256, PublicKeyBytes}; +use types::{Checkpoint, Epoch, EthSpec, Hash256}; const PURGE_DB_CONFIRMATION: &str = "confirm"; diff --git a/beacon_node/store/Cargo.toml b/beacon_node/store/Cargo.toml index 61a8474a731..50028fe73ff 100644 --- a/beacon_node/store/Cargo.toml +++ b/beacon_node/store/Cargo.toml @@ -15,11 +15,13 @@ db-key = "0.0.5" directory = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } +fixed_bytes = { workspace = true } itertools = { workspace = true } leveldb = { version = "0.8.6", optional = true, default-features = false } logging = { workspace = true } lru = { workspace = true } metrics = { workspace = true } +milhouse = { workspace = true } parking_lot = { workspace = true } redb = { version = "2.1.3", optional = true } safe_arith = { workspace = true } @@ -31,6 +33,7 @@ strum = { workspace = true } superstruct = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } +typenum = { workspace = true } types = { workspace = true } xdelta3 = { workspace = true } zstd = { workspace = true } diff --git a/beacon_node/store/src/chunked_vector.rs b/beacon_node/store/src/chunked_vector.rs index ee043c14f4e..9c8114e0c14 100644 --- a/beacon_node/store/src/chunked_vector.rs +++ b/beacon_node/store/src/chunked_vector.rs @@ -16,7 +16,9 @@ //! of elements. To find the chunk index of a vector index: `cindex = vindex / chunk_size`. 
use self::UpdatePattern::*; use crate::*; +use milhouse::{List, Vector}; use ssz::{Decode, Encode}; +use typenum::Unsigned; use types::historical_summary::HistoricalSummary; /// Description of how a `BeaconState` field is updated during state processing. @@ -784,6 +786,7 @@ impl From for ChunkError { #[cfg(test)] mod test { use super::*; + use fixed_bytes::FixedBytesExtended; use types::MainnetEthSpec as TestSpec; use types::*; diff --git a/beacon_node/store/src/config.rs b/beacon_node/store/src/config.rs index c0f15f2417b..05aa016ec10 100644 --- a/beacon_node/store/src/config.rs +++ b/beacon_node/store/src/config.rs @@ -1,5 +1,4 @@ use crate::hdiff::HierarchyConfig; -use crate::superstruct; use crate::{DBColumn, Error, StoreItem}; use serde::{Deserialize, Serialize}; use ssz::{Decode, Encode}; @@ -7,6 +6,7 @@ use ssz_derive::{Decode, Encode}; use std::io::{Read, Write}; use std::num::NonZeroUsize; use strum::{Display, EnumString, EnumVariantNames}; +use superstruct::superstruct; use types::EthSpec; use types::non_zero_usize::new_non_zero_usize; use zstd::{Decoder, Encoder}; diff --git a/beacon_node/store/src/database/leveldb_impl.rs b/beacon_node/store/src/database/leveldb_impl.rs index 8fdd5812eab..6b8c6156315 100644 --- a/beacon_node/store/src/database/leveldb_impl.rs +++ b/beacon_node/store/src/database/leveldb_impl.rs @@ -3,6 +3,7 @@ use crate::hot_cold_store::{BytesKey, HotColdDBError}; use crate::{ ColumnIter, ColumnKeyIter, DBColumn, Error, KeyValueStoreOp, get_key_for_col, metrics, }; +use fixed_bytes::FixedBytesExtended; use leveldb::{ compaction::Compaction, database::{ @@ -16,7 +17,7 @@ use leveldb::{ use std::collections::HashSet; use std::marker::PhantomData; use std::path::Path; -use types::{EthSpec, FixedBytesExtended, Hash256}; +use types::{EthSpec, Hash256}; use super::interface::WriteOptions; diff --git a/beacon_node/store/src/errors.rs b/beacon_node/store/src/errors.rs index f62647ae545..6da99b7bd63 100644 --- a/beacon_node/store/src/errors.rs 
+++ b/beacon_node/store/src/errors.rs @@ -6,7 +6,7 @@ use crate::{DBColumn, hdiff}; use leveldb::error::Error as LevelDBError; use ssz::DecodeError; use state_processing::BlockReplayError; -use types::{BeaconStateError, EpochCacheError, Hash256, InconsistentFork, Slot, milhouse}; +use types::{BeaconStateError, EpochCacheError, Hash256, InconsistentFork, Slot}; pub type Result = std::result::Result; diff --git a/beacon_node/store/src/hdiff.rs b/beacon_node/store/src/hdiff.rs index 3e20aab9bf0..323c87a9142 100644 --- a/beacon_node/store/src/hdiff.rs +++ b/beacon_node/store/src/hdiff.rs @@ -2,6 +2,7 @@ use crate::{DBColumn, StoreConfig, StoreItem, metrics}; use bls::PublicKeyBytes; use itertools::Itertools; +use milhouse::List; use serde::{Deserialize, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; @@ -11,7 +12,7 @@ use std::str::FromStr; use std::sync::LazyLock; use superstruct::superstruct; use types::historical_summary::HistoricalSummary; -use types::{BeaconState, ChainSpec, Epoch, EthSpec, Hash256, List, Slot, Validator}; +use types::{BeaconState, ChainSpec, Epoch, EthSpec, Hash256, Slot, Validator}; static EMPTY_PUBKEY: LazyLock = LazyLock::new(PublicKeyBytes::empty); diff --git a/beacon_node/store/src/hot_cold_store.rs b/beacon_node/store/src/hot_cold_store.rs index 8f5eead8c20..c4137191744 100644 --- a/beacon_node/store/src/hot_cold_store.rs +++ b/beacon_node/store/src/hot_cold_store.rs @@ -18,6 +18,7 @@ use crate::{ metrics::{self, COLD_METRIC, HOT_METRIC}, parse_data_column_key, }; +use fixed_bytes::FixedBytesExtended; use itertools::{Itertools, process_results}; use lru::LruCache; use parking_lot::{Mutex, RwLock}; @@ -38,6 +39,7 @@ use std::path::Path; use std::sync::Arc; use std::time::Duration; use tracing::{debug, error, info, instrument, warn}; +use typenum::Unsigned; use types::data_column_sidecar::{ColumnIndex, DataColumnSidecar, DataColumnSidecarList}; use types::*; use zstd::{Decoder, Encoder}; diff --git 
a/beacon_node/store/src/iter.rs b/beacon_node/store/src/iter.rs index 88d509731c8..e2b666e5973 100644 --- a/beacon_node/store/src/iter.rs +++ b/beacon_node/store/src/iter.rs @@ -2,9 +2,9 @@ use crate::errors::HandleUnavailable; use crate::{Error, HotColdDB, ItemStore}; use std::borrow::Cow; use std::marker::PhantomData; +use typenum::Unsigned; use types::{ BeaconState, BeaconStateError, BlindedPayload, EthSpec, Hash256, SignedBeaconBlock, Slot, - typenum::Unsigned, }; /// Implemented for types that have ancestors (e.g., blocks, states) that may be iterated over. @@ -387,8 +387,8 @@ mod test { use crate::{MemoryStore, StoreConfig as Config}; use beacon_chain::test_utils::BeaconChainHarness; use beacon_chain::types::MainnetEthSpec; + use fixed_bytes::FixedBytesExtended; use std::sync::Arc; - use types::FixedBytesExtended; fn get_state() -> BeaconState { let harness = BeaconChainHarness::builder(E::default()) diff --git a/beacon_node/store/src/partial_beacon_state.rs b/beacon_node/store/src/partial_beacon_state.rs index 13b0dfab9f7..8ee37169aca 100644 --- a/beacon_node/store/src/partial_beacon_state.rs +++ b/beacon_node/store/src/partial_beacon_state.rs @@ -3,11 +3,12 @@ use crate::chunked_vector::{ load_variable_list_from_db, load_vector_from_db, }; use crate::{DBColumn, Error, KeyValueStore, KeyValueStoreOp}; -use ssz::{Decode, DecodeError, Encode}; +use milhouse::{List, Vector}; +use ssz::{BitVector, Decode, DecodeError, Encode}; use ssz_derive::{Decode, Encode}; use std::sync::Arc; +use superstruct::superstruct; use types::historical_summary::HistoricalSummary; -use types::superstruct; use types::*; /// DEPRECATED Lightweight variant of the `BeaconState` that is stored in the database. 
diff --git a/common/account_utils/Cargo.toml b/common/account_utils/Cargo.toml index 00c74a13038..d0a3e487c43 100644 --- a/common/account_utils/Cargo.toml +++ b/common/account_utils/Cargo.toml @@ -6,6 +6,7 @@ edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +bls = { workspace = true } eth2_keystore = { workspace = true } eth2_wallet = { workspace = true } filesystem = { workspace = true } diff --git a/common/account_utils/src/validator_definitions.rs b/common/account_utils/src/validator_definitions.rs index 596d50de420..bffdfcc38bd 100644 --- a/common/account_utils/src/validator_definitions.rs +++ b/common/account_utils/src/validator_definitions.rs @@ -4,6 +4,7 @@ //! attempt) to load into the `crate::intialized_validators::InitializedValidators` struct. use crate::{default_keystore_password_path, read_password_string, write_file_via_temporary}; +use bls::PublicKey; use eth2_keystore::Keystore; use regex::Regex; use serde::{Deserialize, Serialize}; @@ -12,7 +13,7 @@ use std::fs::{self, File, create_dir_all}; use std::io; use std::path::{Path, PathBuf}; use tracing::error; -use types::{Address, PublicKey, graffiti::GraffitiString}; +use types::{Address, graffiti::GraffitiString}; use validator_dir::VOTING_KEYSTORE_FILE; use zeroize::Zeroizing; diff --git a/common/deposit_contract/Cargo.toml b/common/deposit_contract/Cargo.toml index 53f1bc3e2b2..76c18ef2429 100644 --- a/common/deposit_contract/Cargo.toml +++ b/common/deposit_contract/Cargo.toml @@ -10,6 +10,7 @@ build = "build.rs" alloy-dyn-abi = { workspace = true } alloy-json-abi = { workspace = true } alloy-primitives = { workspace = true } +bls = { workspace = true } ethereum_ssz = { workspace = true } serde_json = { workspace = true } tree_hash = { workspace = true } diff --git a/common/deposit_contract/src/lib.rs b/common/deposit_contract/src/lib.rs index e5f11bb89c0..6200a4ca158 100644 --- 
a/common/deposit_contract/src/lib.rs +++ b/common/deposit_contract/src/lib.rs @@ -1,9 +1,10 @@ use alloy_dyn_abi::{DynSolValue, JsonAbiExt}; use alloy_json_abi::JsonAbi; use alloy_primitives::FixedBytes; +use bls::{PublicKeyBytes, SignatureBytes}; use ssz::{Decode, DecodeError as SszDecodeError, Encode}; use tree_hash::TreeHash; -use types::{DepositData, Hash256, PublicKeyBytes, SignatureBytes}; +use types::{DepositData, Hash256}; #[derive(Debug)] pub enum Error { @@ -126,10 +127,8 @@ pub fn decode_eth1_tx_data(bytes: &[u8], amount: u64) -> Result<(DepositData, Ha #[cfg(test)] mod tests { use super::*; - use types::{ - ChainSpec, EthSpec, Keypair, MinimalEthSpec, Signature, - test_utils::generate_deterministic_keypair, - }; + use bls::{Keypair, Signature}; + use types::{ChainSpec, EthSpec, MinimalEthSpec, test_utils::generate_deterministic_keypair}; type E = MinimalEthSpec; diff --git a/common/eip_3076/Cargo.toml b/common/eip_3076/Cargo.toml index 851ef26238a..058e1fd1a0a 100644 --- a/common/eip_3076/Cargo.toml +++ b/common/eip_3076/Cargo.toml @@ -11,7 +11,9 @@ json = ["dep:serde_json"] [dependencies] arbitrary = { workspace = true, features = ["derive"], optional = true } +bls = { workspace = true } ethereum_serde_utils = { workspace = true } +fixed_bytes = { workspace = true } serde = { workspace = true } serde_json = { workspace = true, optional = true } types = { workspace = true } diff --git a/common/eip_3076/src/lib.rs b/common/eip_3076/src/lib.rs index 2d47a77de40..cdd05d7b1ed 100644 --- a/common/eip_3076/src/lib.rs +++ b/common/eip_3076/src/lib.rs @@ -1,9 +1,10 @@ +use bls::PublicKeyBytes; use serde::{Deserialize, Serialize}; use std::cmp::max; use std::collections::{HashMap, HashSet}; #[cfg(feature = "json")] use std::io; -use types::{Epoch, Hash256, PublicKeyBytes, Slot}; +use types::{Epoch, Hash256, Slot}; #[derive(Debug)] pub enum Error { @@ -170,9 +171,9 @@ impl Interchange { #[cfg(test)] mod tests { use super::*; + use fixed_bytes::FixedBytesExtended; 
use std::fs::File; use tempfile::tempdir; - use types::FixedBytesExtended; fn get_interchange() -> Interchange { Interchange { diff --git a/common/eth2/Cargo.toml b/common/eth2/Cargo.toml index ba4bcd3649d..da8aba5ded9 100644 --- a/common/eth2/Cargo.toml +++ b/common/eth2/Cargo.toml @@ -10,6 +10,7 @@ lighthouse = ["proto_array", "eth2_keystore", "eip_3076", "zeroize"] events = ["reqwest-eventsource", "futures", "futures-util"] [dependencies] +bls = { workspace = true } context_deserialize = { workspace = true } educe = { workspace = true } eip_3076 = { workspace = true, optional = true } @@ -28,6 +29,7 @@ sensitive_url = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } ssz_types = { workspace = true } +superstruct = { workspace = true } types = { workspace = true } zeroize = { workspace = true, optional = true } diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index 4e2109be04b..820d817d9d8 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -28,6 +28,8 @@ pub use sensitive_url::SensitiveUrl; use self::mixin::{RequestAccept, ResponseOptional}; use self::types::*; +use bls::SignatureBytes; +use context_deserialize::ContextDeserialize; use educe::Educe; #[cfg(feature = "events")] use futures::Stream; diff --git a/common/eth2/src/lighthouse_vc/http_client.rs b/common/eth2/src/lighthouse_vc/http_client.rs index 8c9d3397a8c..3c850fcb052 100644 --- a/common/eth2/src/lighthouse_vc/http_client.rs +++ b/common/eth2/src/lighthouse_vc/http_client.rs @@ -1,5 +1,6 @@ use super::types::*; use crate::{Error, success_or_error}; +use bls::PublicKeyBytes; use reqwest::{ IntoUrl, header::{HeaderMap, HeaderValue}, diff --git a/common/eth2/src/lighthouse_vc/std_types.rs b/common/eth2/src/lighthouse_vc/std_types.rs index 0290bdd0b79..c54252b9e33 100644 --- a/common/eth2/src/lighthouse_vc/std_types.rs +++ b/common/eth2/src/lighthouse_vc/std_types.rs @@ -1,6 +1,7 @@ +use bls::PublicKeyBytes; use eth2_keystore::Keystore; use 
serde::{Deserialize, Serialize}; -use types::{Address, Graffiti, PublicKeyBytes}; +use types::{Address, Graffiti}; use zeroize::Zeroizing; pub use eip_3076::Interchange; diff --git a/common/eth2/src/lighthouse_vc/types.rs b/common/eth2/src/lighthouse_vc/types.rs index 8e1d90f8f94..07f8421dc5c 100644 --- a/common/eth2/src/lighthouse_vc/types.rs +++ b/common/eth2/src/lighthouse_vc/types.rs @@ -1,6 +1,7 @@ pub use crate::lighthouse::Health; pub use crate::lighthouse_vc::std_types::*; pub use crate::types::{GenericResponse, VersionData}; +use bls::{PublicKey, PublicKeyBytes}; use eth2_keystore::Keystore; use serde::{Deserialize, Serialize}; use std::path::PathBuf; diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs index 5aa3de5e170..aace8f936c9 100644 --- a/common/eth2/src/types.rs +++ b/common/eth2/src/types.rs @@ -7,6 +7,8 @@ use crate::{ CONSENSUS_BLOCK_VALUE_HEADER, CONSENSUS_VERSION_HEADER, EXECUTION_PAYLOAD_BLINDED_HEADER, EXECUTION_PAYLOAD_VALUE_HEADER, Error as ServerError, }; +use bls::{PublicKeyBytes, SecretKey, Signature, SignatureBytes}; +use context_deserialize::ContextDeserialize; use mediatype::{MediaType, MediaTypeList, names}; use reqwest::header::HeaderMap; use serde::{Deserialize, Deserializer, Serialize}; @@ -18,6 +20,7 @@ use std::fmt::{self, Display}; use std::str::FromStr; use std::sync::Arc; use std::time::Duration; +use superstruct::superstruct; #[cfg(test)] use test_random_derive::TestRandom; diff --git a/common/eth2_network_config/Cargo.toml b/common/eth2_network_config/Cargo.toml index ec5b0cc1d71..416ffb1975a 100644 --- a/common/eth2_network_config/Cargo.toml +++ b/common/eth2_network_config/Cargo.toml @@ -10,6 +10,7 @@ build = "build.rs" bytes = { workspace = true } discv5 = { workspace = true } eth2_config = { workspace = true } +fixed_bytes = { workspace = true } kzg = { workspace = true } pretty_reqwest_error = { workspace = true } reqwest = { workspace = true } diff --git a/common/eth2_network_config/src/lib.rs 
b/common/eth2_network_config/src/lib.rs index 12de21239a0..16ee45e524e 100644 --- a/common/eth2_network_config/src/lib.rs +++ b/common/eth2_network_config/src/lib.rs @@ -464,9 +464,10 @@ fn parse_state_download_url(url: &str) -> Result { #[cfg(test)] mod tests { use super::*; + use fixed_bytes::FixedBytesExtended; use ssz::Encode; use tempfile::Builder as TempBuilder; - use types::{Eth1Data, FixedBytesExtended, GnosisEthSpec, MainnetEthSpec}; + use types::{Eth1Data, GnosisEthSpec, MainnetEthSpec}; type E = MainnetEthSpec; diff --git a/common/validator_dir/src/builder.rs b/common/validator_dir/src/builder.rs index bae36789bb5..ab495242e49 100644 --- a/common/validator_dir/src/builder.rs +++ b/common/validator_dir/src/builder.rs @@ -1,5 +1,5 @@ use crate::{Error as DirError, ValidatorDir}; -use bls::get_withdrawal_credentials; +use bls::{Keypair, Signature, get_withdrawal_credentials}; use deposit_contract::{Error as DepositError, encode_eth1_tx_data}; use eth2_keystore::{Error as KeystoreError, Keystore, KeystoreBuilder, PlainText}; use filesystem::create_with_600_perms; @@ -7,7 +7,7 @@ use rand::{Rng, distr::Alphanumeric}; use std::fs::{File, create_dir_all}; use std::io::{self, Write}; use std::path::{Path, PathBuf}; -use types::{ChainSpec, DepositData, Hash256, Keypair, Signature}; +use types::{ChainSpec, DepositData, Hash256}; /// The `Alphanumeric` crate only generates a-z, A-Z, 0-9, therefore it has a range of 62 /// characters. 
diff --git a/common/validator_dir/src/validator_dir.rs b/common/validator_dir/src/validator_dir.rs index 8b50ea66876..0799897a70f 100644 --- a/common/validator_dir/src/validator_dir.rs +++ b/common/validator_dir/src/validator_dir.rs @@ -2,6 +2,7 @@ use crate::builder::{ ETH1_DEPOSIT_AMOUNT_FILE, ETH1_DEPOSIT_DATA_FILE, VOTING_KEYSTORE_FILE, WITHDRAWAL_KEYSTORE_FILE, keystore_password_path, }; +use bls::Keypair; use deposit_contract::decode_eth1_tx_data; use educe::Educe; use eth2_keystore::{Error as KeystoreError, Keystore, PlainText}; @@ -10,7 +11,7 @@ use std::fs::{File, read, write}; use std::io; use std::path::{Path, PathBuf}; use tree_hash::TreeHash; -use types::{DepositData, Hash256, Keypair}; +use types::{DepositData, Hash256}; /// The file used to save the Eth1 transaction hash from a deposit. pub const ETH1_DEPOSIT_TX_HASH_FILE: &str = "eth1-deposit-tx-hash.txt"; diff --git a/common/validator_dir/tests/tests.rs b/common/validator_dir/tests/tests.rs index 7d9730ebd37..ede80c244ee 100644 --- a/common/validator_dir/tests/tests.rs +++ b/common/validator_dir/tests/tests.rs @@ -1,10 +1,11 @@ #![cfg(not(debug_assertions))] +use bls::Keypair; use eth2_keystore::{Keystore, KeystoreBuilder, PlainText}; use std::fs::{self, File}; use std::path::Path; use tempfile::{TempDir, tempdir}; -use types::{EthSpec, Keypair, MainnetEthSpec, test_utils::generate_deterministic_keypair}; +use types::{EthSpec, MainnetEthSpec, test_utils::generate_deterministic_keypair}; use validator_dir::{ Builder, BuilderError, ETH1_DEPOSIT_DATA_FILE, ETH1_DEPOSIT_TX_HASH_FILE, VOTING_KEYSTORE_FILE, ValidatorDir, WITHDRAWAL_KEYSTORE_FILE, diff --git a/consensus/fork_choice/Cargo.toml b/consensus/fork_choice/Cargo.toml index 0a244c2ba19..a07aa38aa5b 100644 --- a/consensus/fork_choice/Cargo.toml +++ b/consensus/fork_choice/Cargo.toml @@ -8,6 +8,7 @@ edition = { workspace = true } [dependencies] ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } +fixed_bytes = { workspace 
= true } logging = { workspace = true } metrics = { workspace = true } proto_array = { workspace = true } diff --git a/consensus/fork_choice/src/fork_choice.rs b/consensus/fork_choice/src/fork_choice.rs index 6565e7cdaf6..9a8cae0c365 100644 --- a/consensus/fork_choice/src/fork_choice.rs +++ b/consensus/fork_choice/src/fork_choice.rs @@ -1,5 +1,6 @@ use crate::metrics::{self, scrape_for_metrics}; use crate::{ForkChoiceStore, InvalidationOperation}; +use fixed_bytes::FixedBytesExtended; use logging::crit; use proto_array::{ Block as ProtoBlock, DisallowedReOrgOffsets, ExecutionStatus, JustifiedBalances, @@ -19,7 +20,7 @@ use tracing::{debug, instrument, warn}; use types::{ AbstractExecPayload, AttestationShufflingId, AttesterSlashingRef, BeaconBlockRef, BeaconState, BeaconStateError, ChainSpec, Checkpoint, Epoch, EthSpec, ExecPayload, ExecutionBlockHash, - FixedBytesExtended, Hash256, IndexedAttestationRef, RelativeEpoch, SignedBeaconBlock, Slot, + Hash256, IndexedAttestationRef, RelativeEpoch, SignedBeaconBlock, Slot, consts::bellatrix::INTERVALS_PER_SLOT, }; diff --git a/consensus/fork_choice/tests/tests.rs b/consensus/fork_choice/tests/tests.rs index 67b792ef0d8..d3a84ee85be 100644 --- a/consensus/fork_choice/tests/tests.rs +++ b/consensus/fork_choice/tests/tests.rs @@ -7,6 +7,7 @@ use beacon_chain::{ BeaconChain, BeaconChainError, BeaconForkChoiceStore, ChainConfig, ForkChoiceError, StateSkipConfig, WhenSlotSkipped, }; +use fixed_bytes::FixedBytesExtended; use fork_choice::{ ForkChoiceStore, InvalidAttestation, InvalidBlock, PayloadVerificationStatus, QueuedAttestation, }; @@ -17,9 +18,9 @@ use std::time::Duration; use store::MemoryStore; use types::SingleAttestation; use types::{ - BeaconBlockRef, BeaconState, ChainSpec, Checkpoint, Epoch, EthSpec, FixedBytesExtended, - ForkName, Hash256, IndexedAttestation, MainnetEthSpec, RelativeEpoch, SignedBeaconBlock, Slot, - SubnetId, test_utils::generate_deterministic_keypair, + BeaconBlockRef, BeaconState, ChainSpec, 
Checkpoint, Epoch, EthSpec, ForkName, Hash256, + IndexedAttestation, MainnetEthSpec, RelativeEpoch, SignedBeaconBlock, Slot, SubnetId, + test_utils::generate_deterministic_keypair, }; pub type E = MainnetEthSpec; diff --git a/consensus/proto_array/Cargo.toml b/consensus/proto_array/Cargo.toml index bd6757c0fad..782610e0d35 100644 --- a/consensus/proto_array/Cargo.toml +++ b/consensus/proto_array/Cargo.toml @@ -11,6 +11,7 @@ path = "src/bin.rs" [dependencies] ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } +fixed_bytes = { workspace = true } safe_arith = { workspace = true } serde = { workspace = true } serde_yaml = { workspace = true } diff --git a/consensus/proto_array/src/fork_choice_test_definition.rs b/consensus/proto_array/src/fork_choice_test_definition.rs index 43a7e3b77fe..e9deb6759fc 100644 --- a/consensus/proto_array/src/fork_choice_test_definition.rs +++ b/consensus/proto_array/src/fork_choice_test_definition.rs @@ -5,11 +5,12 @@ mod votes; use crate::proto_array_fork_choice::{Block, ExecutionStatus, ProtoArrayForkChoice}; use crate::{InvalidationOperation, JustifiedBalances}; +use fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Serialize}; use std::collections::BTreeSet; use types::{ - AttestationShufflingId, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, FixedBytesExtended, - Hash256, MainnetEthSpec, Slot, + AttestationShufflingId, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256, + MainnetEthSpec, Slot, }; pub use execution_status::*; diff --git a/consensus/proto_array/src/fork_choice_test_definition/no_votes.rs b/consensus/proto_array/src/fork_choice_test_definition/no_votes.rs index de84fbdd128..d20eaacb99a 100644 --- a/consensus/proto_array/src/fork_choice_test_definition/no_votes.rs +++ b/consensus/proto_array/src/fork_choice_test_definition/no_votes.rs @@ -1,4 +1,4 @@ -use types::FixedBytesExtended; +use fixed_bytes::FixedBytesExtended; use super::*; diff --git 
a/consensus/proto_array/src/proto_array.rs b/consensus/proto_array/src/proto_array.rs index 1d78ce9f443..5bfcdae463d 100644 --- a/consensus/proto_array/src/proto_array.rs +++ b/consensus/proto_array/src/proto_array.rs @@ -1,5 +1,6 @@ use crate::error::InvalidBestNodeInfo; use crate::{Block, ExecutionStatus, JustifiedBalances, error::Error}; +use fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Serialize}; use ssz::Encode; use ssz::four_byte_option_impl; @@ -7,8 +8,8 @@ use ssz_derive::{Decode, Encode}; use std::collections::{HashMap, HashSet}; use superstruct::superstruct; use types::{ - AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, - FixedBytesExtended, Hash256, Slot, + AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256, + Slot, }; // Define a "legacy" implementation of `Option` which uses four bytes for encoding the union diff --git a/consensus/proto_array/src/proto_array_fork_choice.rs b/consensus/proto_array/src/proto_array_fork_choice.rs index 137471ce36d..3edf1e0644d 100644 --- a/consensus/proto_array/src/proto_array_fork_choice.rs +++ b/consensus/proto_array/src/proto_array_fork_choice.rs @@ -7,6 +7,7 @@ use crate::{ }, ssz_container::SszContainer, }; +use fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Serialize}; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; @@ -15,8 +16,8 @@ use std::{ fmt, }; use types::{ - AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, - FixedBytesExtended, Hash256, Slot, + AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256, + Slot, }; pub const DEFAULT_PRUNE_THRESHOLD: usize = 256; @@ -1095,7 +1096,8 @@ fn compute_deltas( #[cfg(test)] mod test_compute_deltas { use super::*; - use types::{FixedBytesExtended, MainnetEthSpec}; + use fixed_bytes::FixedBytesExtended; + use types::MainnetEthSpec; /// Gives a hash that is not the zero hash (unless i is 
`usize::MAX)`. fn hash_from_index(i: usize) -> Hash256 { diff --git a/consensus/state_processing/Cargo.toml b/consensus/state_processing/Cargo.toml index 3821aa16891..a08035d5838 100644 --- a/consensus/state_processing/Cargo.toml +++ b/consensus/state_processing/Cargo.toml @@ -24,11 +24,13 @@ educe = { workspace = true } ethereum_hashing = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } +fixed_bytes = { workspace = true } int_to_bytes = { workspace = true } integer-sqrt = "0.1.5" itertools = { workspace = true } merkle_proof = { workspace = true } metrics = { workspace = true } +milhouse = { workspace = true } rand = { workspace = true } rayon = { workspace = true } safe_arith = { workspace = true } @@ -37,6 +39,7 @@ ssz_types = { workspace = true } test_random_derive = { path = "../../common/test_random_derive" } tracing = { workspace = true } tree_hash = { workspace = true } +typenum = { workspace = true } types = { workspace = true } [dev-dependencies] diff --git a/consensus/state_processing/src/common/get_attesting_indices.rs b/consensus/state_processing/src/common/get_attesting_indices.rs index e4f5aa3c8bc..dc7be7c2515 100644 --- a/consensus/state_processing/src/common/get_attesting_indices.rs +++ b/consensus/state_processing/src/common/get_attesting_indices.rs @@ -2,6 +2,7 @@ use types::*; pub mod attesting_indices_base { use crate::per_block_processing::errors::{AttestationInvalid as Invalid, BlockOperationError}; + use ssz_types::{BitList, VariableList}; use types::*; /// Convert `attestation` to (almost) indexed-verifiable form. 
@@ -44,10 +45,10 @@ pub mod attesting_indices_base { } pub mod attesting_indices_electra { - use std::collections::HashSet; - use crate::per_block_processing::errors::{AttestationInvalid as Invalid, BlockOperationError}; use safe_arith::SafeArith; + use ssz_types::{BitList, BitVector, VariableList}; + use std::collections::HashSet; use types::*; /// Compute an Electra IndexedAttestation given a list of committees. diff --git a/consensus/state_processing/src/common/slash_validator.rs b/consensus/state_processing/src/common/slash_validator.rs index 52f360849e0..01c1855fb10 100644 --- a/consensus/state_processing/src/common/slash_validator.rs +++ b/consensus/state_processing/src/common/slash_validator.rs @@ -6,6 +6,7 @@ use crate::{ }; use safe_arith::SafeArith; use std::cmp; +use typenum::Unsigned; use types::{ consts::altair::{PROPOSER_WEIGHT, WEIGHT_DENOMINATOR}, *, diff --git a/consensus/state_processing/src/epoch_cache.rs b/consensus/state_processing/src/epoch_cache.rs index 86db037446b..ee03596d098 100644 --- a/consensus/state_processing/src/epoch_cache.rs +++ b/consensus/state_processing/src/epoch_cache.rs @@ -2,12 +2,11 @@ use crate::common::altair::BaseRewardPerIncrement; use crate::common::base::SqrtTotalActiveBalance; use crate::common::{altair, base}; use crate::metrics; +use fixed_bytes::FixedBytesExtended; use safe_arith::SafeArith; use tracing::instrument; use types::epoch_cache::{EpochCache, EpochCacheError, EpochCacheKey}; -use types::{ - ActivationQueue, BeaconState, ChainSpec, EthSpec, FixedBytesExtended, ForkName, Hash256, -}; +use types::{ActivationQueue, BeaconState, ChainSpec, EthSpec, ForkName, Hash256}; /// Precursor to an `EpochCache`. 
pub struct PreEpochCache { diff --git a/consensus/state_processing/src/genesis.rs b/consensus/state_processing/src/genesis.rs index 88ef79310dc..d00e1fcfacc 100644 --- a/consensus/state_processing/src/genesis.rs +++ b/consensus/state_processing/src/genesis.rs @@ -7,6 +7,7 @@ use crate::upgrade::{ upgrade_to_altair, upgrade_to_bellatrix, upgrade_to_capella, upgrade_to_deneb, upgrade_to_fulu, upgrade_to_gloas, }; +use fixed_bytes::FixedBytesExtended; use safe_arith::{ArithError, SafeArith}; use std::sync::Arc; use tree_hash::TreeHash; diff --git a/consensus/state_processing/src/per_block_processing.rs b/consensus/state_processing/src/per_block_processing.rs index 9e7a20040e8..f78c8c4eb38 100644 --- a/consensus/state_processing/src/per_block_processing.rs +++ b/consensus/state_processing/src/per_block_processing.rs @@ -5,6 +5,7 @@ use safe_arith::{ArithError, SafeArith, SafeArithIter}; use signature_sets::{block_proposal_signature_set, get_pubkey_from_state, randao_signature_set}; use std::borrow::Cow; use tree_hash::TreeHash; +use typenum::Unsigned; use types::*; pub use self::verify_attester_slashing::{ diff --git a/consensus/state_processing/src/per_block_processing/altair/sync_committee.rs b/consensus/state_processing/src/per_block_processing/altair/sync_committee.rs index 1219c7df442..8cc9de42db0 100644 --- a/consensus/state_processing/src/per_block_processing/altair/sync_committee.rs +++ b/consensus/state_processing/src/per_block_processing/altair/sync_committee.rs @@ -1,12 +1,12 @@ use crate::common::{altair::BaseRewardPerIncrement, decrease_balance, increase_balance}; use crate::per_block_processing::errors::{BlockProcessingError, SyncAggregateInvalid}; use crate::{VerifySignatures, signature_sets::sync_aggregate_signature_set}; +use bls::PublicKeyBytes; use safe_arith::SafeArith; use std::borrow::Cow; +use typenum::Unsigned; use types::consts::altair::{PROPOSER_WEIGHT, SYNC_REWARD_WEIGHT, WEIGHT_DENOMINATOR}; -use types::{ - BeaconState, BeaconStateError, 
ChainSpec, EthSpec, PublicKeyBytes, SyncAggregate, Unsigned, -}; +use types::{BeaconState, BeaconStateError, ChainSpec, EthSpec, SyncAggregate}; pub fn process_sync_aggregate( state: &mut BeaconState, diff --git a/consensus/state_processing/src/per_block_processing/process_operations.rs b/consensus/state_processing/src/per_block_processing/process_operations.rs index 9a1c6c2f6ad..8afeeb685bc 100644 --- a/consensus/state_processing/src/per_block_processing/process_operations.rs +++ b/consensus/state_processing/src/per_block_processing/process_operations.rs @@ -5,8 +5,9 @@ use crate::common::{ slash_validator, }; use crate::per_block_processing::errors::{BlockProcessingError, IntoWithIndex}; +use ssz_types::FixedVector; +use typenum::U33; use types::consts::altair::{PARTICIPATION_FLAG_WEIGHTS, PROPOSER_WEIGHT, WEIGHT_DENOMINATOR}; -use types::typenum::U33; pub fn process_operations>( state: &mut BeaconState, diff --git a/consensus/state_processing/src/per_block_processing/signature_sets.rs b/consensus/state_processing/src/per_block_processing/signature_sets.rs index dafd0d79ea9..0e936007eec 100644 --- a/consensus/state_processing/src/per_block_processing/signature_sets.rs +++ b/consensus/state_processing/src/per_block_processing/signature_sets.rs @@ -2,17 +2,18 @@ //! validated individually, or alongside in others in a potentially cheaper bulk operation. //! //! This module exposes one function to extract each type of `SignatureSet` from a `BeaconBlock`. 
-use bls::SignatureSet; +use bls::{AggregateSignature, PublicKey, PublicKeyBytes, Signature, SignatureSet}; use ssz::DecodeError; use std::borrow::Cow; use tree_hash::TreeHash; +use typenum::Unsigned; use types::{ - AbstractExecPayload, AggregateSignature, AttesterSlashingRef, BeaconBlockRef, BeaconState, - BeaconStateError, ChainSpec, DepositData, Domain, Epoch, EthSpec, Fork, Hash256, - InconsistentFork, IndexedAttestation, IndexedAttestationRef, ProposerSlashing, PublicKey, - PublicKeyBytes, Signature, SignedAggregateAndProof, SignedBeaconBlock, SignedBeaconBlockHeader, - SignedBlsToExecutionChange, SignedContributionAndProof, SignedRoot, SignedVoluntaryExit, - SigningData, Slot, SyncAggregate, SyncAggregatorSelectionData, Unsigned, + AbstractExecPayload, AttesterSlashingRef, BeaconBlockRef, BeaconState, BeaconStateError, + ChainSpec, DepositData, Domain, Epoch, EthSpec, Fork, Hash256, InconsistentFork, + IndexedAttestation, IndexedAttestationRef, ProposerSlashing, SignedAggregateAndProof, + SignedBeaconBlock, SignedBeaconBlockHeader, SignedBlsToExecutionChange, + SignedContributionAndProof, SignedRoot, SignedVoluntaryExit, SigningData, Slot, SyncAggregate, + SyncAggregatorSelectionData, }; pub type Result = std::result::Result; diff --git a/consensus/state_processing/src/per_block_processing/tests.rs b/consensus/state_processing/src/per_block_processing/tests.rs index c32797f77f3..739717b33ff 100644 --- a/consensus/state_processing/src/per_block_processing/tests.rs +++ b/consensus/state_processing/src/per_block_processing/tests.rs @@ -11,7 +11,10 @@ use crate::{ per_block_processing::{process_operations, verify_exit::verify_exit}, }; use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType}; +use bls::{AggregateSignature, Keypair, PublicKeyBytes, Signature, SignatureBytes}; +use fixed_bytes::FixedBytesExtended; use ssz_types::Bitfield; +use ssz_types::VariableList; use std::sync::{Arc, LazyLock}; use test_utils::generate_deterministic_keypairs; 
use types::*; diff --git a/consensus/state_processing/src/per_block_processing/verify_deposit.rs b/consensus/state_processing/src/per_block_processing/verify_deposit.rs index c996e580a78..d403bfa82b6 100644 --- a/consensus/state_processing/src/per_block_processing/verify_deposit.rs +++ b/consensus/state_processing/src/per_block_processing/verify_deposit.rs @@ -1,5 +1,6 @@ use super::errors::{BlockOperationError, DepositInvalid}; use crate::per_block_processing::signature_sets::deposit_pubkey_signature_message; +use bls::PublicKeyBytes; use merkle_proof::verify_merkle_proof; use safe_arith::SafeArith; use tree_hash::TreeHash; diff --git a/consensus/state_processing/src/per_epoch_processing/altair/participation_flag_updates.rs b/consensus/state_processing/src/per_epoch_processing/altair/participation_flag_updates.rs index 5c08406eaef..5e177c5d2b7 100644 --- a/consensus/state_processing/src/per_epoch_processing/altair/participation_flag_updates.rs +++ b/consensus/state_processing/src/per_epoch_processing/altair/participation_flag_updates.rs @@ -1,5 +1,5 @@ use crate::EpochProcessingError; -use types::List; +use milhouse::List; use types::beacon_state::BeaconState; use types::eth_spec::EthSpec; use types::participation_flags::ParticipationFlags; diff --git a/consensus/state_processing/src/per_epoch_processing/epoch_processing_summary.rs b/consensus/state_processing/src/per_epoch_processing/epoch_processing_summary.rs index fd712cc8e50..a818e087755 100644 --- a/consensus/state_processing/src/per_epoch_processing/epoch_processing_summary.rs +++ b/consensus/state_processing/src/per_epoch_processing/epoch_processing_summary.rs @@ -1,9 +1,10 @@ use super::base::{TotalBalances, ValidatorStatus, validator_statuses::InclusionInfo}; use crate::metrics; +use milhouse::List; use std::sync::Arc; use types::{ - BeaconStateError, Epoch, EthSpec, List, ParticipationFlags, ProgressiveBalancesCache, - SyncCommittee, Validator, + BeaconStateError, Epoch, EthSpec, ParticipationFlags, 
ProgressiveBalancesCache, SyncCommittee, + Validator, consts::altair::{TIMELY_HEAD_FLAG_INDEX, TIMELY_SOURCE_FLAG_INDEX, TIMELY_TARGET_FLAG_INDEX}, }; diff --git a/consensus/state_processing/src/per_epoch_processing/errors.rs b/consensus/state_processing/src/per_epoch_processing/errors.rs index a5a2a69ebff..4818dcbf670 100644 --- a/consensus/state_processing/src/per_epoch_processing/errors.rs +++ b/consensus/state_processing/src/per_epoch_processing/errors.rs @@ -1,4 +1,5 @@ -use types::{BeaconStateError, EpochCacheError, InconsistentFork, milhouse}; +use milhouse; +use types::{BeaconStateError, EpochCacheError, InconsistentFork}; #[derive(Debug, PartialEq)] pub enum EpochProcessingError { diff --git a/consensus/state_processing/src/per_epoch_processing/historical_roots_update.rs b/consensus/state_processing/src/per_epoch_processing/historical_roots_update.rs index 8fcdda062c9..9172d954bc8 100644 --- a/consensus/state_processing/src/per_epoch_processing/historical_roots_update.rs +++ b/consensus/state_processing/src/per_epoch_processing/historical_roots_update.rs @@ -1,7 +1,7 @@ use super::errors::EpochProcessingError; use safe_arith::SafeArith; use tree_hash::TreeHash; -use types::Unsigned; +use typenum::Unsigned; use types::beacon_state::BeaconState; use types::eth_spec::EthSpec; diff --git a/consensus/state_processing/src/per_epoch_processing/justification_and_finalization_state.rs b/consensus/state_processing/src/per_epoch_processing/justification_and_finalization_state.rs index 66d68804e1d..8d712fd19b8 100644 --- a/consensus/state_processing/src/per_epoch_processing/justification_and_finalization_state.rs +++ b/consensus/state_processing/src/per_epoch_processing/justification_and_finalization_state.rs @@ -1,4 +1,5 @@ -use types::{BeaconState, BeaconStateError, BitVector, Checkpoint, Epoch, EthSpec, Hash256}; +use ssz_types::BitVector; +use types::{BeaconState, BeaconStateError, Checkpoint, Epoch, EthSpec, Hash256}; /// This is a subset of the `BeaconState` 
which is used to compute justification and finality /// without modifying the `BeaconState`. diff --git a/consensus/state_processing/src/per_epoch_processing/resets.rs b/consensus/state_processing/src/per_epoch_processing/resets.rs index c9f69c3c95e..e05fb30c334 100644 --- a/consensus/state_processing/src/per_epoch_processing/resets.rs +++ b/consensus/state_processing/src/per_epoch_processing/resets.rs @@ -1,8 +1,9 @@ use super::errors::EpochProcessingError; +use milhouse::List; use safe_arith::SafeArith; +use typenum::Unsigned; use types::beacon_state::BeaconState; use types::eth_spec::EthSpec; -use types::{List, Unsigned}; pub fn process_eth1_data_reset( state: &mut BeaconState, diff --git a/consensus/state_processing/src/per_epoch_processing/single_pass.rs b/consensus/state_processing/src/per_epoch_processing/single_pass.rs index 1584e932bdf..914e025f2fe 100644 --- a/consensus/state_processing/src/per_epoch_processing/single_pass.rs +++ b/consensus/state_processing/src/per_epoch_processing/single_pass.rs @@ -8,19 +8,20 @@ use crate::{ per_epoch_processing::{Delta, Error, ParticipationEpochSummary}, }; use itertools::izip; +use milhouse::{Cow, List, Vector}; use safe_arith::{SafeArith, SafeArithIter}; use std::cmp::{max, min}; use std::collections::{BTreeSet, HashMap}; use tracing::instrument; +use typenum::Unsigned; use types::{ ActivationQueue, BeaconState, BeaconStateError, ChainSpec, Checkpoint, DepositData, Epoch, - EthSpec, ExitCache, ForkName, List, ParticipationFlags, PendingDeposit, - ProgressiveBalancesCache, RelativeEpoch, Unsigned, Validator, Vector, + EthSpec, ExitCache, ForkName, ParticipationFlags, PendingDeposit, ProgressiveBalancesCache, + RelativeEpoch, Validator, consts::altair::{ NUM_FLAG_INDICES, PARTICIPATION_FLAG_WEIGHTS, TIMELY_HEAD_FLAG_INDEX, TIMELY_TARGET_FLAG_INDEX, WEIGHT_DENOMINATOR, }, - milhouse::Cow, }; pub struct SinglePassConfig { diff --git a/consensus/state_processing/src/per_epoch_processing/slashings.rs 
b/consensus/state_processing/src/per_epoch_processing/slashings.rs index 47eb06e907a..6008276d150 100644 --- a/consensus/state_processing/src/per_epoch_processing/slashings.rs +++ b/consensus/state_processing/src/per_epoch_processing/slashings.rs @@ -4,7 +4,8 @@ use crate::per_epoch_processing::{ single_pass::{SinglePassConfig, process_epoch_single_pass}, }; use safe_arith::{SafeArith, SafeArithIter}; -use types::{BeaconState, ChainSpec, EthSpec, Unsigned}; +use typenum::Unsigned; +use types::{BeaconState, ChainSpec, EthSpec}; /// Process slashings. pub fn process_slashings( diff --git a/consensus/state_processing/src/per_slot_processing.rs b/consensus/state_processing/src/per_slot_processing.rs index 8695054e1e7..0f8e5dc52d8 100644 --- a/consensus/state_processing/src/per_slot_processing.rs +++ b/consensus/state_processing/src/per_slot_processing.rs @@ -3,6 +3,7 @@ use crate::upgrade::{ upgrade_to_electra, upgrade_to_fulu, upgrade_to_gloas, }; use crate::{per_epoch_processing::EpochProcessingSummary, *}; +use fixed_bytes::FixedBytesExtended; use safe_arith::{ArithError, SafeArith}; use tracing::instrument; use types::*; diff --git a/consensus/state_processing/src/state_advance.rs b/consensus/state_processing/src/state_advance.rs index 4d38e7797e6..19b21dad19a 100644 --- a/consensus/state_processing/src/state_advance.rs +++ b/consensus/state_processing/src/state_advance.rs @@ -5,7 +5,8 @@ //! duplication and protect against some easy-to-make mistakes when performing state advances. 
use crate::*; -use types::{BeaconState, ChainSpec, EthSpec, FixedBytesExtended, Hash256, Slot}; +use fixed_bytes::FixedBytesExtended; +use types::{BeaconState, ChainSpec, EthSpec, Hash256, Slot}; #[derive(Debug, PartialEq)] pub enum Error { diff --git a/consensus/state_processing/src/upgrade/altair.rs b/consensus/state_processing/src/upgrade/altair.rs index 3006da25ae7..022175ff999 100644 --- a/consensus/state_processing/src/upgrade/altair.rs +++ b/consensus/state_processing/src/upgrade/altair.rs @@ -2,11 +2,12 @@ use crate::common::update_progressive_balances_cache::initialize_progressive_bal use crate::common::{ attesting_indices_base::get_attesting_indices, get_attestation_participation_flag_indices, }; +use milhouse::List; use std::mem; use std::sync::Arc; use types::{ BeaconState, BeaconStateAltair, BeaconStateError as Error, ChainSpec, EpochCache, EthSpec, - Fork, List, ParticipationFlags, PendingAttestation, RelativeEpoch, SyncCommittee, + Fork, ParticipationFlags, PendingAttestation, RelativeEpoch, SyncCommittee, }; /// Translate the participation information from the epoch prior to the fork into Altair's format. diff --git a/consensus/state_processing/src/upgrade/capella.rs b/consensus/state_processing/src/upgrade/capella.rs index ae0dbde7678..948fa511b73 100644 --- a/consensus/state_processing/src/upgrade/capella.rs +++ b/consensus/state_processing/src/upgrade/capella.rs @@ -1,7 +1,8 @@ +use milhouse::List; use std::mem; use types::{ BeaconState, BeaconStateCapella, BeaconStateError as Error, ChainSpec, EpochCache, EthSpec, - Fork, List, + Fork, }; /// Transform a `Bellatrix` state into an `Capella` state. 
diff --git a/consensus/state_processing/src/upgrade/fulu.rs b/consensus/state_processing/src/upgrade/fulu.rs index c2aced7047a..c14c1edbec3 100644 --- a/consensus/state_processing/src/upgrade/fulu.rs +++ b/consensus/state_processing/src/upgrade/fulu.rs @@ -1,8 +1,7 @@ +use milhouse::Vector; use safe_arith::SafeArith; use std::mem; -use types::{ - BeaconState, BeaconStateError as Error, BeaconStateFulu, ChainSpec, EthSpec, Fork, Vector, -}; +use types::{BeaconState, BeaconStateError as Error, BeaconStateFulu, ChainSpec, EthSpec, Fork}; /// Transform a `Electra` state into an `Fulu` state. pub fn upgrade_to_fulu( diff --git a/consensus/types/Cargo.toml b/consensus/types/Cargo.toml index 559a1819480..78c6f871cb4 100644 --- a/consensus/types/Cargo.toml +++ b/consensus/types/Cargo.toml @@ -65,6 +65,7 @@ test_random_derive = { path = "../../common/test_random_derive" } tracing = { workspace = true } tree_hash = { workspace = true } tree_hash_derive = { workspace = true } +typenum = { workspace = true } [dev-dependencies] beacon_chain = { workspace = true } diff --git a/consensus/types/benches/benches.rs b/consensus/types/benches/benches.rs index 814001d9660..397c33163e9 100644 --- a/consensus/types/benches/benches.rs +++ b/consensus/types/benches/benches.rs @@ -1,10 +1,11 @@ use criterion::{BatchSize, BenchmarkId, Criterion, black_box, criterion_group, criterion_main}; +use fixed_bytes::FixedBytesExtended; use milhouse::List; use rayon::prelude::*; use ssz::Encode; use std::sync::Arc; use types::{ - BeaconState, Epoch, Eth1Data, EthSpec, FixedBytesExtended, Hash256, MainnetEthSpec, Validator, + BeaconState, Epoch, Eth1Data, EthSpec, Hash256, MainnetEthSpec, Validator, test_utils::generate_deterministic_keypair, }; diff --git a/consensus/types/src/block/beacon_block.rs b/consensus/types/src/block/beacon_block.rs index c2f361eb4b8..a4e7e800bcc 100644 --- a/consensus/types/src/block/beacon_block.rs +++ b/consensus/types/src/block/beacon_block.rs @@ -7,11 +7,12 @@ use 
fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, DecodeError}; use ssz_derive::{Decode, Encode}; -use ssz_types::{BitList, BitVector, FixedVector, VariableList, typenum::Unsigned}; +use ssz_types::{BitList, BitVector, FixedVector, VariableList}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use typenum::Unsigned; use crate::{ attestation::{AttestationBase, AttestationData, IndexedAttestationBase}, diff --git a/consensus/types/src/core/eth_spec.rs b/consensus/types/src/core/eth_spec.rs index 11857e678c0..72fd1ebc9eb 100644 --- a/consensus/types/src/core/eth_spec.rs +++ b/consensus/types/src/core/eth_spec.rs @@ -5,7 +5,7 @@ use std::{ use safe_arith::SafeArith; use serde::{Deserialize, Serialize}; -use ssz_types::typenum::{ +use typenum::{ U0, U1, U2, U4, U8, U16, U17, U32, U64, U128, U256, U512, U625, U1024, U2048, U4096, U8192, U65536, U131072, U262144, U1048576, U16777216, U33554432, U134217728, U1073741824, U1099511627776, UInt, Unsigned, bit::B0, @@ -625,7 +625,7 @@ impl EthSpec for GnosisEthSpec { #[cfg(test)] mod test { use crate::{EthSpec, GnosisEthSpec, MainnetEthSpec, MinimalEthSpec}; - use ssz_types::typenum::Unsigned; + use typenum::Unsigned; fn assert_valid_spec() { let spec = E::default_spec(); diff --git a/consensus/types/src/core/preset.rs b/consensus/types/src/core/preset.rs index b436fafd3a0..75d2d8df6b3 100644 --- a/consensus/types/src/core/preset.rs +++ b/consensus/types/src/core/preset.rs @@ -1,5 +1,5 @@ use serde::{Deserialize, Serialize}; -use ssz_types::typenum::Unsigned; +use typenum::Unsigned; use crate::core::{ChainSpec, Epoch, EthSpec}; diff --git a/consensus/types/src/deposit/deposit.rs b/consensus/types/src/deposit/deposit.rs index 67f8572defb..0b08bd6509f 100644 --- a/consensus/types/src/deposit/deposit.rs +++ b/consensus/types/src/deposit/deposit.rs @@ -1,9 +1,10 @@ use 
context_deserialize::context_deserialize; use serde::{Deserialize, Serialize}; use ssz_derive::{Decode, Encode}; -use ssz_types::{FixedVector, typenum::U33}; +use ssz_types::FixedVector; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use typenum::U33; use crate::{core::Hash256, deposit::DepositData, fork::ForkName, test_utils::TestRandom}; diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index a8a78f8cfbf..cd9252bde8b 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -165,14 +165,3 @@ pub mod application_domain { pub use crate::kzg_ext::consts::VERSIONED_HASH_VERSION_KZG; pub use crate::light_client::LightClientError as LightClientUpdateError; pub use crate::state::BeaconStateError as Error; - -pub use bls::{ - AggregatePublicKey, AggregateSignature, Error as BlsError, Keypair, PUBLIC_KEY_BYTES_LEN, - PublicKey, PublicKeyBytes, SIGNATURE_BYTES_LEN, SecretKey, Signature, SignatureBytes, - get_withdrawal_credentials, -}; -pub use context_deserialize::{ContextDeserialize, context_deserialize}; -pub use fixed_bytes::FixedBytesExtended; -pub use milhouse::{self, List, Vector}; -pub use ssz_types::{BitList, BitVector, FixedVector, VariableList, typenum, typenum::Unsigned}; -pub use superstruct::superstruct; diff --git a/consensus/types/src/light_client/light_client_update.rs b/consensus/types/src/light_client/light_client_update.rs index 7fc2c36239b..aa7b800cc89 100644 --- a/consensus/types/src/light_client/light_client_update.rs +++ b/consensus/types/src/light_client/light_client_update.rs @@ -9,10 +9,10 @@ use ssz::{Decode, Encode}; use ssz_derive::Decode; use ssz_derive::Encode; use ssz_types::FixedVector; -use ssz_types::typenum::{U4, U5, U6, U7}; use superstruct::superstruct; use test_random_derive::TestRandom; use tree_hash_derive::TreeHash; +use typenum::{U4, U5, U6, U7}; use crate::{ block::SignedBlindedBeaconBlock, @@ -574,7 +574,7 @@ fn compute_sync_committee_period_at_slot( mod tests { use 
super::*; use crate::light_client::consts::*; - use ssz_types::typenum::Unsigned; + use typenum::Unsigned; // `ssz_tests!` can only be defined once per namespace #[cfg(test)] diff --git a/consensus/types/src/state/beacon_state.rs b/consensus/types/src/state/beacon_state.rs index 948899c98d3..f36c02ce6bd 100644 --- a/consensus/types/src/state/beacon_state.rs +++ b/consensus/types/src/state/beacon_state.rs @@ -13,13 +13,14 @@ use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Deserializer, Serialize}; use ssz::{Decode, DecodeError, Encode, ssz_encode}; use ssz_derive::{Decode, Encode}; -use ssz_types::{BitVector, FixedVector, typenum::Unsigned}; +use ssz_types::{BitVector, FixedVector}; use superstruct::superstruct; use swap_or_not_shuffle::compute_shuffled_index; use test_random_derive::TestRandom; use tracing::instrument; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; +use typenum::Unsigned; use crate::{ attestation::{ diff --git a/consensus/types/src/state/iter.rs b/consensus/types/src/state/iter.rs index d761a6bd859..63f28d74c4b 100644 --- a/consensus/types/src/state/iter.rs +++ b/consensus/types/src/state/iter.rs @@ -56,6 +56,7 @@ impl Iterator for BlockRootsIter<'_, E> { #[cfg(test)] mod test { use crate::*; + use fixed_bytes::FixedBytesExtended; type E = MinimalEthSpec; diff --git a/consensus/types/src/sync_committee/sync_selection_proof.rs b/consensus/types/src/sync_committee/sync_selection_proof.rs index 7efc6c4c760..723f0c06c96 100644 --- a/consensus/types/src/sync_committee/sync_selection_proof.rs +++ b/consensus/types/src/sync_committee/sync_selection_proof.rs @@ -5,7 +5,7 @@ use ethereum_hashing::hash; use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; use ssz::Encode; -use ssz_types::typenum::Unsigned; +use typenum::Unsigned; use crate::{ core::{ @@ -112,8 +112,9 @@ impl From for SyncSelectionProof { #[cfg(test)] mod test { use super::*; - use crate::{FixedBytesExtended, MainnetEthSpec}; + use 
crate::MainnetEthSpec; use eth2_interop_keypairs::keypair; + use fixed_bytes::FixedBytesExtended; #[test] fn proof_sign_and_verify() { diff --git a/consensus/types/src/sync_committee/sync_subnet_id.rs b/consensus/types/src/sync_committee/sync_subnet_id.rs index fb581461785..6cb11f6b038 100644 --- a/consensus/types/src/sync_committee/sync_subnet_id.rs +++ b/consensus/types/src/sync_committee/sync_subnet_id.rs @@ -8,7 +8,7 @@ use std::{ use safe_arith::{ArithError, SafeArith}; use serde::{Deserialize, Serialize}; -use ssz_types::typenum::Unsigned; +use typenum::Unsigned; use crate::core::{EthSpec, consts::altair::SYNC_COMMITTEE_SUBNET_COUNT}; diff --git a/consensus/types/src/test_utils/test_random/bitfield.rs b/consensus/types/src/test_utils/test_random/bitfield.rs index 3bc0d37c620..762f41eb34a 100644 --- a/consensus/types/src/test_utils/test_random/bitfield.rs +++ b/consensus/types/src/test_utils/test_random/bitfield.rs @@ -1,5 +1,6 @@ use smallvec::smallvec; -use ssz_types::{BitList, BitVector, typenum::Unsigned}; +use ssz_types::{BitList, BitVector}; +use typenum::Unsigned; use crate::test_utils::TestRandom; diff --git a/consensus/types/src/test_utils/test_random/test_random.rs b/consensus/types/src/test_utils/test_random/test_random.rs index f31be97c038..101fbec51b0 100644 --- a/consensus/types/src/test_utils/test_random/test_random.rs +++ b/consensus/types/src/test_utils/test_random/test_random.rs @@ -3,7 +3,8 @@ use std::{marker::PhantomData, sync::Arc}; use rand::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use smallvec::{SmallVec, smallvec}; -use ssz_types::{VariableList, typenum::Unsigned}; +use ssz_types::VariableList; +use typenum::Unsigned; pub fn test_random_instance() -> T { let mut rng = XorShiftRng::from_seed([0x42; 16]); diff --git a/lcli/Cargo.toml b/lcli/Cargo.toml index 04eb41960ba..43e361b60df 100644 --- a/lcli/Cargo.toml +++ b/lcli/Cargo.toml @@ -26,6 +26,7 @@ eth2_wallet = { workspace = true } ethereum_hashing = { workspace = true } 
ethereum_ssz = { workspace = true } execution_layer = { workspace = true } +fixed_bytes = { workspace = true } hex = { workspace = true } lighthouse_network = { workspace = true } lighthouse_version = { workspace = true } diff --git a/lcli/src/generate_bootnode_enr.rs b/lcli/src/generate_bootnode_enr.rs index 71186904d0b..620539a95f1 100644 --- a/lcli/src/generate_bootnode_enr.rs +++ b/lcli/src/generate_bootnode_enr.rs @@ -1,4 +1,5 @@ use clap::ArgMatches; +use fixed_bytes::FixedBytesExtended; use lighthouse_network::{ NETWORK_KEY_FILENAME, NetworkConfig, discovery::{CombinedKey, ENR_FILENAME, build_enr}, @@ -9,7 +10,7 @@ use std::io::Write; use std::path::PathBuf; use std::{fs, net::Ipv4Addr}; use std::{fs::File, num::NonZeroU16}; -use types::{ChainSpec, EnrForkId, Epoch, EthSpec, FixedBytesExtended, Hash256}; +use types::{ChainSpec, EnrForkId, Epoch, EthSpec, Hash256}; pub fn run(matches: &ArgMatches, spec: &ChainSpec) -> Result<(), String> { let ip: Ipv4Addr = clap_utils::parse_required(matches, "ip")?; diff --git a/lighthouse/tests/account_manager.rs b/lighthouse/tests/account_manager.rs index 0b945bcb2d4..9bfcae85e57 100644 --- a/lighthouse/tests/account_manager.rs +++ b/lighthouse/tests/account_manager.rs @@ -18,6 +18,7 @@ use account_utils::{ eth2_keystore::KeystoreBuilder, validator_definitions::{SigningDefinition, ValidatorDefinition, ValidatorDefinitions}, }; +use bls::{Keypair, PublicKey}; use slashing_protection::{SLASHING_PROTECTION_FILENAME, SlashingDatabase}; use std::env; use std::fs::{self, File}; @@ -26,7 +27,6 @@ use std::path::{Path, PathBuf}; use std::process::{Child, Command, Output, Stdio}; use std::str::from_utf8; use tempfile::{TempDir, tempdir}; -use types::{Keypair, PublicKey}; use validator_dir::ValidatorDir; use zeroize::Zeroizing; diff --git a/lighthouse/tests/validator_manager.rs b/lighthouse/tests/validator_manager.rs index 99afa7b6824..d6d720a561d 100644 --- a/lighthouse/tests/validator_manager.rs +++ 
b/lighthouse/tests/validator_manager.rs @@ -1,3 +1,4 @@ +use bls::PublicKeyBytes; use eth2::SensitiveUrl; use serde::de::DeserializeOwned; use std::fs; diff --git a/slasher/Cargo.toml b/slasher/Cargo.toml index 94d048ef72e..a068b2e8856 100644 --- a/slasher/Cargo.toml +++ b/slasher/Cargo.toml @@ -14,11 +14,13 @@ portable = ["types/portable"] [dependencies] bincode = { workspace = true } +bls = { workspace = true } byteorder = { workspace = true } educe = { workspace = true } ethereum_ssz = { workspace = true } ethereum_ssz_derive = { workspace = true } filesystem = { workspace = true } +fixed_bytes = { workspace = true } flate2 = { version = "1.0.14", features = ["zlib"], default-features = false } lmdb-rkv = { git = "https://github.com/sigp/lmdb-rs", rev = "f33845c6469b94265319aac0ed5085597862c27e", optional = true } lmdb-rkv-sys = { git = "https://github.com/sigp/lmdb-rs", rev = "f33845c6469b94265319aac0ed5085597862c27e", optional = true } @@ -38,6 +40,7 @@ strum = { workspace = true } tracing = { workspace = true } tree_hash = { workspace = true } tree_hash_derive = { workspace = true } +typenum = { workspace = true } types = { workspace = true } [dev-dependencies] diff --git a/slasher/src/attester_record.rs b/slasher/src/attester_record.rs index 67145193acc..db326a9d80b 100644 --- a/slasher/src/attester_record.rs +++ b/slasher/src/attester_record.rs @@ -1,5 +1,7 @@ use crate::{Error, database::IndexedAttestationId}; +use bls::AggregateSignature; use ssz_derive::{Decode, Encode}; +use ssz_types::VariableList; use std::borrow::Cow; use std::sync::{ Arc, @@ -7,7 +9,7 @@ use std::sync::{ }; use tree_hash::TreeHash as _; use tree_hash_derive::TreeHash; -use types::{AggregateSignature, EthSpec, Hash256, IndexedAttestation, VariableList}; +use types::{EthSpec, Hash256, IndexedAttestation}; #[derive(Debug, Clone, Copy)] pub struct AttesterRecord { diff --git a/slasher/src/database.rs b/slasher/src/database.rs index 2df2849612e..80d073a81c6 100644 --- 
a/slasher/src/database.rs +++ b/slasher/src/database.rs @@ -7,6 +7,7 @@ use crate::{ AttesterRecord, AttesterSlashingStatus, CompactAttesterRecord, Config, Database, Error, ProposerSlashingStatus, metrics, }; +use bls::AggregateSignature; use byteorder::{BigEndian, ByteOrder}; use interface::{Environment, OpenDatabases, RwTransaction}; use lru::LruCache; @@ -14,15 +15,16 @@ use parking_lot::Mutex; use serde::de::DeserializeOwned; use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; +use ssz_types::VariableList; use std::borrow::{Borrow, Cow}; use std::marker::PhantomData; use std::sync::Arc; use tracing::info; use tree_hash::TreeHash; use types::{ - AggregateSignature, AttestationData, ChainSpec, Epoch, EthSpec, Hash256, IndexedAttestation, + AttestationData, ChainSpec, Epoch, EthSpec, Hash256, IndexedAttestation, IndexedAttestationBase, IndexedAttestationElectra, ProposerSlashing, SignedBeaconBlockHeader, - Slot, VariableList, + Slot, }; /// Current database schema version, to check compatibility of on-disk DB with software. 
@@ -860,7 +862,8 @@ impl SlasherDB { #[cfg(test)] mod test { use super::*; - use types::{Checkpoint, ForkName, MainnetEthSpec, Unsigned}; + use typenum::Unsigned; + use types::{Checkpoint, ForkName, MainnetEthSpec}; type E = MainnetEthSpec; diff --git a/slasher/src/test_utils.rs b/slasher/src/test_utils.rs index bbbadac7618..20d1ee92175 100644 --- a/slasher/src/test_utils.rs +++ b/slasher/src/test_utils.rs @@ -1,10 +1,11 @@ +use bls::{AggregateSignature, Signature}; +use fixed_bytes::FixedBytesExtended; use std::collections::HashSet; use std::sync::Arc; use types::{ - AggregateSignature, AttestationData, AttesterSlashing, AttesterSlashingBase, - AttesterSlashingElectra, BeaconBlockHeader, ChainSpec, Checkpoint, Epoch, EthSpec, - FixedBytesExtended, Hash256, IndexedAttestation, MainnetEthSpec, Signature, - SignedBeaconBlockHeader, Slot, + AttestationData, AttesterSlashing, AttesterSlashingBase, AttesterSlashingElectra, + BeaconBlockHeader, ChainSpec, Checkpoint, Epoch, EthSpec, Hash256, IndexedAttestation, + MainnetEthSpec, SignedBeaconBlockHeader, Slot, indexed_attestation::{IndexedAttestationBase, IndexedAttestationElectra}, }; diff --git a/testing/ef_tests/Cargo.toml b/testing/ef_tests/Cargo.toml index 581785e2a97..cef201ee91d 100644 --- a/testing/ef_tests/Cargo.toml +++ b/testing/ef_tests/Cargo.toml @@ -27,14 +27,17 @@ fs2 = { workspace = true } hex = { workspace = true } kzg = { workspace = true } logging = { workspace = true } +milhouse = { workspace = true } rayon = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } serde_repr = { workspace = true } serde_yaml = { workspace = true } snap = { workspace = true } +ssz_types = { workspace = true } state_processing = { workspace = true } swap_or_not_shuffle = { workspace = true } tree_hash = { workspace = true } tree_hash_derive = { workspace = true } +typenum = { workspace = true } types = { workspace = true } diff --git a/testing/ef_tests/src/cases/merkle_proof_validity.rs 
b/testing/ef_tests/src/cases/merkle_proof_validity.rs index 1103d2fe822..52f5333df1a 100644 --- a/testing/ef_tests/src/cases/merkle_proof_validity.rs +++ b/testing/ef_tests/src/cases/merkle_proof_validity.rs @@ -1,11 +1,12 @@ use super::*; use crate::decode::{ssz_decode_file, ssz_decode_state, yaml_decode_file}; use serde::Deserialize; +use ssz_types::FixedVector; use tree_hash::Hash256; +use typenum::Unsigned; use types::{ BeaconBlockBody, BeaconBlockBodyCapella, BeaconBlockBodyDeneb, BeaconBlockBodyElectra, - BeaconBlockBodyFulu, BeaconBlockBodyGloas, BeaconState, FixedVector, FullPayload, Unsigned, - light_client_update, + BeaconBlockBodyFulu, BeaconBlockBodyGloas, BeaconState, FullPayload, light_client_update, }; #[derive(Debug, Clone, Deserialize)] diff --git a/testing/ef_tests/src/cases/ssz_generic.rs b/testing/ef_tests/src/cases/ssz_generic.rs index 8742f8a1409..1dd37a22eed 100644 --- a/testing/ef_tests/src/cases/ssz_generic.rs +++ b/testing/ef_tests/src/cases/ssz_generic.rs @@ -5,12 +5,14 @@ use crate::cases::common::{DecimalU128, DecimalU256, SszStaticType}; use crate::cases::ssz_static::{check_serialization, check_tree_hash}; use crate::decode::{context_yaml_decode_file, log_file_access, snappy_decode_file}; use context_deserialize::{ContextDeserialize, context_deserialize}; +use milhouse::Vector; use serde::{Deserialize, Deserializer, de::Error as SerdeError}; use ssz_derive::{Decode, Encode}; +use ssz_types::{BitList, BitVector, FixedVector, VariableList}; use tree_hash::TreeHash; use tree_hash_derive::TreeHash; -use types::typenum::*; -use types::{BitList, BitVector, FixedVector, ForkName, VariableList, Vector}; +use typenum::*; +use types::ForkName; #[derive(Debug, Clone, Deserialize)] #[context_deserialize(ForkName)] diff --git a/testing/ef_tests/tests/tests.rs b/testing/ef_tests/tests/tests.rs index 089e4464cd7..0cec69c97e5 100644 --- a/testing/ef_tests/tests/tests.rs +++ b/testing/ef_tests/tests/tests.rs @@ -1,6 +1,7 @@ #![cfg(feature = 
"ef_tests")] use ef_tests::*; +use typenum::Unsigned; use types::*; // Check that the hand-computed multiplications on EthSpec are correctly computed. diff --git a/testing/execution_engine_integration/Cargo.toml b/testing/execution_engine_integration/Cargo.toml index 78ed266fb25..034b6c5c8a0 100644 --- a/testing/execution_engine_integration/Cargo.toml +++ b/testing/execution_engine_integration/Cargo.toml @@ -13,8 +13,10 @@ alloy-provider = { workspace = true } alloy-rpc-types-eth = { workspace = true } alloy-signer-local = { workspace = true } async-channel = { workspace = true } +bls = { workspace = true } deposit_contract = { workspace = true } execution_layer = { workspace = true } +fixed_bytes = { workspace = true } fork_choice = { workspace = true } futures = { workspace = true } hex = { workspace = true } @@ -26,4 +28,5 @@ serde_json = { workspace = true } task_executor = { workspace = true } tempfile = { workspace = true } tokio = { workspace = true } +typenum = { workspace = true } types = { workspace = true } diff --git a/testing/execution_engine_integration/src/test_rig.rs b/testing/execution_engine_integration/src/test_rig.rs index 57501c6ee2c..8413da4c5ee 100644 --- a/testing/execution_engine_integration/src/test_rig.rs +++ b/testing/execution_engine_integration/src/test_rig.rs @@ -6,11 +6,13 @@ use alloy_network::{EthereumWallet, TransactionBuilder}; use alloy_primitives::Address as AlloyAddress; use alloy_provider::{Provider, ProviderBuilder}; use alloy_signer_local::PrivateKeySigner; +use bls::PublicKeyBytes; use execution_layer::test_utils::DEFAULT_GAS_LIMIT; use execution_layer::{ BlockProposalContentsType, BuilderParams, ChainHealth, ExecutionLayer, PayloadAttributes, PayloadParameters, PayloadStatus, }; +use fixed_bytes::FixedBytesExtended; use fork_choice::ForkchoiceUpdateParameters; use reqwest::{Client, header::CONTENT_TYPE}; use sensitive_url::SensitiveUrl; @@ -22,8 +24,9 @@ use tokio::time::sleep; use types::payload::BlockProductionVersion; 
use types::{ Address, ChainSpec, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadHeader, - FixedBytesExtended, ForkName, Hash256, MainnetEthSpec, PublicKeyBytes, Slot, Uint256, + ForkName, Hash256, MainnetEthSpec, Slot, Uint256, }; + const EXECUTION_ENGINE_START_TIMEOUT: Duration = Duration::from_secs(60); const TEST_FORK: ForkName = ForkName::Capella; diff --git a/testing/execution_engine_integration/src/transactions.rs b/testing/execution_engine_integration/src/transactions.rs index fe36a1bf67f..8cd63ce307a 100644 --- a/testing/execution_engine_integration/src/transactions.rs +++ b/testing/execution_engine_integration/src/transactions.rs @@ -1,8 +1,10 @@ use alloy_network::TransactionBuilder; use alloy_primitives::{Address, U256}; use alloy_rpc_types_eth::{AccessList, TransactionRequest}; +use bls::{Keypair, Signature}; use deposit_contract::{BYTECODE, CONTRACT_DEPLOY_GAS, DEPOSIT_GAS, encode_eth1_tx_data}; -use types::{DepositData, EthSpec, FixedBytesExtended, Hash256, Keypair, Signature}; +use fixed_bytes::FixedBytesExtended; +use types::{DepositData, EthSpec, Hash256}; /// Hardcoded deposit contract address based on sender address and nonce pub const DEPOSIT_CONTRACT_ADDRESS: &str = "64f43BEc7F86526686C931d65362bB8698872F90"; diff --git a/testing/simulator/Cargo.toml b/testing/simulator/Cargo.toml index 54035f2e827..a1b1b6f95d2 100644 --- a/testing/simulator/Cargo.toml +++ b/testing/simulator/Cargo.toml @@ -20,4 +20,5 @@ serde_json = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } +typenum = { workspace = true } types = { workspace = true } diff --git a/testing/simulator/src/checks.rs b/testing/simulator/src/checks.rs index 1240785121a..35200692c32 100644 --- a/testing/simulator/src/checks.rs +++ b/testing/simulator/src/checks.rs @@ -1,7 +1,8 @@ use crate::local_network::LocalNetwork; use node_test_rig::eth2::types::{BlockId, FinalityCheckpointsData, StateId}; use 
std::time::Duration; -use types::{Epoch, EthSpec, ExecPayload, ExecutionBlockHash, Slot, Unsigned}; +use typenum::Unsigned; +use types::{Epoch, EthSpec, ExecPayload, ExecutionBlockHash, Slot}; /// Checks that all of the validators have on-boarded by the start of the second eth1 voting /// period. diff --git a/testing/state_transition_vectors/Cargo.toml b/testing/state_transition_vectors/Cargo.toml index 66376f0a51d..437aa539f41 100644 --- a/testing/state_transition_vectors/Cargo.toml +++ b/testing/state_transition_vectors/Cargo.toml @@ -10,7 +10,9 @@ portable = ["beacon_chain/portable"] [dependencies] beacon_chain = { workspace = true } +bls = { workspace = true } ethereum_ssz = { workspace = true } +fixed_bytes = { workspace = true } state_processing = { workspace = true } tokio = { workspace = true } types = { workspace = true } diff --git a/testing/state_transition_vectors/src/main.rs b/testing/state_transition_vectors/src/main.rs index 4a829b68035..80c30489b7c 100644 --- a/testing/state_transition_vectors/src/main.rs +++ b/testing/state_transition_vectors/src/main.rs @@ -3,6 +3,8 @@ mod macros; mod exit; use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType}; +use bls::Keypair; +use fixed_bytes::FixedBytesExtended; use ssz::Encode; use std::env; use std::fs::{self, File}; @@ -10,10 +12,8 @@ use std::io::Write; use std::path::{Path, PathBuf}; use std::process::exit; use std::sync::LazyLock; -use types::{ - BeaconState, EthSpec, Keypair, SignedBeaconBlock, test_utils::generate_deterministic_keypairs, -}; -use types::{FixedBytesExtended, Hash256, MainnetEthSpec, Slot}; +use types::{BeaconState, EthSpec, SignedBeaconBlock, test_utils::generate_deterministic_keypairs}; +use types::{Hash256, MainnetEthSpec, Slot}; type E = MainnetEthSpec; diff --git a/testing/web3signer_tests/Cargo.toml b/testing/web3signer_tests/Cargo.toml index b4637b4030f..3ef2e0f7f7a 100644 --- a/testing/web3signer_tests/Cargo.toml +++ b/testing/web3signer_tests/Cargo.toml @@ 
-9,10 +9,12 @@ edition = { workspace = true } [dev-dependencies] account_utils = { workspace = true } async-channel = { workspace = true } +bls = { workspace = true } environment = { workspace = true } eth2 = { workspace = true } eth2_keystore = { workspace = true } eth2_network_config = { workspace = true } +fixed_bytes = { workspace = true } futures = { workspace = true } initialized_validators = { workspace = true } lighthouse_validator_store = { workspace = true } @@ -24,6 +26,7 @@ serde_json = { workspace = true } serde_yaml = { workspace = true } slashing_protection = { workspace = true } slot_clock = { workspace = true } +ssz_types = { workspace = true } task_executor = { workspace = true } tempfile = { workspace = true } tokio = { workspace = true } diff --git a/testing/web3signer_tests/src/lib.rs b/testing/web3signer_tests/src/lib.rs index 15ec745e3f1..541f9b2b4a7 100644 --- a/testing/web3signer_tests/src/lib.rs +++ b/testing/web3signer_tests/src/lib.rs @@ -20,9 +20,11 @@ mod tests { use account_utils::validator_definitions::{ SigningDefinition, ValidatorDefinition, ValidatorDefinitions, Web3SignerDefinition, }; + use bls::{AggregateSignature, Keypair, PublicKeyBytes, SecretKey, Signature}; use eth2::types::FullBlockContents; use eth2_keystore::KeystoreBuilder; use eth2_network_config::Eth2NetworkConfig; + use fixed_bytes::FixedBytesExtended; use initialized_validators::{ InitializedValidators, load_pem_certificate, load_pkcs12_identity, }; @@ -32,6 +34,7 @@ mod tests { use serde::Serialize; use slashing_protection::{SLASHING_PROTECTION_FILENAME, SlashingDatabase}; use slot_clock::{SlotClock, TestingSlotClock}; + use ssz_types::BitList; use std::env; use std::fmt::Debug; use std::fs::{self, File}; diff --git a/validator_client/beacon_node_fallback/Cargo.toml b/validator_client/beacon_node_fallback/Cargo.toml index 5fe2af4cb0b..481aece48b2 100644 --- a/validator_client/beacon_node_fallback/Cargo.toml +++ b/validator_client/beacon_node_fallback/Cargo.toml @@ 
-9,6 +9,7 @@ name = "beacon_node_fallback" path = "src/lib.rs" [dependencies] +bls = { workspace = true } clap = { workspace = true } eth2 = { workspace = true } futures = { workspace = true } diff --git a/validator_client/beacon_node_fallback/src/lib.rs b/validator_client/beacon_node_fallback/src/lib.rs index 0f13d8c8b7b..6abcd44cc94 100644 --- a/validator_client/beacon_node_fallback/src/lib.rs +++ b/validator_client/beacon_node_fallback/src/lib.rs @@ -773,12 +773,13 @@ impl ApiTopic { mod tests { use super::*; use crate::beacon_node_health::BeaconNodeHealthTier; + use bls::Signature; use eth2::SensitiveUrl; use eth2::Timeouts; use slot_clock::TestingSlotClock; use strum::VariantNames; use types::{BeaconBlockDeneb, MainnetEthSpec, Slot}; - use types::{EmptyBlock, Signature, SignedBeaconBlockDeneb, SignedBlindedBeaconBlock}; + use types::{EmptyBlock, SignedBeaconBlockDeneb, SignedBlindedBeaconBlock}; use validator_test_rig::mock_beacon_node::MockBeaconNode; type E = MainnetEthSpec; diff --git a/validator_client/doppelganger_service/Cargo.toml b/validator_client/doppelganger_service/Cargo.toml index e5b183570de..66b27eb39d5 100644 --- a/validator_client/doppelganger_service/Cargo.toml +++ b/validator_client/doppelganger_service/Cargo.toml @@ -6,6 +6,7 @@ authors = ["Sigma Prime "] [dependencies] beacon_node_fallback = { workspace = true } +bls = { workspace = true } environment = { workspace = true } eth2 = { workspace = true } logging = { workspace = true } diff --git a/validator_client/doppelganger_service/src/lib.rs b/validator_client/doppelganger_service/src/lib.rs index b0ed78e9965..600ae82c546 100644 --- a/validator_client/doppelganger_service/src/lib.rs +++ b/validator_client/doppelganger_service/src/lib.rs @@ -30,6 +30,7 @@ //! Doppelganger protection is a best-effort, last-line-of-defence mitigation. Do not rely upon it. 
use beacon_node_fallback::BeaconNodeFallback; +use bls::PublicKeyBytes; use environment::RuntimeContext; use eth2::types::LivenessResponseData; use logging::crit; @@ -41,7 +42,7 @@ use std::sync::Arc; use task_executor::ShutdownReason; use tokio::time::sleep; use tracing::{error, info}; -use types::{Epoch, EthSpec, PublicKeyBytes, Slot}; +use types::{Epoch, EthSpec, Slot}; use validator_store::{DoppelgangerStatus, ValidatorStore}; struct LivenessResponses { diff --git a/validator_client/http_api/Cargo.toml b/validator_client/http_api/Cargo.toml index bb624ea988c..2bd57867acf 100644 --- a/validator_client/http_api/Cargo.toml +++ b/validator_client/http_api/Cargo.toml @@ -20,6 +20,7 @@ eth2 = { workspace = true, features = ["lighthouse"] } eth2_keystore = { workspace = true } ethereum_serde_utils = { workspace = true } filesystem = { workspace = true } +fixed_bytes = { workspace = true } graffiti_file = { workspace = true } health_metrics = { workspace = true } initialized_validators = { workspace = true } @@ -41,6 +42,7 @@ tempfile = { workspace = true } tokio = { workspace = true } tokio-stream = { workspace = true } tracing = { workspace = true } +typenum = { workspace = true } types = { workspace = true } url = { workspace = true } validator_dir = { workspace = true } @@ -54,3 +56,4 @@ zeroize = { workspace = true } futures = { workspace = true } itertools = { workspace = true } rand = { workspace = true, features = ["small_rng"] } +ssz_types = { workspace = true } diff --git a/validator_client/http_api/src/keystores.rs b/validator_client/http_api/src/keystores.rs index c0f918f9bb8..18accf0d5a0 100644 --- a/validator_client/http_api/src/keystores.rs +++ b/validator_client/http_api/src/keystores.rs @@ -1,5 +1,6 @@ //! Implementation of the standard keystore management API. 
use account_utils::validator_definitions::PasswordStorage; +use bls::PublicKeyBytes; use eth2::lighthouse_vc::{ std_types::{ DeleteKeystoreStatus, DeleteKeystoresRequest, DeleteKeystoresResponse, @@ -18,7 +19,7 @@ use std::sync::Arc; use task_executor::TaskExecutor; use tokio::runtime::Handle; use tracing::{info, warn}; -use types::{EthSpec, PublicKeyBytes}; +use types::EthSpec; use validator_dir::{Builder as ValidatorDirBuilder, keystore_password_path}; use warp::Rejection; use warp_utils::reject::{custom_bad_request, custom_server_error}; diff --git a/validator_client/http_api/src/lib.rs b/validator_client/http_api/src/lib.rs index 4494fca9574..a35b4ec6c6d 100644 --- a/validator_client/http_api/src/lib.rs +++ b/validator_client/http_api/src/lib.rs @@ -22,6 +22,7 @@ use account_utils::{ }; pub use api_secret::ApiSecret; use beacon_node_fallback::CandidateInfo; +use bls::{PublicKey, PublicKeyBytes}; use core::convert::Infallible; use create_validator::{ create_validators_mnemonic, create_validators_web3signer, get_voting_password_storage, @@ -30,8 +31,8 @@ use directory::{DEFAULT_HARDCODED_NETWORK, DEFAULT_ROOT_DIR, DEFAULT_VALIDATOR_D use eth2::lighthouse_vc::{ std_types::{AuthResponse, GetFeeRecipientResponse, GetGasLimitResponse}, types::{ - self as api_types, GenericResponse, GetGraffitiResponse, Graffiti, PublicKey, - PublicKeyBytes, SetGraffitiRequest, UpdateCandidatesRequest, UpdateCandidatesResponse, + self as api_types, GenericResponse, GetGraffitiResponse, Graffiti, SetGraffitiRequest, + UpdateCandidatesRequest, UpdateCandidatesResponse, }, }; use health_metrics::observe::Observe; diff --git a/validator_client/http_api/src/remotekeys.rs b/validator_client/http_api/src/remotekeys.rs index 5aa63baac3b..987e1b8740d 100644 --- a/validator_client/http_api/src/remotekeys.rs +++ b/validator_client/http_api/src/remotekeys.rs @@ -2,6 +2,7 @@ use account_utils::validator_definitions::{ SigningDefinition, ValidatorDefinition, Web3SignerDefinition, }; +use 
bls::PublicKeyBytes; use eth2::lighthouse_vc::std_types::{ DeleteRemotekeyStatus, DeleteRemotekeysRequest, DeleteRemotekeysResponse, ImportRemotekeyStatus, ImportRemotekeysRequest, ImportRemotekeysResponse, @@ -14,7 +15,7 @@ use std::sync::Arc; use task_executor::TaskExecutor; use tokio::runtime::Handle; use tracing::{info, warn}; -use types::{EthSpec, PublicKeyBytes}; +use types::EthSpec; use url::Url; use warp::Rejection; use warp_utils::reject::custom_server_error; diff --git a/validator_client/http_api/src/test_utils.rs b/validator_client/http_api/src/test_utils.rs index 9a8784f2023..f83d9f4d526 100644 --- a/validator_client/http_api/src/test_utils.rs +++ b/validator_client/http_api/src/test_utils.rs @@ -4,6 +4,7 @@ use account_utils::validator_definitions::ValidatorDefinitions; use account_utils::{ eth2_wallet::WalletBuilder, mnemonic_from_phrase, random_mnemonic, random_password, }; +use bls::Keypair; use deposit_contract::decode_eth1_tx_data; use doppelganger_service::DoppelgangerService; use eth2::{ diff --git a/validator_client/http_api/src/tests.rs b/validator_client/http_api/src/tests.rs index b0780e74278..5cb631983cc 100644 --- a/validator_client/http_api/src/tests.rs +++ b/validator_client/http_api/src/tests.rs @@ -11,6 +11,7 @@ use account_utils::{ eth2_wallet::WalletBuilder, mnemonic_from_phrase, random_mnemonic, random_password, random_password_string, validator_definitions::ValidatorDefinitions, }; +use bls::{Keypair, PublicKeyBytes}; use deposit_contract::decode_eth1_tx_data; use eth2::{ Error as ApiError, diff --git a/validator_client/http_api/src/tests/keystores.rs b/validator_client/http_api/src/tests/keystores.rs index dd2266e3f6e..eeb3cd94de0 100644 --- a/validator_client/http_api/src/tests/keystores.rs +++ b/validator_client/http_api/src/tests/keystores.rs @@ -1,19 +1,23 @@ use super::*; use account_utils::random_password_string; use bls::PublicKeyBytes; +use bls::{AggregateSignature, PublicKey}; use 
eth2::lighthouse_vc::types::UpdateFeeRecipientRequest; use eth2::lighthouse_vc::{ http_client::ValidatorClientHttpClient as HttpClient, std_types::{KeystoreJsonStr as Keystore, *}, types::Web3SignerValidatorRequest, }; +use fixed_bytes::FixedBytesExtended; use itertools::Itertools; use lighthouse_validator_store::DEFAULT_GAS_LIMIT; use rand::rngs::StdRng; use rand::{Rng, SeedableRng}; use slashing_protection::interchange::{Interchange, InterchangeMetadata}; +use ssz_types::BitList; use std::{collections::HashMap, path::Path}; use tokio::runtime::Handle; +use typenum::Unsigned; use types::{Address, attestation::AttestationBase}; use validator_store::ValidatorStore; use zeroize::Zeroizing; diff --git a/validator_client/initialized_validators/src/lib.rs b/validator_client/initialized_validators/src/lib.rs index 4d61bd4ed81..db6d03174dd 100644 --- a/validator_client/initialized_validators/src/lib.rs +++ b/validator_client/initialized_validators/src/lib.rs @@ -15,6 +15,7 @@ use account_utils::{ Web3SignerDefinition, }, }; +use bls::{Keypair, PublicKey, PublicKeyBytes}; use eth2_keystore::Keystore; use lockfile::{Lockfile, LockfileError}; use metrics::set_gauge; @@ -30,7 +31,7 @@ use std::sync::Arc; use std::time::Duration; use tracing::{debug, error, info, warn}; use types::graffiti::GraffitiString; -use types::{Address, Graffiti, Keypair, PublicKey, PublicKeyBytes}; +use types::{Address, Graffiti}; use url::{ParseError, Url}; use validator_dir::Builder as ValidatorDirBuilder; use zeroize::Zeroizing; diff --git a/validator_client/lighthouse_validator_store/Cargo.toml b/validator_client/lighthouse_validator_store/Cargo.toml index 0f8220bdc9f..01c7616be15 100644 --- a/validator_client/lighthouse_validator_store/Cargo.toml +++ b/validator_client/lighthouse_validator_store/Cargo.toml @@ -7,6 +7,7 @@ authors = ["Sigma Prime "] [dependencies] account_utils = { workspace = true } beacon_node_fallback = { workspace = true } +bls = { workspace = true } doppelganger_service = { 
workspace = true } either = { workspace = true } environment = { workspace = true } diff --git a/validator_client/lighthouse_validator_store/src/lib.rs b/validator_client/lighthouse_validator_store/src/lib.rs index dc8fb07b65f..3bea21a05d8 100644 --- a/validator_client/lighthouse_validator_store/src/lib.rs +++ b/validator_client/lighthouse_validator_store/src/lib.rs @@ -1,4 +1,5 @@ use account_utils::validator_definitions::{PasswordStorage, ValidatorDefinition}; +use bls::{PublicKeyBytes, Signature}; use doppelganger_service::DoppelgangerService; use eth2::types::PublishBlockRequest; use initialized_validators::InitializedValidators; @@ -19,9 +20,9 @@ use tracing::{error, info, instrument, warn}; use types::{ AbstractExecPayload, Address, AggregateAndProof, Attestation, BeaconBlock, BlindedPayload, ChainSpec, ContributionAndProof, Domain, Epoch, EthSpec, Fork, Graffiti, Hash256, - PublicKeyBytes, SelectionProof, Signature, SignedAggregateAndProof, SignedBeaconBlock, - SignedContributionAndProof, SignedRoot, SignedValidatorRegistrationData, SignedVoluntaryExit, - Slot, SyncAggregatorSelectionData, SyncCommitteeContribution, SyncCommitteeMessage, + SelectionProof, SignedAggregateAndProof, SignedBeaconBlock, SignedContributionAndProof, + SignedRoot, SignedValidatorRegistrationData, SignedVoluntaryExit, Slot, + SyncAggregatorSelectionData, SyncCommitteeContribution, SyncCommitteeMessage, SyncSelectionProof, SyncSubnetId, ValidatorRegistrationData, VoluntaryExit, graffiti::GraffitiString, }; diff --git a/validator_client/signing_method/Cargo.toml b/validator_client/signing_method/Cargo.toml index 2defd25caaa..cb321c2d498 100644 --- a/validator_client/signing_method/Cargo.toml +++ b/validator_client/signing_method/Cargo.toml @@ -5,6 +5,7 @@ edition = { workspace = true } authors = ["Sigma Prime "] [dependencies] +bls = { workspace = true } eth2_keystore = { workspace = true } ethereum_serde_utils = { workspace = true } lockfile = { workspace = true } diff --git 
a/validator_client/signing_method/src/lib.rs b/validator_client/signing_method/src/lib.rs index 7e0f2c02f7d..d0d98689526 100644 --- a/validator_client/signing_method/src/lib.rs +++ b/validator_client/signing_method/src/lib.rs @@ -3,6 +3,7 @@ //! - Via a local `Keypair`. //! - Via a remote signer (Web3Signer) +use bls::{Keypair, PublicKey, Signature}; use eth2_keystore::Keystore; use lockfile::Lockfile; use parking_lot::Mutex; diff --git a/validator_client/signing_method/src/web3signer.rs b/validator_client/signing_method/src/web3signer.rs index 99fad103035..246d9e9e091 100644 --- a/validator_client/signing_method/src/web3signer.rs +++ b/validator_client/signing_method/src/web3signer.rs @@ -1,6 +1,7 @@ //! Contains the types required to make JSON requests to Web3Signer servers. use super::Error; +use bls::{PublicKeyBytes, Signature}; use serde::{Deserialize, Serialize}; use types::*; diff --git a/validator_client/slashing_protection/Cargo.toml b/validator_client/slashing_protection/Cargo.toml index 6a778c5de31..b80da6c7867 100644 --- a/validator_client/slashing_protection/Cargo.toml +++ b/validator_client/slashing_protection/Cargo.toml @@ -11,9 +11,11 @@ portable = ["types/portable"] [dependencies] arbitrary = { workspace = true, features = ["derive"] } +bls = { workspace = true } eip_3076 = { workspace = true, features = ["json"] } ethereum_serde_utils = { workspace = true } filesystem = { workspace = true } +fixed_bytes = { workspace = true } r2d2 = { workspace = true } r2d2_sqlite = "0.21.0" rusqlite = { workspace = true } diff --git a/validator_client/slashing_protection/src/attestation_tests.rs b/validator_client/slashing_protection/src/attestation_tests.rs index 37766f271bb..d16c9613369 100644 --- a/validator_client/slashing_protection/src/attestation_tests.rs +++ b/validator_client/slashing_protection/src/attestation_tests.rs @@ -2,7 +2,8 @@ use crate::test_utils::*; use crate::*; -use types::{AttestationData, Checkpoint, Epoch, FixedBytesExtended, Slot}; 
+use fixed_bytes::FixedBytesExtended; +use types::{AttestationData, Checkpoint, Epoch, Slot}; pub fn build_checkpoint(epoch_num: u64) -> Checkpoint { Checkpoint { diff --git a/validator_client/slashing_protection/src/bin/test_generator.rs b/validator_client/slashing_protection/src/bin/test_generator.rs index dfda7983f73..df1c63f37d3 100644 --- a/validator_client/slashing_protection/src/bin/test_generator.rs +++ b/validator_client/slashing_protection/src/bin/test_generator.rs @@ -1,11 +1,12 @@ use eip_3076::{Interchange, InterchangeData, InterchangeMetadata, SignedAttestation, SignedBlock}; +use fixed_bytes::FixedBytesExtended; use slashing_protection::SUPPORTED_INTERCHANGE_FORMAT_VERSION; use slashing_protection::interchange_test::{MultiTestCase, TestCase}; use slashing_protection::test_utils::{DEFAULT_GENESIS_VALIDATORS_ROOT, pubkey}; use std::fs::{self, File}; use std::io::Write; use std::path::Path; -use types::{Epoch, FixedBytesExtended, Hash256, Slot}; +use types::{Epoch, Hash256, Slot}; fn metadata(genesis_validators_root: Hash256) -> InterchangeMetadata { InterchangeMetadata { diff --git a/validator_client/slashing_protection/src/block_tests.rs b/validator_client/slashing_protection/src/block_tests.rs index b3273015f42..2531f52d8ce 100644 --- a/validator_client/slashing_protection/src/block_tests.rs +++ b/validator_client/slashing_protection/src/block_tests.rs @@ -2,7 +2,8 @@ use super::*; use crate::test_utils::*; -use types::{BeaconBlockHeader, FixedBytesExtended, Slot}; +use fixed_bytes::FixedBytesExtended; +use types::{BeaconBlockHeader, Slot}; pub fn block(slot: u64) -> BeaconBlockHeader { BeaconBlockHeader { diff --git a/validator_client/slashing_protection/src/extra_interchange_tests.rs b/validator_client/slashing_protection/src/extra_interchange_tests.rs index 0f88ec8b1dc..18457720e4e 100644 --- a/validator_client/slashing_protection/src/extra_interchange_tests.rs +++ b/validator_client/slashing_protection/src/extra_interchange_tests.rs @@ -2,8 +2,8 
@@ use crate::test_utils::pubkey; use crate::*; +use fixed_bytes::FixedBytesExtended; use tempfile::tempdir; -use types::FixedBytesExtended; #[test] fn export_non_existent_key() { diff --git a/validator_client/slashing_protection/src/interchange_test.rs b/validator_client/slashing_protection/src/interchange_test.rs index ebe0105f24d..0dfcda204d7 100644 --- a/validator_client/slashing_protection/src/interchange_test.rs +++ b/validator_client/slashing_protection/src/interchange_test.rs @@ -2,11 +2,13 @@ use crate::{ SigningRoot, SlashingDatabase, test_utils::{DEFAULT_GENESIS_VALIDATORS_ROOT, pubkey}, }; +use bls::PublicKeyBytes; use eip_3076::{Interchange, SignedAttestation, SignedBlock}; +use fixed_bytes::FixedBytesExtended; use serde::{Deserialize, Serialize}; use std::collections::HashSet; use tempfile::tempdir; -use types::{Epoch, FixedBytesExtended, Hash256, PublicKeyBytes, Slot}; +use types::{Epoch, Hash256, Slot}; #[derive(Debug, Clone, Deserialize, Serialize)] #[cfg_attr(feature = "arbitrary-fuzz", derive(arbitrary::Arbitrary))] diff --git a/validator_client/slashing_protection/src/lib.rs b/validator_client/slashing_protection/src/lib.rs index 917d51d38b7..f8580e73158 100644 --- a/validator_client/slashing_protection/src/lib.rs +++ b/validator_client/slashing_protection/src/lib.rs @@ -19,10 +19,11 @@ pub use crate::slashing_database::{ InterchangeError, InterchangeImportOutcome, SUPPORTED_INTERCHANGE_FORMAT_VERSION, SlashingDatabase, }; +use bls::PublicKeyBytes; use rusqlite::Error as SQLError; use std::fmt::Display; use std::io::{Error as IOError, ErrorKind}; -use types::{Hash256, PublicKeyBytes}; +use types::Hash256; /// The filename within the `validators` directory that contains the slashing protection DB. 
pub const SLASHING_PROTECTION_FILENAME: &str = "slashing_protection.sqlite"; @@ -133,7 +134,7 @@ impl Display for NotSafe { #[cfg(test)] mod test { - use types::FixedBytesExtended; + use fixed_bytes::FixedBytesExtended; use super::*; diff --git a/validator_client/slashing_protection/src/slashing_database.rs b/validator_client/slashing_protection/src/slashing_database.rs index 00677212a3f..67e1234ac57 100644 --- a/validator_client/slashing_protection/src/slashing_database.rs +++ b/validator_client/slashing_protection/src/slashing_database.rs @@ -1,6 +1,7 @@ use crate::signed_attestation::InvalidAttestation; use crate::signed_block::InvalidBlock; use crate::{NotSafe, Safe, SignedAttestation, SignedBlock, SigningRoot, signing_root_from_row}; +use bls::PublicKeyBytes; use eip_3076::{ Interchange, InterchangeData, InterchangeMetadata, SignedAttestation as InterchangeAttestation, SignedBlock as InterchangeBlock, @@ -12,7 +13,7 @@ use std::fs::File; use std::path::Path; use std::time::Duration; use tracing::instrument; -use types::{AttestationData, BeaconBlockHeader, Epoch, Hash256, PublicKeyBytes, SignedRoot, Slot}; +use types::{AttestationData, BeaconBlockHeader, Epoch, Hash256, SignedRoot, Slot}; type Pool = r2d2::Pool; diff --git a/validator_client/slashing_protection/tests/migration.rs b/validator_client/slashing_protection/tests/migration.rs index 3d4ec7ea9a8..14bf0d63f93 100644 --- a/validator_client/slashing_protection/tests/migration.rs +++ b/validator_client/slashing_protection/tests/migration.rs @@ -1,10 +1,11 @@ //! Tests for upgrading a previous version of the database to the latest schema. 
+use fixed_bytes::FixedBytesExtended; use slashing_protection::{NotSafe, SlashingDatabase}; use std::collections::HashMap; use std::fs; use std::path::{Path, PathBuf}; use tempfile::tempdir; -use types::{FixedBytesExtended, Hash256}; +use types::Hash256; fn test_data_dir() -> PathBuf { Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap()).join("migration-tests") diff --git a/validator_client/validator_services/src/block_service.rs b/validator_client/validator_services/src/block_service.rs index 8ec53d3f409..23658af03fd 100644 --- a/validator_client/validator_services/src/block_service.rs +++ b/validator_client/validator_services/src/block_service.rs @@ -1,4 +1,5 @@ use beacon_node_fallback::{ApiTopic, BeaconNodeFallback, Error as FallbackError, Errors}; +use bls::PublicKeyBytes; use eth2::{BeaconNodeHttpClient, StatusCode}; use graffiti_file::{GraffitiFile, determine_graffiti}; use logging::crit; @@ -11,7 +12,7 @@ use std::time::Duration; use task_executor::TaskExecutor; use tokio::sync::mpsc; use tracing::{Instrument, debug, error, info, info_span, instrument, trace, warn}; -use types::{BlockType, ChainSpec, EthSpec, Graffiti, PublicKeyBytes, Slot}; +use types::{BlockType, ChainSpec, EthSpec, Graffiti, Slot}; use validator_store::{Error as ValidatorStoreError, SignedBlock, UnsignedBlock, ValidatorStore}; #[derive(Debug)] diff --git a/validator_client/validator_services/src/duties_service.rs b/validator_client/validator_services/src/duties_service.rs index 7569d3946ab..c2378181ef0 100644 --- a/validator_client/validator_services/src/duties_service.rs +++ b/validator_client/validator_services/src/duties_service.rs @@ -10,6 +10,7 @@ use crate::block_service::BlockServiceNotification; use crate::sync::SyncDutiesMap; use crate::sync::poll_sync_committee_duties; use beacon_node_fallback::{ApiTopic, BeaconNodeFallback}; +use bls::PublicKeyBytes; use eth2::types::{ AttesterData, BeaconCommitteeSelection, BeaconCommitteeSubscription, DutiesResponse, ProposerData, 
StateId, ValidatorId, @@ -29,7 +30,7 @@ use std::time::Duration; use task_executor::TaskExecutor; use tokio::{sync::mpsc::Sender, time::sleep}; use tracing::{debug, error, info, warn}; -use types::{ChainSpec, Epoch, EthSpec, Hash256, PublicKeyBytes, SelectionProof, Slot}; +use types::{ChainSpec, Epoch, EthSpec, Hash256, SelectionProof, Slot}; use validator_metrics::{ATTESTATION_DUTY, get_int_gauge, set_int_gauge}; use validator_store::{DoppelgangerStatus, Error as ValidatorStoreError, ValidatorStore}; diff --git a/validator_client/validator_services/src/sync.rs b/validator_client/validator_services/src/sync.rs index 77032ed15b4..0f456a70507 100644 --- a/validator_client/validator_services/src/sync.rs +++ b/validator_client/validator_services/src/sync.rs @@ -1,4 +1,5 @@ use crate::duties_service::{DutiesService, Error, SelectionProofConfig}; +use bls::PublicKeyBytes; use eth2::types::SyncCommitteeSelection; use futures::future::join_all; use futures::stream::{FuturesUnordered, StreamExt}; @@ -8,7 +9,7 @@ use slot_clock::SlotClock; use std::collections::{HashMap, HashSet}; use std::sync::Arc; use tracing::{debug, error, info, warn}; -use types::{ChainSpec, EthSpec, PublicKeyBytes, Slot, SyncDuty, SyncSelectionProof, SyncSubnetId}; +use types::{ChainSpec, EthSpec, Slot, SyncDuty, SyncSelectionProof, SyncSubnetId}; use validator_store::{DoppelgangerStatus, Error as ValidatorStoreError, ValidatorStore}; /// Top-level data-structure containing sync duty information. 
diff --git a/validator_client/validator_services/src/sync_committee_service.rs b/validator_client/validator_services/src/sync_committee_service.rs index 5f6b1cb710f..28c3d1caadb 100644 --- a/validator_client/validator_services/src/sync_committee_service.rs +++ b/validator_client/validator_services/src/sync_committee_service.rs @@ -1,5 +1,6 @@ use crate::duties_service::DutiesService; use beacon_node_fallback::{ApiTopic, BeaconNodeFallback}; +use bls::PublicKeyBytes; use eth2::types::BlockId; use futures::future::FutureExt; use futures::future::join_all; @@ -13,8 +14,8 @@ use task_executor::TaskExecutor; use tokio::time::{Duration, Instant, sleep, sleep_until}; use tracing::{Instrument, debug, error, info, info_span, instrument, trace, warn}; use types::{ - ChainSpec, EthSpec, Hash256, PublicKeyBytes, Slot, SyncCommitteeSubscription, - SyncContributionData, SyncDuty, SyncSelectionProof, SyncSubnetId, + ChainSpec, EthSpec, Hash256, Slot, SyncCommitteeSubscription, SyncContributionData, SyncDuty, + SyncSelectionProof, SyncSubnetId, }; use validator_store::{Error as ValidatorStoreError, ValidatorStore}; diff --git a/validator_client/validator_store/Cargo.toml b/validator_client/validator_store/Cargo.toml index 8c5451b2d00..8b1879c837c 100644 --- a/validator_client/validator_store/Cargo.toml +++ b/validator_client/validator_store/Cargo.toml @@ -5,6 +5,7 @@ edition = { workspace = true } authors = ["Sigma Prime "] [dependencies] +bls = { workspace = true } eth2 = { workspace = true } slashing_protection = { workspace = true } types = { workspace = true } diff --git a/validator_client/validator_store/src/lib.rs b/validator_client/validator_store/src/lib.rs index 6fd2e270649..2b472799d24 100644 --- a/validator_client/validator_store/src/lib.rs +++ b/validator_client/validator_store/src/lib.rs @@ -1,3 +1,4 @@ +use bls::{PublicKeyBytes, Signature}; use eth2::types::{FullBlockContents, PublishBlockRequest}; use slashing_protection::NotSafe; use std::fmt::Debug; @@ -5,9 +6,9 
@@ use std::future::Future; use std::sync::Arc; use types::{ Address, Attestation, AttestationError, BlindedBeaconBlock, Epoch, EthSpec, Graffiti, Hash256, - PublicKeyBytes, SelectionProof, Signature, SignedAggregateAndProof, SignedBlindedBeaconBlock, - SignedContributionAndProof, SignedValidatorRegistrationData, Slot, SyncCommitteeContribution, - SyncCommitteeMessage, SyncSelectionProof, SyncSubnetId, ValidatorRegistrationData, + SelectionProof, SignedAggregateAndProof, SignedBlindedBeaconBlock, SignedContributionAndProof, + SignedValidatorRegistrationData, Slot, SyncCommitteeContribution, SyncCommitteeMessage, + SyncSelectionProof, SyncSubnetId, ValidatorRegistrationData, }; #[derive(Debug, PartialEq, Clone)] diff --git a/validator_manager/Cargo.toml b/validator_manager/Cargo.toml index 6ef179fbe99..16ce1e023fa 100644 --- a/validator_manager/Cargo.toml +++ b/validator_manager/Cargo.toml @@ -6,6 +6,7 @@ edition = { workspace = true } [dependencies] account_utils = { workspace = true } +bls = { workspace = true } clap = { workspace = true } clap_utils = { workspace = true } educe = { workspace = true } diff --git a/validator_manager/src/common.rs b/validator_manager/src/common.rs index 0e93b257734..a95d2a1fd61 100644 --- a/validator_manager/src/common.rs +++ b/validator_manager/src/common.rs @@ -1,5 +1,6 @@ pub use account_utils::STDIN_INPUTS_FLAG; use account_utils::strip_off_newlines; +use bls::{Keypair, PublicKeyBytes, SignatureBytes}; use eth2::lighthouse_vc::std_types::{InterchangeJsonStr, KeystoreJsonStr}; use eth2::{ SensitiveUrl, diff --git a/validator_manager/src/create_validators.rs b/validator_manager/src/create_validators.rs index 19f78be2ea7..8682705956c 100644 --- a/validator_manager/src/create_validators.rs +++ b/validator_manager/src/create_validators.rs @@ -1,6 +1,7 @@ use super::common::*; use crate::DumpConfig; use account_utils::{random_password_string, read_mnemonic_from_cli, read_password_from_user}; +use bls::PublicKeyBytes; use clap::{Arg, 
ArgAction, ArgMatches, Command}; use clap_utils::FLAG_HEADER; use eth2::{ @@ -586,6 +587,7 @@ async fn run(config: CreateConfig, spec: &ChainSpec) -> Result<(), S #[cfg(test)] pub mod tests { use super::*; + use bls::SignatureBytes; use eth2_network_config::Eth2NetworkConfig; use regex::Regex; use std::path::Path; diff --git a/validator_manager/src/delete_validators.rs b/validator_manager/src/delete_validators.rs index 3ff0c9529d7..2421b002aab 100644 --- a/validator_manager/src/delete_validators.rs +++ b/validator_manager/src/delete_validators.rs @@ -1,3 +1,4 @@ +use bls::PublicKeyBytes; use clap::{Arg, ArgAction, ArgMatches, Command}; use eth2::{ SensitiveUrl, @@ -5,7 +6,6 @@ use eth2::{ }; use serde::{Deserialize, Serialize}; use std::path::PathBuf; -use types::PublicKeyBytes; use crate::{DumpConfig, common::vc_http_client}; diff --git a/validator_manager/src/exit_validators.rs b/validator_manager/src/exit_validators.rs index 4a398793ce1..b53d9c0a16d 100644 --- a/validator_manager/src/exit_validators.rs +++ b/validator_manager/src/exit_validators.rs @@ -1,5 +1,6 @@ use crate::{DumpConfig, common::vc_http_client}; +use bls::PublicKeyBytes; use clap::{Arg, ArgAction, ArgMatches, Command}; use clap_utils::FLAG_HEADER; use eth2::types::{ConfigAndPreset, Epoch, StateId, ValidatorId, ValidatorStatus}; @@ -10,7 +11,7 @@ use slot_clock::{SlotClock, SystemTimeSlotClock}; use std::fs::write; use std::path::PathBuf; use std::time::Duration; -use types::{ChainSpec, EthSpec, PublicKeyBytes}; +use types::{ChainSpec, EthSpec}; pub const CMD: &str = "exit"; pub const BEACON_URL_FLAG: &str = "beacon-node"; diff --git a/validator_manager/src/list_validators.rs b/validator_manager/src/list_validators.rs index 082894a995d..f7a09f8d8e7 100644 --- a/validator_manager/src/list_validators.rs +++ b/validator_manager/src/list_validators.rs @@ -1,3 +1,4 @@ +use bls::PublicKeyBytes; use clap::{Arg, ArgAction, ArgMatches, Command}; use eth2::lighthouse_vc::types::SingleKeystoreResponse; use 
eth2::types::{ConfigAndPreset, StateId, ValidatorId, ValidatorStatus}; @@ -5,7 +6,7 @@ use eth2::{BeaconNodeHttpClient, SensitiveUrl, Timeouts}; use serde::{Deserialize, Serialize}; use std::path::PathBuf; use std::time::Duration; -use types::{ChainSpec, EthSpec, PublicKeyBytes}; +use types::{ChainSpec, EthSpec}; use crate::exit_validators::get_current_epoch; use crate::{DumpConfig, common::vc_http_client}; diff --git a/validator_manager/src/move_validators.rs b/validator_manager/src/move_validators.rs index 08b50eb9293..ace1d1941fd 100644 --- a/validator_manager/src/move_validators.rs +++ b/validator_manager/src/move_validators.rs @@ -1,6 +1,7 @@ use super::common::*; use crate::DumpConfig; use account_utils::read_password_from_user; +use bls::PublicKeyBytes; use clap::{Arg, ArgAction, ArgMatches, Command}; use eth2::{ SensitiveUrl, @@ -18,7 +19,7 @@ use std::path::PathBuf; use std::str::FromStr; use std::time::Duration; use tokio::time::sleep; -use types::{Address, PublicKeyBytes}; +use types::Address; use zeroize::Zeroizing; pub const MOVE_DIR_NAME: &str = "lighthouse-validator-move"; From d9ddb72f5bcf3289b7650cc11e227b12a8688bd7 Mon Sep 17 00:00:00 2001 From: Eitan Seri-Levi Date: Thu, 11 Dec 2025 03:56:51 -0300 Subject: [PATCH 60/74] Fix testnet script (#8557) Fix an issue where a kurtosis testnet script was failing because no supernodes were provided ``` There was an error interpreting Starlark code Evaluation error: fail: Fulu fork is enabled (epoch: 0) but no supernodes are configured, no nodes have 128 or more validators, and perfect_peerdas_enabled is not enabled. Either configure a supernode, ensure at least one node has 128+ validators, or enable perfect_peerdas_enabled in network_params with 16 participants. 
at [github.com/ethpandaops/ethereum-package/main.star:83:57]: run at [github.com/ethpandaops/ethereum-package/src/package_io/input_parser.star:377:17]: input_parser at [0:0]: fail ``` Co-Authored-By: Eitan Seri-Levi Co-Authored-By: Pawan Dhananjay --- scripts/tests/genesis-sync-config-electra.yaml | 3 +-- scripts/tests/genesis-sync-config-fulu.yaml | 3 +-- scripts/tests/network_params.yaml | 3 ++- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/scripts/tests/genesis-sync-config-electra.yaml b/scripts/tests/genesis-sync-config-electra.yaml index 153f754c94a..1d1ed4d3152 100644 --- a/scripts/tests/genesis-sync-config-electra.yaml +++ b/scripts/tests/genesis-sync-config-electra.yaml @@ -6,15 +6,14 @@ participants: # nodes without validators, used for testing sync. - cl_type: lighthouse cl_image: lighthouse:local - supernode: true # no supernode in Electra, this is for future proof validator_count: 0 - cl_type: lighthouse cl_image: lighthouse:local - supernode: false validator_count: 0 network_params: seconds_per_slot: 6 electra_fork_epoch: 0 + fulu_fork_epoch: 100000 # a really big number so this test stays in electra preset: "minimal" additional_services: - tx_fuzz diff --git a/scripts/tests/genesis-sync-config-fulu.yaml b/scripts/tests/genesis-sync-config-fulu.yaml index 98dc8751d62..6d2c2647a90 100644 --- a/scripts/tests/genesis-sync-config-fulu.yaml +++ b/scripts/tests/genesis-sync-config-fulu.yaml @@ -21,8 +21,7 @@ participants: validator_count: 0 network_params: seconds_per_slot: 6 - electra_fork_epoch: 0 - fulu_fork_epoch: 1 + fulu_fork_epoch: 0 preset: "minimal" additional_services: - tx_fuzz diff --git a/scripts/tests/network_params.yaml b/scripts/tests/network_params.yaml index 0fda1aa34ba..35916ac1e4e 100644 --- a/scripts/tests/network_params.yaml +++ b/scripts/tests/network_params.yaml @@ -6,9 +6,10 @@ participants: cl_image: lighthouse:local cl_extra_params: - --target-peers=3 + supernode: true count: 4 network_params: - electra_fork_epoch: 
0 + fulu_fork_epoch: 0 seconds_per_slot: 3 num_validator_keys_per_node: 20 global_log_level: debug From 5abbdb660af28c582d98635725347c28464523b0 Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Thu, 11 Dec 2025 14:56:53 +0800 Subject: [PATCH 61/74] Do not request attestation data when attestation duty is empty (#8559) Co-Authored-By: Tan Chee Keong --- .../validator_services/src/attestation_service.rs | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/validator_client/validator_services/src/attestation_service.rs b/validator_client/validator_services/src/attestation_service.rs index b2b8bc81e22..587d4668b8a 100644 --- a/validator_client/validator_services/src/attestation_service.rs +++ b/validator_client/validator_services/src/attestation_service.rs @@ -193,6 +193,12 @@ impl AttestationService = self.duties_service.attesters(slot).into_iter().collect(); + + // Return early if there is no attestation duties + if attestation_duties.is_empty() { + return Ok(()); + } + let attestation_service = self.clone(); let attestation_data_handle = self @@ -371,10 +377,6 @@ impl AttestationService Date: Fri, 12 Dec 2025 05:45:38 -0300 Subject: [PATCH 62/74] Rust 1.92 lints (#8567) Co-Authored-By: Eitan Seri-Levi --- beacon_node/beacon_chain/src/test_utils.rs | 1 - beacon_node/beacon_chain/tests/store_tests.rs | 2 -- beacon_node/execution_layer/src/engine_api.rs | 2 +- consensus/types/src/fork/fork_context.rs | 3 +-- lcli/src/http_sync.rs | 1 - 5 files changed, 2 insertions(+), 7 deletions(-) diff --git a/beacon_node/beacon_chain/src/test_utils.rs b/beacon_node/beacon_chain/src/test_utils.rs index 3651512b85a..54fa6b5ff09 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -2923,7 +2923,6 @@ where let chain_dump = self.chain.chain_dump().unwrap(); chain_dump .iter() - .cloned() .map(|checkpoint| checkpoint.beacon_state.finalized_checkpoint().root) 
.filter(|block_hash| *block_hash != Hash256::zero()) .map(|hash| hash.into()) diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index c1c53c014c1..8de96adb2d4 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -5528,7 +5528,6 @@ fn get_finalized_epoch_boundary_blocks( dump: &[BeaconSnapshot>], ) -> HashSet { dump.iter() - .cloned() .map(|checkpoint| checkpoint.beacon_state.finalized_checkpoint().root.into()) .collect() } @@ -5537,7 +5536,6 @@ fn get_blocks( dump: &[BeaconSnapshot>], ) -> HashSet { dump.iter() - .cloned() .map(|checkpoint| checkpoint.beacon_block_root.into()) .collect() } diff --git a/beacon_node/execution_layer/src/engine_api.rs b/beacon_node/execution_layer/src/engine_api.rs index f285640b21c..b0cc4dd8241 100644 --- a/beacon_node/execution_layer/src/engine_api.rs +++ b/beacon_node/execution_layer/src/engine_api.rs @@ -770,7 +770,7 @@ impl ClientVersionV1 { self.commit .0 .get(..4) - .map_or_else(|| self.commit.0.as_str(), |s| s) + .unwrap_or(self.commit.0.as_str()) .to_lowercase(), lighthouse_commit_prefix .0 diff --git a/consensus/types/src/fork/fork_context.rs b/consensus/types/src/fork/fork_context.rs index aec72761241..89f69bcbb62 100644 --- a/consensus/types/src/fork/fork_context.rs +++ b/consensus/types/src/fork/fork_context.rs @@ -63,8 +63,7 @@ impl ForkContext { let current_epoch = current_slot.epoch(E::slots_per_epoch()); let current_fork = epoch_to_forks .values() - .filter(|&fork| fork.fork_epoch <= current_epoch) - .next_back() + .rfind(|&fork| fork.fork_epoch <= current_epoch) .cloned() .expect("should match at least genesis epoch"); diff --git a/lcli/src/http_sync.rs b/lcli/src/http_sync.rs index dd941cda74e..6a0eb2a0e1d 100644 --- a/lcli/src/http_sync.rs +++ b/lcli/src/http_sync.rs @@ -132,7 +132,6 @@ async fn get_block_from_source( let (kzg_proofs, blobs): (Vec<_>, Vec<_>) = blobs_from_source .iter() - .cloned() 
.map(|sidecar| (sidecar.kzg_proof, sidecar.blob.clone())) .unzip(); From cd0b1ef6485d05c8920d308d8c2470f0f5f3fceb Mon Sep 17 00:00:00 2001 From: gustavo Date: Mon, 15 Dec 2025 01:47:04 +0000 Subject: [PATCH 63/74] fix(bls): fix is_infinity when aggregating onto empty AggregateSignature (#8496) Co-Authored-By: figtracer <1gusredo@gmail.com> Co-Authored-By: Michael Sproul --- crypto/bls/src/generic_aggregate_signature.rs | 16 ++++++++++------ crypto/bls/tests/tests.rs | 11 +++++++++++ 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/crypto/bls/src/generic_aggregate_signature.rs b/crypto/bls/src/generic_aggregate_signature.rs index 98a634ee11f..35674394a0d 100644 --- a/crypto/bls/src/generic_aggregate_signature.rs +++ b/crypto/bls/src/generic_aggregate_signature.rs @@ -124,13 +124,15 @@ where /// Aggregates a signature onto `self`. pub fn add_assign(&mut self, other: &GenericSignature) { if let Some(other_point) = other.point() { - self.is_infinity = self.is_infinity && other.is_infinity; if let Some(self_point) = &mut self.point { - self_point.add_assign(other_point) + self_point.add_assign(other_point); + self.is_infinity = self.is_infinity && other.is_infinity; } else { let mut self_point = AggSig::infinity(); self_point.add_assign(other_point); - self.point = Some(self_point) + self.point = Some(self_point); + // the result is infinity, if `other` is + self.is_infinity = other.is_infinity; } } } @@ -138,13 +140,15 @@ where /// Aggregates an aggregate signature onto `self`. 
pub fn add_assign_aggregate(&mut self, other: &Self) { if let Some(other_point) = other.point() { - self.is_infinity = self.is_infinity && other.is_infinity; if let Some(self_point) = &mut self.point { - self_point.add_assign_aggregate(other_point) + self_point.add_assign_aggregate(other_point); + self.is_infinity = self.is_infinity && other.is_infinity; } else { let mut self_point = AggSig::infinity(); self_point.add_assign_aggregate(other_point); - self.point = Some(self_point) + self.point = Some(self_point); + // the result is infinity, if `other` is + self.is_infinity = other.is_infinity; } } } diff --git a/crypto/bls/tests/tests.rs b/crypto/bls/tests/tests.rs index 00f82bfcecd..1827ea99210 100644 --- a/crypto/bls/tests/tests.rs +++ b/crypto/bls/tests/tests.rs @@ -356,6 +356,17 @@ macro_rules! test_suite { .assert_single_message_verify(true) } + #[test] + fn empty_aggregate_plus_infinity_should_be_infinity() { + let mut agg = AggregateSignature::empty(); + let infinity_sig = Signature::deserialize(&INFINITY_SIGNATURE).unwrap(); + agg.add_assign(&infinity_sig); + assert!( + agg.is_infinity(), + "is_infinity flag should be true after adding infinity to empty" + ); + } + #[test] fn deserialize_infinity_public_key() { PublicKey::deserialize(&bls::INFINITY_PUBLIC_KEY).unwrap_err(); From 49e1112da2af379ed5845249de0a9008a8a5be38 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20David=20Ram=C3=ADrez=20Chiquillo?= Date: Sun, 14 Dec 2025 21:33:29 -0500 Subject: [PATCH 64/74] Add regression test for unaligned checkpoint sync with payload pruning (#8458) Closes #8426 Added a new regression test: `reproduction_unaligned_checkpoint_sync_pruned_payload`. This test reproduces the bug where unaligned checkpoint syncs (skipped slots at epoch boundaries) fail to import the anchor block's execution payload when `prune_payloads` is enabled. The test simulates the failure mode by: - Skipping if execution payloads are not applicable. 
- Creating a harness with an unaligned checkpoint (gap of 3 slots). - Configuring the client with prune_payloads = true. It asserts that the Beacon Chain builds successfully (previously it panicked with `MissingFullBlockExecutionPayloadPruned`), confirming the fix logic in `try_get_full_block`. Co-Authored-By: Andrurachi Co-Authored-By: Michael Sproul --- beacon_node/beacon_chain/tests/store_tests.rs | 152 ++++++++++++++++++ 1 file changed, 152 insertions(+) diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index 8de96adb2d4..cc49f871843 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -2782,6 +2782,158 @@ async fn weak_subjectivity_sync_without_blobs() { weak_subjectivity_sync_test(slots, checkpoint_slot, None, false).await } +// Ensures that an unaligned checkpoint sync (the block is older than the state) +// works correctly even when `prune_payloads` is enabled. +// +// Previously, the `HotColdDB` would refuse to load the execution payload for the +// anchor block because it was considered "pruned", causing the node to fail startup. +#[tokio::test] +async fn reproduction_unaligned_checkpoint_sync_pruned_payload() { + let spec = test_spec::(); + + // Requires Execution Payloads. + let Some(_) = spec.deneb_fork_epoch else { + return; + }; + + // Create an unaligned checkpoint with a gap of 3 slots. 
+ let num_initial_slots = E::slots_per_epoch() * 11; + let checkpoint_slot = Slot::new(E::slots_per_epoch() * 9 - 3); + + let slots = (1..num_initial_slots) + .map(Slot::new) + .filter(|&slot| slot <= checkpoint_slot || slot > checkpoint_slot + 3) + .collect::>(); + + let temp1 = tempdir().unwrap(); + let full_store = get_store_generic(&temp1, StoreConfig::default(), spec.clone()); + + let harness = get_harness_import_all_data_columns(full_store.clone(), LOW_VALIDATOR_COUNT); + let all_validators = (0..LOW_VALIDATOR_COUNT).collect::>(); + + let (genesis_state, genesis_state_root) = harness.get_current_state_and_root(); + harness + .add_attested_blocks_at_slots( + genesis_state.clone(), + genesis_state_root, + &slots, + &all_validators, + ) + .await; + + // Extract snapshot data from the harness. + let wss_block_root = harness + .chain + .block_root_at_slot(checkpoint_slot, WhenSlotSkipped::Prev) + .unwrap() + .unwrap(); + let wss_state_root = harness + .chain + .state_root_at_slot(checkpoint_slot) + .unwrap() + .unwrap(); + + let wss_block = harness + .chain + .store + .get_full_block(&wss_block_root) + .unwrap() + .unwrap(); + + // The test premise requires the anchor block to have a payload. + assert!(wss_block.message().execution_payload().is_ok()); + + let wss_blobs_opt = harness + .chain + .get_or_reconstruct_blobs(&wss_block_root) + .unwrap(); + + let wss_state = full_store + .get_state(&wss_state_root, Some(checkpoint_slot), CACHE_STATE_IN_TESTS) + .unwrap() + .unwrap(); + + // Configure the client with `prune_payloads = true`. + // This triggers the path where `try_get_full_block` must explicitly handle the anchor block. 
+ let temp2 = tempdir().unwrap(); + let store_config = StoreConfig { + prune_payloads: true, + ..StoreConfig::default() + }; + + let store = get_store_generic(&temp2, store_config, spec.clone()); + + let slot_clock = TestingSlotClock::new( + Slot::new(0), + Duration::from_secs(harness.chain.genesis_time), + Duration::from_secs(spec.seconds_per_slot), + ); + slot_clock.set_slot(harness.get_current_slot().as_u64()); + + let chain_config = ChainConfig { + reconstruct_historic_states: true, + ..ChainConfig::default() + }; + + let trusted_setup = get_kzg(&spec); + let (shutdown_tx, _shutdown_rx) = futures::channel::mpsc::channel(1); + let mock = mock_execution_layer_from_parts( + harness.spec.clone(), + harness.runtime.task_executor.clone(), + ); + let all_custody_columns = (0..spec.number_of_custody_groups).collect::>(); + + // Attempt to build the BeaconChain. + // If the bug is present, this will panic with `MissingFullBlockExecutionPayloadPruned`. + let beacon_chain = BeaconChainBuilder::>::new(MinimalEthSpec, trusted_setup) + .chain_config(chain_config) + .store(store.clone()) + .custom_spec(spec.clone().into()) + .task_executor(harness.chain.task_executor.clone()) + .weak_subjectivity_state( + wss_state, + wss_block.clone(), + wss_blobs_opt.clone(), + genesis_state, + ) + .unwrap() + .store_migrator_config(MigratorConfig::default().blocking()) + .slot_clock(slot_clock) + .shutdown_sender(shutdown_tx) + .event_handler(Some(ServerSentEventHandler::new_with_capacity(1))) + .execution_layer(Some(mock.el)) + .ordered_custody_column_indices(all_custody_columns) + .rng(Box::new(StdRng::seed_from_u64(42))) + .build(); + + assert!( + beacon_chain.is_ok(), + "Beacon Chain failed to build. The anchor payload may have been incorrectly pruned. 
Error: {:?}", + beacon_chain.err() + ); + + let chain = beacon_chain.as_ref().unwrap(); + let wss_block_slot = wss_block.slot(); + + assert_ne!( + wss_block_slot, + chain.head_snapshot().beacon_state.slot(), + "Test invalid: Checkpoint was aligned (Slot {} == Slot {}). The test did not trigger the unaligned edge case.", + wss_block_slot, + chain.head_snapshot().beacon_state.slot() + ); + + let payload_exists = chain + .store + .execution_payload_exists(&wss_block_root) + .unwrap_or(false); + + assert!( + payload_exists, + "Split block payload must exist in the new node's store after checkpoint sync" + ); +} + async fn weak_subjectivity_sync_test( slots: Vec, checkpoint_slot: Slot, From 6a3a32515f1a96b7cba543add2c75a021881d87f Mon Sep 17 00:00:00 2001 From: Mac L Date: Mon, 15 Dec 2025 07:20:10 +0400 Subject: [PATCH 65/74] Update `strum` to `0.27` (#8564) #8547 Update our `strum` dependency to `0.27`. This unifies our strum dependencies and removes our duplication of `strum` (and by extension, `strum_macros`). 
Co-Authored-By: Mac L Co-Authored-By: Michael Sproul --- Cargo.lock | 64 ++++++------------- Cargo.toml | 2 +- beacon_node/store/src/config.rs | 4 +- database_manager/src/lib.rs | 4 +- slasher/src/config.rs | 4 +- .../beacon_node_fallback/src/lib.rs | 4 +- 6 files changed, 27 insertions(+), 55 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 413596beeb1..7383d4e1fe0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -138,7 +138,7 @@ checksum = "4bc32535569185cbcb6ad5fa64d989a47bccb9a08e27284b1f2a3ccf16e6d010" dependencies = [ "alloy-primitives", "num_enum", - "strum 0.27.2", + "strum", ] [[package]] @@ -533,7 +533,7 @@ checksum = "6d792e205ed3b72f795a8044c52877d2e6b6e9b1d13f431478121d8d4eaa9028" dependencies = [ "alloy-sol-macro-input", "const-hex", - "heck 0.5.0", + "heck", "indexmap 2.12.0", "proc-macro-error2", "proc-macro2", @@ -551,7 +551,7 @@ checksum = "0bd1247a8f90b465ef3f1207627547ec16940c35597875cdc09c49d58b19693c" dependencies = [ "const-hex", "dunce", - "heck 0.5.0", + "heck", "macro-string", "proc-macro2", "quote", @@ -1273,7 +1273,7 @@ dependencies = [ "ssz_types", "state_processing", "store", - "strum 0.24.1", + "strum", "superstruct", "task_executor", "tempfile", @@ -1314,7 +1314,7 @@ dependencies = [ "serde_json", "slasher", "store", - "strum 0.24.1", + "strum", "task_executor", "tracing", "types", @@ -1332,7 +1332,7 @@ dependencies = [ "sensitive_url", "serde", "slot_clock", - "strum 0.24.1", + "strum", "task_executor", "tokio", "tracing", @@ -1355,7 +1355,7 @@ dependencies = [ "parking_lot", "serde", "slot_clock", - "strum 0.24.1", + "strum", "task_executor", "tokio", "tokio-util", @@ -1868,7 +1868,7 @@ version = "4.5.49" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "syn 2.0.110", @@ -2524,7 +2524,7 @@ dependencies = [ "hex", "serde", "store", - "strum 0.24.1", + "strum", "tracing", 
"types", ] @@ -3071,7 +3071,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "syn 2.0.110", @@ -3427,7 +3427,7 @@ dependencies = [ "slot_clock", "ssz_types", "state_processing", - "strum 0.24.1", + "strum", "superstruct", "task_executor", "tempfile", @@ -4057,12 +4057,6 @@ dependencies = [ "psutil", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - [[package]] name = "heck" version = "0.5.0" @@ -5355,7 +5349,7 @@ version = "0.35.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd297cf53f0cb3dee4d2620bb319ae47ef27c702684309f682bdb7e55a18ae9c" dependencies = [ - "heck 0.5.0", + "heck", "quote", "syn 2.0.110", ] @@ -5550,7 +5544,7 @@ dependencies = [ "smallvec", "snap", "ssz_types", - "strum 0.24.1", + "strum", "superstruct", "task_executor", "tempfile", @@ -6268,7 +6262,7 @@ dependencies = [ "smallvec", "ssz_types", "store", - "strum 0.24.1", + "strum", "task_executor", "tokio", "tokio-stream", @@ -8457,7 +8451,7 @@ dependencies = [ "safe_arith", "serde", "ssz_types", - "strum 0.24.1", + "strum", "tempfile", "tracing", "tree_hash", @@ -8683,7 +8677,7 @@ dependencies = [ "smallvec", "ssz_types", "state_processing", - "strum 0.24.1", + "strum", "superstruct", "tempfile", "tracing", @@ -8706,35 +8700,13 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" -[[package]] -name = "strum" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" -dependencies = [ - "strum_macros 0.24.3", -] - [[package]] 
name = "strum" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ - "strum_macros 0.27.2", -] - -[[package]] -name = "strum_macros" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "rustversion", - "syn 1.0.109", + "strum_macros", ] [[package]] @@ -8743,7 +8715,7 @@ version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "syn 2.0.110", diff --git a/Cargo.toml b/Cargo.toml index aea8fd1b8d7..c2094a46ba1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -235,7 +235,7 @@ snap = "1" ssz_types = { version = "0.14.0", features = ["context_deserialize", "runtime_types"] } state_processing = { path = "consensus/state_processing" } store = { path = "beacon_node/store" } -strum = { version = "0.24", features = ["derive"] } +strum = { version = "0.27", features = ["derive"] } superstruct = "0.10" swap_or_not_shuffle = { path = "consensus/swap_or_not_shuffle" } syn = "1" diff --git a/beacon_node/store/src/config.rs b/beacon_node/store/src/config.rs index 05aa016ec10..0aa00e659bc 100644 --- a/beacon_node/store/src/config.rs +++ b/beacon_node/store/src/config.rs @@ -5,7 +5,7 @@ use ssz::{Decode, Encode}; use ssz_derive::{Decode, Encode}; use std::io::{Read, Write}; use std::num::NonZeroUsize; -use strum::{Display, EnumString, EnumVariantNames}; +use strum::{Display, EnumString, VariantNames}; use superstruct::superstruct; use types::EthSpec; use types::non_zero_usize::new_non_zero_usize; @@ -267,7 +267,7 @@ mod test { } #[derive( - Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Display, EnumString, 
EnumVariantNames, + Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize, Display, EnumString, VariantNames, )] #[strum(serialize_all = "lowercase")] pub enum DatabaseBackend { diff --git a/database_manager/src/lib.rs b/database_manager/src/lib.rs index 6bb7531493d..608400fa7ed 100644 --- a/database_manager/src/lib.rs +++ b/database_manager/src/lib.rs @@ -21,7 +21,7 @@ use store::{ errors::Error, metadata::{CURRENT_SCHEMA_VERSION, SchemaVersion}, }; -use strum::{EnumString, EnumVariantNames}; +use strum::{EnumString, VariantNames}; use tracing::{info, warn}; use types::{BeaconState, EthSpec, Slot}; @@ -80,7 +80,7 @@ pub fn display_db_version( } #[derive( - Debug, PartialEq, Eq, Clone, EnumString, Deserialize, Serialize, EnumVariantNames, ValueEnum, + Debug, PartialEq, Eq, Clone, EnumString, Deserialize, Serialize, VariantNames, ValueEnum, )] pub enum InspectTarget { #[strum(serialize = "sizes")] diff --git a/slasher/src/config.rs b/slasher/src/config.rs index a8194bed499..144016efd24 100644 --- a/slasher/src/config.rs +++ b/slasher/src/config.rs @@ -2,7 +2,7 @@ use crate::Error; use serde::{Deserialize, Serialize}; use std::num::NonZeroUsize; use std::path::PathBuf; -use strum::{Display, EnumString, EnumVariantNames}; +use strum::{Display, EnumString, VariantNames}; use types::non_zero_usize::new_non_zero_usize; use types::{Epoch, EthSpec, IndexedAttestation}; @@ -59,7 +59,7 @@ pub struct DiskConfig { } #[derive( - Debug, Clone, Copy, PartialEq, Serialize, Deserialize, Display, EnumString, EnumVariantNames, + Debug, Clone, Copy, PartialEq, Serialize, Deserialize, Display, EnumString, VariantNames, )] #[strum(serialize_all = "lowercase")] pub enum DatabaseBackend { diff --git a/validator_client/beacon_node_fallback/src/lib.rs b/validator_client/beacon_node_fallback/src/lib.rs index 6abcd44cc94..2d75df2fa34 100644 --- a/validator_client/beacon_node_fallback/src/lib.rs +++ b/validator_client/beacon_node_fallback/src/lib.rs @@ -20,7 +20,7 @@ use 
std::future::Future; use std::sync::Arc; use std::time::{Duration, Instant}; use std::vec::Vec; -use strum::EnumVariantNames; +use strum::VariantNames; use task_executor::TaskExecutor; use tokio::{sync::RwLock, time::sleep}; use tracing::{debug, error, warn}; @@ -752,7 +752,7 @@ async fn sort_nodes_by_health(nodes: &mut Vec) { } /// Serves as a cue for `BeaconNodeFallback` to tell which requests need to be broadcasted. -#[derive(Clone, Copy, Debug, PartialEq, Deserialize, Serialize, EnumVariantNames, ValueEnum)] +#[derive(Clone, Copy, Debug, PartialEq, Deserialize, Serialize, VariantNames, ValueEnum)] #[strum(serialize_all = "kebab-case")] pub enum ApiTopic { None, From 32f7615cc897b9b15c7587c45ffbc1174d8b3ec9 Mon Sep 17 00:00:00 2001 From: Mac L Date: Mon, 15 Dec 2025 07:20:12 +0400 Subject: [PATCH 66/74] Update `syn` to `2.0.110` (#8563) #8547 We are currently using an older version of `syn` in `test_random_derive`. Updating this removes one of the sources of `syn` `1.0.109` in our dependency tree. 
Co-Authored-By: Mac L Co-Authored-By: Michael Sproul --- Cargo.lock | 2 +- Cargo.toml | 2 +- common/test_random_derive/src/lib.rs | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7383d4e1fe0..6ed7bfd0b60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8920,7 +8920,7 @@ name = "test_random_derive" version = "0.2.0" dependencies = [ "quote", - "syn 1.0.109", + "syn 2.0.110", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index c2094a46ba1..d5d1687c764 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -238,7 +238,7 @@ store = { path = "beacon_node/store" } strum = { version = "0.27", features = ["derive"] } superstruct = "0.10" swap_or_not_shuffle = { path = "consensus/swap_or_not_shuffle" } -syn = "1" +syn = "2" sysinfo = "0.26" system_health = { path = "common/system_health" } task_executor = { path = "common/task_executor" } diff --git a/common/test_random_derive/src/lib.rs b/common/test_random_derive/src/lib.rs index 3017936f1a1..bf57d79aaa8 100644 --- a/common/test_random_derive/src/lib.rs +++ b/common/test_random_derive/src/lib.rs @@ -8,7 +8,8 @@ use syn::{DeriveInput, parse_macro_input}; /// The field attribute is: `#[test_random(default)]` fn should_use_default(field: &syn::Field) -> bool { field.attrs.iter().any(|attr| { - attr.path.is_ident("test_random") && attr.tokens.to_string().replace(' ', "") == "(default)" + attr.path().is_ident("test_random") + && matches!(&attr.meta, syn::Meta::List(list) if list.tokens.to_string().replace(' ', "") == "default") }) } From ac8d77369d52e6fd9ffcb4bfa0e8ae11dd65d7ed Mon Sep 17 00:00:00 2001 From: Akihito Nakano Date: Mon, 15 Dec 2025 14:08:21 +0900 Subject: [PATCH 67/74] Fix Makefile to avoid git describe error in CI (#8513) Fixes the error `fatal: No names found, cannot describe anything.` that occurs when running `make` commands in CI (GitHub Actions). 
https://github.com/sigp/lighthouse/actions/runs/19839541042/job/56844781126#step:5:13 > fatal: No names found, cannot describe anything. Changed the `GIT_TAG` variable assignment in the Makefile from immediate evaluation to lazy evaluation: ```diff - GIT_TAG := $(shell git describe --tags --candidates 1) + GIT_TAG = $(shell git describe --tags --candidates 1) ``` This change ensures that git describe is only executed when `GIT_TAG` is actually used (in the `build-release-tarballs` target), rather than on every Makefile invocation. Co-Authored-By: ackintosh --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index a6891b682f7..c14f1d712a7 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ EF_TESTS = "testing/ef_tests" STATE_TRANSITION_VECTORS = "testing/state_transition_vectors" EXECUTION_ENGINE_INTEGRATION = "testing/execution_engine_integration" -GIT_TAG := $(shell git describe --tags --candidates 1) +GIT_TAG = $(shell git describe --tags --candidates 1) BIN_DIR = "bin" X86_64_TAG = "x86_64-unknown-linux-gnu" From afa6457acf2a38c90b89eb0434135e1cf3646f77 Mon Sep 17 00:00:00 2001 From: 0xMushow <105550256+0xMushow@users.noreply.github.com> Date: Mon, 15 Dec 2025 06:14:15 +0100 Subject: [PATCH 68/74] fix visual bug on visualize_batch_state leading to an unwanted comma (#8499) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Which issue # does this PR address? None The `visualize_batch_state` function uses the following loop `for mut batch_index in 0..BATCH_BUFFER_SIZE`, making it from `0` to `BATCH_BUFFER_SIZE - 1` (behind the scenes).
Hence we would never hit the following condition: ```rust if batch_index != BATCH_BUFFER_SIZE { visualization_string.push(','); } ``` Replacing `!=` with `<` & `BATCH_BUFFER_SIZE -1` allows for the following change: `[A,B,C,D,E,]` to become: `[A,B,C,D,E]` Co-Authored-By: Antoine James --- beacon_node/network/src/sync/range_sync/chain.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beacon_node/network/src/sync/range_sync/chain.rs b/beacon_node/network/src/sync/range_sync/chain.rs index 014d728ffe4..4ce10e23ca1 100644 --- a/beacon_node/network/src/sync/range_sync/chain.rs +++ b/beacon_node/network/src/sync/range_sync/chain.rs @@ -1331,7 +1331,7 @@ impl SyncingChain { .get(&(self.processing_target + batch_index as u64 * EPOCHS_PER_BATCH)) { visualization_string.push(batch.visualize()); - if batch_index != BATCH_BUFFER_SIZE { + if batch_index < BATCH_BUFFER_SIZE - 1 { // Add a comma in between elements visualization_string.push(','); } From 86c2b7cfbe28e0ad32fdfd559a7cb04f787af0bd Mon Sep 17 00:00:00 2001 From: chonghe <44791194+chong-he@users.noreply.github.com> Date: Tue, 16 Dec 2025 11:19:28 +0800 Subject: [PATCH 69/74] Append client version info to graffiti (#7558) * #7201 Co-Authored-By: Tan Chee Keong Co-Authored-By: chonghe <44791194+chong-he@users.noreply.github.com> Co-Authored-By: Jimmy Chen Co-Authored-By: Tan Chee Keong --- beacon_node/beacon_chain/src/beacon_chain.rs | 10 +- .../beacon_chain/src/graffiti_calculator.rs | 152 +++++++++++++++++- beacon_node/beacon_chain/src/test_utils.rs | 15 +- beacon_node/execution_layer/src/engine_api.rs | 55 ++++++- beacon_node/http_api/src/produce_block.rs | 12 +- .../http_api/tests/interactive_tests.rs | 2 +- beacon_node/http_api/tests/tests.rs | 128 ++++++++++++--- book/src/help_vc.md | 4 + common/eth2/src/lib.rs | 17 ++ common/eth2/src/types.rs | 8 + validator_client/src/cli.rs | 10 ++ validator_client/src/config.rs | 11 +- validator_client/src/lib.rs | 3 +- 
.../validator_services/src/block_service.rs | 12 ++ 14 files changed, 392 insertions(+), 47 deletions(-) diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 25b2aa30cb3..2107f06e1e3 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -33,7 +33,7 @@ use crate::events::ServerSentEventHandler; use crate::execution_payload::{NotifyExecutionLayer, PreparePayloadHandle, get_execution_payload}; use crate::fetch_blobs::EngineGetBlobsOutput; use crate::fork_choice_signal::{ForkChoiceSignalRx, ForkChoiceSignalTx, ForkChoiceWaitResult}; -use crate::graffiti_calculator::GraffitiCalculator; +use crate::graffiti_calculator::{GraffitiCalculator, GraffitiSettings}; use crate::kzg_utils::reconstruct_blobs; use crate::light_client_finality_update_verification::{ Error as LightClientFinalityUpdateError, VerifiedLightClientFinalityUpdate, @@ -4493,7 +4493,7 @@ impl BeaconChain { self: &Arc, randao_reveal: Signature, slot: Slot, - validator_graffiti: Option, + graffiti_settings: GraffitiSettings, verification: ProduceBlockVerification, builder_boost_factor: Option, block_production_version: BlockProductionVersion, @@ -4527,7 +4527,7 @@ impl BeaconChain { state_root_opt, slot, randao_reveal, - validator_graffiti, + graffiti_settings, verification, builder_boost_factor, block_production_version, @@ -5060,7 +5060,7 @@ impl BeaconChain { state_root_opt: Option, produce_at_slot: Slot, randao_reveal: Signature, - validator_graffiti: Option, + graffiti_settings: GraffitiSettings, verification: ProduceBlockVerification, builder_boost_factor: Option, block_production_version: BlockProductionVersion, @@ -5071,7 +5071,7 @@ impl BeaconChain { let chain = self.clone(); let graffiti = self .graffiti_calculator - .get_graffiti(validator_graffiti) + .get_graffiti(graffiti_settings) .await; let span = Span::current(); let mut partial_beacon_block = self diff --git 
a/beacon_node/beacon_chain/src/graffiti_calculator.rs b/beacon_node/beacon_chain/src/graffiti_calculator.rs index 56808e0e67e..85470715c9f 100644 --- a/beacon_node/beacon_chain/src/graffiti_calculator.rs +++ b/beacon_node/beacon_chain/src/graffiti_calculator.rs @@ -1,5 +1,6 @@ use crate::BeaconChain; use crate::BeaconChainTypes; +use eth2::types::GraffitiPolicy; use execution_layer::{CommitPrefix, ExecutionLayer, http::ENGINE_GET_CLIENT_VERSION_V1}; use logging::crit; use serde::{Deserialize, Serialize}; @@ -48,6 +49,25 @@ impl Debug for GraffitiOrigin { } } +pub enum GraffitiSettings { + Unspecified, + Specified { + graffiti: Graffiti, + policy: GraffitiPolicy, + }, +} + +impl GraffitiSettings { + pub fn new(validator_graffiti: Option, policy: Option) -> Self { + validator_graffiti + .map(|graffiti| Self::Specified { + graffiti, + policy: policy.unwrap_or(GraffitiPolicy::PreserveUserGraffiti), + }) + .unwrap_or(Self::Unspecified) + } +} + pub struct GraffitiCalculator { pub beacon_graffiti: GraffitiOrigin, execution_layer: Option>, @@ -73,11 +93,19 @@ impl GraffitiCalculator { /// 2. Graffiti specified by the user via beacon node CLI options. /// 3. The EL & CL client version string, applicable when the EL supports version specification. /// 4. The default lighthouse version string, used if the EL lacks version specification support. 
- pub async fn get_graffiti(&self, validator_graffiti: Option) -> Graffiti { - if let Some(graffiti) = validator_graffiti { - return graffiti; + pub async fn get_graffiti(&self, graffiti_settings: GraffitiSettings) -> Graffiti { + match graffiti_settings { + GraffitiSettings::Specified { graffiti, policy } => match policy { + GraffitiPolicy::PreserveUserGraffiti => graffiti, + GraffitiPolicy::AppendClientVersions => { + self.calculate_combined_graffiti(Some(graffiti)).await + } + }, + GraffitiSettings::Unspecified => self.calculate_combined_graffiti(None).await, } + } + async fn calculate_combined_graffiti(&self, validator_graffiti: Option) -> Graffiti { match self.beacon_graffiti { GraffitiOrigin::UserSpecified(graffiti) => graffiti, GraffitiOrigin::Calculated(default_graffiti) => { @@ -133,7 +161,7 @@ impl GraffitiCalculator { CommitPrefix("00000000".to_string()) }); - engine_version.calculate_graffiti(lighthouse_commit_prefix) + engine_version.calculate_graffiti(lighthouse_commit_prefix, validator_graffiti) } } } @@ -224,8 +252,10 @@ async fn engine_version_cache_refresh_service( #[cfg(test)] mod tests { use crate::ChainConfig; + use crate::graffiti_calculator::GraffitiSettings; use crate::test_utils::{BeaconChainHarness, EphemeralHarnessType, test_spec}; use bls::Keypair; + use eth2::types::GraffitiPolicy; use execution_layer::EngineCapabilities; use execution_layer::test_utils::{DEFAULT_CLIENT_VERSION, DEFAULT_ENGINE_CAPABILITIES}; use std::sync::Arc; @@ -281,8 +311,12 @@ mod tests { let version_bytes = std::cmp::min(lighthouse_version::VERSION.len(), GRAFFITI_BYTES_LEN); // grab the slice of the graffiti that corresponds to the lighthouse version - let graffiti_slice = - &harness.chain.graffiti_calculator.get_graffiti(None).await.0[..version_bytes]; + let graffiti_slice = &harness + .chain + .graffiti_calculator + .get_graffiti(GraffitiSettings::Unspecified) + .await + .0[..version_bytes]; // convert graffiti bytes slice to ascii for easy debugging if this 
test should fail let graffiti_str = @@ -303,7 +337,12 @@ mod tests { let spec = Arc::new(test_spec::()); let harness = get_harness(VALIDATOR_COUNT, spec, None); - let found_graffiti_bytes = harness.chain.graffiti_calculator.get_graffiti(None).await.0; + let found_graffiti_bytes = harness + .chain + .graffiti_calculator + .get_graffiti(GraffitiSettings::Unspecified) + .await + .0; let mock_commit = DEFAULT_CLIENT_VERSION.commit.clone(); let expected_graffiti_string = format!( @@ -352,7 +391,10 @@ mod tests { let found_graffiti = harness .chain .graffiti_calculator - .get_graffiti(Some(Graffiti::from(graffiti_bytes))) + .get_graffiti(GraffitiSettings::new( + Some(Graffiti::from(graffiti_bytes)), + Some(GraffitiPolicy::PreserveUserGraffiti), + )) .await; assert_eq!( @@ -360,4 +402,98 @@ mod tests { "0x6e6963652067726166666974692062726f000000000000000000000000000000" ); } + + #[tokio::test] + async fn check_append_el_version_graffiti_various_length() { + let spec = Arc::new(test_spec::()); + let harness = get_harness(VALIDATOR_COUNT, spec, None); + + let graffiti_vec = vec![ + // less than 20 characters, example below is 19 characters + "This is my graffiti", + // 20-23 characters, example below is 22 characters + "This is my graffiti yo", + // 24-27 characters, example below is 26 characters + "This is my graffiti string", + // 28-29 characters, example below is 29 characters + "This is my graffiti string yo", + // 30-32 characters, example below is 32 characters + "This is my graffiti string yo yo", + ]; + + for graffiti in graffiti_vec { + let mut graffiti_bytes = [0; GRAFFITI_BYTES_LEN]; + graffiti_bytes[..graffiti.len()].copy_from_slice(graffiti.as_bytes()); + + // To test appending client version info with user specified graffiti + let policy = GraffitiPolicy::AppendClientVersions; + let found_graffiti_bytes = harness + .chain + .graffiti_calculator + .get_graffiti(GraffitiSettings::Specified { + graffiti: Graffiti::from(graffiti_bytes), + policy, + }) + .await + 
.0; + + let mock_commit = DEFAULT_CLIENT_VERSION.commit.clone(); + + let graffiti_length = graffiti.len(); + let append_graffiti_string = match graffiti_length { + 0..=19 => format!( + "{}{}{}{}", + DEFAULT_CLIENT_VERSION.code, + mock_commit + .strip_prefix("0x") + .unwrap_or("&mock_commit") + .get(0..4) + .expect("should get first 2 bytes in hex"), + "LH", + lighthouse_version::COMMIT_PREFIX + .get(0..4) + .expect("should get first 2 bytes in hex") + ), + 20..=23 => format!( + "{}{}{}{}", + DEFAULT_CLIENT_VERSION.code, + mock_commit + .strip_prefix("0x") + .unwrap_or("&mock_commit") + .get(0..2) + .expect("should get first 2 bytes in hex"), + "LH", + lighthouse_version::COMMIT_PREFIX + .get(0..2) + .expect("should get first 2 bytes in hex") + ), + 24..=27 => format!("{}{}", DEFAULT_CLIENT_VERSION.code, "LH",), + 28..=29 => DEFAULT_CLIENT_VERSION.code.to_string(), + // when user graffiti length is 30-32 characters, append nothing + 30..=32 => String::new(), + _ => panic!( + "graffiti length should be less than or equal to GRAFFITI_BYTES_LEN (32 characters)" + ), + }; + + let expected_graffiti_string = if append_graffiti_string.is_empty() { + // for the case of empty append_graffiti_string, i.e., user-specified graffiti is 30-32 characters + graffiti.to_string() + } else { + // There is a space between the client version info and user graffiti + // as defined in calculate_graffiti function in engine_api.rs + format!("{} {}", append_graffiti_string, graffiti) + }; + + let expected_graffiti_prefix_bytes = expected_graffiti_string.as_bytes(); + let expected_graffiti_prefix_len = + std::cmp::min(expected_graffiti_prefix_bytes.len(), GRAFFITI_BYTES_LEN); + + let found_graffiti_string = + std::str::from_utf8(&found_graffiti_bytes[..expected_graffiti_prefix_len]) + .expect("bytes should convert nicely to ascii"); + + assert_eq!(expected_graffiti_string, found_graffiti_string); + } + } } diff --git a/beacon_node/beacon_chain/src/test_utils.rs 
b/beacon_node/beacon_chain/src/test_utils.rs index 54fa6b5ff09..500c0b22d61 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -2,6 +2,7 @@ use crate::blob_verification::GossipVerifiedBlob; use crate::block_verification_types::{AsBlock, RpcBlock}; use crate::custody_context::NodeCustodyType; use crate::data_column_verification::CustodyDataColumn; +use crate::graffiti_calculator::GraffitiSettings; use crate::kzg_utils::build_data_column_sidecars; use crate::observed_operations::ObservationOutcome; pub use crate::persisted_beacon_chain::PersistedBeaconChain; @@ -23,7 +24,7 @@ use bls::get_withdrawal_credentials; use bls::{ AggregateSignature, Keypair, PublicKey, PublicKeyBytes, SecretKey, Signature, SignatureBytes, }; -use eth2::types::SignedBlockContentsTuple; +use eth2::types::{GraffitiPolicy, SignedBlockContentsTuple}; use execution_layer::test_utils::generate_genesis_header; use execution_layer::{ ExecutionLayer, @@ -943,6 +944,8 @@ where // BeaconChain errors out with `DuplicateFullyImported`. Vary the graffiti so that we produce // different blocks each time. let graffiti = Graffiti::from(self.rng.lock().random::<[u8; 32]>()); + let graffiti_settings = + GraffitiSettings::new(Some(graffiti), Some(GraffitiPolicy::PreserveUserGraffiti)); let randao_reveal = self.sign_randao_reveal(&state, proposer_index, slot); @@ -956,7 +959,7 @@ where None, slot, randao_reveal, - Some(graffiti), + graffiti_settings, ProduceBlockVerification::VerifyRandao, builder_boost_factor, BlockProductionVersion::V3, @@ -1000,6 +1003,8 @@ where // BeaconChain errors out with `DuplicateFullyImported`. Vary the graffiti so that we produce // different blocks each time. 
let graffiti = Graffiti::from(self.rng.lock().random::<[u8; 32]>()); + let graffiti_settings = + GraffitiSettings::new(Some(graffiti), Some(GraffitiPolicy::PreserveUserGraffiti)); let randao_reveal = self.sign_randao_reveal(&state, proposer_index, slot); @@ -1010,7 +1015,7 @@ where None, slot, randao_reveal, - Some(graffiti), + graffiti_settings, ProduceBlockVerification::VerifyRandao, None, BlockProductionVersion::FullV2, @@ -1059,6 +1064,8 @@ where // BeaconChain errors out with `DuplicateFullyImported`. Vary the graffiti so that we produce // different blocks each time. let graffiti = Graffiti::from(self.rng.lock().random::<[u8; 32]>()); + let graffiti_settings = + GraffitiSettings::new(Some(graffiti), Some(GraffitiPolicy::PreserveUserGraffiti)); let randao_reveal = self.sign_randao_reveal(&state, proposer_index, slot); @@ -1071,7 +1078,7 @@ where None, slot, randao_reveal, - Some(graffiti), + graffiti_settings, ProduceBlockVerification::VerifyRandao, None, BlockProductionVersion::FullV2, diff --git a/beacon_node/execution_layer/src/engine_api.rs b/beacon_node/execution_layer/src/engine_api.rs index b0cc4dd8241..88567ac6e12 100644 --- a/beacon_node/execution_layer/src/engine_api.rs +++ b/beacon_node/execution_layer/src/engine_api.rs @@ -763,8 +763,12 @@ pub struct ClientVersionV1 { } impl ClientVersionV1 { - pub fn calculate_graffiti(&self, lighthouse_commit_prefix: CommitPrefix) -> Graffiti { - let graffiti_string = format!( + pub fn calculate_graffiti( + &self, + lighthouse_commit_prefix: CommitPrefix, + validator_graffiti: Option, + ) -> Graffiti { + let append_graffiti_full = format!( "{}{}LH{}", self.code, self.commit @@ -778,6 +782,53 @@ impl ClientVersionV1 { .unwrap_or("0000") .to_lowercase(), ); + + // Implement the special case here: + // https://hackmd.io/@wmoBhF17RAOH2NZ5bNXJVg/BJX2c9gja#SPECIAL-CASE-the-flexible-standard + let append_graffiti_one_byte = format!( + "{}{}LH{}", + self.code, + self.commit + .0 + .get(..2) + 
.unwrap_or(self.commit.0.as_str()) + .to_lowercase(), + lighthouse_commit_prefix + .0 + .get(..2) + .unwrap_or("00") + .to_lowercase(), + ); + + let append_graffiti_no_commit = format!("{}LH", self.code); + let append_graffiti_only_el = format!("{}", self.code); + + let graffiti_string = if let Some(graffiti) = validator_graffiti { + let graffiti_length = graffiti.as_utf8_lossy().len(); + let graffiti_str = graffiti.as_utf8_lossy(); + + // 12 characters for append_graffiti_full, plus one character for spacing + // that leaves user specified graffiti to be 32-12-1 = 19 characters max, i.e., <20 + if graffiti_length < 20 { + format!("{} {}", append_graffiti_full, graffiti_str) + // user-specified graffiti is between 20-23 characters + } else if (20..24).contains(&graffiti_length) { + format!("{} {}", append_graffiti_one_byte, graffiti_str) + // user-specified graffiti is between 24-27 characters + } else if (24..28).contains(&graffiti_length) { + format!("{} {}", append_graffiti_no_commit, graffiti_str) + // user-specified graffiti is between 28-29 characters + } else if (28..30).contains(&graffiti_length) { + format!("{} {}", append_graffiti_only_el, graffiti_str) + // if user-specified graffiti is between 30-32 characters, append nothing + } else { + return graffiti; + } + } else { + // if no validator_graffiti (user doesn't specify), use the full client version info graffiti + append_graffiti_full + }; + let mut graffiti_bytes = [0u8; GRAFFITI_BYTES_LEN]; let bytes_to_copy = std::cmp::min(graffiti_string.len(), GRAFFITI_BYTES_LEN); graffiti_bytes[..bytes_to_copy] diff --git a/beacon_node/http_api/src/produce_block.rs b/beacon_node/http_api/src/produce_block.rs index 472ec0b65e4..3bd0cec7e33 100644 --- a/beacon_node/http_api/src/produce_block.rs +++ b/beacon_node/http_api/src/produce_block.rs @@ -6,6 +6,7 @@ use crate::{ add_ssz_content_type_header, beacon_response, inconsistent_fork_rejection, }, }; +use beacon_chain::graffiti_calculator::GraffitiSettings; use 
beacon_chain::{ BeaconBlockResponseWrapper, BeaconChain, BeaconChainTypes, ProduceBlockVerification, }; @@ -68,11 +69,13 @@ pub async fn produce_block_v3( query.builder_boost_factor }; + let graffiti_settings = GraffitiSettings::new(query.graffiti, query.graffiti_policy); + let block_response_type = chain .produce_block_with_verification( randao_reveal, slot, - query.graffiti, + graffiti_settings, randao_verification, builder_boost_factor, BlockProductionVersion::V3, @@ -148,11 +151,13 @@ pub async fn produce_blinded_block_v2( })?; let randao_verification = get_randao_verification(&query, randao_reveal.is_infinity())?; + let graffiti_settings = GraffitiSettings::new(query.graffiti, query.graffiti_policy); + let block_response_type = chain .produce_block_with_verification( randao_reveal, slot, - query.graffiti, + graffiti_settings, randao_verification, None, BlockProductionVersion::BlindedV2, @@ -182,12 +187,13 @@ pub async fn produce_block_v2( })?; let randao_verification = get_randao_verification(&query, randao_reveal.is_infinity())?; + let graffiti_settings = GraffitiSettings::new(query.graffiti, query.graffiti_policy); let block_response_type = chain .produce_block_with_verification( randao_reveal, slot, - query.graffiti, + graffiti_settings, randao_verification, None, BlockProductionVersion::FullV2, diff --git a/beacon_node/http_api/tests/interactive_tests.rs b/beacon_node/http_api/tests/interactive_tests.rs index 0119a7645c2..ce61c821b57 100644 --- a/beacon_node/http_api/tests/interactive_tests.rs +++ b/beacon_node/http_api/tests/interactive_tests.rs @@ -640,7 +640,7 @@ pub async fn proposer_boost_re_org_test( .into(); let (unsigned_block_type, _) = tester .client - .get_validator_blocks_v3::(slot_c, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot_c, &randao_reveal, None, None, None) .await .unwrap(); diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs index f8eba0ee2b7..ed7abead18a 100644 --- 
a/beacon_node/http_api/tests/tests.rs +++ b/beacon_node/http_api/tests/tests.rs @@ -3681,7 +3681,7 @@ impl ApiTester { let (response, metadata) = self .client - .get_validator_blocks_v3_ssz::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3_ssz::(slot, &randao_reveal, None, None, None) .await .unwrap(); @@ -4646,7 +4646,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -4673,7 +4673,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, Some(0)) + .get_validator_blocks_v3::(slot, &randao_reveal, None, Some(0), None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -4701,7 +4701,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, Some(u64::MAX)) + .get_validator_blocks_v3::(slot, &randao_reveal, None, Some(u64::MAX), None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -4858,7 +4858,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -4939,7 +4939,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5034,7 +5034,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, 
None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5125,7 +5125,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5216,7 +5216,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5305,7 +5305,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5366,7 +5366,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5437,7 +5437,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5552,7 +5552,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(next_slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(next_slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5573,7 +5573,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(next_slot, &randao_reveal, None, None) + 
.get_validator_blocks_v3::(next_slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5708,7 +5708,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(next_slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(next_slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5739,7 +5739,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(next_slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(next_slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5821,7 +5821,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5895,7 +5895,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -5964,7 +5964,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -6033,7 +6033,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -6100,7 +6100,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - 
.get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -6174,7 +6174,7 @@ impl ApiTester { let (payload_type, metadata) = self .client - .get_validator_blocks_v3::(slot, &randao_reveal, None, None) + .get_validator_blocks_v3::(slot, &randao_reveal, None, None, None) .await .unwrap(); Self::check_block_v3_metadata(&metadata, &payload_type); @@ -6864,6 +6864,82 @@ impl ApiTester { } self } + + async fn get_validator_blocks_v3_path_graffiti_policy(self) -> Self { + let slot = self.chain.slot().unwrap(); + let epoch = self.chain.epoch().unwrap(); + let (_, randao_reveal) = self.get_test_randao(slot, epoch).await; + let graffiti = Some(Graffiti::from([0; GRAFFITI_BYTES_LEN])); + let builder_boost_factor = None; + + // Default case where GraffitiPolicy is None + let default_path = self + .client + .get_validator_blocks_v3_path( + slot, + &randao_reveal, + graffiti.as_ref(), + SkipRandaoVerification::Yes, + builder_boost_factor, + None, + ) + .await + .unwrap(); + + let query_default_path = default_path.query().unwrap_or(""); + // When GraffitiPolicy is None, the HTTP API query path should not contain "graffiti_policy" + assert!( + !query_default_path.contains("graffiti_policy"), + "URL should not contain graffiti_policy parameter (same as PreserveUserGraffiti). 
URL is: {}", + query_default_path + ); + + let preserve_path = self + .client + .get_validator_blocks_v3_path( + slot, + &randao_reveal, + graffiti.as_ref(), + SkipRandaoVerification::Yes, + builder_boost_factor, + Some(GraffitiPolicy::PreserveUserGraffiti), + ) + .await + .unwrap(); + + let query_preserve_path = preserve_path.query().unwrap_or(""); + // When GraffitiPolicy is set to PreserveUserGraffiti, the HTTP API query path should not contain "graffiti_policy" + assert!( + !query_preserve_path.contains("graffiti_policy"), + "URL should not contain graffiti_policy parameter when using PreserveUserGraffiti. URL is: {}", + query_preserve_path + ); + + // The HTTP API query path for PreserveUserGraffiti should be the same as the default + assert_eq!(query_default_path, query_preserve_path); + + let append_path = self + .client + .get_validator_blocks_v3_path( + slot, + &randao_reveal, + graffiti.as_ref(), + SkipRandaoVerification::No, + builder_boost_factor, + Some(GraffitiPolicy::AppendClientVersions), + ) + .await + .unwrap(); + + let query_append_path = append_path.query().unwrap_or(""); + // When GraffitiPolicy is AppendClientVersions, the HTTP API query path should contain "graffiti_policy" + assert!( + query_append_path.contains("graffiti_policy"), + "URL should contain graffiti_policy=AppendClientVersions parameter. URL is: {}", + query_append_path + ); + self + } } async fn poll_events, eth2::Error>> + Unpin, E: EthSpec>( @@ -8054,3 +8130,11 @@ async fn get_beacon_rewards_blocks_electra() { .test_beacon_block_rewards_electra() .await; } + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn get_validator_blocks_v3_http_api_path() { + ApiTester::new() + .await + .get_validator_blocks_v3_path_graffiti_policy() + .await; +} diff --git a/book/src/help_vc.md b/book/src/help_vc.md index b19ff0ba388..2a9936d1d2f 100644 --- a/book/src/help_vc.md +++ b/book/src/help_vc.md @@ -221,6 +221,10 @@ Flags: automatically enabled for <= 64 validators. 
Enabling this metric for higher validator counts will lead to higher volume of prometheus metrics being collected. + --graffiti-append + When used, client version info will be prepended to user custom + graffiti, with a space in between. This should only be used with a + Lighthouse beacon node. -h, --help Prints help information --http diff --git a/common/eth2/src/lib.rs b/common/eth2/src/lib.rs index 820d817d9d8..8746e3c063c 100644 --- a/common/eth2/src/lib.rs +++ b/common/eth2/src/lib.rs @@ -2207,6 +2207,7 @@ impl BeaconNodeHttpClient { graffiti: Option<&Graffiti>, skip_randao_verification: SkipRandaoVerification, builder_booster_factor: Option, + graffiti_policy: Option, ) -> Result { let mut path = self.eth_path(V3)?; @@ -2234,6 +2235,14 @@ impl BeaconNodeHttpClient { .append_pair("builder_boost_factor", &builder_booster_factor.to_string()); } + // Only append the HTTP URL request if the graffiti_policy is to AppendClientVersions + // If PreserveUserGraffiti (default), then the HTTP URL request does not contain graffiti_policy + // so that the default case is compliant to the spec + if let Some(GraffitiPolicy::AppendClientVersions) = graffiti_policy { + path.query_pairs_mut() + .append_pair("graffiti_policy", "AppendClientVersions"); + } + Ok(path) } @@ -2244,6 +2253,7 @@ impl BeaconNodeHttpClient { randao_reveal: &SignatureBytes, graffiti: Option<&Graffiti>, builder_booster_factor: Option, + graffiti_policy: Option, ) -> Result<(JsonProduceBlockV3Response, ProduceBlockV3Metadata), Error> { self.get_validator_blocks_v3_modular( slot, @@ -2251,6 +2261,7 @@ impl BeaconNodeHttpClient { graffiti, SkipRandaoVerification::No, builder_booster_factor, + graffiti_policy, ) .await } @@ -2263,6 +2274,7 @@ impl BeaconNodeHttpClient { graffiti: Option<&Graffiti>, skip_randao_verification: SkipRandaoVerification, builder_booster_factor: Option, + graffiti_policy: Option, ) -> Result<(JsonProduceBlockV3Response, ProduceBlockV3Metadata), Error> { let path = self 
.get_validator_blocks_v3_path( @@ -2271,6 +2283,7 @@ impl BeaconNodeHttpClient { graffiti, skip_randao_verification, builder_booster_factor, + graffiti_policy, ) .await?; @@ -2313,6 +2326,7 @@ impl BeaconNodeHttpClient { randao_reveal: &SignatureBytes, graffiti: Option<&Graffiti>, builder_booster_factor: Option, + graffiti_policy: Option, ) -> Result<(ProduceBlockV3Response, ProduceBlockV3Metadata), Error> { self.get_validator_blocks_v3_modular_ssz::( slot, @@ -2320,6 +2334,7 @@ impl BeaconNodeHttpClient { graffiti, SkipRandaoVerification::No, builder_booster_factor, + graffiti_policy, ) .await } @@ -2332,6 +2347,7 @@ impl BeaconNodeHttpClient { graffiti: Option<&Graffiti>, skip_randao_verification: SkipRandaoVerification, builder_booster_factor: Option, + graffiti_policy: Option, ) -> Result<(ProduceBlockV3Response, ProduceBlockV3Metadata), Error> { let path = self .get_validator_blocks_v3_path( @@ -2340,6 +2356,7 @@ impl BeaconNodeHttpClient { graffiti, skip_randao_verification, builder_booster_factor, + graffiti_policy, ) .await?; diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs index aace8f936c9..b1a61ce00cc 100644 --- a/common/eth2/src/types.rs +++ b/common/eth2/src/types.rs @@ -752,12 +752,20 @@ pub struct ProposerData { pub slot: Slot, } +#[derive(Clone, Copy, Serialize, Deserialize, Default, Debug)] +pub enum GraffitiPolicy { + #[default] + PreserveUserGraffiti, + AppendClientVersions, +} + #[derive(Clone, Deserialize)] pub struct ValidatorBlocksQuery { pub randao_reveal: SignatureBytes, pub graffiti: Option, pub skip_randao_verification: SkipRandaoVerification, pub builder_boost_factor: Option, + pub graffiti_policy: Option, } #[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Deserialize)] diff --git a/validator_client/src/cli.rs b/validator_client/src/cli.rs index 477781d3e88..3e1c46097f0 100644 --- a/validator_client/src/cli.rs +++ b/validator_client/src/cli.rs @@ -150,6 +150,16 @@ pub struct ValidatorClient { )] pub graffiti: Option, 
+ #[clap( + long, + requires = "graffiti", + help = "When used, client version info will be prepended to user custom graffiti, with a space in between. \ + This should only be used with a Lighthouse beacon node.", + display_order = 0, + help_heading = FLAG_HEADER + )] + pub graffiti_append: bool, + #[clap( long, value_name = "GRAFFITI-FILE", diff --git a/validator_client/src/config.rs b/validator_client/src/config.rs index 04d69dc9dc1..1a286a74dc1 100644 --- a/validator_client/src/config.rs +++ b/validator_client/src/config.rs @@ -7,7 +7,7 @@ use directory::{ DEFAULT_HARDCODED_NETWORK, DEFAULT_ROOT_DIR, DEFAULT_SECRET_DIR, DEFAULT_VALIDATOR_DIR, get_network_dir, }; -use eth2::types::Graffiti; +use eth2::types::{Graffiti, GraffitiPolicy}; use graffiti_file::GraffitiFile; use initialized_validators::Config as InitializedValidatorsConfig; use lighthouse_validator_store::Config as ValidatorStoreConfig; @@ -55,6 +55,8 @@ pub struct Config { pub graffiti: Option, /// Graffiti file to load per validator graffitis. pub graffiti_file: Option, + /// GraffitiPolicy to append client version info + pub graffiti_policy: Option, /// Configuration for the HTTP REST API. pub http_api: validator_http_api::Config, /// Configuration for the HTTP REST API. 
@@ -119,6 +121,7 @@ impl Default for Config { long_timeouts_multiplier: 1, graffiti: None, graffiti_file: None, + graffiti_policy: None, http_api: <_>::default(), http_metrics: <_>::default(), beacon_node_fallback: <_>::default(), @@ -233,6 +236,12 @@ impl Config { } } + config.graffiti_policy = if validator_client_config.graffiti_append { + Some(GraffitiPolicy::AppendClientVersions) + } else { + Some(GraffitiPolicy::PreserveUserGraffiti) + }; + if let Some(input_fee_recipient) = validator_client_config.suggested_fee_recipient { config.validator_store.fee_recipient = Some(input_fee_recipient); } diff --git a/validator_client/src/lib.rs b/validator_client/src/lib.rs index 71bdde10b02..23541cf6e28 100644 --- a/validator_client/src/lib.rs +++ b/validator_client/src/lib.rs @@ -486,7 +486,8 @@ impl ProductionValidatorClient { .executor(context.executor.clone()) .chain_spec(context.eth2_config.spec.clone()) .graffiti(config.graffiti) - .graffiti_file(config.graffiti_file.clone()); + .graffiti_file(config.graffiti_file.clone()) + .graffiti_policy(config.graffiti_policy); // If we have proposer nodes, add them to the block service builder. 
if proposer_nodes_num > 0 { diff --git a/validator_client/validator_services/src/block_service.rs b/validator_client/validator_services/src/block_service.rs index 23658af03fd..625f8db7cb9 100644 --- a/validator_client/validator_services/src/block_service.rs +++ b/validator_client/validator_services/src/block_service.rs @@ -1,5 +1,6 @@ use beacon_node_fallback::{ApiTopic, BeaconNodeFallback, Error as FallbackError, Errors}; use bls::PublicKeyBytes; +use eth2::types::GraffitiPolicy; use eth2::{BeaconNodeHttpClient, StatusCode}; use graffiti_file::{GraffitiFile, determine_graffiti}; use logging::crit; @@ -50,6 +51,7 @@ pub struct BlockServiceBuilder { chain_spec: Option>, graffiti: Option, graffiti_file: Option, + graffiti_policy: Option, } impl BlockServiceBuilder { @@ -63,6 +65,7 @@ impl BlockServiceBuilder { chain_spec: None, graffiti: None, graffiti_file: None, + graffiti_policy: None, } } @@ -106,6 +109,11 @@ impl BlockServiceBuilder { self } + pub fn graffiti_policy(mut self, graffiti_policy: Option) -> Self { + self.graffiti_policy = graffiti_policy; + self + } + pub fn build(self) -> Result, String> { Ok(BlockService { inner: Arc::new(Inner { @@ -127,6 +135,7 @@ impl BlockServiceBuilder { proposer_nodes: self.proposer_nodes, graffiti: self.graffiti, graffiti_file: self.graffiti_file, + graffiti_policy: self.graffiti_policy, }), }) } @@ -192,6 +201,7 @@ pub struct Inner { chain_spec: Arc, graffiti: Option, graffiti_file: Option, + graffiti_policy: Option, } /// Attempts to produce attestations for any block producer(s) at the start of the epoch. 
@@ -466,6 +476,7 @@ impl BlockService { randao_reveal_ref, graffiti.as_ref(), builder_boost_factor, + self_ref.graffiti_policy, ) .await }) @@ -492,6 +503,7 @@ impl BlockService { randao_reveal_ref, graffiti.as_ref(), builder_boost_factor, + self_ref.graffiti_policy, ) .await .map_err(|e| { From a39e991557f6d16f0529eaca1b94de57ccc47f2f Mon Sep 17 00:00:00 2001 From: ethDreamer <37123614+ethDreamer@users.noreply.github.com> Date: Tue, 16 Dec 2025 00:45:45 -0600 Subject: [PATCH 70/74] Gloas(EIP-7732): Containers / Constants (#7923) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * #7850 This is the first round of the conga line! 🎉 Just spec constants and container changes so far. Co-Authored-By: shane-moore Co-Authored-By: Mark Mackey Co-Authored-By: Shane K Moore <41407272+shane-moore@users.noreply.github.com> Co-Authored-By: Eitan Seri- Levi Co-Authored-By: ethDreamer <37123614+ethDreamer@users.noreply.github.com> Co-Authored-By: Jimmy Chen Co-Authored-By: Jimmy Chen Co-Authored-By: Michael Sproul --- Makefile | 19 ++- .../beacon_chain/src/beacon_block_streamer.rs | 5 +- beacon_node/beacon_chain/src/beacon_chain.rs | 55 +------- beacon_node/beacon_chain/src/errors.rs | 2 + beacon_node/beacon_chain/src/test_utils.rs | 4 +- beacon_node/beacon_chain/tests/store_tests.rs | 6 +- beacon_node/execution_layer/src/engine_api.rs | 28 ---- .../src/engine_api/new_payload_request.rs | 17 +-- beacon_node/execution_layer/src/lib.rs | 16 +-- .../test_utils/execution_block_generator.rs | 8 +- .../src/test_utils/mock_builder.rs | 48 ++----- .../http_api/tests/interactive_tests.rs | 10 +- beacon_node/http_api/tests/status_tests.rs | 6 +- .../src/network_beacon_processor/tests.rs | 3 +- beacon_node/store/src/partial_beacon_state.rs | 31 ++++- consensus/state_processing/src/genesis.rs | 5 +- .../src/per_block_processing.rs | 13 +- .../state_processing/src/upgrade/gloas.rs | 21 ++- .../indexed_payload_attestation.rs | 36 +++++ 
consensus/types/src/attestation/mod.rs | 8 ++ .../src/attestation/payload_attestation.rs | 31 +++++ .../attestation/payload_attestation_data.rs | 28 ++++ .../payload_attestation_message.rs | 26 ++++ consensus/types/src/block/beacon_block.rs | 33 ++++- .../types/src/block/beacon_block_body.rs | 130 ++++++++++-------- .../types/src/block/signed_beacon_block.rs | 82 +++-------- consensus/types/src/builder/builder_bid.rs | 19 +-- .../src/builder/builder_pending_payment.rs | 36 +++++ .../src/builder/builder_pending_withdrawal.rs | 40 ++++++ consensus/types/src/builder/mod.rs | 6 +- consensus/types/src/core/chain_spec.rs | 21 +++ consensus/types/src/core/eth_spec.rs | 42 +++++- consensus/types/src/execution/dumb_macros.rs | 108 +++++++++++++++ .../types/src/execution/execution_payload.rs | 5 +- .../src/execution/execution_payload_bid.rs | 40 ++++++ .../execution/execution_payload_envelope.rs | 36 +++++ .../src/execution/execution_payload_header.rs | 104 +++----------- consensus/types/src/execution/mod.rs | 19 ++- consensus/types/src/execution/payload.rs | 53 ++----- .../execution/signed_execution_payload_bid.rs | 35 +++++ .../signed_execution_payload_envelope.rs | 24 ++++ consensus/types/src/fork/fork_context.rs | 2 + consensus/types/src/lib.rs | 10 ++ consensus/types/src/light_client/error.rs | 1 + .../light_client/light_client_bootstrap.rs | 45 ++---- .../light_client_finality_update.rs | 45 ++---- .../src/light_client/light_client_header.rs | 78 ++--------- .../light_client_optimistic_update.rs | 35 ++--- .../src/light_client/light_client_update.rs | 69 ++-------- consensus/types/src/light_client/mod.rs | 10 +- consensus/types/src/state/beacon_state.rs | 62 +++++++-- testing/ef_tests/src/cases/operations.rs | 3 + 52 files changed, 930 insertions(+), 689 deletions(-) create mode 100644 consensus/types/src/attestation/indexed_payload_attestation.rs create mode 100644 consensus/types/src/attestation/payload_attestation.rs create mode 100644 
consensus/types/src/attestation/payload_attestation_data.rs create mode 100644 consensus/types/src/attestation/payload_attestation_message.rs create mode 100644 consensus/types/src/builder/builder_pending_payment.rs create mode 100644 consensus/types/src/builder/builder_pending_withdrawal.rs create mode 100644 consensus/types/src/execution/dumb_macros.rs create mode 100644 consensus/types/src/execution/execution_payload_bid.rs create mode 100644 consensus/types/src/execution/execution_payload_envelope.rs create mode 100644 consensus/types/src/execution/signed_execution_payload_bid.rs create mode 100644 consensus/types/src/execution/signed_execution_payload_envelope.rs diff --git a/Makefile b/Makefile index c14f1d712a7..c1cccb92705 100644 --- a/Makefile +++ b/Makefile @@ -30,6 +30,11 @@ TEST_FEATURES ?= # Cargo profile for regular builds. PROFILE ?= release +# List of all hard forks up to gloas. This list is used to set env variables for several tests so that +# they run for different forks. +# TODO(EIP-7732) Remove this once we extend network tests to support gloas and use RECENT_FORKS instead +RECENT_FORKS_BEFORE_GLOAS=electra fulu + # List of all recent hard forks. This list is used to set env variables for http_api tests RECENT_FORKS=electra fulu gloas @@ -197,29 +202,31 @@ run-ef-tests: cargo nextest run --release -p ef_tests --features "ef_tests,$(EF_TEST_FEATURES),fake_crypto" ./$(EF_TESTS)/check_all_files_accessed.py $(EF_TESTS)/.accessed_file_log.txt $(EF_TESTS)/consensus-spec-tests -# Run the tests in the `beacon_chain` crate for recent forks. -test-beacon-chain: $(patsubst %,test-beacon-chain-%,$(RECENT_FORKS)) +# Run the tests in the `beacon_chain` crate for all known forks. 
+# TODO(EIP-7732) Extend to support gloas by using RECENT_FORKS instead +test-beacon-chain: $(patsubst %,test-beacon-chain-%,$(RECENT_FORKS_BEFORE_GLOAS)) test-beacon-chain-%: env FORK_NAME=$* cargo nextest run --release --features "fork_from_env,slasher/lmdb,$(TEST_FEATURES)" -p beacon_chain # Run the tests in the `http_api` crate for recent forks. -test-http-api: $(patsubst %,test-http-api-%,$(RECENT_FORKS)) +test-http-api: $(patsubst %,test-http-api-%,$(RECENT_FORKS_BEFORE_GLOAS)) test-http-api-%: env FORK_NAME=$* cargo nextest run --release --features "beacon_chain/fork_from_env" -p http_api # Run the tests in the `operation_pool` crate for all known forks. -test-op-pool: $(patsubst %,test-op-pool-%,$(RECENT_FORKS)) +test-op-pool: $(patsubst %,test-op-pool-%,$(RECENT_FORKS_BEFORE_GLOAS)) test-op-pool-%: env FORK_NAME=$* cargo nextest run --release \ --features "beacon_chain/fork_from_env,$(TEST_FEATURES)"\ -p operation_pool -# Run the tests in the `network` crate for recent forks. -test-network: $(patsubst %,test-network-%,$(RECENT_FORKS)) +# Run the tests in the `network` crate for all known forks. 
+# TODO(EIP-7732) Extend to support gloas by using RECENT_FORKS instead +test-network: $(patsubst %,test-network-%,$(RECENT_FORKS_BEFORE_GLOAS)) test-network-%: env FORK_NAME=$* cargo nextest run --release \ diff --git a/beacon_node/beacon_chain/src/beacon_block_streamer.rs b/beacon_node/beacon_chain/src/beacon_block_streamer.rs index 7b3bb03e568..a462376cc03 100644 --- a/beacon_node/beacon_chain/src/beacon_block_streamer.rs +++ b/beacon_node/beacon_chain/src/beacon_block_streamer.rs @@ -715,8 +715,9 @@ mod tests { harness } + // TODO(EIP-7732) Extend this test for gloas #[tokio::test] - async fn check_all_blocks_from_altair_to_gloas() { + async fn check_all_blocks_from_altair_to_fulu() { let slots_per_epoch = MinimalEthSpec::slots_per_epoch() as usize; let num_epochs = 12; let bellatrix_fork_epoch = 2usize; @@ -724,7 +725,6 @@ mod tests { let deneb_fork_epoch = 6usize; let electra_fork_epoch = 8usize; let fulu_fork_epoch = 10usize; - let gloas_fork_epoch = 12usize; let num_blocks_produced = num_epochs * slots_per_epoch; let mut spec = test_spec::(); @@ -734,7 +734,6 @@ mod tests { spec.deneb_fork_epoch = Some(Epoch::new(deneb_fork_epoch as u64)); spec.electra_fork_epoch = Some(Epoch::new(electra_fork_epoch as u64)); spec.fulu_fork_epoch = Some(Epoch::new(fulu_fork_epoch as u64)); - spec.gloas_fork_epoch = Some(Epoch::new(gloas_fork_epoch as u64)); let spec = Arc::new(spec); let harness = get_harness(VALIDATOR_COUNT, spec.clone()); diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs index 2107f06e1e3..46ba14f596b 100644 --- a/beacon_node/beacon_chain/src/beacon_chain.rs +++ b/beacon_node/beacon_chain/src/beacon_chain.rs @@ -5795,60 +5795,7 @@ impl BeaconChain { execution_payload_value, ) } - BeaconState::Gloas(_) => { - let ( - payload, - kzg_commitments, - maybe_blobs_and_proofs, - maybe_requests, - execution_payload_value, - ) = block_contents - .ok_or(BlockProductionError::MissingExecutionPayload)? 
- .deconstruct(); - - ( - BeaconBlock::Gloas(BeaconBlockGloas { - slot, - proposer_index, - parent_root, - state_root: Hash256::zero(), - body: BeaconBlockBodyGloas { - randao_reveal, - eth1_data, - graffiti, - proposer_slashings: proposer_slashings - .try_into() - .map_err(BlockProductionError::SszTypesError)?, - attester_slashings: attester_slashings_electra - .try_into() - .map_err(BlockProductionError::SszTypesError)?, - attestations: attestations_electra - .try_into() - .map_err(BlockProductionError::SszTypesError)?, - deposits: deposits - .try_into() - .map_err(BlockProductionError::SszTypesError)?, - voluntary_exits: voluntary_exits - .try_into() - .map_err(BlockProductionError::SszTypesError)?, - sync_aggregate: sync_aggregate - .ok_or(BlockProductionError::MissingSyncAggregate)?, - execution_payload: payload - .try_into() - .map_err(|_| BlockProductionError::InvalidPayloadFork)?, - bls_to_execution_changes: bls_to_execution_changes - .try_into() - .map_err(BlockProductionError::SszTypesError)?, - blob_kzg_commitments: kzg_commitments - .ok_or(BlockProductionError::InvalidPayloadFork)?, - execution_requests: maybe_requests - .ok_or(BlockProductionError::MissingExecutionRequests)?, - }, - }), - maybe_blobs_and_proofs, - execution_payload_value, - ) - } + BeaconState::Gloas(_) => return Err(BlockProductionError::GloasNotImplemented), }; let block = SignedBeaconBlock::from_block( diff --git a/beacon_node/beacon_chain/src/errors.rs b/beacon_node/beacon_chain/src/errors.rs index 8f615baab46..b021df2c33b 100644 --- a/beacon_node/beacon_chain/src/errors.rs +++ b/beacon_node/beacon_chain/src/errors.rs @@ -320,6 +320,8 @@ pub enum BlockProductionError { FailedToBuildBlobSidecars(String), MissingExecutionRequests, SszTypesError(ssz_types::Error), + // TODO(gloas): Remove this once Gloas is implemented + GloasNotImplemented, } easy_from_to!(BlockProcessingError, BlockProductionError); diff --git a/beacon_node/beacon_chain/src/test_utils.rs 
b/beacon_node/beacon_chain/src/test_utils.rs index 500c0b22d61..6d17d6d85c5 100644 --- a/beacon_node/beacon_chain/src/test_utils.rs +++ b/beacon_node/beacon_chain/src/test_utils.rs @@ -3300,9 +3300,7 @@ pub fn generate_rand_block_and_blobs( SignedBeaconBlock::Fulu(SignedBeaconBlockFulu { ref mut message, .. }) => add_blob_transactions!(message, FullPayloadFulu, num_blobs, rng, fork_name), - SignedBeaconBlock::Gloas(SignedBeaconBlockGloas { - ref mut message, .. - }) => add_blob_transactions!(message, FullPayloadGloas, num_blobs, rng, fork_name), + // TODO(EIP-7732) Add `SignedBeaconBlock::Gloas` variant _ => return (block, blob_sidecars), }; diff --git a/beacon_node/beacon_chain/tests/store_tests.rs b/beacon_node/beacon_chain/tests/store_tests.rs index cc49f871843..ba0621ae720 100644 --- a/beacon_node/beacon_chain/tests/store_tests.rs +++ b/beacon_node/beacon_chain/tests/store_tests.rs @@ -159,6 +159,7 @@ fn get_states_descendant_of_block( .collect() } +// TODO(EIP-7732) Extend to support gloas #[tokio::test] async fn light_client_bootstrap_test() { let spec = test_spec::(); @@ -206,7 +207,6 @@ async fn light_client_bootstrap_test() { LightClientBootstrap::Deneb(lc_bootstrap) => lc_bootstrap.header.beacon.slot, LightClientBootstrap::Electra(lc_bootstrap) => lc_bootstrap.header.beacon.slot, LightClientBootstrap::Fulu(lc_bootstrap) => lc_bootstrap.header.beacon.slot, - LightClientBootstrap::Gloas(lc_bootstrap) => lc_bootstrap.header.beacon.slot, }; assert_eq!( @@ -1581,6 +1581,10 @@ async fn proposer_duties_from_head_fulu() { } /// Test that we can compute the proposer shuffling for the Gloas fork epoch itself using lookahead! 
+// TODO(EIP-7732): Extend to gloas +// `state.latest_execution_payload_header()` not available in Gloas +// called from `add_block_at_slot` -> `make_block` -> `produce_block_on_state` -> `produce_partial_beacon_block` -> `get_execution_payload` -> `Error` +#[ignore] #[tokio::test] async fn proposer_lookahead_gloas_fork_epoch() { let gloas_fork_epoch = Epoch::new(4); diff --git a/beacon_node/execution_layer/src/engine_api.rs b/beacon_node/execution_layer/src/engine_api.rs index 88567ac6e12..32090bccfc9 100644 --- a/beacon_node/execution_layer/src/engine_api.rs +++ b/beacon_node/execution_layer/src/engine_api.rs @@ -541,34 +541,6 @@ impl ExecutionPayloadBodyV1 { )) } } - ExecutionPayloadHeader::Gloas(header) => { - if let Some(withdrawals) = self.withdrawals { - Ok(ExecutionPayload::Gloas(ExecutionPayloadGloas { - parent_hash: header.parent_hash, - fee_recipient: header.fee_recipient, - state_root: header.state_root, - receipts_root: header.receipts_root, - logs_bloom: header.logs_bloom, - prev_randao: header.prev_randao, - block_number: header.block_number, - gas_limit: header.gas_limit, - gas_used: header.gas_used, - timestamp: header.timestamp, - extra_data: header.extra_data, - base_fee_per_gas: header.base_fee_per_gas, - block_hash: header.block_hash, - transactions: self.transactions, - withdrawals, - blob_gas_used: header.blob_gas_used, - excess_blob_gas: header.excess_blob_gas, - })) - } else { - Err(format!( - "block {} is post capella but payload body doesn't have withdrawals", - header.block_hash - )) - } - } } } } diff --git a/beacon_node/execution_layer/src/engine_api/new_payload_request.rs b/beacon_node/execution_layer/src/engine_api/new_payload_request.rs index 617d2e01129..ba94296b859 100644 --- a/beacon_node/execution_layer/src/engine_api/new_payload_request.rs +++ b/beacon_node/execution_layer/src/engine_api/new_payload_request.rs @@ -172,6 +172,7 @@ impl<'block, E: EthSpec> NewPayloadRequest<'block, E> { } } +//TODO(EIP7732): Consider implementing 
these as methods on the NewPayloadRequest struct impl<'a, E: EthSpec> TryFrom> for NewPayloadRequest<'a, E> { type Error = BeaconStateError; @@ -220,17 +221,7 @@ impl<'a, E: EthSpec> TryFrom> for NewPayloadRequest<'a, E> parent_beacon_block_root: block_ref.parent_root, execution_requests: &block_ref.body.execution_requests, })), - BeaconBlockRef::Gloas(block_ref) => Ok(Self::Gloas(NewPayloadRequestGloas { - execution_payload: &block_ref.body.execution_payload.execution_payload, - versioned_hashes: block_ref - .body - .blob_kzg_commitments - .iter() - .map(kzg_commitment_to_versioned_hash) - .collect(), - parent_beacon_block_root: block_ref.parent_root, - execution_requests: &block_ref.body.execution_requests, - })), + BeaconBlockRef::Gloas(_) => Err(Self::Error::IncorrectStateVariant), } } } @@ -251,11 +242,15 @@ impl<'a, E: EthSpec> TryFrom> for NewPayloadRequest<' ExecutionPayloadRef::Deneb(_) => Err(Self::Error::IncorrectStateVariant), ExecutionPayloadRef::Electra(_) => Err(Self::Error::IncorrectStateVariant), ExecutionPayloadRef::Fulu(_) => Err(Self::Error::IncorrectStateVariant), + //TODO(EIP7732): Probably time to just get rid of this ExecutionPayloadRef::Gloas(_) => Err(Self::Error::IncorrectStateVariant), } } } +// TODO(EIP-7732) build out the following when it's needed like in Mark's branch +// impl<'a, E: EthSpec> TryFrom> for NewPayloadRequest { + #[cfg(test)] mod test { use crate::versioned_hashes::Error as VersionedHashError; diff --git a/beacon_node/execution_layer/src/lib.rs b/beacon_node/execution_layer/src/lib.rs index 34b1832894e..554668dd8a7 100644 --- a/beacon_node/execution_layer/src/lib.rs +++ b/beacon_node/execution_layer/src/lib.rs @@ -55,8 +55,8 @@ use types::{ }; use types::{ BeaconStateError, BlindedPayload, ChainSpec, Epoch, ExecPayload, ExecutionPayloadBellatrix, - ExecutionPayloadCapella, ExecutionPayloadElectra, ExecutionPayloadFulu, ExecutionPayloadGloas, - FullPayload, ProposerPreparationData, Slot, + ExecutionPayloadCapella, 
ExecutionPayloadElectra, ExecutionPayloadFulu, FullPayload, + ProposerPreparationData, Slot, }; mod block_hash; @@ -131,13 +131,6 @@ impl TryFrom> for ProvenancedPayload BlockProposalContents::PayloadAndBlobs { - payload: ExecutionPayloadHeader::Gloas(builder_bid.header).into(), - block_value: builder_bid.value, - kzg_commitments: builder_bid.blob_kzg_commitments, - blobs_and_proofs: None, - requests: Some(builder_bid.execution_requests), - }, }; Ok(ProvenancedPayload::Builder( BlockProposalContentsType::Blinded(block_proposal_contents), @@ -1368,6 +1361,7 @@ impl ExecutionLayer { } /// Maps to the `engine_newPayload` JSON-RPC call. + /// TODO(EIP-7732) figure out how and why Mark relaxed new_payload_request param's typ to NewPayloadRequest pub async fn notify_new_payload( &self, new_payload_request: NewPayloadRequest<'_, E>, @@ -1839,10 +1833,12 @@ impl ExecutionLayer { ForkName::Deneb => ExecutionPayloadDeneb::default().into(), ForkName::Electra => ExecutionPayloadElectra::default().into(), ForkName::Fulu => ExecutionPayloadFulu::default().into(), - ForkName::Gloas => ExecutionPayloadGloas::default().into(), ForkName::Base | ForkName::Altair => { return Err(Error::InvalidForkForPayload); } + ForkName::Gloas => { + return Err(Error::InvalidForkForPayload); + } }; return Ok(Some(payload)); } diff --git a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs index 89d2994ce28..6b247a4cd49 100644 --- a/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs +++ b/beacon_node/execution_layer/src/test_utils/execution_block_generator.rs @@ -909,12 +909,8 @@ pub fn generate_genesis_header( *header.transactions_root_mut() = empty_transactions_root; Some(header) } - ForkName::Gloas => { - let mut header = ExecutionPayloadHeader::Gloas(<_>::default()); - *header.block_hash_mut() = genesis_block_hash.unwrap_or_default(); - *header.transactions_root_mut() = 
empty_transactions_root; - Some(header) - } + // TODO(EIP-7732): need to look into this + ForkName::Gloas => None, } } diff --git a/beacon_node/execution_layer/src/test_utils/mock_builder.rs b/beacon_node/execution_layer/src/test_utils/mock_builder.rs index 1d4f36b62c5..884aa9bf47a 100644 --- a/beacon_node/execution_layer/src/test_utils/mock_builder.rs +++ b/beacon_node/execution_layer/src/test_utils/mock_builder.rs @@ -31,7 +31,7 @@ use tree_hash::TreeHash; use types::ExecutionBlockHash; use types::builder_bid::{ BuilderBid, BuilderBidBellatrix, BuilderBidCapella, BuilderBidDeneb, BuilderBidElectra, - BuilderBidFulu, BuilderBidGloas, SignedBuilderBid, + BuilderBidFulu, SignedBuilderBid, }; use types::{ Address, BeaconState, ChainSpec, Epoch, EthSpec, ExecPayload, ExecutionPayload, @@ -117,9 +117,6 @@ impl BidStuff for BuilderBid { ExecutionPayloadHeaderRefMut::Fulu(header) => { header.fee_recipient = fee_recipient; } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.fee_recipient = fee_recipient; - } } } @@ -140,9 +137,6 @@ impl BidStuff for BuilderBid { ExecutionPayloadHeaderRefMut::Fulu(header) => { header.gas_limit = gas_limit; } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.gas_limit = gas_limit; - } } } @@ -167,9 +161,6 @@ impl BidStuff for BuilderBid { ExecutionPayloadHeaderRefMut::Fulu(header) => { header.parent_hash = ExecutionBlockHash::from_root(parent_hash); } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.parent_hash = ExecutionBlockHash::from_root(parent_hash); - } } } @@ -190,9 +181,6 @@ impl BidStuff for BuilderBid { ExecutionPayloadHeaderRefMut::Fulu(header) => { header.prev_randao = prev_randao; } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.prev_randao = prev_randao; - } } } @@ -213,9 +201,6 @@ impl BidStuff for BuilderBid { ExecutionPayloadHeaderRefMut::Fulu(header) => { header.block_number = block_number; } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.block_number = 
block_number; - } } } @@ -236,9 +221,6 @@ impl BidStuff for BuilderBid { ExecutionPayloadHeaderRefMut::Fulu(header) => { header.timestamp = timestamp; } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.timestamp = timestamp; - } } } @@ -259,9 +241,6 @@ impl BidStuff for BuilderBid { ExecutionPayloadHeaderRefMut::Fulu(header) => { header.withdrawals_root = withdrawals_root; } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.withdrawals_root = withdrawals_root; - } } } @@ -295,10 +274,6 @@ impl BidStuff for BuilderBid { header.extra_data = extra_data; header.block_hash = ExecutionBlockHash::from_root(header.tree_hash_root()); } - ExecutionPayloadHeaderRefMut::Gloas(header) => { - header.extra_data = extra_data; - header.block_hash = ExecutionBlockHash::from_root(header.tree_hash_root()); - } } } } @@ -496,8 +471,9 @@ impl MockBuilder { SignedBlindedBeaconBlock::Fulu(block) => { block.message.body.execution_payload.tree_hash_root() } - SignedBlindedBeaconBlock::Gloas(block) => { - block.message.body.execution_payload.tree_hash_root() + SignedBlindedBeaconBlock::Gloas(_) => { + // TODO(EIP7732) Check if this is how we want to do error handling for gloas + return Err("invalid fork".to_string()); } }; let block_hash = block @@ -613,18 +589,10 @@ impl MockBuilder { ) = payload_response.into(); match fork { - ForkName::Gloas => BuilderBid::Gloas(BuilderBidGloas { - header: payload - .as_gloas() - .map_err(|_| "incorrect payload variant".to_string())? 
- .into(), - blob_kzg_commitments: maybe_blobs_bundle - .map(|b| b.commitments.clone()) - .unwrap_or_default(), - value: self.get_bid_value(value), - pubkey: self.builder_sk.public_key().compress(), - execution_requests: maybe_requests.unwrap_or_default(), - }), + ForkName::Gloas => { + // TODO(EIP7732) Check if this is how we want to do error handling for gloas + return Err("invalid fork".to_string()); + } ForkName::Fulu => BuilderBid::Fulu(BuilderBidFulu { header: payload .as_fulu() diff --git a/beacon_node/http_api/tests/interactive_tests.rs b/beacon_node/http_api/tests/interactive_tests.rs index ce61c821b57..b04c812773a 100644 --- a/beacon_node/http_api/tests/interactive_tests.rs +++ b/beacon_node/http_api/tests/interactive_tests.rs @@ -61,7 +61,10 @@ async fn state_by_root_pruned_from_fork_choice() { type E = MinimalEthSpec; let validator_count = 24; - let spec = ForkName::latest().make_genesis_spec(E::default_spec()); + // TODO(EIP-7732): extend test for Gloas by reverting back to using `ForkName::latest()` + // Issue is that this test does block production via `extend_chain_with_sync` which expects to be able to use `state.latest_execution_payload_header` during block production, but Gloas uses `latest_execution_bid` instead + // This will be resolved in a subsequent block processing PR + let spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); let tester = InteractiveTester::::new_with_initializer_and_mutator( Some(spec.clone()), @@ -401,7 +404,10 @@ pub async fn proposer_boost_re_org_test( assert!(head_slot > 0); // Test using the latest fork so that we simulate conditions as similar to mainnet as possible. 
- let mut spec = ForkName::latest().make_genesis_spec(E::default_spec()); + // TODO(EIP-7732): extend test for Gloas by reverting back to using `ForkName::latest()` + // Issue is that `get_validator_blocks_v3` below expects to be able to use `state.latest_execution_payload_header` during `produce_block_on_state` -> `produce_partial_beacon_block` -> `get_execution_payload`, but gloas will no longer support this state field + // This will be resolved in a subsequent block processing PR + let mut spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); spec.terminal_total_difficulty = Uint256::from(1); // Ensure there are enough validators to have `attesters_per_slot`. diff --git a/beacon_node/http_api/tests/status_tests.rs b/beacon_node/http_api/tests/status_tests.rs index fd5e282c5bd..556b75cb85a 100644 --- a/beacon_node/http_api/tests/status_tests.rs +++ b/beacon_node/http_api/tests/status_tests.rs @@ -12,8 +12,10 @@ type E = MinimalEthSpec; /// Create a new test environment that is post-merge with `chain_depth` blocks. async fn post_merge_tester(chain_depth: u64, validator_count: u64) -> InteractiveTester { - // Test using latest fork so that we simulate conditions as similar to mainnet as possible. 
- let mut spec = ForkName::latest().make_genesis_spec(E::default_spec()); + // TODO(EIP-7732): extend tests for Gloas by reverting back to using `ForkName::latest()` + // Issue is that these tests do block production via `extend_chain_with_sync` which expects to be able to use `state.latest_execution_payload_header` during block production, but Gloas uses `latest_execution_bid` instead + // This will be resolved in a subsequent block processing PR + let mut spec = ForkName::Fulu.make_genesis_spec(E::default_spec()); spec.terminal_total_difficulty = Uint256::from(1); let tester = InteractiveTester::::new(Some(spec), validator_count as usize).await; diff --git a/beacon_node/network/src/network_beacon_processor/tests.rs b/beacon_node/network/src/network_beacon_processor/tests.rs index 841a8679cfd..ed04fe7bb97 100644 --- a/beacon_node/network/src/network_beacon_processor/tests.rs +++ b/beacon_node/network/src/network_beacon_processor/tests.rs @@ -1699,8 +1699,9 @@ async fn test_blobs_by_range_spans_fulu_fork() { spec.fulu_fork_epoch = Some(Epoch::new(1)); spec.gloas_fork_epoch = Some(Epoch::new(2)); + // This test focuses on Electra→Fulu blob counts (epoch 0 to 1). Build 62 blocks since no need for Gloas activation at slot 64. 
let mut rig = TestRig::new_parametric( - 64, + 62, BeaconProcessorConfig::default(), NodeCustodyType::Fullnode, spec, diff --git a/beacon_node/store/src/partial_beacon_state.rs b/beacon_node/store/src/partial_beacon_state.rs index 8ee37169aca..9e5e1ebbb47 100644 --- a/beacon_node/store/src/partial_beacon_state.rs +++ b/beacon_node/store/src/partial_beacon_state.rs @@ -116,11 +116,12 @@ where partial_getter(rename = "latest_execution_payload_header_fulu") )] pub latest_execution_payload_header: ExecutionPayloadHeaderFulu, + #[superstruct( only(Gloas), - partial_getter(rename = "latest_execution_payload_header_gloas") + partial_getter(rename = "latest_execution_payload_bid_gloas") )] - pub latest_execution_payload_header: ExecutionPayloadHeaderGloas, + pub latest_execution_payload_bid: ExecutionPayloadBid, // Capella #[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas))] @@ -155,6 +156,23 @@ where pub pending_consolidations: List, #[superstruct(only(Fulu, Gloas))] pub proposer_lookahead: Vector, + + // Gloas + #[superstruct(only(Gloas))] + pub execution_payload_availability: BitVector, + + #[superstruct(only(Gloas))] + pub builder_pending_payments: Vector, + + #[superstruct(only(Gloas))] + pub builder_pending_withdrawals: + List, + + #[superstruct(only(Gloas))] + pub latest_block_hash: ExecutionBlockHash, + + #[superstruct(only(Gloas))] + pub latest_withdrawals_root: Hash256, } impl PartialBeaconState { @@ -466,7 +484,7 @@ impl TryInto> for PartialBeaconState { current_sync_committee, next_sync_committee, inactivity_scores, - latest_execution_payload_header, + latest_execution_payload_bid, next_withdrawal_index, next_withdrawal_validator_index, deposit_requests_start_index, @@ -478,7 +496,12 @@ impl TryInto> for PartialBeaconState { pending_deposits, pending_partial_withdrawals, pending_consolidations, - proposer_lookahead + proposer_lookahead, + execution_payload_availability, + builder_pending_payments, + builder_pending_withdrawals, + latest_block_hash, + 
latest_withdrawals_root ], [historical_summaries] ), diff --git a/consensus/state_processing/src/genesis.rs b/consensus/state_processing/src/genesis.rs index d00e1fcfacc..1575fce22f2 100644 --- a/consensus/state_processing/src/genesis.rs +++ b/consensus/state_processing/src/genesis.rs @@ -168,9 +168,8 @@ pub fn initialize_beacon_state_from_eth1( state.fork_mut().previous_version = spec.gloas_fork_version; // Override latest execution payload header. - if let Some(ExecutionPayloadHeader::Gloas(header)) = execution_payload_header { - *state.latest_execution_payload_header_gloas_mut()? = header.clone(); - } + // Here's where we *would* clone the header but there is no header here so.. + // TODO(EIP7732): check this } // Now that we have our validators, initialize the caches (including the committees) diff --git a/consensus/state_processing/src/per_block_processing.rs b/consensus/state_processing/src/per_block_processing.rs index f78c8c4eb38..07149ff2ee8 100644 --- a/consensus/state_processing/src/per_block_processing.rs +++ b/consensus/state_processing/src/per_block_processing.rs @@ -41,7 +41,6 @@ mod verify_exit; mod verify_proposer_slashing; use crate::common::decrease_balance; - use crate::common::update_progressive_balances_cache::{ initialize_progressive_balances_cache, update_progressive_balances_metrics, }; @@ -173,10 +172,14 @@ pub fn per_block_processing>( // previous block. 
if is_execution_enabled(state, block.body()) { let body = block.body(); + // TODO(EIP-7732): build out process_withdrawals variant for gloas process_withdrawals::(state, body.execution_payload()?, spec)?; process_execution_payload::(state, body, spec)?; } + // TODO(EIP-7732): build out process_execution_bid + // process_execution_bid(state, block, verify_signatures, spec)?; + process_randao(state, block, verify_randao, ctxt, spec)?; process_eth1_data(state, block.body().eth1_data())?; process_operations(state, block.body(), verify_signatures, ctxt, spec)?; @@ -453,12 +456,6 @@ pub fn process_execution_payload>( _ => return Err(BlockProcessingError::IncorrectStateType), } } - ExecutionPayloadHeaderRefMut::Gloas(header_mut) => { - match payload.to_execution_payload_header() { - ExecutionPayloadHeader::Gloas(header) => *header_mut = header, - _ => return Err(BlockProcessingError::IncorrectStateType), - } - } } Ok(()) @@ -470,6 +467,7 @@ pub fn process_execution_payload>( /// repeatedly write code to treat these errors as false. /// https://github.com/ethereum/consensus-specs/blob/dev/specs/bellatrix/beacon-chain.md#is_merge_transition_complete pub fn is_merge_transition_complete(state: &BeaconState) -> bool { + // TODO(EIP7732): check this cause potuz modified this function for god knows what reason if state.fork_name_unchecked().capella_enabled() { true } else if state.fork_name_unchecked().bellatrix_enabled() { @@ -638,6 +636,7 @@ pub fn get_expected_withdrawals( } /// Apply withdrawals to the state. 
+/// TODO(EIP-7732): abstract this out and create gloas variant pub fn process_withdrawals>( state: &mut BeaconState, payload: Payload::Ref<'_>, diff --git a/consensus/state_processing/src/upgrade/gloas.rs b/consensus/state_processing/src/upgrade/gloas.rs index 8bb6991bfbe..d6c353cc2a9 100644 --- a/consensus/state_processing/src/upgrade/gloas.rs +++ b/consensus/state_processing/src/upgrade/gloas.rs @@ -1,5 +1,11 @@ +use bls::Hash256; +use milhouse::{List, Vector}; +use ssz_types::BitVector; use std::mem; -use types::{BeaconState, BeaconStateError as Error, BeaconStateGloas, ChainSpec, EthSpec, Fork}; +use types::{ + BeaconState, BeaconStateError as Error, BeaconStateGloas, BuilderPendingPayment, ChainSpec, + EthSpec, ExecutionPayloadBid, Fork, +}; /// Transform a `Fulu` state into a `Gloas` state. pub fn upgrade_to_gloas( @@ -63,8 +69,8 @@ pub fn upgrade_state_to_gloas( // Sync committees current_sync_committee: pre.current_sync_committee.clone(), next_sync_committee: pre.next_sync_committee.clone(), - // Execution - latest_execution_payload_header: pre.latest_execution_payload_header.upgrade_to_gloas(), + // Execution Bid + latest_execution_payload_bid: ExecutionPayloadBid::default(), // Capella next_withdrawal_index: pre.next_withdrawal_index, next_withdrawal_validator_index: pre.next_withdrawal_validator_index, @@ -79,6 +85,15 @@ pub fn upgrade_state_to_gloas( pending_deposits: pre.pending_deposits.clone(), pending_partial_withdrawals: pre.pending_partial_withdrawals.clone(), pending_consolidations: pre.pending_consolidations.clone(), + // Gloas + execution_payload_availability: BitVector::default(), // All bits set to false initially + builder_pending_payments: Vector::new(vec![ + BuilderPendingPayment::default(); + E::builder_pending_payments_limit() + ])?, + builder_pending_withdrawals: List::default(), // Empty list initially, + latest_block_hash: pre.latest_execution_payload_header.block_hash, + latest_withdrawals_root: Hash256::default(), // Caches 
total_active_balance: pre.total_active_balance, progressive_balances_cache: mem::take(&mut pre.progressive_balances_cache), diff --git a/consensus/types/src/attestation/indexed_payload_attestation.rs b/consensus/types/src/attestation/indexed_payload_attestation.rs new file mode 100644 index 00000000000..4de805570cf --- /dev/null +++ b/consensus/types/src/attestation/indexed_payload_attestation.rs @@ -0,0 +1,36 @@ +use crate::test_utils::TestRandom; +use crate::{EthSpec, ForkName, PayloadAttestationData}; +use bls::AggregateSignature; +use context_deserialize::context_deserialize; +use core::slice::Iter; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use ssz_types::VariableList; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive(TestRandom, TreeHash, Debug, Clone, PartialEq, Encode, Decode, Serialize, Deserialize)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[serde(bound = "E: EthSpec", deny_unknown_fields)] +#[cfg_attr(feature = "arbitrary", arbitrary(bound = "E: EthSpec"))] +#[context_deserialize(ForkName)] +pub struct IndexedPayloadAttestation { + #[serde(with = "ssz_types::serde_utils::quoted_u64_var_list")] + pub attesting_indices: VariableList, + pub data: PayloadAttestationData, + pub signature: AggregateSignature, +} + +impl IndexedPayloadAttestation { + pub fn attesting_indices_iter(&self) -> Iter<'_, u64> { + self.attesting_indices.iter() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::MainnetEthSpec; + + ssz_and_tree_hash_tests!(IndexedPayloadAttestation); +} diff --git a/consensus/types/src/attestation/mod.rs b/consensus/types/src/attestation/mod.rs index 2d2bf74e49a..586d99bd900 100644 --- a/consensus/types/src/attestation/mod.rs +++ b/consensus/types/src/attestation/mod.rs @@ -5,7 +5,11 @@ mod attestation_duty; mod beacon_committee; mod checkpoint; mod indexed_attestation; +mod indexed_payload_attestation; mod participation_flags; +mod 
payload_attestation; +mod payload_attestation_data; +mod payload_attestation_message; mod pending_attestation; mod selection_proof; mod shuffling_id; @@ -26,7 +30,11 @@ pub use checkpoint::Checkpoint; pub use indexed_attestation::{ IndexedAttestation, IndexedAttestationBase, IndexedAttestationElectra, IndexedAttestationRef, }; +pub use indexed_payload_attestation::IndexedPayloadAttestation; pub use participation_flags::ParticipationFlags; +pub use payload_attestation::PayloadAttestation; +pub use payload_attestation_data::PayloadAttestationData; +pub use payload_attestation_message::PayloadAttestationMessage; pub use pending_attestation::PendingAttestation; pub use selection_proof::SelectionProof; pub use shuffling_id::AttestationShufflingId; diff --git a/consensus/types/src/attestation/payload_attestation.rs b/consensus/types/src/attestation/payload_attestation.rs new file mode 100644 index 00000000000..192a4a8fea5 --- /dev/null +++ b/consensus/types/src/attestation/payload_attestation.rs @@ -0,0 +1,31 @@ +use crate::attestation::payload_attestation_data::PayloadAttestationData; +use crate::test_utils::TestRandom; +use crate::{EthSpec, ForkName}; +use bls::AggregateSignature; +use context_deserialize::context_deserialize; +use educe::Educe; +use serde::{Deserialize, Serialize}; +use ssz::BitList; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive(TestRandom, TreeHash, Debug, Clone, Encode, Decode, Serialize, Deserialize, Educe)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[serde(bound = "E: EthSpec", deny_unknown_fields)] +#[cfg_attr(feature = "arbitrary", arbitrary(bound = "E: EthSpec"))] +#[educe(PartialEq, Hash)] +#[context_deserialize(ForkName)] +pub struct PayloadAttestation { + pub aggregation_bits: BitList, + pub data: PayloadAttestationData, + pub signature: AggregateSignature, +} + +#[cfg(test)] +mod payload_attestation_tests { + use super::*; + use 
crate::MinimalEthSpec; + + ssz_and_tree_hash_tests!(PayloadAttestation); +} diff --git a/consensus/types/src/attestation/payload_attestation_data.rs b/consensus/types/src/attestation/payload_attestation_data.rs new file mode 100644 index 00000000000..58d36fd01d5 --- /dev/null +++ b/consensus/types/src/attestation/payload_attestation_data.rs @@ -0,0 +1,28 @@ +use crate::test_utils::TestRandom; +use crate::{ForkName, Hash256, SignedRoot, Slot}; +use context_deserialize::context_deserialize; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive( + TestRandom, TreeHash, Debug, Clone, PartialEq, Eq, Encode, Decode, Serialize, Deserialize, Hash, +)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[context_deserialize(ForkName)] +pub struct PayloadAttestationData { + pub beacon_block_root: Hash256, + pub slot: Slot, + pub payload_present: bool, + pub blob_data_available: bool, +} + +impl SignedRoot for PayloadAttestationData {} + +#[cfg(test)] +mod payload_attestation_data_tests { + use super::*; + + ssz_and_tree_hash_tests!(PayloadAttestationData); +} diff --git a/consensus/types/src/attestation/payload_attestation_message.rs b/consensus/types/src/attestation/payload_attestation_message.rs new file mode 100644 index 00000000000..82e2137b096 --- /dev/null +++ b/consensus/types/src/attestation/payload_attestation_message.rs @@ -0,0 +1,26 @@ +use crate::ForkName; +use crate::attestation::payload_attestation_data::PayloadAttestationData; +use crate::test_utils::TestRandom; +use bls::Signature; +use context_deserialize::context_deserialize; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive(TestRandom, TreeHash, Debug, Clone, PartialEq, Encode, Decode, Serialize, Deserialize)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] 
+#[context_deserialize(ForkName)] +pub struct PayloadAttestationMessage { + #[serde(with = "serde_utils::quoted_u64")] + pub validator_index: u64, + pub data: PayloadAttestationData, + pub signature: Signature, +} + +#[cfg(test)] +mod tests { + use super::*; + + ssz_and_tree_hash_tests!(PayloadAttestationMessage); +} diff --git a/consensus/types/src/block/beacon_block.rs b/consensus/types/src/block/beacon_block.rs index a4e7e800bcc..bee3cdb2746 100644 --- a/consensus/types/src/block/beacon_block.rs +++ b/consensus/types/src/block/beacon_block.rs @@ -15,6 +15,7 @@ use tree_hash_derive::TreeHash; use typenum::Unsigned; use crate::{ + SignedExecutionPayloadBid, attestation::{AttestationBase, AttestationData, IndexedAttestationBase}, block::{ BeaconBlockBodyAltair, BeaconBlockBodyBase, BeaconBlockBodyBellatrix, @@ -694,15 +695,41 @@ impl> EmptyBlock for BeaconBlockGloa deposits: VariableList::empty(), voluntary_exits: VariableList::empty(), sync_aggregate: SyncAggregate::empty(), - execution_payload: Payload::Gloas::default(), bls_to_execution_changes: VariableList::empty(), - blob_kzg_commitments: VariableList::empty(), - execution_requests: ExecutionRequests::default(), + signed_execution_payload_bid: SignedExecutionPayloadBid::empty(), + payload_attestations: VariableList::empty(), + _phantom: PhantomData, }, } } } +// TODO(EIP-7732) Mark's branch had the following implementation but not sure if it's needed so will just add header below for reference +// impl> BeaconBlockEIP7732 { + +// TODO(EIP-7732) Look into whether we can remove this in the future since no blinded blocks post-gloas +impl From>> + for BeaconBlockGloas> +{ + fn from(block: BeaconBlockGloas>) -> Self { + let BeaconBlockGloas { + slot, + proposer_index, + parent_root, + state_root, + body, + } = block; + + BeaconBlockGloas { + slot, + proposer_index, + parent_root, + state_root, + body: body.into(), + } + } +} + // We can convert pre-Bellatrix blocks without payloads into blocks "with" payloads. 
impl From>> for BeaconBlockBase> diff --git a/consensus/types/src/block/beacon_block_body.rs b/consensus/types/src/block/beacon_block_body.rs index f85dd8909e1..1a0b3859002 100644 --- a/consensus/types/src/block/beacon_block_body.rs +++ b/consensus/types/src/block/beacon_block_body.rs @@ -13,18 +13,19 @@ use test_random_derive::TestRandom; use tree_hash::{BYTES_PER_CHUNK, TreeHash}; use tree_hash_derive::TreeHash; +use crate::payload_attestation::PayloadAttestation; use crate::{ + SignedExecutionPayloadBid, attestation::{AttestationBase, AttestationElectra, AttestationRef, AttestationRefMut}, core::{EthSpec, Graffiti, Hash256}, deposit::Deposit, execution::{ AbstractExecPayload, BlindedPayload, BlindedPayloadBellatrix, BlindedPayloadCapella, - BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadGloas, - Eth1Data, ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, - ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, - ExecutionPayloadGloas, ExecutionRequests, FullPayload, FullPayloadBellatrix, - FullPayloadCapella, FullPayloadDeneb, FullPayloadElectra, FullPayloadFulu, - FullPayloadGloas, SignedBlsToExecutionChange, + BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, Eth1Data, ExecutionPayload, + ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, + ExecutionPayloadElectra, ExecutionPayloadFulu, ExecutionPayloadGloas, ExecutionRequests, + FullPayload, FullPayloadBellatrix, FullPayloadCapella, FullPayloadDeneb, + FullPayloadElectra, FullPayloadFulu, SignedBlsToExecutionChange, }, exit::SignedVoluntaryExit, fork::{ForkName, map_fork_name}, @@ -157,17 +158,18 @@ pub struct BeaconBlockBody = FullPay #[superstruct(only(Fulu), partial_getter(rename = "execution_payload_fulu"))] #[serde(flatten)] pub execution_payload: Payload::Fulu, - #[superstruct(only(Gloas), partial_getter(rename = "execution_payload_gloas"))] - #[serde(flatten)] - pub execution_payload: Payload::Gloas, 
#[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas))] pub bls_to_execution_changes: VariableList, - #[superstruct(only(Deneb, Electra, Fulu, Gloas))] + #[superstruct(only(Deneb, Electra, Fulu))] pub blob_kzg_commitments: KzgCommitments, - #[superstruct(only(Electra, Fulu, Gloas))] + #[superstruct(only(Electra, Fulu))] pub execution_requests: ExecutionRequests, - #[superstruct(only(Base, Altair))] + #[superstruct(only(Gloas))] + pub signed_execution_payload_bid: SignedExecutionPayloadBid, + #[superstruct(only(Gloas))] + pub payload_attestations: VariableList, E::MaxPayloadAttestations>, + #[superstruct(only(Base, Altair, Gloas))] #[metastruct(exclude_from(fields))] #[ssz(skip_serializing, skip_deserializing)] #[tree_hash(skip_hashing)] @@ -196,7 +198,7 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, Self::Deneb(body) => Ok(Payload::Ref::from(&body.execution_payload)), Self::Electra(body) => Ok(Payload::Ref::from(&body.execution_payload)), Self::Fulu(body) => Ok(Payload::Ref::from(&body.execution_payload)), - Self::Gloas(body) => Ok(Payload::Ref::from(&body.execution_payload)), + Self::Gloas(_) => Err(BeaconStateError::IncorrectStateVariant), } } @@ -254,16 +256,19 @@ impl<'a, E: EthSpec, Payload: AbstractExecPayload> BeaconBlockBodyRef<'a, E, /// Produces the proof of inclusion for a `KzgCommitment` in `self.blob_kzg_commitments` /// at `index` using an existing proof for the `blob_kzg_commitments` field. 
+ /// TODO(EIP7732) Investigate calling functions since this will no longer work for glas since no block_kzg_commitments in the body anymore pub fn complete_kzg_commitment_merkle_proof( &self, index: usize, kzg_commitments_proof: &[Hash256], ) -> Result, BeaconStateError> { match self { - Self::Base(_) | Self::Altair(_) | Self::Bellatrix(_) | Self::Capella(_) => { - Err(BeaconStateError::IncorrectStateVariant) - } - Self::Deneb(_) | Self::Electra(_) | Self::Fulu(_) | Self::Gloas(_) => { + Self::Base(_) + | Self::Altair(_) + | Self::Bellatrix(_) + | Self::Capella(_) + | Self::Gloas(_) => Err(BeaconStateError::IncorrectStateVariant), + Self::Deneb(_) | Self::Electra(_) | Self::Fulu(_) => { // We compute the branches by generating 2 merkle trees: // 1. Merkle tree for the `blob_kzg_commitments` List object // 2. Merkle tree for the `BeaconBlockBody` container @@ -541,6 +546,46 @@ impl From>> } } +// Post-Fulu block bodies without payloads can be converted into block bodies with payloads +// TODO(EIP-7732) Look into whether we can remove this in the future since no blinded blocks post-gloas +impl From>> + for BeaconBlockBodyGloas> +{ + fn from(body: BeaconBlockBodyGloas>) -> Self { + let BeaconBlockBodyGloas { + randao_reveal, + eth1_data, + graffiti, + proposer_slashings, + attester_slashings, + attestations, + deposits, + voluntary_exits, + sync_aggregate, + bls_to_execution_changes, + signed_execution_payload_bid, + payload_attestations, + _phantom, + } = body; + + BeaconBlockBodyGloas { + randao_reveal, + eth1_data, + graffiti, + proposer_slashings, + attester_slashings, + attestations, + deposits, + voluntary_exits, + sync_aggregate, + bls_to_execution_changes, + signed_execution_payload_bid, + payload_attestations, + _phantom: PhantomData, + } + } +} + // Likewise bodies with payloads can be transformed into bodies without. 
impl From>> for ( @@ -851,10 +896,10 @@ impl From>> deposits, voluntary_exits, sync_aggregate, - execution_payload: FullPayloadGloas { execution_payload }, bls_to_execution_changes, - blob_kzg_commitments, - execution_requests, + signed_execution_payload_bid, + payload_attestations, + _phantom, } = body; ( @@ -868,14 +913,12 @@ impl From>> deposits, voluntary_exits, sync_aggregate, - execution_payload: BlindedPayloadGloas { - execution_payload_header: From::from(&execution_payload), - }, bls_to_execution_changes, - blob_kzg_commitments: blob_kzg_commitments.clone(), - execution_requests, + signed_execution_payload_bid, + payload_attestations, + _phantom: PhantomData, }, - Some(execution_payload), + None, ) } } @@ -1075,39 +1118,8 @@ impl BeaconBlockBodyFulu> { impl BeaconBlockBodyGloas> { pub fn clone_as_blinded(&self) -> BeaconBlockBodyGloas> { - let BeaconBlockBodyGloas { - randao_reveal, - eth1_data, - graffiti, - proposer_slashings, - attester_slashings, - attestations, - deposits, - voluntary_exits, - sync_aggregate, - execution_payload: FullPayloadGloas { execution_payload }, - bls_to_execution_changes, - blob_kzg_commitments, - execution_requests, - } = self; - - BeaconBlockBodyGloas { - randao_reveal: randao_reveal.clone(), - eth1_data: eth1_data.clone(), - graffiti: *graffiti, - proposer_slashings: proposer_slashings.clone(), - attester_slashings: attester_slashings.clone(), - attestations: attestations.clone(), - deposits: deposits.clone(), - voluntary_exits: voluntary_exits.clone(), - sync_aggregate: sync_aggregate.clone(), - execution_payload: BlindedPayloadGloas { - execution_payload_header: execution_payload.into(), - }, - bls_to_execution_changes: bls_to_execution_changes.clone(), - blob_kzg_commitments: blob_kzg_commitments.clone(), - execution_requests: execution_requests.clone(), - } + let (block_body, _payload) = self.clone().into(); + block_body } } diff --git a/consensus/types/src/block/signed_beacon_block.rs 
b/consensus/types/src/block/signed_beacon_block.rs index e8927ee7659..aeb3c18d957 100644 --- a/consensus/types/src/block/signed_beacon_block.rs +++ b/consensus/types/src/block/signed_beacon_block.rs @@ -17,19 +17,17 @@ use crate::{ block::{ BLOB_KZG_COMMITMENTS_INDEX, BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockBellatrix, BeaconBlockBodyBellatrix, BeaconBlockBodyCapella, - BeaconBlockBodyDeneb, BeaconBlockBodyElectra, BeaconBlockBodyFulu, BeaconBlockBodyGloas, - BeaconBlockCapella, BeaconBlockDeneb, BeaconBlockElectra, BeaconBlockFulu, - BeaconBlockGloas, BeaconBlockHeader, BeaconBlockRef, BeaconBlockRefMut, - SignedBeaconBlockHeader, + BeaconBlockBodyDeneb, BeaconBlockBodyElectra, BeaconBlockBodyFulu, BeaconBlockCapella, + BeaconBlockDeneb, BeaconBlockElectra, BeaconBlockFulu, BeaconBlockGloas, BeaconBlockHeader, + BeaconBlockRef, BeaconBlockRefMut, SignedBeaconBlockHeader, }, core::{ChainSpec, Domain, Epoch, EthSpec, Hash256, SignedRoot, SigningData, Slot}, execution::{ AbstractExecPayload, BlindedPayload, BlindedPayloadBellatrix, BlindedPayloadCapella, - BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadGloas, - ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, - ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, - ExecutionPayloadGloas, FullPayload, FullPayloadBellatrix, FullPayloadCapella, - FullPayloadDeneb, FullPayloadElectra, FullPayloadFulu, FullPayloadGloas, + BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, ExecutionPayload, + ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, + ExecutionPayloadElectra, ExecutionPayloadFulu, FullPayload, FullPayloadBellatrix, + FullPayloadCapella, FullPayloadDeneb, FullPayloadElectra, FullPayloadFulu, }, fork::{Fork, ForkName, ForkVersionDecode, InconsistentFork, map_fork_name}, kzg_ext::format_kzg_commitments, @@ -675,59 +673,15 @@ impl SignedBeaconBlockFulu> { } } -impl SignedBeaconBlockGloas> { - 
pub fn into_full_block( - self, - execution_payload: ExecutionPayloadGloas, - ) -> SignedBeaconBlockGloas> { - let SignedBeaconBlockGloas { - message: - BeaconBlockGloas { - slot, - proposer_index, - parent_root, - state_root, - body: - BeaconBlockBodyGloas { - randao_reveal, - eth1_data, - graffiti, - proposer_slashings, - attester_slashings, - attestations, - deposits, - voluntary_exits, - sync_aggregate, - execution_payload: BlindedPayloadGloas { .. }, - bls_to_execution_changes, - blob_kzg_commitments, - execution_requests, - }, - }, - signature, - } = self; +// We can convert gloas blocks without payloads into blocks "with" payloads. +// TODO(EIP-7732) Look into whether we can remove this in the future since no blinded blocks post-gloas +impl From>> + for SignedBeaconBlockGloas> +{ + fn from(signed_block: SignedBeaconBlockGloas>) -> Self { + let SignedBeaconBlockGloas { message, signature } = signed_block; SignedBeaconBlockGloas { - message: BeaconBlockGloas { - slot, - proposer_index, - parent_root, - state_root, - body: BeaconBlockBodyGloas { - randao_reveal, - eth1_data, - graffiti, - proposer_slashings, - attester_slashings, - attestations, - deposits, - voluntary_exits, - sync_aggregate, - execution_payload: FullPayloadGloas { execution_payload }, - bls_to_execution_changes, - blob_kzg_commitments, - execution_requests, - }, - }, + message: message.into(), signature, } } @@ -756,9 +710,7 @@ impl SignedBeaconBlock> { (SignedBeaconBlock::Fulu(block), Some(ExecutionPayload::Fulu(payload))) => { SignedBeaconBlock::Fulu(block.into_full_block(payload)) } - (SignedBeaconBlock::Gloas(block), Some(ExecutionPayload::Gloas(payload))) => { - SignedBeaconBlock::Gloas(block.into_full_block(payload)) - } + (SignedBeaconBlock::Gloas(block), _) => SignedBeaconBlock::Gloas(block.into()), // avoid wildcard matching forks so that compiler will // direct us here when a new fork has been added (SignedBeaconBlock::Bellatrix(_), _) => return None, @@ -766,7 +718,7 @@ impl 
SignedBeaconBlock> { (SignedBeaconBlock::Deneb(_), _) => return None, (SignedBeaconBlock::Electra(_), _) => return None, (SignedBeaconBlock::Fulu(_), _) => return None, - (SignedBeaconBlock::Gloas(_), _) => return None, + // TODO(EIP-7732) Determine if need a match arm for gloas here }; Some(full_block) } diff --git a/consensus/types/src/builder/builder_bid.rs b/consensus/types/src/builder/builder_bid.rs index be9bb281553..1018fadb644 100644 --- a/consensus/types/src/builder/builder_bid.rs +++ b/consensus/types/src/builder/builder_bid.rs @@ -13,8 +13,7 @@ use crate::{ execution::{ ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, - ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, - ExecutionRequests, + ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, ExecutionRequests, }, fork::{ForkName, ForkVersionDecode}, kzg_ext::KzgCommitments, @@ -22,7 +21,7 @@ use crate::{ }; #[superstruct( - variants(Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), + variants(Bellatrix, Capella, Deneb, Electra, Fulu), variant_attributes( derive( PartialEq, @@ -55,11 +54,9 @@ pub struct BuilderBid { pub header: ExecutionPayloadHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "header_fulu"))] pub header: ExecutionPayloadHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "header_gloas"))] - pub header: ExecutionPayloadHeaderGloas, - #[superstruct(only(Deneb, Electra, Fulu, Gloas))] + #[superstruct(only(Deneb, Electra, Fulu))] pub blob_kzg_commitments: KzgCommitments, - #[superstruct(only(Electra, Fulu, Gloas))] + #[superstruct(only(Electra, Fulu))] pub execution_requests: ExecutionRequests, #[serde(with = "serde_utils::quoted_u256")] pub value: Uint256, @@ -92,7 +89,7 @@ impl ForkVersionDecode for BuilderBid { /// SSZ decode with explicit fork variant. 
fn from_ssz_bytes_by_fork(bytes: &[u8], fork_name: ForkName) -> Result { let builder_bid = match fork_name { - ForkName::Altair | ForkName::Base => { + ForkName::Altair | ForkName::Base | ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "unsupported fork for ExecutionPayloadHeader: {fork_name}", ))); @@ -104,7 +101,6 @@ impl ForkVersionDecode for BuilderBid { ForkName::Deneb => BuilderBid::Deneb(BuilderBidDeneb::from_ssz_bytes(bytes)?), ForkName::Electra => BuilderBid::Electra(BuilderBidElectra::from_ssz_bytes(bytes)?), ForkName::Fulu => BuilderBid::Fulu(BuilderBidFulu::from_ssz_bytes(bytes)?), - ForkName::Gloas => BuilderBid::Gloas(BuilderBidGloas::from_ssz_bytes(bytes)?), }; Ok(builder_bid) } @@ -160,10 +156,7 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for BuilderBid { ForkName::Fulu => { Self::Fulu(Deserialize::deserialize(deserializer).map_err(convert_err)?) } - ForkName::Gloas => { - Self::Gloas(Deserialize::deserialize(deserializer).map_err(convert_err)?) 
- } - ForkName::Base | ForkName::Altair => { + ForkName::Base | ForkName::Altair | ForkName::Gloas => { return Err(serde::de::Error::custom(format!( "BuilderBid failed to deserialize: unsupported fork '{}'", context diff --git a/consensus/types/src/builder/builder_pending_payment.rs b/consensus/types/src/builder/builder_pending_payment.rs new file mode 100644 index 00000000000..0f1b68ad970 --- /dev/null +++ b/consensus/types/src/builder/builder_pending_payment.rs @@ -0,0 +1,36 @@ +use crate::test_utils::TestRandom; +use crate::{BuilderPendingWithdrawal, ForkName}; +use context_deserialize::context_deserialize; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive( + Debug, + PartialEq, + Eq, + Hash, + Clone, + Default, + Serialize, + Deserialize, + Encode, + Decode, + TreeHash, + TestRandom, +)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[context_deserialize(ForkName)] +pub struct BuilderPendingPayment { + #[serde(with = "serde_utils::quoted_u64")] + pub weight: u64, + pub withdrawal: BuilderPendingWithdrawal, +} + +#[cfg(test)] +mod tests { + use super::*; + + ssz_and_tree_hash_tests!(BuilderPendingPayment); +} diff --git a/consensus/types/src/builder/builder_pending_withdrawal.rs b/consensus/types/src/builder/builder_pending_withdrawal.rs new file mode 100644 index 00000000000..436d331c003 --- /dev/null +++ b/consensus/types/src/builder/builder_pending_withdrawal.rs @@ -0,0 +1,40 @@ +use crate::test_utils::TestRandom; +use crate::{Address, Epoch, ForkName}; +use context_deserialize::context_deserialize; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive( + Debug, + PartialEq, + Eq, + Hash, + Clone, + Default, + Serialize, + Deserialize, + Encode, + Decode, + TreeHash, + TestRandom, +)] +#[cfg_attr(feature = "arbitrary", 
derive(arbitrary::Arbitrary))] +#[context_deserialize(ForkName)] +pub struct BuilderPendingWithdrawal { + #[serde(with = "serde_utils::address_hex")] + pub fee_recipient: Address, + #[serde(with = "serde_utils::quoted_u64")] + pub amount: u64, + #[serde(with = "serde_utils::quoted_u64")] + pub builder_index: u64, + pub withdrawable_epoch: Epoch, +} + +#[cfg(test)] +mod tests { + use super::*; + + ssz_and_tree_hash_tests!(BuilderPendingWithdrawal); +} diff --git a/consensus/types/src/builder/mod.rs b/consensus/types/src/builder/mod.rs index 88a8e6a01a3..54d0ae4eb73 100644 --- a/consensus/types/src/builder/mod.rs +++ b/consensus/types/src/builder/mod.rs @@ -1,6 +1,10 @@ mod builder_bid; +mod builder_pending_payment; +mod builder_pending_withdrawal; pub use builder_bid::{ BuilderBid, BuilderBidBellatrix, BuilderBidCapella, BuilderBidDeneb, BuilderBidElectra, - BuilderBidFulu, BuilderBidGloas, SignedBuilderBid, + BuilderBidFulu, SignedBuilderBid, }; +pub use builder_pending_payment::BuilderPendingPayment; +pub use builder_pending_withdrawal::BuilderPendingWithdrawal; diff --git a/consensus/types/src/core/chain_spec.rs b/consensus/types/src/core/chain_spec.rs index c8052b502b5..da3f9b90ccc 100644 --- a/consensus/types/src/core/chain_spec.rs +++ b/consensus/types/src/core/chain_spec.rs @@ -36,6 +36,8 @@ pub enum Domain { SyncCommittee, ContributionAndProof, SyncCommitteeSelectionProof, + BeaconBuilder, + PTCAttester, ApplicationMask(ApplicationDomain), } @@ -89,6 +91,7 @@ pub struct ChainSpec { pub bls_withdrawal_prefix_byte: u8, pub eth1_address_withdrawal_prefix_byte: u8, pub compounding_withdrawal_prefix_byte: u8, + pub builder_withdrawal_prefix_byte: u8, /* * Time parameters @@ -127,6 +130,8 @@ pub struct ChainSpec { pub(crate) domain_voluntary_exit: u32, pub(crate) domain_selection_proof: u32, pub(crate) domain_aggregate_and_proof: u32, + pub(crate) domain_beacon_builder: u32, + pub(crate) domain_ptc_attester: u32, /* * Fork choice @@ -228,6 +233,8 @@ pub struct 
ChainSpec { pub gloas_fork_version: [u8; 4], /// The Gloas fork epoch is optional, with `None` representing "Gloas never happens". pub gloas_fork_epoch: Option, + pub builder_payment_threshold_numerator: u64, + pub builder_payment_threshold_denominator: u64, /* * Networking @@ -535,6 +542,8 @@ impl ChainSpec { Domain::VoluntaryExit => self.domain_voluntary_exit, Domain::SelectionProof => self.domain_selection_proof, Domain::AggregateAndProof => self.domain_aggregate_and_proof, + Domain::BeaconBuilder => self.domain_beacon_builder, + Domain::PTCAttester => self.domain_ptc_attester, Domain::SyncCommittee => self.domain_sync_committee, Domain::ContributionAndProof => self.domain_contribution_and_proof, Domain::SyncCommitteeSelectionProof => self.domain_sync_committee_selection_proof, @@ -972,6 +981,7 @@ impl ChainSpec { bls_withdrawal_prefix_byte: 0x00, eth1_address_withdrawal_prefix_byte: 0x01, compounding_withdrawal_prefix_byte: 0x02, + builder_withdrawal_prefix_byte: 0x03, /* * Time parameters @@ -1011,6 +1021,8 @@ impl ChainSpec { domain_voluntary_exit: 4, domain_selection_proof: 5, domain_aggregate_and_proof: 6, + domain_beacon_builder: 0x1B, + domain_ptc_attester: 0x0C, /* * Fork choice @@ -1131,6 +1143,8 @@ impl ChainSpec { */ gloas_fork_version: [0x07, 0x00, 0x00, 0x00], gloas_fork_epoch: None, + builder_payment_threshold_numerator: 6, + builder_payment_threshold_denominator: 10, /* * Network specific @@ -1333,6 +1347,7 @@ impl ChainSpec { bls_withdrawal_prefix_byte: 0x00, eth1_address_withdrawal_prefix_byte: 0x01, compounding_withdrawal_prefix_byte: 0x02, + builder_withdrawal_prefix_byte: 0x03, /* * Time parameters @@ -1372,6 +1387,8 @@ impl ChainSpec { domain_voluntary_exit: 4, domain_selection_proof: 5, domain_aggregate_and_proof: 6, + domain_beacon_builder: 0x1B, + domain_ptc_attester: 0x0C, /* * Fork choice @@ -1491,6 +1508,8 @@ impl ChainSpec { */ gloas_fork_version: [0x07, 0x00, 0x00, 0x64], gloas_fork_epoch: None, + builder_payment_threshold_numerator: 
6, + builder_payment_threshold_denominator: 10, /* * Network specific @@ -2517,6 +2536,8 @@ mod tests { &spec, ); test_domain(Domain::SyncCommittee, spec.domain_sync_committee, &spec); + test_domain(Domain::BeaconBuilder, spec.domain_beacon_builder, &spec); + test_domain(Domain::PTCAttester, spec.domain_ptc_attester, &spec); // The builder domain index is zero let builder_domain_pre_mask = [0; 4]; diff --git a/consensus/types/src/core/eth_spec.rs b/consensus/types/src/core/eth_spec.rs index 72fd1ebc9eb..74795fdfc31 100644 --- a/consensus/types/src/core/eth_spec.rs +++ b/consensus/types/src/core/eth_spec.rs @@ -171,6 +171,14 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq + type MaxWithdrawalRequestsPerPayload: Unsigned + Clone + Sync + Send + Debug + PartialEq; type MaxPendingDepositsPerEpoch: Unsigned + Clone + Sync + Send + Debug + PartialEq; + /* + * New in Gloas + */ + type PTCSize: Unsigned + Clone + Sync + Send + Debug + PartialEq; + type MaxPayloadAttestations: Unsigned + Clone + Sync + Send + Debug + PartialEq; + type BuilderPendingPaymentsLimit: Unsigned + Clone + Sync + Send + Debug + PartialEq; + type BuilderPendingWithdrawalsLimit: Unsigned + Clone + Sync + Send + Debug + PartialEq; + fn default_spec() -> ChainSpec; fn spec_name() -> EthSpecId; @@ -357,6 +365,16 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq + Self::PendingConsolidationsLimit::to_usize() } + /// Returns the `BUILDER_PENDING_PAYMENTS_LIMIT` constant for this specification. + fn builder_pending_payments_limit() -> usize { + Self::BuilderPendingPaymentsLimit::to_usize() + } + + /// Returns the `BUILDER_PENDING_WITHDRAWALS_LIMIT` constant for this specification. + fn builder_pending_withdrawals_limit() -> usize { + Self::BuilderPendingWithdrawalsLimit::to_usize() + } + /// Returns the `MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD` constant for this specification. 
fn max_consolidation_requests_per_payload() -> usize { Self::MaxConsolidationRequestsPerPayload::to_usize() @@ -402,6 +420,16 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq + fn proposer_lookahead_slots() -> usize { Self::ProposerLookaheadSlots::to_usize() } + + /// Returns the `PTCSize` constant for this specification. + fn ptc_size() -> usize { + Self::PTCSize::to_usize() + } + + /// Returns the `MaxPayloadAttestations` constant for this specification. + fn max_payload_attestations() -> usize { + Self::MaxPayloadAttestations::to_usize() + } } /// Macro to inherit some type values from another EthSpec. @@ -431,6 +459,8 @@ impl EthSpec for MainnetEthSpec { type EpochsPerSlashingsVector = U8192; type HistoricalRootsLimit = U16777216; type ValidatorRegistryLimit = U1099511627776; + type BuilderPendingPaymentsLimit = U64; // 2 * SLOTS_PER_EPOCH = 2 * 32 = 64 + type BuilderPendingWithdrawalsLimit = U1048576; type MaxProposerSlashings = U16; type MaxAttesterSlashings = U2; type MaxAttestations = U128; @@ -471,6 +501,8 @@ impl EthSpec for MainnetEthSpec { type MaxAttestationsElectra = U8; type MaxWithdrawalRequestsPerPayload = U16; type MaxPendingDepositsPerEpoch = U16; + type PTCSize = U512; + type MaxPayloadAttestations = U4; fn default_spec() -> ChainSpec { ChainSpec::mainnet() @@ -513,6 +545,7 @@ impl EthSpec for MinimalEthSpec { type CellsPerExtBlob = U128; type NumberOfColumns = U128; type ProposerLookaheadSlots = U16; // Derived from (MIN_SEED_LOOKAHEAD + 1) * SLOTS_PER_EPOCH + type BuilderPendingPaymentsLimit = U16; // 2 * SLOTS_PER_EPOCH = 2 * 8 = 16 params_from_eth_spec!(MainnetEthSpec { JustificationBitsLength, @@ -522,6 +555,7 @@ impl EthSpec for MinimalEthSpec { GenesisEpoch, HistoricalRootsLimit, ValidatorRegistryLimit, + BuilderPendingWithdrawalsLimit, MaxProposerSlashings, MaxAttesterSlashings, MaxAttestations, @@ -541,7 +575,9 @@ impl EthSpec for MinimalEthSpec { MaxAttesterSlashingsElectra, MaxAttestationsElectra, 
MaxDepositRequestsPerPayload, - MaxWithdrawalRequestsPerPayload + MaxWithdrawalRequestsPerPayload, + PTCSize, + MaxPayloadAttestations }); fn default_spec() -> ChainSpec { @@ -572,6 +608,8 @@ impl EthSpec for GnosisEthSpec { type EpochsPerSlashingsVector = U8192; type HistoricalRootsLimit = U16777216; type ValidatorRegistryLimit = U1099511627776; + type BuilderPendingPaymentsLimit = U32; // 2 * SLOTS_PER_EPOCH = 2 * 16 = 32 + type BuilderPendingWithdrawalsLimit = U1048576; type MaxProposerSlashings = U16; type MaxAttesterSlashings = U2; type MaxAttestations = U128; @@ -612,6 +650,8 @@ impl EthSpec for GnosisEthSpec { type CellsPerExtBlob = U128; type NumberOfColumns = U128; type ProposerLookaheadSlots = U32; // Derived from (MIN_SEED_LOOKAHEAD + 1) * SLOTS_PER_EPOCH + type PTCSize = U512; + type MaxPayloadAttestations = U2; fn default_spec() -> ChainSpec { ChainSpec::gnosis() diff --git a/consensus/types/src/execution/dumb_macros.rs b/consensus/types/src/execution/dumb_macros.rs new file mode 100644 index 00000000000..4eae416bb56 --- /dev/null +++ b/consensus/types/src/execution/dumb_macros.rs @@ -0,0 +1,108 @@ +// These would usually be created by superstuct but now there's no longer a 1:1 mapping between +// the variants for ExecutionPayload and the variants for +// - ExecutionPayloadHeader +// - FullPayload +// - BlindedPayload +// TODO(EIP-7732): get rid of this whole file and panics once the engine_api is refactored for ePBS + +#[macro_export] +macro_rules! 
map_execution_payload_into_full_payload { + ($value:expr, $f:expr) => { + match $value { + ExecutionPayload::Bellatrix(inner) => { + let f: fn(ExecutionPayloadBellatrix<_>, fn(_) -> _) -> _ = $f; + f(inner, FullPayload::Bellatrix) + } + ExecutionPayload::Capella(inner) => { + let f: fn(ExecutionPayloadCapella<_>, fn(_) -> _) -> _ = $f; + f(inner, FullPayload::Capella) + } + ExecutionPayload::Deneb(inner) => { + let f: fn(ExecutionPayloadDeneb<_>, fn(_) -> _) -> _ = $f; + f(inner, FullPayload::Deneb) + } + ExecutionPayload::Electra(inner) => { + let f: fn(ExecutionPayloadElectra<_>, fn(_) -> _) -> _ = $f; + f(inner, FullPayload::Electra) + } + ExecutionPayload::Fulu(inner) => { + let f: fn(ExecutionPayloadFulu<_>, fn(_) -> _) -> _ = $f; + f(inner, FullPayload::Fulu) + } + ExecutionPayload::Gloas(_) => panic!("FullPayload::Gloas does not exist!"), + } + }; +} + +#[macro_export] +macro_rules! map_execution_payload_into_blinded_payload { + ($value:expr, $f:expr) => { + match $value { + ExecutionPayload::Bellatrix(inner) => { + let f: fn(ExecutionPayloadBellatrix<_>, fn(_) -> _) -> _ = $f; + f(inner, BlindedPayload::Bellatrix) + } + ExecutionPayload::Capella(inner) => { + let f: fn(ExecutionPayloadCapella<_>, fn(_) -> _) -> _ = $f; + f(inner, BlindedPayload::Capella) + } + ExecutionPayload::Deneb(inner) => { + let f: fn(ExecutionPayloadDeneb<_>, fn(_) -> _) -> _ = $f; + f(inner, BlindedPayload::Deneb) + } + ExecutionPayload::Electra(inner) => { + let f: fn(ExecutionPayloadElectra<_>, fn(_) -> _) -> _ = $f; + f(inner, BlindedPayload::Electra) + } + ExecutionPayload::Fulu(inner) => { + let f: fn(ExecutionPayloadFulu<_>, fn(_) -> _) -> _ = $f; + f(inner, BlindedPayload::Fulu) + } + ExecutionPayload::Gloas(_) => panic!("BlindedPayload::Gloas does not exist!"), + } + }; +} + +#[macro_export] +macro_rules! 
map_execution_payload_ref_into_execution_payload_header { + (&$lifetime:tt _, $value:expr, $f:expr) => { + match $value { + ExecutionPayloadRef::Bellatrix(inner) => { + let f: fn( + &$lifetime ExecutionPayloadBellatrix<_>, + fn(_) -> _, + ) -> _ = $f; + f(inner, ExecutionPayloadHeader::Bellatrix) + } + ExecutionPayloadRef::Capella(inner) => { + let f: fn( + &$lifetime ExecutionPayloadCapella<_>, + fn(_) -> _, + ) -> _ = $f; + f(inner, ExecutionPayloadHeader::Capella) + } + ExecutionPayloadRef::Deneb(inner) => { + let f: fn( + &$lifetime ExecutionPayloadDeneb<_>, + fn(_) -> _, + ) -> _ = $f; + f(inner, ExecutionPayloadHeader::Deneb) + } + ExecutionPayloadRef::Electra(inner) => { + let f: fn( + &$lifetime ExecutionPayloadElectra<_>, + fn(_) -> _, + ) -> _ = $f; + f(inner, ExecutionPayloadHeader::Electra) + } + ExecutionPayloadRef::Fulu(inner) => { + let f: fn( + &$lifetime ExecutionPayloadFulu<_>, + fn(_) -> _, + ) -> _ = $f; + f(inner, ExecutionPayloadHeader::Fulu) + } + ExecutionPayloadRef::Gloas(_) => panic!("ExecutionPayloadHeader::Gloas does not exist!"), + } + } +} diff --git a/consensus/types/src/execution/execution_payload.rs b/consensus/types/src/execution/execution_payload.rs index 7973b7efdce..b2278c91667 100644 --- a/consensus/types/src/execution/execution_payload.rs +++ b/consensus/types/src/execution/execution_payload.rs @@ -55,9 +55,7 @@ pub type Transactions = VariableList< partial_getter_error( ty = "BeaconStateError", expr = "BeaconStateError::IncorrectStateVariant" - ), - map_into(FullPayload, BlindedPayload), - map_ref_into(ExecutionPayloadHeader) + ) )] #[cfg_attr( feature = "arbitrary", @@ -146,6 +144,7 @@ impl ForkVersionDecode for ExecutionPayload { impl ExecutionPayload { #[allow(clippy::arithmetic_side_effects)] /// Returns the maximum size of an execution payload. + /// TODO(EIP-7732): this seems to only exist for the Bellatrix fork, but Mark's branch has it for all the forks, i.e. 
max_execution_payload_eip7732_size pub fn max_execution_payload_bellatrix_size() -> usize { // Fixed part ExecutionPayloadBellatrix::::default().as_ssz_bytes().len() diff --git a/consensus/types/src/execution/execution_payload_bid.rs b/consensus/types/src/execution/execution_payload_bid.rs new file mode 100644 index 00000000000..20e461334d3 --- /dev/null +++ b/consensus/types/src/execution/execution_payload_bid.rs @@ -0,0 +1,40 @@ +use crate::test_utils::TestRandom; +use crate::{Address, ExecutionBlockHash, ForkName, Hash256, SignedRoot, Slot}; +use context_deserialize::context_deserialize; +use educe::Educe; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive( + Default, Debug, Clone, Serialize, Encode, Decode, Deserialize, TreeHash, Educe, TestRandom, +)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[educe(PartialEq, Hash)] +#[context_deserialize(ForkName)] +// https://github.com/ethereum/consensus-specs/blob/master/specs/gloas/beacon-chain.md#executionpayloadbid +pub struct ExecutionPayloadBid { + pub parent_block_hash: ExecutionBlockHash, + pub parent_block_root: Hash256, + pub block_hash: ExecutionBlockHash, + #[serde(with = "serde_utils::address_hex")] + pub fee_recipient: Address, + #[serde(with = "serde_utils::quoted_u64")] + pub gas_limit: u64, + #[serde(with = "serde_utils::quoted_u64")] + pub builder_index: u64, + pub slot: Slot, + #[serde(with = "serde_utils::quoted_u64")] + pub value: u64, + pub blob_kzg_commitments_root: Hash256, +} + +impl SignedRoot for ExecutionPayloadBid {} + +#[cfg(test)] +mod tests { + use super::*; + + ssz_and_tree_hash_tests!(ExecutionPayloadBid); +} diff --git a/consensus/types/src/execution/execution_payload_envelope.rs b/consensus/types/src/execution/execution_payload_envelope.rs new file mode 100644 index 00000000000..64e03cec5a9 --- /dev/null +++ 
b/consensus/types/src/execution/execution_payload_envelope.rs @@ -0,0 +1,36 @@ +use crate::test_utils::TestRandom; +use crate::{ + EthSpec, ExecutionPayloadGloas, ExecutionRequests, ForkName, Hash256, KzgCommitments, + SignedRoot, Slot, +}; +use context_deserialize::context_deserialize; +use educe::Educe; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive(Debug, Clone, Serialize, Encode, Decode, Deserialize, TestRandom, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] +#[context_deserialize(ForkName)] +#[serde(bound = "E: EthSpec")] +pub struct ExecutionPayloadEnvelope { + pub payload: ExecutionPayloadGloas, + pub execution_requests: ExecutionRequests, + #[serde(with = "serde_utils::quoted_u64")] + pub builder_index: u64, + pub beacon_block_root: Hash256, + pub slot: Slot, + pub blob_kzg_commitments: KzgCommitments, + pub state_root: Hash256, +} + +impl SignedRoot for ExecutionPayloadEnvelope {} + +#[cfg(test)] +mod tests { + use super::*; + use crate::MainnetEthSpec; + + ssz_and_tree_hash_tests!(ExecutionPayloadEnvelope); +} diff --git a/consensus/types/src/execution/execution_payload_header.rs b/consensus/types/src/execution/execution_payload_header.rs index bd91a6471b2..cf78f7871b4 100644 --- a/consensus/types/src/execution/execution_payload_header.rs +++ b/consensus/types/src/execution/execution_payload_header.rs @@ -14,16 +14,17 @@ use crate::{ core::{Address, EthSpec, Hash256, Uint256}, execution::{ ExecutionBlockHash, ExecutionPayloadBellatrix, ExecutionPayloadCapella, - ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, - ExecutionPayloadGloas, ExecutionPayloadRef, Transactions, + ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, ExecutionPayloadRef, + Transactions, }, fork::ForkName, + map_execution_payload_ref_into_execution_payload_header, state::BeaconStateError, test_utils::TestRandom, }; 
#[superstruct( - variants(Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), + variants(Bellatrix, Capella, Deneb, Electra, Fulu), variant_attributes( derive( Default, @@ -105,12 +106,12 @@ pub struct ExecutionPayloadHeader { pub block_hash: ExecutionBlockHash, #[superstruct(getter(copy))] pub transactions_root: Hash256, - #[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas), partial_getter(copy))] + #[superstruct(only(Capella, Deneb, Electra, Fulu), partial_getter(copy))] pub withdrawals_root: Hash256, - #[superstruct(only(Deneb, Electra, Fulu, Gloas), partial_getter(copy))] + #[superstruct(only(Deneb, Electra, Fulu), partial_getter(copy))] #[serde(with = "serde_utils::quoted_u64")] pub blob_gas_used: u64, - #[superstruct(only(Deneb, Electra, Fulu, Gloas), partial_getter(copy))] + #[superstruct(only(Deneb, Electra, Fulu), partial_getter(copy))] #[serde(with = "serde_utils::quoted_u64")] pub excess_blob_gas: u64, } @@ -136,14 +137,19 @@ impl ExecutionPayloadHeader { ExecutionPayloadHeaderElectra::from_ssz_bytes(bytes).map(Self::Electra) } ForkName::Fulu => ExecutionPayloadHeaderFulu::from_ssz_bytes(bytes).map(Self::Fulu), - ForkName::Gloas => ExecutionPayloadHeaderGloas::from_ssz_bytes(bytes).map(Self::Gloas), + ForkName::Gloas => Err(ssz::DecodeError::BytesInvalid(format!( + "unsupported fork for ExecutionPayloadHeader: {fork_name}", + ))), } } #[allow(clippy::arithmetic_side_effects)] pub fn ssz_max_var_len_for_fork(fork_name: ForkName) -> usize { // TODO(newfork): Add a new case here if there are new variable fields - if fork_name.bellatrix_enabled() { + if fork_name.gloas_enabled() { + // TODO(EIP7732): check this + 0 + } else if fork_name.bellatrix_enabled() { // Max size of variable length `extra_data` field E::max_extra_data_bytes() * ::ssz_fixed_len() } else { @@ -158,7 +164,6 @@ impl ExecutionPayloadHeader { ExecutionPayloadHeader::Deneb(_) => ForkName::Deneb, ExecutionPayloadHeader::Electra(_) => ForkName::Electra, ExecutionPayloadHeader::Fulu(_) => 
ForkName::Fulu, - ExecutionPayloadHeader::Gloas(_) => ForkName::Gloas, } } } @@ -266,30 +271,6 @@ impl ExecutionPayloadHeaderElectra { } } -impl ExecutionPayloadHeaderFulu { - pub fn upgrade_to_gloas(&self) -> ExecutionPayloadHeaderGloas { - ExecutionPayloadHeaderGloas { - parent_hash: self.parent_hash, - fee_recipient: self.fee_recipient, - state_root: self.state_root, - receipts_root: self.receipts_root, - logs_bloom: self.logs_bloom.clone(), - prev_randao: self.prev_randao, - block_number: self.block_number, - gas_limit: self.gas_limit, - gas_used: self.gas_used, - timestamp: self.timestamp, - extra_data: self.extra_data.clone(), - base_fee_per_gas: self.base_fee_per_gas, - block_hash: self.block_hash, - transactions_root: self.transactions_root, - withdrawals_root: self.withdrawals_root, - blob_gas_used: self.blob_gas_used, - excess_blob_gas: self.excess_blob_gas, - } - } -} - impl<'a, E: EthSpec> From<&'a ExecutionPayloadBellatrix> for ExecutionPayloadHeaderBellatrix { fn from(payload: &'a ExecutionPayloadBellatrix) -> Self { Self { @@ -405,30 +386,6 @@ impl<'a, E: EthSpec> From<&'a ExecutionPayloadFulu> for ExecutionPayloadHeade } } -impl<'a, E: EthSpec> From<&'a ExecutionPayloadGloas> for ExecutionPayloadHeaderGloas { - fn from(payload: &'a ExecutionPayloadGloas) -> Self { - Self { - parent_hash: payload.parent_hash, - fee_recipient: payload.fee_recipient, - state_root: payload.state_root, - receipts_root: payload.receipts_root, - logs_bloom: payload.logs_bloom.clone(), - prev_randao: payload.prev_randao, - block_number: payload.block_number, - gas_limit: payload.gas_limit, - gas_used: payload.gas_used, - timestamp: payload.timestamp, - extra_data: payload.extra_data.clone(), - base_fee_per_gas: payload.base_fee_per_gas, - block_hash: payload.block_hash, - transactions_root: payload.transactions.tree_hash_root(), - withdrawals_root: payload.withdrawals.tree_hash_root(), - blob_gas_used: payload.blob_gas_used, - excess_blob_gas: payload.excess_blob_gas, - } - 
} -} - // These impls are required to work around an inelegance in `to_execution_payload_header`. // They only clone headers so they should be relatively cheap. impl<'a, E: EthSpec> From<&'a Self> for ExecutionPayloadHeaderBellatrix { @@ -461,12 +418,6 @@ impl<'a, E: EthSpec> From<&'a Self> for ExecutionPayloadHeaderFulu { } } -impl<'a, E: EthSpec> From<&'a Self> for ExecutionPayloadHeaderGloas { - fn from(payload: &'a Self) -> Self { - payload.clone() - } -} - impl<'a, E: EthSpec> From> for ExecutionPayloadHeader { fn from(payload: ExecutionPayloadRef<'a, E>) -> Self { map_execution_payload_ref_into_execution_payload_header!( @@ -528,9 +479,6 @@ impl ExecutionPayloadHeaderRefMut<'_, E> { ExecutionPayloadHeaderRefMut::Fulu(mut_ref) => { *mut_ref = header.try_into()?; } - ExecutionPayloadHeaderRefMut::Gloas(mut_ref) => { - *mut_ref = header.try_into()?; - } } Ok(()) } @@ -558,16 +506,6 @@ impl TryFrom> for ExecutionPayloadHeaderFu } } -impl TryFrom> for ExecutionPayloadHeaderGloas { - type Error = BeaconStateError; - fn try_from(header: ExecutionPayloadHeader) -> Result { - match header { - ExecutionPayloadHeader::Gloas(execution_payload_header) => Ok(execution_payload_header), - _ => Err(BeaconStateError::IncorrectStateVariant), - } - } -} - impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for ExecutionPayloadHeader { fn context_deserialize(deserializer: D, context: ForkName) -> Result where @@ -580,12 +518,6 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for ExecutionPayloadHead )) }; Ok(match context { - ForkName::Base | ForkName::Altair => { - return Err(serde::de::Error::custom(format!( - "ExecutionPayloadHeader failed to deserialize: unsupported fork '{}'", - context - ))); - } ForkName::Bellatrix => { Self::Bellatrix(Deserialize::deserialize(deserializer).map_err(convert_err)?) 
} @@ -601,8 +533,12 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for ExecutionPayloadHead ForkName::Fulu => { Self::Fulu(Deserialize::deserialize(deserializer).map_err(convert_err)?) } - ForkName::Gloas => { - Self::Gloas(Deserialize::deserialize(deserializer).map_err(convert_err)?) + + ForkName::Base | ForkName::Altair | ForkName::Gloas => { + return Err(serde::de::Error::custom(format!( + "ExecutionPayloadHeader failed to deserialize: unsupported fork '{}'", + context + ))); } }) } diff --git a/consensus/types/src/execution/mod.rs b/consensus/types/src/execution/mod.rs index 0708bc5d960..da6c8606002 100644 --- a/consensus/types/src/execution/mod.rs +++ b/consensus/types/src/execution/mod.rs @@ -4,10 +4,15 @@ mod execution_block_header; #[macro_use] mod execution_payload; mod bls_to_execution_change; +mod dumb_macros; +mod execution_payload_bid; +mod execution_payload_envelope; mod execution_payload_header; mod execution_requests; mod payload; mod signed_bls_to_execution_change; +mod signed_execution_payload_bid; +mod signed_execution_payload_envelope; pub use bls_to_execution_change::BlsToExecutionChange; pub use eth1_data::Eth1Data; @@ -18,19 +23,23 @@ pub use execution_payload::{ ExecutionPayloadElectra, ExecutionPayloadFulu, ExecutionPayloadGloas, ExecutionPayloadRef, Transaction, Transactions, }; +pub use execution_payload_bid::ExecutionPayloadBid; +pub use execution_payload_envelope::ExecutionPayloadEnvelope; pub use execution_payload_header::{ ExecutionPayloadHeader, ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, - ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, + ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, }; pub use execution_requests::{ ConsolidationRequests, DepositRequests, ExecutionRequests, RequestType, WithdrawalRequests, }; pub use payload::{ AbstractExecPayload, BlindedPayload, 
BlindedPayloadBellatrix, BlindedPayloadCapella, - BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadGloas, - BlindedPayloadRef, BlockProductionVersion, BlockType, ExecPayload, FullPayload, - FullPayloadBellatrix, FullPayloadCapella, FullPayloadDeneb, FullPayloadElectra, - FullPayloadFulu, FullPayloadGloas, FullPayloadRef, OwnedExecPayload, + BlindedPayloadDeneb, BlindedPayloadElectra, BlindedPayloadFulu, BlindedPayloadRef, + BlockProductionVersion, BlockType, ExecPayload, FullPayload, FullPayloadBellatrix, + FullPayloadCapella, FullPayloadDeneb, FullPayloadElectra, FullPayloadFulu, FullPayloadRef, + OwnedExecPayload, }; pub use signed_bls_to_execution_change::SignedBlsToExecutionChange; +pub use signed_execution_payload_bid::SignedExecutionPayloadBid; +pub use signed_execution_payload_envelope::SignedExecutionPayloadEnvelope; diff --git a/consensus/types/src/execution/payload.rs b/consensus/types/src/execution/payload.rs index c1cc6c4eb66..703b082c182 100644 --- a/consensus/types/src/execution/payload.rs +++ b/consensus/types/src/execution/payload.rs @@ -15,11 +15,12 @@ use crate::{ execution::{ ExecutionBlockHash, ExecutionPayload, ExecutionPayloadBellatrix, ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadElectra, ExecutionPayloadFulu, - ExecutionPayloadGloas, ExecutionPayloadHeader, ExecutionPayloadHeaderBellatrix, - ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, - ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, ExecutionPayloadRef, Transactions, + ExecutionPayloadHeader, ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, + ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, + ExecutionPayloadRef, Transactions, }, fork::ForkName, + map_execution_payload_into_blinded_payload, map_execution_payload_into_full_payload, state::BeaconStateError, test_utils::TestRandom, }; @@ -118,7 +119,6 @@ pub trait AbstractExecPayload: + 
TryInto + TryInto + TryInto - + TryInto + Sync { type Ref<'a>: ExecPayload @@ -127,8 +127,7 @@ pub trait AbstractExecPayload: + From<&'a Self::Capella> + From<&'a Self::Deneb> + From<&'a Self::Electra> - + From<&'a Self::Fulu> - + From<&'a Self::Gloas>; + + From<&'a Self::Fulu>; type Bellatrix: OwnedExecPayload + Into @@ -155,15 +154,10 @@ pub trait AbstractExecPayload: + for<'a> From>> + TryFrom> + Sync; - type Gloas: OwnedExecPayload - + Into - + for<'a> From>> - + TryFrom> - + Sync; } #[superstruct( - variants(Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), + variants(Bellatrix, Capella, Deneb, Electra, Fulu), variant_attributes( derive( Debug, @@ -224,8 +218,6 @@ pub struct FullPayload { pub execution_payload: ExecutionPayloadElectra, #[superstruct(only(Fulu), partial_getter(rename = "execution_payload_fulu"))] pub execution_payload: ExecutionPayloadFulu, - #[superstruct(only(Gloas), partial_getter(rename = "execution_payload_gloas"))] - pub execution_payload: ExecutionPayloadGloas, } impl From> for ExecutionPayload { @@ -337,7 +329,6 @@ impl ExecPayload for FullPayload { FullPayload::Deneb(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), FullPayload::Electra(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), FullPayload::Fulu(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), - FullPayload::Gloas(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), } } @@ -349,7 +340,6 @@ impl ExecPayload for FullPayload { FullPayload::Deneb(inner) => Ok(inner.execution_payload.blob_gas_used), FullPayload::Electra(inner) => Ok(inner.execution_payload.blob_gas_used), FullPayload::Fulu(inner) => Ok(inner.execution_payload.blob_gas_used), - FullPayload::Gloas(inner) => Ok(inner.execution_payload.blob_gas_used), } } @@ -381,7 +371,7 @@ impl FullPayload { ForkName::Deneb => Ok(FullPayloadDeneb::default().into()), ForkName::Electra => Ok(FullPayloadElectra::default().into()), ForkName::Fulu => 
Ok(FullPayloadFulu::default().into()), - ForkName::Gloas => Ok(FullPayloadGloas::default().into()), + ForkName::Gloas => Err(BeaconStateError::IncorrectStateVariant), } } } @@ -482,9 +472,6 @@ impl ExecPayload for FullPayloadRef<'_, E> { Ok(inner.execution_payload.withdrawals.tree_hash_root()) } FullPayloadRef::Fulu(inner) => Ok(inner.execution_payload.withdrawals.tree_hash_root()), - FullPayloadRef::Gloas(inner) => { - Ok(inner.execution_payload.withdrawals.tree_hash_root()) - } } } @@ -496,7 +483,6 @@ impl ExecPayload for FullPayloadRef<'_, E> { FullPayloadRef::Deneb(inner) => Ok(inner.execution_payload.blob_gas_used), FullPayloadRef::Electra(inner) => Ok(inner.execution_payload.blob_gas_used), FullPayloadRef::Fulu(inner) => Ok(inner.execution_payload.blob_gas_used), - FullPayloadRef::Gloas(inner) => Ok(inner.execution_payload.blob_gas_used), } } @@ -520,7 +506,6 @@ impl AbstractExecPayload for FullPayload { type Deneb = FullPayloadDeneb; type Electra = FullPayloadElectra; type Fulu = FullPayloadFulu; - type Gloas = FullPayloadGloas; } impl From> for FullPayload { @@ -539,7 +524,7 @@ impl TryFrom> for FullPayload { } #[superstruct( - variants(Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), + variants(Bellatrix, Capella, Deneb, Electra, Fulu), variant_attributes( derive( Debug, @@ -599,8 +584,6 @@ pub struct BlindedPayload { pub execution_payload_header: ExecutionPayloadHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "execution_payload_fulu"))] pub execution_payload_header: ExecutionPayloadHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "execution_payload_gloas"))] - pub execution_payload_header: ExecutionPayloadHeaderGloas, } impl<'a, E: EthSpec> From> for BlindedPayload { @@ -690,7 +673,6 @@ impl ExecPayload for BlindedPayload { BlindedPayload::Deneb(inner) => Ok(inner.execution_payload_header.withdrawals_root), BlindedPayload::Electra(inner) => Ok(inner.execution_payload_header.withdrawals_root), BlindedPayload::Fulu(inner) 
=> Ok(inner.execution_payload_header.withdrawals_root), - BlindedPayload::Gloas(inner) => Ok(inner.execution_payload_header.withdrawals_root), } } @@ -702,7 +684,6 @@ impl ExecPayload for BlindedPayload { BlindedPayload::Deneb(inner) => Ok(inner.execution_payload_header.blob_gas_used), BlindedPayload::Electra(inner) => Ok(inner.execution_payload_header.blob_gas_used), BlindedPayload::Fulu(inner) => Ok(inner.execution_payload_header.blob_gas_used), - BlindedPayload::Gloas(inner) => Ok(inner.execution_payload_header.blob_gas_used), } } @@ -802,7 +783,6 @@ impl<'b, E: EthSpec> ExecPayload for BlindedPayloadRef<'b, E> { Ok(inner.execution_payload_header.withdrawals_root) } BlindedPayloadRef::Fulu(inner) => Ok(inner.execution_payload_header.withdrawals_root), - BlindedPayloadRef::Gloas(inner) => Ok(inner.execution_payload_header.withdrawals_root), } } @@ -814,7 +794,6 @@ impl<'b, E: EthSpec> ExecPayload for BlindedPayloadRef<'b, E> { BlindedPayloadRef::Deneb(inner) => Ok(inner.execution_payload_header.blob_gas_used), BlindedPayloadRef::Electra(inner) => Ok(inner.execution_payload_header.blob_gas_used), BlindedPayloadRef::Fulu(inner) => Ok(inner.execution_payload_header.blob_gas_used), - BlindedPayloadRef::Gloas(inner) => Ok(inner.execution_payload_header.blob_gas_used), } } @@ -1126,13 +1105,6 @@ impl_exec_payload_for_fork!( ExecutionPayloadFulu, Fulu ); -impl_exec_payload_for_fork!( - BlindedPayloadGloas, - FullPayloadGloas, - ExecutionPayloadHeaderGloas, - ExecutionPayloadGloas, - Gloas -); impl AbstractExecPayload for BlindedPayload { type Ref<'a> = BlindedPayloadRef<'a, E>; @@ -1141,7 +1113,6 @@ impl AbstractExecPayload for BlindedPayload { type Deneb = BlindedPayloadDeneb; type Electra = BlindedPayloadElectra; type Fulu = BlindedPayloadFulu; - type Gloas = BlindedPayloadGloas; } impl From> for BlindedPayload { @@ -1183,11 +1154,6 @@ impl From> for BlindedPayload { execution_payload_header, }) } - ExecutionPayloadHeader::Gloas(execution_payload_header) => { - 
Self::Gloas(BlindedPayloadGloas { - execution_payload_header, - }) - } } } } @@ -1210,9 +1176,6 @@ impl From> for ExecutionPayloadHeader { BlindedPayload::Fulu(blinded_payload) => { ExecutionPayloadHeader::Fulu(blinded_payload.execution_payload_header) } - BlindedPayload::Gloas(blinded_payload) => { - ExecutionPayloadHeader::Gloas(blinded_payload.execution_payload_header) - } } } } diff --git a/consensus/types/src/execution/signed_execution_payload_bid.rs b/consensus/types/src/execution/signed_execution_payload_bid.rs new file mode 100644 index 00000000000..29dfd03ba03 --- /dev/null +++ b/consensus/types/src/execution/signed_execution_payload_bid.rs @@ -0,0 +1,35 @@ +use crate::test_utils::TestRandom; +use crate::{ExecutionPayloadBid, ForkName}; +use bls::Signature; +use context_deserialize::context_deserialize; +use educe::Educe; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive(TestRandom, TreeHash, Debug, Clone, Encode, Decode, Serialize, Deserialize, Educe)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[educe(PartialEq, Hash)] +#[context_deserialize(ForkName)] +// https://github.com/ethereum/consensus-specs/blob/master/specs/gloas/beacon-chain.md#signedexecutionpayloadbid +pub struct SignedExecutionPayloadBid { + pub message: ExecutionPayloadBid, + pub signature: Signature, +} + +impl SignedExecutionPayloadBid { + pub fn empty() -> Self { + Self { + message: ExecutionPayloadBid::default(), + signature: Signature::empty(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + ssz_and_tree_hash_tests!(SignedExecutionPayloadBid); +} diff --git a/consensus/types/src/execution/signed_execution_payload_envelope.rs b/consensus/types/src/execution/signed_execution_payload_envelope.rs new file mode 100644 index 00000000000..16410416157 --- /dev/null +++ b/consensus/types/src/execution/signed_execution_payload_envelope.rs @@ -0,0 +1,24 @@ 
+use crate::test_utils::TestRandom; +use crate::{EthSpec, ExecutionPayloadEnvelope}; +use bls::Signature; +use educe::Educe; +use serde::{Deserialize, Serialize}; +use ssz_derive::{Decode, Encode}; +use test_random_derive::TestRandom; +use tree_hash_derive::TreeHash; + +#[derive(Debug, Clone, Serialize, Encode, Decode, Deserialize, TestRandom, TreeHash, Educe)] +#[educe(PartialEq, Hash(bound(E: EthSpec)))] +#[serde(bound = "E: EthSpec")] +pub struct SignedExecutionPayloadEnvelope { + pub message: ExecutionPayloadEnvelope, + pub signature: Signature, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::MainnetEthSpec; + + ssz_and_tree_hash_tests!(SignedExecutionPayloadEnvelope); +} diff --git a/consensus/types/src/fork/fork_context.rs b/consensus/types/src/fork/fork_context.rs index 89f69bcbb62..3407689e790 100644 --- a/consensus/types/src/fork/fork_context.rs +++ b/consensus/types/src/fork/fork_context.rs @@ -182,6 +182,7 @@ mod tests { spec.deneb_fork_epoch = Some(Epoch::new(4)); spec.electra_fork_epoch = Some(Epoch::new(5)); spec.fulu_fork_epoch = Some(Epoch::new(6)); + spec.gloas_fork_epoch = Some(Epoch::new(7)); spec.blob_schedule = BlobSchedule::new(blob_parameters); spec } @@ -196,6 +197,7 @@ mod tests { assert!(context.fork_exists(ForkName::Electra)); assert!(context.fork_exists(ForkName::Fulu)); + assert!(context.fork_exists(ForkName::Gloas)); } #[test] diff --git a/consensus/types/src/lib.rs b/consensus/types/src/lib.rs index cd9252bde8b..5a89fcb1d48 100644 --- a/consensus/types/src/lib.rs +++ b/consensus/types/src/lib.rs @@ -124,6 +124,10 @@ pub mod execution_requests { }; } +pub mod execution_payload_envelope { + pub use crate::execution::{ExecutionPayloadEnvelope, SignedExecutionPayloadEnvelope}; +} + pub mod data_column_custody_group { pub use crate::data::{ CustodyIndex, compute_columns_for_custody_group, compute_ordered_custody_column_indices, @@ -157,6 +161,12 @@ pub mod signed_aggregate_and_proof { pub use 
crate::attestation::SignedAggregateAndProofRefMut; } +pub mod payload_attestation { + pub use crate::attestation::{ + PayloadAttestation, PayloadAttestationData, PayloadAttestationMessage, + }; +} + pub mod application_domain { pub use crate::core::ApplicationDomain; } diff --git a/consensus/types/src/light_client/error.rs b/consensus/types/src/light_client/error.rs index c492cfcbde3..4c7a30db5e6 100644 --- a/consensus/types/src/light_client/error.rs +++ b/consensus/types/src/light_client/error.rs @@ -14,6 +14,7 @@ pub enum LightClientError { InvalidFinalizedBlock, BeaconBlockBodyError, InconsistentFork, + GloasNotImplemented, } impl From for LightClientError { diff --git a/consensus/types/src/light_client/light_client_bootstrap.rs b/consensus/types/src/light_client/light_client_bootstrap.rs index 847b2a2a963..fbcc0ef2b05 100644 --- a/consensus/types/src/light_client/light_client_bootstrap.rs +++ b/consensus/types/src/light_client/light_client_bootstrap.rs @@ -18,7 +18,6 @@ use crate::{ CurrentSyncCommitteeProofLen, CurrentSyncCommitteeProofLenElectra, LightClientError, LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, LightClientHeaderElectra, LightClientHeaderFulu, - LightClientHeaderGloas, }, state::BeaconState, sync_committee::SyncCommittee, @@ -28,7 +27,7 @@ use crate::{ /// A LightClientBootstrap is the initializer we send over to light_client nodes /// that are trying to generate their basic storage when booting up. 
#[superstruct( - variants(Altair, Capella, Deneb, Electra, Fulu, Gloas), + variants(Altair, Capella, Deneb, Electra, Fulu), variant_attributes( derive( Debug, @@ -73,8 +72,6 @@ pub struct LightClientBootstrap { pub header: LightClientHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "header_fulu"))] pub header: LightClientHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "header_gloas"))] - pub header: LightClientHeaderGloas, /// The `SyncCommittee` used in the requested period. pub current_sync_committee: Arc>, /// Merkle proof for sync committee @@ -84,7 +81,7 @@ pub struct LightClientBootstrap { )] pub current_sync_committee_branch: FixedVector, #[superstruct( - only(Electra, Fulu, Gloas), + only(Electra, Fulu), partial_getter(rename = "current_sync_committee_branch_electra") )] pub current_sync_committee_branch: FixedVector, @@ -101,7 +98,6 @@ impl LightClientBootstrap { Self::Deneb(_) => func(ForkName::Deneb), Self::Electra(_) => func(ForkName::Electra), Self::Fulu(_) => func(ForkName::Fulu), - Self::Gloas(_) => func(ForkName::Gloas), } } @@ -121,8 +117,8 @@ impl LightClientBootstrap { ForkName::Deneb => Self::Deneb(LightClientBootstrapDeneb::from_ssz_bytes(bytes)?), ForkName::Electra => Self::Electra(LightClientBootstrapElectra::from_ssz_bytes(bytes)?), ForkName::Fulu => Self::Fulu(LightClientBootstrapFulu::from_ssz_bytes(bytes)?), - ForkName::Gloas => Self::Gloas(LightClientBootstrapGloas::from_ssz_bytes(bytes)?), - ForkName::Base => { + // TODO(gloas): implement Gloas light client + ForkName::Base | ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "LightClientBootstrap decoding for {fork_name} not implemented" ))); @@ -143,7 +139,8 @@ impl LightClientBootstrap { ForkName::Deneb => as Encode>::ssz_fixed_len(), ForkName::Electra => as Encode>::ssz_fixed_len(), ForkName::Fulu => as Encode>::ssz_fixed_len(), - ForkName::Gloas => as Encode>::ssz_fixed_len(), + // TODO(gloas): implement Gloas light client + 
ForkName::Gloas => as Encode>::ssz_fixed_len(), }; fixed_len + LightClientHeader::::ssz_max_var_len_for_fork(fork_name) } @@ -194,13 +191,8 @@ impl LightClientBootstrap { .try_into() .map_err(LightClientError::SszTypesError)?, }), - ForkName::Gloas => Self::Gloas(LightClientBootstrapGloas { - header: LightClientHeaderGloas::block_to_light_client_header(block)?, - current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch - .try_into() - .map_err(LightClientError::SszTypesError)?, - }), + // TODO(gloas): implement Gloas light client + ForkName::Gloas => return Err(LightClientError::GloasNotImplemented), }; Ok(light_client_bootstrap) @@ -254,13 +246,8 @@ impl LightClientBootstrap { .try_into() .map_err(LightClientError::SszTypesError)?, }), - ForkName::Gloas => Self::Gloas(LightClientBootstrapGloas { - header: LightClientHeaderGloas::block_to_light_client_header(block)?, - current_sync_committee, - current_sync_committee_branch: current_sync_committee_branch - .try_into() - .map_err(LightClientError::SszTypesError)?, - }), + // TODO(gloas): implement Gloas light client + ForkName::Gloas => return Err(LightClientError::GloasNotImplemented), }; Ok(light_client_bootstrap) @@ -301,7 +288,11 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientBootstrap Self::Fulu(Deserialize::deserialize(deserializer).map_err(convert_err)?) } ForkName::Gloas => { - Self::Gloas(Deserialize::deserialize(deserializer).map_err(convert_err)?) 
+ // TODO(EIP-7732): check if this is correct + return Err(serde::de::Error::custom(format!( + "LightClientBootstrap failed to deserialize: unsupported fork '{}'", + context + ))); } }) } @@ -339,10 +330,4 @@ mod tests { use crate::{LightClientBootstrapFulu, MainnetEthSpec}; ssz_tests!(LightClientBootstrapFulu); } - - #[cfg(test)] - mod gloas { - use crate::{LightClientBootstrapGloas, MainnetEthSpec}; - ssz_tests!(LightClientBootstrapGloas); - } } diff --git a/consensus/types/src/light_client/light_client_finality_update.rs b/consensus/types/src/light_client/light_client_finality_update.rs index 04374edcd96..b503785b851 100644 --- a/consensus/types/src/light_client/light_client_finality_update.rs +++ b/consensus/types/src/light_client/light_client_finality_update.rs @@ -16,14 +16,14 @@ use crate::{ light_client::{ FinalizedRootProofLen, FinalizedRootProofLenElectra, LightClientError, LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, - LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, + LightClientHeaderElectra, LightClientHeaderFulu, }, sync_committee::SyncAggregate, test_utils::TestRandom, }; #[superstruct( - variants(Altair, Capella, Deneb, Electra, Fulu, Gloas), + variants(Altair, Capella, Deneb, Electra, Fulu), variant_attributes( derive( Debug, @@ -68,8 +68,6 @@ pub struct LightClientFinalityUpdate { pub attested_header: LightClientHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "attested_header_fulu"))] pub attested_header: LightClientHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "attested_header_gloas"))] - pub attested_header: LightClientHeaderGloas, /// The last `BeaconBlockHeader` from the last attested finalized block (end of epoch). 
#[superstruct(only(Altair), partial_getter(rename = "finalized_header_altair"))] pub finalized_header: LightClientHeaderAltair, @@ -81,8 +79,6 @@ pub struct LightClientFinalityUpdate { pub finalized_header: LightClientHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "finalized_header_fulu"))] pub finalized_header: LightClientHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "finalized_header_gloas"))] - pub finalized_header: LightClientHeaderGloas, /// Merkle proof attesting finalized header. #[superstruct( only(Altair, Capella, Deneb), @@ -90,7 +86,7 @@ pub struct LightClientFinalityUpdate { )] pub finality_branch: FixedVector, #[superstruct( - only(Electra, Fulu, Gloas), + only(Electra, Fulu), partial_getter(rename = "finality_branch_electra") )] pub finality_branch: FixedVector, @@ -181,20 +177,7 @@ impl LightClientFinalityUpdate { sync_aggregate, signature_slot, }), - ForkName::Gloas => Self::Gloas(LightClientFinalityUpdateGloas { - attested_header: LightClientHeaderGloas::block_to_light_client_header( - attested_block, - )?, - finalized_header: LightClientHeaderGloas::block_to_light_client_header( - finalized_block, - )?, - finality_branch: finality_branch - .try_into() - .map_err(LightClientError::SszTypesError)?, - sync_aggregate, - signature_slot, - }), - + ForkName::Gloas => return Err(LightClientError::GloasNotImplemented), ForkName::Base => return Err(LightClientError::AltairForkNotActive), }; @@ -211,7 +194,6 @@ impl LightClientFinalityUpdate { Self::Deneb(_) => func(ForkName::Deneb), Self::Electra(_) => func(ForkName::Electra), Self::Fulu(_) => func(ForkName::Fulu), - Self::Gloas(_) => func(ForkName::Gloas), } } @@ -249,8 +231,8 @@ impl LightClientFinalityUpdate { Self::Electra(LightClientFinalityUpdateElectra::from_ssz_bytes(bytes)?) 
} ForkName::Fulu => Self::Fulu(LightClientFinalityUpdateFulu::from_ssz_bytes(bytes)?), - ForkName::Gloas => Self::Gloas(LightClientFinalityUpdateGloas::from_ssz_bytes(bytes)?), - ForkName::Base => { + // TODO(gloas): implement Gloas light client + ForkName::Base | ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "LightClientFinalityUpdate decoding for {fork_name} not implemented" ))); @@ -271,7 +253,8 @@ impl LightClientFinalityUpdate { ForkName::Deneb => as Encode>::ssz_fixed_len(), ForkName::Electra => as Encode>::ssz_fixed_len(), ForkName::Fulu => as Encode>::ssz_fixed_len(), - ForkName::Gloas => as Encode>::ssz_fixed_len(), + // TODO(gloas): implement Gloas light client + ForkName::Gloas => 0, }; // `2 *` because there are two headers in the update fixed_size + 2 * LightClientHeader::::ssz_max_var_len_for_fork(fork_name) @@ -325,7 +308,11 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientFinalityU Self::Fulu(Deserialize::deserialize(deserializer).map_err(convert_err)?) } ForkName::Gloas => { - Self::Gloas(Deserialize::deserialize(deserializer).map_err(convert_err)?) 
+ // TODO(EIP-7732): check if this is correct + return Err(serde::de::Error::custom(format!( + "LightClientBootstrap failed to deserialize: unsupported fork '{}'", + context + ))); } }) } @@ -363,10 +350,4 @@ mod tests { use crate::{LightClientFinalityUpdateFulu, MainnetEthSpec}; ssz_tests!(LightClientFinalityUpdateFulu); } - - #[cfg(test)] - mod gloas { - use crate::{LightClientFinalityUpdateGloas, MainnetEthSpec}; - ssz_tests!(LightClientFinalityUpdateGloas); - } } diff --git a/consensus/types/src/light_client/light_client_header.rs b/consensus/types/src/light_client/light_client_header.rs index a7ecd3b7fb2..fdf9f234efc 100644 --- a/consensus/types/src/light_client/light_client_header.rs +++ b/consensus/types/src/light_client/light_client_header.rs @@ -15,7 +15,7 @@ use crate::{ core::{ChainSpec, EthSpec, Hash256}, execution::{ ExecutionPayloadHeader, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, - ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, ExecutionPayloadHeaderGloas, + ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, }, fork::ForkName, light_client::{ExecutionPayloadProofLen, LightClientError, consts::EXECUTION_PAYLOAD_INDEX}, @@ -23,7 +23,7 @@ use crate::{ }; #[superstruct( - variants(Altair, Capella, Deneb, Electra, Fulu, Gloas), + variants(Altair, Capella, Deneb, Electra, Fulu,), variant_attributes( derive( Debug, @@ -73,10 +73,8 @@ pub struct LightClientHeader { pub execution: ExecutionPayloadHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "execution_payload_header_fulu"))] pub execution: ExecutionPayloadHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "execution_payload_header_gloas"))] - pub execution: ExecutionPayloadHeaderGloas, - #[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas))] + #[superstruct(only(Capella, Deneb, Electra, Fulu))] pub execution_branch: FixedVector, #[ssz(skip_serializing, skip_deserializing)] @@ -111,9 +109,8 @@ impl LightClientHeader { ForkName::Fulu => 
{ LightClientHeader::Fulu(LightClientHeaderFulu::block_to_light_client_header(block)?) } - ForkName::Gloas => LightClientHeader::Gloas( - LightClientHeaderGloas::block_to_light_client_header(block)?, - ), + // TODO(gloas): implement Gloas light client + ForkName::Gloas => return Err(LightClientError::GloasNotImplemented), }; Ok(header) } @@ -135,10 +132,8 @@ impl LightClientHeader { ForkName::Fulu => { LightClientHeader::Fulu(LightClientHeaderFulu::from_ssz_bytes(bytes)?) } - ForkName::Gloas => { - LightClientHeader::Gloas(LightClientHeaderGloas::from_ssz_bytes(bytes)?) - } - ForkName::Base => { + // TODO(gloas): implement Gloas light client + ForkName::Base | ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "LightClientHeader decoding for {fork_name} not implemented" ))); @@ -157,7 +152,10 @@ impl LightClientHeader { } pub fn ssz_max_var_len_for_fork(fork_name: ForkName) -> usize { - if fork_name.capella_enabled() { + if fork_name.gloas_enabled() { + // TODO(EIP7732): check this + 0 + } else if fork_name.capella_enabled() { ExecutionPayloadHeader::::ssz_max_var_len_for_fork(fork_name) } else { 0 @@ -353,48 +351,6 @@ impl Default for LightClientHeaderFulu { } } -impl LightClientHeaderGloas { - pub fn block_to_light_client_header( - block: &SignedBlindedBeaconBlock, - ) -> Result { - let payload = block - .message() - .execution_payload()? - .execution_payload_gloas()?; - - let header = ExecutionPayloadHeaderGloas::from(payload); - let beacon_block_body = BeaconBlockBody::from( - block - .message() - .body_gloas() - .map_err(|_| LightClientError::BeaconBlockBodyError)? 
- .to_owned(), - ); - - let execution_branch = beacon_block_body - .to_ref() - .block_body_merkle_proof(EXECUTION_PAYLOAD_INDEX)?; - - Ok(LightClientHeaderGloas { - beacon: block.message().block_header(), - execution: header, - execution_branch: FixedVector::new(execution_branch)?, - _phantom_data: PhantomData, - }) - } -} - -impl Default for LightClientHeaderGloas { - fn default() -> Self { - Self { - beacon: BeaconBlockHeader::empty(), - execution: ExecutionPayloadHeaderGloas::default(), - execution_branch: FixedVector::default(), - _phantom_data: PhantomData, - } - } -} - impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientHeader { fn context_deserialize(deserializer: D, context: ForkName) -> Result where @@ -407,7 +363,8 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientHeader )) }; Ok(match context { - ForkName::Base => { + // TODO(gloas): implement Gloas light client + ForkName::Base | ForkName::Gloas => { return Err(serde::de::Error::custom(format!( "LightClientFinalityUpdate failed to deserialize: unsupported fork '{}'", context @@ -428,9 +385,6 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientHeader ForkName::Fulu => { Self::Fulu(Deserialize::deserialize(deserializer).map_err(convert_err)?) } - ForkName::Gloas => { - Self::Gloas(Deserialize::deserialize(deserializer).map_err(convert_err)?) 
- } }) } } @@ -467,10 +421,4 @@ mod tests { use crate::{LightClientHeaderFulu, MainnetEthSpec}; ssz_tests!(LightClientHeaderFulu); } - - #[cfg(test)] - mod gloas { - use crate::{LightClientHeaderGloas, MainnetEthSpec}; - ssz_tests!(LightClientHeaderGloas); - } } diff --git a/consensus/types/src/light_client/light_client_optimistic_update.rs b/consensus/types/src/light_client/light_client_optimistic_update.rs index 9266ce647a4..139c4b6a08b 100644 --- a/consensus/types/src/light_client/light_client_optimistic_update.rs +++ b/consensus/types/src/light_client/light_client_optimistic_update.rs @@ -15,7 +15,6 @@ use crate::{ light_client::{ LightClientError, LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, LightClientHeaderElectra, LightClientHeaderFulu, - LightClientHeaderGloas, }, sync_committee::SyncAggregate, test_utils::TestRandom, @@ -24,7 +23,7 @@ use crate::{ /// A LightClientOptimisticUpdate is the update we send on each slot, /// it is based off the current unfinalized epoch is verified only against BLS signature. 
#[superstruct( - variants(Altair, Capella, Deneb, Electra, Fulu, Gloas), + variants(Altair, Capella, Deneb, Electra, Fulu), variant_attributes( derive( Debug, @@ -69,8 +68,6 @@ pub struct LightClientOptimisticUpdate { pub attested_header: LightClientHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "attested_header_fulu"))] pub attested_header: LightClientHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "attested_header_gloas"))] - pub attested_header: LightClientHeaderGloas, /// current sync aggregate pub sync_aggregate: SyncAggregate, /// Slot of the sync aggregated signature @@ -126,13 +123,7 @@ impl LightClientOptimisticUpdate { sync_aggregate, signature_slot, }), - ForkName::Gloas => Self::Gloas(LightClientOptimisticUpdateGloas { - attested_header: LightClientHeaderGloas::block_to_light_client_header( - attested_block, - )?, - sync_aggregate, - signature_slot, - }), + ForkName::Gloas => return Err(LightClientError::GloasNotImplemented), ForkName::Base => return Err(LightClientError::AltairForkNotActive), }; @@ -149,7 +140,6 @@ impl LightClientOptimisticUpdate { Self::Deneb(_) => func(ForkName::Deneb), Self::Electra(_) => func(ForkName::Electra), Self::Fulu(_) => func(ForkName::Fulu), - Self::Gloas(_) => func(ForkName::Gloas), } } @@ -189,10 +179,8 @@ impl LightClientOptimisticUpdate { Self::Electra(LightClientOptimisticUpdateElectra::from_ssz_bytes(bytes)?) } ForkName::Fulu => Self::Fulu(LightClientOptimisticUpdateFulu::from_ssz_bytes(bytes)?), - ForkName::Gloas => { - Self::Gloas(LightClientOptimisticUpdateGloas::from_ssz_bytes(bytes)?) 
- } - ForkName::Base => { + // TODO(gloas): implement Gloas light client + ForkName::Base | ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "LightClientOptimisticUpdate decoding for {fork_name} not implemented" ))); @@ -213,7 +201,8 @@ impl LightClientOptimisticUpdate { ForkName::Deneb => as Encode>::ssz_fixed_len(), ForkName::Electra => as Encode>::ssz_fixed_len(), ForkName::Fulu => as Encode>::ssz_fixed_len(), - ForkName::Gloas => as Encode>::ssz_fixed_len(), + // TODO(gloas): implement Gloas light client + ForkName::Gloas => 0, }; fixed_len + LightClientHeader::::ssz_max_var_len_for_fork(fork_name) } @@ -266,7 +255,11 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientOptimisti Self::Fulu(Deserialize::deserialize(deserializer).map_err(convert_err)?) } ForkName::Gloas => { - Self::Gloas(Deserialize::deserialize(deserializer).map_err(convert_err)?) + // TODO(EIP-7732): check if this is correct + return Err(serde::de::Error::custom(format!( + "LightClientBootstrap failed to deserialize: unsupported fork '{}'", + context + ))); } }) } @@ -304,10 +297,4 @@ mod tests { use crate::{LightClientOptimisticUpdateFulu, MainnetEthSpec}; ssz_tests!(LightClientOptimisticUpdateFulu); } - - #[cfg(test)] - mod gloas { - use crate::{LightClientOptimisticUpdateGloas, MainnetEthSpec}; - ssz_tests!(LightClientOptimisticUpdateGloas); - } } diff --git a/consensus/types/src/light_client/light_client_update.rs b/consensus/types/src/light_client/light_client_update.rs index aa7b800cc89..cd33f6ae547 100644 --- a/consensus/types/src/light_client/light_client_update.rs +++ b/consensus/types/src/light_client/light_client_update.rs @@ -21,7 +21,6 @@ use crate::{ light_client::{ LightClientError, LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, LightClientHeaderElectra, LightClientHeaderFulu, - LightClientHeaderGloas, }, sync_committee::{SyncAggregate, SyncCommittee}, test_utils::TestRandom, @@ -46,7 
+45,7 @@ type NextSyncCommitteeBranchElectra = FixedVector { pub attested_header: LightClientHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "attested_header_fulu"))] pub attested_header: LightClientHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "attested_header_gloas"))] - pub attested_header: LightClientHeaderGloas, /// The `SyncCommittee` used in the next period. pub next_sync_committee: Arc>, // Merkle proof for next sync committee @@ -102,7 +99,7 @@ pub struct LightClientUpdate { )] pub next_sync_committee_branch: NextSyncCommitteeBranch, #[superstruct( - only(Electra, Fulu, Gloas), + only(Electra, Fulu), partial_getter(rename = "next_sync_committee_branch_electra") )] pub next_sync_committee_branch: NextSyncCommitteeBranchElectra, @@ -117,8 +114,6 @@ pub struct LightClientUpdate { pub finalized_header: LightClientHeaderElectra, #[superstruct(only(Fulu), partial_getter(rename = "finalized_header_fulu"))] pub finalized_header: LightClientHeaderFulu, - #[superstruct(only(Gloas), partial_getter(rename = "finalized_header_gloas"))] - pub finalized_header: LightClientHeaderGloas, /// Merkle proof attesting finalized header. 
#[superstruct( only(Altair, Capella, Deneb), @@ -126,7 +121,7 @@ pub struct LightClientUpdate { )] pub finality_branch: FinalityBranch, #[superstruct( - only(Electra, Fulu, Gloas), + only(Electra, Fulu), partial_getter(rename = "finality_branch_electra") )] pub finality_branch: FinalityBranchElectra, @@ -145,7 +140,8 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientUpdate serde::de::Error::custom(format!("LightClientUpdate failed to deserialize: {:?}", e)) }; Ok(match context { - ForkName::Base => { + // TODO(gloas): implement Gloas light client + ForkName::Base | ForkName::Gloas => { return Err(serde::de::Error::custom(format!( "LightClientUpdate failed to deserialize: unsupported fork '{}'", context @@ -166,9 +162,6 @@ impl<'de, E: EthSpec> ContextDeserialize<'de, ForkName> for LightClientUpdate ForkName::Fulu => { Self::Fulu(Deserialize::deserialize(deserializer).map_err(convert_err)?) } - ForkName::Gloas => { - Self::Gloas(Deserialize::deserialize(deserializer).map_err(convert_err)?) - } }) } } @@ -330,36 +323,11 @@ impl LightClientUpdate { signature_slot: block_slot, }) } - fork_name @ ForkName::Gloas => { - let attested_header = - LightClientHeaderGloas::block_to_light_client_header(attested_block)?; - - let finalized_header = if let Some(finalized_block) = finalized_block { - if finalized_block.fork_name_unchecked() == fork_name { - LightClientHeaderGloas::block_to_light_client_header(finalized_block)? 
- } else { - LightClientHeaderGloas::default() - } - } else { - LightClientHeaderGloas::default() - }; - - Self::Gloas(LightClientUpdateGloas { - attested_header, - next_sync_committee, - next_sync_committee_branch: next_sync_committee_branch - .try_into() - .map_err(LightClientError::SszTypesError)?, - finalized_header, - finality_branch: finality_branch - .try_into() - .map_err(LightClientError::SszTypesError)?, - sync_aggregate: sync_aggregate.clone(), - signature_slot: block_slot, - }) - } // To add a new fork, just append the new fork variant on the latest fork. Forks that - // have a distinct execution header will need a new LightClientUpdate variant only - // if you need to test or support lightclient usages + // To add a new fork, just append the new fork variant on the latest fork. Forks that + // have a distinct execution header will need a new LightClientUpdate variant only + // if you need to test or support lightclient usages + // TODO(gloas): implement Gloas light client + ForkName::Gloas => return Err(LightClientError::GloasNotImplemented), }; Ok(light_client_update) @@ -374,8 +342,8 @@ impl LightClientUpdate { ForkName::Deneb => Self::Deneb(LightClientUpdateDeneb::from_ssz_bytes(bytes)?), ForkName::Electra => Self::Electra(LightClientUpdateElectra::from_ssz_bytes(bytes)?), ForkName::Fulu => Self::Fulu(LightClientUpdateFulu::from_ssz_bytes(bytes)?), - ForkName::Gloas => Self::Gloas(LightClientUpdateGloas::from_ssz_bytes(bytes)?), - ForkName::Base => { + // TODO(gloas): implement Gloas light client + ForkName::Base | ForkName::Gloas => { return Err(ssz::DecodeError::BytesInvalid(format!( "LightClientUpdate decoding for {fork_name} not implemented" ))); @@ -392,7 +360,6 @@ impl LightClientUpdate { LightClientUpdate::Deneb(update) => update.attested_header.beacon.slot, LightClientUpdate::Electra(update) => update.attested_header.beacon.slot, LightClientUpdate::Fulu(update) => update.attested_header.beacon.slot, - LightClientUpdate::Gloas(update) => 
update.attested_header.beacon.slot, } } @@ -403,7 +370,6 @@ impl LightClientUpdate { LightClientUpdate::Deneb(update) => update.finalized_header.beacon.slot, LightClientUpdate::Electra(update) => update.finalized_header.beacon.slot, LightClientUpdate::Fulu(update) => update.finalized_header.beacon.slot, - LightClientUpdate::Gloas(update) => update.finalized_header.beacon.slot, } } @@ -533,7 +499,8 @@ impl LightClientUpdate { ForkName::Deneb => as Encode>::ssz_fixed_len(), ForkName::Electra => as Encode>::ssz_fixed_len(), ForkName::Fulu => as Encode>::ssz_fixed_len(), - ForkName::Gloas => as Encode>::ssz_fixed_len(), + // TODO(gloas): implement Gloas light client + ForkName::Gloas => 0, }; fixed_len + 2 * LightClientHeader::::ssz_max_var_len_for_fork(fork_name) } @@ -548,7 +515,6 @@ impl LightClientUpdate { Self::Deneb(_) => func(ForkName::Deneb), Self::Electra(_) => func(ForkName::Electra), Self::Fulu(_) => func(ForkName::Fulu), - Self::Gloas(_) => func(ForkName::Gloas), } } } @@ -612,13 +578,6 @@ mod tests { ssz_tests!(LightClientUpdateFulu); } - #[cfg(test)] - mod gloas { - use super::*; - use crate::MainnetEthSpec; - ssz_tests!(LightClientUpdateGloas); - } - #[test] fn finalized_root_params() { assert!(2usize.pow(FINALIZED_ROOT_PROOF_LEN as u32) <= FINALIZED_ROOT_INDEX); diff --git a/consensus/types/src/light_client/mod.rs b/consensus/types/src/light_client/mod.rs index 4e287c22942..24f3fdbb55e 100644 --- a/consensus/types/src/light_client/mod.rs +++ b/consensus/types/src/light_client/mod.rs @@ -11,27 +11,25 @@ pub use error::LightClientError; pub use light_client_bootstrap::{ LightClientBootstrap, LightClientBootstrapAltair, LightClientBootstrapCapella, LightClientBootstrapDeneb, LightClientBootstrapElectra, LightClientBootstrapFulu, - LightClientBootstrapGloas, }; pub use light_client_finality_update::{ LightClientFinalityUpdate, LightClientFinalityUpdateAltair, LightClientFinalityUpdateCapella, LightClientFinalityUpdateDeneb, LightClientFinalityUpdateElectra, 
- LightClientFinalityUpdateFulu, LightClientFinalityUpdateGloas, + LightClientFinalityUpdateFulu, }; pub use light_client_header::{ LightClientHeader, LightClientHeaderAltair, LightClientHeaderCapella, LightClientHeaderDeneb, - LightClientHeaderElectra, LightClientHeaderFulu, LightClientHeaderGloas, + LightClientHeaderElectra, LightClientHeaderFulu, }; pub use light_client_optimistic_update::{ LightClientOptimisticUpdate, LightClientOptimisticUpdateAltair, LightClientOptimisticUpdateCapella, LightClientOptimisticUpdateDeneb, LightClientOptimisticUpdateElectra, LightClientOptimisticUpdateFulu, - LightClientOptimisticUpdateGloas, }; pub use light_client_update::{ CurrentSyncCommitteeProofLen, CurrentSyncCommitteeProofLenElectra, ExecutionPayloadProofLen, FinalizedRootProofLen, FinalizedRootProofLenElectra, LightClientUpdate, LightClientUpdateAltair, LightClientUpdateCapella, LightClientUpdateDeneb, - LightClientUpdateElectra, LightClientUpdateFulu, LightClientUpdateGloas, - NextSyncCommitteeProofLen, NextSyncCommitteeProofLenElectra, + LightClientUpdateElectra, LightClientUpdateFulu, NextSyncCommitteeProofLen, + NextSyncCommitteeProofLenElectra, }; diff --git a/consensus/types/src/state/beacon_state.rs b/consensus/types/src/state/beacon_state.rs index f36c02ce6bd..c1b6f0dc0c1 100644 --- a/consensus/types/src/state/beacon_state.rs +++ b/consensus/types/src/state/beacon_state.rs @@ -23,6 +23,7 @@ use tree_hash_derive::TreeHash; use typenum::Unsigned; use crate::{ + BuilderPendingPayment, BuilderPendingWithdrawal, ExecutionBlockHash, ExecutionPayloadBid, attestation::{ AttestationDuty, BeaconCommittee, Checkpoint, CommitteeIndex, ParticipationFlags, PendingAttestation, @@ -34,7 +35,7 @@ use crate::{ execution::{ Eth1Data, ExecutionPayloadHeaderBellatrix, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderDeneb, ExecutionPayloadHeaderElectra, ExecutionPayloadHeaderFulu, - ExecutionPayloadHeaderGloas, ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, + 
ExecutionPayloadHeaderRef, ExecutionPayloadHeaderRefMut, }, fork::{Fork, ForkName, ForkVersionDecode, InconsistentFork, map_fork_name}, light_client::consts::{ @@ -542,14 +543,9 @@ where )] #[metastruct(exclude_from(tree_lists))] pub latest_execution_payload_header: ExecutionPayloadHeaderFulu, - #[superstruct( - only(Gloas), - partial_getter(rename = "latest_execution_payload_header_gloas") - )] + #[superstruct(only(Gloas))] #[metastruct(exclude_from(tree_lists))] - pub latest_execution_payload_header: ExecutionPayloadHeaderGloas, - - // Capella + pub latest_execution_payload_bid: ExecutionPayloadBid, #[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas), partial_getter(copy))] #[serde(with = "serde_utils::quoted_u64")] #[metastruct(exclude_from(tree_lists))] @@ -608,6 +604,31 @@ where pub proposer_lookahead: Vector, // Gloas + #[test_random(default)] + #[superstruct(only(Gloas))] + #[metastruct(exclude_from(tree_lists))] + pub execution_payload_availability: BitVector, + + #[compare_fields(as_iter)] + #[test_random(default)] + #[superstruct(only(Gloas))] + pub builder_pending_payments: Vector, + + #[compare_fields(as_iter)] + #[test_random(default)] + #[superstruct(only(Gloas))] + pub builder_pending_withdrawals: + List, + + #[test_random(default)] + #[superstruct(only(Gloas))] + #[metastruct(exclude_from(tree_lists))] + pub latest_block_hash: ExecutionBlockHash, + + #[test_random(default)] + #[superstruct(only(Gloas))] + #[metastruct(exclude_from(tree_lists))] + pub latest_withdrawals_root: Hash256, // Caching (not in the spec) #[serde(skip_serializing, skip_deserializing)] @@ -1169,9 +1190,8 @@ impl BeaconState { BeaconState::Fulu(state) => Ok(ExecutionPayloadHeaderRef::Fulu( &state.latest_execution_payload_header, )), - BeaconState::Gloas(state) => Ok(ExecutionPayloadHeaderRef::Gloas( - &state.latest_execution_payload_header, - )), + // TODO(EIP-7732): investigate calling functions + BeaconState::Gloas(_) => Err(BeaconStateError::IncorrectStateVariant), } } 
@@ -1197,9 +1217,8 @@ impl BeaconState { BeaconState::Fulu(state) => Ok(ExecutionPayloadHeaderRefMut::Fulu( &mut state.latest_execution_payload_header, )), - BeaconState::Gloas(state) => Ok(ExecutionPayloadHeaderRefMut::Gloas( - &mut state.latest_execution_payload_header, - )), + // TODO(EIP-7732): investigate calling functions + BeaconState::Gloas(_) => Err(BeaconStateError::IncorrectStateVariant), } } @@ -2273,6 +2292,21 @@ impl BeaconState { } } + pub fn is_parent_block_full(&self) -> bool { + match self { + BeaconState::Base(_) | BeaconState::Altair(_) => false, + // TODO(EIP-7732): check the implications of this when we get to forkchoice modifications + BeaconState::Bellatrix(_) + | BeaconState::Capella(_) + | BeaconState::Deneb(_) + | BeaconState::Electra(_) + | BeaconState::Fulu(_) => true, + BeaconState::Gloas(state) => { + state.latest_execution_payload_bid.block_hash == state.latest_block_hash + } + } + } + /// Get the committee cache for some `slot`. /// /// Return an error if the cache for the slot's epoch is not initialized. 
diff --git a/testing/ef_tests/src/cases/operations.rs b/testing/ef_tests/src/cases/operations.rs index 379fcb1bb4d..a53bce927cb 100644 --- a/testing/ef_tests/src/cases/operations.rs +++ b/testing/ef_tests/src/cases/operations.rs @@ -307,6 +307,7 @@ impl Operation for BeaconBlockBody> { ForkName::Deneb => BeaconBlockBody::Deneb(<_>::from_ssz_bytes(bytes)?), ForkName::Electra => BeaconBlockBody::Electra(<_>::from_ssz_bytes(bytes)?), ForkName::Fulu => BeaconBlockBody::Fulu(<_>::from_ssz_bytes(bytes)?), + // TODO(EIP-7732): See if we need to handle Gloas here _ => panic!(), }) }) @@ -366,6 +367,7 @@ impl Operation for BeaconBlockBody> { let inner = >>::from_ssz_bytes(bytes)?; BeaconBlockBody::Fulu(inner.clone_as_blinded()) } + // TODO(EIP-7732): See if we need to handle Gloas here _ => panic!(), }) }) @@ -417,6 +419,7 @@ impl Operation for WithdrawalsPayload { spec: &ChainSpec, _: &Operations, ) -> Result<(), BlockProcessingError> { + // TODO(EIP-7732): implement separate gloas and non-gloas variants of process_withdrawals process_withdrawals::<_, FullPayload<_>>(state, self.payload.to_ref(), spec) } } From 4c268bc0d576da5f3d0543aa431b7c09c40d536c Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Tue, 16 Dec 2025 20:02:31 +1100 Subject: [PATCH 71/74] Delete `PartialBeaconState` (#8591) While reviewing Gloas I noticed we were updating `PartialBeaconState`. This code isn't used since v7.1.0 introduced hdiffs, so we can delete it and stop maintaining it :tada: Similarly the `chunked_vector`/`chunked_iter` code can also go! 
Co-Authored-By: Michael Sproul Co-Authored-By: Pawan Dhananjay --- beacon_node/store/src/chunked_iter.rs | 120 --- beacon_node/store/src/chunked_vector.rs | 922 ------------------ beacon_node/store/src/errors.rs | 9 - beacon_node/store/src/lib.rs | 3 - beacon_node/store/src/partial_beacon_state.rs | 511 ---------- 5 files changed, 1565 deletions(-) delete mode 100644 beacon_node/store/src/chunked_iter.rs delete mode 100644 beacon_node/store/src/chunked_vector.rs delete mode 100644 beacon_node/store/src/partial_beacon_state.rs diff --git a/beacon_node/store/src/chunked_iter.rs b/beacon_node/store/src/chunked_iter.rs deleted file mode 100644 index 72e5d9c7af0..00000000000 --- a/beacon_node/store/src/chunked_iter.rs +++ /dev/null @@ -1,120 +0,0 @@ -use crate::chunked_vector::{Chunk, Field, chunk_key}; -use crate::{HotColdDB, ItemStore}; -use tracing::error; -use types::{ChainSpec, EthSpec, Slot}; - -/// Iterator over the values of a `BeaconState` vector field (like `block_roots`). -/// -/// Uses the freezer DB's separate table to load the values. -pub struct ChunkedVectorIter<'a, F, E, Hot, Cold> -where - F: Field, - E: EthSpec, - Hot: ItemStore, - Cold: ItemStore, -{ - pub(crate) store: &'a HotColdDB, - current_vindex: usize, - pub(crate) end_vindex: usize, - next_cindex: usize, - current_chunk: Chunk, -} - -impl<'a, F, E, Hot, Cold> ChunkedVectorIter<'a, F, E, Hot, Cold> -where - F: Field, - E: EthSpec, - Hot: ItemStore, - Cold: ItemStore, -{ - /// Create a new iterator which can yield elements from `start_vindex` up to the last - /// index stored by the restore point at `last_restore_point_slot`. - /// - /// The `freezer_upper_limit` slot should be the slot of a recent restore point as obtained from - /// `Root::freezer_upper_limit`. We pass it as a parameter so that the caller can - /// maintain a stable view of the database (see `HybridForwardsBlockRootsIterator`). 
- pub fn new( - store: &'a HotColdDB, - start_vindex: usize, - freezer_upper_limit: Slot, - spec: &ChainSpec, - ) -> Self { - let (_, end_vindex) = F::start_and_end_vindex(freezer_upper_limit, spec); - - // Set the next chunk to the one containing `start_vindex`. - let next_cindex = start_vindex / F::chunk_size(); - // Set the current chunk to the empty chunk, it will never be read. - let current_chunk = Chunk::default(); - - Self { - store, - current_vindex: start_vindex, - end_vindex, - next_cindex, - current_chunk, - } - } -} - -impl Iterator for ChunkedVectorIter<'_, F, E, Hot, Cold> -where - F: Field, - E: EthSpec, - Hot: ItemStore, - Cold: ItemStore, -{ - type Item = (usize, F::Value); - - fn next(&mut self) -> Option { - let chunk_size = F::chunk_size(); - - // Range exhausted, return `None` forever. - if self.current_vindex >= self.end_vindex { - None - } - // Value lies in the current chunk, return it. - else if self.current_vindex < self.next_cindex * chunk_size { - let vindex = self.current_vindex; - let val = self - .current_chunk - .values - .get(vindex % chunk_size) - .cloned() - .or_else(|| { - error!( - vector_index = vindex, - "Missing chunk value in forwards iterator" - ); - None - })?; - self.current_vindex += 1; - Some((vindex, val)) - } - // Need to load the next chunk, load it and recurse back into the in-range case. - else { - self.current_chunk = Chunk::load( - &self.store.cold_db, - F::column(), - &chunk_key(self.next_cindex), - ) - .map_err(|e| { - error!( - chunk_index = self.next_cindex, - error = ?e, - "Database error in forwards iterator" - ); - e - }) - .ok()? 
- .or_else(|| { - error!( - chunk_index = self.next_cindex, - "Missing chunk in forwards iterator" - ); - None - })?; - self.next_cindex += 1; - self.next() - } - } -} diff --git a/beacon_node/store/src/chunked_vector.rs b/beacon_node/store/src/chunked_vector.rs deleted file mode 100644 index 9c8114e0c14..00000000000 --- a/beacon_node/store/src/chunked_vector.rs +++ /dev/null @@ -1,922 +0,0 @@ -//! Space-efficient storage for `BeaconState` vector fields. -//! -//! This module provides logic for splitting the `Vector` fields of a `BeaconState` into -//! chunks, and storing those chunks in contiguous ranges in the on-disk database. The motiviation -//! for doing this is avoiding massive duplication in every on-disk state. For example, rather than -//! storing the whole `historical_roots` vector, which is updated once every couple of thousand -//! slots, at every slot, we instead store all the historical values as a chunked vector on-disk, -//! and fetch only the slice we need when reconstructing the `historical_roots` of a state. -//! -//! ## Terminology -//! -//! * **Chunk size**: the number of vector values stored per on-disk chunk. -//! * **Vector index** (vindex): index into all the historical values, identifying a single element -//! of the vector being stored. -//! * **Chunk index** (cindex): index into the keyspace of the on-disk database, identifying a chunk -//! of elements. To find the chunk index of a vector index: `cindex = vindex / chunk_size`. -use self::UpdatePattern::*; -use crate::*; -use milhouse::{List, Vector}; -use ssz::{Decode, Encode}; -use typenum::Unsigned; -use types::historical_summary::HistoricalSummary; - -/// Description of how a `BeaconState` field is updated during state processing. -/// -/// When storing a state, this allows us to efficiently store only those entries -/// which are not present in the DB already. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum UpdatePattern { - /// The value is updated once per `n` slots. 
- OncePerNSlots { - n: u64, - /// The slot at which the field begins to accumulate values. - /// - /// The field should not be read or written until `activation_slot` is reached, and the - /// activation slot should act as an offset when converting slots to vector indices. - activation_slot: Option, - /// The slot at which the field ceases to accumulate values. - /// - /// If this is `None` then the field is continually updated. - deactivation_slot: Option, - }, - /// The value is updated once per epoch, for the epoch `current_epoch - lag`. - OncePerEpoch { lag: u64 }, -} - -/// Map a chunk index to bytes that can be used to key the NoSQL database. -/// -/// We shift chunks up by 1 to make room for a genesis chunk that is handled separately. -pub fn chunk_key(cindex: usize) -> [u8; 8] { - (cindex as u64 + 1).to_be_bytes() -} - -/// Return the database key for the genesis value. -fn genesis_value_key() -> [u8; 8] { - 0u64.to_be_bytes() -} - -/// Trait for types representing fields of the `BeaconState`. -/// -/// All of the required methods are type-level, because we do most things with fields at the -/// type-level. We require their value-level witnesses to be `Copy` so that we can avoid the -/// turbofish when calling functions like `store_updated_vector`. -pub trait Field: Copy { - /// The type of value stored in this field: the `T` from `Vector`. - /// - /// The `Default` impl will be used to fill extra vector entries. - type Value: Default + std::fmt::Debug + milhouse::Value; - // Decode + Encode + Default + Clone + PartialEq + std::fmt::Debug - - /// The length of this field: the `N` from `Vector`. - type Length: Unsigned; - - /// The database column where the integer-indexed chunks for this field should be stored. - /// - /// Each field's column **must** be unique. - fn column() -> DBColumn; - - /// Update pattern for this field, so that we can do differential updates. 
- fn update_pattern(spec: &ChainSpec) -> UpdatePattern; - - /// The number of values to store per chunk on disk. - /// - /// Default is 128 so that we read/write 4K pages when the values are 32 bytes. - // TODO: benchmark and optimise this parameter - fn chunk_size() -> usize { - 128 - } - - /// Convert a v-index (vector index) to a chunk index. - fn chunk_index(vindex: usize) -> usize { - vindex / Self::chunk_size() - } - - /// Get the value of this field at the given vector index, from the state. - fn get_value( - state: &BeaconState, - vindex: u64, - spec: &ChainSpec, - ) -> Result; - - /// True if this is a `FixedLengthField`, false otherwise. - fn is_fixed_length() -> bool; - - /// Compute the start and end vector indices of the slice of history required at `current_slot`. - /// - /// ## Example - /// - /// If we have a field that is updated once per epoch, then the end vindex will be - /// `current_epoch + 1`, because we want to include the value for the current epoch, and the - /// start vindex will be `end_vindex - Self::Length`, because that's how far back we can look. - fn start_and_end_vindex(current_slot: Slot, spec: &ChainSpec) -> (usize, usize) { - // We take advantage of saturating subtraction on slots and epochs - match Self::update_pattern(spec) { - OncePerNSlots { - n, - activation_slot, - deactivation_slot, - } => { - // Per-slot changes exclude the index for the current slot, because - // it won't be set until the slot completes (think of `state_roots`, `block_roots`). - // This also works for the `historical_roots` because at the `n`th slot, the 0th - // entry of the list is created, and before that the list is empty. - // - // To account for the switch from historical roots to historical summaries at - // Capella we also modify the current slot by the activation and deactivation slots. - // The activation slot acts as an offset (subtraction) while the deactivation slot - // acts as a clamp (min). 
- let slot_with_clamp = deactivation_slot.map_or(current_slot, |deactivation_slot| { - std::cmp::min(current_slot, deactivation_slot) - }); - let slot_with_clamp_and_offset = if let Some(activation_slot) = activation_slot { - slot_with_clamp - activation_slot - } else { - // Return (0, 0) to indicate that the field should not be read/written. - return (0, 0); - }; - let end_vindex = slot_with_clamp_and_offset / n; - let start_vindex = end_vindex - Self::Length::to_u64(); - (start_vindex.as_usize(), end_vindex.as_usize()) - } - OncePerEpoch { lag } => { - // Per-epoch changes include the index for the current epoch, because it - // will have been set at the most recent epoch boundary. - let current_epoch = current_slot.epoch(E::slots_per_epoch()); - let end_epoch = current_epoch + 1 - lag; - let start_epoch = end_epoch + lag - Self::Length::to_u64(); - (start_epoch.as_usize(), end_epoch.as_usize()) - } - } - } - - /// Given an `existing_chunk` stored in the DB, construct an updated chunk to replace it. 
- fn get_updated_chunk( - existing_chunk: &Chunk, - chunk_index: usize, - start_vindex: usize, - end_vindex: usize, - state: &BeaconState, - spec: &ChainSpec, - ) -> Result, Error> { - let chunk_size = Self::chunk_size(); - let mut new_chunk = Chunk::new(vec![Self::Value::default(); chunk_size]); - - for i in 0..chunk_size { - let vindex = chunk_index * chunk_size + i; - if vindex >= start_vindex && vindex < end_vindex { - let vector_value = Self::get_value(state, vindex as u64, spec)?; - - if let Some(existing_value) = existing_chunk.values.get(i) - && *existing_value != vector_value - && *existing_value != Self::Value::default() - { - return Err(ChunkError::Inconsistent { - field: Self::column(), - chunk_index, - existing_value: format!("{:?}", existing_value), - new_value: format!("{:?}", vector_value), - } - .into()); - } - - new_chunk.values[i] = vector_value; - } else { - new_chunk.values[i] = existing_chunk.values.get(i).cloned().unwrap_or_default(); - } - } - - Ok(new_chunk) - } - - /// Determine whether a state at `slot` possesses (or requires) the genesis value. - fn slot_needs_genesis_value(slot: Slot, spec: &ChainSpec) -> bool { - let (_, end_vindex) = Self::start_and_end_vindex(slot, spec); - match Self::update_pattern(spec) { - // If the end_vindex is less than the length of the vector, then the vector - // has not yet been completely filled with non-genesis values, and so the genesis - // value is still required. - OncePerNSlots { .. } => { - Self::is_fixed_length() && end_vindex < Self::Length::to_usize() - } - // If the field has lag, then it takes an extra `lag` vindices beyond the - // `end_vindex` before the vector has been filled with non-genesis values. - OncePerEpoch { lag } => { - Self::is_fixed_length() && end_vindex + (lag as usize) < Self::Length::to_usize() - } - } - } - - /// Load the genesis value for a fixed length field from the store. - /// - /// This genesis value should be used to fill the initial state of the vector. 
- fn load_genesis_value>(store: &S) -> Result { - let key = &genesis_value_key()[..]; - let chunk = - Chunk::load(store, Self::column(), key)?.ok_or(ChunkError::MissingGenesisValue)?; - chunk - .values - .first() - .cloned() - .ok_or_else(|| ChunkError::MissingGenesisValue.into()) - } - - /// Store the given `value` as the genesis value for this field, unless stored already. - /// - /// Check the existing value (if any) for consistency with the value we intend to store, and - /// return an error if they are inconsistent. - fn check_and_store_genesis_value>( - store: &S, - value: Self::Value, - ops: &mut Vec, - ) -> Result<(), Error> { - let key = &genesis_value_key()[..]; - - if let Some(existing_chunk) = Chunk::::load(store, Self::column(), key)? { - if existing_chunk.values.len() != 1 { - Err(ChunkError::InvalidGenesisChunk { - field: Self::column(), - expected_len: 1, - observed_len: existing_chunk.values.len(), - } - .into()) - } else if existing_chunk.values[0] != value { - Err(ChunkError::InconsistentGenesisValue { - field: Self::column(), - existing_value: format!("{:?}", existing_chunk.values[0]), - new_value: format!("{:?}", value), - } - .into()) - } else { - Ok(()) - } - } else { - let chunk = Chunk::new(vec![value]); - chunk.store(Self::column(), &genesis_value_key()[..], ops)?; - Ok(()) - } - } - - /// Extract the genesis value for a fixed length field from an - /// - /// Will only return a correct value if `slot_needs_genesis_value(state.slot(), spec) == true`. - fn extract_genesis_value( - state: &BeaconState, - spec: &ChainSpec, - ) -> Result { - let (_, end_vindex) = Self::start_and_end_vindex(state.slot(), spec); - match Self::update_pattern(spec) { - // Genesis value is guaranteed to exist at `end_vindex`, as it won't yet have been - // updated - OncePerNSlots { .. } => Ok(Self::get_value(state, end_vindex as u64, spec)?), - // If there's lag, the value of the field at the vindex *without the lag* - // should still be set to the genesis value. 
- OncePerEpoch { lag } => Ok(Self::get_value(state, end_vindex as u64 + lag, spec)?), - } - } -} - -/// Marker trait for fixed-length fields (`Vector`). -pub trait FixedLengthField: Field {} - -/// Marker trait for variable-length fields (`List`). -pub trait VariableLengthField: Field {} - -/// Macro to implement the `Field` trait on a new unit struct type. -macro_rules! field { - ($struct_name:ident, $marker_trait:ident, $value_ty:ty, $length_ty:ty, $column:expr, - $update_pattern:expr, $get_value:expr) => { - #[derive(Clone, Copy)] - pub struct $struct_name; - - impl Field for $struct_name - where - E: EthSpec, - { - type Value = $value_ty; - type Length = $length_ty; - - fn column() -> DBColumn { - $column - } - - fn update_pattern(spec: &ChainSpec) -> UpdatePattern { - let update_pattern = $update_pattern; - update_pattern(spec) - } - - fn get_value( - state: &BeaconState, - vindex: u64, - spec: &ChainSpec, - ) -> Result { - let get_value = $get_value; - get_value(state, vindex, spec) - } - - fn is_fixed_length() -> bool { - stringify!($marker_trait) == "FixedLengthField" - } - } - - impl $marker_trait for $struct_name {} - }; -} - -field!( - BlockRootsChunked, - FixedLengthField, - Hash256, - E::SlotsPerHistoricalRoot, - DBColumn::BeaconBlockRootsChunked, - |_| OncePerNSlots { - n: 1, - activation_slot: Some(Slot::new(0)), - deactivation_slot: None - }, - |state: &BeaconState<_>, index, _| safe_modulo_vector_index(state.block_roots(), index) -); - -field!( - StateRootsChunked, - FixedLengthField, - Hash256, - E::SlotsPerHistoricalRoot, - DBColumn::BeaconStateRootsChunked, - |_| OncePerNSlots { - n: 1, - activation_slot: Some(Slot::new(0)), - deactivation_slot: None, - }, - |state: &BeaconState<_>, index, _| safe_modulo_vector_index(state.state_roots(), index) -); - -field!( - HistoricalRoots, - VariableLengthField, - Hash256, - E::HistoricalRootsLimit, - DBColumn::BeaconHistoricalRoots, - |spec: &ChainSpec| OncePerNSlots { - n: 
E::SlotsPerHistoricalRoot::to_u64(), - activation_slot: Some(Slot::new(0)), - deactivation_slot: spec - .capella_fork_epoch - .map(|fork_epoch| fork_epoch.start_slot(E::slots_per_epoch())), - }, - |state: &BeaconState<_>, index, _| safe_modulo_list_index(state.historical_roots(), index) -); - -field!( - RandaoMixes, - FixedLengthField, - Hash256, - E::EpochsPerHistoricalVector, - DBColumn::BeaconRandaoMixes, - |_| OncePerEpoch { lag: 1 }, - |state: &BeaconState<_>, index, _| safe_modulo_vector_index(state.randao_mixes(), index) -); - -field!( - HistoricalSummaries, - VariableLengthField, - HistoricalSummary, - E::HistoricalRootsLimit, - DBColumn::BeaconHistoricalSummaries, - |spec: &ChainSpec| OncePerNSlots { - n: E::SlotsPerHistoricalRoot::to_u64(), - activation_slot: spec - .capella_fork_epoch - .map(|fork_epoch| fork_epoch.start_slot(E::slots_per_epoch())), - deactivation_slot: None, - }, - |state: &BeaconState<_>, index, _| safe_modulo_list_index( - state - .historical_summaries() - .map_err(|_| ChunkError::InvalidFork)?, - index - ) -); - -pub fn store_updated_vector, E: EthSpec, S: KeyValueStore>( - field: F, - store: &S, - state: &BeaconState, - spec: &ChainSpec, - ops: &mut Vec, -) -> Result<(), Error> { - let chunk_size = F::chunk_size(); - let (start_vindex, end_vindex) = F::start_and_end_vindex(state.slot(), spec); - let start_cindex = start_vindex / chunk_size; - let end_cindex = end_vindex / chunk_size; - - // Store the genesis value if we have access to it, and it hasn't been stored already. - if F::slot_needs_genesis_value(state.slot(), spec) { - let genesis_value = F::extract_genesis_value(state, spec)?; - F::check_and_store_genesis_value(store, genesis_value, ops)?; - } - - // Start by iterating backwards from the last chunk, storing new chunks in the database. - // Stop once a chunk in the database matches what we were about to store, this indicates - // that a previously stored state has already filled-in a portion of the indices covered. 
- let full_range_checked = store_range( - field, - (start_cindex..=end_cindex).rev(), - start_vindex, - end_vindex, - store, - state, - spec, - ops, - )?; - - // If the previous `store_range` did not check the entire range, it may be the case that the - // state's vector includes elements at low vector indices that are not yet stored in the - // database, so run another `store_range` to ensure these values are also stored. - if !full_range_checked { - store_range( - field, - start_cindex..end_cindex, - start_vindex, - end_vindex, - store, - state, - spec, - ops, - )?; - } - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -fn store_range( - _: F, - range: I, - start_vindex: usize, - end_vindex: usize, - store: &S, - state: &BeaconState, - spec: &ChainSpec, - ops: &mut Vec, -) -> Result -where - F: Field, - E: EthSpec, - S: KeyValueStore, - I: Iterator, -{ - for chunk_index in range { - let chunk_key = &chunk_key(chunk_index)[..]; - - let existing_chunk = - Chunk::::load(store, F::column(), chunk_key)?.unwrap_or_default(); - - let new_chunk = F::get_updated_chunk( - &existing_chunk, - chunk_index, - start_vindex, - end_vindex, - state, - spec, - )?; - - if new_chunk == existing_chunk { - return Ok(false); - } - - new_chunk.store(F::column(), chunk_key, ops)?; - } - - Ok(true) -} - -// Chunks at the end index are included. -// TODO: could be more efficient with a real range query (perhaps RocksDB) -fn range_query, E: EthSpec, T: Decode + Encode>( - store: &S, - column: DBColumn, - start_index: usize, - end_index: usize, -) -> Result>, Error> { - let range = start_index..=end_index; - let len = range - .end() - // Add one to account for inclusive range. 
- .saturating_add(1) - .saturating_sub(*range.start()); - let mut result = Vec::with_capacity(len); - - for chunk_index in range { - let key = &chunk_key(chunk_index)[..]; - let chunk = Chunk::load(store, column, key)?.ok_or(ChunkError::Missing { chunk_index })?; - result.push(chunk); - } - - Ok(result) -} - -/// Combine chunks to form a list or vector of all values with vindex in `start_vindex..end_vindex`. -/// -/// The `length` parameter is the length of the vec to construct, with entries set to `default` if -/// they lie outside the vindex range. -fn stitch( - chunks: Vec>, - start_vindex: usize, - end_vindex: usize, - chunk_size: usize, - length: usize, - default: T, -) -> Result, ChunkError> { - if start_vindex + length < end_vindex { - return Err(ChunkError::OversizedRange { - start_vindex, - end_vindex, - length, - }); - } - - let start_cindex = start_vindex / chunk_size; - let end_cindex = end_vindex / chunk_size; - - let mut result = vec![default; length]; - - for (chunk_index, chunk) in (start_cindex..=end_cindex).zip(chunks.into_iter()) { - // All chunks but the last chunk must be full-sized - if chunk_index != end_cindex && chunk.values.len() != chunk_size { - return Err(ChunkError::InvalidSize { - chunk_index, - expected: chunk_size, - actual: chunk.values.len(), - }); - } - - // Copy the chunk entries into the result vector - for (i, value) in chunk.values.into_iter().enumerate() { - let vindex = chunk_index * chunk_size + i; - - if vindex >= start_vindex && vindex < end_vindex { - result[vindex % length] = value; - } - } - } - - Ok(result) -} - -pub fn load_vector_from_db, E: EthSpec, S: KeyValueStore>( - store: &S, - slot: Slot, - spec: &ChainSpec, -) -> Result, Error> { - // Do a range query - let chunk_size = F::chunk_size(); - let (start_vindex, end_vindex) = F::start_and_end_vindex(slot, spec); - let start_cindex = start_vindex / chunk_size; - let end_cindex = end_vindex / chunk_size; - - let chunks = range_query(store, F::column(), 
start_cindex, end_cindex)?; - - let default = if F::slot_needs_genesis_value(slot, spec) { - F::load_genesis_value(store)? - } else { - F::Value::default() - }; - - let result = stitch( - chunks, - start_vindex, - end_vindex, - chunk_size, - F::Length::to_usize(), - default, - )?; - - Ok(Vector::new(result).map_err(ChunkError::Milhouse)?) -} - -/// The historical roots are stored in vector chunks, despite not actually being a vector. -pub fn load_variable_list_from_db, E: EthSpec, S: KeyValueStore>( - store: &S, - slot: Slot, - spec: &ChainSpec, -) -> Result, Error> { - let chunk_size = F::chunk_size(); - let (start_vindex, end_vindex) = F::start_and_end_vindex(slot, spec); - let start_cindex = start_vindex / chunk_size; - let end_cindex = end_vindex / chunk_size; - - let chunks: Vec> = range_query(store, F::column(), start_cindex, end_cindex)?; - - let mut result = Vec::with_capacity(chunk_size * chunks.len()); - - for (chunk_index, chunk) in chunks.into_iter().enumerate() { - for (i, value) in chunk.values.into_iter().enumerate() { - let vindex = chunk_index * chunk_size + i; - - if vindex >= start_vindex && vindex < end_vindex { - result.push(value); - } - } - } - - Ok(List::new(result).map_err(ChunkError::Milhouse)?) -} - -/// Index into a `List` field of the state, avoiding out of bounds and division by 0. -fn safe_modulo_list_index( - values: &List, - index: u64, -) -> Result { - if values.is_empty() { - Err(ChunkError::ZeroLengthList) - } else { - values - .get(index as usize % values.len()) - .copied() - .ok_or(ChunkError::IndexOutOfBounds { index }) - } -} - -fn safe_modulo_vector_index( - values: &Vector, - index: u64, -) -> Result { - if values.is_empty() { - Err(ChunkError::ZeroLengthVector) - } else { - values - .get(index as usize % values.len()) - .copied() - .ok_or(ChunkError::IndexOutOfBounds { index }) - } -} - -/// A chunk of a fixed-size vector from the `BeaconState`, stored in the database. 
-#[derive(Debug, Clone, PartialEq)] -pub struct Chunk { - /// A vector of up-to `chunk_size` values. - pub values: Vec, -} - -impl Default for Chunk -where - T: Decode + Encode, -{ - fn default() -> Self { - Chunk { values: vec![] } - } -} - -impl Chunk -where - T: Decode + Encode, -{ - pub fn new(values: Vec) -> Self { - Chunk { values } - } - - pub fn load, E: EthSpec>( - store: &S, - column: DBColumn, - key: &[u8], - ) -> Result, Error> { - store - .get_bytes(column, key)? - .map(|bytes| Self::decode(&bytes)) - .transpose() - } - - pub fn store( - &self, - column: DBColumn, - key: &[u8], - ops: &mut Vec, - ) -> Result<(), Error> { - ops.push(KeyValueStoreOp::PutKeyValue( - column, - key.to_vec(), - self.encode()?, - )); - Ok(()) - } - - /// Attempt to decode a single chunk. - pub fn decode(bytes: &[u8]) -> Result { - if !::is_ssz_fixed_len() { - return Err(Error::from(ChunkError::InvalidType)); - } - - let value_size = ::ssz_fixed_len(); - - if value_size == 0 { - return Err(Error::from(ChunkError::InvalidType)); - } - - let values = bytes - .chunks(value_size) - .map(T::from_ssz_bytes) - .collect::>()?; - - Ok(Chunk { values }) - } - - pub fn encoded_size(&self) -> usize { - self.values.len() * ::ssz_fixed_len() - } - - /// Encode a single chunk as bytes. 
- pub fn encode(&self) -> Result, Error> { - if !::is_ssz_fixed_len() { - return Err(Error::from(ChunkError::InvalidType)); - } - - Ok(self.values.iter().flat_map(T::as_ssz_bytes).collect()) - } -} - -#[derive(Debug, PartialEq)] -pub enum ChunkError { - ZeroLengthVector, - ZeroLengthList, - IndexOutOfBounds { - index: u64, - }, - InvalidSize { - chunk_index: usize, - expected: usize, - actual: usize, - }, - Missing { - chunk_index: usize, - }, - MissingGenesisValue, - Inconsistent { - field: DBColumn, - chunk_index: usize, - existing_value: String, - new_value: String, - }, - InconsistentGenesisValue { - field: DBColumn, - existing_value: String, - new_value: String, - }, - InvalidGenesisChunk { - field: DBColumn, - expected_len: usize, - observed_len: usize, - }, - InvalidType, - OversizedRange { - start_vindex: usize, - end_vindex: usize, - length: usize, - }, - InvalidFork, - Milhouse(milhouse::Error), -} - -impl From for ChunkError { - fn from(e: milhouse::Error) -> ChunkError { - Self::Milhouse(e) - } -} - -#[cfg(test)] -mod test { - use super::*; - use fixed_bytes::FixedBytesExtended; - use types::MainnetEthSpec as TestSpec; - use types::*; - - fn v(i: u64) -> Hash256 { - Hash256::from_low_u64_be(i) - } - - #[test] - fn stitch_default() { - let chunk_size = 4; - - let chunks = vec![ - Chunk::new(vec![0u64, 1, 2, 3]), - Chunk::new(vec![4, 5, 0, 0]), - ]; - - assert_eq!( - stitch(chunks, 2, 6, chunk_size, 12, 99).unwrap(), - vec![99, 99, 2, 3, 4, 5, 99, 99, 99, 99, 99, 99] - ); - } - - #[test] - fn stitch_basic() { - let chunk_size = 4; - let default = v(0); - - let chunks = vec![ - Chunk::new(vec![v(0), v(1), v(2), v(3)]), - Chunk::new(vec![v(4), v(5), v(6), v(7)]), - Chunk::new(vec![v(8), v(9), v(10), v(11)]), - ]; - - assert_eq!( - stitch(chunks.clone(), 0, 12, chunk_size, 12, default).unwrap(), - (0..12).map(v).collect::>() - ); - - assert_eq!( - stitch(chunks, 2, 10, chunk_size, 8, default).unwrap(), - vec![v(8), v(9), v(2), v(3), v(4), v(5), v(6), v(7)] - 
); - } - - #[test] - fn stitch_oversized_range() { - let chunk_size = 4; - let default = 0; - - let chunks = vec![Chunk::new(vec![20u64, 21, 22, 23])]; - - // Args (start_vindex, end_vindex, length) - let args = vec![(0, 21, 20), (0, 2048, 1024), (0, 2, 1)]; - - for (start_vindex, end_vindex, length) in args { - assert_eq!( - stitch( - chunks.clone(), - start_vindex, - end_vindex, - chunk_size, - length, - default - ), - Err(ChunkError::OversizedRange { - start_vindex, - end_vindex, - length, - }) - ); - } - } - - #[test] - fn fixed_length_fields() { - fn test_fixed_length>(_: F, expected: bool) { - assert_eq!(F::is_fixed_length(), expected); - } - test_fixed_length(BlockRootsChunked, true); - test_fixed_length(StateRootsChunked, true); - test_fixed_length(HistoricalRoots, false); - test_fixed_length(RandaoMixes, true); - } - - fn needs_genesis_value_once_per_slot>(_: F) { - let spec = &TestSpec::default_spec(); - let max = F::Length::to_u64(); - for i in 0..max { - assert!( - F::slot_needs_genesis_value(Slot::new(i), spec), - "slot {}", - i - ); - } - assert!(!F::slot_needs_genesis_value(Slot::new(max), spec)); - } - - #[test] - fn needs_genesis_value_block_roots() { - needs_genesis_value_once_per_slot(BlockRootsChunked); - } - - #[test] - fn needs_genesis_value_state_roots() { - needs_genesis_value_once_per_slot(StateRootsChunked); - } - - #[test] - fn needs_genesis_value_historical_roots() { - let spec = &TestSpec::default_spec(); - assert!( - !>::slot_needs_genesis_value(Slot::new(0), spec) - ); - } - - fn needs_genesis_value_test_randao>(_: F) { - let spec = &TestSpec::default_spec(); - let max = TestSpec::slots_per_epoch() * (F::Length::to_u64() - 1); - for i in 0..max { - assert!( - F::slot_needs_genesis_value(Slot::new(i), spec), - "slot {}", - i - ); - } - assert!(!F::slot_needs_genesis_value(Slot::new(max), spec)); - } - - #[test] - fn needs_genesis_value_randao() { - needs_genesis_value_test_randao(RandaoMixes); - } -} diff --git 
a/beacon_node/store/src/errors.rs b/beacon_node/store/src/errors.rs index 6da99b7bd63..a07cc838863 100644 --- a/beacon_node/store/src/errors.rs +++ b/beacon_node/store/src/errors.rs @@ -1,4 +1,3 @@ -use crate::chunked_vector::ChunkError; use crate::config::StoreConfigError; use crate::hot_cold_store::{HotColdDBError, StateSummaryIteratorError}; use crate::{DBColumn, hdiff}; @@ -13,9 +12,7 @@ pub type Result = std::result::Result; #[derive(Debug)] pub enum Error { SszDecodeError(DecodeError), - VectorChunkError(ChunkError), BeaconStateError(BeaconStateError), - PartialBeaconStateError, HotColdDBError(HotColdDBError), DBError { message: String, @@ -126,12 +123,6 @@ impl From for Error { } } -impl From for Error { - fn from(e: ChunkError) -> Error { - Error::VectorChunkError(e) - } -} - impl From for Error { fn from(e: HotColdDBError) -> Error { Error::HotColdDBError(e) diff --git a/beacon_node/store/src/lib.rs b/beacon_node/store/src/lib.rs index a3d4e4a8cea..ae5b2e1e571 100644 --- a/beacon_node/store/src/lib.rs +++ b/beacon_node/store/src/lib.rs @@ -8,8 +8,6 @@ //! Provides a simple API for storing/retrieving all types that sometimes needs type-hints. See //! tests for implementation examples. 
pub mod blob_sidecar_list_from_root; -pub mod chunked_iter; -pub mod chunked_vector; pub mod config; pub mod consensus_context; pub mod errors; @@ -21,7 +19,6 @@ mod impls; mod memory_store; pub mod metadata; pub mod metrics; -pub mod partial_beacon_state; pub mod reconstruct; pub mod state_cache; diff --git a/beacon_node/store/src/partial_beacon_state.rs b/beacon_node/store/src/partial_beacon_state.rs deleted file mode 100644 index 9e5e1ebbb47..00000000000 --- a/beacon_node/store/src/partial_beacon_state.rs +++ /dev/null @@ -1,511 +0,0 @@ -use crate::chunked_vector::{ - BlockRootsChunked, HistoricalRoots, HistoricalSummaries, RandaoMixes, StateRootsChunked, - load_variable_list_from_db, load_vector_from_db, -}; -use crate::{DBColumn, Error, KeyValueStore, KeyValueStoreOp}; -use milhouse::{List, Vector}; -use ssz::{BitVector, Decode, DecodeError, Encode}; -use ssz_derive::{Decode, Encode}; -use std::sync::Arc; -use superstruct::superstruct; -use types::historical_summary::HistoricalSummary; -use types::*; - -/// DEPRECATED Lightweight variant of the `BeaconState` that is stored in the database. -/// -/// Utilises lazy-loading from separate storage for its vector fields. -/// -/// This can be deleted once schema versions prior to V22 are no longer supported. 
-#[superstruct( - variants(Base, Altair, Bellatrix, Capella, Deneb, Electra, Fulu, Gloas), - variant_attributes(derive(Debug, PartialEq, Clone, Encode, Decode)) -)] -#[derive(Debug, PartialEq, Clone, Encode)] -#[ssz(enum_behaviour = "transparent")] -pub struct PartialBeaconState -where - E: EthSpec, -{ - // Versioning - pub genesis_time: u64, - pub genesis_validators_root: Hash256, - #[superstruct(getter(copy))] - pub slot: Slot, - pub fork: Fork, - - // History - pub latest_block_header: BeaconBlockHeader, - - #[ssz(skip_serializing, skip_deserializing)] - pub block_roots: Option>, - #[ssz(skip_serializing, skip_deserializing)] - pub state_roots: Option>, - - #[ssz(skip_serializing, skip_deserializing)] - pub historical_roots: Option>, - - // Ethereum 1.0 chain data - pub eth1_data: Eth1Data, - pub eth1_data_votes: List, - pub eth1_deposit_index: u64, - - // Registry - pub validators: List, - pub balances: List, - - // Shuffling - /// Randao value from the current slot, for patching into the per-epoch randao vector. 
- pub latest_randao_value: Hash256, - #[ssz(skip_serializing, skip_deserializing)] - pub randao_mixes: Option>, - - // Slashings - slashings: Vector, - - // Attestations (genesis fork only) - #[superstruct(only(Base))] - pub previous_epoch_attestations: List, E::MaxPendingAttestations>, - #[superstruct(only(Base))] - pub current_epoch_attestations: List, E::MaxPendingAttestations>, - - // Participation (Altair and later) - #[superstruct(only(Altair, Bellatrix, Capella, Deneb, Electra, Fulu, Gloas))] - pub previous_epoch_participation: List, - #[superstruct(only(Altair, Bellatrix, Capella, Deneb, Electra, Fulu, Gloas))] - pub current_epoch_participation: List, - - // Finality - pub justification_bits: BitVector, - pub previous_justified_checkpoint: Checkpoint, - pub current_justified_checkpoint: Checkpoint, - pub finalized_checkpoint: Checkpoint, - - // Inactivity - #[superstruct(only(Altair, Bellatrix, Capella, Deneb, Electra, Fulu, Gloas))] - pub inactivity_scores: List, - - // Light-client sync committees - #[superstruct(only(Altair, Bellatrix, Capella, Deneb, Electra, Fulu, Gloas))] - pub current_sync_committee: Arc>, - #[superstruct(only(Altair, Bellatrix, Capella, Deneb, Electra, Fulu, Gloas))] - pub next_sync_committee: Arc>, - - // Execution - #[superstruct( - only(Bellatrix), - partial_getter(rename = "latest_execution_payload_header_bellatrix") - )] - pub latest_execution_payload_header: ExecutionPayloadHeaderBellatrix, - #[superstruct( - only(Capella), - partial_getter(rename = "latest_execution_payload_header_capella") - )] - pub latest_execution_payload_header: ExecutionPayloadHeaderCapella, - #[superstruct( - only(Deneb), - partial_getter(rename = "latest_execution_payload_header_deneb") - )] - pub latest_execution_payload_header: ExecutionPayloadHeaderDeneb, - #[superstruct( - only(Electra), - partial_getter(rename = "latest_execution_payload_header_electra") - )] - pub latest_execution_payload_header: ExecutionPayloadHeaderElectra, - #[superstruct( - 
only(Fulu), - partial_getter(rename = "latest_execution_payload_header_fulu") - )] - pub latest_execution_payload_header: ExecutionPayloadHeaderFulu, - - #[superstruct( - only(Gloas), - partial_getter(rename = "latest_execution_payload_bid_gloas") - )] - pub latest_execution_payload_bid: ExecutionPayloadBid, - - // Capella - #[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas))] - pub next_withdrawal_index: u64, - #[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas))] - pub next_withdrawal_validator_index: u64, - - #[ssz(skip_serializing, skip_deserializing)] - #[superstruct(only(Capella, Deneb, Electra, Fulu, Gloas))] - pub historical_summaries: Option>, - - // Electra - #[superstruct(only(Electra, Fulu, Gloas))] - pub deposit_requests_start_index: u64, - #[superstruct(only(Electra, Fulu, Gloas))] - pub deposit_balance_to_consume: u64, - #[superstruct(only(Electra, Fulu, Gloas))] - pub exit_balance_to_consume: u64, - #[superstruct(only(Electra, Fulu, Gloas))] - pub earliest_exit_epoch: Epoch, - #[superstruct(only(Electra, Fulu, Gloas))] - pub consolidation_balance_to_consume: u64, - #[superstruct(only(Electra, Fulu, Gloas))] - pub earliest_consolidation_epoch: Epoch, - - #[superstruct(only(Electra, Fulu, Gloas))] - pub pending_deposits: List, - #[superstruct(only(Electra, Fulu, Gloas))] - pub pending_partial_withdrawals: - List, - #[superstruct(only(Electra, Fulu, Gloas))] - pub pending_consolidations: List, - #[superstruct(only(Fulu, Gloas))] - pub proposer_lookahead: Vector, - - // Gloas - #[superstruct(only(Gloas))] - pub execution_payload_availability: BitVector, - - #[superstruct(only(Gloas))] - pub builder_pending_payments: Vector, - - #[superstruct(only(Gloas))] - pub builder_pending_withdrawals: - List, - - #[superstruct(only(Gloas))] - pub latest_block_hash: ExecutionBlockHash, - - #[superstruct(only(Gloas))] - pub latest_withdrawals_root: Hash256, -} - -impl PartialBeaconState { - /// SSZ decode. 
- pub fn from_ssz_bytes(bytes: &[u8], spec: &ChainSpec) -> Result { - // Slot is after genesis_time (u64) and genesis_validators_root (Hash256). - let slot_offset = ::ssz_fixed_len() + ::ssz_fixed_len(); - let slot_len = ::ssz_fixed_len(); - let slot_bytes = bytes.get(slot_offset..slot_offset + slot_len).ok_or( - DecodeError::InvalidByteLength { - len: bytes.len(), - expected: slot_offset + slot_len, - }, - )?; - - let slot = Slot::from_ssz_bytes(slot_bytes)?; - let fork_at_slot = spec.fork_name_at_slot::(slot); - - Ok(map_fork_name!( - fork_at_slot, - Self, - <_>::from_ssz_bytes(bytes)? - )) - } - - /// Prepare the partial state for storage in the KV database. - pub fn as_kv_store_op(&self, state_root: Hash256) -> KeyValueStoreOp { - KeyValueStoreOp::PutKeyValue( - DBColumn::BeaconState, - state_root.as_slice().to_vec(), - self.as_ssz_bytes(), - ) - } - - pub fn load_block_roots>( - &mut self, - store: &S, - spec: &ChainSpec, - ) -> Result<(), Error> { - if self.block_roots().is_none() { - *self.block_roots_mut() = Some(load_vector_from_db::( - store, - self.slot(), - spec, - )?); - } - Ok(()) - } - - pub fn load_state_roots>( - &mut self, - store: &S, - spec: &ChainSpec, - ) -> Result<(), Error> { - if self.state_roots().is_none() { - *self.state_roots_mut() = Some(load_vector_from_db::( - store, - self.slot(), - spec, - )?); - } - Ok(()) - } - - pub fn load_historical_roots>( - &mut self, - store: &S, - spec: &ChainSpec, - ) -> Result<(), Error> { - if self.historical_roots().is_none() { - *self.historical_roots_mut() = Some( - load_variable_list_from_db::(store, self.slot(), spec)?, - ); - } - Ok(()) - } - - pub fn load_historical_summaries>( - &mut self, - store: &S, - spec: &ChainSpec, - ) -> Result<(), Error> { - let slot = self.slot(); - if let Ok(historical_summaries) = self.historical_summaries_mut() - && historical_summaries.is_none() - { - *historical_summaries = Some(load_variable_list_from_db::( - store, slot, spec, - )?); - } - Ok(()) - } - - pub fn 
load_randao_mixes>( - &mut self, - store: &S, - spec: &ChainSpec, - ) -> Result<(), Error> { - if self.randao_mixes().is_none() { - // Load the per-epoch values from the database - let mut randao_mixes = - load_vector_from_db::(store, self.slot(), spec)?; - - // Patch the value for the current slot into the index for the current epoch - let current_epoch = self.slot().epoch(E::slots_per_epoch()); - let len = randao_mixes.len(); - *randao_mixes - .get_mut(current_epoch.as_usize() % len) - .ok_or(Error::RandaoMixOutOfBounds)? = *self.latest_randao_value(); - - *self.randao_mixes_mut() = Some(randao_mixes) - } - Ok(()) - } -} - -/// Implement the conversion from PartialBeaconState -> BeaconState. -macro_rules! impl_try_into_beacon_state { - ($inner:ident, $variant_name:ident, $struct_name:ident, [$($extra_fields:ident),*], [$($extra_opt_fields:ident),*]) => { - BeaconState::$variant_name($struct_name { - // Versioning - genesis_time: $inner.genesis_time, - genesis_validators_root: $inner.genesis_validators_root, - slot: $inner.slot, - fork: $inner.fork, - - // History - latest_block_header: $inner.latest_block_header, - block_roots: unpack_field($inner.block_roots)?, - state_roots: unpack_field($inner.state_roots)?, - historical_roots: unpack_field($inner.historical_roots)?, - - // Eth1 - eth1_data: $inner.eth1_data, - eth1_data_votes: $inner.eth1_data_votes, - eth1_deposit_index: $inner.eth1_deposit_index, - - // Validator registry - validators: $inner.validators, - balances: $inner.balances, - - // Shuffling - randao_mixes: unpack_field($inner.randao_mixes)?, - - // Slashings - slashings: $inner.slashings, - - // Finality - justification_bits: $inner.justification_bits, - previous_justified_checkpoint: $inner.previous_justified_checkpoint, - current_justified_checkpoint: $inner.current_justified_checkpoint, - finalized_checkpoint: $inner.finalized_checkpoint, - - // Caching - total_active_balance: <_>::default(), - progressive_balances_cache: <_>::default(), - 
committee_caches: <_>::default(), - pubkey_cache: <_>::default(), - exit_cache: <_>::default(), - slashings_cache: <_>::default(), - epoch_cache: <_>::default(), - - // Variant-specific fields - $( - $extra_fields: $inner.$extra_fields - ),*, - - // Variant-specific optional fields - $( - $extra_opt_fields: unpack_field($inner.$extra_opt_fields)? - ),* - }) - } -} - -fn unpack_field(x: Option) -> Result { - x.ok_or(Error::PartialBeaconStateError) -} - -impl TryInto> for PartialBeaconState { - type Error = Error; - - fn try_into(self) -> Result, Error> { - let state = match self { - PartialBeaconState::Base(inner) => impl_try_into_beacon_state!( - inner, - Base, - BeaconStateBase, - [previous_epoch_attestations, current_epoch_attestations], - [] - ), - PartialBeaconState::Altair(inner) => impl_try_into_beacon_state!( - inner, - Altair, - BeaconStateAltair, - [ - previous_epoch_participation, - current_epoch_participation, - current_sync_committee, - next_sync_committee, - inactivity_scores - ], - [] - ), - PartialBeaconState::Bellatrix(inner) => impl_try_into_beacon_state!( - inner, - Bellatrix, - BeaconStateBellatrix, - [ - previous_epoch_participation, - current_epoch_participation, - current_sync_committee, - next_sync_committee, - inactivity_scores, - latest_execution_payload_header - ], - [] - ), - PartialBeaconState::Capella(inner) => impl_try_into_beacon_state!( - inner, - Capella, - BeaconStateCapella, - [ - previous_epoch_participation, - current_epoch_participation, - current_sync_committee, - next_sync_committee, - inactivity_scores, - latest_execution_payload_header, - next_withdrawal_index, - next_withdrawal_validator_index - ], - [historical_summaries] - ), - PartialBeaconState::Deneb(inner) => impl_try_into_beacon_state!( - inner, - Deneb, - BeaconStateDeneb, - [ - previous_epoch_participation, - current_epoch_participation, - current_sync_committee, - next_sync_committee, - inactivity_scores, - latest_execution_payload_header, - 
next_withdrawal_index, - next_withdrawal_validator_index - ], - [historical_summaries] - ), - PartialBeaconState::Electra(inner) => impl_try_into_beacon_state!( - inner, - Electra, - BeaconStateElectra, - [ - previous_epoch_participation, - current_epoch_participation, - current_sync_committee, - next_sync_committee, - inactivity_scores, - latest_execution_payload_header, - next_withdrawal_index, - next_withdrawal_validator_index, - deposit_requests_start_index, - deposit_balance_to_consume, - exit_balance_to_consume, - earliest_exit_epoch, - consolidation_balance_to_consume, - earliest_consolidation_epoch, - pending_deposits, - pending_partial_withdrawals, - pending_consolidations - ], - [historical_summaries] - ), - PartialBeaconState::Fulu(inner) => impl_try_into_beacon_state!( - inner, - Fulu, - BeaconStateFulu, - [ - previous_epoch_participation, - current_epoch_participation, - current_sync_committee, - next_sync_committee, - inactivity_scores, - latest_execution_payload_header, - next_withdrawal_index, - next_withdrawal_validator_index, - deposit_requests_start_index, - deposit_balance_to_consume, - exit_balance_to_consume, - earliest_exit_epoch, - consolidation_balance_to_consume, - earliest_consolidation_epoch, - pending_deposits, - pending_partial_withdrawals, - pending_consolidations, - proposer_lookahead - ], - [historical_summaries] - ), - PartialBeaconState::Gloas(inner) => impl_try_into_beacon_state!( - inner, - Gloas, - BeaconStateGloas, - [ - previous_epoch_participation, - current_epoch_participation, - current_sync_committee, - next_sync_committee, - inactivity_scores, - latest_execution_payload_bid, - next_withdrawal_index, - next_withdrawal_validator_index, - deposit_requests_start_index, - deposit_balance_to_consume, - exit_balance_to_consume, - earliest_exit_epoch, - consolidation_balance_to_consume, - earliest_consolidation_epoch, - pending_deposits, - pending_partial_withdrawals, - pending_consolidations, - proposer_lookahead, - 
execution_payload_availability, - builder_pending_payments, - builder_pending_withdrawals, - latest_block_hash, - latest_withdrawals_root - ], - [historical_summaries] - ), - }; - Ok(state) - } -} From 4e35e9d5875679855fa2e9587bcc95cf905bc59d Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Tue, 16 Dec 2025 20:02:34 +1100 Subject: [PATCH 72/74] Add cargo deny on CI (#8580) Closes: - https://github.com/sigp/lighthouse/issues/8408 Add `cargo deny` on CI with deprecated crates (`ethers` and `ethereum-types`) banned and duplicates banned for `reqwest`. Co-Authored-By: Michael Sproul --- .github/workflows/test-suite.yml | 4 +++- Makefile | 9 +++++++++ deny.toml | 23 +++++++++++++++++++++++ 3 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 deny.toml diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index cc7282c3517..7344a9367b7 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -324,7 +324,7 @@ jobs: channel: stable cache-target: release components: rustfmt,clippy - bins: cargo-audit + bins: cargo-audit,cargo-deny - name: Check formatting with cargo fmt run: make cargo-fmt - name: Lint code for quality and style with Clippy @@ -337,6 +337,8 @@ jobs: run: make arbitrary-fuzz - name: Run cargo audit run: make audit-CI + - name: Run cargo deny + run: make deny-CI - name: Run cargo vendor to make sure dependencies can be vendored for packaging, reproducibility and archival purpose run: CARGO_HOME=$(readlink -f $HOME) make vendor - name: Markdown-linter diff --git a/Makefile b/Makefile index c1cccb92705..4426b941aaa 100644 --- a/Makefile +++ b/Makefile @@ -326,6 +326,15 @@ install-audit: audit-CI: cargo audit +# Runs cargo deny (check for banned crates, duplicate versions, and source restrictions) +deny: install-deny deny-CI + +install-deny: + cargo install --force cargo-deny --version 0.18.2 + +deny-CI: + cargo deny check bans sources --hide-inclusion-graph + # Runs `cargo vendor` to 
make sure dependencies can be vendored for packaging, reproducibility and archival purpose. vendor: cargo vendor diff --git a/deny.toml b/deny.toml new file mode 100644 index 00000000000..677396c0c34 --- /dev/null +++ b/deny.toml @@ -0,0 +1,23 @@ +# cargo-deny configuration for Lighthouse +# See https://embarkstudios.github.io/cargo-deny/ + +[bans] +# Warn when multiple versions of the same crate are detected +multiple-versions = "warn" +deny = [ + # Legacy Ethereum crates that have been replaced with alloy + { crate = "ethers", reason = "use alloy instead" }, + { crate = "ethereum-types", reason = "use alloy-primitives instead" }, + # Replaced by quick-protobuf + { crate = "protobuf", reason = "use quick-protobuf instead" }, + # Prevent duplicate versions of reqwest - heavy crate with build scripts + { crate = "reqwest", deny-multiple-versions = true, reason = "prevent duplicate versions" }, +] + +[sources] +unknown-registry = "deny" +unknown-git = "warn" +allow-registry = ["https://github.com/rust-lang/crates.io-index"] + +[sources.allow-org] +github = ["sigp"] From 2ce6b51269708a1c28c69a3241028522ebc153df Mon Sep 17 00:00:00 2001 From: Michael Sproul Date: Fri, 19 Dec 2025 15:02:09 +1100 Subject: [PATCH 73/74] Refine cargo-deny rules (#8602) A few `cargo-deny` tweaks with @macladson Co-authored-by: Mac L Co-Authored-By: Michael Sproul Co-Authored-By: Mac L --- Makefile | 2 +- deny.toml | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Makefile b/Makefile index 4426b941aaa..9d08c3ebe18 100644 --- a/Makefile +++ b/Makefile @@ -333,7 +333,7 @@ install-deny: cargo install --force cargo-deny --version 0.18.2 deny-CI: - cargo deny check bans sources --hide-inclusion-graph + cargo deny check bans sources # Runs `cargo vendor` to make sure dependencies can be vendored for packaging, reproducibility and archival purpose. 
vendor: diff --git a/deny.toml b/deny.toml index 677396c0c34..398a173dfa4 100644 --- a/deny.toml +++ b/deny.toml @@ -2,16 +2,16 @@ # See https://embarkstudios.github.io/cargo-deny/ [bans] -# Warn when multiple versions of the same crate are detected -multiple-versions = "warn" +# Allow multiple versions by default. Change this to "warn" to see all multiple versions. +multiple-versions = "allow" deny = [ - # Legacy Ethereum crates that have been replaced with alloy - { crate = "ethers", reason = "use alloy instead" }, - { crate = "ethereum-types", reason = "use alloy-primitives instead" }, - # Replaced by quick-protobuf + { crate = "ethers", reason = "legacy Ethereum crate, use alloy instead" }, + { crate = "ethereum-types", reason = "legacy Ethereum crate, use alloy-primitives instead" }, { crate = "protobuf", reason = "use quick-protobuf instead" }, - # Prevent duplicate versions of reqwest - heavy crate with build scripts - { crate = "reqwest", deny-multiple-versions = true, reason = "prevent duplicate versions" }, + { crate = "derivative", reason = "use educe or derive_more instead" }, + { crate = "ark-ff", reason = "present in Cargo.lock but not needed by Lighthouse" }, + { crate = "strum", deny-multiple-versions = true, reason = "takes a long time to compile" }, + { crate = "reqwest", deny-multiple-versions = true, reason = "takes a long time to compile" } ] [sources] From 7c1345083424e512dddd85ceb4175b971ddc25e8 Mon Sep 17 00:00:00 2001 From: Adam Date: Sat, 20 Dec 2025 22:24:37 +0000 Subject: [PATCH 74/74] rebase --- crypto/bls/src/impls/fake_crypto.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/crypto/bls/src/impls/fake_crypto.rs b/crypto/bls/src/impls/fake_crypto.rs index e7eee050775..e4ad72f3afa 100644 --- a/crypto/bls/src/impls/fake_crypto.rs +++ b/crypto/bls/src/impls/fake_crypto.rs @@ -164,11 +164,7 @@ impl TAggregateSignature for Aggregate } fn serialize(&self) -> [u8; SIGNATURE_BYTES_LEN] { - let mut bytes = [0; 
SIGNATURE_BYTES_LEN]; - - bytes[..].copy_from_slice(&self.0); - - bytes + self.0 } fn deserialize(bytes: &[u8]) -> Result {