
Commit

Merge branch 'unstable' into subnet-sampling
jimmygchen committed Oct 3, 2024
2 parents 07afa6c + a4a673b commit 07b1112
Showing 75 changed files with 3,212 additions and 2,203 deletions.
11 changes: 11 additions & 0 deletions .github/workflows/test-suite.yml
@@ -173,8 +173,19 @@ jobs:
channel: stable
cache-target: release
bins: cargo-nextest
- name: Create CI logger dir
run: mkdir ${{ runner.temp }}/network_test_logs
- name: Run network tests for all known forks
run: make test-network
env:
TEST_FEATURES: portable,ci_logger
CI_LOGGER_DIR: ${{ runner.temp }}/network_test_logs
- name: Upload logs
uses: actions/upload-artifact@v4
with:
name: network_test_logs
path: ${{ runner.temp }}/network_test_logs

slasher-tests:
name: slasher-tests
needs: [check-labels]
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default.

9 changes: 5 additions & 4 deletions beacon_node/beacon_chain/src/attestation_verification.rs
@@ -1144,13 +1144,14 @@ pub fn verify_propagation_slot_range<S: SlotClock, E: EthSpec>(

let current_fork =
spec.fork_name_at_slot::<E>(slot_clock.now().ok_or(BeaconChainError::UnableToReadSlot)?);
- let earliest_permissible_slot = if !current_fork.deneb_enabled() {
- one_epoch_prior
- // EIP-7045
- } else {
+
+ let earliest_permissible_slot = if current_fork.deneb_enabled() {
+ // EIP-7045
one_epoch_prior
.epoch(E::slots_per_epoch())
.start_slot(E::slots_per_epoch())
+ } else {
+ one_epoch_prior
};

if attestation_slot < earliest_permissible_slot {
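The hunk above flips the branch order so the Deneb (EIP-7045) case reads first while keeping the same cutoff. A minimal, self-contained sketch of that cutoff rule using plain u64 slot arithmetic; the constant value and function name below are illustrative only, since the real code works with the `Slot`/`Epoch` types and reads slots-per-epoch from `EthSpec`:

/// Slots per epoch on mainnet; the real code reads this from `EthSpec`.
const SLOTS_PER_EPOCH: u64 = 32;

/// Earliest slot from which an attestation may still be propagated.
fn earliest_permissible_slot(current_slot: u64, deneb_enabled: bool) -> u64 {
    let one_epoch_prior = current_slot.saturating_sub(SLOTS_PER_EPOCH);
    if deneb_enabled {
        // EIP-7045: attestations from the whole previous epoch remain valid, so round
        // the cutoff down to the first slot of that epoch.
        (one_epoch_prior / SLOTS_PER_EPOCH) * SLOTS_PER_EPOCH
    } else {
        // Pre-Deneb: the cutoff is exactly one epoch's worth of slots in the past.
        one_epoch_prior
    }
}

fn main() {
    // At slot 100 (epoch 3): pre-Deneb cutoff is slot 68, post-Deneb cutoff is slot 64.
    assert_eq!(earliest_permissible_slot(100, false), 68);
    assert_eq!(earliest_permissible_slot(100, true), 64);
}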
87 changes: 40 additions & 47 deletions beacon_node/beacon_chain/src/beacon_chain.rs
@@ -2619,11 +2619,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
/// Check if the current slot is greater than or equal to the Capella fork epoch.
pub fn current_slot_is_post_capella(&self) -> Result<bool, Error> {
let current_fork = self.spec.fork_name_at_slot::<T::EthSpec>(self.slot()?);
- if let ForkName::Base | ForkName::Altair | ForkName::Bellatrix = current_fork {
- Ok(false)
- } else {
- Ok(true)
- }
+ Ok(current_fork.capella_enabled())
}

/// Import a BLS to execution change to the op pool.
@@ -5947,26 +5943,23 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
payload_attributes
} else {
let prepare_slot_fork = self.spec.fork_name_at_slot::<T::EthSpec>(prepare_slot);
- let withdrawals = match prepare_slot_fork {
- ForkName::Base | ForkName::Altair | ForkName::Bellatrix => None,
- ForkName::Capella | ForkName::Deneb | ForkName::Electra => {
- let chain = self.clone();
- self.spawn_blocking_handle(
- move || {
- chain.get_expected_withdrawals(&forkchoice_update_params, prepare_slot)
- },
- "prepare_beacon_proposer_withdrawals",
- )
- .await?
- .map(Some)?
- }
+
+ let withdrawals = if prepare_slot_fork.capella_enabled() {
+ let chain = self.clone();
+ self.spawn_blocking_handle(
+ move || chain.get_expected_withdrawals(&forkchoice_update_params, prepare_slot),
+ "prepare_beacon_proposer_withdrawals",
+ )
+ .await?
+ .map(Some)?
+ } else {
+ None
};

- let parent_beacon_block_root = match prepare_slot_fork {
- ForkName::Base | ForkName::Altair | ForkName::Bellatrix | ForkName::Capella => None,
- ForkName::Deneb | ForkName::Electra => {
- Some(pre_payload_attributes.parent_beacon_block_root)
- }
+ let parent_beacon_block_root = if prepare_slot_fork.deneb_enabled() {
+ Some(pre_payload_attributes.parent_beacon_block_root)
+ } else {
+ None
};

let payload_attributes = PayloadAttributes::new(
@@ -6112,27 +6105,27 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
// `execution_engine_forkchoice_lock` apart from the one here.
let forkchoice_lock = execution_layer.execution_engine_forkchoice_lock().await;

- let (head_block_root, head_hash, justified_hash, finalized_hash) = if let Some(head_hash) =
- params.head_hash
- {
- (
- params.head_root,
- head_hash,
- params
- .justified_hash
- .unwrap_or_else(ExecutionBlockHash::zero),
- params
- .finalized_hash
- .unwrap_or_else(ExecutionBlockHash::zero),
- )
- } else {
- // The head block does not have an execution block hash. We must check to see if we
- // happen to be the proposer of the transition block, in which case we still need to
- // send forkchoice_updated.
- match self.spec.fork_name_at_slot::<T::EthSpec>(next_slot) {
- // We are pre-bellatrix; no need to update the EL.
- ForkName::Base | ForkName::Altair => return Ok(()),
- _ => {
+ let (head_block_root, head_hash, justified_hash, finalized_hash) =
+ if let Some(head_hash) = params.head_hash {
+ (
+ params.head_root,
+ head_hash,
+ params
+ .justified_hash
+ .unwrap_or_else(ExecutionBlockHash::zero),
+ params
+ .finalized_hash
+ .unwrap_or_else(ExecutionBlockHash::zero),
+ )
+ } else {
+ // The head block does not have an execution block hash. We must check to see if we
+ // happen to be the proposer of the transition block, in which case we still need to
+ // send forkchoice_updated.
+ if self
+ .spec
+ .fork_name_at_slot::<T::EthSpec>(next_slot)
+ .bellatrix_enabled()
+ {
// We are post-bellatrix
if let Some(payload_attributes) = execution_layer
.payload_attributes(next_slot, params.head_root)
@@ -6166,9 +6159,10 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
// We are not a proposer, no need to update the EL.
return Ok(());
}
+ } else {
+ return Ok(());
}
- }
- };
+ };

let forkchoice_updated_response = execution_layer
.notify_forkchoice_updated(
@@ -7011,7 +7005,6 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
.finalized_checkpoint()
.epoch
.sync_committee_period(&self.spec)?;

self.light_client_server_cache.get_light_client_bootstrap(
&self.store,
block_root,
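The beacon_chain.rs changes above replace exhaustive `ForkName` matches with fork-ordering helpers such as `bellatrix_enabled()`, `capella_enabled()` and `deneb_enabled()`. A simplified sketch of that pattern; the real `ForkName` lives in the `types` crate, and the derive-based ordering below is an assumption for illustration only:

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum ForkName {
    Base,
    Altair,
    Bellatrix,
    Capella,
    Deneb,
    Electra,
}

impl ForkName {
    /// True for Bellatrix and every later fork.
    fn bellatrix_enabled(self) -> bool {
        self >= ForkName::Bellatrix
    }
    /// True for Capella and every later fork.
    fn capella_enabled(self) -> bool {
        self >= ForkName::Capella
    }
    /// True for Deneb and every later fork.
    fn deneb_enabled(self) -> bool {
        self >= ForkName::Deneb
    }
}

fn main() {
    // Adding a future fork variant keeps these call sites correct without editing
    // every match arm, which is the motivation for the refactor.
    assert!(ForkName::Electra.capella_enabled());
    assert!(!ForkName::Bellatrix.deneb_enabled());
}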
93 changes: 92 additions & 1 deletion beacon_node/beacon_chain/src/data_column_verification.rs
@@ -127,6 +127,25 @@ pub enum GossipDataColumnError {
slot: Slot,
index: ColumnIndex,
},
/// Data column index must be between 0 and `NUMBER_OF_COLUMNS` (exclusive).
///
/// ## Peer scoring
///
/// The column sidecar is invalid and the peer is faulty
InvalidColumnIndex(u64),
/// Data column not expected for a block with empty kzg commitments.
///
/// ## Peer scoring
///
/// The column sidecar is invalid and the peer is faulty
UnexpectedDataColumn,
/// The data column length must be equal to the number of commitments/proofs, otherwise the
/// sidecar is invalid.
///
/// ## Peer scoring
///
/// The column sidecar is invalid and the peer is faulty
InconsistentCommitmentsOrProofLength,
}

impl From<BeaconChainError> for GossipDataColumnError {
@@ -367,7 +386,7 @@ pub fn validate_data_column_sidecar_for_gossip<T: BeaconChainTypes>(
chain: &BeaconChain<T>,
) -> Result<GossipVerifiedDataColumn<T>, GossipDataColumnError> {
let column_slot = data_column.slot();

verify_data_column_sidecar(&data_column, &chain.spec)?;
verify_index_matches_subnet(&data_column, subnet, &chain.spec)?;
verify_sidecar_not_from_future_slot(chain, column_slot)?;
verify_slot_greater_than_latest_finalized_slot(chain, column_slot)?;
@@ -396,6 +415,26 @@
})
}

/// Verify if the data column sidecar is valid.
fn verify_data_column_sidecar<E: EthSpec>(
data_column: &DataColumnSidecar<E>,
spec: &ChainSpec,
) -> Result<(), GossipDataColumnError> {
if data_column.index >= spec.number_of_columns as u64 {
return Err(GossipDataColumnError::InvalidColumnIndex(data_column.index));
}
if data_column.kzg_commitments.is_empty() {
return Err(GossipDataColumnError::UnexpectedDataColumn);
}
if data_column.column.len() != data_column.kzg_commitments.len()
|| data_column.column.len() != data_column.kzg_proofs.len()
{
return Err(GossipDataColumnError::InconsistentCommitmentsOrProofLength);
}

Ok(())
}

// Verify that this is the first column sidecar received for the tuple:
// (block_header.slot, block_header.proposer_index, column_sidecar.index)
fn verify_is_first_sidecar<T: BeaconChainTypes>(
@@ -613,3 +652,55 @@ fn verify_sidecar_not_from_future_slot<T: BeaconChainTypes>(
}
Ok(())
}

#[cfg(test)]
mod test {
use crate::data_column_verification::{
validate_data_column_sidecar_for_gossip, GossipDataColumnError,
};
use crate::test_utils::BeaconChainHarness;
use types::{DataColumnSidecar, EthSpec, ForkName, MainnetEthSpec};

type E = MainnetEthSpec;

#[tokio::test]
async fn empty_data_column_sidecars_fails_validation() {
let spec = ForkName::latest().make_genesis_spec(E::default_spec());
let harness = BeaconChainHarness::builder(E::default())
.spec(spec.into())
.deterministic_keypairs(64)
.fresh_ephemeral_store()
.mock_execution_layer()
.build();
harness.advance_slot();

let slot = harness.get_current_slot();
let state = harness.get_current_state();
let ((block, _blobs_opt), _state) = harness
.make_block_with_modifier(state, slot, |block| {
*block.body_mut().blob_kzg_commitments_mut().unwrap() = vec![].into();
})
.await;

let index = 0;
let column_sidecar = DataColumnSidecar::<E> {
index,
column: vec![].into(),
kzg_commitments: vec![].into(),
kzg_proofs: vec![].into(),
signed_block_header: block.signed_block_header(),
kzg_commitments_inclusion_proof: block
.message()
.body()
.kzg_commitments_merkle_proof()
.unwrap(),
};

let result =
validate_data_column_sidecar_for_gossip(column_sidecar.into(), index, &harness.chain);
assert!(matches!(
result.err(),
Some(GossipDataColumnError::UnexpectedDataColumn)
));
}
}
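The structural checks introduced by `verify_data_column_sidecar` can be exercised in isolation. A self-contained sketch over a simplified sidecar type; the field types and the `NUMBER_OF_COLUMNS` value are assumptions, since the real function takes a `DataColumnSidecar<E>` and reads `number_of_columns` from the `ChainSpec`:

// Assumed column count for illustration; the real value comes from the chain spec.
const NUMBER_OF_COLUMNS: u64 = 128;

struct SimpleSidecar {
    index: u64,
    column: Vec<Vec<u8>>,           // one cell per blob
    kzg_commitments: Vec<[u8; 48]>, // one commitment per blob
    kzg_proofs: Vec<[u8; 48]>,      // one proof per cell
}

#[derive(Debug, PartialEq)]
enum StructuralError {
    InvalidColumnIndex(u64),
    UnexpectedDataColumn,
    InconsistentCommitmentsOrProofLength,
}

fn verify_structure(sidecar: &SimpleSidecar) -> Result<(), StructuralError> {
    if sidecar.index >= NUMBER_OF_COLUMNS {
        return Err(StructuralError::InvalidColumnIndex(sidecar.index));
    }
    if sidecar.kzg_commitments.is_empty() {
        return Err(StructuralError::UnexpectedDataColumn);
    }
    if sidecar.column.len() != sidecar.kzg_commitments.len()
        || sidecar.column.len() != sidecar.kzg_proofs.len()
    {
        return Err(StructuralError::InconsistentCommitmentsOrProofLength);
    }
    Ok(())
}

fn main() {
    // An empty sidecar is rejected before any KZG work is attempted, mirroring the
    // `empty_data_column_sidecars_fails_validation` test above.
    let empty = SimpleSidecar {
        index: 0,
        column: vec![],
        kzg_commitments: vec![],
        kzg_proofs: vec![],
    };
    assert_eq!(
        verify_structure(&empty),
        Err(StructuralError::UnexpectedDataColumn)
    );
}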
62 changes: 53 additions & 9 deletions beacon_node/beacon_chain/src/test_utils.rs
@@ -43,13 +43,15 @@ use rayon::prelude::*;
use sensitive_url::SensitiveUrl;
use slog::{o, Drain, Logger};
use slog_async::Async;
- use slog_term::{FullFormat, TermDecorator};
+ use slog_term::{FullFormat, PlainSyncDecorator, TermDecorator};
use slot_clock::{SlotClock, TestingSlotClock};
use state_processing::per_block_processing::compute_timestamp_at_slot;
use state_processing::state_advance::complete_state_advance;
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::fmt;
+ use std::fs::{File, OpenOptions};
+ use std::io::BufWriter;
use std::str::FromStr;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, LazyLock};
@@ -68,6 +70,8 @@ use types::{typenum::U4294967296, *};
pub const HARNESS_GENESIS_TIME: u64 = 1_567_552_690;
// Environment variable to read if `fork_from_env` feature is enabled.
pub const FORK_NAME_ENV_VAR: &str = "FORK_NAME";
// Environment variable to read if `ci_logger` feature is enabled.
pub const CI_LOGGER_DIR_ENV_VAR: &str = "CI_LOGGER_DIR";

// Default target aggregators to set during testing, this ensures an aggregator at each slot.
//
@@ -2750,15 +2754,55 @@ pub struct MakeAttestationOptions {
pub fork: Fork,
}

- pub fn build_log(level: slog::Level, enabled: bool) -> Logger {
- let decorator = TermDecorator::new().build();
- let drain = FullFormat::new(decorator).build().fuse();
- let drain = Async::new(drain).build().fuse();
+ pub enum LoggerType {
+ Test,
+ // The logs are output to files for each test.
+ CI,
+ // No logs will be printed.
+ Null,
+ }

- if enabled {
- Logger::root(drain.filter_level(level).fuse(), o!())
- } else {
- Logger::root(drain.filter(|_| false).fuse(), o!())
+ fn ci_decorator() -> PlainSyncDecorator<BufWriter<File>> {
+ let log_dir = std::env::var(CI_LOGGER_DIR_ENV_VAR).unwrap_or_else(|e| {
+ panic!("{CI_LOGGER_DIR_ENV_VAR} env var must be defined when using ci_logger: {e:?}");
+ });
+ let fork_name = std::env::var(FORK_NAME_ENV_VAR)
+ .map(|s| format!("{s}_"))
+ .unwrap_or_default();
+ // The current test name can be obtained via the thread name.
+ let test_name = std::thread::current()
+ .name()
+ .unwrap()
+ .to_string()
+ // Colons are not allowed in files that are uploaded to GitHub Artifacts.
+ .replace("::", "_");
+ let log_path = format!("/{log_dir}/{fork_name}{test_name}.log");
+ let file = OpenOptions::new()
+ .create(true)
+ .append(true)
+ .open(log_path)
+ .unwrap();
+ let file = BufWriter::new(file);
+ PlainSyncDecorator::new(file)
+ }

+ pub fn build_log(level: slog::Level, logger_type: LoggerType) -> Logger {
+ match logger_type {
+ LoggerType::Test => {
+ let drain = FullFormat::new(TermDecorator::new().build()).build().fuse();
+ let drain = Async::new(drain).build().fuse();
+ Logger::root(drain.filter_level(level).fuse(), o!())
+ }
+ LoggerType::CI => {
+ let drain = FullFormat::new(ci_decorator()).build().fuse();
+ let drain = Async::new(drain).build().fuse();
+ Logger::root(drain.filter_level(level).fuse(), o!())
+ }
+ LoggerType::Null => {
+ let drain = FullFormat::new(TermDecorator::new().build()).build().fuse();
+ let drain = Async::new(drain).build().fuse();
+ Logger::root(drain.filter(|_| false).fuse(), o!())
+ }
}
}

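A usage sketch for the new logger selection, assuming `build_log` and `LoggerType` from the test utils above are in scope. The `cfg!(feature = "ci_logger")` wiring is an assumption about how the `TEST_FEATURES: portable,ci_logger` setting in the CI workflow reaches the harness; that wiring is not part of this diff:

use slog::Level;

fn harness_logger() -> slog::Logger {
    if cfg!(feature = "ci_logger") {
        // Requires CI_LOGGER_DIR to point at a writable directory (the workflow above
        // exports it); FORK_NAME, when set, is prefixed onto the per-test log file name.
        build_log(Level::Debug, LoggerType::CI)
    } else {
        // Default behaviour for local runs: human-readable terminal output.
        build_log(Level::Debug, LoggerType::Test)
    }
}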