chore(starknet_l1_provider): implement event fetch
Gilad Chase committed Dec 20, 2024
1 parent 573a0b3 commit 4ccd563
Showing 5 changed files with 72 additions and 5 deletions.
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default; the Cargo.lock diff is not shown.

9 changes: 9 additions & 0 deletions crates/starknet_api/src/executable_transaction.rs
@@ -310,6 +310,15 @@ pub struct L1HandlerTransaction {
 }
 
 impl L1HandlerTransaction {
+    pub fn create(
+        raw_tx: crate::transaction::L1HandlerTransaction,
+        chain_id: &ChainId,
+        paid_fee_on_l1: Fee,
+    ) -> Result<L1HandlerTransaction, StarknetApiError> {
+        let tx_hash = raw_tx.calculate_transaction_hash(chain_id, &raw_tx.version)?;
+        Ok(Self { tx: raw_tx, tx_hash, paid_fee_on_l1 })
+    }
+
     pub fn payload_size(&self) -> usize {
         // The calldata includes the "from" field, which is not a part of the payload.
         self.tx.calldata.0.len() - 1
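
The new create constructor hashes a raw L1 handler transaction for a given chain and bundles the result with the fee paid on L1. A minimal usage sketch, not part of the commit (the helper name, the choice of ChainId::Mainnet, and the exact import paths are illustrative):

    use starknet_api::core::ChainId;
    use starknet_api::executable_transaction::L1HandlerTransaction;
    use starknet_api::transaction::{Fee, L1HandlerTransaction as RawL1HandlerTransaction};
    use starknet_api::StarknetApiError;

    // Illustrative helper: turn a raw L1 handler transaction into an executable
    // one by computing its hash for mainnet and attaching the fee paid on L1.
    fn make_executable(
        raw_tx: RawL1HandlerTransaction,
        paid_fee_on_l1: Fee,
    ) -> Result<L1HandlerTransaction, StarknetApiError> {
        L1HandlerTransaction::create(raw_tx, &ChainId::Mainnet, paid_fee_on_l1)
    }
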
57 changes: 52 additions & 5 deletions crates/starknet_l1_provider/src/l1_scraper.rs
@@ -1,12 +1,14 @@
 use std::time::Duration;
 
 use papyrus_base_layer::constants::EventIdentifier;
-use papyrus_base_layer::BaseLayerContract;
+use papyrus_base_layer::{BaseLayerContract, L1Event};
 use papyrus_config::converters::deserialize_seconds_to_duration;
 use papyrus_config::validators::validate_ascii;
 use serde::{Deserialize, Serialize};
 use starknet_api::core::ChainId;
-use starknet_l1_provider_types::SharedL1ProviderClient;
+use starknet_api::executable_transaction::L1HandlerTransaction as ExecutableL1HandlerTransaction;
+use starknet_api::StarknetApiError;
+use starknet_l1_provider_types::{Event, SharedL1ProviderClient};
 use thiserror::Error;
 use tokio::time::sleep;
 use tracing::error;
@@ -23,7 +25,7 @@ pub struct L1Scraper<B: BaseLayerContract> {
     pub base_layer: B,
     pub last_block_number_processed: u64,
     pub l1_provider_client: SharedL1ProviderClient,
-    _tracked_event_identifiers: Vec<EventIdentifier>,
+    tracked_event_identifiers: Vec<EventIdentifier>,
 }
 
 impl<B: BaseLayerContract + Send + Sync> L1Scraper<B> {
@@ -38,12 +40,39 @@ impl<B: BaseLayerContract + Send + Sync> L1Scraper<B> {
             base_layer,
             last_block_number_processed: config.l1_block_to_start_scraping_from,
             config,
-            _tracked_event_identifiers: events_identifiers_to_track.to_vec(),
+            tracked_event_identifiers: events_identifiers_to_track.to_vec(),
         }
     }
 
     pub async fn fetch_events(&mut self) -> L1ScraperResult<(), B> {
-        todo!()
+        let latest_l1_block_number = self
+            .base_layer
+            .latest_l1_block_number(self.config.finality)
+            .await
+            .map_err(L1ScraperError::BaseLayer)?;
+
+        let Some(latest_l1_block_number) = latest_l1_block_number else {
+            error!("Failed to get latest L1 block number, finality too high.");
+            return Ok(());
+        };
+
+        let scraping_result = self
+            .base_layer
+            .events(
+                self.last_block_number_processed,
+                latest_l1_block_number,
+                &self.tracked_event_identifiers,
+            )
+            .await;
+
+        let events = scraping_result.map_err(L1ScraperError::BaseLayer)?;
+        let events = events
+            .into_iter()
+            .map(|event| self.event_from_raw_l1_event(event))
+            .collect::<L1ScraperResult<Vec<_>, _>>()?;
+
+        self.last_block_number_processed = latest_l1_block_number + 1;
+        todo!("send {events:?} to provider");
     }
 
     async fn _run(&mut self) -> L1ScraperResult<(), B> {
@@ -53,6 +82,22 @@ impl<B: BaseLayerContract + Send + Sync> L1Scraper<B> {
             self.fetch_events().await?;
         }
     }
+
+    fn event_from_raw_l1_event(&self, l1_event: L1Event) -> L1ScraperResult<Event, B> {
+        match l1_event {
+            L1Event::LogMessageToL2 { tx, fee } => {
+                let chain_id = &self.config.chain_id;
+                ExecutableL1HandlerTransaction::create(tx, chain_id, fee)
+                    .map(Event::L1HandlerTransaction)
+                    .map_err(L1ScraperError::HashCalculationError)
+            }
+            L1Event::MessageToL2CancellationStarted(_message_data) => todo!(),
+            L1Event::MessageToL2Canceled(_message_data) => todo!(),
+            L1Event::ConsumedMessageToL1 { from_address: _, to_address: _, payload: _ } => {
+                todo!()
+            }
+        }
+    }
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize, Validate, PartialEq)]
@@ -81,4 +126,6 @@ impl Default for L1ScraperConfig {
 pub enum L1ScraperError<T: BaseLayerContract + Send + Sync> {
     #[error("Base layer error: {0}")]
     BaseLayer(T::Error),
+    #[error("Failed to calculate hash: {0}")]
+    HashCalculationError(StarknetApiError),
 }
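
In fetch_events above, collect::<L1ScraperResult<Vec<_>, _>>() folds an iterator of per-event Results into a single Result<Vec<_>, _>, stopping at the first conversion error. A standalone illustration of the same idiom with plain types (not from the commit):

    // Collecting an iterator of Results into a Result<Vec<_>, _> short-circuits
    // on the first Err; later items are never evaluated.
    fn parse_all(inputs: &[&str]) -> Result<Vec<i32>, std::num::ParseIntError> {
        inputs.iter().map(|s| s.parse::<i32>()).collect()
    }

    fn main() {
        assert_eq!(parse_all(&["1", "2", "3"]), Ok(vec![1, 2, 3]));
        assert!(parse_all(&["1", "oops", "3"]).is_err());
    }
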
1 change: 1 addition & 0 deletions crates/starknet_l1_provider_types/Cargo.toml
@@ -11,6 +11,7 @@ testing = ["mockall"]
 [dependencies]
 async-trait.workspace = true
 mockall = { workspace = true, optional = true }
+papyrus_base_layer.workspace = true
 papyrus_proc_macros.workspace = true
 serde.workspace = true
 starknet_api.workspace = true
9 changes: 9 additions & 0 deletions crates/starknet_l1_provider_types/src/lib.rs
@@ -5,6 +5,7 @@ use std::sync::Arc;
 use async_trait::async_trait;
 #[cfg(any(feature = "testing", test))]
 use mockall::automock;
+use papyrus_base_layer::L1Event;
 use papyrus_proc_macros::handle_response_variants;
 use serde::{Deserialize, Serialize};
 use starknet_api::executable_transaction::L1HandlerTransaction;
@@ -69,3 +70,11 @@
         todo!();
     }
 }
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
+pub enum Event {
+    L1HandlerTransaction(L1HandlerTransaction),
+    TransactionConsumed(L1Event),
+    TransactionCancellationStarted(L1Event),
+    TransactionCanceled(L1Event),
+}
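
The last three variants wrap the raw L1Event unchanged, which hints at how the todo!()-ed arms of event_from_raw_l1_event in the scraper might eventually be filled in. A hedged sketch of that mapping, written as a free function for illustration rather than as the commit's code:

    use papyrus_base_layer::L1Event;
    use starknet_l1_provider_types::Event;

    // Speculative mapping: forward the whole L1Event into the corresponding
    // Event variant. LogMessageToL2 is handled separately, since it becomes an
    // executable L1 handler transaction instead.
    fn wrap_non_handler_event(l1_event: L1Event) -> Option<Event> {
        match l1_event {
            event @ L1Event::ConsumedMessageToL1 { .. } => Some(Event::TransactionConsumed(event)),
            event @ L1Event::MessageToL2CancellationStarted(_) => {
                Some(Event::TransactionCancellationStarted(event))
            }
            event @ L1Event::MessageToL2Canceled(_) => Some(Event::TransactionCanceled(event)),
            L1Event::LogMessageToL2 { .. } => None,
        }
    }
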
