Skip to content

Commit

Permalink
refactor(papyrus_storage): fix CR comments
Browse files Browse the repository at this point in the history
  • Loading branch information
AlonLStarkWare committed Dec 25, 2024
1 parent 2c727c5 commit 63f915f
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 20 deletions.
17 changes: 9 additions & 8 deletions crates/papyrus_storage/src/body/events.rs
Original file line number Diff line number Diff line change
Expand Up @@ -381,24 +381,25 @@ type AddressToTransactionIndexTableCursor<'txn> = DbCursor<
/// A read-only cursor over the table mapping [`TransactionIndex`] to [`TransactionMetadata`].
// NOTE(review): `RO` and `SimpleTable` come from the storage layer's DB abstraction —
// presumably read-only access over a plain (non-dup-sort) table; confirm against db module docs.
type TransactionMetadataTableCursor<'txn> =
    DbCursor<'txn, RO, TransactionIndex, VersionZeroWrapper<TransactionMetadata>, SimpleTable>;

/// Interface for updating the events in the storage.
pub trait EventStorageWriter
where
    Self: Sized,
{
    /// Appends the events of an entire block to the storage.
    ///
    /// `block_events` holds one slice of events per transaction, ordered by the
    /// transaction's offset in the block.
    ///
    /// # Errors
    /// Returns a [`StorageResult`] error if the underlying storage write fails.
    // To enforce that no commit happens after a failure, we consume and return Self on success.
    fn append_events(
        self,
        block_number: BlockNumber,
        block_events: &[&[Event]],
    ) -> StorageResult<Self>;
}

impl EventStorageWriter for StorageTxn<'_, RW> {
fn append_events(
self,
block_number: BlockNumber,
block_events: Vec<Vec<Event>>,
block_events: &[&[Event]],
) -> StorageResult<Self> {
let markers_table = self.open_table(&self.tables.markers)?;
update_marker(&self.txn, &markers_table, block_number)?;
Expand All @@ -408,22 +409,22 @@ impl EventStorageWriter for StorageTxn<'_, RW> {
let address_to_transaction_index =
self.open_table(&self.tables.address_to_transaction_index)?;

for (index, transaction_events) in block_events.iter().enumerate() {
for (index, &transaction_events) in block_events.iter().enumerate() {
let transaction_index =
TransactionIndex(block_number, TransactionOffsetInBlock(index));
let event_offset = self.file_handlers.append_events(&transaction_events.clone());
let event_offset = self.file_handlers.append_events(transaction_events);
events_table.append(&self.txn, &transaction_index, &event_offset)?;
for even in transaction_events {
for event in transaction_events {
address_to_transaction_index.insert(
&self.txn,
&(even.from_address, transaction_index),
&(event.from_address, transaction_index),
&NoValue,
)?;
}
if index == block_events.len() - 1 {
file_offset_table.upsert(
&self.txn,
&OffsetKind::Events,
&OffsetKind::Event,
&event_offset.next_offset(),
)?;
}
Expand Down
20 changes: 9 additions & 11 deletions crates/papyrus_storage/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -681,9 +681,7 @@ struct FileHandlers<Mode: TransactionKind> {
deprecated_contract_class: FileHandler<VersionZeroWrapper<DeprecatedContractClass>, Mode>,
transaction_output: FileHandler<VersionZeroWrapper<TransactionOutput>, Mode>,
transaction: FileHandler<VersionZeroWrapper<Transaction>, Mode>,
events: FileHandler<VersionZeroWrapper<Vec<Event>>, Mode>, /* Should this point to the
* events of a transaction or an
* entire block? */
event: FileHandler<VersionZeroWrapper<Vec<Event>>, Mode>,
}

impl FileHandlers<RW> {
Expand Down Expand Up @@ -722,8 +720,8 @@ impl FileHandlers<RW> {
}

// Appends an event to the corresponding file and returns its location.
fn append_events(&self, events: &Vec<Event>) -> LocationInFile {
self.clone().events.append(events)
fn append_events(&self, events: &[Event]) -> LocationInFile {
self.clone().event.append(&events.to_vec())
}

// TODO(dan): Consider 1. flushing only the relevant files, 2. flushing concurrently.
Expand All @@ -736,7 +734,7 @@ impl FileHandlers<RW> {
self.deprecated_contract_class.flush();
self.transaction_output.flush();
self.transaction.flush();
self.events.flush(); // make sure we need this
self.event.flush();
}
}

Expand All @@ -750,7 +748,7 @@ impl<Mode: TransactionKind> FileHandlers<Mode> {
("deprecated_contract_class".to_string(), self.deprecated_contract_class.stats()),
("transaction_output".to_string(), self.transaction_output.stats()),
("transaction".to_string(), self.transaction.stats()),
("events".to_string(), self.events.stats()), // make sure we need this
("events".to_string(), self.event.stats()),
])
}

Expand Down Expand Up @@ -865,7 +863,7 @@ fn open_storage_files(
transaction_offset,
)?;

let event_offset = table.get(&db_transaction, &OffsetKind::Events)?.unwrap_or_default();
let event_offset = table.get(&db_transaction, &OffsetKind::Event)?.unwrap_or_default();
let (events_writer, events_reader) =
open_file(mmap_file_config, db_config.path().join("events.dat"), event_offset)?;

Expand All @@ -877,7 +875,7 @@ fn open_storage_files(
deprecated_contract_class: deprecated_contract_class_writer,
transaction_output: transaction_output_writer,
transaction: transaction_writer,
events: events_writer,
event: events_writer,
},
FileHandlers {
thin_state_diff: thin_state_diff_reader,
Expand All @@ -886,7 +884,7 @@ fn open_storage_files(
deprecated_contract_class: deprecated_contract_class_reader,
transaction_output: transaction_output_reader,
transaction: transaction_reader,
events: events_reader,
event: events_reader,
},
))
}
Expand All @@ -907,7 +905,7 @@ pub enum OffsetKind {
/// A transaction file.
Transaction,
/// An events file.
Events,
Event,
}

/// A storage query. Used for benchmarking in the storage_benchmark binary.
Expand Down
2 changes: 1 addition & 1 deletion crates/papyrus_storage/src/serialization/serializers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ auto_storage_serde! {
DeprecatedContractClass = 3,
TransactionOutput = 4,
Transaction = 5,
Events = 6,
Event = 6,
}
pub struct PaymasterData(pub Vec<Felt>);
pub struct PoseidonHash(pub Felt);
Expand Down

0 comments on commit 63f915f

Please sign in to comment.