Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

perf: Misc perf improvements #30

Merged
merged 6 commits into from
May 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ version = "0.1.0"
[features]
default = ["std", "rocksdb"]
rocksdb = ["dep:rocksdb"]
std = ["parity-scale-codec/std", "bitvec/std", "starknet-types-core/std"]
std = ["parity-scale-codec/std", "bitvec/std", "starknet-types-core/std", "rayon", "hashbrown/rayon"]
# internal
bench = []

Expand All @@ -18,7 +18,7 @@ derive_more = { version = "0.99.17", default-features = false, features = [
hashbrown = "0.14.3"
log = "0.4.20"
smallvec = "1.11.2"
rayon = "1.9.0"
rayon = { version = "1.9.0", optional = true }

parity-scale-codec = { version = "3.0.0", default-features = false, features = [
"derive",
Expand Down
90 changes: 82 additions & 8 deletions benches/storage.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use bonsai_trie::{
id::{BasicId, BasicIdBuilder},
BonsaiStorage, BonsaiStorageConfig,
};
use criterion::{criterion_group, criterion_main, Criterion};
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use rand::{prelude::*, thread_rng};
use starknet_types_core::{
felt::Felt,
Expand All @@ -15,6 +15,40 @@ use starknet_types_core::{

mod flamegraph;

/// Benchmarks inserting 40k random 6-byte keys into a fresh in-memory
/// storage. The storage is rebuilt by the batch-setup closure, so every
/// measured iteration starts from an empty trie and only `insert` is timed.
fn storage_with_insert(c: &mut Criterion) {
    c.bench_function("storage commit with insert", move |b| {
        let mut rng = thread_rng();
        // Hoisted out of the timed closure: parsing the hex constant is
        // loop-invariant and should not be part of the measurement.
        let felt = Felt::from_hex("0x66342762FDD54D033c195fec3ce2568b62052e").unwrap();
        b.iter_batched_ref(
            || {
                // Fresh, empty storage per batch; built in setup, untimed.
                let bonsai_storage: BonsaiStorage<BasicId, _, Pedersen> = BonsaiStorage::new(
                    HashMapDb::<BasicId>::default(),
                    BonsaiStorageConfig::default(),
                )
                .unwrap();
                bonsai_storage
            },
            |bonsai_storage| {
                for _ in 0..40000 {
                    // 6 random bytes -> one random 48-bit key.
                    let bitvec = BitVec::from_vec(vec![
                        rng.gen(),
                        rng.gen(),
                        rng.gen(),
                        rng.gen(),
                        rng.gen(),
                        rng.gen(),
                    ]);
                    bonsai_storage.insert(&[], &bitvec, &felt).unwrap();
                }
                // NOTE: commit is deliberately excluded here so this benchmark
                // isolates insertion cost; see the `storage` benchmark for
                // commit-only timing.
            },
            BatchSize::LargeInput,
        );
    });
}

fn storage(c: &mut Criterion) {
c.bench_function("storage commit", move |b| {
let mut bonsai_storage: BonsaiStorage<BasicId, _, Pedersen> = BonsaiStorage::new(
Expand All @@ -38,9 +72,9 @@ fn storage(c: &mut Criterion) {
}

let mut id_builder = BasicIdBuilder::new();
b.iter_batched(
b.iter_batched_ref(
|| bonsai_storage.clone(),
|mut bonsai_storage| {
|bonsai_storage| {
bonsai_storage.commit(id_builder.new_id()).unwrap();
},
criterion::BatchSize::LargeInput,
Expand Down Expand Up @@ -73,9 +107,9 @@ fn one_update(c: &mut Criterion) {
let mut id_builder = BasicIdBuilder::new();
bonsai_storage.commit(id_builder.new_id()).unwrap();

b.iter_batched(
b.iter_batched_ref(
|| bonsai_storage.clone(),
|mut bonsai_storage| {
|bonsai_storage| {
let bitvec = BitVec::from_vec(vec![0, 1, 2, 3, 4, 5]);
bonsai_storage.insert(&[], &bitvec, &felt).unwrap();
bonsai_storage.commit(id_builder.new_id()).unwrap();
Expand Down Expand Up @@ -110,9 +144,9 @@ fn five_updates(c: &mut Criterion) {
let mut id_builder = BasicIdBuilder::new();
bonsai_storage.commit(id_builder.new_id()).unwrap();

b.iter_batched(
b.iter_batched_ref(
|| bonsai_storage.clone(),
|mut bonsai_storage| {
|bonsai_storage| {
bonsai_storage
.insert(&[], &BitVec::from_vec(vec![0, 1, 2, 3, 4, 5]), &felt)
.unwrap();
Expand All @@ -135,6 +169,46 @@ fn five_updates(c: &mut Criterion) {
});
}

/// Benchmarks committing a storage whose entries are spread across 1000
/// randomly-named contract identifiers. Population happens once up front;
/// each timed iteration commits a clone of the populated storage.
fn multiple_contracts(c: &mut Criterion) {
    c.bench_function("multiple contracts", move |b| {
        let mut storage: BonsaiStorage<BasicId, _, Pedersen> = BonsaiStorage::new(
            HashMapDb::<BasicId>::default(),
            BonsaiStorageConfig::default(),
        )
        .unwrap();
        let mut rng = thread_rng();

        let value = Felt::from_hex("0x66342762FDD54D033c195fec3ce2568b62052e").unwrap();
        for _ in 0..1000 {
            // 4 random key bytes stored under a random 6-byte identifier.
            let key = BitVec::from_vec(vec![rng.gen(), rng.gen(), rng.gen(), rng.gen()]);
            let identifier = [
                rng.gen(),
                rng.gen(),
                rng.gen(),
                rng.gen(),
                rng.gen(),
                rng.gen(),
            ];
            storage.insert(&identifier, &key, &value).unwrap();
        }

        let mut id_builder = BasicIdBuilder::new();

        b.iter_batched_ref(
            || storage.clone(),
            |storage| {
                storage.commit(id_builder.new_id()).unwrap();
            },
            criterion::BatchSize::LargeInput,
        );
    });
}

fn hash(c: &mut Criterion) {
c.bench_function("pedersen hash", move |b| {
let felt0 =
Expand All @@ -152,6 +226,6 @@ fn hash(c: &mut Criterion) {
criterion_group! {
name = benches;
config = Criterion::default(); // .with_profiler(flamegraph::FlamegraphProfiler::new(100));
targets = storage, one_update, five_updates, hash
targets = storage, one_update, five_updates, hash, storage_with_insert, multiple_contracts
}
criterion_main!(benches);
4 changes: 1 addition & 3 deletions src/bonsai_database.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
use crate::id::Id;
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use crate::{id::Id, Vec};
#[cfg(feature = "std")]
use std::error::Error;

Expand Down
9 changes: 1 addition & 8 deletions src/changes.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,5 @@
use crate::{id::Id, trie::TrieKey};
use crate::{hash_map::Entry, id::Id, trie::TrieKey, HashMap, Vec, VecDeque};
use serde::{Deserialize, Serialize};
#[cfg(feature = "std")]
use std::collections::{hash_map::Entry, HashMap, VecDeque};
#[cfg(not(feature = "std"))]
use {
alloc::{collections::VecDeque, vec::Vec},
hashbrown::{hash_map::Entry, HashMap},
};

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
pub struct Change {
Expand Down
8 changes: 1 addition & 7 deletions src/databases/hashmap_db.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,9 @@
use crate::{
bonsai_database::{BonsaiPersistentDatabase, DBError},
id::Id,
BonsaiDatabase,
BTreeMap, BonsaiDatabase, HashMap, Vec,
};
#[cfg(not(feature = "std"))]
use alloc::{collections::BTreeMap, vec::Vec};
use core::{fmt, fmt::Display};
#[cfg(not(feature = "std"))]
use hashbrown::HashMap;
#[cfg(feature = "std")]
use std::collections::{BTreeMap, HashMap};

#[derive(Debug)]
pub struct HashMapDbError {}
Expand Down
4 changes: 1 addition & 3 deletions src/error.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,8 @@
#[cfg(feature = "std")]
use std::{error::Error, fmt::Display};

use crate::bonsai_database::DBError;
use crate::{bonsai_database::DBError, String};

#[cfg(not(feature = "std"))]
use alloc::string::String;
/// All errors that can be returned by BonsaiStorage.
#[derive(Debug)]
pub enum BonsaiStorageError<DatabaseError>
Expand Down
3 changes: 1 addition & 2 deletions src/id.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use crate::Vec;
use core::{fmt::Debug, hash};

/// Trait to be implemented on any type that can be used as an ID.
Expand Down
9 changes: 4 additions & 5 deletions src/key_value_db.rs
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@
use crate::{changes::key_new_value, trie::merkle_tree::bytes_to_bitvec, Change as ExternChange};
#[cfg(not(feature = "std"))]
use alloc::{collections::BTreeSet, format, string::ToString, vec::Vec};
use crate::{
changes::key_new_value, format, trie::merkle_tree::bytes_to_bitvec, BTreeSet,
Change as ExternChange, ToString, Vec,
};
use bitvec::{order::Msb0, vec::BitVec};
use hashbrown::HashMap;
use log::trace;
use parity_scale_codec::Decode;
use starknet_types_core::felt::Felt;
#[cfg(feature = "std")]
use std::collections::BTreeSet;

use crate::{
bonsai_database::{BonsaiDatabase, BonsaiPersistentDatabase, DatabaseKey},
Expand Down
28 changes: 23 additions & 5 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -84,15 +84,33 @@
//! bonsai_storage.commit(id_builder.new_id()).unwrap();
//! ```
#![cfg_attr(not(feature = "std"), no_std)]

// hashbrown uses ahash by default instead of siphash
pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V>;
pub(crate) use hashbrown::hash_map;

#[cfg(not(feature = "std"))]
extern crate alloc;

use crate::trie::merkle_tree::{bytes_to_bitvec, MerkleTree};
#[cfg(not(feature = "std"))]
use alloc::{format, vec::Vec};
pub(crate) use alloc::{
collections::{BTreeMap, BTreeSet, VecDeque},
format,
string::{String, ToString},
vec,
vec::Vec,
};
#[cfg(feature = "std")]
pub(crate) use std::{
collections::{BTreeMap, BTreeSet, VecDeque},
format,
string::{String, ToString},
vec,
vec::Vec,
};

use crate::trie::merkle_tree::MerkleTree;
use bitvec::{order::Msb0, slice::BitSlice, vec::BitVec};
use changes::ChangeBatch;
use hashbrown::HashMap;
use key_value_db::KeyValueDB;
use starknet_types_core::{
felt::Felt,
Expand All @@ -112,7 +130,7 @@ pub mod id;

pub use bonsai_database::{BonsaiDatabase, BonsaiPersistentDatabase, DBError, DatabaseKey};
pub use error::BonsaiStorageError;
use trie::merkle_tree::MerkleTrees;
use trie::merkle_tree::{bytes_to_bitvec, MerkleTrees};
pub use trie::merkle_tree::{Membership, ProofNode};

#[cfg(test)]
Expand Down
2 changes: 1 addition & 1 deletion src/tests/madara_comparison.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![cfg(feature = "std")]
#![cfg(all(feature = "std", feature = "rocksdb"))]
use bitvec::{bits, order::Msb0, vec::BitVec};
use starknet_types_core::{felt::Felt, hash::Pedersen};

Expand Down
2 changes: 1 addition & 1 deletion src/tests/proof.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![cfg(feature = "std")]
#![cfg(all(feature = "std", feature = "rocksdb"))]
use bitvec::vec::BitVec;
use pathfinder_common::{hash::PedersenHash, trie::TrieNode};
use pathfinder_crypto::Felt as PathfinderFelt;
Expand Down
2 changes: 1 addition & 1 deletion src/tests/simple.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![cfg(feature = "std")]
#![cfg(all(feature = "std", feature = "rocksdb"))]
use crate::{
databases::{create_rocks_db, HashMapDb, RocksDB, RocksDBConfig},
id::{BasicId, BasicIdBuilder},
Expand Down
2 changes: 1 addition & 1 deletion src/tests/transactional_state.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![cfg(feature = "std")]
#![cfg(all(feature = "std", feature = "rocksdb"))]
use crate::{
databases::{create_rocks_db, RocksDB, RocksDBConfig},
id::BasicIdBuilder,
Expand Down
2 changes: 1 addition & 1 deletion src/tests/trie_log.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![cfg(feature = "std")]
#![cfg(all(feature = "std", feature = "rocksdb"))]
use crate::{
databases::{create_rocks_db, RocksDB, RocksDBConfig},
id::BasicIdBuilder,
Expand Down
19 changes: 19 additions & 0 deletions src/trie/merkle_node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,25 @@ impl BinaryNode {
Direction::Right => self.right,
}
}

/// Returns a mutable reference to the [Left] or [Right] child handle.
///
/// Mutable counterpart of `get_child`: use this when the caller needs to
/// replace or rewrite the child handle in place.
///
/// [Left]: Direction::Left
/// [Right]: Direction::Right
///
/// # Arguments
///
/// `direction` - The direction where to get the child from.
///
/// # Returns
///
/// A mutable reference to the child handle in the specified direction.
pub fn get_child_mut(&mut self, direction: Direction) -> &mut NodeHandle {
    match direction {
        Direction::Left => &mut self.left,
        Direction::Right => &mut self.right,
    }
}
}

impl Node {
Expand Down
Loading
Loading