Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

WIP: First very dirty prototype of SP1 distributed proof #283

Closed
wants to merge 23 commits into from
Closed
8,547 changes: 0 additions & 8,547 deletions Cargo.lock

This file was deleted.

10 changes: 7 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,12 @@ risc0-build = { version = "0.21.0" }
risc0-binfmt = { version = "0.21.0" }

# SP1
sp1-sdk = { git = "https://github.com/succinctlabs/sp1.git", branch = "main" }
sp1-zkvm = { git = "https://github.com/succinctlabs/sp1.git", branch = "main" }
sp1-helper = { git = "https://github.com/succinctlabs/sp1.git", branch = "main" }
# sp1-sdk = { git = "https://github.com/succinctlabs/sp1.git", branch = "main" }
# sp1-zkvm = { git = "https://github.com/succinctlabs/sp1.git", branch = "main" }
# sp1-helper = { git = "https://github.com/succinctlabs/sp1.git", branch = "main" }
sp1-sdk = { path = "../sp1/sdk" }
sp1-zkvm = { path = "../sp1/zkvm" }
sp1-helper = { path = "../sp1/helper" }

# alloy
alloy-rlp = { version = "0.3.4", default-features = false }
Expand Down Expand Up @@ -144,6 +147,7 @@ secp256k1 = { version = "0.27.0", features = [
"rand",
"recovery",
] }
async-channel = "2.3.1"

# macro
syn = { version = "1.0", features = ["full"] }
Expand Down
14 changes: 14 additions & 0 deletions core/src/interfaces.rs
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,10 @@ pub enum ProofType {
///
/// Uses the SP1 prover to build the block.
Sp1,
/// # Sp1Distributed
///
/// Uses the SP1 prover to build the block in a distributed way.
Sp1Distributed,
/// # Sgx
///
/// Builds the block on a SGX supported CPU to create a proof.
Expand All @@ -100,6 +104,7 @@ impl std::fmt::Display for ProofType {
f.write_str(match self {
ProofType::Native => "native",
ProofType::Sp1 => "sp1",
ProofType::Sp1Distributed => "sp1_distributed",
ProofType::Sgx => "sgx",
ProofType::Risc0 => "risc0",
})
Expand All @@ -113,6 +118,7 @@ impl FromStr for ProofType {
match s.trim().to_lowercase().as_str() {
"native" => Ok(ProofType::Native),
"sp1" => Ok(ProofType::Sp1),
"sp1_distributed" => Ok(ProofType::Sp1Distributed),
"sgx" => Ok(ProofType::Sgx),
"risc0" => Ok(ProofType::Risc0),
_ => Err(RaikoError::InvalidProofType(s.to_string())),
Expand Down Expand Up @@ -140,6 +146,14 @@ impl ProofType {
#[cfg(not(feature = "sp1"))]
Err(RaikoError::FeatureNotSupportedError(self.clone()))
}
ProofType::Sp1Distributed => {
#[cfg(feature = "sp1")]
return sp1_driver::Sp1DistributedProver::run(input, output, config)
.await
.map_err(|e| e.into());
#[cfg(not(feature = "sp1"))]
Err(RaikoError::FeatureNotSupportedError(self.clone()))
}
ProofType::Risc0 => {
#[cfg(feature = "risc0")]
return risc0_driver::Risc0Prover::run(input, output, config)
Expand Down
4 changes: 2 additions & 2 deletions core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,15 +14,15 @@ use raiko_lib::{
utils::HeaderHasher,
};
use serde_json::Value;
use std::{collections::HashMap, hint::black_box};
use std::{collections::BTreeMap, hint::black_box};
use tracing::{debug, error, info, warn};

pub mod interfaces;
pub mod preflight;
pub mod prover;
pub mod provider;

pub type MerkleProof = HashMap<Address, EIP1186AccountProofResponse>;
pub type MerkleProof = BTreeMap<Address, EIP1186AccountProofResponse>;

pub struct Raiko {
l1_chain_spec: ChainSpec,
Expand Down
5 changes: 3 additions & 2 deletions core/src/preflight.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ use raiko_lib::{
Measurement,
};
use serde::{Deserialize, Serialize};
use std::{collections::HashSet, sync::Arc};
use std::{collections::BTreeSet, sync::Arc};
use tracing::{debug, info, warn};

pub async fn preflight<BDP: BlockDataProvider>(
Expand Down Expand Up @@ -191,6 +191,7 @@ pub async fn preflight<BDP: BlockDataProvider>(
let measurement = Measurement::start("Constructing MPT...", true);
let (state_trie, storage) =
proofs_to_tries(input.parent_header.state_root, parent_proofs, proofs)?;

measurement.stop();

// Gather proofs for block history
Expand All @@ -200,7 +201,7 @@ pub async fn preflight<BDP: BlockDataProvider>(

// Get the contracts from the initial db.
let measurement = Measurement::start("Fetching contract code...", true);
let mut contracts = HashSet::new();
let mut contracts = BTreeSet::new();
let initial_db = &provider_db.initial_db;
for account in initial_db.accounts.values() {
let code = &account.info.code;
Expand Down
14 changes: 7 additions & 7 deletions core/src/provider/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{collections::HashSet, mem::take};
use std::{collections::BTreeSet, mem::take};

use alloy_consensus::Header as AlloyConsensusHeader;
use alloy_primitives::Bytes;
Expand Down Expand Up @@ -44,9 +44,9 @@ pub struct ProviderDb<BDP: BlockDataProvider> {

pub optimistic: bool,
pub staging_db: MemDb,
pub pending_accounts: HashSet<Address>,
pub pending_slots: HashSet<(Address, U256)>,
pub pending_block_hashes: HashSet<u64>,
pub pending_accounts: BTreeSet<Address>,
pub pending_slots: BTreeSet<(Address, U256)>,
pub pending_block_hashes: BTreeSet<u64>,
}

impl<BDP: BlockDataProvider> ProviderDb<BDP> {
Expand All @@ -61,9 +61,9 @@ impl<BDP: BlockDataProvider> ProviderDb<BDP> {
initial_db: Default::default(),
initial_headers: Default::default(),
current_db: Default::default(),
pending_accounts: HashSet::new(),
pending_slots: HashSet::new(),
pending_block_hashes: HashSet::new(),
pending_accounts: BTreeSet::new(),
pending_slots: BTreeSet::new(),
pending_block_hashes: BTreeSet::new(),
};
if chain_spec.is_taiko() {
// Get the 256 history block hashes from the provider at first time for anchor
Expand Down
11 changes: 9 additions & 2 deletions core/src/provider/rpc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use alloy_transport_http::Http;
use raiko_lib::clear_line;
use reqwest_alloy::Client;
use revm::primitives::{AccountInfo, Bytecode};
use std::collections::HashMap;
use std::collections::{BTreeMap, HashMap};
use tracing::trace;

use crate::{
Expand Down Expand Up @@ -222,7 +222,7 @@ impl BlockDataProvider for RpcBlockDataProvider {
offset: usize,
num_storage_proofs: usize,
) -> RaikoResult<MerkleProof> {
let mut storage_proofs: MerkleProof = HashMap::new();
let mut storage_proofs: MerkleProof = BTreeMap::new();
let mut idx = offset;

let mut accounts = accounts.clone();
Expand Down Expand Up @@ -316,6 +316,13 @@ impl BlockDataProvider for RpcBlockDataProvider {
}
clear_line();

// sort the vec values
for (_, storage_proof) in storage_proofs.iter_mut() {
storage_proof
.storage_proof
.sort_by(|a, b| a.value.cmp(&b.value));
}

Ok(storage_proofs)
}
}
3 changes: 2 additions & 1 deletion host/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ risc0-driver = { path = "../provers/risc0/driver", optional = true }
sgx-prover = { path = "../provers/sgx/prover", optional = true }

# raiko
raiko-lib = { workspace = true, features = ["c-kzg"] }
raiko-lib = { workspace = true }
# raiko-lib = { workspace = true, features = ["c-kzg"] }
raiko-core = { workspace = true }

# alloy
Expand Down
4 changes: 2 additions & 2 deletions lib/src/input.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,14 @@
// See the License for the specific language governing permissions and
// limitations under the License.
use core::fmt::Debug;
use std::collections::BTreeMap;
#[cfg(feature = "std")]
use std::path::PathBuf;

use alloy_consensus::Header as AlloyConsensusHeader;
use alloy_rpc_types::Withdrawal as AlloyWithdrawal;
use alloy_sol_types::{sol, SolCall};
use anyhow::{anyhow, Result};
use revm::primitives::HashMap;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;

Expand Down Expand Up @@ -67,7 +67,7 @@ pub struct GuestInput {
/// State trie of the parent block.
pub parent_state_trie: MptNode,
/// Maps each address with its storage trie and the used storage slots.
pub parent_storage: HashMap<Address, StorageEntry>,
pub parent_storage: BTreeMap<Address, StorageEntry>,
/// The code of all unique contracts.
pub contracts: Vec<Bytes>,
/// List of at most 256 previous block headers
Expand Down
11 changes: 6 additions & 5 deletions lib/src/primitives/mpt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ use core::{
fmt::{Debug, Write},
iter, mem,
};
use std::collections::BTreeMap;

use alloy_primitives::{b256, TxNumber, B256, U256};
use alloy_rlp::Encodable;
Expand Down Expand Up @@ -1032,15 +1033,15 @@ pub fn shorten_node_path(node: &MptNode) -> Vec<MptNode> {

pub fn proofs_to_tries(
state_root: B256,
parent_proofs: HashMap<Address, EIP1186AccountProofResponse>,
proofs: HashMap<Address, EIP1186AccountProofResponse>,
) -> Result<(MptNode, HashMap<Address, StorageEntry>)> {
parent_proofs: BTreeMap<Address, EIP1186AccountProofResponse>,
proofs: BTreeMap<Address, EIP1186AccountProofResponse>,
) -> Result<(MptNode, BTreeMap<Address, StorageEntry>)> {
// if no addresses are provided, return the trie only consisting of the state root
if parent_proofs.is_empty() {
return Ok((node_from_digest(state_root), HashMap::new()));
return Ok((node_from_digest(state_root), BTreeMap::new()));
}

let mut storage: HashMap<Address, StorageEntry> = HashMap::with_capacity(parent_proofs.len());
let mut storage: BTreeMap<Address, StorageEntry> = BTreeMap::new();

let mut state_nodes = HashMap::new();
let mut state_root_node = MptNode::default();
Expand Down
3 changes: 2 additions & 1 deletion primitives/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ rlp = { workspace = true, features = ["std"] }
serde = { workspace = true }
sha3 = { workspace = true }
thiserror = { workspace = true }
log = { workspace = true }

once_cell = { workspace = true, features = ["critical-section"], optional = true }

Expand All @@ -33,4 +34,4 @@ serde_json = { version = "1.0", default-features = false }

[features]
std = ["anyhow/std", "rlp/std"]
c-kzg = ["dep:c-kzg", "revm-primitives/c-kzg", "dep:sha2", "dep:tempfile", "dep:once_cell"]
c-kzg = ["dep:c-kzg", "revm-primitives/c-kzg", "dep:sha2", "dep:tempfile", "dep:once_cell"]
12 changes: 7 additions & 5 deletions primitives/src/mpt.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ use core::{
fmt::{Debug, Write},
iter, mem,
};
use std::collections::BTreeMap;

use alloy_primitives::{b256, TxNumber, B256, U256};
use alloy_rlp::Encodable;
Expand Down Expand Up @@ -1032,15 +1033,15 @@ pub fn shorten_node_path(node: &MptNode) -> Vec<MptNode> {

pub fn proofs_to_tries(
state_root: B256,
parent_proofs: HashMap<Address, EIP1186AccountProofResponse>,
proofs: HashMap<Address, EIP1186AccountProofResponse>,
) -> Result<(MptNode, HashMap<Address, StorageEntry>)> {
parent_proofs: BTreeMap<Address, EIP1186AccountProofResponse>,
proofs: BTreeMap<Address, EIP1186AccountProofResponse>,
) -> Result<(MptNode, BTreeMap<Address, StorageEntry>)> {
// if no addresses are provided, return the trie only consisting of the state root
if parent_proofs.is_empty() {
return Ok((node_from_digest(state_root), HashMap::new()));
return Ok((node_from_digest(state_root), BTreeMap::new()));
}

let mut storage: HashMap<Address, StorageEntry> = HashMap::with_capacity(parent_proofs.len());
let mut storage: BTreeMap<Address, StorageEntry> = BTreeMap::new();

let mut state_nodes = HashMap::new();
let mut state_root_node = MptNode::default();
Expand Down Expand Up @@ -1104,6 +1105,7 @@ pub fn proofs_to_tries(
.iter()
.map(|p| U256::from_be_bytes(p.key.0 .0))
.collect();
println!("slots: {:#?}", slots);
storage.insert(address, (storage_trie, slots));
}
let state_trie = resolve_nodes(&state_root_node, &state_nodes);
Expand Down
4 changes: 4 additions & 0 deletions provers/sp1/driver/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,10 @@ sp1-sdk = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
once_cell = { workspace = true, optional = true }
sha3 = { workspace = true, optional = true, default-features = false}
reqwest = { workspace = true }
log = { workspace = true }
tokio = { workspace = true }
async-channel = { workspace = true }

[build-dependencies]
sp1-helper = { workspace = true, optional = true }
Expand Down
4 changes: 4 additions & 0 deletions provers/sp1/driver/src/distributed/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
// Distributed SP1 proving support.
//
// `prover` holds the distributed prover implementation; `worker` is an
// internal submodule (presumably the per-node worker logic — not re-exported,
// so it is private to this module tree; confirm against the full PR).
mod prover;
mod worker;

// Public entry point: callers (e.g. ProofType::Sp1Distributed dispatch in
// core/src/interfaces.rs) invoke `Sp1DistributedProver::run(...)`.
pub use prover::Sp1DistributedProver;
Loading
Loading