From 74c12c05b8da22ebf45984dd5fc5448998abb5bf Mon Sep 17 00:00:00 2001 From: GianMarco Date: Thu, 4 Jan 2024 20:29:25 -0500 Subject: [PATCH 01/33] Graphql expose model order params (#1332) * print values * Implement ModelObject struct, add fn resolve_many() * Added model name/hash ordering fields * fix * fix * use constants * remove fn new() * remove imports * Feat: Implement fn resolve_many() * Feat: Add test for ordering by name ASC * fmt * enable tests * Clean code --------- Co-authored-by: broody --- crates/torii/graphql/src/constants.rs | 7 ++ .../graphql/src/object/connection/mod.rs | 9 ++- .../graphql/src/object/inputs/order_input.rs | 6 +- crates/torii/graphql/src/object/mod.rs | 1 - crates/torii/graphql/src/object/model.rs | 81 ++++++++++++++++++- crates/torii/graphql/src/query/data.rs | 8 +- crates/torii/graphql/src/tests/mod.rs | 11 +++ .../graphql/src/tests/models_ordering_test.rs | 74 +++++++++++++++++ 8 files changed, 186 insertions(+), 11 deletions(-) create mode 100644 crates/torii/graphql/src/tests/models_ordering_test.rs diff --git a/crates/torii/graphql/src/constants.rs b/crates/torii/graphql/src/constants.rs index 0117ba18b8..bf08ddd028 100644 --- a/crates/torii/graphql/src/constants.rs +++ b/crates/torii/graphql/src/constants.rs @@ -25,6 +25,8 @@ pub const PAGE_INFO_TYPE_NAME: &str = "World__PageInfo"; pub const TRANSACTION_TYPE_NAME: &str = "World__Transaction"; pub const QUERY_TYPE_NAME: &str = "World__Query"; pub const SUBSCRIPTION_TYPE_NAME: &str = "World__Subscription"; +pub const MODEL_ORDER_TYPE_NAME: &str = "World__ModelOrder"; +pub const MODEL_ORDER_FIELD_TYPE_NAME: &str = "World__ModelOrderField"; // objects' single and plural names pub const ENTITY_NAMES: (&str, &str) = ("entity", "entities"); @@ -34,3 +36,8 @@ pub const SOCIAL_NAMES: (&str, &str) = ("social", "socials"); pub const CONTENT_NAMES: (&str, &str) = ("content", "contents"); pub const METADATA_NAMES: (&str, &str) = ("metadata", "metadatas"); pub const TRANSACTION_NAMES: (&str, &str) = ("transaction", "transactions"); + +// misc +pub const ORDER_DIR_TYPE_NAME: &str = "OrderDirection"; +pub const ORDER_ASC: &str = "ASC"; +pub const ORDER_DESC: &str = "DESC"; diff --git a/crates/torii/graphql/src/object/connection/mod.rs b/crates/torii/graphql/src/object/connection/mod.rs index 7785188785..d43d4a1461 100644 --- a/crates/torii/graphql/src/object/connection/mod.rs +++ b/crates/torii/graphql/src/object/connection/mod.rs @@ -123,10 +123,15 @@ pub fn connection_output( .iter() .map(|row| { let order_field = match order { - Some(order) => format!("external_{}", order.field), + Some(order) => { + if is_external { + format!("external_{}", order.field) + } else { + order.field.to_string() + } + } None => id_column.to_string(), }; - let primary_order = row.try_get::(id_column)?; let secondary_order = row.try_get_unchecked::(&order_field)?; let cursor = cursor::encode(&primary_order, &secondary_order); diff --git a/crates/torii/graphql/src/object/inputs/order_input.rs b/crates/torii/graphql/src/object/inputs/order_input.rs index 2c388cc116..d7386200ce 100644 --- a/crates/torii/graphql/src/object/inputs/order_input.rs +++ b/crates/torii/graphql/src/object/inputs/order_input.rs @@ -1,6 +1,7 @@ use async_graphql::dynamic::{Enum, Field, InputObject, InputValue, ResolverContext, TypeRef}; use super::InputObjectTrait; +use crate::constants::{ORDER_ASC, ORDER_DESC, ORDER_DIR_TYPE_NAME}; use crate::object::TypeMapping; use crate::query::order::{Direction, Order}; @@ -27,7 +28,7 @@ impl InputObjectTrait for 
OrderInputObject { fn input_object(&self) -> InputObject { // direction and field values are required (not null) InputObject::new(self.type_name()) - .field(InputValue::new("direction", TypeRef::named_nn("OrderDirection"))) + .field(InputValue::new("direction", TypeRef::named_nn(ORDER_DIR_TYPE_NAME))) .field(InputValue::new( "field", TypeRef::named_nn(format!("{}Field", self.type_name())), @@ -36,7 +37,7 @@ impl InputObjectTrait for OrderInputObject { fn enum_objects(&self) -> Option> { // Direction enum has only two members ASC and DESC - let direction = Enum::new("OrderDirection").item("ASC").item("DESC"); + let direction = Enum::new(ORDER_DIR_TYPE_NAME).item(ORDER_ASC).item(ORDER_DESC); // Field Order enum consist of all members of a model let field_order = self @@ -45,7 +46,6 @@ impl InputObjectTrait for OrderInputObject { .fold(Enum::new(format!("{}Field", self.type_name())), |acc, (ty_name, _)| { acc.item(ty_name.to_uppercase()) }); - Some(vec![direction, field_order]) } } diff --git a/crates/torii/graphql/src/object/mod.rs b/crates/torii/graphql/src/object/mod.rs index 494449654f..bb82e96de8 100644 --- a/crates/torii/graphql/src/object/mod.rs +++ b/crates/torii/graphql/src/object/mod.rs @@ -169,7 +169,6 @@ pub trait ObjectTrait: Send + Sync { object = object.field(field); } } - vec![object] } } diff --git a/crates/torii/graphql/src/object/model.rs b/crates/torii/graphql/src/object/model.rs index 402bfe7935..59bd4cc89d 100644 --- a/crates/torii/graphql/src/object/model.rs +++ b/crates/torii/graphql/src/object/model.rs @@ -1,13 +1,26 @@ use async_graphql::dynamic::indexmap::IndexMap; -use async_graphql::dynamic::{InputValue, SubscriptionField, SubscriptionFieldFuture, TypeRef}; +use async_graphql::dynamic::{ + Enum, Field, FieldFuture, InputObject, InputValue, SubscriptionField, SubscriptionFieldFuture, + TypeRef, +}; use async_graphql::{Name, Value}; +use sqlx::{Pool, Sqlite}; use tokio_stream::StreamExt; use torii_core::simple_broker::SimpleBroker; use torii_core::types::Model; +use super::connection::{connection_arguments, connection_output, parse_connection_arguments}; +use super::inputs::order_input::parse_order_argument; use super::{ObjectTrait, TypeMapping, ValueMapping}; -use crate::constants::{MODEL_NAMES, MODEL_TABLE, MODEL_TYPE_NAME}; +use crate::constants::{ + ID_COLUMN, MODEL_NAMES, MODEL_ORDER_FIELD_TYPE_NAME, MODEL_ORDER_TYPE_NAME, MODEL_TABLE, + MODEL_TYPE_NAME, ORDER_ASC, ORDER_DESC, ORDER_DIR_TYPE_NAME, +}; use crate::mapping::MODEL_TYPE_MAPPING; +use crate::query::data::{count_rows, fetch_multiple_rows}; + +const ORDER_BY_NAME: &str = "NAME"; +const ORDER_BY_HASH: &str = "CLASS_HASH"; pub struct ModelObject; @@ -28,6 +41,70 @@ impl ObjectTrait for ModelObject { Some(MODEL_TABLE) } + fn input_objects(&self) -> Option> { + let order_input = InputObject::new(MODEL_ORDER_TYPE_NAME) + .field(InputValue::new("direction", TypeRef::named_nn(ORDER_DIR_TYPE_NAME))) + .field(InputValue::new("field", TypeRef::named_nn(MODEL_ORDER_FIELD_TYPE_NAME))); + + Some(vec![order_input]) + } + + fn enum_objects(&self) -> Option> { + let direction = Enum::new(ORDER_DIR_TYPE_NAME).item(ORDER_ASC).item(ORDER_DESC); + let field_order = + Enum::new(MODEL_ORDER_FIELD_TYPE_NAME).item(ORDER_BY_NAME).item(ORDER_BY_HASH); + + Some(vec![direction, field_order]) + } + + fn resolve_many(&self) -> Option { + let type_mapping = self.type_mapping().clone(); + let table_name = self.table_name().unwrap().to_string(); + + let mut field = Field::new( + self.name().1, + TypeRef::named(format!("{}Connection", 
self.type_name())), + move |ctx| { + let type_mapping = type_mapping.clone(); + let table_name = table_name.to_string(); + + FieldFuture::new(async move { + let mut conn = ctx.data::>()?.acquire().await?; + let order = parse_order_argument(&ctx); + let connection = parse_connection_arguments(&ctx)?; + let total_count = count_rows(&mut conn, &table_name, &None, &None).await?; + let (data, page_info) = fetch_multiple_rows( + &mut conn, + &table_name, + ID_COLUMN, + &None, + &order, + &None, + &connection, + total_count, + ) + .await?; + let results = connection_output( + &data, + &type_mapping, + &order, + ID_COLUMN, + total_count, + false, + page_info, + )?; + + Ok(Some(Value::Object(results))) + }) + }, + ); + + field = connection_arguments(field); + field = field.argument(InputValue::new("order", TypeRef::named(MODEL_ORDER_TYPE_NAME))); + + Some(field) + } + fn subscriptions(&self) -> Option> { Some(vec![ SubscriptionField::new("modelRegistered", TypeRef::named_nn(self.type_name()), |ctx| { diff --git a/crates/torii/graphql/src/query/data.rs b/crates/torii/graphql/src/query/data.rs index aee42dd64f..44876dd390 100644 --- a/crates/torii/graphql/src/query/data.rs +++ b/crates/torii/graphql/src/query/data.rs @@ -4,7 +4,7 @@ use sqlx::{Result, Row, SqliteConnection}; use super::filter::{Filter, FilterValue}; use super::order::{CursorDirection, Direction, Order}; -use crate::constants::DEFAULT_LIMIT; +use crate::constants::{DEFAULT_LIMIT, MODEL_TABLE}; use crate::object::connection::{cursor, ConnectionArguments}; pub async fn count_rows( @@ -85,7 +85,10 @@ pub async fn fetch_multiple_rows( // `first` or `last` param. Explicit ordering take precedence match order { Some(order) => { - let column_name = format!("external_{}", order.field); + let mut column_name = order.field.clone(); + if table_name != MODEL_TABLE { + column_name = format!("external_{}", column_name); + } query.push_str(&format!( " ORDER BY {column_name} {}, {id_column} {} LIMIT {limit}", order.direction.as_ref(), @@ -125,7 +128,6 @@ pub async fn fetch_multiple_rows( Some(order) => format!("external_{}", order.field), None => id_column.to_string(), }; - match cursor_param { Some(cursor_query) => { let first_cursor = cursor::encode( diff --git a/crates/torii/graphql/src/tests/mod.rs b/crates/torii/graphql/src/tests/mod.rs index ec5582828c..9d2c71e16f 100644 --- a/crates/torii/graphql/src/tests/mod.rs +++ b/crates/torii/graphql/src/tests/mod.rs @@ -32,6 +32,7 @@ use torii_core::sql::Sql; mod entities_test; mod metadata_test; +mod models_ordering_test; mod models_test; mod subscription_test; @@ -69,6 +70,16 @@ pub struct PageInfo { pub end_cursor: Option, } +#[derive(Deserialize, Debug, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct WorldModel { + pub id: String, + pub name: String, + pub class_hash: String, + pub transaction_hash: String, + pub created_at: String, +} + #[derive(Deserialize, Debug, PartialEq)] pub struct Record { pub __typename: String, diff --git a/crates/torii/graphql/src/tests/models_ordering_test.rs b/crates/torii/graphql/src/tests/models_ordering_test.rs new file mode 100644 index 0000000000..3ac52d1ff9 --- /dev/null +++ b/crates/torii/graphql/src/tests/models_ordering_test.rs @@ -0,0 +1,74 @@ +#[cfg(test)] +mod tests { + use anyhow::Result; + use async_graphql::dynamic::Schema; + use serde_json::Value; + + use crate::schema::build_schema; + use crate::tests::{run_graphql_query, spinup_types_test, Connection, WorldModel}; + + async fn world_model_query(schema: &Schema, arg: &str) -> Value { + let 
query = format!( + r#" + {{ + models {} {{ + totalCount + edges {{ + cursor + node {{ + id + name + classHash + transactionHash + createdAt + }} + }} + pageInfo{{ + startCursor + hasPreviousPage + hasNextPage + startCursor + endCursor + }} + }} + }} + "#, + arg, + ); + + let result = run_graphql_query(schema, &query).await; + result.get("models").ok_or("models not found").unwrap().clone() + } + + // End to end test spins up a test sequencer and deploys types-test project, this takes a while + // to run so combine all related tests into one + #[tokio::test(flavor = "multi_thread")] + async fn models_ordering_test() -> Result<()> { + let pool = spinup_types_test().await?; + let schema = build_schema(&pool).await.unwrap(); + + // default params, test entity relationship, test nested types + let world_model = world_model_query(&schema, "").await; + let connection: Connection = serde_json::from_value(world_model).unwrap(); + let first_model = connection.edges.first().unwrap(); + let second_model = connection.edges.get(1).unwrap(); + let last_model = connection.edges.get(2).unwrap(); + assert_eq!(&first_model.node.name, "Subrecord"); + assert_eq!(&second_model.node.name, "RecordSibling"); + assert_eq!(&last_model.node.name, "Record"); + + // *** ORDER TESTING *** + + // order on name string ASC (number) + let world_model = + world_model_query(&schema, "(order: {field: NAME, direction: ASC})").await; + let connection: Connection = serde_json::from_value(world_model).unwrap(); + let first_model = connection.edges.first().unwrap(); + let second_model = connection.edges.get(1).unwrap(); + let last_model = connection.edges.get(2).unwrap(); + assert_eq!(&first_model.node.name, "Record"); + assert_eq!(&second_model.node.name, "RecordSibling"); + assert_eq!(&last_model.node.name, "Subrecord"); + Ok(()) + } +} From 36e5853877d011a5bb4b3bd77b9de676fb454b0c Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Mon, 8 Jan 2024 07:20:08 +0530 Subject: [PATCH 02/33] refactor(sozo): change priority order for arguments (#1358) * refactor(sozo): change priority order for arguments * fix: tests * fix: code coverage --- crates/sozo/src/commands/options/account.rs | 23 +++++++--------- crates/sozo/src/commands/options/mod.rs | 5 ++++ crates/sozo/src/commands/options/starknet.rs | 28 +++++++++++++------- 3 files changed, 34 insertions(+), 22 deletions(-) diff --git a/crates/sozo/src/commands/options/account.rs b/crates/sozo/src/commands/options/account.rs index ad91c21ce8..f529e717d4 100644 --- a/crates/sozo/src/commands/options/account.rs +++ b/crates/sozo/src/commands/options/account.rs @@ -8,13 +8,17 @@ use starknet::core::types::FieldElement; use starknet::providers::Provider; use starknet::signers::{LocalWallet, SigningKey}; +use super::{ + DOJO_ACCOUNT_ADDRESS_ENV_VAR, DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_PRIVATE_KEY_ENV_VAR, +}; + #[derive(Debug, Args)] #[command(next_help_heading = "Account options")] pub struct AccountOptions { - #[arg(long)] + #[arg(long, env = DOJO_ACCOUNT_ADDRESS_ENV_VAR)] pub account_address: Option, - #[arg(long)] + #[arg(long, env = DOJO_PRIVATE_KEY_ENV_VAR)] #[arg(requires = "account_address")] #[arg(conflicts_with = "keystore_path")] #[arg(help_heading = "Signer options - RAW")] @@ -27,7 +31,7 @@ pub struct AccountOptions { #[arg(help = "Use the keystore in the given folder or file.")] pub keystore_path: Option, - #[arg(long = "password")] + #[arg(long = "password", env = DOJO_KEYSTORE_PASSWORD_ENV_VAR)] #[arg(value_name = "PASSWORD")] #[arg(requires = 
"keystore_path")] #[arg(help_heading = "Signer options - KEYSTORE")] @@ -60,11 +64,8 @@ impl AccountOptions { } fn signer(&self, env_metadata: Option<&Environment>) -> Result { - if let Some(private_key) = self - .private_key - .as_deref() - .or_else(|| env_metadata.and_then(|env| env.private_key())) - .or(std::env::var("DOJO_PRIVATE_KEY").ok().as_deref()) + if let Some(private_key) = + self.private_key.as_deref().or_else(|| env_metadata.and_then(|env| env.private_key())) { return Ok(LocalWallet::from_signing_key(SigningKey::from_secret_scalar( FieldElement::from_str(private_key)?, @@ -76,7 +77,6 @@ impl AccountOptions { .keystore_password .as_deref() .or_else(|| env_metadata.and_then(|env| env.keystore_password())) - .or(std::env::var("DOJO_KEYSTORE_PASSWORD").ok().as_deref()) { return Ok(LocalWallet::from_signing_key(SigningKey::from_keystore( path, password, @@ -95,10 +95,7 @@ impl AccountOptions { fn account_address(&self, env_metadata: Option<&Environment>) -> Result { if let Some(address) = self.account_address { Ok(address) - } else if let Some(address) = env_metadata - .and_then(|env| env.account_address()) - .or(std::env::var("DOJO_ACCOUNT_ADDRESS").ok().as_deref()) - { + } else if let Some(address) = env_metadata.and_then(|env| env.account_address()) { Ok(FieldElement::from_str(address)?) } else { Err(anyhow!( diff --git a/crates/sozo/src/commands/options/mod.rs b/crates/sozo/src/commands/options/mod.rs index e7d2645387..40ec922b9f 100644 --- a/crates/sozo/src/commands/options/mod.rs +++ b/crates/sozo/src/commands/options/mod.rs @@ -2,3 +2,8 @@ pub mod account; pub mod starknet; pub mod transaction; pub mod world; + +const STARKNET_RPC_URL_ENV_VAR: &str = "STARKNET_RPC_URL"; +const DOJO_PRIVATE_KEY_ENV_VAR: &str = "DOJO_PRIVATE_KEY"; +const DOJO_KEYSTORE_PASSWORD_ENV_VAR: &str = "DOJO_KEYSTORE_PASSWORD"; +const DOJO_ACCOUNT_ADDRESS_ENV_VAR: &str = "DOJO_ACCOUNT_ADDRESS"; diff --git a/crates/sozo/src/commands/options/starknet.rs b/crates/sozo/src/commands/options/starknet.rs index 9f024f20b3..93ba841dcc 100644 --- a/crates/sozo/src/commands/options/starknet.rs +++ b/crates/sozo/src/commands/options/starknet.rs @@ -5,15 +5,15 @@ use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; use url::Url; -const STARKNET_RPC_URL_ENV_VAR: &str = "STARKNET_RPC_URL"; +use super::STARKNET_RPC_URL_ENV_VAR; #[derive(Debug, Args)] #[command(next_help_heading = "Starknet options")] pub struct StarknetOptions { - #[arg(long, env = STARKNET_RPC_URL_ENV_VAR, default_value = "http://localhost:5050")] + #[arg(long, env = STARKNET_RPC_URL_ENV_VAR)] #[arg(value_name = "URL")] #[arg(help = "The Starknet RPC endpoint.")] - pub rpc_url: Url, + pub rpc_url: Option, } impl StarknetOptions { @@ -26,11 +26,13 @@ impl StarknetOptions { // we dont check the env var because that would be handled by `clap` fn url(&self, env_metadata: Option<&Environment>) -> Result { - Ok(if let Some(url) = env_metadata.and_then(|env| env.rpc_url()) { - Url::parse(url)? + if let Some(url) = self.rpc_url.as_ref() { + Ok(url.clone()) + } else if let Some(url) = env_metadata.and_then(|env| env.rpc_url()) { + Ok(Url::parse(url)?) 
} else { - self.rpc_url.clone() - }) + Ok(Url::parse("http://localhost:5050").unwrap()) + } } } @@ -39,10 +41,11 @@ mod tests { use clap::Parser; use super::StarknetOptions; - use crate::commands::options::starknet::STARKNET_RPC_URL_ENV_VAR; + use crate::commands::options::STARKNET_RPC_URL_ENV_VAR; const ENV_RPC: &str = "http://localhost:7474/"; const METADATA_RPC: &str = "http://localhost:6060/"; + const DEFAULT_RPC: &str = "http://localhost:5050/"; #[derive(clap::Parser)] struct Command { @@ -77,6 +80,13 @@ mod tests { ..Default::default() }; let cmd = Command::parse_from([""]); - assert_eq!(cmd.options.url(Some(&env_metadata)).unwrap().as_str(), METADATA_RPC); + assert_eq!(cmd.options.url(Some(&env_metadata)).unwrap().as_str(), ENV_RPC); + } + + #[test] + fn exists_in_neither() { + let env_metadata = dojo_world::metadata::Environment::default(); + let cmd = Command::parse_from([""]); + assert_eq!(cmd.options.url(Some(&env_metadata)).unwrap().as_str(), DEFAULT_RPC); } } From 042d46bd35877674e95acf9b30e77515e485e8da Mon Sep 17 00:00:00 2001 From: Junichi Sugiura Date: Wed, 10 Jan 2024 03:37:55 +0100 Subject: [PATCH 03/33] Change model query field to camel case (#1410) --- Cargo.lock | 2 +- crates/torii/graphql/src/schema.rs | 3 ++- .../graphql/src/tests/models_ordering_test.rs | 2 +- crates/torii/graphql/src/tests/models_test.rs | 17 +++++++++++++++++ 4 files changed, 21 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9403e074b1..468168b43e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2827,7 +2827,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.1.0" +version = "0.4.4" dependencies = [ "cairo-lang-starknet", "camino", diff --git a/crates/torii/graphql/src/schema.rs b/crates/torii/graphql/src/schema.rs index 32db9f090a..1d08bfe0f2 100644 --- a/crates/torii/graphql/src/schema.rs +++ b/crates/torii/graphql/src/schema.rs @@ -2,6 +2,7 @@ use anyhow::Result; use async_graphql::dynamic::{ Field, Object, Scalar, Schema, Subscription, SubscriptionField, Union, }; +use convert_case::{Case, Casing}; use sqlx::SqlitePool; use torii_core::types::Model; @@ -126,7 +127,7 @@ async fn build_objects(pool: &SqlitePool) -> Result<(Vec>, let type_mapping = type_mapping_query(&mut conn, &model.id).await?; if !type_mapping.is_empty() { - let field_name = model.name.to_lowercase(); + let field_name = model.name.to_case(Case::Camel); let type_name = model.name; union = union.possible_type(&type_name); diff --git a/crates/torii/graphql/src/tests/models_ordering_test.rs b/crates/torii/graphql/src/tests/models_ordering_test.rs index 3ac52d1ff9..9a93ab39c7 100644 --- a/crates/torii/graphql/src/tests/models_ordering_test.rs +++ b/crates/torii/graphql/src/tests/models_ordering_test.rs @@ -23,7 +23,7 @@ mod tests { createdAt }} }} - pageInfo{{ + pageInfo {{ startCursor hasPreviousPage hasNextPage diff --git a/crates/torii/graphql/src/tests/models_test.rs b/crates/torii/graphql/src/tests/models_test.rs index 75e2b7c07b..cc04ded6d2 100644 --- a/crates/torii/graphql/src/tests/models_test.rs +++ b/crates/torii/graphql/src/tests/models_test.rs @@ -318,6 +318,23 @@ mod tests { let connection: Connection = serde_json::from_value(records).unwrap(); assert_eq!(connection.edges.len(), 0); + let result = run_graphql_query( + &schema, + r#" + { + recordSiblingModels { + edges { + node { + __typename + } + } + } + } + "#, + ) + .await; + assert!(result.get("recordSiblingModels").is_some()); + Ok(()) } } From 3bb35a6790de3eea83e5c728ed0de46523a3de10 Mon Sep 17 00:00:00 2001 From: 
Ammar Arif Date: Wed, 10 Jan 2024 13:05:03 +0900 Subject: [PATCH 04/33] fix(katana-core): replace compiled class hash with class hash (#1404) * replace compiled class hash with class hash * replace old address --- .../core/contracts/messaging/cairo/Makefile | 2 +- .../contracts/messaging/cairo/account_l2.json | 2 +- .../contracts/messaging/solidity/Makefile | 2 +- crates/katana/core/src/accounts.rs | 32 +++---------------- crates/katana/core/src/utils/mod.rs | 8 ++--- crates/torii/types-test/Scarb.toml | 2 +- examples/rpc/starknet/starknet_getClass.hurl | 2 +- .../rpc/starknet/starknet_getClassAt.hurl | 2 +- .../rpc/starknet/starknet_getClassHashAt.hurl | 2 +- examples/rpc/starknet/starknet_getNonce.hurl | 2 +- examples/spawn-and-move/Scarb.toml | 2 +- 11 files changed, 18 insertions(+), 40 deletions(-) diff --git a/crates/katana/core/contracts/messaging/cairo/Makefile b/crates/katana/core/contracts/messaging/cairo/Makefile index 48d76a402b..aac32186a6 100644 --- a/crates/katana/core/contracts/messaging/cairo/Makefile +++ b/crates/katana/core/contracts/messaging/cairo/Makefile @@ -1,5 +1,5 @@ ACCOUNT_L2=./account_l2.json -ACCOUNT_L2_ADDR=0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624 +ACCOUNT_L2_ADDR=0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03 L2_PRIVATE_KEY=0x1800000000300000180000000000030000000000003006001800006600 # Build files helpers. diff --git a/crates/katana/core/contracts/messaging/cairo/account_l2.json b/crates/katana/core/contracts/messaging/cairo/account_l2.json index 32f05e685d..788b31022a 100644 --- a/crates/katana/core/contracts/messaging/cairo/account_l2.json +++ b/crates/katana/core/contracts/messaging/cairo/account_l2.json @@ -8,6 +8,6 @@ "deployment": { "status": "deployed", "class_hash": "0x4d07e40e93398ed3c76981e72dd1fd22557a78ce36c0515f679e27f0bb5bc5f", - "address": "0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624" + "address": "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" } } diff --git a/crates/katana/core/contracts/messaging/solidity/Makefile b/crates/katana/core/contracts/messaging/solidity/Makefile index 4baa800e98..1291370c05 100644 --- a/crates/katana/core/contracts/messaging/solidity/Makefile +++ b/crates/katana/core/contracts/messaging/solidity/Makefile @@ -9,7 +9,7 @@ export $(shell sed 's/=.*//' .env) # Addresses fixed here for easy testing. 
C_MSG_L2_ADDR=0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512 -L2_ACCOUNT=0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624 +L2_ACCOUNT=0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03 L2_CONTRACT_ADDR=0x0429a64d97c1422a37a09fc7406f35c264be59b744aaff5a79d59393eb1bc7e1 deploy_messaging_contracts: diff --git a/crates/katana/core/src/accounts.rs b/crates/katana/core/src/accounts.rs index 25ff933c12..f602a38e34 100644 --- a/crates/katana/core/src/accounts.rs +++ b/crates/katana/core/src/accounts.rs @@ -1,8 +1,6 @@ use std::fmt::Display; -use std::sync::Arc; use anyhow::Result; -use blockifier::execution::contract_class::ContractClass; use katana_primitives::contract::ContractAddress; use katana_primitives::FieldElement; use katana_provider::traits::state::StateWriter; @@ -14,9 +12,7 @@ use starknet::core::serde::unsigned_field_element::UfeHex; use starknet::core::utils::{get_contract_address, get_storage_var_address}; use starknet::signers::SigningKey; -use crate::constants::{ - FEE_TOKEN_ADDRESS, OZ_V1_ACCOUNT_CONTRACT_COMPILED, OZ_V1_ACCOUNT_CONTRACT_COMPILED_CLASS_HASH, -}; +use crate::constants::{FEE_TOKEN_ADDRESS, OZ_V1_ACCOUNT_CONTRACT_CLASS_HASH}; #[serde_as] #[derive(Debug, Clone, Serialize)] @@ -31,18 +27,11 @@ pub struct Account { pub address: FieldElement, #[serde_as(as = "UfeHex")] pub class_hash: FieldElement, - #[serde(skip_serializing)] - pub contract_class: Arc, } impl Account { #[must_use] - pub fn new( - private_key: FieldElement, - balance: FieldElement, - class_hash: FieldElement, - contract_class: Arc, - ) -> Self { + pub fn new(private_key: FieldElement, balance: FieldElement, class_hash: FieldElement) -> Self { let public_key = public_key_from_private_key(private_key); let address = get_contract_address( FieldElement::from(666u32), @@ -51,7 +40,7 @@ impl Account { FieldElement::ZERO, ); - Self { address, public_key, balance, class_hash, private_key, contract_class } + Self { address, public_key, balance, class_hash, private_key } } // TODO: separate fund logic from this struct - implement FeeToken type @@ -104,7 +93,6 @@ pub struct DevAccountGenerator { pub seed: [u8; 32], pub balance: FieldElement, pub class_hash: FieldElement, - pub contract_class: Arc, } impl DevAccountGenerator { @@ -114,8 +102,7 @@ impl DevAccountGenerator { total, seed: [0u8; 32], balance: FieldElement::ZERO, - class_hash: (*OZ_V1_ACCOUNT_CONTRACT_COMPILED_CLASS_HASH), - contract_class: Arc::new((*OZ_V1_ACCOUNT_CONTRACT_COMPILED).clone()), + class_hash: (*OZ_V1_ACCOUNT_CONTRACT_CLASS_HASH), } } @@ -127,10 +114,6 @@ impl DevAccountGenerator { Self { balance, ..self } } - pub fn with_class(self, class_hash: FieldElement, contract_class: Arc) -> Self { - Self { class_hash, contract_class, ..self } - } - /// Generate `total` number of accounts based on the `seed`. 
#[must_use] pub fn generate(&self) -> Vec { @@ -147,12 +130,7 @@ impl DevAccountGenerator { let private_key = FieldElement::from_bytes_be(&private_key_bytes) .expect("able to create FieldElement from bytes"); - Account::new( - private_key, - self.balance, - self.class_hash, - self.contract_class.clone(), - ) + Account::new(private_key, self.balance, self.class_hash) }) .collect() } diff --git a/crates/katana/core/src/utils/mod.rs b/crates/katana/core/src/utils/mod.rs index 26bae491be..91c5529a06 100644 --- a/crates/katana/core/src/utils/mod.rs +++ b/crates/katana/core/src/utils/mod.rs @@ -42,14 +42,14 @@ pub(super) fn get_genesis_states_for_testing() -> StateUpdatesWithDeclaredClasse ]); let declared_sierra_classes = HashMap::from([( - *OZ_V1_ACCOUNT_CONTRACT_COMPILED_CLASS_HASH, + *OZ_V1_ACCOUNT_CONTRACT_CLASS_HASH, OZ_V1_ACCOUNT_CONTRACT.clone().flatten().unwrap(), )]); let declared_compiled_classes = HashMap::from([ - (*UDC_COMPILED_CLASS_HASH, (*UDC_CONTRACT).clone()), - (*ERC20_CONTRACT_COMPILED_CLASS_HASH, (*ERC20_CONTRACT).clone()), - (*OZ_V1_ACCOUNT_CONTRACT_COMPILED_CLASS_HASH, (*OZ_V1_ACCOUNT_CONTRACT_COMPILED).clone()), + (*UDC_CLASS_HASH, (*UDC_CONTRACT).clone()), + (*ERC20_CONTRACT_CLASS_HASH, (*ERC20_CONTRACT).clone()), + (*OZ_V1_ACCOUNT_CONTRACT_CLASS_HASH, (*OZ_V1_ACCOUNT_CONTRACT_COMPILED).clone()), ]); StateUpdatesWithDeclaredClasses { diff --git a/crates/torii/types-test/Scarb.toml b/crates/torii/types-test/Scarb.toml index 261261212c..192864a255 100644 --- a/crates/torii/types-test/Scarb.toml +++ b/crates/torii/types-test/Scarb.toml @@ -21,6 +21,6 @@ build-external-contracts = [ ] # socials.x = "https://twitter.com/dojostarknet" [tool.dojo.env] -account_address = "0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624" +account_address = "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" private_key = "0x1800000000300000180000000000030000000000003006001800006600" rpc_url = "http://localhost:5050/" diff --git a/examples/rpc/starknet/starknet_getClass.hurl b/examples/rpc/starknet/starknet_getClass.hurl index 1d76919b52..38de6935a1 100644 --- a/examples/rpc/starknet/starknet_getClass.hurl +++ b/examples/rpc/starknet/starknet_getClass.hurl @@ -5,7 +5,7 @@ Content-Type: application/json "method": "starknet_getClass", "params": [ "latest", - "0x016c6081eb34ad1e0c5513234ed0c025b3c7f305902d291bad534cd6474c85bc" + "0x05400e90f7e0ae78bd02c77cd75527280470e2fe19c54970dd79dc37a9d3645c" ], "id":1 } diff --git a/examples/rpc/starknet/starknet_getClassAt.hurl b/examples/rpc/starknet/starknet_getClassAt.hurl index fe0a0a4ff0..fe08b17864 100644 --- a/examples/rpc/starknet/starknet_getClassAt.hurl +++ b/examples/rpc/starknet/starknet_getClassAt.hurl @@ -5,7 +5,7 @@ Content-Type: application/json "method": "starknet_getClassAt", "params": [ "latest", - "0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624" + "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" ], "id":1 } diff --git a/examples/rpc/starknet/starknet_getClassHashAt.hurl b/examples/rpc/starknet/starknet_getClassHashAt.hurl index 83238f1469..3df06a3733 100644 --- a/examples/rpc/starknet/starknet_getClassHashAt.hurl +++ b/examples/rpc/starknet/starknet_getClassHashAt.hurl @@ -5,7 +5,7 @@ Content-Type: application/json "method": "starknet_getClassHashAt", "params": [ "pending", - "0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624" + "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" ], "id": 1 } diff --git 
a/examples/rpc/starknet/starknet_getNonce.hurl b/examples/rpc/starknet/starknet_getNonce.hurl index 57a08672e8..736c1cb15b 100644 --- a/examples/rpc/starknet/starknet_getNonce.hurl +++ b/examples/rpc/starknet/starknet_getNonce.hurl @@ -5,7 +5,7 @@ Content-Type: application/json "method": "starknet_getNonce", "params": [ "latest", - "0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624" + "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" ], "id":1 } diff --git a/examples/spawn-and-move/Scarb.toml b/examples/spawn-and-move/Scarb.toml index 7662632bfb..ecc01f66c9 100644 --- a/examples/spawn-and-move/Scarb.toml +++ b/examples/spawn-and-move/Scarb.toml @@ -23,6 +23,6 @@ name = "example" rpc_url = "http://localhost:5050/" # Default account for katana with seed = 0 -account_address = "0x9238c8ca6b3c6ab45a793593b13d98797ccd3bda179d313553e51fee114624" +account_address = "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" private_key = "0x1800000000300000180000000000030000000000003006001800006600" world_address = "0x5010c31f127114c6198df8a5239e2b7a5151e1156fb43791e37e7385faa8138" From 136a67a1faaae62ddcb097b18eb8450ccc3dc896 Mon Sep 17 00:00:00 2001 From: glihm Date: Wed, 10 Jan 2024 14:41:33 -0600 Subject: [PATCH 05/33] fix: make devcontainer standalone for rust toolchain version (#1411) --- .devcontainer/Dockerfile | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 29720815d3..0895fc1ad4 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -11,13 +11,14 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ RUN apt install -y gh libgmp3-dev -COPY rust-toolchain.toml . +# To allow independent workflow of the container, the rust-toolchain is explicitely given. 
+RUN echo "1.74.0" > rust_toolchain_version # Install cargo-binstall RUN curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash -RUN rustup toolchain install $(cat rust-toolchain.toml | grep channel | cut -d\" -f2) && \ - rustup default $(cat rust-toolchain.toml | grep channel | cut -d\" -f2) && \ +RUN rustup toolchain install $(cat rust_toolchain_version) && \ + rustup default $(cat rust_toolchain_version) && \ rustup component add clippy && \ rustup component add rustfmt @@ -37,14 +38,14 @@ RUN if [ "$TARGETPLATFORM" = "linux/arm64" ] ; then \ mv hurl-4.1.0-aarch64-unknown-linux-gnu/hurl /usr/local/bin/ && \ rm -r hurl-4.1.0-aarch64-unknown-linux-gnu && \ rm hurl.tar.gz && \ - rustup component add llvm-tools-preview --toolchain 1.70.0-aarch64-unknown-linux-gnu; \ + rustup component add llvm-tools-preview --toolchain $(cat rust_toolchain_version)-aarch64-unknown-linux-gnu; \ elif [ "$TARGETPLATFORM" = "linux/amd64" ] ; then \ curl -L https://github.com/Orange-OpenSource/hurl/releases/download/4.1.0/hurl-4.1.0-x86_64-unknown-linux-gnu.tar.gz -o hurl.tar.gz && \ tar -xzf hurl.tar.gz && \ mv hurl-4.1.0-x86_64-unknown-linux-gnu/hurl /usr/local/bin/ && \ rm -r hurl-4.1.0-x86_64-unknown-linux-gnu && \ rm hurl.tar.gz && \ - rustup component add llvm-tools-preview --toolchain 1.70.0-x86_64-unknown-linux-gnu && \ + rustup component add llvm-tools-preview --toolchain $(cat rust_toolchain_version)-x86_64-unknown-linux-gnu && \ rustup target add x86_64-fortanix-unknown-sgx --toolchain nightly; \ fi From 18182a094b62effd1d0de4dfa684d606c63fbeaf Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Thu, 11 Jan 2024 02:12:39 +0530 Subject: [PATCH 06/33] tests: refactor and add new tests (#1405) * tests: refactor and add new tests * tests: add tests for options/account * fix: typo * refactor(sozo): AccountOptions doesn't require `account_address` while specifying `private_key` to be consistent with `keystore` * fix: formatting * fix: remove unnecessary TODO * fix: formatting --- crates/dojo-world/src/metadata.rs | 1 - crates/sozo/src/commands/options/account.rs | 224 +++++++++++++++++- crates/sozo/src/commands/options/starknet.rs | 24 +- .../sozo/tests/test_data/keystore/test.json | 1 + 4 files changed, 238 insertions(+), 12 deletions(-) create mode 100644 crates/sozo/tests/test_data/keystore/test.json diff --git a/crates/dojo-world/src/metadata.rs b/crates/dojo-world/src/metadata.rs index 9726192215..a3c2c7d1ca 100644 --- a/crates/dojo-world/src/metadata.rs +++ b/crates/dojo-world/src/metadata.rs @@ -113,7 +113,6 @@ impl Environment { self.private_key.as_deref() } - #[allow(dead_code)] pub fn keystore_path(&self) -> Option<&str> { self.keystore_path.as_deref() } diff --git a/crates/sozo/src/commands/options/account.rs b/crates/sozo/src/commands/options/account.rs index f529e717d4..2afff4fddd 100644 --- a/crates/sozo/src/commands/options/account.rs +++ b/crates/sozo/src/commands/options/account.rs @@ -14,12 +14,16 @@ use super::{ #[derive(Debug, Args)] #[command(next_help_heading = "Account options")] +// INVARIANT: +// - For commandline: we can either specify `private_key` or `keystore_path` along with +// `keystore_password`. This is enforced by Clap. 
+// - For `Scarb.toml`: if both private_key and keystore are specified in `Scarb.toml` private_key +// will take priority pub struct AccountOptions { #[arg(long, env = DOJO_ACCOUNT_ADDRESS_ENV_VAR)] pub account_address: Option, #[arg(long, env = DOJO_PRIVATE_KEY_ENV_VAR)] - #[arg(requires = "account_address")] #[arg(conflicts_with = "keystore_path")] #[arg(help_heading = "Signer options - RAW")] #[arg(help = "The raw private key associated with the account contract.")] @@ -33,7 +37,6 @@ pub struct AccountOptions { #[arg(long = "password", env = DOJO_KEYSTORE_PASSWORD_ENV_VAR)] #[arg(value_name = "PASSWORD")] - #[arg(requires = "keystore_path")] #[arg(help_heading = "Signer options - KEYSTORE")] #[arg(help = "The keystore password. Used with --keystore.")] pub keystore_password: Option, @@ -72,7 +75,11 @@ impl AccountOptions { ))); } - if let Some(path) = &self.keystore_path { + if let Some(path) = &self + .keystore_path + .as_deref() + .or_else(|| env_metadata.and_then(|env| env.keystore_path())) + { if let Some(password) = self .keystore_password .as_deref() @@ -105,3 +112,214 @@ impl AccountOptions { } } } + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use clap::Parser; + use starknet::signers::{LocalWallet, Signer, SigningKey}; + use starknet_crypto::FieldElement; + + use super::{ + AccountOptions, DOJO_ACCOUNT_ADDRESS_ENV_VAR, DOJO_KEYSTORE_PASSWORD_ENV_VAR, + DOJO_PRIVATE_KEY_ENV_VAR, + }; + + #[derive(clap::Parser, Debug)] + struct Command { + #[clap(flatten)] + pub account: AccountOptions, + } + + #[test] + fn account_address_read_from_env_variable() { + std::env::set_var(DOJO_ACCOUNT_ADDRESS_ENV_VAR, "0x0"); + + let cmd = Command::parse_from([""]); + assert_eq!(cmd.account.account_address, Some(FieldElement::from_hex_be("0x0").unwrap())); + } + + #[test] + fn private_key_read_from_env_variable() { + std::env::set_var(DOJO_PRIVATE_KEY_ENV_VAR, "private_key"); + + let cmd = Command::parse_from(["sozo", "--account-address", "0x0"]); + assert_eq!(cmd.account.private_key, Some("private_key".to_owned())); + } + + #[test] + fn keystore_path_read_from_env_variable() { + std::env::set_var(DOJO_KEYSTORE_PASSWORD_ENV_VAR, "keystore_password"); + + let cmd = Command::parse_from(["sozo", "--keystore", "./some/path"]); + assert_eq!(cmd.account.keystore_password, Some("keystore_password".to_owned())); + } + + #[test] + fn account_address_from_args() { + let env_metadata = dojo_world::metadata::Environment::default(); + + let cmd = Command::parse_from(["sozo", "--account-address", "0x0"]); + assert_eq!( + cmd.account.account_address(Some(&env_metadata)).unwrap(), + FieldElement::from_hex_be("0x0").unwrap() + ); + } + + #[test] + fn account_address_from_env_metadata() { + let env_metadata = dojo_world::metadata::Environment { + account_address: Some("0x0".to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from([""]); + assert_eq!( + cmd.account.account_address(Some(&env_metadata)).unwrap(), + FieldElement::from_hex_be("0x0").unwrap() + ); + } + + #[test] + fn account_address_from_both() { + let env_metadata = dojo_world::metadata::Environment { + account_address: Some("0x0".to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from(["sozo", "--account-address", "0x1"]); + assert_eq!( + cmd.account.account_address(Some(&env_metadata)).unwrap(), + FieldElement::from_hex_be("0x1").unwrap() + ); + } + + #[test] + fn account_address_from_neither() { + let env_metadata = dojo_world::metadata::Environment::default(); + + let cmd = Command::parse_from([""]); + 
assert!(cmd.account.account_address(Some(&env_metadata)).is_err()); + } + + #[tokio::test] + async fn private_key_from_args() { + let env_metadata = dojo_world::metadata::Environment::default(); + let private_key = "0x1"; + + let cmd = + Command::parse_from(["sozo", "--account-address", "0x0", "--private-key", private_key]); + let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn private_key_from_env_metadata() { + let private_key = "0x1"; + let env_metadata = dojo_world::metadata::Environment { + private_key: Some(private_key.to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from(["sozo", "--account-address", "0x0"]); + let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn keystore_path_and_keystore_password_from_args() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let keystore_password = "dojoftw"; + let private_key = "0x1"; + let env_metadata = dojo_world::metadata::Environment::default(); + + let cmd = Command::parse_from([ + "sozo", + "--keystore", + keystore_path, + "--password", + keystore_password, + ]); + let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn keystore_path_from_env_metadata() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let keystore_password = "dojoftw"; + + let private_key = "0x1"; + let env_metadata = dojo_world::metadata::Environment { + keystore_path: Some(keystore_path.to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from(["sozo", "--password", keystore_password]); + let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[tokio::test] + async fn keystore_password_from_env_metadata() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let keystore_password = "dojoftw"; + let private_key = "0x1"; + + let env_metadata = dojo_world::metadata::Environment { + keystore_password: Some(keystore_password.to_owned()), + ..Default::default() + }; + + let cmd = 
Command::parse_from(["sozo", "--keystore", keystore_path]); + let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); + let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( + FieldElement::from_str(private_key).unwrap(), + )); + + let result_public_key = result_wallet.get_public_key().await.unwrap(); + let expected_public_key = expected_wallet.get_public_key().await.unwrap(); + assert!(result_public_key.scalar() == expected_public_key.scalar()); + } + + #[test] + fn dont_allow_both_private_key_and_keystore() { + let keystore_path = "./tests/test_data/keystore/test.json"; + let private_key = "0x1"; + assert!( + Command::try_parse_from([ + "sozo", + "--keystore", + keystore_path, + "--private_key", + private_key, + ]) + .is_err() + ); + } +} diff --git a/crates/sozo/src/commands/options/starknet.rs b/crates/sozo/src/commands/options/starknet.rs index 93ba841dcc..011b04ae9e 100644 --- a/crates/sozo/src/commands/options/starknet.rs +++ b/crates/sozo/src/commands/options/starknet.rs @@ -54,7 +54,15 @@ mod tests { } #[test] - fn url_exist_in_env_metadata_but_env_doesnt() { + fn url_read_from_env_variable() { + std::env::set_var(STARKNET_RPC_URL_ENV_VAR, ENV_RPC); + + let cmd = Command::parse_from([""]); + assert_eq!(cmd.options.url(None).unwrap().as_str(), ENV_RPC); + } + + #[test] + fn url_exist_in_env_but_not_in_args() { let env_metadata = dojo_world::metadata::Environment { rpc_url: Some(METADATA_RPC.into()), ..Default::default() @@ -65,26 +73,26 @@ mod tests { } #[test] - fn url_doesnt_exist_in_env_metadata_but_env_does() { - std::env::set_var(STARKNET_RPC_URL_ENV_VAR, ENV_RPC); + fn url_doesnt_exist_in_env_but_exist_in_args() { let env_metadata = dojo_world::metadata::Environment::default(); - let cmd = Command::parse_from([""]); + let cmd = Command::parse_from(["sozo", "--rpc-url", ENV_RPC]); + assert_eq!(cmd.options.url(Some(&env_metadata)).unwrap().as_str(), ENV_RPC); } #[test] - fn exists_in_both() { - std::env::set_var(STARKNET_RPC_URL_ENV_VAR, ENV_RPC); + fn url_exists_in_both() { let env_metadata = dojo_world::metadata::Environment { rpc_url: Some(METADATA_RPC.into()), ..Default::default() }; - let cmd = Command::parse_from([""]); + + let cmd = Command::parse_from(["sozo", "--rpc-url", ENV_RPC]); assert_eq!(cmd.options.url(Some(&env_metadata)).unwrap().as_str(), ENV_RPC); } #[test] - fn exists_in_neither() { + fn url_exists_in_neither() { let env_metadata = dojo_world::metadata::Environment::default(); let cmd = Command::parse_from([""]); assert_eq!(cmd.options.url(Some(&env_metadata)).unwrap().as_str(), DEFAULT_RPC); diff --git a/crates/sozo/tests/test_data/keystore/test.json b/crates/sozo/tests/test_data/keystore/test.json new file mode 100644 index 0000000000..afcf956282 --- /dev/null +++ b/crates/sozo/tests/test_data/keystore/test.json @@ -0,0 +1 @@ +{"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"86dcdc44db46801dd2df660e2242926c"},"ciphertext":"75c93de54b0d29c9d5ecce255478cfb52ce6c82752af0f4f1e353be91ab93f2a","kdf":"scrypt","kdfparams":{"dklen":32,"n":8192,"p":1,"r":8,"salt":"e9342c34144d65f40c5ecee338cea267f96049e9795fa52d1d9cb96923fce998"},"mac":"35252658d371ead5aa890b15a6a4cdfde427561208d2c1e84978d107217faa0b"},"id":"f10cdaf9-0f8f-44df-a906-4285f2ba798d","version":3} \ No newline at end of file From 3fd7f3cbe98179641bfe2eb6b4b4e0b9e23757fd Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Wed, 10 Jan 2024 19:22:31 -0500 Subject: [PATCH 07/33] Update devcontainer image hash: 136a67a (#1412) --- 
.devcontainer/devcontainer.json | 2 +- .github/workflows/ci.yml | 18 ++++---- .github/workflows/release-dispatch.yml | 2 +- .github/workflows/release.yml | 2 +- .../core/src/service/messaging/starknet.rs | 4 +- crates/katana/storage/provider/tests/block.rs | 42 +++++++++---------- rust-toolchain.toml | 2 +- 7 files changed, 36 insertions(+), 36 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 08adc14f5d..6d4d5427a7 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,7 +2,7 @@ // https://github.com/microsoft/vscode-dev-containers/tree/v0.245.2/containers/rust { "name": "Rust", - "image": "ghcr.io/dojoengine/dojo-dev:448ffda", + "image": "ghcr.io/dojoengine/dojo-dev:136a67a", "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b5042e6ebd..4ff6eac5fa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,13 +8,13 @@ on: env: CARGO_TERM_COLOR: always - RUST_VERSION: 1.70.0 + RUST_VERSION: 1.74.0 jobs: test: runs-on: ubuntu-latest-16-cores container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -28,7 +28,7 @@ jobs: ensure-wasm: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -61,7 +61,7 @@ jobs: dojo-core-test: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -70,7 +70,7 @@ jobs: dojo-spawn-and-move-example-test: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -79,7 +79,7 @@ jobs: dojo-world-bindings-check: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -88,7 +88,7 @@ jobs: clippy: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -97,7 +97,7 @@ jobs: fmt: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 @@ -106,7 +106,7 @@ jobs: docs: runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: - uses: actions/checkout@v3 - uses: Swatinem/rust-cache@v2 diff --git a/.github/workflows/release-dispatch.yml b/.github/workflows/release-dispatch.yml index 1306b4c466..ca672d9de2 100644 --- a/.github/workflows/release-dispatch.yml +++ b/.github/workflows/release-dispatch.yml @@ -14,7 +14,7 @@ jobs: contents: write runs-on: ubuntu-latest container: - image: ghcr.io/dojoengine/dojo-dev:448ffda + image: ghcr.io/dojoengine/dojo-dev:136a67a steps: # Workaround described here: https://github.com/actions/checkout/issues/760 - uses: actions/checkout@v3 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1212b3094c..5fd3f3870b 100644 --- a/.github/workflows/release.yml +++ 
b/.github/workflows/release.yml @@ -8,7 +8,7 @@ on: env: CARGO_TERM_COLOR: always - RUST_VERSION: 1.70.0 + RUST_VERSION: 1.74.0 REGISTRY_IMAGE: ghcr.io/${{ github.repository }} jobs: diff --git a/crates/katana/core/src/service/messaging/starknet.rs b/crates/katana/core/src/service/messaging/starknet.rs index 2b7d2d3d8e..642f0de240 100644 --- a/crates/katana/core/src/service/messaging/starknet.rs +++ b/crates/katana/core/src/service/messaging/starknet.rs @@ -488,7 +488,7 @@ mod tests { let to_address = selector!("to_address"); let selector = selector!("selector"); let nonce = FieldElement::ONE; - let calldata = vec![from_address, FieldElement::THREE]; + let calldata = [from_address, FieldElement::THREE]; let transaction_hash = FieldElement::ZERO; let event = EmittedEvent { @@ -522,7 +522,7 @@ mod tests { let _to_address = selector!("to_address"); let _selector = selector!("selector"); let _nonce = FieldElement::ONE; - let _calldata = vec![from_address, FieldElement::THREE]; + let _calldata = [from_address, FieldElement::THREE]; let transaction_hash = FieldElement::ZERO; let event = EmittedEvent { diff --git a/crates/katana/storage/provider/tests/block.rs b/crates/katana/storage/provider/tests/block.rs index 1886b7caac..1128d89378 100644 --- a/crates/katana/storage/provider/tests/block.rs +++ b/crates/katana/storage/provider/tests/block.rs @@ -26,14 +26,6 @@ use fixtures::{ }; use utils::generate_dummy_blocks_and_receipts; -#[template] -#[rstest::rstest] -#[case::insert_1_block(1)] -#[case::insert_2_block(2)] -#[case::insert_5_block(5)] -#[case::insert_10_block(10)] -fn insert_block_cases(#[case] block_count: u64) {} - #[apply(insert_block_cases)] fn insert_block_with_in_memory_provider( #[from(in_memory_provider)] provider: BlockchainProvider, @@ -139,19 +131,6 @@ where Ok(()) } -#[template] -#[rstest::rstest] -#[case::state_update_at_block_1(1, mock_state_updates()[0].clone())] -#[case::state_update_at_block_2(2, mock_state_updates()[1].clone())] -#[case::state_update_at_block_3(3, StateUpdatesWithDeclaredClasses::default())] -#[case::state_update_at_block_5(5, mock_state_updates()[2].clone())] -fn test_read_state_update( - #[from(provider_with_states)] provider: BlockchainProvider, - #[case] block_num: BlockNumber, - #[case] expected_state_update: StateUpdatesWithDeclaredClasses, -) { -} - #[apply(test_read_state_update)] fn test_read_state_update_with_in_memory_provider( #[with(in_memory_provider())] provider: BlockchainProvider, @@ -193,3 +172,24 @@ where assert_eq!(actual_state_update, Some(expected_state_update.state_updates)); Ok(()) } + +#[template] +#[rstest::rstest] +#[case::insert_1_block(1)] +#[case::insert_2_block(2)] +#[case::insert_5_block(5)] +#[case::insert_10_block(10)] +fn insert_block_cases(#[case] block_count: u64) {} + +#[template] +#[rstest::rstest] +#[case::state_update_at_block_1(1, mock_state_updates()[0].clone())] +#[case::state_update_at_block_2(2, mock_state_updates()[1].clone())] +#[case::state_update_at_block_3(3, StateUpdatesWithDeclaredClasses::default())] +#[case::state_update_at_block_5(5, mock_state_updates()[2].clone())] +fn test_read_state_update( + #[from(provider_with_states)] provider: BlockchainProvider, + #[case] block_num: BlockNumber, + #[case] expected_state_update: StateUpdatesWithDeclaredClasses, +) { +} diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 22048ac5bb..639f4f17d9 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "1.70.0" +channel = "1.74.0" From 
942d895e59193bca390600f096a3200f0e6b7240 Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Fri, 12 Jan 2024 22:33:42 +0530 Subject: [PATCH 08/33] feat(sozo/account): legacy bool flag to use old calldata encoding (#1419) * feat(sozo/account): legacy bool flag to use old calldata encoding * feat(sozo/account): legacy bool flag to use old calldata encoding * fix: make suggested improvements --- crates/sozo/Cargo.toml | 1 + crates/sozo/src/commands/options/account.rs | 65 ++++++++++++++++++--- 2 files changed, 59 insertions(+), 7 deletions(-) diff --git a/crates/sozo/Cargo.toml b/crates/sozo/Cargo.toml index d6fbb86796..da7b252cc5 100644 --- a/crates/sozo/Cargo.toml +++ b/crates/sozo/Cargo.toml @@ -46,4 +46,5 @@ url.workspace = true [dev-dependencies] assert_fs = "1.0.10" dojo-test-utils = { path = "../dojo-test-utils", features = [ "build-examples" ] } +katana-runner = { path = "../katana/runner" } snapbox = "0.4.6" diff --git a/crates/sozo/src/commands/options/account.rs b/crates/sozo/src/commands/options/account.rs index 2afff4fddd..52b41c4a93 100644 --- a/crates/sozo/src/commands/options/account.rs +++ b/crates/sozo/src/commands/options/account.rs @@ -40,6 +40,10 @@ pub struct AccountOptions { #[arg(help_heading = "Signer options - KEYSTORE")] #[arg(help = "The keystore password. Used with --keystore.")] pub keystore_password: Option, + + #[arg(long)] + #[arg(help = "Use legacy account (cairo0 account)")] + pub legacy: bool, } impl AccountOptions { @@ -57,13 +61,9 @@ impl AccountOptions { let chain_id = provider.chain_id().await.with_context(|| "Failed to retrieve network chain id.")?; - Ok(SingleOwnerAccount::new( - provider, - signer, - account_address, - chain_id, - ExecutionEncoding::New, - )) + let encoding = if self.legacy { ExecutionEncoding::Legacy } else { ExecutionEncoding::New }; + + Ok(SingleOwnerAccount::new(provider, signer, account_address, chain_id, encoding)) } fn signer(&self, env_metadata: Option<&Environment>) -> Result { @@ -118,6 +118,8 @@ mod tests { use std::str::FromStr; use clap::Parser; + use katana_runner::KatanaRunner; + use starknet::accounts::{Call, ExecutionEncoder}; use starknet::signers::{LocalWallet, Signer, SigningKey}; use starknet_crypto::FieldElement; @@ -322,4 +324,53 @@ mod tests { .is_err() ); } + + #[tokio::test] + async fn legacy_flag_works_as_expected() { + let cmd = Command::parse_from([ + "sozo", + "--legacy", + "--account-address", + "0x0", + "--private-key", + "0x1", + ]); + let (_runner, provider) = KatanaRunner::new().unwrap(); + let dummy_call = vec![Call { + to: FieldElement::from_hex_be("0x0").unwrap(), + selector: FieldElement::from_hex_be("0x1").unwrap(), + calldata: vec![ + FieldElement::from_hex_be("0x2").unwrap(), + FieldElement::from_hex_be("0x3").unwrap(), + ], + }]; + + // HACK: SingleOwnerAccount doesn't expose a way to check `encoding` type used in struct, so + // checking it by encoding a dummy call and checking which method it used to encode the call + let account = cmd.account.account(provider, None).await.unwrap(); + let result = account.encode_calls(&dummy_call); + // 0x0 is the data offset. 
+ assert!(*result.get(3).unwrap() == FieldElement::from_hex_be("0x0").unwrap()); + } + + #[tokio::test] + async fn without_legacy_flag_works_as_expected() { + let cmd = Command::parse_from(["sozo", "--account-address", "0x0", "--private-key", "0x1"]); + let (_runner, provider) = KatanaRunner::new().unwrap(); + let dummy_call = vec![Call { + to: FieldElement::from_hex_be("0x0").unwrap(), + selector: FieldElement::from_hex_be("0x1").unwrap(), + calldata: vec![ + FieldElement::from_hex_be("0xf2").unwrap(), + FieldElement::from_hex_be("0xf3").unwrap(), + ], + }]; + + // HACK: SingleOwnerAccount doesn't expose a way to check `encoding` type used in struct, so + // checking it by encoding a dummy call and checking which method it used to encode the call + let account = cmd.account.account(provider, None).await.unwrap(); + let result = account.encode_calls(&dummy_call); + // 0x2 is the Calldata len. + assert!(*result.get(3).unwrap() == FieldElement::from_hex_be("0x2").unwrap()); + } } From 39d0ba47d4c802832dc4be1e9d3f9332f1e2258d Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Sat, 13 Jan 2024 00:09:26 +0530 Subject: [PATCH 09/33] feat(katana): write katana logs to tempdir and print log file path to stderr (#1427) * feat(katana): write katana logs to tempdir and print log file path to stderr * fix: typo and formatting --- crates/katana/runner/src/lib.rs | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/crates/katana/runner/src/lib.rs b/crates/katana/runner/src/lib.rs index 060e1c8fcb..4d79c7779c 100644 --- a/crates/katana/runner/src/lib.rs +++ b/crates/katana/runner/src/lib.rs @@ -29,7 +29,12 @@ impl KatanaRunner { } pub fn new_with_port(port: u16) -> Result<(Self, JsonRpcClient)> { - let log_filename = format!("logs/katana-{}.log", port); + let mut temp_dir = std::env::temp_dir(); + temp_dir.push("dojo"); + temp_dir.push("logs"); + temp_dir.push(format!("katana-{}.log", port)); + + eprintln!("Writing katana logs to {}", temp_dir.to_str().unwrap()); let mut child = Command::new("katana") .args(["-p", &port.to_string()]) @@ -43,11 +48,7 @@ impl KatanaRunner { let (sender, receiver) = mpsc::channel(); thread::spawn(move || { - KatanaRunner::wait_for_server_started_and_signal( - Path::new(&log_filename), - stdout, - sender, - ); + KatanaRunner::wait_for_server_started_and_signal(temp_dir.as_path(), stdout, sender); }); receiver From 36ba2c5522859b9abfb48b7fe1778226b4e4ba43 Mon Sep 17 00:00:00 2001 From: glihm Date: Fri, 12 Jan 2024 12:56:10 -0600 Subject: [PATCH 10/33] fix: ensure local manifest and remote manifest models are comparable (#1424) * fix: ensure local manifest and remote manifest are compared with the same models name * add test --------- Co-authored-by: Kariy --- crates/dojo-world/Cargo.toml | 2 +- crates/dojo-world/src/manifest_test.rs | 27 ++++++++++++------ crates/dojo-world/src/migration/world.rs | 16 ++++++++++- crates/dojo-world/src/migration/world_test.rs | 28 +++++++++++++++++-- 4 files changed, 61 insertions(+), 12 deletions(-) diff --git a/crates/dojo-world/Cargo.toml b/crates/dojo-world/Cargo.toml index 5f7a534c4e..57d203176a 100644 --- a/crates/dojo-world/Cargo.toml +++ b/crates/dojo-world/Cargo.toml @@ -24,7 +24,7 @@ starknet.workspace = true thiserror.workspace = true tracing.workspace = true -cainome = { git = "https://github.com/cartridge-gg/cainome", rev = "950e487", features = ["abigen-rs"] } +cainome = { git = "https://github.com/cartridge-gg/cainome", rev = "950e487", features = [ "abigen-rs" ] } dojo-types = { 
path = "../dojo-types", optional = true } http = { version = "0.2.9", optional = true } ipfs-api-backend-hyper = { git = "https://github.com/ferristseng/rust-ipfs-api", rev = "af2c17f7b19ef5b9898f458d97a90055c3605633", features = [ "with-hyper-rustls" ], optional = true } diff --git a/crates/dojo-world/src/manifest_test.rs b/crates/dojo-world/src/manifest_test.rs index a53b262950..1ad5b0d696 100644 --- a/crates/dojo-world/src/manifest_test.rs +++ b/crates/dojo-world/src/manifest_test.rs @@ -12,6 +12,7 @@ use starknet::providers::jsonrpc::{JsonRpcClient, JsonRpcMethod}; use super::{parse_contracts_events, Contract, Manifest, Model}; use crate::contracts::world::test::deploy_world; use crate::manifest::{parse_models_events, ManifestError}; +use crate::migration::world::WorldDiff; #[tokio::test] async fn manifest_from_remote_throw_error_on_not_deployed() { @@ -231,14 +232,24 @@ async fn fetch_remote_manifest() { let account = sequencer.account(); let provider = account.provider(); - let (world_address, _) = deploy_world( - &sequencer, - Utf8PathBuf::from_path_buf("../../examples/spawn-and-move/target/dev".into()).unwrap(), - ) - .await; + let artifacts_path = + Utf8PathBuf::from_path_buf("../../examples/spawn-and-move/target/dev".into()).unwrap(); + let manifest_path = artifacts_path.join("manifest.json"); - let manifest = Manifest::load_from_remote(provider, world_address).await.unwrap(); + let (world_address, _) = deploy_world(&sequencer, artifacts_path).await; - assert_eq!(manifest.models.len(), 2); - assert_eq!(manifest.contracts.len(), 1); + let local_manifest = Manifest::load_from_path(manifest_path).unwrap(); + let remote_manifest = Manifest::load_from_remote(provider, world_address).await.unwrap(); + + assert_eq!(local_manifest.models.len(), 2); + assert_eq!(local_manifest.contracts.len(), 1); + + assert_eq!(remote_manifest.models.len(), 2); + assert_eq!(remote_manifest.contracts.len(), 1); + + // compute diff from local and remote manifest + + let diff = WorldDiff::compute(local_manifest, Some(remote_manifest)); + + assert_eq!(diff.count_diffs(), 0, "there should not be any diff"); } diff --git a/crates/dojo-world/src/migration/world.rs b/crates/dojo-world/src/migration/world.rs index 60aa94c68a..a7d8a9cb25 100644 --- a/crates/dojo-world/src/migration/world.rs +++ b/crates/dojo-world/src/migration/world.rs @@ -1,5 +1,7 @@ use std::fmt::Display; +use convert_case::{Case, Casing}; + use super::class::ClassDiff; use super::contract::ContractDiff; use super::StateDiff; @@ -28,7 +30,19 @@ impl WorldDiff { name: model.name.to_string(), local: model.class_hash, remote: remote.as_ref().and_then(|m| { - m.models.iter().find(|e| e.name == model.name).map(|s| s.class_hash) + // Remote models are detected from events, where only the struct + // name (pascal case) is emitted. + // Local models uses the fully qualified name of the model, + // always in snake_case from cairo compiler. 
+ let model_name = model + .name + .split("::") + .last() + .unwrap_or(&model.name) + .from_case(Case::Snake) + .to_case(Case::Pascal); + + m.models.iter().find(|e| e.name == model_name).map(|s| s.class_hash) }), }) .collect::>(); diff --git a/crates/dojo-world/src/migration/world_test.rs b/crates/dojo-world/src/migration/world_test.rs index 28dc999e8e..698500f401 100644 --- a/crates/dojo-world/src/migration/world_test.rs +++ b/crates/dojo-world/src/migration/world_test.rs @@ -26,13 +26,22 @@ fn no_diff_when_local_and_remote_are_equal() { ..Default::default() }]; + let remote_models = vec![Model { + members: vec![], + name: "Model".into(), + class_hash: 11_u32.into(), + ..Default::default() + }]; + let local = Manifest { models, world: world_contract, executor: executor_contract, ..Default::default() }; - let remote = local.clone(); + + let mut remote = local.clone(); + remote.models = remote_models; let diff = WorldDiff::compute(local, Some(remote)); @@ -68,6 +77,21 @@ fn diff_when_local_and_remote_are_different() { }, ]; + let remote_models = vec![ + Model { + members: vec![], + name: "Model".into(), + class_hash: felt!("0x11"), + ..Default::default() + }, + Model { + members: vec![], + name: "Model2".into(), + class_hash: felt!("0x33"), + ..Default::default() + }, + ]; + let contracts = vec![ Contract { name: "dojo_mock::contracts::my_contract".into(), @@ -92,9 +116,9 @@ fn diff_when_local_and_remote_are_different() { }; let mut remote = local.clone(); + remote.models = remote_models; remote.world.class_hash = 44_u32.into(); remote.executor.class_hash = 55_u32.into(); - remote.models[1].class_hash = 33_u32.into(); remote.contracts[0].class_hash = felt!("0x1112"); let diff = WorldDiff::compute(local, Some(remote)); From 41a48b8082456e29d35913c0b81d71382c020b35 Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Fri, 12 Jan 2024 14:13:16 -0500 Subject: [PATCH 11/33] Prepare release: v0.5.0 (#1430) --- Cargo.lock | 65 +++++++++++++++--------------- Cargo.toml | 2 +- crates/dojo-core/Scarb.lock | 2 +- crates/dojo-core/Scarb.toml | 2 +- crates/torii/types-test/Scarb.lock | 2 +- crates/torii/types-test/Scarb.toml | 2 +- examples/spawn-and-move/Scarb.lock | 2 +- examples/spawn-and-move/Scarb.toml | 2 +- 8 files changed, 40 insertions(+), 39 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 468168b43e..c4eaeea84e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -801,7 +801,7 @@ dependencies = [ [[package]] name = "benches" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "clap_builder", @@ -2661,15 +2661,15 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dojo-core" -version = "0.4.4" +version = "0.5.0" [[package]] name = "dojo-examples-spawn-and-move" -version = "0.4.4" +version = "0.5.0" [[package]] name = "dojo-lang" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -2717,7 +2717,7 @@ dependencies = [ [[package]] name = "dojo-language-server" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -2739,7 +2739,7 @@ dependencies = [ [[package]] name = "dojo-signers" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "starknet", @@ -2747,7 +2747,7 @@ dependencies = [ [[package]] name = "dojo-test-utils" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "assert_fs", @@ -2778,7 +2778,7 @@ dependencies = [ [[package]] name = "dojo-types" -version = "0.4.4" +version = "0.5.0" dependencies = [ "crypto-bigint", "hex", @@ 
-2793,7 +2793,7 @@ dependencies = [ [[package]] name = "dojo-world" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "assert_fs", @@ -2827,7 +2827,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.4.4" +version = "0.5.0" dependencies = [ "cairo-lang-starknet", "camino", @@ -5444,7 +5444,7 @@ dependencies = [ [[package]] name = "katana" -version = "0.4.4" +version = "0.5.0" dependencies = [ "assert_matches", "clap", @@ -5452,7 +5452,7 @@ dependencies = [ "console", "katana-core", "katana-rpc", - "metrics 0.4.4", + "metrics 0.5.0", "metrics-process", "serde_json", "starknet_api", @@ -5464,7 +5464,7 @@ dependencies = [ [[package]] name = "katana-codecs" -version = "0.4.4" +version = "0.5.0" dependencies = [ "bytes", "katana-primitives", @@ -5472,7 +5472,7 @@ dependencies = [ [[package]] name = "katana-codecs-derive" -version = "0.4.4" +version = "0.5.0" dependencies = [ "proc-macro2", "quote", @@ -5482,7 +5482,7 @@ dependencies = [ [[package]] name = "katana-core" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "assert_matches", @@ -5515,7 +5515,7 @@ dependencies = [ [[package]] name = "katana-db" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "blockifier", @@ -5537,7 +5537,7 @@ dependencies = [ [[package]] name = "katana-executor" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "blockifier", @@ -5553,7 +5553,7 @@ dependencies = [ [[package]] name = "katana-primitives" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "blockifier", @@ -5571,7 +5571,7 @@ dependencies = [ [[package]] name = "katana-provider" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "auto_impl", @@ -5596,7 +5596,7 @@ dependencies = [ [[package]] name = "katana-rpc" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "assert_matches", @@ -5628,7 +5628,7 @@ dependencies = [ [[package]] name = "katana-rpc-types" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "derive_more", @@ -5641,7 +5641,7 @@ dependencies = [ [[package]] name = "katana-rpc-types-builder" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "katana-executor", @@ -5653,7 +5653,7 @@ dependencies = [ [[package]] name = "katana-runner" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "home", @@ -5944,7 +5944,7 @@ dependencies = [ [[package]] name = "metrics" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "hyper", @@ -8613,7 +8613,7 @@ dependencies = [ [[package]] name = "sozo" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "assert_fs", @@ -8638,6 +8638,7 @@ dependencies = [ "dojo-test-utils", "dojo-types", "dojo-world", + "katana-runner", "notify", "notify-debouncer-mini", "scarb", @@ -9807,7 +9808,7 @@ dependencies = [ [[package]] name = "torii-client" -version = "0.4.4" +version = "0.5.0" dependencies = [ "async-trait", "camino", @@ -9833,7 +9834,7 @@ dependencies = [ [[package]] name = "torii-core" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "async-trait", @@ -9869,7 +9870,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "async-graphql", @@ -9908,7 +9909,7 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "0.4.4" +version = "0.5.0" dependencies = [ "bytes", "crypto-bigint", @@ -9947,7 +9948,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "0.4.4" +version = "0.5.0" dependencies = [ "anyhow", "async-trait", @@ -9966,7 
+9967,7 @@ dependencies = [ "hyper-reverse-proxy", "indexmap 1.9.3", "lazy_static", - "metrics 0.4.4", + "metrics 0.5.0", "metrics-process", "scarb", "serde", @@ -10234,7 +10235,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "0.4.4" +version = "0.5.0" [[package]] name = "ucd-trie" diff --git a/Cargo.toml b/Cargo.toml index e298885dc1..39e5ed93ee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,7 +36,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "0.4.4" +version = "0.5.0" [profile.performance] codegen-units = 1 diff --git a/crates/dojo-core/Scarb.lock b/crates/dojo-core/Scarb.lock index 495d892bf3..f72cbc9a65 100644 --- a/crates/dojo-core/Scarb.lock +++ b/crates/dojo-core/Scarb.lock @@ -3,7 +3,7 @@ version = 1 [[package]] name = "dojo" -version = "0.4.4" +version = "0.5.0" dependencies = [ "dojo_plugin", ] diff --git a/crates/dojo-core/Scarb.toml b/crates/dojo-core/Scarb.toml index fb7ed12b0a..a7af7fcd51 100644 --- a/crates/dojo-core/Scarb.toml +++ b/crates/dojo-core/Scarb.toml @@ -2,7 +2,7 @@ cairo-version = "2.4.0" description = "The Dojo Core library for autonomous worlds." name = "dojo" -version = "0.4.4" +version = "0.5.0" [dependencies] dojo_plugin = { git = "https://github.com/dojoengine/dojo", tag = "v0.3.11" } diff --git a/crates/torii/types-test/Scarb.lock b/crates/torii/types-test/Scarb.lock index 554df2c397..5cb79dfe8e 100644 --- a/crates/torii/types-test/Scarb.lock +++ b/crates/torii/types-test/Scarb.lock @@ -3,7 +3,7 @@ version = 1 [[package]] name = "dojo" -version = "0.4.4" +version = "0.5.0" dependencies = [ "dojo_plugin", ] diff --git a/crates/torii/types-test/Scarb.toml b/crates/torii/types-test/Scarb.toml index 192864a255..1971407ce9 100644 --- a/crates/torii/types-test/Scarb.toml +++ b/crates/torii/types-test/Scarb.toml @@ -1,7 +1,7 @@ [package] cairo-version = "2.4.0" name = "types_test" -version = "0.4.4" +version = "0.5.0" [cairo] sierra-replace-ids = true diff --git a/examples/spawn-and-move/Scarb.lock b/examples/spawn-and-move/Scarb.lock index ca51af1a48..b61ff06a70 100644 --- a/examples/spawn-and-move/Scarb.lock +++ b/examples/spawn-and-move/Scarb.lock @@ -3,7 +3,7 @@ version = 1 [[package]] name = "dojo" -version = "0.4.4" +version = "0.5.0" dependencies = [ "dojo_plugin", ] diff --git a/examples/spawn-and-move/Scarb.toml b/examples/spawn-and-move/Scarb.toml index ecc01f66c9..558eed1b75 100644 --- a/examples/spawn-and-move/Scarb.toml +++ b/examples/spawn-and-move/Scarb.toml @@ -1,7 +1,7 @@ [package] cairo-version = "2.4.0" name = "dojo_examples" -version = "0.4.4" +version = "0.5.0" # Use the prelude with the less imports as possible # from corelib. 
edition = "2023_10" From 72646199e31bddc96a35f1adcc3c9de8e4efd99a Mon Sep 17 00:00:00 2001 From: glihm Date: Sat, 13 Jan 2024 17:05:45 -0600 Subject: [PATCH 12/33] feat: add `ensure_abi` method into model generated contract (#1433) feat: add ensure_abi method into model generated contract --- .../compiler_cairo_v240/Scarb.lock | 2 +- .../dojo-lang/src/manifest_test_data/manifest | 100 +++++++++++++++++- crates/dojo-lang/src/model.rs | 4 + crates/dojo-lang/src/plugin_test_data/model | 36 +++++++ crates/dojo-world/src/contracts/model_test.rs | 2 +- examples/spawn-and-move/Scarb.lock | 2 +- 6 files changed, 141 insertions(+), 5 deletions(-) diff --git a/crates/dojo-lang/src/manifest_test_data/compiler_cairo_v240/Scarb.lock b/crates/dojo-lang/src/manifest_test_data/compiler_cairo_v240/Scarb.lock index 6f0435b5b6..bb593d0412 100644 --- a/crates/dojo-lang/src/manifest_test_data/compiler_cairo_v240/Scarb.lock +++ b/crates/dojo-lang/src/manifest_test_data/compiler_cairo_v240/Scarb.lock @@ -10,7 +10,7 @@ dependencies = [ [[package]] name = "dojo" -version = "0.4.4" +version = "0.5.0" dependencies = [ "dojo_plugin", ] diff --git a/crates/dojo-lang/src/manifest_test_data/manifest b/crates/dojo-lang/src/manifest_test_data/manifest index 1f9e0fb0e9..76df306c7a 100644 --- a/crates/dojo-lang/src/manifest_test_data/manifest +++ b/crates/dojo-lang/src/manifest_test_data/manifest @@ -1205,7 +1205,7 @@ test_manifest_file { "name": "dojo_examples::models::moves", "address": null, - "class_hash": "0x64495ca6dc1dc328972697b30468cea364bcb7452bbb6e4aaad3e4b3f190147", + "class_hash": "0x1e13e74f3cb66e022c2b58a8ab7670a065d12d050446e20f736bd5bdc37c17e", "abi": [ { "type": "function", @@ -1360,6 +1360,62 @@ test_manifest_file ], "state_mutability": "view" }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Moves", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "remaining", + "type": "core::integer::u8" + }, + { + "name": "last_direction", + "type": "dojo_examples::models::Direction" + } + ] + }, + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + "type": "dojo_examples::models::Moves" + } + ], + "outputs": [], + "state_mutability": "view" + }, { "type": "event", "name": "dojo_examples::models::moves::Event", @@ -1374,7 +1430,7 @@ test_manifest_file { "name": "dojo_examples::models::position", "address": null, - "class_hash": "0x4cd20d231b04405a77b184c115dc60637e186504fad7f0929bd76cbd09c10b", + "class_hash": "0x2a7df852d6ef0af662dad741b97b423ba0fb34a1483599781ac9e7f6822bc25", "abi": [ { "type": "function", @@ -1529,6 +1585,46 @@ test_manifest_file ], "state_mutability": "view" }, + { + "type": "struct", + "name": "dojo_examples::models::Vec2", + "members": [ + { + "name": "x", + "type": "core::integer::u32" + }, + { + "name": "y", + "type": "core::integer::u32" + } + ] + }, + { + "type": "struct", + "name": "dojo_examples::models::Position", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "vec", + "type": "dojo_examples::models::Vec2" + } + ] + }, + { + "type": "function", + "name": "ensure_abi", + "inputs": [ + { + "name": "model", + 
"type": "dojo_examples::models::Position" + } + ], + "outputs": [], + "state_mutability": "view" + }, { "type": "event", "name": "dojo_examples::models::position::Event", diff --git a/crates/dojo-lang/src/model.rs b/crates/dojo-lang/src/model.rs index 3a42488add..558a560dd1 100644 --- a/crates/dojo-lang/src/model.rs +++ b/crates/dojo-lang/src/model.rs @@ -157,6 +157,10 @@ pub fn handle_model_struct( fn schema(self: @ContractState) -> dojo::database::introspect::Ty { dojo::database::introspect::Introspect::<$type_name$>::ty() } + + #[external(v0)] + fn ensure_abi(self: @ContractState, model: $type_name$) { + } } ", &UnorderedHashMap::from([ diff --git a/crates/dojo-lang/src/plugin_test_data/model b/crates/dojo-lang/src/plugin_test_data/model index bcaac79822..54cc06914c 100644 --- a/crates/dojo-lang/src/plugin_test_data/model +++ b/crates/dojo-lang/src/plugin_test_data/model @@ -586,6 +586,11 @@ error: Unsupported attribute. #[external(v0)] ^*************^ +error: Unsupported attribute. + --> test_src/lib.cairo[Position]:110:17 + #[external(v0)] + ^*************^ + error: Unsupported attribute. --> test_src/lib.cairo[Roles]:73:17 #[storage] @@ -616,6 +621,11 @@ error: Unsupported attribute. #[external(v0)] ^*************^ +error: Unsupported attribute. + --> test_src/lib.cairo[Roles]:106:17 + #[external(v0)] + ^*************^ + error: Unsupported attribute. --> test_src/lib.cairo[OnlyKeyModel]:72:17 #[storage] @@ -646,6 +656,11 @@ error: Unsupported attribute. #[external(v0)] ^*************^ +error: Unsupported attribute. + --> test_src/lib.cairo[OnlyKeyModel]:105:17 + #[external(v0)] + ^*************^ + error: Unsupported attribute. --> test_src/lib.cairo[Player]:81:17 #[storage] @@ -676,6 +691,11 @@ error: Unsupported attribute. #[external(v0)] ^*************^ +error: Unsupported attribute. + --> test_src/lib.cairo[Player]:114:17 + #[external(v0)] + ^*************^ + //! 
> expanded_cairo_code use core::serde::Serde; @@ -907,6 +927,10 @@ impl PositionIntrospect<> of dojo::database::introspect::Introspect> fn schema(self: @ContractState) -> dojo::database::introspect::Ty { dojo::database::introspect::Introspect::::ty() } + + #[external(v0)] + fn ensure_abi(self: @ContractState, model: Position) { + } } impl RolesSerde of core::serde::Serde:: { fn serialize(self: @Roles, ref output: core::array::Array) { @@ -1022,6 +1046,10 @@ impl RolesIntrospect<> of dojo::database::introspect::Introspect> { fn schema(self: @ContractState) -> dojo::database::introspect::Ty { dojo::database::introspect::Introspect::::ty() } + + #[external(v0)] + fn ensure_abi(self: @ContractState, model: Roles) { + } } impl OnlyKeyModelSerde of core::serde::Serde:: { fn serialize(self: @OnlyKeyModel, ref output: core::array::Array) { @@ -1136,6 +1164,10 @@ impl OnlyKeyModelIntrospect<> of dojo::database::introspect::Introspect dojo::database::introspect::Ty { dojo::database::introspect::Introspect::::ty() } + + #[external(v0)] + fn ensure_abi(self: @ContractState, model: OnlyKeyModel) { + } } impl PlayerCopy of core::traits::Copy::; impl PlayerDrop of core::traits::Drop::; @@ -1265,4 +1297,8 @@ impl PlayerIntrospect<> of dojo::database::introspect::Introspect> { fn schema(self: @ContractState) -> dojo::database::introspect::Ty { dojo::database::introspect::Introspect::::ty() } + + #[external(v0)] + fn ensure_abi(self: @ContractState, model: Player) { + } } diff --git a/crates/dojo-world/src/contracts/model_test.rs b/crates/dojo-world/src/contracts/model_test.rs index ce03fd3f3a..c11d2ae460 100644 --- a/crates/dojo-world/src/contracts/model_test.rs +++ b/crates/dojo-world/src/contracts/model_test.rs @@ -63,7 +63,7 @@ async fn test_model() { assert_eq!( position.class_hash(), FieldElement::from_hex_be( - "0x004cd20d231b04405a77b184c115dc60637e186504fad7f0929bd76cbd09c10b" + "0x02a7df852d6ef0af662dad741b97b423ba0fb34a1483599781ac9e7f6822bc25" ) .unwrap() ); diff --git a/examples/spawn-and-move/Scarb.lock b/examples/spawn-and-move/Scarb.lock index b61ff06a70..ae8ad6f337 100644 --- a/examples/spawn-and-move/Scarb.lock +++ b/examples/spawn-and-move/Scarb.lock @@ -10,7 +10,7 @@ dependencies = [ [[package]] name = "dojo_examples" -version = "0.4.4" +version = "0.5.0" dependencies = [ "dojo", ] From 7a7daf8d8663eb6330fabeaded6e414b8a4f6092 Mon Sep 17 00:00:00 2001 From: lambda-0x <0xlambda@protonmail.com> Date: Sun, 14 Jan 2024 11:59:29 +0530 Subject: [PATCH 13/33] fix: retry on `TransactionHashNotFound` error (#1435) --- crates/dojo-world/src/utils.rs | 8 ++++++-- crates/sozo/src/ops/migration/mod.rs | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/dojo-world/src/utils.rs b/crates/dojo-world/src/utils.rs index fce459d9bf..2e067f055e 100644 --- a/crates/dojo-world/src/utils.rs +++ b/crates/dojo-world/src/utils.rs @@ -65,12 +65,12 @@ pub struct TransactionWaiter<'a, P: Provider> { /// be considered when waiting for the transaction, meaning `REVERTED` transaction will not /// return an error. must_succeed: bool, - /// Poll the transaction every `interval` miliseconds. Miliseconds are used so that + /// Poll the transaction every `interval` milliseconds. Milliseconds are used so that /// we can be more precise with the polling interval. Defaults to 2.5 seconds. interval: Interval, /// The maximum amount of time to wait for the transaction to achieve the desired status. An /// error will be returned if it is unable to finish within the `timeout` duration. 
Defaults to - /// 60 seconds. + /// 300 seconds. timeout: Duration, /// The provider to use for polling the transaction. provider: &'a P, @@ -240,6 +240,10 @@ where } Poll::Ready(res) => match res { + Err(ProviderError::StarknetError( + StarknetError::TransactionHashNotFound, + )) => {} + Err(e) => { return Poll::Ready(Err(TransactionWaitingError::Provider(e))); } diff --git a/crates/sozo/src/ops/migration/mod.rs b/crates/sozo/src/ops/migration/mod.rs index 1fbfb96b29..85ff2f9166 100644 --- a/crates/sozo/src/ops/migration/mod.rs +++ b/crates/sozo/src/ops/migration/mod.rs @@ -475,7 +475,7 @@ where .execute(calls) .send() .await - .map_err(|e| anyhow!("Failed to register models to World: {e}"))?; + .map_err(|e| anyhow!("Failed to register models to World: {e:?}"))?; TransactionWaiter::new(transaction_hash, migrator.provider()).await?; From 518d9df78cbf6bb625b339dedf0bb1b2922447a0 Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Sun, 14 Jan 2024 13:36:06 -0500 Subject: [PATCH 14/33] Move dojo-signers to sozo-signers (#1438) --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 2 +- crates/{dojo-signers => sozo/signers}/Cargo.toml | 4 ++-- crates/{dojo-signers => sozo/signers}/src/lib.rs | 0 4 files changed, 11 insertions(+), 11 deletions(-) rename crates/{dojo-signers => sozo/signers}/Cargo.toml (73%) rename crates/{dojo-signers => sozo/signers}/src/lib.rs (100%) diff --git a/Cargo.lock b/Cargo.lock index c4eaeea84e..2991b8db40 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2737,14 +2737,6 @@ dependencies = [ "tower-lsp", ] -[[package]] -name = "dojo-signers" -version = "0.5.0" -dependencies = [ - "anyhow", - "starknet", -] - [[package]] name = "dojo-test-utils" version = "0.5.0" @@ -8657,6 +8649,14 @@ dependencies = [ "url", ] +[[package]] +name = "sozo-signers" +version = "0.5.0" +dependencies = [ + "anyhow", + "starknet", +] + [[package]] name = "spin" version = "0.5.2" diff --git a/Cargo.toml b/Cargo.toml index 39e5ed93ee..90e0818aa3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,6 @@ members = [ "crates/dojo-core", "crates/dojo-lang", "crates/dojo-language-server", - "crates/dojo-signers", "crates/dojo-test-utils", "crates/dojo-types", "crates/dojo-world", @@ -25,6 +24,7 @@ members = [ "crates/katana/storage/provider", "crates/metrics", "crates/sozo", + "crates/sozo/signers", "crates/torii/client", "crates/torii/server", "crates/torii/types-test", diff --git a/crates/dojo-signers/Cargo.toml b/crates/sozo/signers/Cargo.toml similarity index 73% rename from crates/dojo-signers/Cargo.toml rename to crates/sozo/signers/Cargo.toml index 1165853427..3ffce0b2c8 100644 --- a/crates/dojo-signers/Cargo.toml +++ b/crates/sozo/signers/Cargo.toml @@ -1,8 +1,8 @@ [package] -description = "Dojo Signer implementations" +description = "Sozo signer implementations" edition.workspace = true license-file.workspace = true -name = "dojo-signers" +name = "sozo-signers" repository.workspace = true version.workspace = true diff --git a/crates/dojo-signers/src/lib.rs b/crates/sozo/signers/src/lib.rs similarity index 100% rename from crates/dojo-signers/src/lib.rs rename to crates/sozo/signers/src/lib.rs From 87599f93c7c15b0d952d43987169d56e01b8037a Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Sun, 14 Jan 2024 13:36:18 -0500 Subject: [PATCH 15/33] Refactor torii cli to use SocketAddr cli args (#1434) * Refactor torii cli to use SocketAddr cli args * Remove unused dojo-lang dep from dojo-world --- Cargo.lock | 1 - crates/dojo-world/Cargo.toml | 1 - crates/torii/server/src/cli.rs | 30 
+++++++++++++----------------- 3 files changed, 13 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2991b8db40..8c32be4f6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2797,7 +2797,6 @@ dependencies = [ "cairo-lang-starknet", "camino", "convert_case 0.6.0", - "dojo-lang", "dojo-test-utils", "dojo-types", "futures", diff --git a/crates/dojo-world/Cargo.toml b/crates/dojo-world/Cargo.toml index 57d203176a..62791999f0 100644 --- a/crates/dojo-world/Cargo.toml +++ b/crates/dojo-world/Cargo.toml @@ -35,7 +35,6 @@ url = { version = "2.2.2", optional = true } [dev-dependencies] assert_fs = "1.0.9" assert_matches.workspace = true -dojo-lang = { path = "../dojo-lang" } dojo-test-utils = { path = "../dojo-test-utils" } tokio.workspace = true toml.workspace = true diff --git a/crates/torii/server/src/cli.rs b/crates/torii/server/src/cli.rs index 4858ad2265..0e606a1aa3 100644 --- a/crates/torii/server/src/cli.rs +++ b/crates/torii/server/src/cli.rs @@ -50,9 +50,9 @@ struct Args { #[arg(short, long = "world", env = "DOJO_WORLD_ADDRESS")] world_address: FieldElement, - /// The rpc endpoint to use - #[arg(long, default_value = "http://localhost:5050")] - rpc: String, + /// The sequencer rpc endpoint to index. + #[arg(long, value_name = "SOCKET", default_value = ":5050", value_parser = parse_socket_address)] + rpc: SocketAddr, /// Database filepath (ex: indexer.db). If specified file doesn't exist, it will be /// created. Defaults to in-memory database @@ -63,13 +63,9 @@ struct Args { #[arg(short, long, default_value = "0")] start_block: u64, - /// Host address for api endpoints - #[arg(long, default_value = "0.0.0.0")] - host: String, - - /// Port number for api endpoints - #[arg(long, default_value = "8080")] - port: u16, + /// Address to serve api endpoints at. + #[arg(long, value_name = "SOCKET", default_value = ":8080", value_parser = parse_socket_address)] + addr: SocketAddr, /// Specify allowed origins for api endpoints (comma-separated list of allowed origins, or "*" /// for all) @@ -86,7 +82,7 @@ struct Args { /// /// The metrics will be served at the given interface and port. 
#[arg(long, value_name = "SOCKET", value_parser = parse_socket_address, help_heading = "Metrics")] - pub metrics: Option, + metrics: Option, } #[tokio::main] @@ -121,7 +117,9 @@ async fn main() -> anyhow::Result<()> { sqlx::migrate!("../migrations").run(&pool).await?; - let provider: Arc<_> = JsonRpcClient::new(HttpTransport::new(Url::parse(&args.rpc)?)).into(); + let provider: Arc<_> = + JsonRpcClient::new(HttpTransport::new(format!("http://{}", args.rpc).parse::()?)) + .into(); // Get world address let world = WorldContractReader::new(args.world_address, &provider); @@ -149,8 +147,6 @@ async fn main() -> anyhow::Result<()> { Some(block_tx), ); - let addr: SocketAddr = format!("{}:{}", args.host, args.port).parse()?; - let shutdown_rx = shutdown_tx.subscribe(); let (grpc_addr, grpc_server) = torii_grpc::server::new( shutdown_rx, @@ -161,7 +157,7 @@ async fn main() -> anyhow::Result<()> { ) .await?; - let proxy_server = Arc::new(Proxy::new(addr, args.allowed_origins, Some(grpc_addr), None)); + let proxy_server = Arc::new(Proxy::new(args.addr, args.allowed_origins, Some(grpc_addr), None)); let graphql_server = spawn_rebuilding_graphql_server( shutdown_tx.clone(), @@ -170,8 +166,8 @@ async fn main() -> anyhow::Result<()> { proxy_server.clone(), ); - info!(target: "torii::cli", "Starting torii endpoint: {}", format!("http://{}", addr)); - info!(target: "torii::cli", "Serving Graphql playground: {}\n", format!("http://{}/graphql", addr)); + info!(target: "torii::cli", "Starting torii endpoint: {}", format!("http://{}", args.addr)); + info!(target: "torii::cli", "Serving Graphql playground: {}\n", format!("http://{}/graphql", args.addr)); if let Some(listen_addr) = args.metrics { let prometheus_handle = prometheus_exporter::install_recorder()?; From 66b16f84d36b9fec863dd30782e31cf614e1c258 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dami=C3=A1n=20Pi=C3=B1ones?= <30808181+dpinones@users.noreply.github.com> Date: Wed, 17 Jan 2024 02:31:46 -0300 Subject: [PATCH 16/33] Update example spawn-and-move (#1413) * Update dojo example * Update dojo v0.5.0 * docs: minor fixes on new address --------- Co-authored-by: glihm --- examples/spawn-and-move/README.md | 55 +++++++++++++++++------ examples/spawn-and-move/Scarb.toml | 2 +- examples/spawn-and-move/src/actions.cairo | 12 ++--- examples/spawn-and-move/src/models.cairo | 27 +++++------ examples/spawn-and-move/src/utils.cairo | 21 +++------ 5 files changed, 64 insertions(+), 53 deletions(-) diff --git a/examples/spawn-and-move/README.md b/examples/spawn-and-move/README.md index 75e0ff346f..9293592c12 100644 --- a/examples/spawn-and-move/README.md +++ b/examples/spawn-and-move/README.md @@ -12,26 +12,55 @@ sozo build sozo migrate # Get the class hash of the Moves model by name -sozo model class-hash --world 0x26065106fa319c3981618e7567480a50132f23932226a51c219ffb8e47daa84 Moves -> 0x2b97f0b24be59ecf4504a27ac2301179be7df44c4c7d9482cd7b36137bc0fa4 +sozo model class-hash Moves --world 0x33ac2f528bb97cc7b79148fd1756dc368be0e95d391d8c6d6473ecb60b4560e +> 0x64495ca6dc1dc328972697b30468cea364bcb7452bbb6e4aaad3e4b3f190147 # Get the schema of the Moves model -sozo model schema --world 0x26065106fa319c3981618e7567480a50132f23932226a51c219ffb8e47daa84 Moves +sozo model schema Moves --world 0x33ac2f528bb97cc7b79148fd1756dc368be0e95d391d8c6d6473ecb60b4560e > struct Moves { -> remaining: u8 +> #[key] +> player: ContractAddress, +> remaining: u8, +> last_direction: Direction = Invalid Option, +> } +> +> enum Direction { +> None +> Left +> Right +> Up +> Down > } # Get the 
value of the Moves model for an entity. (in this example, -# 0x517ececd29116499f4a1b64b094da79ba08dfd54a3edaa316134c41f8160973 is -# the calling account. -sozo model get --world 0x26065106fa319c3981618e7567480a50132f23932226a51c219ffb8e47daa84 Moves 0x517ececd29116499f4a1b64b094da79ba08dfd54a3edaa316134c41f8160973 -> 0x0 +# 0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03, is +# the calling account which is also the key to retrieve a Moves model) +sozo model get Moves 0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03 --world 0x33ac2f528bb97cc7b79148fd1756dc368be0e95d391d8c6d6473ecb60b4560e +> struct Moves { +> #[key] +> player: ContractAddress = 0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03, +> remaining: u8 = 0, +> last_direction: Direction = None, +> } # The returned value is 0 since we haven't spawned yet. -# We can spawn a player using the actions contract address -sozo execute 0x31571485922572446df9e3198a891e10d3a48e544544317dbcbb667e15848cd spawn +# We can spawn a player using the actions contract address. +sozo execute 0x152dcff993befafe5001975149d2c50bd9621da7cbaed74f68e7d5e54e65abc spawn -# Fetch the updated entity -sozo model get --world 0x26065106fa319c3981618e7567480a50132f23932226a51c219ffb8e47daa84 Moves 0x517ececd29116499f4a1b64b094da79ba08dfd54a3edaa316134c41f8160973 -> 0xa +# Fetch the updated entity. +sozo model get Moves 0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03 --world 0x33ac2f528bb97cc7b79148fd1756dc368be0e95d391d8c6d6473ecb60b4560e +> struct Moves { +> #[key] +> player: ContractAddress = 0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03, +> remaining: u8 = 1, +> last_direction: Direction = None, +> } +> +> enum Direction { +> None +> Left +> Right +> Up +> Down +> } ``` diff --git a/examples/spawn-and-move/Scarb.toml b/examples/spawn-and-move/Scarb.toml index 558eed1b75..88e7c4153c 100644 --- a/examples/spawn-and-move/Scarb.toml +++ b/examples/spawn-and-move/Scarb.toml @@ -25,4 +25,4 @@ rpc_url = "http://localhost:5050/" # Default account for katana with seed = 0 account_address = "0x6162896d1d7ab204c7ccac6dd5f8e9e7c25ecd5ae4fcb4ad32e57786bb46e03" private_key = "0x1800000000300000180000000000030000000000003006001800006600" -world_address = "0x5010c31f127114c6198df8a5239e2b7a5151e1156fb43791e37e7385faa8138" +world_address = "0x33ac2f528bb97cc7b79148fd1756dc368be0e95d391d8c6d6473ecb60b4560e" diff --git a/examples/spawn-and-move/src/actions.cairo b/examples/spawn-and-move/src/actions.cairo index 6f56d1f928..ea94cde00f 100644 --- a/examples/spawn-and-move/src/actions.cairo +++ b/examples/spawn-and-move/src/actions.cairo @@ -1,19 +1,16 @@ -use dojo::world::{IWorldDispatcher, IWorldDispatcherTrait}; -use dojo_examples::models::{Position, Moves, Direction}; -use starknet::{ContractAddress, ClassHash}; - #[starknet::interface] trait IActions { fn spawn(self: @TContractState); - fn move(self: @TContractState, direction: Direction); + fn move(self: @TContractState, direction: dojo_examples::models::Direction); } #[dojo::contract] mod actions { + use super::IActions; + use starknet::{ContractAddress, get_caller_address}; use dojo_examples::models::{Position, Moves, Direction, Vec2}; use dojo_examples::utils::next_position; - use super::IActions; #[event] #[derive(Drop, starknet::Event)] @@ -97,9 +94,8 @@ mod tests { use dojo::test_utils::{spawn_test_world, deploy_contract}; - use dojo_examples::models::{position, moves}; - use dojo_examples::models::{Position, Moves, Direction, Vec2}; 
use super::{actions, IActionsDispatcher, IActionsDispatcherTrait}; + use dojo_examples::models::{Position, position, Moves, moves, Direction, Vec2}; #[test] #[available_gas(30000000)] diff --git a/examples/spawn-and-move/src/models.cairo b/examples/spawn-and-move/src/models.cairo index 8c8a275bed..c917342ece 100644 --- a/examples/spawn-and-move/src/models.cairo +++ b/examples/spawn-and-move/src/models.cairo @@ -1,24 +1,22 @@ -use core::array::ArrayTrait; -use core::debug::PrintTrait; use starknet::ContractAddress; #[derive(Serde, Copy, Drop, Introspect)] enum Direction { - None: (), - Left: (), - Right: (), - Up: (), - Down: (), + None, + Left, + Right, + Up, + Down, } impl DirectionIntoFelt252 of Into { fn into(self: Direction) -> felt252 { match self { - Direction::None(()) => 0, - Direction::Left(()) => 1, - Direction::Right(()) => 2, - Direction::Up(()) => 3, - Direction::Down(()) => 4, + Direction::None => 0, + Direction::Left => 1, + Direction::Right => 2, + Direction::Up => 3, + Direction::Down => 4, } } } @@ -31,13 +29,13 @@ struct Moves { last_direction: Direction } -#[derive(Copy, Drop, Serde, Print, Introspect)] +#[derive(Copy, Drop, Serde, Introspect)] struct Vec2 { x: u32, y: u32 } -#[derive(Model, Copy, Drop, Print, Serde)] +#[derive(Model, Copy, Drop, Serde)] struct Position { #[key] player: ContractAddress, @@ -64,7 +62,6 @@ impl Vec2Impl of Vec2Trait { #[cfg(test)] mod tests { - use core::debug::PrintTrait; use super::{Position, Vec2, Vec2Trait}; #[test] diff --git a/examples/spawn-and-move/src/utils.cairo b/examples/spawn-and-move/src/utils.cairo index ca5c5714f1..e14be466d8 100644 --- a/examples/spawn-and-move/src/utils.cairo +++ b/examples/spawn-and-move/src/utils.cairo @@ -2,22 +2,11 @@ use dojo_examples::models::{Position, Direction}; fn next_position(mut position: Position, direction: Direction) -> Position { match direction { - Direction::None(()) => { - return position; - }, - Direction::Left(()) => { - position.vec.x -= 1; - }, - Direction::Right(()) => { - position.vec.x += 1; - }, - Direction::Up(()) => { - position.vec.y -= 1; - }, - Direction::Down(()) => { - position.vec.y += 1; - }, + Direction::None => { return position; }, + Direction::Left => { position.vec.x -= 1; }, + Direction::Right => { position.vec.x += 1; }, + Direction::Up => { position.vec.y -= 1; }, + Direction::Down => { position.vec.y += 1; }, }; - position } From 8fb99419ba49ead5b93b9d62506297bca8e5ab81 Mon Sep 17 00:00:00 2001 From: glihm Date: Tue, 16 Jan 2024 23:32:00 -0600 Subject: [PATCH 17/33] fix: add more info on model deserialization fail (#1325) * feat: add more info on model deserialization fail * fix: fix cairo tests * fix: use trait path function call instead of prelude * fix: fix cairo tests * fix tests * fix test --- crates/dojo-lang/src/inline_macros/get.rs | 17 +- .../dojo-lang/src/manifest_test_data/manifest | 2 +- crates/dojo-lang/src/semantics/test_data/get | 337 +++++++++++++++++- 3 files changed, 332 insertions(+), 24 deletions(-) diff --git a/crates/dojo-lang/src/inline_macros/get.rs b/crates/dojo-lang/src/inline_macros/get.rs index c5e6e9d2cd..6b8020036a 100644 --- a/crates/dojo-lang/src/inline_macros/get.rs +++ b/crates/dojo-lang/src/inline_macros/get.rs @@ -103,8 +103,14 @@ impl InlineMacroExprPlugin for GetMacro { } let mut lookup_err_msg = format!("{} not found", model.to_string()); lookup_err_msg.truncate(CAIRO_ERR_MSG_LEN); - let mut deser_err_msg = format!("{} failed to deserialize", model.to_string()); - deser_err_msg.truncate(CAIRO_ERR_MSG_LEN); + // 
Currently, the main reason to have a deserialization to fail is by having + // the user providing the wrong keys length, which causes an invalid offset + // in the model deserialization. + let deser_err_msg = format!( + "\"Model `{}`: deserialization failed. Ensure the length of the keys tuple is \ + matching the number of #[key] fields in the model struct.\"", + model.to_string() + ); builder.add_str(&format!( "\n let mut __{model}_layout__ = core::array::ArrayTrait::new(); @@ -122,10 +128,11 @@ impl InlineMacroExprPlugin for GetMacro { core::array::serialize_array_helper(__{model}_values__, ref __{model}_model__); let mut __{model}_model_span__ = \ core::array::ArrayTrait::span(@__{model}_model__); - let __{model} = \ - core::option::OptionTrait::expect(core::serde::Serde::<{model}>::deserialize( + let __{model} = core::serde::Serde::<{model}>::deserialize( ref __{model}_model_span__ - ), '{deser_err_msg}');\n", + ); if core::option::OptionTrait::<{model}>::is_none(@__{model}) {{ \ + panic!({deser_err_msg}); }}; let __{model} = \ + core::option::OptionTrait::<{model}>::unwrap(__{model});\n", world.as_syntax_node().get_text(db), )); } diff --git a/crates/dojo-lang/src/manifest_test_data/manifest b/crates/dojo-lang/src/manifest_test_data/manifest index 76df306c7a..930a248e15 100644 --- a/crates/dojo-lang/src/manifest_test_data/manifest +++ b/crates/dojo-lang/src/manifest_test_data/manifest @@ -955,7 +955,7 @@ test_manifest_file { "name": "dojo_examples::actions::actions", "address": null, - "class_hash": "0x69c6bec7de74fc2404fe6b68ad8ece7be81ad6d861b38a8ba8fa583bfc3666b", + "class_hash": "0x352a6ab719469097c1f3f9db41552fc79a1bf09b83c8a0839052218461ecea9", "abi": [ { "type": "impl", diff --git a/crates/dojo-lang/src/semantics/test_data/get b/crates/dojo-lang/src/semantics/test_data/get index f08b775721..d8816cfa5e 100644 --- a/crates/dojo-lang/src/semantics/test_data/get +++ b/crates/dojo-lang/src/semantics/test_data/get @@ -420,27 +420,328 @@ Block( ), expr: FunctionCall( ExprFunctionCall { - function: core::option::OptionTraitImpl::::expect, + function: test::HealthSerde::deserialize, args: [ - Value( - FunctionCall( - ExprFunctionCall { - function: test::HealthSerde::deserialize, - args: [ - Reference( - LocalVarId(test::__Health_model_span__), - ), - ], - ty: core::option::Option::, - }, - ), + Reference( + LocalVarId(test::__Health_model_span__), ), + ], + ty: core::option::Option::, + }, + ), + }, + ), + Expr( + StatementExpr { + expr: If( + ExprIf { + condition: FunctionCall( + ExprFunctionCall { + function: core::option::OptionTraitImpl::::is_none, + args: [ + Value( + Snapshot( + ExprSnapshot { + inner: Var( + LocalVarId(test::__Health), + ), + ty: @core::option::Option::, + }, + ), + ), + ], + ty: core::bool, + }, + ), + if_block: Block( + ExprBlock { + statements: [ + Expr( + StatementExpr { + expr: Block( + ExprBlock { + statements: [ + Let( + StatementLet { + pattern: Variable( + __formatter_for_panic_macro__, + ), + expr: FunctionCall( + ExprFunctionCall { + function: core::fmt::FormatterDefault::default, + args: [], + ty: core::fmt::Formatter, + }, + ), + }, + ), + Expr( + StatementExpr { + expr: FunctionCall( + ExprFunctionCall { + function: core::result::ResultTraitImpl::<(), core::fmt::Error>::unwrap::, + args: [ + Value( + Block( + ExprBlock { + statements: [ + Expr( + StatementExpr { + expr: FunctionCall( + ExprFunctionCall { + function: core::byte_array::ByteArrayImpl::append_word, + args: [ + Reference( + LocalVarId(test::__formatter_for_panic_macro__)::buffer, + ), + 
Value( + Literal( + ExprLiteral { + value: 136816024508118168988575128292127322950586093870590548369522287650670210926, + ty: core::felt252, + }, + ), + ), + Value( + Literal( + ExprLiteral { + value: 31, + ty: core::integer::u32, + }, + ), + ), + ], + ty: (), + }, + ), + }, + ), + Expr( + StatementExpr { + expr: FunctionCall( + ExprFunctionCall { + function: core::byte_array::ByteArrayImpl::append_word, + args: [ + Reference( + LocalVarId(test::__formatter_for_panic_macro__)::buffer, + ), + Value( + Literal( + ExprLiteral { + value: 57245710417489827494872497475953274453670361818009142277461875365354283124, + ty: core::felt252, + }, + ), + ), + Value( + Literal( + ExprLiteral { + value: 31, + ty: core::integer::u32, + }, + ), + ), + ], + ty: (), + }, + ), + }, + ), + Expr( + StatementExpr { + expr: FunctionCall( + ExprFunctionCall { + function: core::byte_array::ByteArrayImpl::append_word, + args: [ + Reference( + LocalVarId(test::__formatter_for_panic_macro__)::buffer, + ), + Value( + Literal( + ExprLiteral { + value: 184450045146430526813493865095770350343962517127059696327898282247556767854, + ty: core::felt252, + }, + ), + ), + Value( + Literal( + ExprLiteral { + value: 31, + ty: core::integer::u32, + }, + ), + ), + ], + ty: (), + }, + ), + }, + ), + Expr( + StatementExpr { + expr: FunctionCall( + ExprFunctionCall { + function: core::byte_array::ByteArrayImpl::append_word, + args: [ + Reference( + LocalVarId(test::__formatter_for_panic_macro__)::buffer, + ), + Value( + Literal( + ExprLiteral { + value: 207476049689134978464526160084214572942096890522908346890176432476061507693, + ty: core::felt252, + }, + ), + ), + Value( + Literal( + ExprLiteral { + value: 31, + ty: core::integer::u32, + }, + ), + ), + ], + ty: (), + }, + ), + }, + ), + Expr( + StatementExpr { + expr: FunctionCall( + ExprFunctionCall { + function: core::byte_array::ByteArrayImpl::append_word, + args: [ + Reference( + LocalVarId(test::__formatter_for_panic_macro__)::buffer, + ), + Value( + Literal( + ExprLiteral { + value: 34474207625732236007199437870, + ty: core::felt252, + }, + ), + ), + Value( + Literal( + ExprLiteral { + value: 12, + ty: core::integer::u32, + }, + ), + ), + ], + ty: (), + }, + ), + }, + ), + ], + tail: Some( + EnumVariantCtor( + ExprEnumVariantCtor { + variant: ConcreteVariant { + concrete_enum_id: ConcreteEnumLongId { + enum_id: EnumId( + 5, + ), + generic_args: [ + Type( + TypeId( + 3, + ), + ), + Type( + TypeId( + 328, + ), + ), + ], + }, + id: VariantId(core::result::Ok), + ty: (), + idx: 0, + }, + value_expr: Tuple( + ExprTuple { + items: [], + ty: (), + }, + ), + ty: core::result::Result::<(), core::fmt::Error>, + }, + ), + ), + ty: core::result::Result::<(), core::fmt::Error>, + }, + ), + ), + ], + ty: (), + }, + ), + }, + ), + ], + tail: Some( + FunctionCall( + ExprFunctionCall { + function: core::panics::panic_with_byte_array, + args: [ + Value( + Snapshot( + ExprSnapshot { + inner: MemberAccess( + ExprMemberAccess { + expr: Var( + LocalVarId(test::__formatter_for_panic_macro__), + ), + concrete_struct_id: core::fmt::Formatter, + member: MemberId(core::fmt::buffer), + ty: core::byte_array::ByteArray, + }, + ), + ty: @core::byte_array::ByteArray, + }, + ), + ), + ], + ty: core::never, + }, + ), + ), + ty: core::never, + }, + ), + }, + ), + ], + tail: None, + ty: (), + }, + ), + else_block: None, + ty: (), + }, + ), + }, + ), + Let( + StatementLet { + pattern: Variable( + __Health, + ), + expr: FunctionCall( + ExprFunctionCall { + function: core::option::OptionTraitImpl::::unwrap, + args: 
[ Value( - Literal( - ExprLiteral { - value: 7624190543980043505746182176517533098884860453715398488377751337573, - ty: core::felt252, - }, + Var( + LocalVarId(test::__Health), ), ), ], From 05e59cf35df6e864b9a125763869ee7eeefa16ef Mon Sep 17 00:00:00 2001 From: Icosahedron <83328087+0xicosahedron@users.noreply.github.com> Date: Wed, 17 Jan 2024 11:08:23 +0530 Subject: [PATCH 18/33] fix: Support deriving Print for enums (#1091) * skeleton for deriving print for enum * print enum variant names * add example enum to plugin_test_data * cargo fmt * remove unecessary derives * add core and comma * fix tests * fix tests * remove class hash changes * revert Cargo.lock * fix tests * use OptionTypeClause * fix: ensure print is expanded under test cfg --------- Co-authored-by: glihm --- Cargo.toml | 2 +- crates/dojo-lang/src/introspect.rs | 2 +- crates/dojo-lang/src/plugin.rs | 5 +- crates/dojo-lang/src/plugin_test.rs | 62 +------- crates/dojo-lang/src/plugin_test_data/print | 157 +++++++------------- crates/dojo-lang/src/print.rs | 68 ++++++++- 6 files changed, 120 insertions(+), 176 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 90e0818aa3..be55c2f9ad 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,7 +60,7 @@ cairo-lang-formatter = "2.4.0" cairo-lang-language-server = "2.4.0" cairo-lang-lowering = "2.4.0" cairo-lang-parser = "2.4.0" -cairo-lang-plugins = "2.4.0" +cairo-lang-plugins = { version = "2.4.0", features = [ "testing" ] } cairo-lang-project = "2.4.0" cairo-lang-semantic = { version = "2.4.0", features = [ "testing" ] } cairo-lang-sierra = "2.4.0" diff --git a/crates/dojo-lang/src/introspect.rs b/crates/dojo-lang/src/introspect.rs index 67e7a23816..5ebccdb891 100644 --- a/crates/dojo-lang/src/introspect.rs +++ b/crates/dojo-lang/src/introspect.rs @@ -99,7 +99,7 @@ pub fn handle_introspect_struct(db: &dyn SyntaxGroup, struct_ast: ItemStruct) -> /// A handler for Dojo code derives Introspect for an enum /// Parameters: /// * db: The semantic database. -/// * struct_ast: The AST of the struct. +/// * enum_ast: The AST of the enum. /// Returns: /// * A RewriteNode containing the generated code. 
pub fn handle_introspect_enum( diff --git a/crates/dojo-lang/src/plugin.rs b/crates/dojo-lang/src/plugin.rs index e97f5763f5..81972ce36f 100644 --- a/crates/dojo-lang/src/plugin.rs +++ b/crates/dojo-lang/src/plugin.rs @@ -27,7 +27,7 @@ use crate::inline_macros::get::GetMacro; use crate::inline_macros::set::SetMacro; use crate::introspect::{handle_introspect_enum, handle_introspect_struct}; use crate::model::handle_model_struct; -use crate::print::derive_print; +use crate::print::{handle_print_enum, handle_print_struct}; const DOJO_CONTRACT_ATTR: &str = "dojo::contract"; @@ -279,6 +279,7 @@ impl MacroPlugin for BuiltinDojoPlugin { enum_ast.clone(), )); } + "Print" => rewrite_nodes.push(handle_print_enum(db, enum_ast.clone())), _ => continue, } } @@ -355,7 +356,7 @@ impl MacroPlugin for BuiltinDojoPlugin { diagnostics.extend(model_diagnostics); } "Print" => { - rewrite_nodes.push(derive_print(db, struct_ast.clone())); + rewrite_nodes.push(handle_print_struct(db, struct_ast.clone())); } "Introspect" => { rewrite_nodes diff --git a/crates/dojo-lang/src/plugin_test.rs b/crates/dojo-lang/src/plugin_test.rs index d789ff5a5b..899cd8557c 100644 --- a/crates/dojo-lang/src/plugin_test.rs +++ b/crates/dojo-lang/src/plugin_test.rs @@ -1,9 +1,8 @@ use std::sync::Arc; use cairo_lang_defs::db::{DefsDatabase, DefsGroup}; -use cairo_lang_defs::ids::{LanguageElementId, ModuleId, ModuleItemId}; +use cairo_lang_defs::ids::ModuleId; use cairo_lang_defs::plugin::MacroPlugin; -use cairo_lang_diagnostics::{format_diagnostics, DiagnosticLocation}; use cairo_lang_filesystem::cfg::CfgSet; use cairo_lang_filesystem::db::{ init_files_group, AsFilesGroupMut, CrateConfiguration, FilesDatabase, FilesGroup, FilesGroupEx, @@ -11,13 +10,11 @@ use cairo_lang_filesystem::db::{ use cairo_lang_filesystem::ids::{CrateLongId, Directory, FileLongId}; use cairo_lang_parser::db::ParserDatabase; use cairo_lang_plugins::get_base_plugins; +use cairo_lang_plugins::test_utils::expand_module_text; use cairo_lang_syntax::node::db::{SyntaxDatabase, SyntaxGroup}; -use cairo_lang_syntax::node::kind::SyntaxKind; -use cairo_lang_syntax::node::{ast, TypedSyntaxNode}; use cairo_lang_test_utils::parse_test_file::TestRunnerResult; use cairo_lang_test_utils::verify_diagnostics_expectation; use cairo_lang_utils::ordered_hash_map::OrderedHashMap; -use cairo_lang_utils::unordered_hash_set::UnorderedHashSet; use cairo_lang_utils::Upcast; use super::BuiltinDojoPlugin; @@ -118,58 +115,3 @@ pub fn test_expand_plugin_inner( error, } } - -pub fn expand_module_text( - db: &dyn DefsGroup, - module_id: ModuleId, - diagnostics: &mut Vec, -) -> String { - let mut output = String::new(); - // A collection of all the use statements in the module. - let mut uses_list = UnorderedHashSet::default(); - let syntax_db = db.upcast(); - // Collect the module diagnostics. - for (file_id, diag) in db.module_plugin_diagnostics(module_id).unwrap().iter() { - let syntax_node = diag.stable_ptr.lookup(syntax_db); - let location = DiagnosticLocation { - file_id: file_id.file_id(db.upcast()).unwrap(), - span: syntax_node.span_without_trivia(syntax_db), - }; - diagnostics.push(format_diagnostics(db.upcast(), &diag.message, location)); - } - for item_id in db.module_items(module_id).unwrap().iter() { - if let ModuleItemId::Submodule(item) = item_id { - let submodule_item = item.stable_ptr(db).lookup(syntax_db); - if let ast::MaybeModuleBody::Some(body) = submodule_item.body(syntax_db) { - // Recursively expand inline submodules. 
- output.extend([ - submodule_item.attributes(syntax_db).node.get_text(syntax_db), - submodule_item.module_kw(syntax_db).as_syntax_node().get_text(syntax_db), - submodule_item.name(syntax_db).as_syntax_node().get_text(syntax_db), - body.lbrace(syntax_db).as_syntax_node().get_text(syntax_db), - expand_module_text(db, ModuleId::Submodule(*item), diagnostics), - body.rbrace(syntax_db).as_syntax_node().get_text(syntax_db), - ]); - continue; - } - } else if let ModuleItemId::Use(use_id) = item_id { - let mut use_item = use_id.stable_ptr(db).lookup(syntax_db).as_syntax_node(); - // Climb up the AST until the syntax kind is ItemUse. This is needed since the use item - // points to the use leaf as one use statement can represent multiple use items. - while let Some(parent) = use_item.parent() { - use_item = parent; - if use_item.kind(syntax_db) == SyntaxKind::ItemUse { - break; - } - } - if uses_list.insert(use_item.clone()) { - output.push_str(&use_item.get_text(syntax_db)); - } - continue; - } - let syntax_item = item_id.untyped_stable_ptr(db); - // Output other items as is. - output.push_str(&syntax_item.lookup(syntax_db).get_text(syntax_db)); - } - output -} diff --git a/crates/dojo-lang/src/plugin_test_data/print b/crates/dojo-lang/src/plugin_test_data/print index d7ed7801d2..aec292d7d2 100644 --- a/crates/dojo-lang/src/plugin_test_data/print +++ b/crates/dojo-lang/src/plugin_test_data/print @@ -3,10 +3,14 @@ //! > test_runner_name test_expand_plugin +//! > cfg +["test"] + //! > cairo_code use serde::Serde; +use debug::PrintTrait; -#[derive(Print, Copy, Drop, Serde)] +#[derive(Print)] struct Position { #[key] id: felt252, @@ -15,14 +19,14 @@ struct Position { y: felt252 } -#[derive(Print, Serde)] +#[derive(Print)] struct Roles { role_ids: Array } use starknet::ContractAddress; -#[derive(Print, Copy, Drop, Serde)] +#[derive(Print)] struct Player { #[key] game: felt252, @@ -32,11 +36,18 @@ struct Player { name: felt252, } -//! > generated_cairo_code -use serde::Serde; +#[derive(Print)] +enum Enemy { + Unknown, + Bot: felt252, + OtherPlayer: ContractAddress, +} +//! > expanded_cairo_code +use serde::Serde; +use debug::PrintTrait; -#[derive(Print, Copy, Drop, Serde)] +#[derive(Print)] struct Position { #[key] id: felt252, @@ -45,37 +56,14 @@ struct Position { y: felt252 } -#[cfg(test)] -impl PositionPrintImpl of core::debug::PrintTrait { - fn print(self: Position) { - core::debug::PrintTrait::print('id'); - core::debug::PrintTrait::print(self.id); - core::debug::PrintTrait::print('x'); - core::debug::PrintTrait::print(self.x); - core::debug::PrintTrait::print('y'); - core::debug::PrintTrait::print(self.y); - } -} - - -#[derive(Print, Serde)] +#[derive(Print)] struct Roles { role_ids: Array } -#[cfg(test)] -impl RolesPrintImpl of core::debug::PrintTrait { - fn print(self: Roles) { - core::debug::PrintTrait::print('role_ids'); - core::debug::PrintTrait::print(self.role_ids); - } -} - - use starknet::ContractAddress; - -#[derive(Print, Copy, Drop, Serde)] +#[derive(Print)] struct Player { #[key] game: felt252, @@ -84,87 +72,48 @@ struct Player { name: felt252, } -#[cfg(test)] -impl PlayerPrintImpl of core::debug::PrintTrait { - fn print(self: Player) { - core::debug::PrintTrait::print('game'); - core::debug::PrintTrait::print(self.game); - core::debug::PrintTrait::print('player'); - core::debug::PrintTrait::print(self.player); - core::debug::PrintTrait::print('name'); - core::debug::PrintTrait::print(self.name); - } -} - -//! > expected_diagnostics - -//! 
> expanded_cairo_code -use serde::Serde; - -#[derive(Print, Copy, Drop, Serde)] -struct Position { - #[key] - id: felt252, - x: felt252, - y: felt252 +#[derive(Print)] +enum Enemy { + Unknown, + Bot: felt252, + OtherPlayer: ContractAddress, } -#[derive(Print, Serde)] -struct Roles { - role_ids: Array +#[cfg(test)] +impl PositionStructPrintImpl of core::debug::PrintTrait { + fn print(self: Position) { + core::debug::PrintTrait::print('id'); core::debug::PrintTrait::print(self.id); +core::debug::PrintTrait::print('x'); core::debug::PrintTrait::print(self.x); +core::debug::PrintTrait::print('y'); core::debug::PrintTrait::print(self.y); + } } -use starknet::ContractAddress; - -#[derive(Print, Copy, Drop, Serde)] -struct Player { - #[key] - game: felt252, - #[key] - player: ContractAddress, - - name: felt252, -} -impl PositionCopy of core::traits::Copy::; -impl PositionDrop of core::traits::Drop::; -impl PositionSerde of core::serde::Serde:: { - fn serialize(self: @Position, ref output: core::array::Array) { - core::serde::Serde::serialize(self.id, ref output); - core::serde::Serde::serialize(self.x, ref output); - core::serde::Serde::serialize(self.y, ref output) - } - fn deserialize(ref serialized: core::array::Span) -> core::option::Option { - core::option::Option::Some(Position { - id: core::serde::Serde::deserialize(ref serialized)?, - x: core::serde::Serde::deserialize(ref serialized)?, - y: core::serde::Serde::deserialize(ref serialized)?, - }) +#[cfg(test)] +impl RolesStructPrintImpl of core::debug::PrintTrait { + fn print(self: Roles) { + core::debug::PrintTrait::print('role_ids'); core::debug::PrintTrait::print(self.role_ids); } } -impl RolesSerde of core::serde::Serde:: { - fn serialize(self: @Roles, ref output: core::array::Array) { - core::serde::Serde::serialize(self.role_ids, ref output) - } - fn deserialize(ref serialized: core::array::Span) -> core::option::Option { - core::option::Option::Some(Roles { - role_ids: core::serde::Serde::deserialize(ref serialized)?, - }) + +#[cfg(test)] +impl PlayerStructPrintImpl of core::debug::PrintTrait { + fn print(self: Player) { + core::debug::PrintTrait::print('game'); core::debug::PrintTrait::print(self.game); +core::debug::PrintTrait::print('player'); core::debug::PrintTrait::print(self.player); +core::debug::PrintTrait::print('name'); core::debug::PrintTrait::print(self.name); } } -impl PlayerCopy of core::traits::Copy::; -impl PlayerDrop of core::traits::Drop::; -impl PlayerSerde of core::serde::Serde:: { - fn serialize(self: @Player, ref output: core::array::Array) { - core::serde::Serde::serialize(self.game, ref output); - core::serde::Serde::serialize(self.player, ref output); - core::serde::Serde::serialize(self.name, ref output) - } - fn deserialize(ref serialized: core::array::Span) -> core::option::Option { - core::option::Option::Some(Player { - game: core::serde::Serde::deserialize(ref serialized)?, - player: core::serde::Serde::deserialize(ref serialized)?, - name: core::serde::Serde::deserialize(ref serialized)?, - }) + +#[cfg(test)] +impl EnemyEnumPrintImpl of core::debug::PrintTrait { + fn print(self: Enemy) { + match self { + Enemy::Unknown => { core::debug::PrintTrait::print('Unknown'); }, +Enemy::Bot(v) => { core::debug::PrintTrait::print('Bot'); core::debug::PrintTrait::print(v); }, +Enemy::OtherPlayer(v) => { core::debug::PrintTrait::print('OtherPlayer'); core::debug::PrintTrait::print(v); } + } } } + +//! 
> expected_diagnostics diff --git a/crates/dojo-lang/src/print.rs b/crates/dojo-lang/src/print.rs index 06136b592e..168adb79d7 100644 --- a/crates/dojo-lang/src/print.rs +++ b/crates/dojo-lang/src/print.rs @@ -1,5 +1,5 @@ use cairo_lang_defs::patcher::RewriteNode; -use cairo_lang_syntax::node::ast::ItemStruct; +use cairo_lang_syntax::node::ast::{ItemEnum, ItemStruct, OptionTypeClause}; use cairo_lang_syntax::node::db::SyntaxGroup; use cairo_lang_syntax::node::{Terminal, TypedSyntaxNode}; use cairo_lang_utils::unordered_hash_map::UnorderedHashMap; @@ -10,7 +10,7 @@ use cairo_lang_utils::unordered_hash_map::UnorderedHashMap; /// * struct_ast: The AST of the model struct. /// Returns: /// * A RewriteNode containing the generated code. -pub fn derive_print(db: &dyn SyntaxGroup, struct_ast: ItemStruct) -> RewriteNode { +pub fn handle_print_struct(db: &dyn SyntaxGroup, struct_ast: ItemStruct) -> RewriteNode { let prints: Vec<_> = struct_ast .members(db) .elements(db) @@ -25,12 +25,14 @@ pub fn derive_print(db: &dyn SyntaxGroup, struct_ast: ItemStruct) -> RewriteNode .collect(); RewriteNode::interpolate_patched( - "#[cfg(test)] - impl $type_name$PrintImpl of core::debug::PrintTrait<$type_name$> { - fn print(self: $type_name$) { - $print$ - } - }", + " +#[cfg(test)] +impl $type_name$StructPrintImpl of core::debug::PrintTrait<$type_name$> { + fn print(self: $type_name$) { + $print$ + } +} +", &UnorderedHashMap::from([ ( "type_name".to_string(), @@ -40,3 +42,53 @@ pub fn derive_print(db: &dyn SyntaxGroup, struct_ast: ItemStruct) -> RewriteNode ]), ) } + +/// Derives PrintTrait for an enum. +/// Parameters: +/// * db: The semantic database. +/// * enum_ast: The AST of the model enum. +/// Returns: +/// * A RewriteNode containing the generated code. +pub fn handle_print_enum(db: &dyn SyntaxGroup, enum_ast: ItemEnum) -> RewriteNode { + let enum_name = enum_ast.name(db).text(db); + let prints: Vec<_> = enum_ast + .variants(db) + .elements(db) + .iter() + .map(|m| { + let variant_name = m.name(db).text(db).to_string(); + match m.type_clause(db) { + OptionTypeClause::Empty(_) => { + format!( + "{enum_name}::{variant_name} => {{ \ + core::debug::PrintTrait::print('{variant_name}'); }}" + ) + } + OptionTypeClause::TypeClause(_) => { + format!( + "{enum_name}::{variant_name}(v) => {{ \ + core::debug::PrintTrait::print('{variant_name}'); \ + core::debug::PrintTrait::print(v); }}" + ) + } + } + }) + .collect(); + + RewriteNode::interpolate_patched( + " +#[cfg(test)] +impl $type_name$EnumPrintImpl of core::debug::PrintTrait<$type_name$> { + fn print(self: $type_name$) { + match self { + $print$ + } + } +} +", + &UnorderedHashMap::from([ + ("type_name".to_string(), RewriteNode::new_trimmed(enum_ast.name(db).as_syntax_node())), + ("print".to_string(), RewriteNode::Text(prints.join(",\n"))), + ]), + ) +} From cdc8cd08d1489616ae436279a551792d281263c3 Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Tue, 9 Jan 2024 13:52:39 +0900 Subject: [PATCH 19/33] fix(katana-primitives): fix legacy program conversion between RPC and inner types (#1403) --- Cargo.lock | 117 +++---- Cargo.toml | 2 +- crates/katana/primitives/Cargo.toml | 1 + .../katana/primitives/src/conversion/rpc.rs | 323 ++++++++++++++++-- 4 files changed, 359 insertions(+), 84 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8c32be4f6b..b8914cfdb0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -465,7 +465,7 @@ dependencies = [ "proc-macro2", "quote", "strum 0.25.0", - "syn 2.0.41", + "syn 2.0.47", "thiserror", ] @@ -572,7 +572,7 @@ checksum = 
"5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -620,7 +620,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -637,7 +637,7 @@ checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -852,7 +852,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -1135,7 +1135,7 @@ dependencies = [ "quote", "serde_json", "starknet", - "syn 2.0.41", + "syn 2.0.47", "thiserror", ] @@ -1151,7 +1151,7 @@ dependencies = [ "quote", "serde_json", "starknet", - "syn 2.0.41", + "syn 2.0.47", "thiserror", ] @@ -1392,7 +1392,7 @@ checksum = "c8cc59c40344194d2cc825071080d887826dcf0df37de71e58fc8aa4c344bb84" dependencies = [ "cairo-lang-debug", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -1917,7 +1917,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -2317,7 +2317,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30d2b3721e861707777e3195b0158f950ae6dc4a27e4d02ff9f67e3eb3de199e" dependencies = [ "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -2384,7 +2384,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -2406,7 +2406,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -3127,7 +3127,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "syn 2.0.41", + "syn 2.0.47", "toml 0.8.8", "walkdir", ] @@ -3145,7 +3145,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -3171,7 +3171,7 @@ dependencies = [ "serde", "serde_json", "strum 0.25.0", - "syn 2.0.41", + "syn 2.0.47", "tempfile", "thiserror", "tiny-keccak", @@ -3633,7 +3633,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -3704,7 +3704,7 @@ checksum = "d4cf186fea4af17825116f72932fe52cce9a13bae39ff63b4dc0cfdb3fb4bde1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -4124,7 +4124,7 @@ checksum = "02a5bcaf6704d9354a3071cede7e77d366a5980c7352e102e2c2f9b645b1d3ae" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -5468,7 +5468,7 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -5555,6 +5555,7 @@ dependencies = [ "flate2", "serde", "serde_json", + "serde_with", "starknet", "starknet_api", "thiserror", @@ -5987,7 +5988,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -6418,7 +6419,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -6511,7 +6512,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -6792,7 +6793,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 
2.0.41", + "syn 2.0.47", ] [[package]] @@ -6856,7 +6857,7 @@ dependencies = [ "phf_shared 0.11.2", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -6900,7 +6901,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -7101,7 +7102,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -7163,9 +7164,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.70" +version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" +checksum = "907a61bd0f64c2f29cd1cf1dc34d05176426a3f504a78010f08416ddb7b13708" dependencies = [ "unicode-ident", ] @@ -7282,7 +7283,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.41", + "syn 2.0.47", "tempfile", "which 4.4.2", ] @@ -7310,7 +7311,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -7355,9 +7356,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.33" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -7742,7 +7743,7 @@ dependencies = [ "regex", "relative-path", "rustc_version", - "syn 2.0.41", + "syn 2.0.47", "unicode-ident", ] @@ -7755,7 +7756,7 @@ dependencies = [ "quote", "rand", "rustc_version", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -8209,9 +8210,9 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.193" +version = "1.0.194" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +checksum = "0b114498256798c94a0689e1a15fec6005dee8ac1f41de56404b67afc2a4b773" dependencies = [ "serde_derive", ] @@ -8238,13 +8239,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.193" +version = "1.0.194" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +checksum = "a3385e45322e8f9931410f01b3031ec534c3947d0e94c18049af4d9f9907d4e0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -8288,7 +8289,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -8337,7 +8338,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -8372,7 +8373,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -9023,7 +9024,7 @@ checksum = "af6527b845423542c8a16e060ea1bc43f67229848e7cd4c4d80be994a84220ce" dependencies = [ "starknet-curve 0.4.0", "starknet-ff", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -9066,7 +9067,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "840be1a7eb5735863eee47d3a3f26df45b9be2c519e8da294e74b4d0524d77d1" dependencies = [ "starknet-core", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -9199,7 +9200,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -9241,9 +9242,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.41" +version = "2.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c8b28c477cc3bf0e7966561e3460130e1255f7a1cf71931075f1c5e7a7e269" +checksum = "1726efe18f42ae774cc644f330953a5e7b3c3003d3edcecf18850fe9d4dd9afb" dependencies = [ "proc-macro2", "quote", @@ -9363,7 +9364,7 @@ checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -9383,7 +9384,7 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -9518,7 +9519,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -9745,7 +9746,7 @@ dependencies = [ "proc-macro2", "prost-build 0.12.3", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -10075,7 +10076,7 @@ checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -10104,7 +10105,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -10223,7 +10224,7 @@ checksum = "982ee4197351b5c9782847ef5ec1fdcaf50503fb19d68f9771adae314e72b492" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -10521,7 +10522,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", "wasm-bindgen-shared", ] @@ -10555,7 +10556,7 @@ checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -10995,7 +10996,7 @@ checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] @@ -11015,7 +11016,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.41", + "syn 2.0.47", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index be55c2f9ad..6161010e7a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -100,7 +100,7 @@ scarb = { git = "https://github.com/software-mansion/scarb", tag = "v2.4.0" } scarb-ui = { git = "https://github.com/software-mansion/scarb", tag = "v2.4.0" } semver = "1.0.5" serde = { version = "1.0.192", features = [ "derive" ] } -serde_json = "1.0" +serde_json = { version = "1.0", features = [ "arbitrary_precision" ] } serde_with = "2.3.1" smol_str = { version = "0.2.0", features = [ "serde" ] } sqlx = { version = "0.7.2", features = [ "chrono", "macros", "regexp", "runtime-async-std", "runtime-tokio", "sqlite", "uuid" ] } diff --git a/crates/katana/primitives/Cargo.toml b/crates/katana/primitives/Cargo.toml index 1398a4ec7a..3f2b36eddc 100644 --- a/crates/katana/primitives/Cargo.toml +++ 
b/crates/katana/primitives/Cargo.toml @@ -12,6 +12,7 @@ cairo-vm.workspace = true derive_more.workspace = true serde.workspace = true serde_json.workspace = true +serde_with.workspace = true starknet.workspace = true thiserror.workspace = true diff --git a/crates/katana/primitives/src/conversion/rpc.rs b/crates/katana/primitives/src/conversion/rpc.rs index 653a30412c..09c7146e56 100644 --- a/crates/katana/primitives/src/conversion/rpc.rs +++ b/crates/katana/primitives/src/conversion/rpc.rs @@ -1,12 +1,24 @@ -use std::collections::HashMap; -use std::io::{Read, Write}; +use std::collections::{BTreeMap, HashMap}; +use std::io::{self, Read, Write}; +use std::mem; +use std::str::FromStr; use anyhow::{anyhow, Result}; use blockifier::execution::contract_class::ContractClassV0; use cairo_lang_starknet::casm_contract_class::CasmContractClass; -use cairo_vm::serde::deserialize_program::ProgramJson; -use serde_json::json; +use cairo_vm::felt::Felt252; +use cairo_vm::serde::deserialize_program::{ApTracking, OffsetValue, ProgramJson, ValueAddress}; +use cairo_vm::types::instruction::Register; +use cairo_vm::types::program::Program; +use serde::{Deserialize, Serialize, Serializer}; +use serde_json::{json, Number}; +use serde_with::serde_as; +use starknet::core::serde::unsigned_field_element::UfeHex; pub use starknet::core::types::contract::legacy::{LegacyContractClass, LegacyProgram}; +use starknet::core::types::contract::legacy::{ + LegacyDebugInfo, LegacyFlowTrackingData, LegacyHint, LegacyIdentifier, LegacyReferenceManager, + RawLegacyAbiEntry, RawLegacyEntryPoints, +}; pub use starknet::core::types::contract::CompiledClass; use starknet::core::types::{ CompressedLegacyContractClass, ContractClass, LegacyContractEntryPoint, LegacyEntryPointsByType, @@ -25,6 +37,11 @@ mod primitives { pub use crate::FieldElement; } +use cairo_vm::serde::deserialize_program::{ + serialize_program_data, Attribute, BuiltinName, DebugInfo, HintParams, Member, +}; +use cairo_vm::types::relocatable::MaybeRelocatable; + /// Converts the legacy inner compiled class type [CompiledContractClassV0] into its RPC equivalent /// [`ContractClass`]. pub fn legacy_inner_to_rpc_class( @@ -59,13 +76,10 @@ pub fn legacy_inner_to_rpc_class( let entry_points_by_type = to_rpc_legacy_entry_points_by_type(&legacy_contract_class.entry_points_by_type)?; - let program = { - let program: ProgramJson = legacy_contract_class.program.clone().into(); - compress(&serde_json::to_vec(&program)?)? 
- }; + let compressed_program = compress_legacy_program_data(legacy_contract_class.program.clone())?; Ok(ContractClass::Legacy(CompressedLegacyContractClass { - program, + program: compressed_program, abi: None, entry_points_by_type, })) @@ -107,20 +121,61 @@ pub fn compiled_class_hash_from_flattened_sierra_class( pub fn legacy_rpc_to_inner_compiled_class( compressed_legacy_contract: &CompressedLegacyContractClass, ) -> Result<(ClassHash, CompiledContractClass)> { - let legacy_program_json = decompress(&compressed_legacy_contract.program)?; - let legacy_program: LegacyProgram = serde_json::from_str(&legacy_program_json)?; - - let flattened = json!({ - "program": legacy_program, - "abi": compressed_legacy_contract.abi, + let class_json = json!({ + "abi": compressed_legacy_contract.abi.clone().unwrap_or_default(), "entry_points_by_type": compressed_legacy_contract.entry_points_by_type, + "program": decompress_legacy_program_data(&compressed_legacy_contract.program)?, }); - let legacy_contract_class: LegacyContractClass = serde_json::from_value(flattened.clone())?; - let class_hash = legacy_contract_class.class_hash()?; - let contract_class: ContractClassV0 = serde_json::from_value(flattened)?; + #[allow(unused)] + #[derive(Deserialize)] + struct LegacyAttribute { + #[serde(default)] + accessible_scopes: Vec, + end_pc: u64, + flow_tracking_data: Option, + name: String, + start_pc: u64, + value: String, + } + + #[allow(unused)] + #[serde_as] + #[derive(Deserialize)] + pub struct LegacyProgram { + attributes: Option>, + builtins: Vec, + compiler_version: Option, + #[serde_as(as = "Vec")] + data: Vec, + debug_info: Option, + hints: BTreeMap>, + identifiers: BTreeMap, + main_scope: String, + prime: String, + reference_manager: LegacyReferenceManager, + } + + #[allow(unused)] + #[derive(Deserialize)] + struct LegacyContractClassJson { + abi: Vec, + entry_points_by_type: RawLegacyEntryPoints, + program: LegacyProgram, + } - Ok((class_hash, CompiledContractClass::V0(contract_class))) + // SAFETY: `LegacyContractClassJson` MUST maintain same memory layout as `LegacyContractClass`. + // This would only work if the fields are in the same order and have the same size. Though, + // both types are using default Rust repr, which means there is no guarantee by the compiler + // that the memory layout of both types will be the same despite comprised of the same + // fields and types. 
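As the SAFETY comment above notes, `mem::transmute` between two default-repr structs carries no compiler guarantee even when the fields line up. A layout-independent alternative, sketched here with hypothetical `Mirror`/`Target` types rather than the patch's actual ones, is to round-trip through `serde_json::Value` at the cost of one extra serialization pass; the patch presumably opts for `transmute` to avoid that pass on large legacy programs.

use serde::{Deserialize, Serialize};

#[derive(Serialize)]
struct Mirror {
    prime: String,
    builtins: Vec<String>,
}

#[derive(Deserialize)]
struct Target {
    prime: String,
    builtins: Vec<String>,
}

// Re-serialize the mirror type and deserialize the target from the resulting value;
// nothing here depends on field order or in-memory layout.
fn convert(mirror: &Mirror) -> serde_json::Result<Target> {
    serde_json::from_value(serde_json::to_value(mirror)?)
}
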
+ let class: LegacyContractClassJson = serde_json::from_value(class_json.clone())?; + let class: LegacyContractClass = unsafe { mem::transmute(class) }; + + let inner_class: ContractClassV0 = serde_json::from_value(class_json)?; + let class_hash = class.class_hash()?; + + Ok((class_hash, CompiledContractClass::V0(inner_class))) } /// Converts `starknet-rs` RPC [FlattenedSierraClass] type to Cairo's @@ -142,15 +197,233 @@ fn rpc_to_cairo_contract_class( }) } -fn compress(data: &[u8]) -> Result, std::io::Error> { +fn compress_legacy_program_data(legacy_program: Program) -> Result, io::Error> { + fn felt_as_dec_str( + value: &Option, + serializer: S, + ) -> Result { + let dec_str = format!("{}", value.clone().unwrap_or_default().to_signed_felt()); + let number = Number::from_str(&dec_str).expect("valid number"); + number.serialize(serializer) + } + + fn value_address_in_str_format( + value_address: &ValueAddress, + serializer: S, + ) -> Result { + serializer.serialize_str(&parse_value_address_to_str(value_address.clone())) + } + + fn zero_if_none(pc: &Option, serializer: S) -> Result { + serializer.serialize_u64(pc.as_ref().map_or(0, |x| *x as u64)) + } + + #[derive(Serialize)] + struct Identifier { + #[serde(skip_serializing_if = "Option::is_none")] + pc: Option, + #[serde(rename = "type")] + #[serde(skip_serializing_if = "Option::is_none")] + type_: Option, + #[serde(serialize_with = "felt_as_dec_str")] + #[serde(skip_serializing_if = "Option::is_none")] + value: Option, + #[serde(skip_serializing_if = "Option::is_none")] + full_name: Option, + #[serde(skip_serializing_if = "Option::is_none")] + members: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + cairo_type: Option, + } + + #[derive(Serialize)] + struct Reference { + ap_tracking_data: ApTracking, + #[serde(serialize_with = "zero_if_none")] + pc: Option, + #[serde(rename(serialize = "value"))] + #[serde(serialize_with = "value_address_in_str_format")] + value_address: ValueAddress, + } + + #[derive(Serialize)] + struct ReferenceManager { + references: Vec, + } + + #[derive(Serialize)] + struct SerializableProgramJson { + prime: String, + builtins: Vec, + #[serde(serialize_with = "serialize_program_data")] + #[serde(deserialize_with = "deserialize_array_of_bigint_hex")] + data: Vec, + identifiers: HashMap, + hints: HashMap>, + reference_manager: ReferenceManager, + attributes: Vec, + debug_info: Option, + } + + // SAFETY: `SerializableProgramJson` MUST maintain same memory layout as `ProgramJson`. This + // would only work if the fields are in the same order and have the same size. Though, both + // types are using default Rust repr, which means there is no guarantee by the compiler that the + // memory layout of both types will be the same despite comprised of the same fields and + // types. 
+ let program: ProgramJson = ProgramJson::from(legacy_program); + let program: SerializableProgramJson = unsafe { mem::transmute(program) }; + + let buffer = serde_json::to_vec(&program)?; let mut gzip_encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::fast()); - Write::write_all(&mut gzip_encoder, data)?; + Write::write_all(&mut gzip_encoder, &buffer)?; gzip_encoder.finish() } -fn decompress(data: &[u8]) -> Result { +fn decompress_legacy_program_data(data: &[u8]) -> Result { + #[derive(Deserialize)] + #[allow(unused)] + struct LegacyAttribute { + #[serde(default)] + accessible_scopes: Vec, + end_pc: u64, + flow_tracking_data: Option, + name: String, + start_pc: u64, + value: String, + } + + #[repr(transparent)] + #[derive(Deserialize)] + struct MainScope(String); + + impl Default for MainScope { + fn default() -> Self { + Self(String::from("__main__")) + } + } + + #[serde_as] + #[allow(unused)] + #[derive(Deserialize)] + struct LegacyProgramJson { + attributes: Option>, + builtins: Vec, + compiler_version: Option, + #[serde_as(as = "Vec")] + data: Vec, + debug_info: Option, + hints: BTreeMap>, + identifiers: BTreeMap, + #[serde(default)] + main_scope: MainScope, + prime: String, + reference_manager: LegacyReferenceManager, + } + let mut decoder = flate2::read::GzDecoder::new(data); - let mut decoded = String::new(); - Read::read_to_string(&mut decoder, &mut decoded)?; - Ok(decoded) + let mut decoded = Vec::new(); + Read::read_to_end(&mut decoder, &mut decoded)?; + + // SAFETY: `LegacyProgramJson` MUST maintain same memory layout as `LegacyProgram`. This + // would only work if the fields are in the same order and have the same size. Though, both + // types are using default Rust repr, which means there is no guarantee by the compiler that the + // memory layout of both types will be the same despite comprised of the same fields and + // types. 
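As an aside, the `felt_as_dec_str` serializer in `compress_legacy_program_data` above feeds full felt decimals into `serde_json::Number::from_str`, which is presumably why this patch also enables the `arbitrary_precision` feature for the workspace `serde_json`. A minimal sketch (the 2^128 literal is arbitrary, chosen only because it overflows the default integer representations):

use std::str::FromStr;

use serde_json::Number;

fn main() {
    // 2^128: far beyond u64::MAX, so it only round-trips exactly when serde_json
    // keeps the digits as-is via the "arbitrary_precision" feature.
    let dec = "340282366920938463463374607431768211456";
    let n = Number::from_str(dec).expect("valid number");
    assert_eq!(n.to_string(), dec);
}
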
+ let program: LegacyProgramJson = serde_json::from_slice(&decoded)?; + let program: LegacyProgram = unsafe { mem::transmute(program) }; + + Ok(program) +} + +fn parse_value_address_to_str(value_address: ValueAddress) -> String { + fn handle_offset_ref(offset: i32, str: &mut String) { + if offset == 0 { + return; + } + + str.push_str(" + "); + str.push_str(&if offset.is_negative() { format!("({offset})") } else { offset.to_string() }) + } + + fn handle_offset_val(value: OffsetValue, str: &mut String) { + match value { + OffsetValue::Reference(rx, offset, deref) => { + let mut tmp = String::from(match rx { + Register::FP => "fp", + Register::AP => "ap", + }); + + handle_offset_ref(offset, &mut tmp); + + if deref { + str.push_str(&format!("[{tmp}]")); + } else { + str.push_str(&tmp); + } + } + + OffsetValue::Value(value) => handle_offset_ref(value, str), + + OffsetValue::Immediate(value) => { + if value == Felt252::from(0u32) { + return; + } + + str.push_str(" + "); + str.push_str(&value.to_string()); + } + } + } + + let mut str = String::new(); + let is_value: bool; + + if let OffsetValue::Immediate(_) = value_address.offset2 { + is_value = false; + } else { + is_value = true; + } + + handle_offset_val(value_address.offset1, &mut str); + handle_offset_val(value_address.offset2, &mut str); + + str.push_str(", "); + str.push_str(&value_address.value_type); + + if is_value { + str.push('*'); + } + + str = format!("cast({str})"); + + if value_address.dereference { + str = format!("[{str}]"); + } + + str +} + +#[cfg(test)] +mod tests { + use starknet::core::types::ContractClass; + + use super::{legacy_inner_to_rpc_class, legacy_rpc_to_inner_compiled_class}; + use crate::utils::class::parse_compiled_class_v0; + + // There are some discrepancies between the legacy RPC and the inner compiled class types which + // results in some data lost during the conversion. Therefore, we are unable to assert for + // equality between the original and the converted class. 
Instead, we assert that the conversion + // is successful and that the converted class can be converted back + #[test] + fn legacy_rpc_to_inner_and_back() { + let class_json = include_str!("../../../core/contracts/compiled/account.json"); + let class = parse_compiled_class_v0(class_json).unwrap(); + + let Ok(ContractClass::Legacy(compressed_legacy_class)) = legacy_inner_to_rpc_class(class) + else { + panic!("Expected legacy class"); + }; + + legacy_rpc_to_inner_compiled_class(&compressed_legacy_class).unwrap(); + } } From 9de724bc545f64154172243f7a4d135ee84875d9 Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Tue, 9 Jan 2024 13:53:44 +0900 Subject: [PATCH 20/33] refactor(katana): chain id type (#1351) --- Cargo.lock | 4 + crates/dojo-test-utils/Cargo.toml | 1 + crates/dojo-test-utils/src/sequencer.rs | 3 +- crates/katana/Cargo.toml | 1 + crates/katana/core/src/backend/config.rs | 8 +- crates/katana/core/src/backend/mod.rs | 24 +- crates/katana/core/src/sequencer.rs | 4 +- .../core/src/service/messaging/ethereum.rs | 8 +- .../katana/core/src/service/messaging/mod.rs | 4 +- .../core/src/service/messaging/service.rs | 8 +- .../core/src/service/messaging/starknet.rs | 13 +- crates/katana/primitives/Cargo.toml | 2 + crates/katana/primitives/src/chain.rs | 219 ++++++++++++++++++ .../primitives/src/conversion/blockifier.rs | 31 +++ crates/katana/primitives/src/env.rs | 3 +- crates/katana/primitives/src/lib.rs | 4 +- crates/katana/primitives/src/transaction.rs | 13 +- crates/katana/rpc/rpc-types/src/message.rs | 3 +- .../katana/rpc/rpc-types/src/transaction.rs | 11 +- crates/katana/rpc/src/starknet.rs | 19 +- crates/katana/src/args.rs | 9 +- 21 files changed, 330 insertions(+), 62 deletions(-) create mode 100644 crates/katana/primitives/src/chain.rs diff --git a/Cargo.lock b/Cargo.lock index b8914cfdb0..cfca2b6fab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2753,6 +2753,7 @@ dependencies = [ "dojo-world", "jsonrpsee", "katana-core", + "katana-primitives", "katana-rpc", "scarb", "scarb-ui", @@ -5442,6 +5443,7 @@ dependencies = [ "clap_complete", "console", "katana-core", + "katana-primitives", "katana-rpc", "metrics 0.5.0", "metrics-process", @@ -5558,6 +5560,8 @@ dependencies = [ "serde_with", "starknet", "starknet_api", + "strum 0.25.0", + "strum_macros 0.25.3", "thiserror", ] diff --git a/crates/dojo-test-utils/Cargo.toml b/crates/dojo-test-utils/Cargo.toml index 3282db9d93..5e549eb37c 100644 --- a/crates/dojo-test-utils/Cargo.toml +++ b/crates/dojo-test-utils/Cargo.toml @@ -18,6 +18,7 @@ dojo-lang = { path = "../dojo-lang" } dojo-world = { path = "../dojo-world", features = [ "manifest", "migration" ] } jsonrpsee = { version = "0.16.2", features = [ "server" ] } katana-core = { path = "../katana/core" } +katana-primitives = { path = "../katana/primitives" } katana-rpc = { path = "../katana/rpc" } scarb-ui.workspace = true scarb.workspace = true diff --git a/crates/dojo-test-utils/src/sequencer.rs b/crates/dojo-test-utils/src/sequencer.rs index 29e1b97c96..fcb83c6163 100644 --- a/crates/dojo-test-utils/src/sequencer.rs +++ b/crates/dojo-test-utils/src/sequencer.rs @@ -4,6 +4,7 @@ use jsonrpsee::core::Error; pub use katana_core::backend::config::{Environment, StarknetConfig}; use katana_core::sequencer::KatanaSequencer; pub use katana_core::sequencer::SequencerConfig; +use katana_primitives::chain::ChainId; use katana_rpc::api::ApiKind; use katana_rpc::config::ServerConfig; use katana_rpc::{spawn, NodeHandle}; @@ -79,7 +80,7 @@ impl TestSequencer { pub fn get_default_test_starknet_config() -> 
StarknetConfig { StarknetConfig { disable_fee: true, - env: Environment { chain_id: "SN_GOERLI".into(), ..Default::default() }, + env: Environment { chain_id: ChainId::GOERLI, ..Default::default() }, ..Default::default() } } diff --git a/crates/katana/Cargo.toml b/crates/katana/Cargo.toml index 0e2b102a61..5196c53e50 100644 --- a/crates/katana/Cargo.toml +++ b/crates/katana/Cargo.toml @@ -11,6 +11,7 @@ clap.workspace = true clap_complete.workspace = true console.workspace = true katana-core = { path = "core" } +katana-primitives = { path = "primitives" } katana-rpc = { path = "rpc" } metrics = { path = "../metrics" } metrics-process.workspace = true diff --git a/crates/katana/core/src/backend/config.rs b/crates/katana/core/src/backend/config.rs index c466056286..3a0bba627c 100644 --- a/crates/katana/core/src/backend/config.rs +++ b/crates/katana/core/src/backend/config.rs @@ -1,6 +1,6 @@ use blockifier::block_context::{BlockContext, FeeTokenAddresses, GasPrices}; +use katana_primitives::chain::ChainId; use starknet_api::block::{BlockNumber, BlockTimestamp}; -use starknet_api::core::ChainId; use url::Url; use crate::constants::{ @@ -24,7 +24,7 @@ impl StarknetConfig { pub fn block_context(&self) -> BlockContext { BlockContext { block_number: BlockNumber::default(), - chain_id: ChainId(self.env.chain_id.clone()), + chain_id: self.env.chain_id.into(), block_timestamp: BlockTimestamp::default(), sequencer_address: (*SEQUENCER_ADDRESS).into(), // As the fee has two currencies, we also have to adjust their addresses. @@ -67,7 +67,7 @@ impl Default for StarknetConfig { #[derive(Debug, Clone)] pub struct Environment { - pub chain_id: String, + pub chain_id: ChainId, pub gas_price: u128, pub invoke_max_steps: u32, pub validate_max_steps: u32, @@ -77,7 +77,7 @@ impl Default for Environment { fn default() -> Self { Self { gas_price: DEFAULT_GAS_PRICE, - chain_id: "KATANA".to_string(), + chain_id: ChainId::parse("KATANA").unwrap(), invoke_max_steps: DEFAULT_INVOKE_MAX_STEPS, validate_max_steps: DEFAULT_VALIDATE_MAX_STEPS, } diff --git a/crates/katana/core/src/backend/mod.rs b/crates/katana/core/src/backend/mod.rs index 99d6009687..ae04161dbe 100644 --- a/crates/katana/core/src/backend/mod.rs +++ b/crates/katana/core/src/backend/mod.rs @@ -4,6 +4,7 @@ use blockifier::block_context::BlockContext; use katana_primitives::block::{ Block, FinalityStatus, GasPrices, Header, PartialHeader, SealedBlockWithStatus, }; +use katana_primitives::chain::ChainId; use katana_primitives::contract::ContractAddress; use katana_primitives::receipt::Receipt; use katana_primitives::state::StateUpdatesWithDeclaredClasses; @@ -20,7 +21,6 @@ use starknet::core::utils::parse_cairo_short_string; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::{JsonRpcClient, Provider}; use starknet_api::block::{BlockNumber, BlockTimestamp}; -use starknet_api::core::ChainId; use tracing::{info, trace}; pub mod config; @@ -40,6 +40,8 @@ pub struct Backend { pub config: RwLock, /// stores all block related data in memory pub blockchain: Blockchain, + /// The chain id. + pub chain_id: ChainId, /// The chain environment values. 
pub env: Arc>, pub block_context_generator: RwLock, @@ -57,7 +59,9 @@ impl Backend { .with_balance(*DEFAULT_PREFUNDED_ACCOUNT_BALANCE) .generate(); - let blockchain: Blockchain = if let Some(forked_url) = &config.fork_rpc_url { + let (blockchain, chain_id): (Blockchain, ChainId) = if let Some(forked_url) = + &config.fork_rpc_url + { let provider = Arc::new(JsonRpcClient::new(HttpTransport::new(forked_url.clone()))); let forked_chain_id = provider.chain_id().await.unwrap(); @@ -79,7 +83,8 @@ impl Backend { block_context.block_number = BlockNumber(block.block_number); block_context.block_timestamp = BlockTimestamp(block.timestamp); block_context.sequencer_address = ContractAddress(block.sequencer_address).into(); - block_context.chain_id = ChainId(parse_cairo_short_string(&forked_chain_id).unwrap()); + block_context.chain_id = + starknet_api::core::ChainId(parse_cairo_short_string(&forked_chain_id).unwrap()); trace!( target: "backend", @@ -89,7 +94,7 @@ impl Backend { forked_url ); - Blockchain::new_from_forked( + let blockchain = Blockchain::new_from_forked( ForkedProvider::new(provider, forked_block_num.into()), block.block_hash, block.parent_hash, @@ -101,10 +106,14 @@ impl Backend { _ => panic!("unable to fork for non-accepted block"), }, ) - .expect("able to create forked blockchain") + .expect("able to create forked blockchain"); + + (blockchain, forked_chain_id.into()) } else { - Blockchain::new_with_genesis(InMemoryProvider::new(), &block_context) - .expect("able to create blockchain from genesis block") + let blockchain = Blockchain::new_with_genesis(InMemoryProvider::new(), &block_context) + .expect("able to create blockchain from genesis block"); + + (blockchain, config.env.chain_id) }; let env = Env { block: block_context }; @@ -115,6 +124,7 @@ impl Backend { } Self { + chain_id, accounts, blockchain, config: RwLock::new(config), diff --git a/crates/katana/core/src/sequencer.rs b/crates/katana/core/src/sequencer.rs index 4a3e7a5610..1f2ac0d620 100644 --- a/crates/katana/core/src/sequencer.rs +++ b/crates/katana/core/src/sequencer.rs @@ -10,6 +10,7 @@ use katana_executor::blockifier::state::StateRefDb; use katana_executor::blockifier::utils::EntryPointCall; use katana_executor::blockifier::PendingState; use katana_primitives::block::{BlockHash, BlockHashOrNumber, BlockIdOrTag, BlockNumber}; +use katana_primitives::chain::ChainId; use katana_primitives::contract::{ ClassHash, CompiledContractClass, ContractAddress, Nonce, StorageKey, StorageValue, }; @@ -26,7 +27,6 @@ use katana_provider::traits::transaction::{ ReceiptProvider, TransactionProvider, TransactionsProviderExt, }; use starknet::core::types::{BlockTag, EmittedEvent, EventsPage, FeeEstimate}; -use starknet_api::core::ChainId; use crate::backend::config::StarknetConfig; use crate::backend::contract::StarknetContract; @@ -215,7 +215,7 @@ impl KatanaSequencer { } pub fn chain_id(&self) -> ChainId { - self.backend.env.read().block.chain_id.clone() + self.backend.chain_id } pub fn block_number(&self) -> BlockNumber { diff --git a/crates/katana/core/src/service/messaging/ethereum.rs b/crates/katana/core/src/service/messaging/ethereum.rs index ec4e0aa140..a9710c2cf4 100644 --- a/crates/katana/core/src/service/messaging/ethereum.rs +++ b/crates/katana/core/src/service/messaging/ethereum.rs @@ -8,6 +8,7 @@ use ethers::prelude::*; use ethers::providers::{Http, Provider}; use ethers::types::{Address, BlockNumber, Log}; use k256::ecdsa::SigningKey; +use katana_primitives::chain::ChainId; use katana_primitives::receipt::MessageToL1; 
use katana_primitives::transaction::L1HandlerTx; use katana_primitives::utils::transaction::compute_l1_message_hash; @@ -127,7 +128,7 @@ impl Messenger for EthereumMessaging { &self, from_block: u64, max_blocks: u64, - chain_id: FieldElement, + chain_id: ChainId, ) -> MessengerResult<(u64, Vec)> { let chain_latest_block: u64 = self .provider @@ -206,7 +207,7 @@ impl Messenger for EthereumMessaging { } } -fn l1_handler_tx_from_log(log: Log, chain_id: FieldElement) -> MessengerResult { +fn l1_handler_tx_from_log(log: Log, chain_id: ChainId) -> MessengerResult { let parsed_log = ::decode_log(&log.into()).map_err(|e| { error!(target: LOG_TARGET, "Log parsing failed {e}"); Error::GatherError @@ -259,6 +260,7 @@ fn felt_from_address(v: Address) -> FieldElement { #[cfg(test)] mod tests { + use katana_primitives::chain::{ChainId, NamedChainId}; use starknet::macros::{felt, selector}; use super::*; @@ -299,7 +301,7 @@ mod tests { }; // SN_GOERLI. - let chain_id = starknet::macros::felt!("0x534e5f474f45524c49"); + let chain_id = ChainId::Named(NamedChainId::Goerli); let to_address = FieldElement::from_hex_be(to_address).unwrap(); let from_address = FieldElement::from_hex_be(from_address).unwrap(); diff --git a/crates/katana/core/src/service/messaging/mod.rs b/crates/katana/core/src/service/messaging/mod.rs index 79d8c6364d..6b2de596c2 100644 --- a/crates/katana/core/src/service/messaging/mod.rs +++ b/crates/katana/core/src/service/messaging/mod.rs @@ -39,12 +39,12 @@ mod starknet; use std::path::Path; -use ::starknet::core::types::FieldElement; use ::starknet::providers::ProviderError as StarknetProviderError; use anyhow::Result; use async_trait::async_trait; use ethereum::EthereumMessaging; use ethers::providers::ProviderError as EthereumProviderError; +use katana_primitives::chain::ChainId; use katana_primitives::receipt::MessageToL1; use serde::Deserialize; use tracing::{error, info}; @@ -145,7 +145,7 @@ pub trait Messenger { &self, from_block: u64, max_blocks: u64, - chain_id: FieldElement, + chain_id: ChainId, ) -> MessengerResult<(u64, Vec)>; /// Computes the hash of the given messages and sends them to the settlement chain. diff --git a/crates/katana/core/src/service/messaging/service.rs b/crates/katana/core/src/service/messaging/service.rs index e43e8df890..379a866dce 100644 --- a/crates/katana/core/src/service/messaging/service.rs +++ b/crates/katana/core/src/service/messaging/service.rs @@ -3,7 +3,6 @@ use std::sync::Arc; use std::task::{Context, Poll}; use std::time::Duration; -use ::starknet::core::types::FieldElement; use futures::{Future, FutureExt, Stream}; use katana_primitives::block::BlockHashOrNumber; use katana_primitives::receipt::MessageToL1; @@ -76,9 +75,6 @@ impl MessagingService { backend: Arc, from_block: u64, ) -> MessengerResult<(u64, usize)> { - let chain_id = FieldElement::from_hex_be(&backend.env.read().block.chain_id.as_hex()) - .expect("failed to parse katana chain id"); - // 200 avoids any possible rejection from RPC with possibly lot's of messages. // TODO: May this be configurable? 
let max_block = 200; @@ -86,7 +82,7 @@ impl MessagingService { match messenger.as_ref() { MessengerMode::Ethereum(inner) => { let (block_num, txs) = - inner.gather_messages(from_block, max_block, chain_id).await?; + inner.gather_messages(from_block, max_block, backend.chain_id).await?; let txs_count = txs.len(); txs.into_iter().for_each(|tx| { @@ -101,7 +97,7 @@ impl MessagingService { #[cfg(feature = "starknet-messaging")] MessengerMode::Starknet(inner) => { let (block_num, txs) = - inner.gather_messages(from_block, max_block, chain_id).await?; + inner.gather_messages(from_block, max_block, backend.chain_id).await?; let txs_count = txs.len(); txs.into_iter().for_each(|tx| { diff --git a/crates/katana/core/src/service/messaging/starknet.rs b/crates/katana/core/src/service/messaging/starknet.rs index 642f0de240..019c6e1970 100644 --- a/crates/katana/core/src/service/messaging/starknet.rs +++ b/crates/katana/core/src/service/messaging/starknet.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use anyhow::Result; use async_trait::async_trait; +use katana_primitives::chain::ChainId; use katana_primitives::receipt::MessageToL1; use katana_primitives::transaction::L1HandlerTx; use katana_primitives::utils::transaction::compute_l1_message_hash; @@ -163,7 +164,7 @@ impl Messenger for StarknetMessaging { &self, from_block: u64, max_blocks: u64, - chain_id: FieldElement, + chain_id: ChainId, ) -> MessengerResult<(u64, Vec)> { let chain_latest_block: u64 = match self.provider.block_number().await { Ok(n) => n, @@ -306,7 +307,7 @@ fn parse_messages(messages: &[MessageToL1]) -> MessengerResult<(Vec Result { +fn l1_handler_tx_from_event(event: &EmittedEvent, chain_id: ChainId) -> Result { if event.keys[0] != selector!("MessageSentToAppchain") { debug!( target: LOG_TARGET, @@ -429,7 +430,7 @@ mod tests { let from_address = selector!("from_address"); let to_address = selector!("to_address"); let selector = selector!("selector"); - let chain_id = selector!("KATANA"); + let chain_id = ChainId::parse("KATANA").unwrap(); let nonce = FieldElement::ONE; let calldata = vec![from_address, FieldElement::THREE]; @@ -438,7 +439,7 @@ mod tests { to_address, selector, &calldata, - chain_id, + chain_id.into(), nonce, ); @@ -512,7 +513,7 @@ mod tests { transaction_hash, }; - let _tx = l1_handler_tx_from_event(&event, FieldElement::ZERO).unwrap(); + let _tx = l1_handler_tx_from_event(&event, ChainId::default()).unwrap(); } #[test] @@ -536,6 +537,6 @@ mod tests { transaction_hash, }; - let _tx = l1_handler_tx_from_event(&event, FieldElement::ZERO).unwrap(); + let _tx = l1_handler_tx_from_event(&event, ChainId::default()).unwrap(); } } diff --git a/crates/katana/primitives/Cargo.toml b/crates/katana/primitives/Cargo.toml index 3f2b36eddc..9a3f8799dd 100644 --- a/crates/katana/primitives/Cargo.toml +++ b/crates/katana/primitives/Cargo.toml @@ -14,6 +14,8 @@ serde.workspace = true serde_json.workspace = true serde_with.workspace = true starknet.workspace = true +strum.workspace = true +strum_macros.workspace = true thiserror.workspace = true blockifier.workspace = true diff --git a/crates/katana/primitives/src/chain.rs b/crates/katana/primitives/src/chain.rs new file mode 100644 index 0000000000..ec1c2d6f77 --- /dev/null +++ b/crates/katana/primitives/src/chain.rs @@ -0,0 +1,219 @@ +use starknet::core::types::{FieldElement, FromStrError}; +use starknet::core::utils::{cairo_short_string_to_felt, CairoShortStringToFeltError}; + +/// Known chain ids that has been assigned a name. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, strum_macros::Display)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum NamedChainId { + Mainnet, + Goerli, + Sepolia, +} + +impl NamedChainId { + /// `SN_MAIN` in ASCII + pub const SN_MAIN: FieldElement = FieldElement::from_mont([ + 0xf596341657d6d657, + 0xffffffffffffffff, + 0xffffffffffffffff, + 0x6f9757bd5443bc6, + ]); + + /// `SN_GOERLI` in ASCII + pub const SN_GOERLI: FieldElement = FieldElement::from_mont([ + 0x3417161755cc97b2, + 0xfffffffffffff596, + 0xffffffffffffffff, + 0x588778cb29612d1, + ]); + + /// `SN_SEPOLIA` in ASCII + pub const SN_SEPOLIA: FieldElement = FieldElement::from_mont([ + 0x159755f62c97a933, + 0xfffffffffff59634, + 0xffffffffffffffff, + 0x70cb558f6123c62, + ]); + + /// Returns the id of the chain. It is the ASCII representation of a predefined string + /// constants. + #[inline] + pub const fn id(&self) -> FieldElement { + match self { + NamedChainId::Mainnet => Self::SN_MAIN, + NamedChainId::Goerli => Self::SN_GOERLI, + NamedChainId::Sepolia => Self::SN_SEPOLIA, + } + } + + /// Returns the predefined string constant of the chain id. + #[inline] + pub const fn name(&self) -> &'static str { + match self { + NamedChainId::Mainnet => "SN_MAIN", + NamedChainId::Goerli => "SN_GOERLI", + NamedChainId::Sepolia => "SN_SEPOLIA", + } + } +} + +/// This `struct` is created by the [`NamedChainId::try_from`] method. +#[derive(Debug, thiserror::Error)] +#[error("Unknown named chain id {0:#x}")] +pub struct NamedChainTryFromError(FieldElement); + +impl TryFrom for NamedChainId { + type Error = NamedChainTryFromError; + fn try_from(value: FieldElement) -> Result { + if value == Self::SN_MAIN { + Ok(Self::Mainnet) + } else if value == Self::SN_GOERLI { + Ok(Self::Goerli) + } else if value == Self::SN_SEPOLIA { + Ok(Self::Sepolia) + } else { + Err(NamedChainTryFromError(value)) + } + } +} + +/// Represents a chain id. +#[derive(Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum ChainId { + /// A chain id with a known chain name. + Named(NamedChainId), + Id(FieldElement), +} + +#[derive(Debug, thiserror::Error)] +pub enum ParseChainIdError { + #[error(transparent)] + FromStr(#[from] FromStrError), + #[error(transparent)] + CairoShortStringToFelt(#[from] CairoShortStringToFeltError), +} + +impl ChainId { + /// Chain id of the Starknet mainnet. + pub const MAINNET: Self = Self::Named(NamedChainId::Mainnet); + /// Chain id of the Starknet goerli testnet. + pub const GOERLI: Self = Self::Named(NamedChainId::Goerli); + /// Chain id of the Starknet sepolia testnet. + pub const SEPOLIA: Self = Self::Named(NamedChainId::Sepolia); + + /// Parse a [`ChainId`] from a [`str`]. + /// + /// If the `str` starts with `0x` it is parsed as a hex string, otherwise it is parsed as a + /// Cairo short string. + pub fn parse(s: &str) -> Result { + let id = if s.starts_with("0x") { + FieldElement::from_hex_be(s)? + } else { + cairo_short_string_to_felt(s)? + }; + Ok(ChainId::from(id)) + } + + /// Returns the chain id value. 
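For reference, the parsing rules defined by `ChainId::parse` above behave as in this short sketch; the hex literal is simply the ASCII encoding of `KATANA`, and the error type is the `ParseChainIdError` declared in this file.

use katana_primitives::chain::{ChainId, ParseChainIdError};

fn main() -> Result<(), ParseChainIdError> {
    // A recognized name resolves to the named constant...
    assert_eq!(ChainId::parse("SN_MAIN")?, ChainId::MAINNET);
    // ...while any other short string and its hex form map to the same raw id.
    assert_eq!(ChainId::parse("KATANA")?, ChainId::parse("0x4b4154414e41")?);
    Ok(())
}
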
+ pub const fn id(&self) -> FieldElement { + match self { + ChainId::Named(name) => name.id(), + ChainId::Id(id) => *id, + } + } +} + +impl Default for ChainId { + fn default() -> Self { + ChainId::Id(FieldElement::ZERO) + } +} + +impl std::fmt::Debug for ChainId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ChainId::Named(name) => write!(f, "ChainId {{ name: {name}, id: {:#x} }}", name.id()), + ChainId::Id(id) => write!(f, "ChainId {{ id: {id:#x} }}"), + } + } +} + +impl std::fmt::Display for ChainId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ChainId::Named(id) => write!(f, "{id}"), + ChainId::Id(id) => write!(f, "{id:#x}"), + } + } +} + +impl From for ChainId { + fn from(value: FieldElement) -> Self { + NamedChainId::try_from(value).map(ChainId::Named).unwrap_or(ChainId::Id(value)) + } +} + +impl From for FieldElement { + fn from(value: ChainId) -> Self { + value.id() + } +} + +#[cfg(test)] +mod tests { + use std::convert::TryFrom; + + use starknet::core::utils::cairo_short_string_to_felt; + use starknet::macros::felt; + + use super::ChainId; + use crate::chain::NamedChainId; + + #[test] + fn named_chain_id() { + let mainnet_id = cairo_short_string_to_felt("SN_MAIN").unwrap(); + let goerli_id = cairo_short_string_to_felt("SN_GOERLI").unwrap(); + let sepolia_id = cairo_short_string_to_felt("SN_SEPOLIA").unwrap(); + + assert_eq!(NamedChainId::Mainnet.id(), mainnet_id); + assert_eq!(NamedChainId::Goerli.id(), goerli_id); + assert_eq!(NamedChainId::Sepolia.id(), sepolia_id); + + assert_eq!(NamedChainId::try_from(mainnet_id).unwrap(), NamedChainId::Mainnet); + assert_eq!(NamedChainId::try_from(goerli_id).unwrap(), NamedChainId::Goerli); + assert_eq!(NamedChainId::try_from(sepolia_id).unwrap(), NamedChainId::Sepolia); + assert!(NamedChainId::try_from(felt!("0x1337")).is_err()); + } + + #[test] + fn chain_id() { + let mainnet_id = cairo_short_string_to_felt("SN_MAIN").unwrap(); + let goerli_id = cairo_short_string_to_felt("SN_GOERLI").unwrap(); + let sepolia_id = cairo_short_string_to_felt("SN_SEPOLIA").unwrap(); + + assert_eq!(ChainId::MAINNET.id(), NamedChainId::Mainnet.id()); + assert_eq!(ChainId::GOERLI.id(), NamedChainId::Goerli.id()); + assert_eq!(ChainId::SEPOLIA.id(), NamedChainId::Sepolia.id()); + + assert_eq!(ChainId::from(mainnet_id), ChainId::MAINNET); + assert_eq!(ChainId::from(goerli_id), ChainId::GOERLI); + assert_eq!(ChainId::from(sepolia_id), ChainId::SEPOLIA); + assert_eq!(ChainId::from(felt!("0x1337")), ChainId::Id(felt!("0x1337"))); + + assert_eq!(ChainId::MAINNET.to_string(), "Mainnet"); + assert_eq!(ChainId::GOERLI.to_string(), "Goerli"); + assert_eq!(ChainId::SEPOLIA.to_string(), "Sepolia"); + assert_eq!(ChainId::Id(felt!("0x1337")).to_string(), "0x1337"); + } + + #[test] + fn parse_chain_id() { + let mainnet_id = cairo_short_string_to_felt("SN_MAIN").unwrap(); + let custom_id = cairo_short_string_to_felt("KATANA").unwrap(); + + assert_eq!(ChainId::parse("SN_MAIN").unwrap(), ChainId::MAINNET); + assert_eq!(ChainId::parse("KATANA").unwrap(), ChainId::Id(custom_id)); + assert_eq!(ChainId::parse(&format!("{mainnet_id:#x}")).unwrap(), ChainId::MAINNET); + } +} diff --git a/crates/katana/primitives/src/conversion/blockifier.rs b/crates/katana/primitives/src/conversion/blockifier.rs index 2ed3ca2983..80751fb896 100644 --- a/crates/katana/primitives/src/conversion/blockifier.rs +++ b/crates/katana/primitives/src/conversion/blockifier.rs @@ -1,9 +1,12 @@ //! 
Translation layer for converting the primitive types to the execution engine types. +use starknet::core::utils::parse_cairo_short_string; use starknet_api::core::{ContractAddress, PatriciaKey}; use starknet_api::hash::StarkHash; use starknet_api::patricia_key; +use crate::chain::ChainId; + impl From for ContractAddress { fn from(address: crate::contract::ContractAddress) -> Self { Self(patricia_key!(address.0)) @@ -15,3 +18,31 @@ impl From for crate::contract::ContractAddress { Self((*address.0.key()).into()) } } + +impl From for starknet_api::core::ChainId { + fn from(chain_id: ChainId) -> Self { + let name: String = match chain_id { + ChainId::Named(named) => named.name().to_string(), + ChainId::Id(id) => parse_cairo_short_string(&id).expect("valid cairo string"), + }; + Self(name) + } +} + +#[cfg(test)] +mod tests { + use starknet::core::utils::parse_cairo_short_string; + + use crate::chain::{ChainId, NamedChainId}; + + #[test] + fn convert_chain_id() { + let mainnet = starknet_api::core::ChainId::from(ChainId::Named(NamedChainId::Mainnet)); + let goerli = starknet_api::core::ChainId::from(ChainId::Named(NamedChainId::Goerli)); + let sepolia = starknet_api::core::ChainId::from(ChainId::Named(NamedChainId::Sepolia)); + + assert_eq!(mainnet.0, parse_cairo_short_string(&NamedChainId::Mainnet.id()).unwrap()); + assert_eq!(goerli.0, parse_cairo_short_string(&NamedChainId::Goerli.id()).unwrap()); + assert_eq!(sepolia.0, parse_cairo_short_string(&NamedChainId::Sepolia.id()).unwrap()); + } +} diff --git a/crates/katana/primitives/src/env.rs b/crates/katana/primitives/src/env.rs index 5d7633ce36..64c381ba7c 100644 --- a/crates/katana/primitives/src/env.rs +++ b/crates/katana/primitives/src/env.rs @@ -1,5 +1,6 @@ use std::collections::HashMap; +use crate::chain::ChainId; use crate::contract::ContractAddress; /// Block environment values. @@ -21,7 +22,7 @@ pub struct BlockEnv { #[derive(Debug, Clone)] pub struct CfgEnv { /// The chain id. - pub chain_id: u64, + pub chain_id: ChainId, /// The fee cost of the VM resources. pub vm_resource_fee_cost: HashMap, /// The maximum number of steps allowed for an invoke transaction. diff --git a/crates/katana/primitives/src/lib.rs b/crates/katana/primitives/src/lib.rs index d09bf7c232..f3299e2e6b 100644 --- a/crates/katana/primitives/src/lib.rs +++ b/crates/katana/primitives/src/lib.rs @@ -1,4 +1,5 @@ pub mod block; +pub mod chain; pub mod contract; pub mod env; pub mod event; @@ -12,6 +13,3 @@ pub mod state; pub mod utils; pub type FieldElement = starknet::core::types::FieldElement; - -/// The id of the chain. -pub type ChainId = FieldElement; diff --git a/crates/katana/primitives/src/transaction.rs b/crates/katana/primitives/src/transaction.rs index ab5665d844..033374400b 100644 --- a/crates/katana/primitives/src/transaction.rs +++ b/crates/katana/primitives/src/transaction.rs @@ -1,6 +1,7 @@ use derive_more::{AsRef, Deref}; use ethers::types::H256; +use crate::chain::ChainId; use crate::contract::{ ClassHash, CompiledClassHash, CompiledContractClass, ContractAddress, FlattenedSierraClass, Nonce, @@ -9,7 +10,7 @@ use crate::utils::transaction::{ compute_declare_v1_tx_hash, compute_declare_v2_tx_hash, compute_deploy_account_v1_tx_hash, compute_invoke_v1_tx_hash, compute_l1_handler_tx_hash, }; -use crate::{ChainId, FieldElement}; +use crate::FieldElement; /// The hash of a transaction. 
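The `self.chain_id.into()` arguments in the hash helpers below rely on the `From<ChainId> for FieldElement` impl added in `chain.rs`. A small sketch of what that conversion yields for a named chain, assuming only the APIs introduced by this patch plus `cairo_short_string_to_felt` from `starknet-rs`:

use katana_primitives::chain::ChainId;
use katana_primitives::FieldElement;
use starknet::core::utils::cairo_short_string_to_felt;

fn main() {
    // ChainId::GOERLI wraps NamedChainId::Goerli, whose id is the ASCII felt of "SN_GOERLI".
    let felt: FieldElement = ChainId::GOERLI.into();
    assert_eq!(felt, cairo_short_string_to_felt("SN_GOERLI").unwrap());
}
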
pub type TxHash = FieldElement; @@ -136,7 +137,7 @@ impl InvokeTx { self.sender_address.into(), &self.calldata, self.max_fee, - self.chain_id, + self.chain_id.into(), self.nonce, is_query, ) @@ -195,7 +196,7 @@ impl DeclareTx { tx.sender_address.into(), tx.class_hash, tx.max_fee, - tx.chain_id, + tx.chain_id.into(), tx.nonce, is_query, ), @@ -204,7 +205,7 @@ impl DeclareTx { tx.sender_address.into(), tx.class_hash, tx.max_fee, - tx.chain_id, + tx.chain_id.into(), tx.nonce, tx.compiled_class_hash, is_query, @@ -234,7 +235,7 @@ impl L1HandlerTx { self.contract_address.into(), self.entry_point_selector, &self.calldata, - self.chain_id, + self.chain_id.into(), self.nonce, ) } @@ -263,7 +264,7 @@ impl DeployAccountTx { self.class_hash, self.contract_address_salt, self.max_fee, - self.chain_id, + self.chain_id.into(), self.nonce, is_query, ) diff --git a/crates/katana/rpc/rpc-types/src/message.rs b/crates/katana/rpc/rpc-types/src/message.rs index cff3689a2d..3b6c37b446 100644 --- a/crates/katana/rpc/rpc-types/src/message.rs +++ b/crates/katana/rpc/rpc-types/src/message.rs @@ -1,3 +1,4 @@ +use katana_primitives::chain::ChainId; use katana_primitives::transaction::L1HandlerTx; use katana_primitives::utils::transaction::compute_l1_message_hash; use katana_primitives::FieldElement; @@ -7,7 +8,7 @@ use serde::Deserialize; pub struct MsgFromL1(starknet::core::types::MsgFromL1); impl MsgFromL1 { - pub fn into_tx_with_chain_id(self, chain_id: FieldElement) -> L1HandlerTx { + pub fn into_tx_with_chain_id(self, chain_id: ChainId) -> L1HandlerTx { let message_hash = compute_l1_message_hash( // This conversion will never fail bcs `from_address` is 20 bytes and the it will only // fail if the slice is > 32 bytes diff --git a/crates/katana/rpc/rpc-types/src/transaction.rs b/crates/katana/rpc/rpc-types/src/transaction.rs index 69f4f1740c..00aab586bc 100644 --- a/crates/katana/rpc/rpc-types/src/transaction.rs +++ b/crates/katana/rpc/rpc-types/src/transaction.rs @@ -2,6 +2,7 @@ use std::sync::Arc; use anyhow::Result; use derive_more::Deref; +use katana_primitives::chain::ChainId; use katana_primitives::contract::{ClassHash, ContractAddress}; use katana_primitives::conversion::rpc::{ compiled_class_hash_from_flattened_sierra_class, flattened_sierra_to_compiled_class, @@ -25,7 +26,7 @@ use starknet::core::utils::get_contract_address; pub struct BroadcastedInvokeTx(BroadcastedInvokeTransaction); impl BroadcastedInvokeTx { - pub fn into_tx_with_chain_id(self, chain_id: FieldElement) -> InvokeTx { + pub fn into_tx_with_chain_id(self, chain_id: ChainId) -> InvokeTx { InvokeTx { chain_id, nonce: self.0.nonce, @@ -57,7 +58,7 @@ impl BroadcastedDeclareTx { } /// This function assumes that the compiled class hash is valid. 
- pub fn try_into_tx_with_chain_id(self, chain_id: FieldElement) -> Result { + pub fn try_into_tx_with_chain_id(self, chain_id: ChainId) -> Result { match self.0 { BroadcastedDeclareTransaction::V1(tx) => { let (class_hash, compiled_class) = @@ -112,7 +113,7 @@ impl BroadcastedDeclareTx { pub struct BroadcastedDeployAccountTx(BroadcastedDeployAccountTransaction); impl BroadcastedDeployAccountTx { - pub fn into_tx_with_chain_id(self, chain_id: FieldElement) -> DeployAccountTx { + pub fn into_tx_with_chain_id(self, chain_id: ChainId) -> DeployAccountTx { let contract_address = get_contract_address( self.0.contract_address_salt, self.0.class_hash, @@ -276,7 +277,7 @@ impl From for InvokeTx { calldata: tx.0.calldata, signature: tx.0.signature, version: FieldElement::ONE, - chain_id: FieldElement::ZERO, + chain_id: ChainId::default(), sender_address: tx.0.sender_address.into(), max_fee: tx.0.max_fee.try_into().expect("max_fee is too big"), } @@ -297,7 +298,7 @@ impl From for DeployAccountTx { signature: tx.0.signature, version: FieldElement::ONE, class_hash: tx.0.class_hash, - chain_id: FieldElement::ZERO, + chain_id: ChainId::default(), contract_address: contract_address.into(), constructor_calldata: tx.0.constructor_calldata, contract_address_salt: tx.0.contract_address_salt, diff --git a/crates/katana/rpc/src/starknet.rs b/crates/katana/rpc/src/starknet.rs index a6a1a750b8..51b53b8812 100644 --- a/crates/katana/rpc/src/starknet.rs +++ b/crates/katana/rpc/src/starknet.rs @@ -1,4 +1,3 @@ -use std::str::FromStr; use std::sync::Arc; use jsonrpsee::core::{async_trait, Error}; @@ -47,8 +46,7 @@ impl StarknetApi { #[async_trait] impl StarknetApiServer for StarknetApi { async fn chain_id(&self) -> Result { - let chain_id = self.sequencer.chain_id().as_hex(); - Ok(FieldElement::from_str(&chain_id).map_err(|_| StarknetApiError::UnexpectedError)?.into()) + Ok(FieldElement::from(self.sequencer.chain_id()).into()) } async fn nonce( @@ -401,8 +399,7 @@ impl StarknetApiServer for StarknetApi { return Err(StarknetApiError::UnsupportedTransactionVersion.into()); } - let chain_id = FieldElement::from_hex_be(&self.sequencer.chain_id().as_hex()) - .map_err(|_| StarknetApiError::UnexpectedError)?; + let chain_id = self.sequencer.chain_id(); let tx = deploy_account_transaction.into_tx_with_chain_id(chain_id); let contract_address = tx.contract_address; @@ -420,8 +417,7 @@ impl StarknetApiServer for StarknetApi { request: Vec, block_id: BlockIdOrTag, ) -> Result, Error> { - let chain_id = FieldElement::from_hex_be(&self.sequencer.chain_id().as_hex()) - .map_err(|_| StarknetApiError::UnexpectedError)?; + let chain_id = self.sequencer.chain_id(); let transactions = request .into_iter() @@ -465,8 +461,7 @@ impl StarknetApiServer for StarknetApi { message: MsgFromL1, block_id: BlockIdOrTag, ) -> Result { - let chain_id = FieldElement::from_hex_be(&self.sequencer.chain_id().as_hex()) - .map_err(|_| StarknetApiError::UnexpectedError)?; + let chain_id = self.sequencer.chain_id(); let tx = message.into_tx_with_chain_id(chain_id); let hash = tx.calculate_hash(); @@ -496,8 +491,7 @@ impl StarknetApiServer for StarknetApi { return Err(StarknetApiError::UnsupportedTransactionVersion.into()); } - let chain_id = FieldElement::from_hex_be(&self.sequencer.chain_id().as_hex()) - .map_err(|_| StarknetApiError::UnexpectedError)?; + let chain_id = self.sequencer.chain_id(); // // validate compiled class hash // let is_valid = declare_transaction @@ -529,8 +523,7 @@ impl StarknetApiServer for StarknetApi { return 
Err(StarknetApiError::UnsupportedTransactionVersion.into()); } - let chain_id = FieldElement::from_hex_be(&self.sequencer.chain_id().as_hex()) - .map_err(|_| StarknetApiError::UnexpectedError)?; + let chain_id = self.sequencer.chain_id(); let tx = invoke_transaction.into_tx_with_chain_id(chain_id); let tx = ExecutableTxWithHash::new(ExecutableTx::Invoke(tx)); diff --git a/crates/katana/src/args.rs b/crates/katana/src/args.rs index 9d23d947e3..14237b3ece 100644 --- a/crates/katana/src/args.rs +++ b/crates/katana/src/args.rs @@ -20,6 +20,7 @@ use katana_core::constants::{ DEFAULT_GAS_PRICE, DEFAULT_INVOKE_MAX_STEPS, DEFAULT_VALIDATE_MAX_STEPS, }; use katana_core::sequencer::SequencerConfig; +use katana_primitives::chain::ChainId; use katana_rpc::api::ApiKind; use katana_rpc::config::ServerConfig; use metrics::utils::parse_socket_address; @@ -151,8 +152,12 @@ pub struct StarknetOptions { pub struct EnvironmentOptions { #[arg(long)] #[arg(help = "The chain ID.")] + #[arg(long_help = "The chain ID. If a raw hex string (`0x` prefix) is provided, then it'd \ + used as the actual chain ID. Otherwise, it's represented as the raw \ + ASCII values. It must be a valid Cairo short string.")] #[arg(default_value = "KATANA")] - pub chain_id: String, + #[arg(value_parser = ChainId::parse)] + pub chain_id: ChainId, #[arg(long)] #[arg(help = "The gas price.")] @@ -219,7 +224,7 @@ impl KatanaArgs { fork_rpc_url: self.rpc_url.clone(), fork_block_number: self.fork_block_number, env: Environment { - chain_id: self.starknet.environment.chain_id.clone(), + chain_id: self.starknet.environment.chain_id, gas_price: self.starknet.environment.gas_price.unwrap_or(DEFAULT_GAS_PRICE), invoke_max_steps: self .starknet From b470a0937ebbfc408657009428fe0f16e540ca57 Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Fri, 12 Jan 2024 15:26:04 +0900 Subject: [PATCH 21/33] refactor(katana): improve error handling (#1415) improve error handling --- crates/katana/core/src/backend/mod.rs | 2 +- crates/katana/core/src/sequencer_error.rs | 26 +-- .../katana/executor/src/blockifier/state.rs | 13 +- .../katana/rpc/rpc-types-builder/src/block.rs | 6 +- .../rpc/rpc-types-builder/src/state_update.rs | 3 +- crates/katana/rpc/src/api/starknet.rs | 46 ++++- crates/katana/rpc/src/starknet.rs | 170 ++++++++---------- crates/katana/storage/db/src/error.rs | 24 +-- crates/katana/storage/provider/src/error.rs | 104 +++++++++++ crates/katana/storage/provider/src/lib.rs | 101 +++++++---- .../storage/provider/src/providers/db/mod.rs | 148 +++++++++------ .../provider/src/providers/db/state.rs | 108 ++++++----- .../provider/src/providers/fork/backend.rs | 94 +++++----- .../provider/src/providers/fork/mod.rs | 98 ++++++---- .../provider/src/providers/fork/state.rs | 53 +++--- .../provider/src/providers/in_memory/mod.rs | 87 +++++---- .../provider/src/providers/in_memory/state.rs | 36 ++-- .../storage/provider/src/traits/block.rs | 46 ++--- .../storage/provider/src/traits/contract.rs | 18 +- .../katana/storage/provider/src/traits/env.rs | 5 +- .../storage/provider/src/traits/state.rs | 24 +-- .../provider/src/traits/state_update.rs | 5 +- .../provider/src/traits/transaction.rs | 33 ++-- .../katana/storage/provider/tests/fixtures.rs | 5 +- 24 files changed, 768 insertions(+), 487 deletions(-) create mode 100644 crates/katana/storage/provider/src/error.rs diff --git a/crates/katana/core/src/backend/mod.rs b/crates/katana/core/src/backend/mod.rs index ae04161dbe..61c25b65e5 100644 --- a/crates/katana/core/src/backend/mod.rs +++ 
b/crates/katana/core/src/backend/mod.rs @@ -95,7 +95,7 @@ impl Backend { ); let blockchain = Blockchain::new_from_forked( - ForkedProvider::new(provider, forked_block_num.into()), + ForkedProvider::new(provider, forked_block_num.into()).unwrap(), block.block_hash, block.parent_hash, &block_context, diff --git a/crates/katana/core/src/sequencer_error.rs b/crates/katana/core/src/sequencer_error.rs index 2374112f9e..cdf611d3f3 100644 --- a/crates/katana/core/src/sequencer_error.rs +++ b/crates/katana/core/src/sequencer_error.rs @@ -1,46 +1,26 @@ use blockifier::execution::errors::EntryPointExecutionError; -use blockifier::state::errors::StateError; use blockifier::transaction::errors::TransactionExecutionError; use katana_primitives::block::BlockIdOrTag; use katana_primitives::contract::ContractAddress; use katana_primitives::event::ContinuationTokenError; -use katana_primitives::transaction::TxHash; -use starknet_api::StarknetApiError; +use katana_provider::error::ProviderError; #[derive(Debug, thiserror::Error)] pub enum SequencerError { #[error("Block {0:?} not found.")] BlockNotFound(BlockIdOrTag), - #[error("Contract address {0:?} not found.")] + #[error("Contract address {0} not found.")] ContractNotFound(ContractAddress), - #[error("State update for block {0:?} not found.")] - StateUpdateNotFound(BlockIdOrTag), #[error("State for block {0:?} not found.")] StateNotFound(BlockIdOrTag), - #[error("Transaction with {0} hash not found.")] - TxnNotFound(TxHash), - #[error(transparent)] - State(#[from] StateError), #[error(transparent)] TransactionExecution(#[from] TransactionExecutionError), - #[error("Error converting {from} into {to}: {message}")] - ConversionError { from: String, to: String, message: String }, - #[error(transparent)] - StarknetApi(#[from] StarknetApiError), #[error(transparent)] EntryPointExecution(#[from] EntryPointExecutionError), #[error("Wait for pending transactions.")] PendingTransactions, - #[error("Unsupported Transaction")] - UnsupportedTransaction, #[error(transparent)] ContinuationToken(#[from] ContinuationTokenError), - #[error("Error serializing state.")] - StateSerialization, - #[error("Required data unavailable")] - DataUnavailable, - #[error("Failed to decode state")] - FailedToDecodeStateDump, #[error(transparent)] - Other(#[from] anyhow::Error), + Provider(#[from] ProviderError), } diff --git a/crates/katana/executor/src/blockifier/state.rs b/crates/katana/executor/src/blockifier/state.rs index 12c6456764..5d7f21329d 100644 --- a/crates/katana/executor/src/blockifier/state.rs +++ b/crates/katana/executor/src/blockifier/state.rs @@ -7,6 +7,7 @@ use katana_primitives::contract::FlattenedSierraClass; use katana_primitives::FieldElement; use katana_provider::traits::contract::ContractClassProvider; use katana_provider::traits::state::StateProvider; +use katana_provider::ProviderResult; use parking_lot::{Mutex, RawMutex, RwLock}; use starknet_api::core::{ClassHash, CompiledClassHash, Nonce, PatriciaKey}; use starknet_api::hash::StarkHash; @@ -140,7 +141,7 @@ where fn class( &self, hash: katana_primitives::contract::ClassHash, - ) -> anyhow::Result> { + ) -> ProviderResult> { let Ok(class) = self.inner().get_compiled_contract_class(&ClassHash(hash.into())) else { return Ok(None); }; @@ -150,7 +151,7 @@ where fn compiled_class_hash_of_class_hash( &self, hash: katana_primitives::contract::ClassHash, - ) -> anyhow::Result> { + ) -> ProviderResult> { let Ok(hash) = self.inner().get_compiled_class_hash(ClassHash(hash.into())) else { return Ok(None); }; @@ -160,7 
+161,7 @@ where fn sierra_class( &self, hash: katana_primitives::contract::ClassHash, - ) -> anyhow::Result> { + ) -> ProviderResult> { let class @ Some(_) = self.sierra_class().get(&hash).cloned() else { return Ok(None); }; @@ -176,7 +177,7 @@ where &self, address: katana_primitives::contract::ContractAddress, storage_key: katana_primitives::contract::StorageKey, - ) -> anyhow::Result> { + ) -> ProviderResult> { let Ok(value) = self.inner().get_storage_at(address.into(), StorageKey(patricia_key!(storage_key))) else { @@ -188,7 +189,7 @@ where fn nonce( &self, address: katana_primitives::contract::ContractAddress, - ) -> anyhow::Result> { + ) -> ProviderResult> { let Ok(nonce) = self.inner().get_nonce_at(address.into()) else { return Ok(None); }; @@ -198,7 +199,7 @@ where fn class_hash_of_contract( &self, address: katana_primitives::contract::ContractAddress, - ) -> anyhow::Result> { + ) -> ProviderResult> { let Ok(hash) = self.inner().get_class_hash_at(address.into()) else { return Ok(None); }; diff --git a/crates/katana/rpc/rpc-types-builder/src/block.rs b/crates/katana/rpc/rpc-types-builder/src/block.rs index f894bcb7e7..cd16a71f94 100644 --- a/crates/katana/rpc/rpc-types-builder/src/block.rs +++ b/crates/katana/rpc/rpc-types-builder/src/block.rs @@ -1,6 +1,6 @@ -use anyhow::Result; use katana_primitives::block::BlockHashOrNumber; use katana_provider::traits::block::{BlockHashProvider, BlockProvider, BlockStatusProvider}; +use katana_provider::ProviderResult; use katana_rpc_types::block::{BlockWithTxHashes, BlockWithTxs}; /// A builder for building RPC block types. @@ -19,7 +19,7 @@ impl

<P> BlockBuilder<P>
where P: BlockProvider + BlockHashProvider, { - pub fn build(self) -> Result> { + pub fn build(self) -> ProviderResult> { let Some(hash) = BlockHashProvider::block_hash_by_id(&self.provider, self.block_id)? else { return Ok(None); }; @@ -32,7 +32,7 @@ where Ok(Some(BlockWithTxs::new(hash, block, finality_status))) } - pub fn build_with_tx_hash(self) -> Result> { + pub fn build_with_tx_hash(self) -> ProviderResult> { let Some(hash) = BlockHashProvider::block_hash_by_id(&self.provider, self.block_id)? else { return Ok(None); }; diff --git a/crates/katana/rpc/rpc-types-builder/src/state_update.rs b/crates/katana/rpc/rpc-types-builder/src/state_update.rs index b0f85cbc58..6b82b0748a 100644 --- a/crates/katana/rpc/rpc-types-builder/src/state_update.rs +++ b/crates/katana/rpc/rpc-types-builder/src/state_update.rs @@ -3,6 +3,7 @@ use katana_primitives::FieldElement; use katana_provider::traits::block::{BlockHashProvider, BlockNumberProvider}; use katana_provider::traits::state::StateRootProvider; use katana_provider::traits::state_update::StateUpdateProvider; +use katana_provider::ProviderResult; use katana_rpc_types::state_update::{StateDiff, StateUpdate}; /// A builder for building RPC state update type. @@ -22,7 +23,7 @@ where P: BlockHashProvider + BlockNumberProvider + StateRootProvider + StateUpdateProvider, { /// Builds a state update for the given block. - pub fn build(self) -> anyhow::Result> { + pub fn build(self) -> ProviderResult> { let Some(block_hash) = BlockHashProvider::block_hash_by_id(&self.provider, self.block_id)? else { return Ok(None); diff --git a/crates/katana/rpc/src/api/starknet.rs b/crates/katana/rpc/src/api/starknet.rs index e619dc8156..b3833488ed 100644 --- a/crates/katana/rpc/src/api/starknet.rs +++ b/crates/katana/rpc/src/api/starknet.rs @@ -1,9 +1,11 @@ use jsonrpsee::core::Error; use jsonrpsee::proc_macros::rpc; use jsonrpsee::types::error::{CallError, ErrorObject}; +use katana_core::sequencer_error::SequencerError; use katana_primitives::block::{BlockIdOrTag, BlockNumber}; use katana_primitives::transaction::TxHash; use katana_primitives::FieldElement; +use katana_provider::error::ProviderError; use katana_rpc_types::block::{ BlockHashAndNumber, BlockTxCount, MaybePendingBlockWithTxHashes, MaybePendingBlockWithTxs, }; @@ -72,7 +74,7 @@ pub enum StarknetApiError { #[error("The contract class version is not supported")] UnsupportedContractClassVersion, #[error("An unexpected error occured")] - UnexpectedError, + UnexpectedError { reason: String }, #[error("Too many storage keys requested")] ProofLimitExceeded, #[error("Too many keys provided in a filter")] @@ -111,25 +113,57 @@ impl StarknetApiError { StarknetApiError::CompiledClassHashMismatch => 60, StarknetApiError::UnsupportedTransactionVersion => 61, StarknetApiError::UnsupportedContractClassVersion => 62, - StarknetApiError::UnexpectedError => 63, + StarknetApiError::UnexpectedError { .. 
} => 63, StarknetApiError::ProofLimitExceeded => 10000, } } } +impl From for StarknetApiError { + fn from(value: ProviderError) -> Self { + StarknetApiError::UnexpectedError { reason: value.to_string() } + } +} + impl From for Error { fn from(err: StarknetApiError) -> Self { let code = err.code(); let message = err.to_string(); - let data = match err { + let err = match err { StarknetApiError::ContractError { revert_error } => { - Some(ContractErrorData { revert_error }) + ErrorObject::owned(code, message, Some(ContractErrorData { revert_error })) } - _ => None, + + StarknetApiError::UnexpectedError { reason } => { + #[derive(serde::Serialize, serde::Deserialize)] + struct UnexpectedError { + reason: String, + } + + ErrorObject::owned(code, message, Some(UnexpectedError { reason })) + } + + _ => ErrorObject::owned(code, message, None::<()>), }; - Error::Call(CallError::Custom(ErrorObject::owned(code, message, data))) + Error::Call(CallError::Custom(err)) + } +} + +impl From for StarknetApiError { + fn from(value: SequencerError) -> Self { + match value { + SequencerError::TransactionExecution(e) => { + StarknetApiError::ContractError { revert_error: e.to_string() } + } + SequencerError::EntryPointExecution(e) => { + StarknetApiError::ContractError { revert_error: e.to_string() } + } + SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, + SequencerError::ContractNotFound(_) => StarknetApiError::ContractNotFound, + err => StarknetApiError::UnexpectedError { reason: err.to_string() }, + } } } diff --git a/crates/katana/rpc/src/starknet.rs b/crates/katana/rpc/src/starknet.rs index 51b53b8812..3fe97d4375 100644 --- a/crates/katana/rpc/src/starknet.rs +++ b/crates/katana/rpc/src/starknet.rs @@ -3,7 +3,6 @@ use std::sync::Arc; use jsonrpsee::core::{async_trait, Error}; use katana_core::backend::contract::StarknetContract; use katana_core::sequencer::KatanaSequencer; -use katana_core::sequencer_error::SequencerError; use katana_executor::blockifier::utils::EntryPointCall; use katana_primitives::block::{ BlockHashOrNumber, BlockIdOrTag, FinalityStatus, GasPrices, PartialHeader, @@ -58,10 +57,7 @@ impl StarknetApiServer for StarknetApi { .sequencer .nonce_at(block_id, contract_address.into()) .await - .map_err(|e| match e { - SequencerError::StateNotFound(_) => StarknetApiError::BlockNotFound, - _ => StarknetApiError::UnexpectedError, - })? + .map_err(StarknetApiError::from)? .ok_or(StarknetApiError::ContractNotFound)?; Ok(nonce.into()) @@ -75,7 +71,7 @@ impl StarknetApiServer for StarknetApi { let tx = self .sequencer .transaction(&transaction_hash) - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(StarknetApiError::from)? .ok_or(StarknetApiError::TxnHashNotFound)?; Ok(tx.into()) } @@ -84,7 +80,7 @@ impl StarknetApiServer for StarknetApi { let count = self .sequencer .block_tx_count(block_id) - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(StarknetApiError::from)? .ok_or(StarknetApiError::BlockNotFound)?; Ok(count) } @@ -97,17 +93,15 @@ impl StarknetApiServer for StarknetApi { let class_hash = self .sequencer .class_hash_at(block_id, contract_address.into()) - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(StarknetApiError::from)? 
.ok_or(StarknetApiError::ContractNotFound)?; self.class(block_id, class_hash).await } async fn block_hash_and_number(&self) -> Result { - let hash_and_num_pair = self - .sequencer - .block_hash_and_number() - .map_err(|_| StarknetApiError::UnexpectedError)?; + let hash_and_num_pair = + self.sequencer.block_hash_and_number().map_err(StarknetApiError::from)?; Ok(hash_and_num_pair.into()) } @@ -121,7 +115,8 @@ impl StarknetApiServer for StarknetApi { let pending_state = self.sequencer.pending_state().expect("pending state should exist"); let block_context = self.sequencer.backend.env.read().block.clone(); - let latest_hash = BlockHashProvider::latest_hash(provider)?; + let latest_hash = + BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; let gas_prices = GasPrices { eth_gas_price: block_context.gas_prices.eth_l1_gas_price.try_into().unwrap(), @@ -145,13 +140,13 @@ impl StarknetApiServer for StarknetApi { ))) } else { let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(StarknetApiError::from)? .map(BlockHashOrNumber::Num) .ok_or(StarknetApiError::BlockNotFound)?; katana_rpc_types_builder::BlockBuilder::new(block_num, provider) .build_with_tx_hash() - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(StarknetApiError::from)? .map(MaybePendingBlockWithTxHashes::Block) .ok_or(Error::from(StarknetApiError::BlockNotFound)) } @@ -174,12 +169,12 @@ impl StarknetApiServer for StarknetApi { let provider = &self.sequencer.backend.blockchain.provider(); let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(StarknetApiError::from)? .map(BlockHashOrNumber::Num) .ok_or(StarknetApiError::BlockNotFound)?; TransactionProvider::transaction_by_block_and_idx(provider, block_num, index) - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(StarknetApiError::from)? }; Ok(tx.ok_or(StarknetApiError::InvalidTxnIndex)?.into()) @@ -195,7 +190,8 @@ impl StarknetApiServer for StarknetApi { let pending_state = self.sequencer.pending_state().expect("pending state should exist"); let block_context = self.sequencer.backend.env.read().block.clone(); - let latest_hash = BlockHashProvider::latest_hash(provider)?; + let latest_hash = + BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; let gas_prices = GasPrices { eth_gas_price: block_context.gas_prices.eth_l1_gas_price.try_into().unwrap(), @@ -220,13 +216,13 @@ impl StarknetApiServer for StarknetApi { Ok(MaybePendingBlockWithTxs::Pending(PendingBlockWithTxs::new(header, transactions))) } else { let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? .map(BlockHashOrNumber::Num) .ok_or(StarknetApiError::BlockNotFound)?; katana_rpc_types_builder::BlockBuilder::new(block_num, provider) .build() - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? 
.map(MaybePendingBlockWithTxs::Block) .ok_or(Error::from(StarknetApiError::BlockNotFound)) } @@ -242,6 +238,7 @@ impl StarknetApiServer for StarknetApi { BlockIdOrTag::Tag(BlockTag::Latest) => BlockNumberProvider::latest_number(provider) .map(BlockHashOrNumber::Num) .map_err(|_| StarknetApiError::BlockNotFound)?, + BlockIdOrTag::Tag(BlockTag::Pending) => { return Err(StarknetApiError::BlockNotFound.into()); } @@ -249,7 +246,7 @@ impl StarknetApiServer for StarknetApi { katana_rpc_types_builder::StateUpdateBuilder::new(block_id, provider) .build() - .map_err(|_| StarknetApiError::UnexpectedError)? + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? .ok_or(Error::from(StarknetApiError::BlockNotFound)) } @@ -260,7 +257,7 @@ impl StarknetApiServer for StarknetApi { let provider = self.sequencer.backend.blockchain.provider(); let receipt = ReceiptBuilder::new(transaction_hash, provider) .build() - .map_err(|_| StarknetApiError::UnexpectedError)?; + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })?; match receipt { Some(receipt) => Ok(MaybePendingTxReceipt::Receipt(receipt)), @@ -294,11 +291,8 @@ impl StarknetApiServer for StarknetApi { let hash = self .sequencer .class_hash_at(block_id, contract_address.into()) - .map_err(|e| match e { - SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, - _ => StarknetApiError::UnexpectedError, - })? - .ok_or(Error::from(StarknetApiError::ContractNotFound))?; + .map_err(StarknetApiError::from)? + .ok_or(StarknetApiError::ContractNotFound)?; Ok(hash.into()) } @@ -308,17 +302,13 @@ impl StarknetApiServer for StarknetApi { block_id: BlockIdOrTag, class_hash: FieldElement, ) -> Result { - let class = self.sequencer.class(block_id, class_hash).map_err(|e| match e { - SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, - _ => StarknetApiError::UnexpectedError, - })?; - + let class = self.sequencer.class(block_id, class_hash).map_err(StarknetApiError::from)?; let Some(class) = class else { return Err(StarknetApiError::ClassHashNotFound.into()) }; match class { StarknetContract::Legacy(c) => { - let contract = - legacy_inner_to_rpc_class(c).map_err(|_| StarknetApiError::UnexpectedError)?; + let contract = legacy_inner_to_rpc_class(c) + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })?; Ok(contract) } StarknetContract::Sierra(c) => Ok(ContractClass::Sierra(c)), @@ -343,10 +333,7 @@ impl StarknetApiServer for StarknetApi { filter.result_page_request.chunk_size, ) .await - .map_err(|e| match e { - SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, - _ => StarknetApiError::UnexpectedError, - })?; + .map_err(StarknetApiError::from)?; Ok(events) } @@ -362,14 +349,7 @@ impl StarknetApiServer for StarknetApi { entry_point_selector: request.entry_point_selector, }; - let res = self.sequencer.call(request, block_id).map_err(|e| match e { - SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, - SequencerError::ContractNotFound(_) => StarknetApiError::ContractNotFound, - SequencerError::EntryPointExecution(e) => { - StarknetApiError::ContractError { revert_error: e.to_string() } - } - _ => StarknetApiError::UnexpectedError, - })?; + let res = self.sequencer.call(request, block_id).map_err(StarknetApiError::from)?; Ok(res.into_iter().map(|v| v.into()).collect()) } @@ -380,13 +360,10 @@ impl StarknetApiServer for StarknetApi { key: FieldElement, block_id: BlockIdOrTag, ) -> Result { - let value = 
self.sequencer.storage_at(contract_address.into(), key, block_id).map_err( - |e| match e { - SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, - SequencerError::ContractNotFound(_) => StarknetApiError::ContractNotFound, - _ => StarknetApiError::UnexpectedError, - }, - )?; + let value = self + .sequencer + .storage_at(contract_address.into(), key, block_id) + .map_err(StarknetApiError::from)?; Ok(value.into()) } @@ -445,13 +422,8 @@ impl StarknetApiServer for StarknetApi { }) .collect::, _>>()?; - let res = self.sequencer.estimate_fee(transactions, block_id).map_err(|e| match e { - SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, - SequencerError::TransactionExecution(e) => { - StarknetApiError::ContractError { revert_error: e.to_string() } - } - _ => StarknetApiError::UnexpectedError, - })?; + let res = + self.sequencer.estimate_fee(transactions, block_id).map_err(StarknetApiError::from)?; Ok(res) } @@ -470,13 +442,7 @@ impl StarknetApiServer for StarknetApi { let res = self .sequencer .estimate_fee(vec![tx], block_id) - .map_err(|e| match e { - SequencerError::BlockNotFound(_) => StarknetApiError::BlockNotFound, - SequencerError::TransactionExecution(e) => { - StarknetApiError::ContractError { revert_error: e.to_string() } - } - _ => StarknetApiError::UnexpectedError, - })? + .map_err(StarknetApiError::from)? .pop() .expect("should have estimate result"); @@ -541,51 +507,55 @@ impl StarknetApiServer for StarknetApi { let provider = self.sequencer.backend.blockchain.provider(); let tx_status = TransactionStatusProvider::transaction_status(provider, transaction_hash) - .map_err(|_| StarknetApiError::UnexpectedError)?; + .map_err(StarknetApiError::from)?; if let Some(status) = tx_status { - let receipt = ReceiptProvider::receipt_by_hash(provider, transaction_hash) - .map_err(|_| StarknetApiError::UnexpectedError)? - .ok_or(StarknetApiError::UnexpectedError)?; + if let Some(receipt) = ReceiptProvider::receipt_by_hash(provider, transaction_hash) + .map_err(StarknetApiError::from)? 
+ { + let execution_status = if receipt.is_reverted() { + TransactionExecutionStatus::Reverted + } else { + TransactionExecutionStatus::Succeeded + }; + + return Ok(match status { + FinalityStatus::AcceptedOnL1 => { + TransactionStatus::AcceptedOnL1(execution_status) + } + FinalityStatus::AcceptedOnL2 => { + TransactionStatus::AcceptedOnL2(execution_status) + } + }); + } + } - let execution_status = if receipt.is_reverted() { + let pending_state = self.sequencer.pending_state(); + let state = pending_state.ok_or(StarknetApiError::TxnHashNotFound)?; + let executed_txs = state.executed_txs.read(); + + // attempts to find in the valid transactions list first (executed_txs) + // if not found, then search in the rejected transactions list (rejected_txs) + if let Some(is_reverted) = executed_txs + .iter() + .find(|(tx, _)| tx.hash == transaction_hash) + .map(|(_, rct)| rct.receipt.is_reverted()) + { + let exec_status = if is_reverted { TransactionExecutionStatus::Reverted } else { TransactionExecutionStatus::Succeeded }; - Ok(match status { - FinalityStatus::AcceptedOnL1 => TransactionStatus::AcceptedOnL1(execution_status), - FinalityStatus::AcceptedOnL2 => TransactionStatus::AcceptedOnL2(execution_status), - }) + Ok(TransactionStatus::AcceptedOnL2(exec_status)) } else { - let pending_state = self.sequencer.pending_state(); - let state = pending_state.ok_or(StarknetApiError::TxnHashNotFound)?; - let executed_txs = state.executed_txs.read(); + let rejected_txs = state.rejected_txs.read(); - // attemps to find in the valid transactions list first (executed_txs) - // if not found, then search in the rejected transactions list (rejected_txs) - if let Some(is_reverted) = executed_txs + rejected_txs .iter() .find(|(tx, _)| tx.hash == transaction_hash) - .map(|(_, rct)| rct.receipt.is_reverted()) - { - let exec_status = if is_reverted { - TransactionExecutionStatus::Reverted - } else { - TransactionExecutionStatus::Succeeded - }; - - Ok(TransactionStatus::AcceptedOnL2(exec_status)) - } else { - let rejected_txs = state.rejected_txs.read(); - - rejected_txs - .iter() - .find(|(tx, _)| tx.hash == transaction_hash) - .map(|_| TransactionStatus::Rejected) - .ok_or(Error::from(StarknetApiError::TxnHashNotFound)) - } + .map(|_| TransactionStatus::Rejected) + .ok_or(Error::from(StarknetApiError::TxnHashNotFound)) } } } diff --git a/crates/katana/storage/db/src/error.rs b/crates/katana/storage/db/src/error.rs index 05f12ab7af..27f05d457e 100644 --- a/crates/katana/storage/db/src/error.rs +++ b/crates/katana/storage/db/src/error.rs @@ -1,42 +1,42 @@ #[derive(Debug, PartialEq, Eq, thiserror::Error)] pub enum DatabaseError { - #[error("failed to open an environment: {0}")] + #[error("failed to open db environment: {0}")] OpenEnv(libmdbx::Error), #[error(transparent)] Codec(#[from] CodecError), - #[error("failed to create table: {0}")] + #[error("failed to create db table: {0}")] CreateTable(libmdbx::Error), - #[error("failed to commit transaction: {0}")] + #[error("failed to commit db transaction: {0}")] Commit(libmdbx::Error), - #[error("failed to read: {0}")] + #[error("failed to read db: {0}")] Read(libmdbx::Error), - #[error("failed to write to table {table} with key {key:?}: {error}")] + #[error("failed to write to db table {table} with key {key:?}: {error}")] Write { error: libmdbx::Error, table: &'static str, key: Box<[u8]> }, - #[error("failed to open database: {0}")] + #[error("failed to open db: {0}")] OpenDb(libmdbx::Error), - #[error("failed to retrieve database statistics: {0}")] + #[error("failed to 
retrieve db statistics: {0}")] Stat(libmdbx::Error), - #[error("failed to create cursor: {0}")] + #[error("failed to create db cursor: {0}")] CreateCursor(libmdbx::Error), - #[error("failed to create read-only transaction: {0}")] + #[error("failed to create read-only db transaction: {0}")] CreateROTx(libmdbx::Error), - #[error("failed to create a read-write transaction: {0}")] + #[error("failed to create a read-write db transaction: {0}")] CreateRWTx(libmdbx::Error), - #[error("failed to delete entry: {0}")] + #[error("failed to delete a db entry: {0}")] Delete(libmdbx::Error), - #[error("failed to clear database: {0}")] + #[error("failed to clear db: {0}")] Clear(libmdbx::Error), } diff --git a/crates/katana/storage/provider/src/error.rs b/crates/katana/storage/provider/src/error.rs new file mode 100644 index 0000000000..8c8ced7fb9 --- /dev/null +++ b/crates/katana/storage/provider/src/error.rs @@ -0,0 +1,104 @@ +use katana_db::error::DatabaseError; +use katana_primitives::block::BlockNumber; +use katana_primitives::contract::{ClassHash, ContractAddress, StorageKey}; +use katana_primitives::transaction::TxNumber; + +use crate::providers::fork::backend::ForkedBackendError; + +/// Possible errors returned by the storage provider. +#[derive(Debug, thiserror::Error)] +pub enum ProviderError { + /// Error for anything related to parsing data. + #[error("Parsing error: {0}")] + ParsingError(String), + + #[error("Missing latest block hash")] + MissingLatestBlockHash, + + /// Error when the block hash is not found when it should be. + #[error("Missing block hash for block number {0}")] + MissingBlockHash(BlockNumber), + + /// Error when the block header is not found when it should be. + #[error("Missing block header for block number {0}")] + MissingBlockHeader(BlockNumber), + + /// Error when the block body is not found but the block exists. + #[error("Missing block transactions for block number {0}")] + MissingBlockTxs(BlockNumber), + + /// Error when the block body indices are not found but the block exists. + #[error("Missing block body indices for block number {0}")] + MissingBlockBodyIndices(BlockNumber), + + /// Error when the block status is not found but the block exists. + #[error("Missing block status for block number {0}")] + MissingBlockStatus(BlockNumber), + + /// Error when a full transaction data is not found but its hash/number exists. + #[error("Missing transaction for tx number {0}")] + MissingTx(TxNumber), + + /// Error when a transaction block number is not found but the transaction exists. + #[error("Missing transaction block number for tx number {0}")] + MissingTxBlock(TxNumber), + + /// Error when a transaction hash is not found but the transaction exists. + #[error("Missing transaction hash for tx number {0}")] + MissingTxHash(TxNumber), + + /// Error when a transaction receipt is not found but the transaction exists. + #[error("Missing transaction receipt for tx number {0}")] + MissingTxReceipt(TxNumber), + + /// Error when a compiled class hash is not found but the class hash exists. + #[error("Missing compiled class hash for class hash {0:#x}")] + MissingCompiledClassHash(ClassHash), + + /// Error when a contract class change entry is not found but the block number of when the + /// change happen exists in the class change list. + #[error("Missing contract class change entry")] + MissingContractClassChangeEntry { + /// The block number of when the change happen. + block: BlockNumber, + /// The updated contract address. 
+ contract_address: ContractAddress, + }, + + /// Error when a contract nonce change entry is not found but the block number of when the + /// change happen exists in the nonce change list. + #[error( + "Missing contract nonce change entry for contract {contract_address} at block {block}" + )] + MissingContractNonceChangeEntry { + /// The block number of when the change happen. + block: BlockNumber, + /// The updated contract address. + contract_address: ContractAddress, + }, + + /// Error when a storage change entry is not found but the block number of when the change + /// happen exists in the storage change list. + #[error( + "Missing storage change entry for contract {contract_address} at block {block} for key \ + {storage_key:#x}" + )] + MissingStorageChangeEntry { + /// The block number of when the change happen. + block: BlockNumber, + /// The updated contract address. + contract_address: ContractAddress, + /// The updated storage key. + storage_key: StorageKey, + }, + + /// Error returned by the database implementation. + #[error(transparent)] + Database(#[from] DatabaseError), + + /// Error returned by a [ForkedBackend](crate::providers::fork::backend::ForkedBackend) used by + /// [ForkedProvider](crate::providers::fork::ForkedProvider). + #[cfg(feature = "fork")] + #[error(transparent)] + ForkedBackend(#[from] ForkedBackendError), +} diff --git a/crates/katana/storage/provider/src/lib.rs b/crates/katana/storage/provider/src/lib.rs index 351cdbd2d4..4176240a97 100644 --- a/crates/katana/storage/provider/src/lib.rs +++ b/crates/katana/storage/provider/src/lib.rs @@ -1,6 +1,5 @@ use std::ops::{Range, RangeInclusive}; -use anyhow::Result; use katana_db::models::block::StoredBlockBodyIndices; use katana_primitives::block::{ Block, BlockHash, BlockHashOrNumber, BlockNumber, BlockWithTxHashes, FinalityStatus, Header, @@ -19,6 +18,7 @@ use traits::contract::{ContractClassProvider, ContractClassWriter}; use traits::state::{StateRootProvider, StateWriter}; use traits::transaction::TransactionStatusProvider; +pub mod error; pub mod providers; pub mod traits; @@ -28,6 +28,9 @@ use crate::traits::state::{StateFactoryProvider, StateProvider}; use crate::traits::state_update::StateUpdateProvider; use crate::traits::transaction::{ReceiptProvider, TransactionProvider, TransactionsProviderExt}; +/// A result type for blockchain providers. +pub type ProviderResult = Result; + /// A blockchain provider that can be used to access the storage. /// /// Serves as the main entrypoint for interacting with the storage storage. 
Every read/write @@ -46,19 +49,25 @@ impl BlockProvider for BlockchainProvider where Db: BlockProvider, { - fn block(&self, id: BlockHashOrNumber) -> Result> { + fn block(&self, id: BlockHashOrNumber) -> ProviderResult> { self.provider.block(id) } - fn block_with_tx_hashes(&self, id: BlockHashOrNumber) -> Result> { + fn block_with_tx_hashes( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { self.provider.block_with_tx_hashes(id) } - fn blocks_in_range(&self, range: RangeInclusive) -> Result> { + fn blocks_in_range(&self, range: RangeInclusive) -> ProviderResult> { self.provider.blocks_in_range(range) } - fn block_body_indices(&self, id: BlockHashOrNumber) -> Result> { + fn block_body_indices( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { self.provider.block_body_indices(id) } } @@ -67,7 +76,7 @@ impl HeaderProvider for BlockchainProvider where Db: HeaderProvider, { - fn header(&self, id: BlockHashOrNumber) -> Result> { + fn header(&self, id: BlockHashOrNumber) -> ProviderResult> { self.provider.header(id) } } @@ -76,11 +85,11 @@ impl BlockNumberProvider for BlockchainProvider where Db: BlockNumberProvider, { - fn latest_number(&self) -> Result { + fn latest_number(&self) -> ProviderResult { self.provider.latest_number() } - fn block_number_by_hash(&self, hash: BlockHash) -> Result> { + fn block_number_by_hash(&self, hash: BlockHash) -> ProviderResult> { self.provider.block_number_by_hash(hash) } } @@ -89,11 +98,11 @@ impl BlockHashProvider for BlockchainProvider where Db: BlockHashProvider, { - fn latest_hash(&self) -> Result { + fn latest_hash(&self) -> ProviderResult { self.provider.latest_hash() } - fn block_hash_by_num(&self, num: BlockNumber) -> Result> { + fn block_hash_by_num(&self, num: BlockNumber) -> ProviderResult> { self.provider.block_hash_by_num(num) } } @@ -104,7 +113,7 @@ impl BlockStatusProvider for BlockchainProvider where Db: BlockStatusProvider, { - fn block_status(&self, id: BlockHashOrNumber) -> Result> { + fn block_status(&self, id: BlockHashOrNumber) -> ProviderResult> { self.provider.block_status(id) } } @@ -118,7 +127,7 @@ where block: SealedBlockWithStatus, states: StateUpdatesWithDeclaredClasses, receipts: Vec, - ) -> Result<()> { + ) -> ProviderResult<()> { self.provider.insert_block_with_states_and_receipts(block, states, receipts) } } @@ -127,14 +136,14 @@ impl TransactionProvider for BlockchainProvider where Db: TransactionProvider, { - fn transaction_by_hash(&self, hash: TxHash) -> Result> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { self.provider.transaction_by_hash(hash) } fn transactions_by_block( &self, block_id: BlockHashOrNumber, - ) -> Result>> { + ) -> ProviderResult>> { self.provider.transactions_by_block(block_id) } @@ -142,18 +151,21 @@ where &self, block_id: BlockHashOrNumber, idx: u64, - ) -> Result> { + ) -> ProviderResult> { self.provider.transaction_by_block_and_idx(block_id, idx) } - fn transaction_count_by_block(&self, block_id: BlockHashOrNumber) -> Result> { + fn transaction_count_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult> { self.provider.transaction_count_by_block(block_id) } fn transaction_block_num_and_hash( &self, hash: TxHash, - ) -> Result> { + ) -> ProviderResult> { TransactionProvider::transaction_block_num_and_hash(&self.provider, hash) } } @@ -162,7 +174,7 @@ impl TransactionStatusProvider for BlockchainProvider where Db: TransactionStatusProvider, { - fn transaction_status(&self, hash: TxHash) -> Result> { + fn transaction_status(&self, hash: TxHash) 
-> ProviderResult> { TransactionStatusProvider::transaction_status(&self.provider, hash) } } @@ -171,7 +183,7 @@ impl TransactionsProviderExt for BlockchainProvider where Db: TransactionsProviderExt, { - fn transaction_hashes_in_range(&self, range: Range) -> Result> { + fn transaction_hashes_in_range(&self, range: Range) -> ProviderResult> { TransactionsProviderExt::transaction_hashes_in_range(&self.provider, range) } } @@ -180,11 +192,14 @@ impl ReceiptProvider for BlockchainProvider where Db: ReceiptProvider, { - fn receipt_by_hash(&self, hash: TxHash) -> Result> { + fn receipt_by_hash(&self, hash: TxHash) -> ProviderResult> { self.provider.receipt_by_hash(hash) } - fn receipts_by_block(&self, block_id: BlockHashOrNumber) -> Result>> { + fn receipts_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { self.provider.receipts_by_block(block_id) } } @@ -196,7 +211,7 @@ where fn nonce( &self, address: ContractAddress, - ) -> Result> { + ) -> ProviderResult> { self.provider.nonce(address) } @@ -204,11 +219,14 @@ where &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { self.provider.storage(address, storage_key) } - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { self.provider.class_hash_of_contract(address) } } @@ -220,15 +238,15 @@ where fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { self.provider.compiled_class_hash_of_class_hash(hash) } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { self.provider.class(hash) } - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { self.provider.sierra_class(hash) } } @@ -237,11 +255,14 @@ impl StateFactoryProvider for BlockchainProvider where Db: StateFactoryProvider, { - fn latest(&self) -> Result> { + fn latest(&self) -> ProviderResult> { self.provider.latest() } - fn historical(&self, block_id: BlockHashOrNumber) -> Result>> { + fn historical( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { self.provider.historical(block_id) } } @@ -250,7 +271,7 @@ impl StateUpdateProvider for BlockchainProvider where Db: StateUpdateProvider, { - fn state_update(&self, block_id: BlockHashOrNumber) -> Result> { + fn state_update(&self, block_id: BlockHashOrNumber) -> ProviderResult> { self.provider.state_update(block_id) } } @@ -259,7 +280,7 @@ impl ContractInfoProvider for BlockchainProvider where Db: ContractInfoProvider, { - fn contract(&self, address: ContractAddress) -> Result> { + fn contract(&self, address: ContractAddress) -> ProviderResult> { self.provider.contract(address) } } @@ -268,7 +289,7 @@ impl StateRootProvider for BlockchainProvider where Db: StateRootProvider, { - fn state_root(&self, block_id: BlockHashOrNumber) -> Result> { + fn state_root(&self, block_id: BlockHashOrNumber) -> ProviderResult> { self.provider.state_root(block_id) } } @@ -277,7 +298,7 @@ impl ContractClassWriter for BlockchainProvider where Db: ContractClassWriter, { - fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> Result<()> { + fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> ProviderResult<()> { self.provider.set_class(hash, class) } @@ -285,11 +306,15 @@ where &self, hash: ClassHash, compiled_hash: CompiledClassHash, - ) -> Result<()> { + ) -> ProviderResult<()> 
{ self.provider.set_compiled_class_hash_of_class_hash(hash, compiled_hash) } - fn set_sierra_class(&self, hash: ClassHash, sierra: FlattenedSierraClass) -> Result<()> { + fn set_sierra_class( + &self, + hash: ClassHash, + sierra: FlattenedSierraClass, + ) -> ProviderResult<()> { self.provider.set_sierra_class(hash, sierra) } } @@ -303,7 +328,7 @@ where address: ContractAddress, storage_key: StorageKey, storage_value: StorageValue, - ) -> Result<()> { + ) -> ProviderResult<()> { self.provider.set_storage(address, storage_key, storage_value) } @@ -311,7 +336,7 @@ where &self, address: ContractAddress, class_hash: ClassHash, - ) -> Result<()> { + ) -> ProviderResult<()> { self.provider.set_class_hash_of_contract(address, class_hash) } @@ -319,7 +344,7 @@ where &self, address: ContractAddress, nonce: katana_primitives::contract::Nonce, - ) -> Result<()> { + ) -> ProviderResult<()> { self.provider.set_nonce(address, nonce) } } diff --git a/crates/katana/storage/provider/src/providers/db/mod.rs b/crates/katana/storage/provider/src/providers/db/mod.rs index 14568e8bd2..a2d99a310e 100644 --- a/crates/katana/storage/provider/src/providers/db/mod.rs +++ b/crates/katana/storage/provider/src/providers/db/mod.rs @@ -4,7 +4,6 @@ use std::collections::HashMap; use std::fmt::Debug; use std::ops::{Range, RangeInclusive}; -use anyhow::Result; use katana_db::error::DatabaseError; use katana_db::mdbx::{self, DbEnv}; use katana_db::models::block::StoredBlockBodyIndices; @@ -35,6 +34,7 @@ use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; use katana_primitives::transaction::{TxHash, TxNumber, TxWithHash}; use katana_primitives::FieldElement; +use crate::error::ProviderError; use crate::traits::block::{ BlockHashProvider, BlockNumberProvider, BlockProvider, BlockStatusProvider, BlockWriter, HeaderProvider, @@ -44,6 +44,7 @@ use crate::traits::state_update::StateUpdateProvider; use crate::traits::transaction::{ ReceiptProvider, TransactionProvider, TransactionStatusProvider, TransactionsProviderExt, }; +use crate::ProviderResult; /// A provider implementation that uses a database as a backend. #[derive(Debug)] @@ -57,11 +58,14 @@ impl DbProvider { } impl StateFactoryProvider for DbProvider { - fn latest(&self) -> Result> { + fn latest(&self) -> ProviderResult> { Ok(Box::new(self::state::LatestStateProvider::new(self.0.tx()?))) } - fn historical(&self, block_id: BlockHashOrNumber) -> Result>> { + fn historical( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { let block_number = match block_id { BlockHashOrNumber::Num(num) => { let latest_num = self.latest_number()?; @@ -83,14 +87,14 @@ impl StateFactoryProvider for DbProvider { } impl BlockNumberProvider for DbProvider { - fn block_number_by_hash(&self, hash: BlockHash) -> Result> { + fn block_number_by_hash(&self, hash: BlockHash) -> ProviderResult> { let db_tx = self.0.tx()?; let block_num = db_tx.get::(hash)?; db_tx.commit()?; Ok(block_num) } - fn latest_number(&self) -> Result { + fn latest_number(&self) -> ProviderResult { let db_tx = self.0.tx()?; let total_blocks = db_tx.entries::()? as u64; db_tx.commit()?; @@ -99,16 +103,16 @@ impl BlockNumberProvider for DbProvider { } impl BlockHashProvider for DbProvider { - fn latest_hash(&self) -> Result { + fn latest_hash(&self) -> ProviderResult { let db_tx = self.0.tx()?; let total_blocks = db_tx.entries::()? 
as u64; let latest_block = if total_blocks == 0 { 0 } else { total_blocks - 1 }; - let latest_hash = db_tx.get::(latest_block)?.expect("block hash should exist"); + let latest_hash = db_tx.get::(latest_block)?; db_tx.commit()?; - Ok(latest_hash) + latest_hash.ok_or(ProviderError::MissingLatestBlockHash) } - fn block_hash_by_num(&self, num: BlockNumber) -> Result> { + fn block_hash_by_num(&self, num: BlockNumber) -> ProviderResult> { let db_tx = self.0.tx()?; let block_hash = db_tx.get::(num)?; db_tx.commit()?; @@ -117,7 +121,7 @@ impl BlockHashProvider for DbProvider { } impl HeaderProvider for DbProvider { - fn header(&self, id: BlockHashOrNumber) -> Result> { + fn header(&self, id: BlockHashOrNumber) -> ProviderResult> { let db_tx = self.0.tx()?; let num = match id { @@ -126,7 +130,8 @@ impl HeaderProvider for DbProvider { }; if let Some(num) = num { - let header = db_tx.get::(num)?.expect("should exist"); + let header = + db_tx.get::(num)?.ok_or(ProviderError::MissingBlockHeader(num))?; db_tx.commit()?; Ok(Some(header)) } else { @@ -136,7 +141,10 @@ impl HeaderProvider for DbProvider { } impl BlockProvider for DbProvider { - fn block_body_indices(&self, id: BlockHashOrNumber) -> Result> { + fn block_body_indices( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { let db_tx = self.0.tx()?; let block_num = match id { @@ -153,11 +161,13 @@ impl BlockProvider for DbProvider { } } - fn block(&self, id: BlockHashOrNumber) -> Result> { + fn block(&self, id: BlockHashOrNumber) -> ProviderResult> { let db_tx = self.0.tx()?; if let Some(header) = self.header(id)? { - let body = self.transactions_by_block(id)?.expect("should exist"); + let res = self.transactions_by_block(id)?; + let body = res.ok_or(ProviderError::MissingBlockTxs(header.number))?; + db_tx.commit()?; Ok(Some(Block { header, body })) } else { @@ -165,7 +175,10 @@ impl BlockProvider for DbProvider { } } - fn block_with_tx_hashes(&self, id: BlockHashOrNumber) -> Result> { + fn block_with_tx_hashes( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { let db_tx = self.0.tx()?; let block_num = match id { @@ -176,7 +189,9 @@ impl BlockProvider for DbProvider { let Some(block_num) = block_num else { return Ok(None) }; if let Some(header) = db_tx.get::(block_num)? { - let body_indices = db_tx.get::(block_num)?.expect("should exist"); + let res = db_tx.get::(block_num)?; + let body_indices = res.ok_or(ProviderError::MissingBlockTxs(block_num))?; + let body = self.transaction_hashes_in_range(Range::from(body_indices))?; let block = BlockWithTxHashes { header, body }; @@ -188,7 +203,7 @@ impl BlockProvider for DbProvider { } } - fn blocks_in_range(&self, range: RangeInclusive) -> Result> { + fn blocks_in_range(&self, range: RangeInclusive) -> ProviderResult> { let db_tx = self.0.tx()?; let total = range.end() - range.start() + 1; @@ -196,7 +211,9 @@ impl BlockProvider for DbProvider { for num in range { if let Some(header) = db_tx.get::(num)? 
{ - let body_indices = db_tx.get::(num)?.expect("should exist"); + let res = db_tx.get::(num)?; + let body_indices = res.ok_or(ProviderError::MissingBlockBodyIndices(num))?; + let body = self.transaction_in_range(Range::from(body_indices))?; blocks.push(Block { header, body }) } @@ -208,7 +225,7 @@ impl BlockProvider for DbProvider { } impl BlockStatusProvider for DbProvider { - fn block_status(&self, id: BlockHashOrNumber) -> Result> { + fn block_status(&self, id: BlockHashOrNumber) -> ProviderResult> { let db_tx = self.0.tx()?; let block_num = match id { @@ -217,7 +234,9 @@ impl BlockStatusProvider for DbProvider { }; if let Some(block_num) = block_num { - let status = db_tx.get::(block_num)?.expect("should exist"); + let res = db_tx.get::(block_num)?; + let status = res.ok_or(ProviderError::MissingBlockStatus(block_num))?; + db_tx.commit()?; Ok(Some(status)) } else { @@ -227,7 +246,7 @@ impl BlockStatusProvider for DbProvider { } impl StateRootProvider for DbProvider { - fn state_root(&self, block_id: BlockHashOrNumber) -> Result> { + fn state_root(&self, block_id: BlockHashOrNumber) -> ProviderResult> { let db_tx = self.0.tx()?; let block_num = match block_id { @@ -246,14 +265,14 @@ impl StateRootProvider for DbProvider { } impl StateUpdateProvider for DbProvider { - fn state_update(&self, block_id: BlockHashOrNumber) -> Result> { + fn state_update(&self, block_id: BlockHashOrNumber) -> ProviderResult> { // A helper function that iterates over all entries in a dupsort table and collects the // results into `V`. If `key` is not found, `V::default()` is returned. fn dup_entries( db_tx: &mdbx::tx::TxRO, key: ::Key, - f: impl FnMut(Result, DatabaseError>) -> Result, - ) -> Result + f: impl FnMut(Result, DatabaseError>) -> ProviderResult, + ) -> ProviderResult where Tb: DupSort + Debug, V: FromIterator + Default, @@ -261,7 +280,7 @@ impl StateUpdateProvider for DbProvider { Ok(db_tx .cursor::()? .walk_dup(Some(key), None)? - .map(|walker| walker.map(f).collect::>()) + .map(|walker| walker.map(f).collect::>()) .transpose()? .unwrap_or_default()) } @@ -294,8 +313,11 @@ impl StateUpdateProvider for DbProvider { _, >(&db_tx, block_num, |entry| { let (_, class_hash) = entry?; - let compiled_hash = - db_tx.get::(class_hash)?.expect("qed; must exist"); + + let compiled_hash = db_tx + .get::(class_hash)? + .ok_or(ProviderError::MissingCompiledClassHash(class_hash))?; + Ok((class_hash, compiled_hash)) })?; @@ -306,7 +328,7 @@ impl StateUpdateProvider for DbProvider { _, >(&db_tx, block_num, |entry| { let (_, ContractStorageEntry { key, value }) = entry?; - Ok::<_, DatabaseError>((key.contract_address, (key.key, value))) + Ok((key.contract_address, (key.key, value))) })?; let mut map: HashMap<_, HashMap> = HashMap::new(); @@ -332,11 +354,12 @@ impl StateUpdateProvider for DbProvider { } impl TransactionProvider for DbProvider { - fn transaction_by_hash(&self, hash: TxHash) -> Result> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { let db_tx = self.0.tx()?; if let Some(num) = db_tx.get::(hash)? { - let transaction = db_tx.get::(num)?.expect("transaction should exist"); + let res = db_tx.get::(num)?; + let transaction = res.ok_or(ProviderError::MissingTx(num))?; let transaction = TxWithHash { hash, transaction }; db_tx.commit()?; @@ -349,7 +372,7 @@ impl TransactionProvider for DbProvider { fn transactions_by_block( &self, block_id: BlockHashOrNumber, - ) -> Result>> { + ) -> ProviderResult>> { if let Some(indices) = self.block_body_indices(block_id)? 
{ Ok(Some(self.transaction_in_range(Range::from(indices))?)) } else { @@ -357,7 +380,7 @@ impl TransactionProvider for DbProvider { } } - fn transaction_in_range(&self, range: Range) -> Result> { + fn transaction_in_range(&self, range: Range) -> ProviderResult> { let db_tx = self.0.tx()?; let total = range.end - range.start; @@ -365,7 +388,9 @@ impl TransactionProvider for DbProvider { for i in range { if let Some(transaction) = db_tx.get::(i)? { - let hash = db_tx.get::(i)?.expect("should exist"); + let res = db_tx.get::(i)?; + let hash = res.ok_or(ProviderError::MissingTxHash(i))?; + transactions.push(TxWithHash { hash, transaction }); }; } @@ -377,11 +402,15 @@ impl TransactionProvider for DbProvider { fn transaction_block_num_and_hash( &self, hash: TxHash, - ) -> Result> { + ) -> ProviderResult> { let db_tx = self.0.tx()?; if let Some(num) = db_tx.get::(hash)? { - let block_num = db_tx.get::(num)?.expect("should exist"); - let block_hash = db_tx.get::(block_num)?.expect("should exist"); + let block_num = + db_tx.get::(num)?.ok_or(ProviderError::MissingTxBlock(num))?; + + let res = db_tx.get::(block_num)?; + let block_hash = res.ok_or(ProviderError::MissingBlockHash(num))?; + db_tx.commit()?; Ok(Some((block_num, block_hash))) } else { @@ -393,25 +422,32 @@ impl TransactionProvider for DbProvider { &self, block_id: BlockHashOrNumber, idx: u64, - ) -> Result> { + ) -> ProviderResult> { let db_tx = self.0.tx()?; match self.block_body_indices(block_id)? { // make sure the requested idx is within the range of the block tx count Some(indices) if idx < indices.tx_count => { let num = indices.tx_offset + idx; - let hash = db_tx.get::(num)?.expect("should exist"); - let transaction = db_tx.get::(num)?.expect("should exist"); - let transaction = TxWithHash { hash, transaction }; + + let res = db_tx.get::(num)?; + let hash = res.ok_or(ProviderError::MissingTxHash(num))?; + + let res = db_tx.get::(num)?; + let transaction = res.ok_or(ProviderError::MissingTx(num))?; + db_tx.commit()?; - Ok(Some(transaction)) + Ok(Some(TxWithHash { hash, transaction })) } _ => Ok(None), } } - fn transaction_count_by_block(&self, block_id: BlockHashOrNumber) -> Result> { + fn transaction_count_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult> { let db_tx = self.0.tx()?; if let Some(indices) = self.block_body_indices(block_id)? { db_tx.commit()?; @@ -423,7 +459,7 @@ impl TransactionProvider for DbProvider { } impl TransactionsProviderExt for DbProvider { - fn transaction_hashes_in_range(&self, range: Range) -> Result> { + fn transaction_hashes_in_range(&self, range: Range) -> ProviderResult> { let db_tx = self.0.tx()?; let total = range.end - range.start; @@ -441,11 +477,15 @@ impl TransactionsProviderExt for DbProvider { } impl TransactionStatusProvider for DbProvider { - fn transaction_status(&self, hash: TxHash) -> Result> { + fn transaction_status(&self, hash: TxHash) -> ProviderResult> { let db_tx = self.0.tx()?; if let Some(tx_num) = db_tx.get::(hash)? 
{ - let block_num = db_tx.get::(tx_num)?.expect("should exist"); - let status = db_tx.get::(block_num)?.expect("should exist"); + let res = db_tx.get::(tx_num)?; + let block_num = res.ok_or(ProviderError::MissingTxBlock(tx_num))?; + + let res = db_tx.get::(block_num)?; + let status = res.ok_or(ProviderError::MissingBlockStatus(block_num))?; + db_tx.commit()?; Ok(Some(status)) } else { @@ -455,10 +495,13 @@ impl TransactionStatusProvider for DbProvider { } impl ReceiptProvider for DbProvider { - fn receipt_by_hash(&self, hash: TxHash) -> Result> { + fn receipt_by_hash(&self, hash: TxHash) -> ProviderResult> { let db_tx = self.0.tx()?; if let Some(num) = db_tx.get::(hash)? { - let receipt = db_tx.get::(num)?.expect("should exist"); + let receipt = db_tx + .get::(num)? + .ok_or(ProviderError::MissingTxReceipt(num))?; + db_tx.commit()?; Ok(Some(receipt)) } else { @@ -466,7 +509,10 @@ impl ReceiptProvider for DbProvider { } } - fn receipts_by_block(&self, block_id: BlockHashOrNumber) -> Result>> { + fn receipts_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { if let Some(indices) = self.block_body_indices(block_id)? { let db_tx = self.0.tx()?; let mut receipts = Vec::with_capacity(indices.tx_count as usize); @@ -492,8 +538,8 @@ impl BlockWriter for DbProvider { block: SealedBlockWithStatus, states: StateUpdatesWithDeclaredClasses, receipts: Vec, - ) -> Result<()> { - self.0.update(move |db_tx| -> Result<()> { + ) -> ProviderResult<()> { + self.0.update(move |db_tx| -> ProviderResult<()> { let block_hash = block.block.header.hash; let block_number = block.block.header.header.number; diff --git a/crates/katana/storage/provider/src/providers/db/state.rs b/crates/katana/storage/provider/src/providers/db/state.rs index e302a58c91..1353b39b45 100644 --- a/crates/katana/storage/provider/src/providers/db/state.rs +++ b/crates/katana/storage/provider/src/providers/db/state.rs @@ -1,11 +1,8 @@ use std::cmp::Ordering; -use anyhow::Result; use katana_db::mdbx::{self}; -use katana_db::models::contract::{ - ContractClassChange, ContractInfoChangeList, ContractNonceChange, -}; -use katana_db::models::storage::{ContractStorageEntry, ContractStorageKey, StorageEntry}; +use katana_db::models::contract::ContractInfoChangeList; +use katana_db::models::storage::{ContractStorageKey, StorageEntry}; use katana_db::tables::{ ClassDeclarationBlock, CompiledClassHashes, CompiledContractClasses, ContractClassChanges, ContractInfo, ContractInfoChangeSet, ContractStorage, NonceChanges, SierraClasses, @@ -18,12 +15,14 @@ use katana_primitives::contract::{ }; use super::DbProvider; +use crate::error::ProviderError; use crate::traits::contract::{ContractClassProvider, ContractClassWriter}; use crate::traits::state::{StateProvider, StateWriter}; +use crate::ProviderResult; impl StateWriter for DbProvider { - fn set_nonce(&self, address: ContractAddress, nonce: Nonce) -> Result<()> { - self.0.update(move |db_tx| -> Result<()> { + fn set_nonce(&self, address: ContractAddress, nonce: Nonce) -> ProviderResult<()> { + self.0.update(move |db_tx| -> ProviderResult<()> { let value = if let Some(info) = db_tx.get::(address)? 
{ GenericContractInfo { nonce, ..info } } else { @@ -39,8 +38,8 @@ impl StateWriter for DbProvider { address: ContractAddress, storage_key: StorageKey, storage_value: StorageValue, - ) -> Result<()> { - self.0.update(move |db_tx| -> Result<()> { + ) -> ProviderResult<()> { + self.0.update(move |db_tx| -> ProviderResult<()> { let mut cursor = db_tx.cursor::()?; let entry = cursor.seek_by_key_subkey(address, storage_key)?; @@ -60,8 +59,8 @@ impl StateWriter for DbProvider { &self, address: ContractAddress, class_hash: ClassHash, - ) -> Result<()> { - self.0.update(move |db_tx| -> Result<()> { + ) -> ProviderResult<()> { + self.0.update(move |db_tx| -> ProviderResult<()> { let value = if let Some(info) = db_tx.get::(address)? { GenericContractInfo { class_hash, ..info } } else { @@ -74,8 +73,8 @@ impl StateWriter for DbProvider { } impl ContractClassWriter for DbProvider { - fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> Result<()> { - self.0.update(move |db_tx| -> Result<()> { + fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> ProviderResult<()> { + self.0.update(move |db_tx| -> ProviderResult<()> { db_tx.put::(hash, class.into())?; Ok(()) })? @@ -85,15 +84,19 @@ impl ContractClassWriter for DbProvider { &self, hash: ClassHash, compiled_hash: CompiledClassHash, - ) -> Result<()> { - self.0.update(move |db_tx| -> Result<()> { + ) -> ProviderResult<()> { + self.0.update(move |db_tx| -> ProviderResult<()> { db_tx.put::(hash, compiled_hash)?; Ok(()) })? } - fn set_sierra_class(&self, hash: ClassHash, sierra: FlattenedSierraClass) -> Result<()> { - self.0.update(move |db_tx| -> Result<()> { + fn set_sierra_class( + &self, + hash: ClassHash, + sierra: FlattenedSierraClass, + ) -> ProviderResult<()> { + self.0.update(move |db_tx| -> ProviderResult<()> { db_tx.put::(hash, sierra)?; Ok(()) })? 
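Across these provider hunks the refactor is uniform: `anyhow::Result` and `.expect("should exist")` give way to a `ProviderResult<T>` alias whose `ProviderError` variants say exactly which record was missing. A minimal, self-contained sketch of that shape — the names below (`ToyProvider`, `DatabaseError(String)`, `header`, `read`) are toy stand-ins, not katana's real tables or traits:

    use std::collections::HashMap;
    use thiserror::Error;

    /// Toy alias mirroring the `ProviderResult<T>` introduced by this patch.
    pub type ProviderResult<T> = Result<T, ProviderError>;

    #[derive(Debug, Error)]
    pub enum ProviderError {
        /// The block exists but its header record does not.
        #[error("Missing block header for block number {0}")]
        MissingBlockHeader(u64),

        /// Lower-level storage failures bubble up via `#[from]`.
        #[error(transparent)]
        Database(#[from] DatabaseError),
    }

    #[derive(Debug, Error)]
    #[error("database failure: {0}")]
    pub struct DatabaseError(String);

    pub struct ToyProvider {
        headers: HashMap<u64, String>,
    }

    impl ToyProvider {
        /// Instead of `get(..)?.expect("should exist")`, a missing row becomes a
        /// descriptive error that callers can match on or report.
        pub fn header(&self, num: u64) -> ProviderResult<String> {
            let row = self.read(num)?; // DatabaseError -> ProviderError via `?`
            row.ok_or(ProviderError::MissingBlockHeader(num))
        }

        fn read(&self, num: u64) -> Result<Option<String>, DatabaseError> {
            Ok(self.headers.get(&num).cloned())
        }
    }

The RPC layer earlier in this patch continues the same idea: `From<ProviderError> for StarknetApiError` turns any of these into `UnexpectedError { reason }`, which is why the handlers can write `.map_err(StarknetApiError::from)?` instead of collapsing every failure into an opaque error.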
@@ -110,7 +113,7 @@ impl LatestStateProvider { } impl ContractClassProvider for LatestStateProvider { - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { let class = self.0.get::(hash)?; Ok(class.map(CompiledContractClass::from)) } @@ -118,19 +121,19 @@ impl ContractClassProvider for LatestStateProvider { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { let hash = self.0.get::(hash)?; Ok(hash) } - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { let class = self.0.get::(hash)?; Ok(class) } } impl StateProvider for LatestStateProvider { - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { let info = self.0.get::(address)?; Ok(info.map(|info| info.nonce)) } @@ -138,7 +141,7 @@ impl StateProvider for LatestStateProvider { fn class_hash_of_contract( &self, address: ContractAddress, - ) -> Result> { + ) -> ProviderResult> { let info = self.0.get::(address)?; Ok(info.map(|info| info.class_hash)) } @@ -147,7 +150,7 @@ impl StateProvider for LatestStateProvider { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { let mut cursor = self.0.cursor::()?; let entry = cursor.seek_by_key_subkey(address, storage_key)?; match entry { @@ -210,7 +213,7 @@ impl ContractClassProvider for HistoricalStateProvider { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { // check that the requested class hash was declared before the pinned block number if self.tx.get::(hash)?.is_some_and(|num| num <= self.block_number) { Ok(self.tx.get::(hash)?) @@ -219,7 +222,7 @@ impl ContractClassProvider for HistoricalStateProvider { } } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { if self.compiled_class_hash_of_class_hash(hash)?.is_some() { let contract = self.tx.get::(hash)?; Ok(contract.map(CompiledContractClass::from)) @@ -228,7 +231,7 @@ impl ContractClassProvider for HistoricalStateProvider { } } - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { if self.compiled_class_hash_of_class_hash(hash)?.is_some() { self.tx.get::(hash).map_err(|e| e.into()) } else { @@ -238,7 +241,7 @@ impl ContractClassProvider for HistoricalStateProvider { } impl StateProvider for HistoricalStateProvider { - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { let change_list = self.tx.get::(address)?; if let Some(num) = change_list.and_then(|entry| { @@ -248,19 +251,25 @@ impl StateProvider for HistoricalStateProvider { ) }) { let mut cursor = self.tx.cursor::()?; - let ContractNonceChange { contract_address, nonce } = cursor - .seek_by_key_subkey(num, address)? 
- .expect("if block number is in the block set, change entry must exist"); - - if contract_address == address { - return Ok(Some(nonce)); + let entry = cursor.seek_by_key_subkey(num, address)?.ok_or( + ProviderError::MissingContractNonceChangeEntry { + block: num, + contract_address: address, + }, + )?; + + if entry.contract_address == address { + return Ok(Some(entry.nonce)); } } Ok(None) } - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { let change_list: Option = self.tx.get::(address)?; @@ -271,12 +280,15 @@ impl StateProvider for HistoricalStateProvider { ) }) { let mut cursor = self.tx.cursor::()?; - let ContractClassChange { contract_address, class_hash } = cursor - .seek_by_key_subkey(num, address)? - .expect("if block number is in the block set, change entry must exist"); - - if contract_address == address { - return Ok(Some(class_hash)); + let entry = cursor.seek_by_key_subkey(num, address)?.ok_or( + ProviderError::MissingContractClassChangeEntry { + block: num, + contract_address: address, + }, + )?; + + if entry.contract_address == address { + return Ok(Some(entry.class_hash)); } } @@ -287,7 +299,7 @@ impl StateProvider for HistoricalStateProvider { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { let mut cursor = self.tx.cursor::()?; if let Some(num) = cursor.seek_by_key_subkey(address, storage_key)?.and_then(|entry| { @@ -299,12 +311,16 @@ impl StateProvider for HistoricalStateProvider { let mut cursor = self.tx.cursor::()?; let sharded_key = ContractStorageKey { contract_address: address, key: storage_key }; - let ContractStorageEntry { key, value } = cursor - .seek_by_key_subkey(num, sharded_key)? 
- .expect("if block number is in the block set, change entry must exist"); + let entry = cursor.seek_by_key_subkey(num, sharded_key)?.ok_or( + ProviderError::MissingStorageChangeEntry { + block: num, + storage_key, + contract_address: address, + }, + )?; - if key.contract_address == address && key.key == storage_key { - return Ok(Some(value)); + if entry.key.contract_address == address && entry.key.key == storage_key { + return Ok(Some(entry.value)); } } diff --git a/crates/katana/storage/provider/src/providers/fork/backend.rs b/crates/katana/storage/provider/src/providers/fork/backend.rs index d8c143ff65..41f92f63f2 100644 --- a/crates/katana/storage/provider/src/providers/fork/backend.rs +++ b/crates/katana/storage/provider/src/providers/fork/backend.rs @@ -1,11 +1,10 @@ use std::collections::VecDeque; use std::pin::Pin; -use std::sync::mpsc::{channel as oneshot, Sender as OneshotSender}; +use std::sync::mpsc::{channel as oneshot, RecvError, Sender as OneshotSender}; use std::sync::Arc; use std::task::{Context, Poll}; use std::thread; -use anyhow::Result; use futures::channel::mpsc::{channel, Receiver, SendError, Sender}; use futures::future::BoxFuture; use futures::stream::Stream; @@ -23,12 +22,14 @@ use katana_primitives::FieldElement; use parking_lot::Mutex; use starknet::core::types::{BlockId, ContractClass, StarknetError}; use starknet::providers::jsonrpc::HttpTransport; -use starknet::providers::{JsonRpcClient, Provider, ProviderError}; +use starknet::providers::{JsonRpcClient, Provider, ProviderError as StarknetProviderError}; use tracing::{error, trace}; +use crate::error::ProviderError; use crate::providers::in_memory::cache::CacheStateDb; use crate::traits::contract::{ContractClassProvider, ContractInfoProvider}; use crate::traits::state::StateProvider; +use crate::ProviderResult; type GetNonceResult = Result; type GetStorageResult = Result; @@ -37,14 +38,23 @@ type GetClassAtResult = Result), GetNonce(ContractAddress, OneshotSender), @@ -55,7 +65,7 @@ pub enum BackendRequest { type BackendRequestFuture = BoxFuture<'static, ()>; /// The backend for the forked provider. It processes all requests from the [ForkedBackend]'s -/// and sends the results back to it. +/// and sends the ProviderResults back to it. /// /// It is responsible it fetching the data from the forked provider. pub struct Backend { @@ -75,7 +85,7 @@ impl Backend { /// into a future that will be polled until completion by the `BackendHandler`. /// /// Each request is accompanied by the sender-half of a oneshot channel that will be used - /// to send the result back to the [ForkedBackend] which sent the requests. + /// to send the ProviderResult back to the [ForkedBackend] which sent the requests. 
fn handle_requests(&mut self, request: BackendRequest) { let block = self.block; let provider = self.provider.clone(); @@ -86,7 +96,7 @@ impl Backend { let res = provider .get_nonce(block, Into::::into(contract_address)) .await - .map_err(ForkedBackendError::Provider); + .map_err(ForkedBackendError::StarknetProvider); sender.send(res).expect("failed to send nonce result") }); @@ -99,7 +109,7 @@ impl Backend { let res = provider .get_storage_at(Into::::into(contract_address), key, block) .await - .map_err(ForkedBackendError::Provider); + .map_err(ForkedBackendError::StarknetProvider); sender.send(res).expect("failed to send storage result") }); @@ -112,7 +122,7 @@ impl Backend { let res = provider .get_class_hash_at(block, Into::::into(contract_address)) .await - .map_err(ForkedBackendError::Provider); + .map_err(ForkedBackendError::StarknetProvider); sender.send(res).expect("failed to send class hash result") }); @@ -125,7 +135,7 @@ impl Backend { let res = provider .get_class(block, class_hash) .await - .map_err(ForkedBackendError::Provider); + .map_err(ForkedBackendError::StarknetProvider); sender.send(res).expect("failed to send class result") }); @@ -197,22 +207,20 @@ impl ForkedBackend { pub fn new_with_backend_thread( provider: Arc>, block_id: BlockHashOrNumber, - ) -> Self { + ) -> Result { let (handler, backend) = Self::new(provider, block_id); - thread::Builder::new() - .spawn(move || { - tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .expect("failed to create tokio runtime") - .block_on(backend); - }) - .expect("failed to spawn fork backend thread"); + thread::Builder::new().spawn(move || { + tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .expect("failed to create tokio runtime") + .block_on(backend); + })?; trace!(target: "forked_backend", "fork backend thread spawned"); - handler + Ok(handler) } fn new( @@ -245,8 +253,8 @@ impl ForkedBackend { self.0 .lock() .try_send(BackendRequest::GetNonce(contract_address, sender)) - .map_err(|e| ForkedBackendError::Send(e.into_send_error()))?; - rx.recv().expect("failed to receive nonce result") + .map_err(|e| e.into_send_error())?; + rx.recv()? } pub fn do_get_storage( @@ -259,8 +267,8 @@ impl ForkedBackend { self.0 .lock() .try_send(BackendRequest::GetStorage(contract_address, key, sender)) - .map_err(|e| ForkedBackendError::Send(e.into_send_error()))?; - rx.recv().expect("failed to receive storage result") + .map_err(|e| e.into_send_error())?; + rx.recv()? } pub fn do_get_class_hash_at( @@ -272,8 +280,8 @@ impl ForkedBackend { self.0 .lock() .try_send(BackendRequest::GetClassHashAt(contract_address, sender)) - .map_err(|e| ForkedBackendError::Send(e.into_send_error()))?; - rx.recv().expect("failed to receive class hash result") + .map_err(|e| e.into_send_error())?; + rx.recv()? } pub fn do_get_class_at( @@ -285,8 +293,8 @@ impl ForkedBackend { self.0 .lock() .try_send(BackendRequest::GetClassAt(class_hash, sender)) - .map_err(|e| ForkedBackendError::Send(e.into_send_error()))?; - rx.recv().expect("failed to receive class result") + .map_err(|e| e.into_send_error())?; + rx.recv()? 
} pub fn do_get_compiled_class_hash( @@ -322,14 +330,14 @@ impl SharedStateProvider { } impl ContractInfoProvider for SharedStateProvider { - fn contract(&self, address: ContractAddress) -> Result> { + fn contract(&self, address: ContractAddress) -> ProviderResult> { let info = self.0.contract_state.read().get(&address).cloned(); Ok(info) } } impl StateProvider for SharedStateProvider { - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { if let nonce @ Some(_) = self.contract(address)?.map(|i| i.nonce) { return Ok(nonce); } @@ -349,7 +357,7 @@ impl StateProvider for SharedStateProvider { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { if let value @ Some(_) = self.0.storage.read().get(&address).and_then(|s| s.get(&storage_key)) { @@ -371,7 +379,10 @@ impl StateProvider for SharedStateProvider { Ok(value) } - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { if let hash @ Some(_) = self.contract(address)?.map(|i| i.class_hash) { return Ok(hash); } @@ -389,7 +400,7 @@ impl StateProvider for SharedStateProvider { } impl ContractClassProvider for SharedStateProvider { - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { if let class @ Some(_) = self.0.shared_contract_classes.sierra_classes.read().get(&hash) { return Ok(class.cloned()); } @@ -419,7 +430,7 @@ impl ContractClassProvider for SharedStateProvider { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { if let hash @ Some(_) = self.0.compiled_class_hashes.read().get(&hash) { return Ok(hash.cloned()); } @@ -438,7 +449,7 @@ impl ContractClassProvider for SharedStateProvider { } } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { if let Some(class) = self.0.shared_contract_classes.compiled_classes.read().get(&hash) { return Ok(Some(class.clone())); } @@ -456,7 +467,7 @@ impl ContractClassProvider for SharedStateProvider { ContractClass::Legacy(class) => { let (_, compiled_class) = legacy_rpc_to_inner_compiled_class(&class).map_err(|e| { error!(target: "forked_backend", "error while parsing legacy class {hash:#x}: {e}"); - e + ProviderError::ParsingError(e.to_string()) })?; (hash, hash, compiled_class, None) @@ -465,7 +476,7 @@ impl ContractClassProvider for SharedStateProvider { ContractClass::Sierra(sierra_class) => { let (_, compiled_class_hash, compiled_class) = flattened_sierra_to_compiled_class(&sierra_class).map_err(|e|{ error!(target: "forked_backend", "error while parsing sierra class {hash:#x}: {e}"); - e + ProviderError::ParsingError(e.to_string()) })?; (hash, compiled_class_hash, compiled_class, Some(sierra_class)) @@ -500,7 +511,7 @@ fn handle_contract_or_class_not_found_err( match result { Ok(value) => Ok(Some(value)), - Err(ForkedBackendError::Provider(ProviderError::StarknetError( + Err(ForkedBackendError::StarknetProvider(StarknetProviderError::StarknetError( StarknetError::ContractNotFound | StarknetError::ClassHashNotFound, ))) => Ok(None), @@ -544,6 +555,7 @@ mod tests { ))), BlockHashOrNumber::Num(block_num), ) + .unwrap() } #[test] diff --git a/crates/katana/storage/provider/src/providers/fork/mod.rs b/crates/katana/storage/provider/src/providers/fork/mod.rs index a995c1513d..54a30b1abd 100644 --- 
a/crates/katana/storage/provider/src/providers/fork/mod.rs +++ b/crates/katana/storage/provider/src/providers/fork/mod.rs @@ -4,7 +4,6 @@ pub mod state; use std::ops::RangeInclusive; use std::sync::Arc; -use anyhow::Result; use katana_db::models::block::StoredBlockBodyIndices; use katana_primitives::block::{ Block, BlockHash, BlockHashOrNumber, BlockNumber, BlockWithTxHashes, FinalityStatus, Header, @@ -20,7 +19,7 @@ use parking_lot::RwLock; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; -use self::backend::{ForkedBackend, SharedStateProvider}; +use self::backend::{ForkedBackend, ForkedBackendError, SharedStateProvider}; use self::state::ForkedStateDb; use super::in_memory::cache::{CacheDb, CacheStateDb}; use super::in_memory::state::HistoricalStates; @@ -34,6 +33,7 @@ use crate::traits::state_update::StateUpdateProvider; use crate::traits::transaction::{ ReceiptProvider, TransactionProvider, TransactionStatusProvider, TransactionsProviderExt, }; +use crate::ProviderResult; pub struct ForkedProvider { // TODO: insert `ForkedBackend` into `CacheDb` @@ -43,40 +43,46 @@ pub struct ForkedProvider { } impl ForkedProvider { - pub fn new(provider: Arc>, block_id: BlockHashOrNumber) -> Self { - let backend = ForkedBackend::new_with_backend_thread(provider, block_id); + pub fn new( + provider: Arc>, + block_id: BlockHashOrNumber, + ) -> Result { + let backend = ForkedBackend::new_with_backend_thread(provider, block_id)?; let shared_provider = SharedStateProvider::new_with_backend(backend); let storage = RwLock::new(CacheDb::new(())); let state = Arc::new(CacheStateDb::new(shared_provider)); let historical_states = RwLock::new(HistoricalStates::default()); - Self { storage, state, historical_states } + Ok(Self { storage, state, historical_states }) } } impl BlockHashProvider for ForkedProvider { - fn latest_hash(&self) -> Result { + fn latest_hash(&self) -> ProviderResult { Ok(self.storage.read().latest_block_hash) } - fn block_hash_by_num(&self, num: BlockNumber) -> Result> { + fn block_hash_by_num(&self, num: BlockNumber) -> ProviderResult> { Ok(self.storage.read().block_hashes.get(&num).cloned()) } } impl BlockNumberProvider for ForkedProvider { - fn latest_number(&self) -> Result { + fn latest_number(&self) -> ProviderResult { Ok(self.storage.read().latest_block_number) } - fn block_number_by_hash(&self, hash: BlockHash) -> Result> { + fn block_number_by_hash(&self, hash: BlockHash) -> ProviderResult> { Ok(self.storage.read().block_numbers.get(&hash).cloned()) } } impl HeaderProvider for ForkedProvider { - fn header(&self, id: katana_primitives::block::BlockHashOrNumber) -> Result> { + fn header( + &self, + id: katana_primitives::block::BlockHashOrNumber, + ) -> ProviderResult> { match id { katana_primitives::block::BlockHashOrNumber::Num(num) => { Ok(self.storage.read().block_headers.get(&num).cloned()) @@ -99,7 +105,7 @@ impl HeaderProvider for ForkedProvider { } impl BlockStatusProvider for ForkedProvider { - fn block_status(&self, id: BlockHashOrNumber) -> Result> { + fn block_status(&self, id: BlockHashOrNumber) -> ProviderResult> { let num = match id { BlockHashOrNumber::Num(num) => num, BlockHashOrNumber::Hash(hash) => { @@ -114,7 +120,7 @@ impl BlockStatusProvider for ForkedProvider { } impl BlockProvider for ForkedProvider { - fn block(&self, id: BlockHashOrNumber) -> Result> { + fn block(&self, id: BlockHashOrNumber) -> ProviderResult> { let block_num = match id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => 
self.storage.read().block_numbers.get(&hash).cloned(), @@ -131,7 +137,10 @@ impl BlockProvider for ForkedProvider { Ok(Some(Block { header, body })) } - fn block_with_tx_hashes(&self, id: BlockHashOrNumber) -> Result> { + fn block_with_tx_hashes( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { let Some(header) = self.header(id)? else { return Ok(None); }; @@ -142,7 +151,7 @@ impl BlockProvider for ForkedProvider { Ok(Some(katana_primitives::block::BlockWithTxHashes { header, body: tx_hashes })) } - fn blocks_in_range(&self, range: RangeInclusive) -> Result> { + fn blocks_in_range(&self, range: RangeInclusive) -> ProviderResult> { let mut blocks = Vec::new(); for num in range { if let Some(block) = self.block(BlockHashOrNumber::Num(num))? { @@ -152,7 +161,10 @@ impl BlockProvider for ForkedProvider { Ok(blocks) } - fn block_body_indices(&self, id: BlockHashOrNumber) -> Result> { + fn block_body_indices( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { let block_num = match id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -169,7 +181,7 @@ impl BlockProvider for ForkedProvider { } impl TransactionProvider for ForkedProvider { - fn transaction_by_hash(&self, hash: TxHash) -> Result> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { let tx = self.storage.read().transaction_numbers.get(&hash).and_then(|num| { let transaction = self.storage.read().transactions.get(*num as usize).cloned()?; let hash = self.storage.read().transaction_hashes.get(num).copied()?; @@ -181,7 +193,7 @@ impl TransactionProvider for ForkedProvider { fn transactions_by_block( &self, block_id: BlockHashOrNumber, - ) -> Result>> { + ) -> ProviderResult>> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -218,7 +230,7 @@ impl TransactionProvider for ForkedProvider { &self, block_id: BlockHashOrNumber, idx: u64, - ) -> Result> { + ) -> ProviderResult> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -246,7 +258,10 @@ impl TransactionProvider for ForkedProvider { Ok(tx) } - fn transaction_count_by_block(&self, block_id: BlockHashOrNumber) -> Result> { + fn transaction_count_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -264,7 +279,7 @@ impl TransactionProvider for ForkedProvider { fn transaction_block_num_and_hash( &self, hash: TxHash, - ) -> Result> { + ) -> ProviderResult> { let storage_read = self.storage.read(); let Some(number) = storage_read.transaction_numbers.get(&hash) else { return Ok(None) }; @@ -276,7 +291,10 @@ impl TransactionProvider for ForkedProvider { } impl TransactionsProviderExt for ForkedProvider { - fn transaction_hashes_in_range(&self, range: std::ops::Range) -> Result> { + fn transaction_hashes_in_range( + &self, + range: std::ops::Range, + ) -> ProviderResult> { let mut hashes = Vec::new(); for num in range { if let Some(hash) = self.storage.read().transaction_hashes.get(&num).cloned() { @@ -288,7 +306,7 @@ impl TransactionsProviderExt for ForkedProvider { } impl TransactionStatusProvider for ForkedProvider { - fn transaction_status(&self, hash: TxHash) 
-> Result> { + fn transaction_status(&self, hash: TxHash) -> ProviderResult> { let tx_block = self .storage .read() @@ -306,7 +324,7 @@ impl TransactionStatusProvider for ForkedProvider { } impl ReceiptProvider for ForkedProvider { - fn receipt_by_hash(&self, hash: TxHash) -> Result> { + fn receipt_by_hash(&self, hash: TxHash) -> ProviderResult> { let receipt = self .storage .read() @@ -316,7 +334,10 @@ impl ReceiptProvider for ForkedProvider { Ok(receipt) } - fn receipts_by_block(&self, block_id: BlockHashOrNumber) -> Result>> { + fn receipts_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -339,7 +360,7 @@ impl StateRootProvider for ForkedProvider { fn state_root( &self, block_id: BlockHashOrNumber, - ) -> Result> { + ) -> ProviderResult> { let state_root = self.block_number_by_id(block_id)?.and_then(|num| { self.storage.read().block_headers.get(&num).map(|header| header.state_root) }); @@ -348,7 +369,7 @@ impl StateRootProvider for ForkedProvider { } impl StateUpdateProvider for ForkedProvider { - fn state_update(&self, block_id: BlockHashOrNumber) -> Result> { + fn state_update(&self, block_id: BlockHashOrNumber) -> ProviderResult> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -361,11 +382,14 @@ impl StateUpdateProvider for ForkedProvider { } impl StateFactoryProvider for ForkedProvider { - fn latest(&self) -> Result> { + fn latest(&self) -> ProviderResult> { Ok(Box::new(self::state::LatestStateProvider(Arc::clone(&self.state)))) } - fn historical(&self, block_id: BlockHashOrNumber) -> Result>> { + fn historical( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.block_number_by_hash(hash)?, @@ -391,7 +415,7 @@ impl BlockWriter for ForkedProvider { block: SealedBlockWithStatus, states: StateUpdatesWithDeclaredClasses, receipts: Vec, - ) -> Result<()> { + ) -> ProviderResult<()> { let mut storage = self.storage.write(); let block_hash = block.block.header.hash; @@ -441,12 +465,16 @@ impl BlockWriter for ForkedProvider { } impl ContractClassWriter for ForkedProvider { - fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> Result<()> { + fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> ProviderResult<()> { self.state.shared_contract_classes.compiled_classes.write().insert(hash, class); Ok(()) } - fn set_sierra_class(&self, hash: ClassHash, sierra: FlattenedSierraClass) -> Result<()> { + fn set_sierra_class( + &self, + hash: ClassHash, + sierra: FlattenedSierraClass, + ) -> ProviderResult<()> { self.state.shared_contract_classes.sierra_classes.write().insert(hash, sierra); Ok(()) } @@ -455,7 +483,7 @@ impl ContractClassWriter for ForkedProvider { &self, hash: ClassHash, compiled_hash: CompiledClassHash, - ) -> Result<()> { + ) -> ProviderResult<()> { self.state.compiled_class_hashes.write().insert(hash, compiled_hash); Ok(()) } @@ -467,7 +495,7 @@ impl StateWriter for ForkedProvider { address: ContractAddress, storage_key: katana_primitives::contract::StorageKey, storage_value: katana_primitives::contract::StorageValue, - ) -> Result<()> { + ) -> ProviderResult<()> { 
self.state.storage.write().entry(address).or_default().insert(storage_key, storage_value); Ok(()) } @@ -476,7 +504,7 @@ impl StateWriter for ForkedProvider { &self, address: ContractAddress, class_hash: ClassHash, - ) -> Result<()> { + ) -> ProviderResult<()> { self.state.contract_state.write().entry(address).or_default().class_hash = class_hash; Ok(()) } @@ -485,7 +513,7 @@ impl StateWriter for ForkedProvider { &self, address: ContractAddress, nonce: katana_primitives::contract::Nonce, - ) -> Result<()> { + ) -> ProviderResult<()> { self.state.contract_state.write().entry(address).or_default().nonce = nonce; Ok(()) } diff --git a/crates/katana/storage/provider/src/providers/fork/state.rs b/crates/katana/storage/provider/src/providers/fork/state.rs index 5f9b5482f6..a607f27f6d 100644 --- a/crates/katana/storage/provider/src/providers/fork/state.rs +++ b/crates/katana/storage/provider/src/providers/fork/state.rs @@ -1,6 +1,5 @@ use std::sync::Arc; -use anyhow::Result; use katana_primitives::contract::{ ClassHash, CompiledClassHash, CompiledContractClass, ContractAddress, FlattenedSierraClass, GenericContractInfo, Nonce, StorageKey, StorageValue, @@ -11,6 +10,7 @@ use crate::providers::in_memory::cache::CacheStateDb; use crate::providers::in_memory::state::StateSnapshot; use crate::traits::contract::{ContractClassProvider, ContractInfoProvider}; use crate::traits::state::StateProvider; +use crate::ProviderResult; pub type ForkedStateDb = CacheStateDb; pub type ForkedSnapshot = StateSnapshot; @@ -25,7 +25,7 @@ impl ForkedStateDb { } impl ContractInfoProvider for ForkedStateDb { - fn contract(&self, address: ContractAddress) -> Result> { + fn contract(&self, address: ContractAddress) -> ProviderResult> { if let info @ Some(_) = self.contract_state.read().get(&address).cloned() { return Ok(info); } @@ -34,14 +34,17 @@ impl ContractInfoProvider for ForkedStateDb { } impl StateProvider for ForkedStateDb { - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { if let hash @ Some(_) = self.contract_state.read().get(&address).map(|i| i.class_hash) { return Ok(hash); } StateProvider::class_hash_of_contract(&self.db, address) } - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { if let nonce @ Some(_) = self.contract_state.read().get(&address).map(|i| i.nonce) { return Ok(nonce); } @@ -52,7 +55,7 @@ impl StateProvider for ForkedStateDb { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { if let value @ Some(_) = self.storage.read().get(&address).and_then(|s| s.get(&storage_key)).copied() { @@ -63,7 +66,7 @@ impl StateProvider for ForkedStateDb { } impl ContractClassProvider for CacheStateDb { - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { if let class @ Some(_) = self.shared_contract_classes.sierra_classes.read().get(&hash) { return Ok(class.cloned()); } @@ -73,14 +76,14 @@ impl ContractClassProvider for CacheStateDb { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { if let hash @ Some(_) = self.compiled_class_hashes.read().get(&hash) { return Ok(hash.cloned()); } ContractClassProvider::compiled_class_hash_of_class_hash(&self.db, hash) } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { if let class 
@ Some(_) = self.shared_contract_classes.compiled_classes.read().get(&hash) { return Ok(class.cloned()); } @@ -91,13 +94,13 @@ impl ContractClassProvider for CacheStateDb { pub(super) struct LatestStateProvider(pub(super) Arc); impl ContractInfoProvider for LatestStateProvider { - fn contract(&self, address: ContractAddress) -> Result> { + fn contract(&self, address: ContractAddress) -> ProviderResult> { ContractInfoProvider::contract(&self.0, address) } } impl StateProvider for LatestStateProvider { - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { StateProvider::nonce(&self.0, address) } @@ -105,34 +108,37 @@ impl StateProvider for LatestStateProvider { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { StateProvider::storage(&self.0, address, storage_key) } - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { StateProvider::class_hash_of_contract(&self.0, address) } } impl ContractClassProvider for LatestStateProvider { - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { ContractClassProvider::sierra_class(&self.0, hash) } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { ContractClassProvider::class(&self.0, hash) } fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { ContractClassProvider::compiled_class_hash_of_class_hash(&self.0, hash) } } impl ContractInfoProvider for ForkedSnapshot { - fn contract(&self, address: ContractAddress) -> Result> { + fn contract(&self, address: ContractAddress) -> ProviderResult> { if let info @ Some(_) = self.inner.contract_state.get(&address).cloned() { return Ok(info); } @@ -141,7 +147,7 @@ impl ContractInfoProvider for ForkedSnapshot { } impl StateProvider for ForkedSnapshot { - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { if let nonce @ Some(_) = self.inner.contract_state.get(&address).map(|info| info.nonce) { return Ok(nonce); } @@ -152,7 +158,7 @@ impl StateProvider for ForkedSnapshot { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { if let value @ Some(_) = self.inner.storage.get(&address).and_then(|s| s.get(&storage_key)).copied() { @@ -161,7 +167,10 @@ impl StateProvider for ForkedSnapshot { StateProvider::storage(&self.inner.db, address, storage_key) } - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { if let class_hash @ Some(_) = self.inner.contract_state.get(&address).map(|info| info.class_hash) { @@ -172,7 +181,7 @@ impl StateProvider for ForkedSnapshot { } impl ContractClassProvider for ForkedSnapshot { - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { if self.inner.compiled_class_hashes.get(&hash).is_some() { Ok(self.classes.sierra_classes.read().get(&hash).cloned()) } else { @@ -183,14 +192,14 @@ impl ContractClassProvider for ForkedSnapshot { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { if let hash @ Some(_) = self.inner.compiled_class_hashes.get(&hash).cloned() { return 
Ok(hash); } ContractClassProvider::compiled_class_hash_of_class_hash(&self.inner.db, hash) } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { if self.inner.compiled_class_hashes.get(&hash).is_some() { Ok(self.classes.compiled_classes.read().get(&hash).cloned()) } else { diff --git a/crates/katana/storage/provider/src/providers/in_memory/mod.rs b/crates/katana/storage/provider/src/providers/in_memory/mod.rs index a0f0026086..1d8ddfa018 100644 --- a/crates/katana/storage/provider/src/providers/in_memory/mod.rs +++ b/crates/katana/storage/provider/src/providers/in_memory/mod.rs @@ -4,7 +4,6 @@ pub mod state; use std::ops::RangeInclusive; use std::sync::Arc; -use anyhow::Result; use katana_db::models::block::StoredBlockBodyIndices; use katana_primitives::block::{ Block, BlockHash, BlockHashOrNumber, BlockNumber, BlockWithTxHashes, FinalityStatus, Header, @@ -30,6 +29,7 @@ use crate::traits::state_update::StateUpdateProvider; use crate::traits::transaction::{ ReceiptProvider, TransactionProvider, TransactionStatusProvider, TransactionsProviderExt, }; +use crate::ProviderResult; pub struct InMemoryProvider { storage: RwLock>, @@ -53,27 +53,30 @@ impl Default for InMemoryProvider { } impl BlockHashProvider for InMemoryProvider { - fn latest_hash(&self) -> Result { + fn latest_hash(&self) -> ProviderResult { Ok(self.storage.read().latest_block_hash) } - fn block_hash_by_num(&self, num: BlockNumber) -> Result> { + fn block_hash_by_num(&self, num: BlockNumber) -> ProviderResult> { Ok(self.storage.read().block_hashes.get(&num).cloned()) } } impl BlockNumberProvider for InMemoryProvider { - fn latest_number(&self) -> Result { + fn latest_number(&self) -> ProviderResult { Ok(self.storage.read().latest_block_number) } - fn block_number_by_hash(&self, hash: BlockHash) -> Result> { + fn block_number_by_hash(&self, hash: BlockHash) -> ProviderResult> { Ok(self.storage.read().block_numbers.get(&hash).cloned()) } } impl HeaderProvider for InMemoryProvider { - fn header(&self, id: katana_primitives::block::BlockHashOrNumber) -> Result> { + fn header( + &self, + id: katana_primitives::block::BlockHashOrNumber, + ) -> ProviderResult> { match id { katana_primitives::block::BlockHashOrNumber::Num(num) => { Ok(self.storage.read().block_headers.get(&num).cloned()) @@ -96,7 +99,7 @@ impl HeaderProvider for InMemoryProvider { } impl BlockStatusProvider for InMemoryProvider { - fn block_status(&self, id: BlockHashOrNumber) -> Result> { + fn block_status(&self, id: BlockHashOrNumber) -> ProviderResult> { let num = match id { BlockHashOrNumber::Num(num) => num, BlockHashOrNumber::Hash(hash) => { @@ -111,7 +114,7 @@ impl BlockStatusProvider for InMemoryProvider { } impl BlockProvider for InMemoryProvider { - fn block(&self, id: BlockHashOrNumber) -> Result> { + fn block(&self, id: BlockHashOrNumber) -> ProviderResult> { let block_num = match id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -128,7 +131,10 @@ impl BlockProvider for InMemoryProvider { Ok(Some(Block { header, body })) } - fn block_with_tx_hashes(&self, id: BlockHashOrNumber) -> Result> { + fn block_with_tx_hashes( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { let Some(header) = self.header(id)? 
else { return Ok(None); }; @@ -139,7 +145,7 @@ impl BlockProvider for InMemoryProvider { Ok(Some(katana_primitives::block::BlockWithTxHashes { header, body: tx_hashes })) } - fn blocks_in_range(&self, range: RangeInclusive) -> Result> { + fn blocks_in_range(&self, range: RangeInclusive) -> ProviderResult> { let mut blocks = Vec::new(); for num in range { if let Some(block) = self.block(BlockHashOrNumber::Num(num))? { @@ -149,7 +155,10 @@ impl BlockProvider for InMemoryProvider { Ok(blocks) } - fn block_body_indices(&self, id: BlockHashOrNumber) -> Result> { + fn block_body_indices( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult> { let block_num = match id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -166,7 +175,7 @@ impl BlockProvider for InMemoryProvider { } impl TransactionProvider for InMemoryProvider { - fn transaction_by_hash(&self, hash: TxHash) -> Result> { + fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult> { let tx = self.storage.read().transaction_numbers.get(&hash).and_then(|num| { let transaction = self.storage.read().transactions.get(*num as usize)?.clone(); let hash = *self.storage.read().transaction_hashes.get(num)?; @@ -178,7 +187,7 @@ impl TransactionProvider for InMemoryProvider { fn transactions_by_block( &self, block_id: BlockHashOrNumber, - ) -> Result>> { + ) -> ProviderResult>> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -215,7 +224,7 @@ impl TransactionProvider for InMemoryProvider { &self, block_id: BlockHashOrNumber, idx: u64, - ) -> Result> { + ) -> ProviderResult> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -243,7 +252,10 @@ impl TransactionProvider for InMemoryProvider { Ok(tx) } - fn transaction_count_by_block(&self, block_id: BlockHashOrNumber) -> Result> { + fn transaction_count_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -261,7 +273,7 @@ impl TransactionProvider for InMemoryProvider { fn transaction_block_num_and_hash( &self, hash: TxHash, - ) -> Result> { + ) -> ProviderResult> { let storage_read = self.storage.read(); let Some(number) = storage_read.transaction_numbers.get(&hash) else { return Ok(None) }; @@ -273,7 +285,10 @@ impl TransactionProvider for InMemoryProvider { } impl TransactionsProviderExt for InMemoryProvider { - fn transaction_hashes_in_range(&self, range: std::ops::Range) -> Result> { + fn transaction_hashes_in_range( + &self, + range: std::ops::Range, + ) -> ProviderResult> { let mut hashes = Vec::new(); for num in range { if let Some(hash) = self.storage.read().transaction_hashes.get(&num).cloned() { @@ -285,7 +300,7 @@ impl TransactionsProviderExt for InMemoryProvider { } impl TransactionStatusProvider for InMemoryProvider { - fn transaction_status(&self, hash: TxHash) -> Result> { + fn transaction_status(&self, hash: TxHash) -> ProviderResult> { let tx_block = self .storage .read() @@ -303,7 +318,7 @@ impl TransactionStatusProvider for InMemoryProvider { } impl ReceiptProvider for InMemoryProvider { - fn receipt_by_hash(&self, hash: TxHash) -> Result> { + fn receipt_by_hash(&self, hash: 
TxHash) -> ProviderResult> { let receipt = self .storage .read() @@ -313,7 +328,10 @@ impl ReceiptProvider for InMemoryProvider { Ok(receipt) } - fn receipts_by_block(&self, block_id: BlockHashOrNumber) -> Result>> { + fn receipts_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -333,7 +351,7 @@ impl ReceiptProvider for InMemoryProvider { } impl StateUpdateProvider for InMemoryProvider { - fn state_update(&self, block_id: BlockHashOrNumber) -> Result> { + fn state_update(&self, block_id: BlockHashOrNumber) -> ProviderResult> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.storage.read().block_numbers.get(&hash).cloned(), @@ -346,11 +364,14 @@ impl StateUpdateProvider for InMemoryProvider { } impl StateFactoryProvider for InMemoryProvider { - fn latest(&self) -> Result> { + fn latest(&self) -> ProviderResult> { Ok(Box::new(LatestStateProvider(Arc::clone(&self.state)))) } - fn historical(&self, block_id: BlockHashOrNumber) -> Result>> { + fn historical( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>> { let block_num = match block_id { BlockHashOrNumber::Num(num) => Some(num), BlockHashOrNumber::Hash(hash) => self.block_number_by_hash(hash)?, @@ -374,7 +395,7 @@ impl StateRootProvider for InMemoryProvider { fn state_root( &self, block_id: BlockHashOrNumber, - ) -> Result> { + ) -> ProviderResult> { let state_root = self.block_number_by_id(block_id)?.and_then(|num| { self.storage.read().block_headers.get(&num).map(|header| header.state_root) }); @@ -388,7 +409,7 @@ impl BlockWriter for InMemoryProvider { block: SealedBlockWithStatus, states: StateUpdatesWithDeclaredClasses, receipts: Vec, - ) -> Result<()> { + ) -> ProviderResult<()> { let mut storage = self.storage.write(); let block_hash = block.block.header.hash; @@ -438,12 +459,16 @@ impl BlockWriter for InMemoryProvider { } impl ContractClassWriter for InMemoryProvider { - fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> Result<()> { + fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> ProviderResult<()> { self.state.shared_contract_classes.compiled_classes.write().insert(hash, class); Ok(()) } - fn set_sierra_class(&self, hash: ClassHash, sierra: FlattenedSierraClass) -> Result<()> { + fn set_sierra_class( + &self, + hash: ClassHash, + sierra: FlattenedSierraClass, + ) -> ProviderResult<()> { self.state.shared_contract_classes.sierra_classes.write().insert(hash, sierra); Ok(()) } @@ -452,7 +477,7 @@ impl ContractClassWriter for InMemoryProvider { &self, hash: ClassHash, compiled_hash: CompiledClassHash, - ) -> Result<()> { + ) -> ProviderResult<()> { self.state.compiled_class_hashes.write().insert(hash, compiled_hash); Ok(()) } @@ -464,7 +489,7 @@ impl StateWriter for InMemoryProvider { address: ContractAddress, storage_key: katana_primitives::contract::StorageKey, storage_value: katana_primitives::contract::StorageValue, - ) -> Result<()> { + ) -> ProviderResult<()> { self.state.storage.write().entry(address).or_default().insert(storage_key, storage_value); Ok(()) } @@ -473,7 +498,7 @@ impl StateWriter for InMemoryProvider { &self, address: ContractAddress, class_hash: ClassHash, - ) -> Result<()> { + ) -> ProviderResult<()> { self.state.contract_state.write().entry(address).or_default().class_hash = class_hash; Ok(()) } @@ 
-482,7 +507,7 @@ impl StateWriter for InMemoryProvider { &self, address: ContractAddress, nonce: katana_primitives::contract::Nonce, - ) -> Result<()> { + ) -> ProviderResult<()> { self.state.contract_state.write().entry(address).or_default().nonce = nonce; Ok(()) } diff --git a/crates/katana/storage/provider/src/providers/in_memory/state.rs b/crates/katana/storage/provider/src/providers/in_memory/state.rs index c12c3bdfec..aba2343a74 100644 --- a/crates/katana/storage/provider/src/providers/in_memory/state.rs +++ b/crates/katana/storage/provider/src/providers/in_memory/state.rs @@ -10,7 +10,7 @@ use katana_primitives::contract::{ use super::cache::{CacheSnapshotWithoutClasses, CacheStateDb, SharedContractClasses}; use crate::traits::contract::{ContractClassProvider, ContractInfoProvider}; use crate::traits::state::StateProvider; -use crate::Result; +use crate::ProviderResult; pub struct StateSnapshot { // because the classes are shared between snapshots, when trying to fetch check the compiled @@ -118,14 +118,14 @@ impl InMemoryStateDb { } impl ContractInfoProvider for InMemorySnapshot { - fn contract(&self, address: ContractAddress) -> Result> { + fn contract(&self, address: ContractAddress) -> ProviderResult> { let info = self.inner.contract_state.get(&address).cloned(); Ok(info) } } impl StateProvider for InMemorySnapshot { - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { let nonce = ContractInfoProvider::contract(&self, address)?.map(|i| i.nonce); Ok(nonce) } @@ -134,19 +134,22 @@ impl StateProvider for InMemorySnapshot { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { let value = self.inner.storage.get(&address).and_then(|s| s.get(&storage_key)).copied(); Ok(value) } - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { let class_hash = ContractInfoProvider::contract(&self, address)?.map(|i| i.class_hash); Ok(class_hash) } } impl ContractClassProvider for InMemorySnapshot { - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { if self.compiled_class_hash_of_class_hash(hash)?.is_some() { Ok(self.classes.sierra_classes.read().get(&hash).cloned()) } else { @@ -154,7 +157,7 @@ impl ContractClassProvider for InMemorySnapshot { } } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { if self.compiled_class_hash_of_class_hash(hash)?.is_some() { Ok(self.classes.compiled_classes.read().get(&hash).cloned()) } else { @@ -165,7 +168,7 @@ impl ContractClassProvider for InMemorySnapshot { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { let hash = self.inner.compiled_class_hashes.get(&hash).cloned(); Ok(hash) } @@ -174,14 +177,14 @@ impl ContractClassProvider for InMemorySnapshot { pub(super) struct LatestStateProvider(pub(super) Arc); impl ContractInfoProvider for LatestStateProvider { - fn contract(&self, address: ContractAddress) -> Result> { + fn contract(&self, address: ContractAddress) -> ProviderResult> { let info = self.0.contract_state.read().get(&address).cloned(); Ok(info) } } impl StateProvider for LatestStateProvider { - fn nonce(&self, address: ContractAddress) -> Result> { + fn nonce(&self, address: ContractAddress) -> ProviderResult> { let nonce = 
ContractInfoProvider::contract(&self, address)?.map(|i| i.nonce); Ok(nonce) } @@ -190,24 +193,27 @@ impl StateProvider for LatestStateProvider { &self, address: ContractAddress, storage_key: StorageKey, - ) -> Result> { + ) -> ProviderResult> { let value = self.0.storage.read().get(&address).and_then(|s| s.get(&storage_key)).copied(); Ok(value) } - fn class_hash_of_contract(&self, address: ContractAddress) -> Result> { + fn class_hash_of_contract( + &self, + address: ContractAddress, + ) -> ProviderResult> { let class_hash = ContractInfoProvider::contract(&self, address)?.map(|i| i.class_hash); Ok(class_hash) } } impl ContractClassProvider for LatestStateProvider { - fn sierra_class(&self, hash: ClassHash) -> Result> { + fn sierra_class(&self, hash: ClassHash) -> ProviderResult> { let class = self.0.shared_contract_classes.sierra_classes.read().get(&hash).cloned(); Ok(class) } - fn class(&self, hash: ClassHash) -> Result> { + fn class(&self, hash: ClassHash) -> ProviderResult> { let class = self.0.shared_contract_classes.compiled_classes.read().get(&hash).cloned(); Ok(class) } @@ -215,7 +221,7 @@ impl ContractClassProvider for LatestStateProvider { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result> { + ) -> ProviderResult> { let hash = self.0.compiled_class_hashes.read().get(&hash).cloned(); Ok(hash) } diff --git a/crates/katana/storage/provider/src/traits/block.rs b/crates/katana/storage/provider/src/traits/block.rs index 8b7dfdb992..04a7a47ab5 100644 --- a/crates/katana/storage/provider/src/traits/block.rs +++ b/crates/katana/storage/provider/src/traits/block.rs @@ -1,6 +1,5 @@ use std::ops::RangeInclusive; -use anyhow::Result; use katana_db::models::block::StoredBlockBodyIndices; use katana_primitives::block::{ Block, BlockHash, BlockHashOrNumber, BlockIdOrTag, BlockNumber, BlockTag, BlockWithTxHashes, @@ -10,11 +9,12 @@ use katana_primitives::receipt::Receipt; use katana_primitives::state::StateUpdatesWithDeclaredClasses; use super::transaction::{TransactionProvider, TransactionsProviderExt}; +use crate::ProviderResult; #[auto_impl::auto_impl(&, Box, Arc)] pub trait BlockIdReader: BlockNumberProvider + Send + Sync { /// Converts the block tag into its block number. - fn convert_block_id(&self, id: BlockIdOrTag) -> Result> { + fn convert_block_id(&self, id: BlockIdOrTag) -> ProviderResult> { match id { BlockIdOrTag::Number(number) => Ok(Some(number)), BlockIdOrTag::Hash(hash) => BlockNumberProvider::block_number_by_hash(self, hash), @@ -34,7 +34,7 @@ pub trait BlockIdReader: BlockNumberProvider + Send + Sync { } /// Retrieves the pending block number and hash. - fn pending_block_id(&self) -> Result> { + fn pending_block_id(&self) -> ProviderResult> { Ok(None) // Returns `None` for now } } @@ -44,13 +44,13 @@ pub trait BlockHashProvider: Send + Sync { /// Retrieves the latest block hash. /// /// There should always be at least one block (genesis) in the chain. - fn latest_hash(&self) -> Result; + fn latest_hash(&self) -> ProviderResult; /// Retrieves the block hash given its id. - fn block_hash_by_num(&self, num: BlockNumber) -> Result>; + fn block_hash_by_num(&self, num: BlockNumber) -> ProviderResult>; /// Retrieves the block hash given its id. 
- fn block_hash_by_id(&self, id: BlockHashOrNumber) -> Result> { + fn block_hash_by_id(&self, id: BlockHashOrNumber) -> ProviderResult> { match id { BlockHashOrNumber::Hash(hash) => Ok(Some(hash)), BlockHashOrNumber::Num(number) => self.block_hash_by_num(number), @@ -63,13 +63,13 @@ pub trait BlockNumberProvider: Send + Sync { /// Retrieves the latest block number. /// /// There should always be at least one block (genesis) in the chain. - fn latest_number(&self) -> Result; + fn latest_number(&self) -> ProviderResult; /// Retrieves the block number given its id. - fn block_number_by_hash(&self, hash: BlockHash) -> Result>; + fn block_number_by_hash(&self, hash: BlockHash) -> ProviderResult>; /// Retrieves the block number given its id. - fn block_number_by_id(&self, id: BlockHashOrNumber) -> Result> { + fn block_number_by_id(&self, id: BlockHashOrNumber) -> ProviderResult> { match id { BlockHashOrNumber::Num(number) => Ok(Some(number)), BlockHashOrNumber::Hash(hash) => self.block_number_by_hash(hash), @@ -80,13 +80,13 @@ pub trait BlockNumberProvider: Send + Sync { #[auto_impl::auto_impl(&, Box, Arc)] pub trait HeaderProvider: Send + Sync { /// Retrieves the latest header by its block id. - fn header(&self, id: BlockHashOrNumber) -> Result>; + fn header(&self, id: BlockHashOrNumber) -> ProviderResult>; - fn header_by_hash(&self, hash: BlockHash) -> Result> { + fn header_by_hash(&self, hash: BlockHash) -> ProviderResult> { self.header(hash.into()) } - fn header_by_number(&self, number: BlockNumber) -> Result> { + fn header_by_number(&self, number: BlockNumber) -> ProviderResult> { self.header(number.into()) } } @@ -94,7 +94,7 @@ pub trait HeaderProvider: Send + Sync { #[auto_impl::auto_impl(&, Box, Arc)] pub trait BlockStatusProvider: Send + Sync { /// Retrieves the finality status of a block. - fn block_status(&self, id: BlockHashOrNumber) -> Result>; + fn block_status(&self, id: BlockHashOrNumber) -> ProviderResult>; } #[auto_impl::auto_impl(&, Box, Arc)] @@ -109,24 +109,30 @@ pub trait BlockProvider: + Sync { /// Returns a block by its id. - fn block(&self, id: BlockHashOrNumber) -> Result>; + fn block(&self, id: BlockHashOrNumber) -> ProviderResult>; /// Returns a block with only the transaction hashes. - fn block_with_tx_hashes(&self, id: BlockHashOrNumber) -> Result>; + fn block_with_tx_hashes( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult>; /// Returns all available blocks in the given range. - fn blocks_in_range(&self, range: RangeInclusive) -> Result>; + fn blocks_in_range(&self, range: RangeInclusive) -> ProviderResult>; /// Returns the block body indices of a block. - fn block_body_indices(&self, id: BlockHashOrNumber) -> Result>; + fn block_body_indices( + &self, + id: BlockHashOrNumber, + ) -> ProviderResult>; /// Returns the block based on its hash. - fn block_by_hash(&self, hash: BlockHash) -> Result> { + fn block_by_hash(&self, hash: BlockHash) -> ProviderResult> { self.block(hash.into()) } /// Returns the block based on its number. 
- fn block_by_number(&self, number: BlockNumber) -> Result> { + fn block_by_number(&self, number: BlockNumber) -> ProviderResult> { self.block(number.into()) } } @@ -139,5 +145,5 @@ pub trait BlockWriter: Send + Sync { block: SealedBlockWithStatus, states: StateUpdatesWithDeclaredClasses, receipts: Vec, - ) -> Result<()>; + ) -> ProviderResult<()>; } diff --git a/crates/katana/storage/provider/src/traits/contract.rs b/crates/katana/storage/provider/src/traits/contract.rs index deb172b627..d8ea80ccb5 100644 --- a/crates/katana/storage/provider/src/traits/contract.rs +++ b/crates/katana/storage/provider/src/traits/contract.rs @@ -1,13 +1,14 @@ -use anyhow::Result; use katana_primitives::contract::{ ClassHash, CompiledClassHash, CompiledContractClass, ContractAddress, FlattenedSierraClass, GenericContractInfo, }; +use crate::ProviderResult; + #[auto_impl::auto_impl(&, Box, Arc)] pub trait ContractInfoProvider: Send + Sync { /// Returns the contract information given its address. - fn contract(&self, address: ContractAddress) -> Result>; + fn contract(&self, address: ContractAddress) -> ProviderResult>; } /// A provider trait for retrieving contract class related information. @@ -17,13 +18,13 @@ pub trait ContractClassProvider: Send + Sync { fn compiled_class_hash_of_class_hash( &self, hash: ClassHash, - ) -> Result>; + ) -> ProviderResult>; /// Returns the compiled class definition of a contract class given its class hash. - fn class(&self, hash: ClassHash) -> Result>; + fn class(&self, hash: ClassHash) -> ProviderResult>; /// Retrieves the Sierra class definition of a contract class given its class hash. - fn sierra_class(&self, hash: ClassHash) -> Result>; + fn sierra_class(&self, hash: ClassHash) -> ProviderResult>; } // TEMP: added mainly for compatibility reason. might be removed in the future. @@ -34,11 +35,12 @@ pub trait ContractClassWriter: Send + Sync { &self, hash: ClassHash, compiled_hash: CompiledClassHash, - ) -> Result<()>; + ) -> ProviderResult<()>; /// Returns the compiled class definition of a contract class given its class hash. - fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> Result<()>; + fn set_class(&self, hash: ClassHash, class: CompiledContractClass) -> ProviderResult<()>; /// Retrieves the Sierra class definition of a contract class given its class hash. 
-    fn set_sierra_class(&self, hash: ClassHash, sierra: FlattenedSierraClass) -> Result<()>;
+    fn set_sierra_class(&self, hash: ClassHash, sierra: FlattenedSierraClass)
+        -> ProviderResult<()>;
 }
diff --git a/crates/katana/storage/provider/src/traits/env.rs b/crates/katana/storage/provider/src/traits/env.rs
index 73cc26cca9..a0565d733c 100644
--- a/crates/katana/storage/provider/src/traits/env.rs
+++ b/crates/katana/storage/provider/src/traits/env.rs
@@ -1,8 +1,9 @@
-use anyhow::Result;
 use katana_primitives::block::BlockHashOrNumber;
 use katana_primitives::env::BlockEnv;
 
+use crate::ProviderResult;
+
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait BlockEnvProvider: Send + Sync {
-    fn env_at(&self, block_id: BlockHashOrNumber) -> Result<BlockEnv>;
+    fn env_at(&self, block_id: BlockHashOrNumber) -> ProviderResult<BlockEnv>;
 }
diff --git a/crates/katana/storage/provider/src/traits/state.rs b/crates/katana/storage/provider/src/traits/state.rs
index d201e33c59..279769d71b 100644
--- a/crates/katana/storage/provider/src/traits/state.rs
+++ b/crates/katana/storage/provider/src/traits/state.rs
@@ -1,47 +1,51 @@
-use anyhow::Result;
 use katana_primitives::block::BlockHashOrNumber;
 use katana_primitives::contract::{ClassHash, ContractAddress, Nonce, StorageKey, StorageValue};
 use katana_primitives::FieldElement;
 
 use super::contract::ContractClassProvider;
+use crate::ProviderResult;
 
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait StateRootProvider: Send + Sync {
     /// Retrieves the state root of a block.
-    fn state_root(&self, block_id: BlockHashOrNumber) -> Result<Option<FieldElement>>;
+    fn state_root(&self, block_id: BlockHashOrNumber) -> ProviderResult<Option<FieldElement>>;
 }
 
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait StateProvider: ContractClassProvider + Send + Sync {
     /// Returns the nonce of a contract.
-    fn nonce(&self, address: ContractAddress) -> Result<Option<Nonce>>;
+    fn nonce(&self, address: ContractAddress) -> ProviderResult<Option<Nonce>>;
 
     /// Returns the value of a contract storage.
     fn storage(
         &self,
         address: ContractAddress,
         storage_key: StorageKey,
-    ) -> Result<Option<StorageValue>>;
+    ) -> ProviderResult<Option<StorageValue>>;
 
     /// Returns the class hash of a contract.
-    fn class_hash_of_contract(&self, address: ContractAddress) -> Result<Option<ClassHash>>;
+    fn class_hash_of_contract(&self, address: ContractAddress)
+        -> ProviderResult<Option<ClassHash>>;
 }
 
 /// A type which can create [`StateProvider`] for states at a particular block.
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait StateFactoryProvider: Send + Sync {
     /// Returns a state provider for retrieving the latest state.
-    fn latest(&self) -> Result<Box<dyn StateProvider>>;
+    fn latest(&self) -> ProviderResult<Box<dyn StateProvider>>;
 
     /// Returns a state provider for retrieving historical state at the given block.
-    fn historical(&self, block_id: BlockHashOrNumber) -> Result<Option<Box<dyn StateProvider>>>;
+    fn historical(
+        &self,
+        block_id: BlockHashOrNumber,
+    ) -> ProviderResult<Option<Box<dyn StateProvider>>>;
 }
 
 // TEMP: added mainly for compatibility reason. it might be removed in the future.
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait StateWriter: Send + Sync {
     /// Sets the nonce of a contract.
-    fn set_nonce(&self, address: ContractAddress, nonce: Nonce) -> Result<()>;
+    fn set_nonce(&self, address: ContractAddress, nonce: Nonce) -> ProviderResult<()>;
 
     /// Sets the value of a contract storage.
     fn set_storage(
@@ -49,12 +53,12 @@ pub trait StateWriter: Send + Sync {
         address: ContractAddress,
         storage_key: StorageKey,
         storage_value: StorageValue,
-    ) -> Result<()>;
+    ) -> ProviderResult<()>;
 
     /// Sets the class hash of a contract.
     fn set_class_hash_of_contract(
         &self,
         address: ContractAddress,
         class_hash: ClassHash,
-    ) -> Result<()>;
+    ) -> ProviderResult<()>;
 }
diff --git a/crates/katana/storage/provider/src/traits/state_update.rs b/crates/katana/storage/provider/src/traits/state_update.rs
index ddbac3e60f..762f4a4f05 100644
--- a/crates/katana/storage/provider/src/traits/state_update.rs
+++ b/crates/katana/storage/provider/src/traits/state_update.rs
@@ -1,9 +1,10 @@
-use anyhow::Result;
 use katana_primitives::block::BlockHashOrNumber;
 use katana_primitives::state::StateUpdates;
 
+use crate::ProviderResult;
+
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait StateUpdateProvider: Send + Sync {
     /// Returns the state update at the given block.
-    fn state_update(&self, block_id: BlockHashOrNumber) -> Result<Option<StateUpdates>>;
+    fn state_update(&self, block_id: BlockHashOrNumber) -> ProviderResult<Option<StateUpdates>>;
 }
diff --git a/crates/katana/storage/provider/src/traits/transaction.rs b/crates/katana/storage/provider/src/traits/transaction.rs
index c7dbee6be1..86b62e8065 100644
--- a/crates/katana/storage/provider/src/traits/transaction.rs
+++ b/crates/katana/storage/provider/src/traits/transaction.rs
@@ -1,37 +1,43 @@
 use std::ops::Range;
 
-use anyhow::Result;
 use katana_primitives::block::{BlockHash, BlockHashOrNumber, BlockNumber, FinalityStatus};
 use katana_primitives::receipt::Receipt;
 use katana_primitives::transaction::{TxHash, TxNumber, TxWithHash};
 
+use crate::ProviderResult;
+
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait TransactionProvider: Send + Sync {
     /// Returns a transaction given its hash.
-    fn transaction_by_hash(&self, hash: TxHash) -> Result<Option<TxWithHash>>;
+    fn transaction_by_hash(&self, hash: TxHash) -> ProviderResult<Option<TxWithHash>>;
 
     /// Returns all the transactions for a given block.
-    fn transactions_by_block(&self, block_id: BlockHashOrNumber)
-        -> Result<Option<Vec<TxWithHash>>>;
+    fn transactions_by_block(
+        &self,
+        block_id: BlockHashOrNumber,
+    ) -> ProviderResult<Option<Vec<TxWithHash>>>;
 
     /// Returns the transaction at the given block and its exact index in the block.
     fn transaction_by_block_and_idx(
         &self,
         block_id: BlockHashOrNumber,
         idx: u64,
-    ) -> Result<Option<TxWithHash>>;
+    ) -> ProviderResult<Option<TxWithHash>>;
 
     /// Returns the total number of transactions in a block.
-    fn transaction_count_by_block(&self, block_id: BlockHashOrNumber) -> Result<Option<u64>>;
+    fn transaction_count_by_block(
+        &self,
+        block_id: BlockHashOrNumber,
+    ) -> ProviderResult<Option<u64>>;
 
     /// Returns the block number and hash of a transaction.
     fn transaction_block_num_and_hash(
         &self,
         hash: TxHash,
-    ) -> Result<Option<(BlockNumber, BlockHash)>>;
+    ) -> ProviderResult<Option<(BlockNumber, BlockHash)>>;
 
     /// Retrieves all the transactions at the given range.
-    fn transaction_in_range(&self, _range: Range<TxNumber>) -> Result<Vec<TxWithHash>> {
+    fn transaction_in_range(&self, _range: Range<TxNumber>) -> ProviderResult<Vec<TxWithHash>> {
         todo!()
     }
 }
@@ -39,20 +45,23 @@ pub trait TransactionProvider: Send + Sync {
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait TransactionsProviderExt: TransactionProvider + Send + Sync {
     /// Retrieves the tx hashes for the given range of tx numbers.
-    fn transaction_hashes_in_range(&self, range: Range<TxNumber>) -> Result<Vec<TxHash>>;
+    fn transaction_hashes_in_range(&self, range: Range<TxNumber>) -> ProviderResult<Vec<TxHash>>;
 }
 
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait TransactionStatusProvider: Send + Sync {
     /// Retrieves the finality status of a transaction.
-    fn transaction_status(&self, hash: TxHash) -> Result<Option<FinalityStatus>>;
+    fn transaction_status(&self, hash: TxHash) -> ProviderResult<Option<FinalityStatus>>;
 }
 
 #[auto_impl::auto_impl(&, Box, Arc)]
 pub trait ReceiptProvider: Send + Sync {
     /// Returns the transaction receipt given a transaction hash.
- fn receipt_by_hash(&self, hash: TxHash) -> Result>; + fn receipt_by_hash(&self, hash: TxHash) -> ProviderResult>; /// Returns all the receipts for a given block. - fn receipts_by_block(&self, block_id: BlockHashOrNumber) -> Result>>; + fn receipts_by_block( + &self, + block_id: BlockHashOrNumber, + ) -> ProviderResult>>; } diff --git a/crates/katana/storage/provider/tests/fixtures.rs b/crates/katana/storage/provider/tests/fixtures.rs index 3c5c6af25a..eefb120a42 100644 --- a/crates/katana/storage/provider/tests/fixtures.rs +++ b/crates/katana/storage/provider/tests/fixtures.rs @@ -51,7 +51,8 @@ pub fn fork_provider( #[default(0)] block_num: u64, ) -> BlockchainProvider { let provider = JsonRpcClient::new(HttpTransport::new(Url::parse(rpc).unwrap())); - let provider = ForkedProvider::new(Arc::new(provider), BlockHashOrNumber::Num(block_num)); + let provider = + ForkedProvider::new(Arc::new(provider), BlockHashOrNumber::Num(block_num)).unwrap(); BlockchainProvider::new(provider) } @@ -60,7 +61,7 @@ pub fn fork_provider_with_spawned_fork_network( #[default(0)] block_num: u64, ) -> BlockchainProvider { let provider = - ForkedProvider::new(FORKED_PROVIDER.1.clone(), BlockHashOrNumber::Num(block_num)); + ForkedProvider::new(FORKED_PROVIDER.1.clone(), BlockHashOrNumber::Num(block_num)).unwrap(); BlockchainProvider::new(provider) } From 3287ffa0090847a328479cfe5922650336389edb Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Tue, 16 Jan 2024 15:52:49 +0900 Subject: [PATCH 22/33] refactor(katana): historical block execution context (#1436) Resolves #1347 --- crates/dojo-test-utils/src/sequencer.rs | 6 +- crates/katana/core/src/backend/config.rs | 39 ++--- crates/katana/core/src/backend/mod.rs | 153 ++++++++++++------ crates/katana/core/src/backend/storage.rs | 75 +++------ crates/katana/core/src/constants.rs | 4 +- crates/katana/core/src/env.rs | 38 ----- crates/katana/core/src/sequencer.rs | 72 +++++++-- .../katana/core/src/service/block_producer.rs | 134 +++++++++------ crates/katana/core/src/service/mod.rs | 12 +- crates/katana/core/tests/backend.rs | 41 ----- crates/katana/core/tests/sequencer.rs | 41 ++++- crates/katana/executor/src/blockifier/mod.rs | 18 ++- .../katana/executor/src/blockifier/utils.rs | 29 ++++ crates/katana/primitives/src/block.rs | 8 +- crates/katana/primitives/src/env.rs | 26 ++- crates/katana/rpc/rpc-types/src/block.rs | 16 +- crates/katana/rpc/src/katana.rs | 3 +- crates/katana/rpc/src/starknet.rs | 145 +++++++++-------- crates/katana/src/args.rs | 16 +- crates/katana/src/main.rs | 2 +- crates/katana/storage/provider/src/lib.rs | 11 ++ .../storage/provider/src/providers/db/mod.rs | 15 ++ .../provider/src/providers/fork/mod.rs | 13 ++ .../provider/src/providers/in_memory/mod.rs | 13 ++ .../storage/provider/src/traits/block.rs | 4 +- .../katana/storage/provider/src/traits/env.rs | 5 +- crates/katana/storage/provider/tests/block.rs | 15 +- 27 files changed, 568 insertions(+), 386 deletions(-) delete mode 100644 crates/katana/core/tests/backend.rs diff --git a/crates/dojo-test-utils/src/sequencer.rs b/crates/dojo-test-utils/src/sequencer.rs index fcb83c6163..9d44119b75 100644 --- a/crates/dojo-test-utils/src/sequencer.rs +++ b/crates/dojo-test-utils/src/sequencer.rs @@ -31,7 +31,11 @@ pub struct TestSequencer { impl TestSequencer { pub async fn start(config: SequencerConfig, starknet_config: StarknetConfig) -> Self { - let sequencer = Arc::new(KatanaSequencer::new(config, starknet_config).await); + let sequencer = Arc::new( + KatanaSequencer::new(config, starknet_config) + 
.await + .expect("Failed to create sequencer"), + ); let handle = spawn( Arc::clone(&sequencer), diff --git a/crates/katana/core/src/backend/config.rs b/crates/katana/core/src/backend/config.rs index 3a0bba627c..d96e54066d 100644 --- a/crates/katana/core/src/backend/config.rs +++ b/crates/katana/core/src/backend/config.rs @@ -1,13 +1,10 @@ -use blockifier::block_context::{BlockContext, FeeTokenAddresses, GasPrices}; +use katana_primitives::block::GasPrices; use katana_primitives::chain::ChainId; -use starknet_api::block::{BlockNumber, BlockTimestamp}; +use katana_primitives::env::BlockEnv; use url::Url; -use crate::constants::{ - DEFAULT_GAS_PRICE, DEFAULT_INVOKE_MAX_STEPS, DEFAULT_VALIDATE_MAX_STEPS, FEE_TOKEN_ADDRESS, - SEQUENCER_ADDRESS, -}; -use crate::env::{get_default_vm_resource_fee_cost, BlockContextGenerator}; +use crate::constants::{DEFAULT_GAS_PRICE, DEFAULT_INVOKE_MAX_STEPS, DEFAULT_VALIDATE_MAX_STEPS}; +use crate::env::BlockContextGenerator; #[derive(Debug, Clone)] pub struct StarknetConfig { @@ -21,28 +18,10 @@ pub struct StarknetConfig { } impl StarknetConfig { - pub fn block_context(&self) -> BlockContext { - BlockContext { - block_number: BlockNumber::default(), - chain_id: self.env.chain_id.into(), - block_timestamp: BlockTimestamp::default(), - sequencer_address: (*SEQUENCER_ADDRESS).into(), - // As the fee has two currencies, we also have to adjust their addresses. - // https://github.com/starkware-libs/blockifier/blob/51b343fe38139a309a69b2482f4b484e8caa5edf/crates/blockifier/src/block_context.rs#L34 - fee_token_addresses: FeeTokenAddresses { - eth_fee_token_address: (*FEE_TOKEN_ADDRESS).into(), - strk_fee_token_address: Default::default(), - }, - vm_resource_fee_cost: get_default_vm_resource_fee_cost().into(), - // Gas prices are dual too. 
- // https://github.com/starkware-libs/blockifier/blob/51b343fe38139a309a69b2482f4b484e8caa5edf/crates/blockifier/src/block_context.rs#L49 - gas_prices: GasPrices { - eth_l1_gas_price: self.env.gas_price, - strk_l1_gas_price: Default::default(), - }, - validate_max_n_steps: self.env.validate_max_steps, - invoke_tx_max_n_steps: self.env.invoke_max_steps, - max_recursion_depth: 1000, + pub fn block_env(&self) -> BlockEnv { + BlockEnv { + l1_gas_prices: GasPrices { eth: self.env.gas_price, ..Default::default() }, + ..Default::default() } } @@ -68,7 +47,7 @@ impl Default for StarknetConfig { #[derive(Debug, Clone)] pub struct Environment { pub chain_id: ChainId, - pub gas_price: u128, + pub gas_price: u64, pub invoke_max_steps: u32, pub validate_max_steps: u32, } diff --git a/crates/katana/core/src/backend/mod.rs b/crates/katana/core/src/backend/mod.rs index 61c25b65e5..f6f303bd16 100644 --- a/crates/katana/core/src/backend/mod.rs +++ b/crates/katana/core/src/backend/mod.rs @@ -1,11 +1,10 @@ use std::sync::Arc; -use blockifier::block_context::BlockContext; use katana_primitives::block::{ Block, FinalityStatus, GasPrices, Header, PartialHeader, SealedBlockWithStatus, }; use katana_primitives::chain::ChainId; -use katana_primitives::contract::ContractAddress; +use katana_primitives::env::{BlockEnv, CfgEnv, FeeTokenAddressses}; use katana_primitives::receipt::Receipt; use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_primitives::transaction::TxWithHash; @@ -20,7 +19,6 @@ use starknet::core::types::{BlockId, BlockStatus, MaybePendingBlockWithTxHashes} use starknet::core::utils::parse_cairo_short_string; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::{JsonRpcClient, Provider}; -use starknet_api::block::{BlockNumber, BlockTimestamp}; use tracing::{info, trace}; pub mod config; @@ -30,20 +28,19 @@ pub mod storage; use self::config::StarknetConfig; use self::storage::Blockchain; use crate::accounts::{Account, DevAccountGenerator}; -use crate::constants::DEFAULT_PREFUNDED_ACCOUNT_BALANCE; -use crate::env::{BlockContextGenerator, Env}; -use crate::service::block_producer::MinedBlockOutcome; +use crate::constants::{DEFAULT_PREFUNDED_ACCOUNT_BALANCE, FEE_TOKEN_ADDRESS, MAX_RECURSION_DEPTH}; +use crate::env::{get_default_vm_resource_fee_cost, BlockContextGenerator}; +use crate::service::block_producer::{BlockProductionError, MinedBlockOutcome}; use crate::utils::get_current_timestamp; pub struct Backend { /// The config used to generate the backend. - pub config: RwLock, + pub config: StarknetConfig, /// stores all block related data in memory pub blockchain: Blockchain, /// The chain id. pub chain_id: ChainId, - /// The chain environment values. - pub env: Arc>, + /// The block context generator. 
pub block_context_generator: RwLock, /// Prefunded dev accounts pub accounts: Vec, @@ -51,7 +48,7 @@ pub struct Backend { impl Backend { pub async fn new(config: StarknetConfig) -> Self { - let mut block_context = config.block_context(); + let mut block_env = config.block_env(); let block_context_generator = config.block_context_generator(); let accounts = DevAccountGenerator::new(config.total_accounts) @@ -80,11 +77,9 @@ impl Backend { panic!("block to be forked is a pending block") }; - block_context.block_number = BlockNumber(block.block_number); - block_context.block_timestamp = BlockTimestamp(block.timestamp); - block_context.sequencer_address = ContractAddress(block.sequencer_address).into(); - block_context.chain_id = - starknet_api::core::ChainId(parse_cairo_short_string(&forked_chain_id).unwrap()); + block_env.number = block.block_number; + block_env.timestamp = block.timestamp; + block_env.sequencer_address = block.sequencer_address.into(); trace!( target: "backend", @@ -98,7 +93,7 @@ impl Backend { ForkedProvider::new(provider, forked_block_num.into()).unwrap(), block.block_hash, block.parent_hash, - &block_context, + &block_env, block.new_root, match block.status { BlockStatus::AcceptedOnL1 => FinalityStatus::AcceptedOnL1, @@ -110,14 +105,12 @@ impl Backend { (blockchain, forked_chain_id.into()) } else { - let blockchain = Blockchain::new_with_genesis(InMemoryProvider::new(), &block_context) + let blockchain = Blockchain::new_with_genesis(InMemoryProvider::new(), &block_env) .expect("able to create blockchain from genesis block"); (blockchain, config.env.chain_id) }; - let env = Env { block: block_context }; - for acc in &accounts { acc.deploy_and_fund(blockchain.provider()) .expect("should be able to deploy and fund dev account"); @@ -127,8 +120,7 @@ impl Backend { chain_id, accounts, blockchain, - config: RwLock::new(config), - env: Arc::new(RwLock::new(env)), + config, block_context_generator: RwLock::new(block_context_generator), } } @@ -139,38 +131,38 @@ impl Backend { /// is running in `interval` mining mode. 
pub fn mine_pending_block( &self, + block_env: &BlockEnv, tx_receipt_pairs: Vec<(TxWithHash, Receipt)>, state_updates: StateUpdatesWithDeclaredClasses, - ) -> (MinedBlockOutcome, Box) { - let block_context = self.env.read().block.clone(); - let outcome = self.do_mine_block(block_context, tx_receipt_pairs, state_updates); - let new_state = StateFactoryProvider::latest(&self.blockchain.provider()).unwrap(); - (outcome, new_state) + ) -> Result<(MinedBlockOutcome, Box), BlockProductionError> { + let outcome = self.do_mine_block(block_env, tx_receipt_pairs, state_updates)?; + let new_state = StateFactoryProvider::latest(&self.blockchain.provider())?; + Ok((outcome, new_state)) } pub fn do_mine_block( &self, - block_context: BlockContext, + block_env: &BlockEnv, tx_receipt_pairs: Vec<(TxWithHash, Receipt)>, state_updates: StateUpdatesWithDeclaredClasses, - ) -> MinedBlockOutcome { + ) -> Result { let (txs, receipts): (Vec, Vec) = tx_receipt_pairs.into_iter().unzip(); - let prev_hash = BlockHashProvider::latest_hash(self.blockchain.provider()).unwrap(); + let prev_hash = BlockHashProvider::latest_hash(self.blockchain.provider())?; let partial_header = PartialHeader { parent_hash: prev_hash, version: CURRENT_STARKNET_VERSION, - timestamp: block_context.block_timestamp.0, - sequencer_address: block_context.sequencer_address.into(), + timestamp: block_env.timestamp, + sequencer_address: block_env.sequencer_address, gas_prices: GasPrices { - eth_gas_price: block_context.gas_prices.eth_l1_gas_price.try_into().unwrap(), - strk_gas_price: block_context.gas_prices.strk_l1_gas_price.try_into().unwrap(), + eth: block_env.l1_gas_prices.eth, + strk: block_env.l1_gas_prices.strk, }, }; let tx_count = txs.len(); - let block_number = block_context.block_number.0; + let block_number = block_env.number; let header = Header::new(partial_header, block_number, FieldElement::ZERO); let block = Block { header, body: txs }.seal(); @@ -181,17 +173,15 @@ impl Backend { block, state_updates, receipts, - ) - .unwrap(); + )?; info!(target: "backend", "⛏ī¸ Block {block_number} mined with {tx_count} transactions"); - MinedBlockOutcome { block_number } + Ok(MinedBlockOutcome { block_number }) } - pub fn update_block_context(&self) { + pub fn update_block_env(&self, block_env: &mut BlockEnv) { let mut context_gen = self.block_context_generator.write(); - let block_context = &mut self.env.write().block; let current_timestamp_secs = get_current_timestamp().as_secs() as i64; let timestamp = if context_gen.next_block_start_time == 0 { @@ -203,14 +193,85 @@ impl Backend { timestamp }; - block_context.block_number = block_context.block_number.next(); - block_context.block_timestamp = BlockTimestamp(timestamp); + block_env.number += 1; + block_env.timestamp = timestamp; + } + + /// Retrieves the chain configuration environment values. 
+ pub(crate) fn chain_cfg_env(&self) -> CfgEnv { + CfgEnv { + chain_id: self.chain_id, + vm_resource_fee_cost: get_default_vm_resource_fee_cost(), + invoke_tx_max_n_steps: self.config.env.invoke_max_steps, + validate_max_n_steps: self.config.env.validate_max_steps, + max_recursion_depth: MAX_RECURSION_DEPTH, + fee_token_addresses: FeeTokenAddressses { + eth: (*FEE_TOKEN_ADDRESS), + strk: Default::default(), + }, + } + } + + pub fn mine_empty_block( + &self, + block_env: &BlockEnv, + ) -> Result { + self.do_mine_block(block_env, Default::default(), Default::default()) + } +} + +#[cfg(test)] +mod tests { + use katana_provider::traits::block::{BlockNumberProvider, BlockProvider}; + use katana_provider::traits::env::BlockEnvProvider; + + use super::Backend; + use crate::backend::config::{Environment, StarknetConfig}; + + fn create_test_starknet_config() -> StarknetConfig { + StarknetConfig { + seed: [0u8; 32], + total_accounts: 2, + disable_fee: true, + env: Environment::default(), + ..Default::default() + } + } + + async fn create_test_backend() -> Backend { + Backend::new(create_test_starknet_config()).await } - /// Updates the block context and mines an empty block. - pub fn mine_empty_block(&self) -> MinedBlockOutcome { - self.update_block_context(); - let block_context = self.env.read().block.clone(); - self.do_mine_block(block_context, Default::default(), Default::default()) + #[tokio::test] + async fn test_creating_blocks() { + let backend = create_test_backend().await; + + let provider = backend.blockchain.provider(); + + assert_eq!(BlockNumberProvider::latest_number(provider).unwrap(), 0); + + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + backend.update_block_env(&mut block_env); + backend.mine_empty_block(&block_env).unwrap(); + + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + backend.update_block_env(&mut block_env); + backend.mine_empty_block(&block_env).unwrap(); + + let block_num = provider.latest_number().unwrap(); + let block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + + assert_eq!(BlockNumberProvider::latest_number(provider).unwrap(), 2); + assert_eq!(block_env.number, 2); + + let block0 = BlockProvider::block_by_number(provider, 0).unwrap().unwrap(); + let block1 = BlockProvider::block_by_number(provider, 1).unwrap().unwrap(); + let block2 = BlockProvider::block_by_number(provider, 2).unwrap().unwrap(); + + assert_eq!(block0.header.number, 0); + assert_eq!(block1.header.number, 1); + assert_eq!(block2.header.number, 2); } } diff --git a/crates/katana/core/src/backend/storage.rs b/crates/katana/core/src/backend/storage.rs index a977a53f99..a71259b038 100644 --- a/crates/katana/core/src/backend/storage.rs +++ b/crates/katana/core/src/backend/storage.rs @@ -1,13 +1,14 @@ use anyhow::Result; -use blockifier::block_context::BlockContext; use katana_primitives::block::{ - Block, BlockHash, FinalityStatus, GasPrices, Header, PartialHeader, SealedBlockWithStatus, + Block, BlockHash, FinalityStatus, Header, PartialHeader, SealedBlockWithStatus, }; +use katana_primitives::env::BlockEnv; use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_primitives::version::CURRENT_STARKNET_VERSION; use katana_primitives::FieldElement; use katana_provider::traits::block::{BlockProvider, BlockWriter}; use katana_provider::traits::contract::ContractClassWriter; +use 
katana_provider::traits::env::BlockEnvProvider; use katana_provider::traits::state::{StateFactoryProvider, StateRootProvider, StateWriter}; use katana_provider::traits::state_update::StateUpdateProvider; use katana_provider::traits::transaction::{ @@ -30,6 +31,7 @@ pub trait Database: + StateWriter + ContractClassWriter + StateFactoryProvider + + BlockEnvProvider + 'static + Send + Sync @@ -48,6 +50,7 @@ impl Database for T where + StateWriter + ContractClassWriter + StateFactoryProvider + + BlockEnvProvider + 'static + Send + Sync @@ -63,22 +66,19 @@ impl Blockchain { Self { inner: BlockchainProvider::new(Box::new(provider)) } } - pub fn new_with_genesis(provider: impl Database, block_context: &BlockContext) -> Result { + pub fn new_with_genesis(provider: impl Database, block_env: &BlockEnv) -> Result { let header = PartialHeader { parent_hash: 0u8.into(), version: CURRENT_STARKNET_VERSION, - timestamp: block_context.block_timestamp.0, + timestamp: block_env.timestamp, sequencer_address: *SEQUENCER_ADDRESS, - gas_prices: GasPrices { - eth_gas_price: block_context.gas_prices.eth_l1_gas_price.try_into().unwrap(), - strk_gas_price: block_context.gas_prices.strk_l1_gas_price.try_into().unwrap(), - }, + gas_prices: block_env.l1_gas_prices, }; let block = SealedBlockWithStatus { status: FinalityStatus::AcceptedOnL1, block: Block { - header: Header::new(header, block_context.block_number.0, 0u8.into()), + header: Header::new(header, block_env.number, 0u8.into()), body: vec![], } .seal(), @@ -93,7 +93,7 @@ impl Blockchain { provider: impl Database, block_hash: BlockHash, parent_hash: FieldElement, - block_context: &BlockContext, + block_env: &BlockEnv, state_root: FieldElement, block_status: FinalityStatus, ) -> Result { @@ -101,13 +101,10 @@ impl Blockchain { state_root, parent_hash, version: CURRENT_STARKNET_VERSION, - number: block_context.block_number.0, - timestamp: block_context.block_timestamp.0, + number: block_env.number, + timestamp: block_env.timestamp, sequencer_address: *SEQUENCER_ADDRESS, - gas_prices: GasPrices { - eth_gas_price: block_context.gas_prices.eth_l1_gas_price.try_into().unwrap(), - strk_gas_price: block_context.gas_prices.strk_l1_gas_price.try_into().unwrap(), - }, + gas_prices: block_env.l1_gas_prices, }; let block = SealedBlockWithStatus { @@ -134,8 +131,8 @@ impl Blockchain { #[cfg(test)] mod tests { - use blockifier::block_context::{BlockContext, FeeTokenAddresses, GasPrices}; - use katana_primitives::block::FinalityStatus; + use katana_primitives::block::{FinalityStatus, GasPrices}; + use katana_primitives::env::BlockEnv; use katana_primitives::FieldElement; use katana_provider::providers::in_memory::InMemoryProvider; use katana_provider::traits::block::{ @@ -143,8 +140,6 @@ mod tests { }; use katana_provider::traits::state::StateFactoryProvider; use starknet::macros::felt; - use starknet_api::block::{BlockNumber, BlockTimestamp}; - use starknet_api::core::ChainId; use super::Blockchain; use crate::constants::{ @@ -154,23 +149,14 @@ mod tests { #[test] fn blockchain_from_genesis_states() { let provider = InMemoryProvider::new(); - let block_context = BlockContext { - gas_prices: GasPrices { eth_l1_gas_price: 0, strk_l1_gas_price: 0 }, - max_recursion_depth: 0, - validate_max_n_steps: 0, - invoke_tx_max_n_steps: 0, - block_number: BlockNumber(0), - chain_id: ChainId("test".into()), - block_timestamp: BlockTimestamp(0), + let block_env = BlockEnv { + number: 0, + timestamp: 0, sequencer_address: Default::default(), - fee_token_addresses: FeeTokenAddresses { - 
eth_fee_token_address: Default::default(), - strk_fee_token_address: Default::default(), - }, - vm_resource_fee_cost: Default::default(), + l1_gas_prices: GasPrices { eth: 0, strk: 0 }, }; - let blockchain = Blockchain::new_with_genesis(provider, &block_context) + let blockchain = Blockchain::new_with_genesis(provider, &block_env) .expect("failed to create blockchain from genesis block"); let state = blockchain.provider().latest().expect("failed to get latest state"); @@ -188,27 +174,18 @@ mod tests { fn blockchain_from_fork() { let provider = InMemoryProvider::new(); - let block_context = BlockContext { - gas_prices: GasPrices { eth_l1_gas_price: 9090, strk_l1_gas_price: 0 }, - max_recursion_depth: 0, - validate_max_n_steps: 0, - invoke_tx_max_n_steps: 0, - chain_id: ChainId("test".into()), - block_number: BlockNumber(23), - block_timestamp: BlockTimestamp(6868), + let block_env = BlockEnv { + number: 23, + timestamp: 6868, sequencer_address: Default::default(), - fee_token_addresses: FeeTokenAddresses { - eth_fee_token_address: Default::default(), - strk_fee_token_address: Default::default(), - }, - vm_resource_fee_cost: Default::default(), + l1_gas_prices: GasPrices { eth: 9090, strk: 0 }, }; let blockchain = Blockchain::new_from_forked( provider, felt!("1111"), FieldElement::ZERO, - &block_context, + &block_env, felt!("1334"), FinalityStatus::AcceptedOnL1, ) @@ -223,7 +200,7 @@ mod tests { assert_eq!(latest_number, 23); assert_eq!(latest_hash, felt!("1111")); - assert_eq!(header.gas_prices.eth_gas_price, 9090); + assert_eq!(header.gas_prices.eth, 9090); assert_eq!(header.timestamp, 6868); assert_eq!(header.number, latest_number); assert_eq!(header.state_root, felt!("1334")); diff --git a/crates/katana/core/src/constants.rs b/crates/katana/core/src/constants.rs index c983920202..a5f7e36cc4 100644 --- a/crates/katana/core/src/constants.rs +++ b/crates/katana/core/src/constants.rs @@ -6,11 +6,13 @@ use katana_primitives::FieldElement; use lazy_static::lazy_static; use starknet::macros::felt; -pub const DEFAULT_GAS_PRICE: u128 = 100 * u128::pow(10, 9); // Given in units of wei. +pub const DEFAULT_GAS_PRICE: u64 = 100 * u64::pow(10, 9); // Given in units of wei. pub const DEFAULT_INVOKE_MAX_STEPS: u32 = 1_000_000; pub const DEFAULT_VALIDATE_MAX_STEPS: u32 = 1_000_000; +pub const MAX_RECURSION_DEPTH: usize = 1000; + lazy_static! { // Predefined contract addresses diff --git a/crates/katana/core/src/env.rs b/crates/katana/core/src/env.rs index 464b317831..c97115bb4f 100644 --- a/crates/katana/core/src/env.rs +++ b/crates/katana/core/src/env.rs @@ -1,23 +1,10 @@ use std::collections::HashMap; -use blockifier::block_context::{BlockContext, FeeTokenAddresses, GasPrices}; use cairo_vm::vm::runners::builtin_runner::{ BITWISE_BUILTIN_NAME, EC_OP_BUILTIN_NAME, HASH_BUILTIN_NAME, KECCAK_BUILTIN_NAME, OUTPUT_BUILTIN_NAME, POSEIDON_BUILTIN_NAME, RANGE_CHECK_BUILTIN_NAME, SEGMENT_ARENA_BUILTIN_NAME, SIGNATURE_BUILTIN_NAME, }; -use starknet_api::block::{BlockNumber, BlockTimestamp}; -use starknet_api::core::ChainId; - -use crate::constants::{DEFAULT_GAS_PRICE, FEE_TOKEN_ADDRESS, SEQUENCER_ADDRESS}; - -/// Represents the chain environment. -#[derive(Debug, Clone)] -pub struct Env { - /// The block environment of the current block. This is the context that - /// the transactions will be executed on. 
- pub block: BlockContext, -} #[derive(Debug, Default)] pub struct BlockContextGenerator { @@ -25,31 +12,6 @@ pub struct BlockContextGenerator { pub next_block_start_time: u64, } -impl Default for Env { - fn default() -> Self { - Self { - block: BlockContext { - chain_id: ChainId("KATANA".to_string()), - block_number: BlockNumber::default(), - block_timestamp: BlockTimestamp::default(), - sequencer_address: (*SEQUENCER_ADDRESS).into(), - fee_token_addresses: FeeTokenAddresses { - eth_fee_token_address: (*FEE_TOKEN_ADDRESS).into(), - strk_fee_token_address: Default::default(), - }, - vm_resource_fee_cost: get_default_vm_resource_fee_cost().into(), - gas_prices: GasPrices { - eth_l1_gas_price: DEFAULT_GAS_PRICE, - strk_l1_gas_price: Default::default(), - }, - invoke_tx_max_n_steps: 1_000_000, - validate_max_n_steps: 1_000_000, - max_recursion_depth: 100, - }, - } - } -} - pub fn get_default_vm_resource_fee_cost() -> HashMap { HashMap::from([ (String::from("n_steps"), 1_f64), diff --git a/crates/katana/core/src/sequencer.rs b/crates/katana/core/src/sequencer.rs index 1f2ac0d620..1f2b85dbde 100644 --- a/crates/katana/core/src/sequencer.rs +++ b/crates/katana/core/src/sequencer.rs @@ -4,10 +4,11 @@ use std::slice::Iter; use std::sync::Arc; use anyhow::Result; +use blockifier::block_context::BlockContext; use blockifier::execution::errors::{EntryPointExecutionError, PreExecutionError}; use blockifier::transaction::errors::TransactionExecutionError; use katana_executor::blockifier::state::StateRefDb; -use katana_executor::blockifier::utils::EntryPointCall; +use katana_executor::blockifier::utils::{block_context_from_envs, EntryPointCall}; use katana_executor::blockifier::PendingState; use katana_primitives::block::{BlockHash, BlockHashOrNumber, BlockIdOrTag, BlockNumber}; use katana_primitives::chain::ChainId; @@ -22,6 +23,7 @@ use katana_provider::traits::block::{ BlockHashProvider, BlockIdReader, BlockNumberProvider, BlockProvider, }; use katana_provider::traits::contract::ContractClassProvider; +use katana_provider::traits::env::BlockEnvProvider; use katana_provider::traits::state::{StateFactoryProvider, StateProvider}; use katana_provider::traits::transaction::{ ReceiptProvider, TransactionProvider, TransactionsProviderExt, @@ -58,19 +60,30 @@ pub struct KatanaSequencer { } impl KatanaSequencer { - pub async fn new(config: SequencerConfig, starknet_config: StarknetConfig) -> Self { + pub async fn new( + config: SequencerConfig, + starknet_config: StarknetConfig, + ) -> anyhow::Result { let backend = Arc::new(Backend::new(starknet_config).await); let pool = Arc::new(TransactionPool::new()); let miner = TransactionMiner::new(pool.add_listener()); + let state = StateFactoryProvider::latest(backend.blockchain.provider()) .map(StateRefDb::new) .unwrap(); - let block_producer = if let Some(block_time) = config.block_time { - BlockProducer::interval(Arc::clone(&backend), state, block_time) - } else if config.no_mining { - BlockProducer::on_demand(Arc::clone(&backend), state) + let block_producer = if config.block_time.is_some() || config.no_mining { + let block_num = backend.blockchain.provider().latest_number()?; + + let block_env = backend.blockchain.provider().block_env_at(block_num.into())?.unwrap(); + let cfg_env = backend.chain_cfg_env(); + + if let Some(interval) = config.block_time { + BlockProducer::interval(Arc::clone(&backend), state, interval, (block_env, cfg_env)) + } else { + BlockProducer::on_demand(Arc::clone(&backend), state, (block_env, cfg_env)) + } } else { 
BlockProducer::instant(Arc::clone(&backend)) }; @@ -90,7 +103,7 @@ impl KatanaSequencer { messaging, }); - Self { pool, config, backend, block_producer } + Ok(Self { pool, config, backend, block_producer }) } /// Returns the pending state if the sequencer is running in _interval_ mode. Otherwise `None`. @@ -109,6 +122,38 @@ impl KatanaSequencer { &self.backend } + pub fn block_execution_context_at( + &self, + block_id: BlockIdOrTag, + ) -> SequencerResult> { + let provider = self.backend.blockchain.provider(); + let cfg_env = self.backend().chain_cfg_env(); + + if let BlockIdOrTag::Tag(BlockTag::Pending) = block_id { + if let Some(state) = self.pending_state() { + let (block_env, _) = state.block_execution_envs(); + return Ok(Some(block_context_from_envs(&block_env, &cfg_env))); + } + } + + let block_num = match block_id { + BlockIdOrTag::Tag(BlockTag::Pending) | BlockIdOrTag::Tag(BlockTag::Latest) => { + provider.latest_number()? + } + + BlockIdOrTag::Hash(hash) => provider + .block_number_by_hash(hash)? + .ok_or(SequencerError::BlockNotFound(block_id))?, + + BlockIdOrTag::Number(num) => num, + }; + + provider + .block_env_at(block_num.into())? + .map(|block_env| Some(block_context_from_envs(&block_env, &cfg_env))) + .ok_or(SequencerError::BlockNotFound(block_id)) + } + pub fn state(&self, block_id: &BlockIdOrTag) -> SequencerResult> { let provider = self.backend.blockchain.provider(); @@ -149,12 +194,16 @@ impl KatanaSequencer { block_id: BlockIdOrTag, ) -> SequencerResult> { let state = self.state(&block_id)?; - let block_context = self.backend.env.read().block.clone(); + + let block_context = self + .block_execution_context_at(block_id)? + .ok_or_else(|| SequencerError::BlockNotFound(block_id))?; + katana_executor::blockifier::utils::estimate_fee( transactions.into_iter(), block_context, state, - !self.backend.config.read().disable_validate, + !self.backend.config.disable_validate, ) .map_err(SequencerError::TransactionExecution) } @@ -267,7 +316,10 @@ impl KatanaSequencer { block_id: BlockIdOrTag, ) -> SequencerResult> { let state = self.state(&block_id)?; - let block_context = self.backend.env.read().block.clone(); + + let block_context = self + .block_execution_context_at(block_id)? 
+ .ok_or_else(|| SequencerError::BlockNotFound(block_id))?; let retdata = katana_executor::blockifier::utils::call(request, block_context, state) .map_err(|e| match e { diff --git a/crates/katana/core/src/service/block_producer.rs b/crates/katana/core/src/service/block_producer.rs index 0cef6a3085..53d9714373 100644 --- a/crates/katana/core/src/service/block_producer.rs +++ b/crates/katana/core/src/service/block_producer.rs @@ -9,11 +9,18 @@ use futures::stream::{Stream, StreamExt}; use futures::FutureExt; use katana_executor::blockifier::outcome::TxReceiptWithExecInfo; use katana_executor::blockifier::state::{CachedStateWrapper, StateRefDb}; -use katana_executor::blockifier::utils::get_state_update_from_cached_state; +use katana_executor::blockifier::utils::{ + block_context_from_envs, get_state_update_from_cached_state, +}; use katana_executor::blockifier::{PendingState, TransactionExecutor}; +use katana_primitives::block::BlockHashOrNumber; +use katana_primitives::env::{BlockEnv, CfgEnv}; use katana_primitives::receipt::Receipt; use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_primitives::transaction::{ExecutableTxWithHash, TxWithHash}; +use katana_provider::error::ProviderError; +use katana_provider::traits::block::BlockNumberProvider; +use katana_provider::traits::env::BlockEnvProvider; use katana_provider::traits::state::StateFactoryProvider; use parking_lot::RwLock; use tokio::time::{interval_at, Instant, Interval}; @@ -21,13 +28,20 @@ use tracing::trace; use crate::backend::Backend; +#[derive(Debug, thiserror::Error)] +pub enum BlockProductionError { + #[error(transparent)] + Provider(#[from] ProviderError), +} + pub struct MinedBlockOutcome { pub block_number: u64, } type ServiceFuture = Pin + Send + Sync>>; -type InstantBlockMiningFuture = ServiceFuture; -type IntervalBlockMiningFuture = ServiceFuture; + +type BlockProductionResult = Result; +type BlockProductionFuture = ServiceFuture; /// The type which responsible for block production. #[must_use = "BlockProducer does nothing unless polled"] @@ -39,22 +53,32 @@ pub struct BlockProducer { impl BlockProducer { /// Creates a block producer that mines a new block every `interval` milliseconds. - pub fn interval(backend: Arc, initial_state: StateRefDb, interval: u64) -> Self { + pub fn interval( + backend: Arc, + initial_state: StateRefDb, + interval: u64, + block_exec_envs: (BlockEnv, CfgEnv), + ) -> Self { Self { inner: Arc::new(RwLock::new(BlockProducerMode::Interval(IntervalBlockProducer::new( backend, initial_state, interval, + block_exec_envs, )))), } } /// Creates a new block producer that will only be possible to mine by calling the /// `katana_generateBlock` RPC method. 
- pub fn on_demand(backend: Arc, initial_state: StateRefDb) -> Self { + pub fn on_demand( + backend: Arc, + initial_state: StateRefDb, + block_exec_envs: (BlockEnv, CfgEnv), + ) -> Self { Self { inner: Arc::new(RwLock::new(BlockProducerMode::Interval( - IntervalBlockProducer::new_no_mining(backend, initial_state), + IntervalBlockProducer::new_no_mining(backend, initial_state, block_exec_envs), ))), } } @@ -99,7 +123,7 @@ impl BlockProducer { } impl Stream for BlockProducer { - type Item = MinedBlockOutcome; + type Item = BlockProductionResult; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let mut mode = self.inner.write(); match &mut *mode { @@ -132,18 +156,20 @@ pub struct IntervalBlockProducer { interval: Option, backend: Arc, /// Single active future that mines a new block - block_mining: Option, + block_mining: Option, /// Backlog of sets of transactions ready to be mined queued: VecDeque>, /// The state of the pending block after executing all the transactions within the interval. state: Arc, - /// This is to make sure that the block context is updated - /// before the first block is opened. - is_initialized: bool, } impl IntervalBlockProducer { - pub fn new(backend: Arc, db: StateRefDb, interval: u64) -> Self { + pub fn new( + backend: Arc, + db: StateRefDb, + interval: u64, + block_exec_envs: (BlockEnv, CfgEnv), + ) -> Self { let interval = { let duration = Duration::from_millis(interval); let mut interval = interval_at(Instant::now() + duration, duration); @@ -151,13 +177,12 @@ impl IntervalBlockProducer { interval }; - let state = Arc::new(PendingState::new(db)); + let state = Arc::new(PendingState::new(db, block_exec_envs.0, block_exec_envs.1)); Self { - state, backend, + state, block_mining: None, - is_initialized: false, interval: Some(interval), queued: VecDeque::default(), } @@ -166,17 +191,14 @@ impl IntervalBlockProducer { /// Creates a new [IntervalBlockProducer] with no `interval`. This mode will not produce blocks /// for every fixed interval, although it will still execute all queued transactions and /// keep hold of the pending state. 
- pub fn new_no_mining(backend: Arc, db: StateRefDb) -> Self { - let state = Arc::new(PendingState::new(db)); + pub fn new_no_mining( + backend: Arc, + db: StateRefDb, + block_exec_envs: (BlockEnv, CfgEnv), + ) -> Self { + let state = Arc::new(PendingState::new(db, block_exec_envs.0, block_exec_envs.1)); - Self { - state, - backend, - interval: None, - block_mining: None, - is_initialized: false, - queued: VecDeque::default(), - } + Self { state, backend, interval: None, block_mining: None, queued: VecDeque::default() } } pub fn state(&self) -> Arc { @@ -197,30 +219,40 @@ impl IntervalBlockProducer { state_updates: StateUpdatesWithDeclaredClasses, backend: Arc, pending_state: Arc, - ) -> MinedBlockOutcome { + ) -> BlockProductionResult { trace!(target: "miner", "creating new block"); let (txs, _) = pending_state.take_txs_all(); let tx_receipt_pairs = txs.into_iter().map(|(tx, rct)| (tx, rct.receipt)).collect::>(); - let (outcome, new_state) = backend.mine_pending_block(tx_receipt_pairs, state_updates); + let (mut block_env, cfg_env) = pending_state.block_execution_envs(); + + let (outcome, new_state) = + backend.mine_pending_block(&block_env, tx_receipt_pairs, state_updates)?; + trace!(target: "miner", "created new block: {}", outcome.block_number); - backend.update_block_context(); - pending_state.reset_state_with(new_state.into()); + backend.update_block_env(&mut block_env); + pending_state.reset_state(new_state.into(), block_env, cfg_env); - outcome + Ok(outcome) } fn execute_transactions(&self, transactions: Vec) { let txs = transactions.iter().map(TxWithHash::from); + + let block_context = block_context_from_envs( + &self.state.block_envs.read().0, + &self.state.block_envs.read().1, + ); + let results = { TransactionExecutor::new( &self.state.state, - &self.backend.env.read().block, - !self.backend.config.read().disable_fee, - !self.backend.config.read().disable_validate, + &block_context, + !self.backend.config.disable_fee, + !self.backend.config.disable_validate, transactions.clone().into_iter(), ) .with_error_log() @@ -245,16 +277,11 @@ impl IntervalBlockProducer { impl Stream for IntervalBlockProducer { // mined block outcome and the new state - type Item = MinedBlockOutcome; + type Item = BlockProductionResult; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let pin = self.get_mut(); - if !pin.is_initialized { - pin.backend.update_block_context(); - pin.is_initialized = true; - } - if let Some(interval) = &mut pin.interval { if interval.poll_tick(cx).is_ready() && pin.block_mining.is_none() { let backend = pin.backend.clone(); @@ -293,7 +320,7 @@ pub struct InstantBlockProducer { /// Holds the backend if no block is being mined backend: Arc, /// Single active future that mines a new block - block_mining: Option, + block_mining: Option, /// Backlog of sets of transactions ready to be mined queued: VecDeque>, } @@ -315,23 +342,28 @@ impl InstantBlockProducer { fn do_mine( backend: Arc, transactions: Vec, - ) -> MinedBlockOutcome { + ) -> Result { trace!(target: "miner", "creating new block"); - backend.update_block_context(); + let provider = backend.blockchain.provider(); - let latest_state = StateFactoryProvider::latest(backend.blockchain.provider()) - .expect("able to get latest state"); + let cfg_env = backend.chain_cfg_env(); + let latest_num = provider.latest_number()?; + let mut block_env = provider.block_env_at(BlockHashOrNumber::Num(latest_num))?.unwrap(); + backend.update_block_env(&mut block_env); + + let block_context = 
block_context_from_envs(&block_env, &cfg_env); + + let latest_state = StateFactoryProvider::latest(backend.blockchain.provider())?; let state = CachedStateWrapper::new(latest_state.into()); - let block_context = backend.env.read().block.clone(); let txs = transactions.iter().map(TxWithHash::from); let tx_receipt_pairs: Vec<(TxWithHash, Receipt)> = TransactionExecutor::new( &state, &block_context, - !backend.config.read().disable_fee, - !backend.config.read().disable_validate, + !backend.config.disable_fee, + !backend.config.disable_validate, transactions.clone().into_iter(), ) .with_error_log() @@ -349,20 +381,20 @@ impl InstantBlockProducer { .collect(); let outcome = backend.do_mine_block( - block_context, + &block_env, tx_receipt_pairs, get_state_update_from_cached_state(&state), - ); + )?; trace!(target: "miner", "created new block: {}", outcome.block_number); - outcome + Ok(outcome) } } impl Stream for InstantBlockProducer { // mined block outcome and the new state - type Item = MinedBlockOutcome; + type Item = BlockProductionResult; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let pin = self.get_mut(); diff --git a/crates/katana/core/src/service/mod.rs b/crates/katana/core/src/service/mod.rs index 860b632c3e..30635cc89d 100644 --- a/crates/katana/core/src/service/mod.rs +++ b/crates/katana/core/src/service/mod.rs @@ -61,8 +61,16 @@ impl Future for NodeService { // this drives block production and feeds new sets of ready transactions to the block // producer loop { - while let Poll::Ready(Some(outcome)) = pin.block_producer.poll_next_unpin(cx) { - trace!(target: "node", "mined block {}", outcome.block_number); + while let Poll::Ready(Some(res)) = pin.block_producer.poll_next_unpin(cx) { + match res { + Ok(outcome) => { + trace!(target: "node", "mined block {}", outcome.block_number) + } + + Err(err) => { + trace!(target: "node", "failed to mine block: {err}"); + } + } } if let Poll::Ready(transactions) = pin.miner.poll(&pin.pool, cx) { diff --git a/crates/katana/core/tests/backend.rs b/crates/katana/core/tests/backend.rs deleted file mode 100644 index be08975e33..0000000000 --- a/crates/katana/core/tests/backend.rs +++ /dev/null @@ -1,41 +0,0 @@ -use katana_core::backend::config::{Environment, StarknetConfig}; -use katana_core::backend::Backend; -use katana_provider::traits::block::{BlockNumberProvider, BlockProvider}; -use starknet_api::block::BlockNumber; - -fn create_test_starknet_config() -> StarknetConfig { - StarknetConfig { - seed: [0u8; 32], - total_accounts: 2, - disable_fee: true, - env: Environment::default(), - ..Default::default() - } -} - -async fn create_test_backend() -> Backend { - Backend::new(create_test_starknet_config()).await -} - -#[tokio::test] -async fn test_creating_blocks() { - let backend = create_test_backend().await; - - let provider = backend.blockchain.provider(); - - assert_eq!(BlockNumberProvider::latest_number(provider).unwrap(), 0); - - backend.mine_empty_block(); - backend.mine_empty_block(); - - assert_eq!(BlockNumberProvider::latest_number(provider).unwrap(), 2); - assert_eq!(backend.env.read().block.block_number, BlockNumber(2)); - - let block0 = BlockProvider::block_by_number(provider, 0).unwrap().unwrap(); - let block1 = BlockProvider::block_by_number(provider, 1).unwrap().unwrap(); - let block2 = BlockProvider::block_by_number(provider, 2).unwrap().unwrap(); - - assert_eq!(block0.header.number, 0); - assert_eq!(block1.header.number, 1); - assert_eq!(block2.header.number, 2); -} diff --git 
a/crates/katana/core/tests/sequencer.rs b/crates/katana/core/tests/sequencer.rs index 1782ce8355..af577d2ec3 100644 --- a/crates/katana/core/tests/sequencer.rs +++ b/crates/katana/core/tests/sequencer.rs @@ -1,6 +1,7 @@ use katana_core::backend::config::{Environment, StarknetConfig}; use katana_core::sequencer::{KatanaSequencer, SequencerConfig}; -use katana_provider::traits::block::BlockProvider; +use katana_provider::traits::block::{BlockNumberProvider, BlockProvider}; +use katana_provider::traits::env::BlockEnvProvider; fn create_test_sequencer_config() -> (SequencerConfig, StarknetConfig) { ( @@ -17,21 +18,29 @@ fn create_test_sequencer_config() -> (SequencerConfig, StarknetConfig) { async fn create_test_sequencer() -> KatanaSequencer { let (sequencer_config, starknet_config) = create_test_sequencer_config(); - KatanaSequencer::new(sequencer_config, starknet_config).await + KatanaSequencer::new(sequencer_config, starknet_config).await.unwrap() } #[tokio::test] async fn test_next_block_timestamp_in_past() { let sequencer = create_test_sequencer().await; let provider = sequencer.backend.blockchain.provider(); - let block1 = sequencer.backend.mine_empty_block().block_number; + + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + sequencer.backend.update_block_env(&mut block_env); + let block1 = sequencer.backend.mine_empty_block(&block_env).unwrap().block_number; let block1_timestamp = BlockProvider::block(provider, block1.into()).unwrap().unwrap().header.timestamp; sequencer.set_next_block_timestamp(block1_timestamp - 1000).unwrap(); - let block2 = sequencer.backend.mine_empty_block().block_number; + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + sequencer.backend.update_block_env(&mut block_env); + let block2 = sequencer.backend.mine_empty_block(&block_env).unwrap().block_number; + let block2_timestamp = BlockProvider::block(provider, block2.into()).unwrap().unwrap().header.timestamp; @@ -42,14 +51,22 @@ async fn test_next_block_timestamp_in_past() { async fn test_set_next_block_timestamp_in_future() { let sequencer = create_test_sequencer().await; let provider = sequencer.backend.blockchain.provider(); - let block1 = sequencer.backend.mine_empty_block().block_number; + + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + sequencer.backend.update_block_env(&mut block_env); + let block1 = sequencer.backend.mine_empty_block(&block_env).unwrap().block_number; let block1_timestamp = BlockProvider::block(provider, block1.into()).unwrap().unwrap().header.timestamp; sequencer.set_next_block_timestamp(block1_timestamp + 1000).unwrap(); - let block2 = sequencer.backend.mine_empty_block().block_number; + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + sequencer.backend.update_block_env(&mut block_env); + let block2 = sequencer.backend.mine_empty_block(&block_env).unwrap().block_number; + let block2_timestamp = BlockProvider::block(provider, block2.into()).unwrap().unwrap().header.timestamp; @@ -60,14 +77,22 @@ async fn test_set_next_block_timestamp_in_future() { async fn test_increase_next_block_timestamp() { let sequencer = create_test_sequencer().await; let provider = sequencer.backend.blockchain.provider(); - let block1 = 
sequencer.backend.mine_empty_block().block_number; + + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + sequencer.backend.update_block_env(&mut block_env); + let block1 = sequencer.backend.mine_empty_block(&block_env).unwrap().block_number; let block1_timestamp = BlockProvider::block(provider, block1.into()).unwrap().unwrap().header.timestamp; sequencer.increase_next_block_timestamp(1000).unwrap(); - let block2 = sequencer.backend.mine_empty_block().block_number; + let block_num = provider.latest_number().unwrap(); + let mut block_env = provider.block_env_at(block_num.into()).unwrap().unwrap(); + sequencer.backend.update_block_env(&mut block_env); + let block2 = sequencer.backend.mine_empty_block(&block_env).unwrap().block_number; + let block2_timestamp = BlockProvider::block(provider, block2.into()).unwrap().unwrap().header.timestamp; diff --git a/crates/katana/executor/src/blockifier/mod.rs b/crates/katana/executor/src/blockifier/mod.rs index e62e12728f..3d6aca43c5 100644 --- a/crates/katana/executor/src/blockifier/mod.rs +++ b/crates/katana/executor/src/blockifier/mod.rs @@ -11,6 +11,7 @@ use blockifier::transaction::errors::TransactionExecutionError; use blockifier::transaction::objects::TransactionExecutionInfo; use blockifier::transaction::transaction_execution::Transaction; use blockifier::transaction::transactions::ExecutableTransaction; +use katana_primitives::env::{BlockEnv, CfgEnv}; use katana_primitives::transaction::{ DeclareTxWithClass, ExecutableTx, ExecutableTxWithHash, TxWithHash, }; @@ -180,23 +181,28 @@ pub type AcceptedTxPair = (TxWithHash, TxReceiptWithExecInfo); pub type RejectedTxPair = (TxWithHash, TransactionExecutionError); pub struct PendingState { + /// The block context of the pending block. + pub block_envs: RwLock<(BlockEnv, CfgEnv)>, + /// The state of the pending block. pub state: Arc>, /// The transactions that have been executed. - pub executed_txs: RwLock>, + pub executed_txs: RwLock>, /// The transactions that have been rejected. 
- pub rejected_txs: RwLock>, + pub rejected_txs: RwLock>, } impl PendingState { - pub fn new(state: StateRefDb) -> Self { + pub fn new(state: StateRefDb, block_env: BlockEnv, cfg_env: CfgEnv) -> Self { Self { + block_envs: RwLock::new((block_env, cfg_env)), state: Arc::new(CachedStateWrapper::new(state)), executed_txs: RwLock::new(Vec::new()), rejected_txs: RwLock::new(Vec::new()), } } - pub fn reset_state_with(&self, state: StateRefDb) { + pub fn reset_state(&self, state: StateRefDb, block_env: BlockEnv, cfg_env: CfgEnv) { + *self.block_envs.write() = (block_env, cfg_env); self.state.reset_with_new_state(state); } @@ -211,6 +217,10 @@ impl PendingState { (executed_txs, rejected_txs) } + pub fn block_execution_envs(&self) -> (BlockEnv, CfgEnv) { + self.block_envs.read().clone() + } + fn add_executed_tx(&self, tx: TxWithHash, execution_result: TxExecutionResult) { match execution_result { Ok(execution_info) => { diff --git a/crates/katana/executor/src/blockifier/utils.rs b/crates/katana/executor/src/blockifier/utils.rs index 85e000cec4..2ae08e78fd 100644 --- a/crates/katana/executor/src/blockifier/utils.rs +++ b/crates/katana/executor/src/blockifier/utils.rs @@ -10,6 +10,7 @@ use ::blockifier::execution::entry_point::{ use ::blockifier::execution::errors::EntryPointExecutionError; use ::blockifier::state::cached_state::{CachedState, GlobalContractCache, MutRefState}; use ::blockifier::transaction::objects::AccountTransactionContext; +use blockifier::block_context::{FeeTokenAddresses, GasPrices}; use blockifier::fee::fee_utils::{calculate_l1_gas_by_vm_usage, extract_l1_gas_and_vm_usage}; use blockifier::state::state_api::State; use blockifier::transaction::errors::TransactionExecutionError; @@ -18,6 +19,7 @@ use blockifier::transaction::objects::{ }; use convert_case::{Case, Casing}; use katana_primitives::contract::ContractAddress; +use katana_primitives::env::{BlockEnv, CfgEnv}; use katana_primitives::receipt::{Event, MessageToL1}; use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; use katana_primitives::transaction::ExecutableTxWithHash; @@ -26,6 +28,7 @@ use katana_provider::traits::contract::ContractClassProvider; use katana_provider::traits::state::StateProvider; use starknet::core::types::FeeEstimate; use starknet::core::utils::parse_cairo_short_string; +use starknet_api::block::{BlockNumber, BlockTimestamp}; use starknet_api::core::EntryPointSelector; use starknet_api::transaction::Calldata; use tracing::trace; @@ -146,6 +149,32 @@ pub fn calculate_execution_fee( Ok(FeeEstimate { gas_price, gas_consumed, overall_fee }) } +/// Create a block context from the chain environment values. 
+pub fn block_context_from_envs(block_env: &BlockEnv, cfg_env: &CfgEnv) -> BlockContext { + let fee_token_addresses = FeeTokenAddresses { + eth_fee_token_address: cfg_env.fee_token_addresses.eth.into(), + strk_fee_token_address: cfg_env.fee_token_addresses.strk.into(), + }; + + let gas_prices = GasPrices { + eth_l1_gas_price: block_env.l1_gas_prices.eth.try_into().unwrap(), + strk_l1_gas_price: block_env.l1_gas_prices.strk.try_into().unwrap(), + }; + + BlockContext { + gas_prices, + fee_token_addresses, + chain_id: cfg_env.chain_id.into(), + block_number: BlockNumber(block_env.number), + block_timestamp: BlockTimestamp(block_env.timestamp), + sequencer_address: block_env.sequencer_address.into(), + vm_resource_fee_cost: cfg_env.vm_resource_fee_cost.clone().into(), + validate_max_n_steps: cfg_env.validate_max_n_steps, + invoke_tx_max_n_steps: cfg_env.invoke_tx_max_n_steps, + max_recursion_depth: cfg_env.max_recursion_depth, + } +} + pub(crate) fn warn_message_transaction_error_exec_error(err: &TransactionExecutionError) { match err { TransactionExecutionError::ExecutionError(ref eperr) => match eperr { diff --git a/crates/katana/primitives/src/block.rs b/crates/katana/primitives/src/block.rs index f5f762f462..60847496c4 100644 --- a/crates/katana/primitives/src/block.rs +++ b/crates/katana/primitives/src/block.rs @@ -40,18 +40,18 @@ pub struct PartialHeader { } /// The L1 gas prices. -#[derive(Debug, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct GasPrices { /// The price of one unit of the given resource, denominated in wei - pub eth_gas_price: u64, + pub eth: u64, /// The price of one unit of the given resource, denominated in strk - pub strk_gas_price: u64, + pub strk: u64, } impl GasPrices { pub fn new(eth_gas_price: u64, strk_gas_price: u64) -> Self { - Self { eth_gas_price, strk_gas_price } + Self { eth: eth_gas_price, strk: strk_gas_price } } } diff --git a/crates/katana/primitives/src/env.rs b/crates/katana/primitives/src/env.rs index 64c381ba7c..3c87d7707b 100644 --- a/crates/katana/primitives/src/env.rs +++ b/crates/katana/primitives/src/env.rs @@ -1,28 +1,29 @@ use std::collections::HashMap; +use crate::block::{BlockNumber, GasPrices}; use crate::chain::ChainId; use crate::contract::ContractAddress; /// Block environment values. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Default, PartialEq, Eq)] pub struct BlockEnv { /// The block height. - pub number: u64, + pub number: BlockNumber, /// The timestamp in seconds since the UNIX epoch. pub timestamp: u64, - /// The block gas price in wei. - pub gas_price: u128, + /// The L1 gas prices at this particular block. + pub l1_gas_prices: GasPrices, /// The contract address of the sequencer. pub sequencer_address: ContractAddress, - /// The contract address of the fee token. - pub fee_token_address: ContractAddress, } -/// Starknet configuration values. -#[derive(Debug, Clone)] +/// The chain block execution configuration values. +#[derive(Debug, Clone, Default)] pub struct CfgEnv { /// The chain id. pub chain_id: ChainId, + /// The contract addresses of the fee tokens. + pub fee_token_addresses: FeeTokenAddressses, /// The fee cost of the VM resources. pub vm_resource_fee_cost: HashMap, /// The maximum number of steps allowed for an invoke transaction. @@ -32,3 +33,12 @@ pub struct CfgEnv { /// The maximum recursion depth allowed. 
pub max_recursion_depth: usize, } + +/// The contract addresses of the tokens used for the fees. +#[derive(Debug, Clone, Default)] +pub struct FeeTokenAddressses { + /// The contract address of the `STRK` token. + pub strk: ContractAddress, + /// The contract address of the `ETH` token. + pub eth: ContractAddress, +} diff --git a/crates/katana/rpc/rpc-types/src/block.rs b/crates/katana/rpc/rpc-types/src/block.rs index dae9949c24..bbfc2d7f30 100644 --- a/crates/katana/rpc/rpc-types/src/block.rs +++ b/crates/katana/rpc/rpc-types/src/block.rs @@ -12,8 +12,8 @@ pub struct BlockWithTxs(starknet::core::types::BlockWithTxs); impl BlockWithTxs { pub fn new(block_hash: BlockHash, block: Block, finality_status: FinalityStatus) -> Self { let l1_gas_price = ResourcePrice { - price_in_wei: block.header.gas_prices.eth_gas_price, - price_in_strk: Some(block.header.gas_prices.strk_gas_price), + price_in_wei: block.header.gas_prices.eth, + price_in_strk: Some(block.header.gas_prices.strk), }; let transactions = @@ -47,8 +47,8 @@ impl PendingBlockWithTxs { transactions.into_iter().map(|tx| crate::transaction::Tx::from(tx).0).collect(); let l1_gas_price = ResourcePrice { - price_in_wei: header.gas_prices.eth_gas_price, - price_in_strk: Some(header.gas_prices.strk_gas_price), + price_in_wei: header.gas_prices.eth, + price_in_strk: Some(header.gas_prices.strk), }; Self(starknet::core::types::PendingBlockWithTxs { @@ -80,8 +80,8 @@ impl BlockWithTxHashes { finality_status: FinalityStatus, ) -> Self { let l1_gas_price = ResourcePrice { - price_in_wei: block.header.gas_prices.eth_gas_price, - price_in_strk: Some(block.header.gas_prices.strk_gas_price), + price_in_wei: block.header.gas_prices.eth, + price_in_strk: Some(block.header.gas_prices.strk), }; Self(starknet::core::types::BlockWithTxHashes { @@ -109,8 +109,8 @@ pub struct PendingBlockWithTxHashes(starknet::core::types::PendingBlockWithTxHas impl PendingBlockWithTxHashes { pub fn new(header: PartialHeader, transactions: Vec) -> Self { let l1_gas_price = ResourcePrice { - price_in_wei: header.gas_prices.eth_gas_price, - price_in_strk: Some(header.gas_prices.strk_gas_price), + price_in_wei: header.gas_prices.eth, + price_in_strk: Some(header.gas_prices.strk), }; Self(starknet::core::types::PendingBlockWithTxHashes { diff --git a/crates/katana/rpc/src/katana.rs b/crates/katana/rpc/src/katana.rs index 12eeec6d11..43be36fcec 100644 --- a/crates/katana/rpc/src/katana.rs +++ b/crates/katana/rpc/src/katana.rs @@ -25,7 +25,8 @@ impl KatanaApiServer for KatanaApi { } async fn next_block_timestamp(&self) -> Result { - Ok(self.sequencer.backend().env.read().block.block_timestamp.0) + // Ok(self.sequencer.backend().env.read().block.block_timestamp.0) + unimplemented!() } async fn set_next_block_timestamp(&self, timestamp: u64) -> Result<(), Error> { diff --git a/crates/katana/rpc/src/starknet.rs b/crates/katana/rpc/src/starknet.rs index 3fe97d4375..737139d262 100644 --- a/crates/katana/rpc/src/starknet.rs +++ b/crates/katana/rpc/src/starknet.rs @@ -112,44 +112,48 @@ impl StarknetApiServer for StarknetApi { let provider = self.sequencer.backend.blockchain.provider(); if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { - let pending_state = self.sequencer.pending_state().expect("pending state should exist"); - - let block_context = self.sequencer.backend.env.read().block.clone(); - let latest_hash = - BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; - - let gas_prices = GasPrices { - eth_gas_price: 
block_context.gas_prices.eth_l1_gas_price.try_into().unwrap(), - strk_gas_price: block_context.gas_prices.strk_l1_gas_price.try_into().unwrap(), - }; + if let Some(pending_state) = self.sequencer.pending_state() { + let block_env = pending_state.block_envs.read().0.clone(); + let latest_hash = + BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; + + let gas_prices = GasPrices { + eth: block_env.l1_gas_prices.eth, + strk: block_env.l1_gas_prices.strk, + }; - let header = PartialHeader { - gas_prices, - parent_hash: latest_hash, - version: CURRENT_STARKNET_VERSION, - timestamp: block_context.block_timestamp.0, - sequencer_address: block_context.sequencer_address.into(), - }; + let header = PartialHeader { + gas_prices, + parent_hash: latest_hash, + version: CURRENT_STARKNET_VERSION, + timestamp: block_env.timestamp, + sequencer_address: block_env.sequencer_address, + }; - let transactions = - pending_state.executed_txs.read().iter().map(|(tx, _)| tx.hash).collect::>(); + let transactions = pending_state + .executed_txs + .read() + .iter() + .map(|(tx, _)| tx.hash) + .collect::>(); + + return Ok(MaybePendingBlockWithTxHashes::Pending(PendingBlockWithTxHashes::new( + header, + transactions, + ))); + } + } - Ok(MaybePendingBlockWithTxHashes::Pending(PendingBlockWithTxHashes::new( - header, - transactions, - ))) - } else { - let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(StarknetApiError::from)? - .map(BlockHashOrNumber::Num) - .ok_or(StarknetApiError::BlockNotFound)?; + let block_num = BlockIdReader::convert_block_id(provider, block_id) + .map_err(StarknetApiError::from)? + .map(BlockHashOrNumber::Num) + .ok_or(StarknetApiError::BlockNotFound)?; - katana_rpc_types_builder::BlockBuilder::new(block_num, provider) - .build_with_tx_hash() - .map_err(StarknetApiError::from)? - .map(MaybePendingBlockWithTxHashes::Block) - .ok_or(Error::from(StarknetApiError::BlockNotFound)) - } + katana_rpc_types_builder::BlockBuilder::new(block_num, provider) + .build_with_tx_hash() + .map_err(StarknetApiError::from)? 
+ .map(MaybePendingBlockWithTxHashes::Block) + .ok_or(Error::from(StarknetApiError::BlockNotFound)) } async fn transaction_by_block_id_and_index( @@ -187,45 +191,48 @@ impl StarknetApiServer for StarknetApi { let provider = self.sequencer.backend.blockchain.provider(); if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { - let pending_state = self.sequencer.pending_state().expect("pending state should exist"); - - let block_context = self.sequencer.backend.env.read().block.clone(); - let latest_hash = - BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; - - let gas_prices = GasPrices { - eth_gas_price: block_context.gas_prices.eth_l1_gas_price.try_into().unwrap(), - strk_gas_price: block_context.gas_prices.strk_l1_gas_price.try_into().unwrap(), - }; + if let Some(pending_state) = self.sequencer.pending_state() { + let block_env = pending_state.block_envs.read().0.clone(); + let latest_hash = + BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; + + let gas_prices = GasPrices { + eth: block_env.l1_gas_prices.eth, + strk: block_env.l1_gas_prices.strk, + }; - let header = PartialHeader { - gas_prices, - parent_hash: latest_hash, - version: CURRENT_STARKNET_VERSION, - timestamp: block_context.block_timestamp.0, - sequencer_address: block_context.sequencer_address.into(), - }; + let header = PartialHeader { + gas_prices, + parent_hash: latest_hash, + version: CURRENT_STARKNET_VERSION, + timestamp: block_env.timestamp, + sequencer_address: block_env.sequencer_address, + }; - let transactions = pending_state - .executed_txs - .read() - .iter() - .map(|(tx, _)| tx.clone()) - .collect::>(); + let transactions = pending_state + .executed_txs + .read() + .iter() + .map(|(tx, _)| tx.clone()) + .collect::>(); + + return Ok(MaybePendingBlockWithTxs::Pending(PendingBlockWithTxs::new( + header, + transactions, + ))); + } + } - Ok(MaybePendingBlockWithTxs::Pending(PendingBlockWithTxs::new(header, transactions))) - } else { - let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? - .map(BlockHashOrNumber::Num) - .ok_or(StarknetApiError::BlockNotFound)?; + let block_num = BlockIdReader::convert_block_id(provider, block_id) + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? + .map(BlockHashOrNumber::Num) + .ok_or(StarknetApiError::BlockNotFound)?; - katana_rpc_types_builder::BlockBuilder::new(block_num, provider) - .build() - .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? - .map(MaybePendingBlockWithTxs::Block) - .ok_or(Error::from(StarknetApiError::BlockNotFound)) - } + katana_rpc_types_builder::BlockBuilder::new(block_num, provider) + .build() + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? 
+ .map(MaybePendingBlockWithTxs::Block) + .ok_or(Error::from(StarknetApiError::BlockNotFound)) } async fn state_update(&self, block_id: BlockIdOrTag) -> Result { diff --git a/crates/katana/src/args.rs b/crates/katana/src/args.rs index 14237b3ece..70f5b2ed4c 100644 --- a/crates/katana/src/args.rs +++ b/crates/katana/src/args.rs @@ -161,7 +161,7 @@ pub struct EnvironmentOptions { #[arg(long)] #[arg(help = "The gas price.")] - pub gas_price: Option, + pub gas_price: Option, #[arg(long)] #[arg(help = "The maximum number of steps available for the account validation logic.")] @@ -260,11 +260,8 @@ mod test { #[test] fn default_block_context_from_args() { let args = KatanaArgs::parse_from(["katana"]); - let block_context = args.starknet_config().block_context(); - assert_eq!(block_context.gas_prices.eth_l1_gas_price, DEFAULT_GAS_PRICE); - assert_eq!(block_context.chain_id.0, "KATANA".to_string()); - assert_eq!(block_context.validate_max_n_steps, DEFAULT_VALIDATE_MAX_STEPS); - assert_eq!(block_context.invoke_tx_max_n_steps, DEFAULT_INVOKE_MAX_STEPS); + let block_context = args.starknet_config().block_env(); + assert_eq!(block_context.l1_gas_prices.eth, DEFAULT_GAS_PRICE); } #[test] @@ -281,11 +278,8 @@ mod test { "200", ]); - let block_context = args.starknet_config().block_context(); + let block_context = args.starknet_config().block_env(); - assert_eq!(block_context.gas_prices.eth_l1_gas_price, 10); - assert_eq!(block_context.chain_id.0, "SN_GOERLI".to_string()); - assert_eq!(block_context.validate_max_n_steps, 100); - assert_eq!(block_context.invoke_tx_max_n_steps, 200); + assert_eq!(block_context.l1_gas_prices.eth, 10); } } diff --git a/crates/katana/src/main.rs b/crates/katana/src/main.rs index ea53341321..1a3f5b396f 100644 --- a/crates/katana/src/main.rs +++ b/crates/katana/src/main.rs @@ -36,7 +36,7 @@ async fn main() -> Result<(), Box> { let sequencer_config = config.sequencer_config(); let starknet_config = config.starknet_config(); - let sequencer = Arc::new(KatanaSequencer::new(sequencer_config, starknet_config).await); + let sequencer = Arc::new(KatanaSequencer::new(sequencer_config, starknet_config).await?); let NodeHandle { addr, handle, .. 
} = spawn(Arc::clone(&sequencer), server_config).await?; if !config.silent { diff --git a/crates/katana/storage/provider/src/lib.rs b/crates/katana/storage/provider/src/lib.rs index 4176240a97..11f62d3c17 100644 --- a/crates/katana/storage/provider/src/lib.rs +++ b/crates/katana/storage/provider/src/lib.rs @@ -9,12 +9,14 @@ use katana_primitives::contract::{ ClassHash, CompiledClassHash, CompiledContractClass, ContractAddress, FlattenedSierraClass, GenericContractInfo, StorageKey, StorageValue, }; +use katana_primitives::env::BlockEnv; use katana_primitives::receipt::Receipt; use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; use katana_primitives::transaction::{TxHash, TxNumber, TxWithHash}; use katana_primitives::FieldElement; use traits::block::{BlockIdReader, BlockStatusProvider, BlockWriter}; use traits::contract::{ContractClassProvider, ContractClassWriter}; +use traits::env::BlockEnvProvider; use traits::state::{StateRootProvider, StateWriter}; use traits::transaction::TransactionStatusProvider; @@ -348,3 +350,12 @@ where self.provider.set_nonce(address, nonce) } } + +impl BlockEnvProvider for BlockchainProvider +where + Db: BlockEnvProvider, +{ + fn block_env_at(&self, id: BlockHashOrNumber) -> ProviderResult> { + self.provider.block_env_at(id) + } +} diff --git a/crates/katana/storage/provider/src/providers/db/mod.rs b/crates/katana/storage/provider/src/providers/db/mod.rs index a2d99a310e..bc9b6e5832 100644 --- a/crates/katana/storage/provider/src/providers/db/mod.rs +++ b/crates/katana/storage/provider/src/providers/db/mod.rs @@ -29,6 +29,7 @@ use katana_primitives::contract::{ ClassHash, CompiledClassHash, ContractAddress, GenericContractInfo, Nonce, StorageKey, StorageValue, }; +use katana_primitives::env::BlockEnv; use katana_primitives::receipt::Receipt; use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; use katana_primitives::transaction::{TxHash, TxNumber, TxWithHash}; @@ -39,6 +40,7 @@ use crate::traits::block::{ BlockHashProvider, BlockNumberProvider, BlockProvider, BlockStatusProvider, BlockWriter, HeaderProvider, }; +use crate::traits::env::BlockEnvProvider; use crate::traits::state::{StateFactoryProvider, StateProvider, StateRootProvider}; use crate::traits::state_update::StateUpdateProvider; use crate::traits::transaction::{ @@ -532,6 +534,19 @@ impl ReceiptProvider for DbProvider { } } +impl BlockEnvProvider for DbProvider { + fn block_env_at(&self, block_id: BlockHashOrNumber) -> ProviderResult> { + let Some(header) = self.header(block_id)? 
else { return Ok(None) }; + + Ok(Some(BlockEnv { + number: header.number, + timestamp: header.timestamp, + l1_gas_prices: header.gas_prices, + sequencer_address: header.sequencer_address, + })) + } +} + impl BlockWriter for DbProvider { fn insert_block_with_states_and_receipts( &self, diff --git a/crates/katana/storage/provider/src/providers/fork/mod.rs b/crates/katana/storage/provider/src/providers/fork/mod.rs index 54a30b1abd..2ea2a0c4a2 100644 --- a/crates/katana/storage/provider/src/providers/fork/mod.rs +++ b/crates/katana/storage/provider/src/providers/fork/mod.rs @@ -12,6 +12,7 @@ use katana_primitives::block::{ use katana_primitives::contract::{ ClassHash, CompiledClassHash, CompiledContractClass, ContractAddress, FlattenedSierraClass, }; +use katana_primitives::env::BlockEnv; use katana_primitives::receipt::Receipt; use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; use katana_primitives::transaction::{Tx, TxHash, TxNumber, TxWithHash}; @@ -28,6 +29,7 @@ use crate::traits::block::{ HeaderProvider, }; use crate::traits::contract::ContractClassWriter; +use crate::traits::env::BlockEnvProvider; use crate::traits::state::{StateFactoryProvider, StateProvider, StateRootProvider, StateWriter}; use crate::traits::state_update::StateUpdateProvider; use crate::traits::transaction::{ @@ -518,3 +520,14 @@ impl StateWriter for ForkedProvider { Ok(()) } } + +impl BlockEnvProvider for ForkedProvider { + fn block_env_at(&self, block_id: BlockHashOrNumber) -> ProviderResult> { + Ok(self.header(block_id)?.map(|header| BlockEnv { + number: header.number, + timestamp: header.timestamp, + l1_gas_prices: header.gas_prices, + sequencer_address: header.sequencer_address, + })) + } +} diff --git a/crates/katana/storage/provider/src/providers/in_memory/mod.rs b/crates/katana/storage/provider/src/providers/in_memory/mod.rs index 1d8ddfa018..d14511abd5 100644 --- a/crates/katana/storage/provider/src/providers/in_memory/mod.rs +++ b/crates/katana/storage/provider/src/providers/in_memory/mod.rs @@ -12,6 +12,7 @@ use katana_primitives::block::{ use katana_primitives::contract::{ ClassHash, CompiledClassHash, CompiledContractClass, ContractAddress, FlattenedSierraClass, }; +use katana_primitives::env::BlockEnv; use katana_primitives::receipt::Receipt; use katana_primitives::state::{StateUpdates, StateUpdatesWithDeclaredClasses}; use katana_primitives::transaction::{Tx, TxHash, TxNumber, TxWithHash}; @@ -24,6 +25,7 @@ use crate::traits::block::{ HeaderProvider, }; use crate::traits::contract::ContractClassWriter; +use crate::traits::env::BlockEnvProvider; use crate::traits::state::{StateFactoryProvider, StateProvider, StateRootProvider, StateWriter}; use crate::traits::state_update::StateUpdateProvider; use crate::traits::transaction::{ @@ -512,3 +514,14 @@ impl StateWriter for InMemoryProvider { Ok(()) } } + +impl BlockEnvProvider for InMemoryProvider { + fn block_env_at(&self, block_id: BlockHashOrNumber) -> ProviderResult> { + Ok(self.header(block_id)?.map(|header| BlockEnv { + number: header.number, + timestamp: header.timestamp, + l1_gas_prices: header.gas_prices, + sequencer_address: header.sequencer_address, + })) + } +} diff --git a/crates/katana/storage/provider/src/traits/block.rs b/crates/katana/storage/provider/src/traits/block.rs index 04a7a47ab5..ea8085564e 100644 --- a/crates/katana/storage/provider/src/traits/block.rs +++ b/crates/katana/storage/provider/src/traits/block.rs @@ -27,12 +27,14 @@ pub trait BlockIdReader: BlockNumberProvider + Send + Sync { if let 
Some((num, _)) = Self::pending_block_id(self)? { Ok(Some(num)) } else { - Ok(None) + // returns latest number for now + BlockNumberProvider::latest_number(&self).map(Some) } } } } + // TODO: integrate the pending block with the provider /// Retrieves the pending block number and hash. fn pending_block_id(&self) -> ProviderResult> { Ok(None) // Returns `None` for now diff --git a/crates/katana/storage/provider/src/traits/env.rs b/crates/katana/storage/provider/src/traits/env.rs index a0565d733c..b883d8d95c 100644 --- a/crates/katana/storage/provider/src/traits/env.rs +++ b/crates/katana/storage/provider/src/traits/env.rs @@ -3,7 +3,10 @@ use katana_primitives::env::BlockEnv; use crate::ProviderResult; +/// A provider that provides block environment values including Starknet execution environment +/// values. #[auto_impl::auto_impl(&, Box, Arc)] pub trait BlockEnvProvider: Send + Sync { - fn env_at(&self, block_id: BlockHashOrNumber) -> ProviderResult; + /// Returns the block environment values at the given block id. + fn block_env_at(&self, block_id: BlockHashOrNumber) -> ProviderResult>; } diff --git a/crates/katana/storage/provider/tests/block.rs b/crates/katana/storage/provider/tests/block.rs index 1128d89378..93701dc1bb 100644 --- a/crates/katana/storage/provider/tests/block.rs +++ b/crates/katana/storage/provider/tests/block.rs @@ -2,6 +2,7 @@ use anyhow::Result; use katana_primitives::block::{ Block, BlockHashOrNumber, BlockNumber, BlockWithTxHashes, FinalityStatus, }; +use katana_primitives::env::BlockEnv; use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_provider::providers::db::DbProvider; use katana_provider::providers::fork::ForkedProvider; @@ -9,6 +10,7 @@ use katana_provider::providers::in_memory::InMemoryProvider; use katana_provider::traits::block::{ BlockHashProvider, BlockProvider, BlockStatusProvider, BlockWriter, }; +use katana_provider::traits::env::BlockEnvProvider; use katana_provider::traits::state::StateRootProvider; use katana_provider::traits::state_update::StateUpdateProvider; use katana_provider::traits::transaction::{ @@ -56,7 +58,8 @@ where + BlockWriter + ReceiptProvider + StateRootProvider - + TransactionStatusProvider, + + TransactionStatusProvider + + BlockEnvProvider, { let blocks = generate_dummy_blocks_and_receipts(count); @@ -83,6 +86,13 @@ where let expected_block_hash = block.block.header.hash; let expected_block = block.block.unseal(); + let expected_block_env = BlockEnv { + number: expected_block_num, + timestamp: expected_block.header.timestamp, + l1_gas_prices: expected_block.header.gas_prices, + sequencer_address: expected_block.header.sequencer_address, + }; + let actual_block_hash = provider.block_hash_by_num(expected_block_num)?; let actual_block = provider.block(block_id)?; @@ -99,6 +109,7 @@ where }; let actual_block_with_tx_hashes = provider.block_with_tx_hashes(block_id)?; + let actual_block_env = provider.block_env_at(block_id)?; assert_eq!(actual_status, Some(FinalityStatus::AcceptedOnL2)); assert_eq!(actual_block_with_tx_hashes, Some(expected_block_with_tx_hashes)); @@ -118,6 +129,8 @@ where assert_eq!(actual_tx, Some(tx.clone())); } + assert_eq!(actual_block_env, Some(expected_block_env)); + assert_eq!(actual_receipts.as_ref().map(|r| r.len()), Some(expected_block.body.len())); assert_eq!(actual_receipts, Some(receipts)); From de5bbe80f56eeb29ac83b42b0d979c6c779af8fd Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Wed, 17 Jan 2024 00:31:50 +0900 Subject: [PATCH 23/33] feat(katana): db storage intergration 
(#1440) --- Cargo.lock | 2 + crates/katana/core/Cargo.toml | 2 + crates/katana/core/src/backend/config.rs | 4 + crates/katana/core/src/backend/mod.rs | 6 + crates/katana/core/src/backend/storage.rs | 130 +++++++++++++++++- crates/katana/src/args.rs | 10 +- crates/katana/storage/db/src/utils.rs | 6 +- .../storage/provider/src/providers/db/mod.rs | 2 +- 8 files changed, 152 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cfca2b6fab..b28d950ab4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5489,6 +5489,7 @@ dependencies = [ "flate2", "futures", "hex", + "katana-db", "katana-executor", "katana-primitives", "katana-provider", @@ -5500,6 +5501,7 @@ dependencies = [ "serde_with", "starknet", "starknet_api", + "tempfile", "thiserror", "tokio", "tracing", diff --git a/crates/katana/core/Cargo.toml b/crates/katana/core/Cargo.toml index 96f81e3eb0..02f6451311 100644 --- a/crates/katana/core/Cargo.toml +++ b/crates/katana/core/Cargo.toml @@ -7,6 +7,7 @@ repository.workspace = true version.workspace = true [dependencies] +katana-db = { path = "../storage/db" } katana-executor = { path = "../executor" } katana-primitives = { path = "../primitives" } katana-provider = { path = "../storage/provider" } @@ -37,6 +38,7 @@ url.workspace = true [dev-dependencies] assert_matches.workspace = true hex = "0.4.3" +tempfile = "3.8.1" [features] messaging = [ "ethers" ] diff --git a/crates/katana/core/src/backend/config.rs b/crates/katana/core/src/backend/config.rs index d96e54066d..208513a5e6 100644 --- a/crates/katana/core/src/backend/config.rs +++ b/crates/katana/core/src/backend/config.rs @@ -1,3 +1,5 @@ +use std::path::PathBuf; + use katana_primitives::block::GasPrices; use katana_primitives::chain::ChainId; use katana_primitives::env::BlockEnv; @@ -15,6 +17,7 @@ pub struct StarknetConfig { pub fork_rpc_url: Option, pub fork_block_number: Option, pub disable_validate: bool, + pub db_dir: Option, } impl StarknetConfig { @@ -40,6 +43,7 @@ impl Default for StarknetConfig { fork_block_number: None, env: Environment::default(), disable_validate: false, + db_dir: None, } } } diff --git a/crates/katana/core/src/backend/mod.rs b/crates/katana/core/src/backend/mod.rs index f6f303bd16..917b6954f0 100644 --- a/crates/katana/core/src/backend/mod.rs +++ b/crates/katana/core/src/backend/mod.rs @@ -104,6 +104,12 @@ impl Backend { .expect("able to create forked blockchain"); (blockchain, forked_chain_id.into()) + } else if let Some(db_path) = &config.db_dir { + ( + Blockchain::new_with_db(db_path, &block_env) + .expect("able to create blockchain from db"), + config.env.chain_id, + ) } else { let blockchain = Blockchain::new_with_genesis(InMemoryProvider::new(), &block_env) .expect("able to create blockchain from genesis block"); diff --git a/crates/katana/core/src/backend/storage.rs b/crates/katana/core/src/backend/storage.rs index a71259b038..0d6b4e6f85 100644 --- a/crates/katana/core/src/backend/storage.rs +++ b/crates/katana/core/src/backend/storage.rs @@ -1,4 +1,8 @@ +use std::path::Path; + use anyhow::Result; +use katana_db::init_db; +use katana_db::utils::is_database_empty; use katana_primitives::block::{ Block, BlockHash, FinalityStatus, Header, PartialHeader, SealedBlockWithStatus, }; @@ -6,6 +10,7 @@ use katana_primitives::env::BlockEnv; use katana_primitives::state::StateUpdatesWithDeclaredClasses; use katana_primitives::version::CURRENT_STARKNET_VERSION; use katana_primitives::FieldElement; +use katana_provider::providers::db::DbProvider; use katana_provider::traits::block::{BlockProvider, 
BlockWriter}; use katana_provider::traits::contract::ContractClassWriter; use katana_provider::traits::env::BlockEnvProvider; @@ -87,6 +92,15 @@ impl Blockchain { Self::new_with_block_and_state(provider, block, get_genesis_states_for_testing()) } + pub fn new_with_db(db_path: impl AsRef, block_context: &BlockEnv) -> Result { + if is_database_empty(&db_path) { + let provider = DbProvider::new(init_db(db_path)?); + Ok(Self::new_with_genesis(provider, block_context)?) + } else { + Ok(Self::new(DbProvider::new(init_db(db_path)?))) + } + } + // TODO: make this function to just accept a `Header` created from the forked block. /// Builds a new blockchain with a forked block. pub fn new_from_forked( @@ -131,19 +145,27 @@ impl Blockchain { #[cfg(test)] mod tests { - use katana_primitives::block::{FinalityStatus, GasPrices}; + use katana_primitives::block::{ + Block, FinalityStatus, GasPrices, Header, SealedBlockWithStatus, + }; use katana_primitives::env::BlockEnv; + use katana_primitives::receipt::{InvokeTxReceipt, Receipt}; + use katana_primitives::state::StateUpdatesWithDeclaredClasses; + use katana_primitives::transaction::{InvokeTx, Tx, TxWithHash}; use katana_primitives::FieldElement; use katana_provider::providers::in_memory::InMemoryProvider; use katana_provider::traits::block::{ - BlockHashProvider, BlockNumberProvider, BlockStatusProvider, HeaderProvider, + BlockHashProvider, BlockNumberProvider, BlockProvider, BlockStatusProvider, BlockWriter, + HeaderProvider, }; use katana_provider::traits::state::StateFactoryProvider; + use katana_provider::traits::transaction::TransactionProvider; use starknet::macros::felt; use super::Blockchain; use crate::constants::{ - ERC20_CONTRACT_CLASS_HASH, FEE_TOKEN_ADDRESS, UDC_ADDRESS, UDC_CLASS_HASH, + ERC20_CONTRACT, ERC20_CONTRACT_CLASS_HASH, FEE_TOKEN_ADDRESS, UDC_ADDRESS, UDC_CLASS_HASH, + UDC_CONTRACT, }; #[test] @@ -207,4 +229,106 @@ mod tests { assert_eq!(header.parent_hash, FieldElement::ZERO); assert_eq!(block_status, FinalityStatus::AcceptedOnL1); } + + #[test] + fn blockchain_from_db() { + let db_path = tempfile::TempDir::new().expect("Failed to create temp dir.").into_path(); + + let block_env = BlockEnv { + number: 0, + timestamp: 0, + sequencer_address: Default::default(), + l1_gas_prices: GasPrices { eth: 0, strk: 0 }, + }; + + let dummy_tx = + TxWithHash { hash: felt!("0xbad"), transaction: Tx::Invoke(InvokeTx::default()) }; + + let dummy_block = SealedBlockWithStatus { + status: FinalityStatus::AcceptedOnL1, + block: Block { + header: Header { + parent_hash: FieldElement::ZERO, + number: 1, + gas_prices: GasPrices::default(), + timestamp: 123456, + ..Default::default() + }, + body: vec![dummy_tx.clone()], + } + .seal(), + }; + + { + let blockchain = Blockchain::new_with_db(&db_path, &block_env) + .expect("Failed to create db-backed blockchain storage"); + + blockchain + .provider() + .insert_block_with_states_and_receipts( + dummy_block.clone(), + StateUpdatesWithDeclaredClasses::default(), + vec![Receipt::Invoke(InvokeTxReceipt::default())], + ) + .unwrap(); + + // assert genesis state is correct + + let state = blockchain.provider().latest().expect("failed to get latest state"); + + let actual_udc_class_hash = + state.class_hash_of_contract(*UDC_ADDRESS).unwrap().unwrap(); + let actual_udc_class = state.class(actual_udc_class_hash).unwrap().unwrap(); + + let actual_fee_token_class_hash = + state.class_hash_of_contract(*FEE_TOKEN_ADDRESS).unwrap().unwrap(); + let actual_fee_token_class = 
state.class(actual_fee_token_class_hash).unwrap().unwrap(); + + assert_eq!(actual_udc_class_hash, *UDC_CLASS_HASH); + assert_eq!(actual_udc_class, (*UDC_CONTRACT).clone()); + + assert_eq!(actual_fee_token_class_hash, *ERC20_CONTRACT_CLASS_HASH); + assert_eq!(actual_fee_token_class, (*ERC20_CONTRACT).clone()); + } + + // re open the db and assert the state is the same and not overwritten + + { + let blockchain = Blockchain::new_with_db(&db_path, &block_env) + .expect("Failed to create db-backed blockchain storage"); + + // assert genesis state is correct + + let state = blockchain.provider().latest().expect("failed to get latest state"); + + let actual_udc_class_hash = + state.class_hash_of_contract(*UDC_ADDRESS).unwrap().unwrap(); + let actual_udc_class = state.class(actual_udc_class_hash).unwrap().unwrap(); + + let actual_fee_token_class_hash = + state.class_hash_of_contract(*FEE_TOKEN_ADDRESS).unwrap().unwrap(); + let actual_fee_token_class = state.class(actual_fee_token_class_hash).unwrap().unwrap(); + + assert_eq!(actual_udc_class_hash, *UDC_CLASS_HASH); + assert_eq!(actual_udc_class, (*UDC_CONTRACT).clone()); + + assert_eq!(actual_fee_token_class_hash, *ERC20_CONTRACT_CLASS_HASH); + assert_eq!(actual_fee_token_class, (*ERC20_CONTRACT).clone()); + + let block_number = blockchain.provider().latest_number().unwrap(); + let block_hash = blockchain.provider().latest_hash().unwrap(); + let block = blockchain + .provider() + .block_by_hash(dummy_block.block.header.hash) + .unwrap() + .unwrap(); + + let tx = blockchain.provider().transaction_by_hash(dummy_tx.hash).unwrap().unwrap(); + + assert_eq!(block_hash, dummy_block.block.header.hash); + assert_eq!(block_number, dummy_block.block.header.header.number); + assert_eq!(block, dummy_block.block.unseal()); + assert_eq!(tx, dummy_tx); + } + } } diff --git a/crates/katana/src/args.rs b/crates/katana/src/args.rs index 70f5b2ed4c..aa5e345866 100644 --- a/crates/katana/src/args.rs +++ b/crates/katana/src/args.rs @@ -48,10 +48,11 @@ pub struct KatanaArgs { #[arg(long)] #[arg(value_name = "PATH")] - #[arg(help = "Dump the state of chain on exit to the given file.")] - #[arg(long_help = "Dump the state of chain on exit to the given file. If the value is a \ - directory, the state will be written to `/state.bin`.")] - pub dump_state: Option, + #[arg(help = "Directory path of the database to initialize from.")] + #[arg(long_help = "Directory path of the database to initialize from. The path must either \ + be an empty directory or a directory which already contains a previously \ + initialized Katana database.")] + pub db_dir: Option, #[arg(long)] #[arg(value_name = "URL")] @@ -237,6 +238,7 @@ impl KatanaArgs { .validate_max_steps .unwrap_or(DEFAULT_VALIDATE_MAX_STEPS), }, + db_dir: self.db_dir.clone(), } } } diff --git a/crates/katana/storage/db/src/utils.rs b/crates/katana/storage/db/src/utils.rs index c00b10816f..edee45a46c 100644 --- a/crates/katana/storage/db/src/utils.rs +++ b/crates/katana/storage/db/src/utils.rs @@ -16,8 +16,10 @@ pub(crate) fn default_page_size() -> usize { os_page_size.clamp(MIN_PAGE_SIZE, LIBMDBX_MAX_PAGE_SIZE) } -/// Check if a db is empty. It does not provide any information on the -/// validity of the data in it. We consider a database as non empty when it's a non empty directory. +/// Check if a database is empty. We consider a database as empty when (1) `path` it's an empty +/// directory, (2) if `path` doesn't exist, or (3) `path` is not a directory. 
+/// +/// It does not provide any information on the validity of the data in the Db if it isn't empty. pub fn is_database_empty>(path: P) -> bool { let path = path.as_ref(); if !path.exists() { diff --git a/crates/katana/storage/provider/src/providers/db/mod.rs b/crates/katana/storage/provider/src/providers/db/mod.rs index bc9b6e5832..0847920e2b 100644 --- a/crates/katana/storage/provider/src/providers/db/mod.rs +++ b/crates/katana/storage/provider/src/providers/db/mod.rs @@ -48,7 +48,7 @@ use crate::traits::transaction::{ }; use crate::ProviderResult; -/// A provider implementation that uses a database as a backend. +/// A provider implementation that uses a persistent database as the backend. #[derive(Debug)] pub struct DbProvider(DbEnv); From 745a19017168b2e4d1b3f01c407fde486f7cfe30 Mon Sep 17 00:00:00 2001 From: Junichi Sugiura Date: Wed, 17 Jan 2024 17:45:57 +0100 Subject: [PATCH 24/33] Switch transaction query arg from id to transactionHash (#1441) --- .../torii/graphql/src/object/transaction.rs | 36 ++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/crates/torii/graphql/src/object/transaction.rs b/crates/torii/graphql/src/object/transaction.rs index 17cf87bc33..1f5b0a75f0 100644 --- a/crates/torii/graphql/src/object/transaction.rs +++ b/crates/torii/graphql/src/object/transaction.rs @@ -1,7 +1,14 @@ +use async_graphql::dynamic::{Field, FieldFuture, InputValue, TypeRef}; +use async_graphql::Value; +use convert_case::{Case, Casing}; +use sqlx::{Pool, Sqlite}; + use super::{ObjectTrait, TypeMapping}; use crate::constants::{TRANSACTION_NAMES, TRANSACTION_TABLE, TRANSACTION_TYPE_NAME}; use crate::mapping::TRANSACTION_MAPPING; - +use crate::query::data::fetch_single_row; +use crate::query::value_mapping_from_row; +use crate::utils::extract; pub struct TransactionObject; impl ObjectTrait for TransactionObject { @@ -20,4 +27,31 @@ impl ObjectTrait for TransactionObject { fn table_name(&self) -> Option<&str> { Some(TRANSACTION_TABLE) } + + fn resolve_one(&self) -> Option { + let type_mapping = self.type_mapping().clone(); + let table_name = self.table_name().unwrap().to_string(); + + Some( + Field::new(self.name().0, TypeRef::named_nn(self.type_name()), move |ctx| { + let type_mapping = type_mapping.clone(); + let table_name = table_name.to_string(); + + FieldFuture::new(async move { + let mut conn = ctx.data::>()?.acquire().await?; + let hash = + extract::(ctx.args.as_index_map(), &COLUMN.to_case(Case::Camel))?; + let data = fetch_single_row(&mut conn, &table_name, COLUMN, &hash).await?; + let model = value_mapping_from_row(&data, &type_mapping, false)?; + Ok(Some(Value::Object(model))) + }) + }) + .argument(InputValue::new( + COLUMN.to_case(Case::Camel), + TypeRef::named_nn(TypeRef::STRING), + )), + ) + } } + +const COLUMN: &str = "transaction_hash"; From bc6a5c143415055c4225ebfe28710c2e0a462d59 Mon Sep 17 00:00:00 2001 From: Tarrence van As Date: Wed, 17 Jan 2024 12:00:19 -0500 Subject: [PATCH 25/33] Prepare release: v0.5.1-alpha.0 (#1451) --- Cargo.lock | 64 +++++++++++++++++++++++++++--------------------------- Cargo.toml | 2 +- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b28d950ab4..ccabe33673 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -801,7 +801,7 @@ dependencies = [ [[package]] name = "benches" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "clap_builder", @@ -2661,15 +2661,15 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = 
"dojo-core" -version = "0.5.0" +version = "0.5.1-alpha.0" [[package]] name = "dojo-examples-spawn-and-move" -version = "0.5.0" +version = "0.5.1-alpha.0" [[package]] name = "dojo-lang" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -2717,7 +2717,7 @@ dependencies = [ [[package]] name = "dojo-language-server" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "cairo-lang-compiler", @@ -2739,7 +2739,7 @@ dependencies = [ [[package]] name = "dojo-test-utils" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "assert_fs", @@ -2771,7 +2771,7 @@ dependencies = [ [[package]] name = "dojo-types" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "crypto-bigint", "hex", @@ -2786,7 +2786,7 @@ dependencies = [ [[package]] name = "dojo-world" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "assert_fs", @@ -2819,7 +2819,7 @@ dependencies = [ [[package]] name = "dojo-world-abigen" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "cairo-lang-starknet", "camino", @@ -5436,7 +5436,7 @@ dependencies = [ [[package]] name = "katana" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "assert_matches", "clap", @@ -5445,7 +5445,7 @@ dependencies = [ "katana-core", "katana-primitives", "katana-rpc", - "metrics 0.5.0", + "metrics 0.5.1-alpha.0", "metrics-process", "serde_json", "starknet_api", @@ -5457,7 +5457,7 @@ dependencies = [ [[package]] name = "katana-codecs" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "bytes", "katana-primitives", @@ -5465,7 +5465,7 @@ dependencies = [ [[package]] name = "katana-codecs-derive" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "proc-macro2", "quote", @@ -5475,7 +5475,7 @@ dependencies = [ [[package]] name = "katana-core" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "assert_matches", @@ -5510,7 +5510,7 @@ dependencies = [ [[package]] name = "katana-db" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "blockifier", @@ -5532,7 +5532,7 @@ dependencies = [ [[package]] name = "katana-executor" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "blockifier", @@ -5548,7 +5548,7 @@ dependencies = [ [[package]] name = "katana-primitives" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "blockifier", @@ -5569,7 +5569,7 @@ dependencies = [ [[package]] name = "katana-provider" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "auto_impl", @@ -5594,7 +5594,7 @@ dependencies = [ [[package]] name = "katana-rpc" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "assert_matches", @@ -5626,7 +5626,7 @@ dependencies = [ [[package]] name = "katana-rpc-types" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "derive_more", @@ -5639,7 +5639,7 @@ dependencies = [ [[package]] name = "katana-rpc-types-builder" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "katana-executor", @@ -5651,7 +5651,7 @@ dependencies = [ [[package]] name = "katana-runner" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "home", @@ -5942,7 +5942,7 @@ dependencies = [ [[package]] name = "metrics" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "hyper", @@ -8611,7 +8611,7 @@ dependencies = [ [[package]] name = "sozo" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "assert_fs", 
@@ -8657,7 +8657,7 @@ dependencies = [ [[package]] name = "sozo-signers" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "starknet", @@ -9814,7 +9814,7 @@ dependencies = [ [[package]] name = "torii-client" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "async-trait", "camino", @@ -9840,7 +9840,7 @@ dependencies = [ [[package]] name = "torii-core" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "async-trait", @@ -9876,7 +9876,7 @@ dependencies = [ [[package]] name = "torii-graphql" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "async-graphql", @@ -9915,7 +9915,7 @@ dependencies = [ [[package]] name = "torii-grpc" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "bytes", "crypto-bigint", @@ -9954,7 +9954,7 @@ dependencies = [ [[package]] name = "torii-server" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "anyhow", "async-trait", @@ -9973,7 +9973,7 @@ dependencies = [ "hyper-reverse-proxy", "indexmap 1.9.3", "lazy_static", - "metrics 0.5.0", + "metrics 0.5.1-alpha.0", "metrics-process", "scarb", "serde", @@ -10241,7 +10241,7 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "types-test" -version = "0.5.0" +version = "0.5.1-alpha.0" [[package]] name = "ucd-trie" diff --git a/Cargo.toml b/Cargo.toml index 6161010e7a..1977495f19 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,7 +36,7 @@ edition = "2021" license = "Apache-2.0" license-file = "LICENSE" repository = "https://github.com/dojoengine/dojo/" -version = "0.5.0" +version = "0.5.1-alpha.0" [profile.performance] codegen-units = 1 From cd410f994ade764e6360add7cd64ab3b05385b57 Mon Sep 17 00:00:00 2001 From: Yun Date: Wed, 17 Jan 2024 15:39:23 -0800 Subject: [PATCH 26/33] Fix metrics prefix naming (#1453) --- crates/katana/src/main.rs | 2 +- crates/metrics/src/prometheus_exporter.rs | 4 ++-- crates/torii/server/src/cli.rs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/katana/src/main.rs b/crates/katana/src/main.rs index 1a3f5b396f..f864061de8 100644 --- a/crates/katana/src/main.rs +++ b/crates/katana/src/main.rs @@ -67,7 +67,7 @@ async fn main() -> Result<(), Box> { } if let Some(listen_addr) = config.metrics { - let prometheus_handle = prometheus_exporter::install_recorder()?; + let prometheus_handle = prometheus_exporter::install_recorder("katana")?; info!(target: "katana::cli", addr = %listen_addr, "Starting metrics endpoint"); prometheus_exporter::serve( diff --git a/crates/metrics/src/prometheus_exporter.rs b/crates/metrics/src/prometheus_exporter.rs index 92ea2d23c7..82206da51e 100644 --- a/crates/metrics/src/prometheus_exporter.rs +++ b/crates/metrics/src/prometheus_exporter.rs @@ -14,13 +14,13 @@ pub(crate) trait Hook: Fn() + Send + Sync {} impl Hook for T {} /// Installs Prometheus as the metrics recorder. 
-pub fn install_recorder() -> anyhow::Result { +pub fn install_recorder(prefix: &str) -> anyhow::Result { let recorder = PrometheusBuilder::new().build_recorder(); let handle = recorder.handle(); // Build metrics stack Stack::new(recorder) - .push(PrefixLayer::new("katana")) + .push(PrefixLayer::new(prefix)) .install() .map_err(|e| anyhow::anyhow!("Couldn't set metrics recorder: {}", e))?; diff --git a/crates/torii/server/src/cli.rs b/crates/torii/server/src/cli.rs index 0e606a1aa3..04e99ac062 100644 --- a/crates/torii/server/src/cli.rs +++ b/crates/torii/server/src/cli.rs @@ -170,7 +170,7 @@ async fn main() -> anyhow::Result<()> { info!(target: "torii::cli", "Serving Graphql playground: {}\n", format!("http://{}/graphql", args.addr)); if let Some(listen_addr) = args.metrics { - let prometheus_handle = prometheus_exporter::install_recorder()?; + let prometheus_handle = prometheus_exporter::install_recorder("torii")?; info!(target: "torii::cli", addr = %listen_addr, "Starting metrics endpoint"); prometheus_exporter::serve( From 79278a62f66895cbe3bfaf822e3703309f8847b5 Mon Sep 17 00:00:00 2001 From: glihm Date: Thu, 18 Jan 2024 10:01:59 -0600 Subject: [PATCH 27/33] feat: bindgen crate (#1425) * feat: add first version of plugin-like integration for bindgen * feat: add unity backend template * refacto: move the BackendBuilder trait into backends module * docs: adjust function docs * fix: rename all to plugin for clarity * docs: update README * docs: fix typos * fix: ensure only dojo contracts are excluded from bindgen * feat: add DojoMetadata with info about models * fix: use hashmap instead of vec for models in metadata * fix: run cairo test fix * fix: remove unused model * docs: fix docs * tests: add tests * fix: bump cainome and work on tests * tests: fix tests * tests: ensure correct path for test file * feat: add ensure_abi method into model generated contract * feat: add generate_models_bindings setup for builtin plugins * fix: improve code parsing and plugin API * feat: identify systems and use new cainome tokenized abi * tests: fix building with dojo-test-utils + fix tests * fix: clean example to have correct class hash * fix: fix tests * chore: bump cainome to 0.2.2 to fix composite details in functions * fix: comment out testing until stack error on windows is investigated --- Cargo.lock | 88 ++++- Cargo.toml | 1 + crates/dojo-bindgen/Cargo.toml | 22 ++ crates/dojo-bindgen/README.md | 19 + crates/dojo-bindgen/src/error.rs | 16 + crates/dojo-bindgen/src/lib.rs | 346 ++++++++++++++++++ crates/dojo-bindgen/src/plugins/mod.rs | 23 ++ .../src/plugins/typescript/mod.rs | 32 ++ crates/dojo-bindgen/src/plugins/unity/mod.rs | 32 ++ .../dojo-bindgen/src/test_data/spawn-and-move | 1 + crates/dojo-lang/src/scarb_internal/mod.rs | 12 +- crates/sozo/Cargo.toml | 1 + crates/sozo/src/commands/build.rs | 38 +- 13 files changed, 620 insertions(+), 11 deletions(-) create mode 100644 crates/dojo-bindgen/Cargo.toml create mode 100644 crates/dojo-bindgen/README.md create mode 100644 crates/dojo-bindgen/src/error.rs create mode 100644 crates/dojo-bindgen/src/lib.rs create mode 100644 crates/dojo-bindgen/src/plugins/mod.rs create mode 100644 crates/dojo-bindgen/src/plugins/typescript/mod.rs create mode 100644 crates/dojo-bindgen/src/plugins/unity/mod.rs create mode 120000 crates/dojo-bindgen/src/test_data/spawn-and-move diff --git a/Cargo.lock b/Cargo.lock index ccabe33673..1bb69e2124 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1108,14 +1108,47 @@ version = "0.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "3a4f925191b4367301851c6d99b09890311d74b0d43f274c0b34c86d308a3663" +[[package]] +name = "cainome" +version = "0.1.5" +source = "git+https://github.com/cartridge-gg/cainome?tag=v0.2.2#46c9fa734b396632cb5f986294d05532ada80f9a" +dependencies = [ + "anyhow", + "async-trait", + "cainome-cairo-serde 0.1.0 (git+https://github.com/cartridge-gg/cainome?tag=v0.2.2)", + "cainome-parser 0.1.0 (git+https://github.com/cartridge-gg/cainome?tag=v0.2.2)", + "cainome-rs 0.1.0 (git+https://github.com/cartridge-gg/cainome?tag=v0.2.2)", + "camino", + "clap", + "clap_complete", + "convert_case 0.6.0", + "serde", + "serde_json", + "starknet", + "thiserror", + "tokio", + "tracing", + "tracing-subscriber", + "url", +] + [[package]] name = "cainome" version = "0.1.5" source = "git+https://github.com/cartridge-gg/cainome?rev=950e487#950e4871b735a1b4a7ba7e7561b9a15f5a43dbed" dependencies = [ - "cainome-cairo-serde", - "cainome-parser", - "cainome-rs", + "cainome-cairo-serde 0.1.0 (git+https://github.com/cartridge-gg/cainome?rev=950e487)", + "cainome-parser 0.1.0 (git+https://github.com/cartridge-gg/cainome?rev=950e487)", + "cainome-rs 0.1.0 (git+https://github.com/cartridge-gg/cainome?rev=950e487)", +] + +[[package]] +name = "cainome-cairo-serde" +version = "0.1.0" +source = "git+https://github.com/cartridge-gg/cainome?tag=v0.2.2#46c9fa734b396632cb5f986294d05532ada80f9a" +dependencies = [ + "starknet", + "thiserror", ] [[package]] @@ -1127,6 +1160,18 @@ dependencies = [ "thiserror", ] +[[package]] +name = "cainome-parser" +version = "0.1.0" +source = "git+https://github.com/cartridge-gg/cainome?tag=v0.2.2#46c9fa734b396632cb5f986294d05532ada80f9a" +dependencies = [ + "quote", + "serde_json", + "starknet", + "syn 2.0.41", + "thiserror", +] + [[package]] name = "cainome-parser" version = "0.1.0" @@ -1139,14 +1184,30 @@ dependencies = [ "thiserror", ] +[[package]] +name = "cainome-rs" +version = "0.1.0" +source = "git+https://github.com/cartridge-gg/cainome?tag=v0.2.2#46c9fa734b396632cb5f986294d05532ada80f9a" +dependencies = [ + "anyhow", + "cainome-cairo-serde 0.1.0 (git+https://github.com/cartridge-gg/cainome?tag=v0.2.2)", + "cainome-parser 0.1.0 (git+https://github.com/cartridge-gg/cainome?tag=v0.2.2)", + "proc-macro2", + "quote", + "serde_json", + "starknet", + "syn 2.0.41", + "thiserror", +] + [[package]] name = "cainome-rs" version = "0.1.0" source = "git+https://github.com/cartridge-gg/cainome?rev=950e487#950e4871b735a1b4a7ba7e7561b9a15f5a43dbed" dependencies = [ "anyhow", - "cainome-cairo-serde", - "cainome-parser", + "cainome-cairo-serde 0.1.0 (git+https://github.com/cartridge-gg/cainome?rev=950e487)", + "cainome-parser 0.1.0 (git+https://github.com/cartridge-gg/cainome?rev=950e487)", "proc-macro2", "quote", "serde_json", @@ -2659,6 +2720,20 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" +[[package]] +name = "dojo-bindgen" +version = "0.5.0" +dependencies = [ + "async-trait", + "cainome 0.1.5 (git+https://github.com/cartridge-gg/cainome?tag=v0.2.2)", + "camino", + "convert_case 0.6.0", + "serde", + "serde_json", + "starknet", + "thiserror", +] + [[package]] name = "dojo-core" version = "0.5.1-alpha.0" @@ -2792,7 +2867,7 @@ dependencies = [ "assert_fs", "assert_matches", "async-trait", - "cainome", + "cainome 0.1.5 (git+https://github.com/cartridge-gg/cainome?rev=950e487)", "cairo-lang-filesystem", "cairo-lang-project", 
"cairo-lang-starknet", @@ -8632,6 +8707,7 @@ dependencies = [ "clap-verbosity-flag", "clap_complete", "console", + "dojo-bindgen", "dojo-lang", "dojo-test-utils", "dojo-types", diff --git a/Cargo.toml b/Cargo.toml index 1977495f19..b0a8ef8aba 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ resolver = "2" members = [ "crates/benches", + "crates/dojo-bindgen", "crates/dojo-core", "crates/dojo-lang", "crates/dojo-language-server", diff --git a/crates/dojo-bindgen/Cargo.toml b/crates/dojo-bindgen/Cargo.toml new file mode 100644 index 0000000000..c0e183f233 --- /dev/null +++ b/crates/dojo-bindgen/Cargo.toml @@ -0,0 +1,22 @@ +[package] +description = "Dojo specific bindings generator based on Cainome." +edition.workspace = true +license-file.workspace = true +name = "dojo-bindgen" +repository.workspace = true +version.workspace = true + +[dependencies] +async-trait.workspace = true +camino.workspace = true +convert_case.workspace = true +starknet.workspace = true +serde.workspace = true +serde_json.workspace = true +thiserror.workspace = true + +# Some issue with CI on windows, need to be investigated. +# https://github.com/dojoengine/dojo/actions/runs/7548423990/job/20550444492?pr=1425#step:6:1644 +#dojo-test-utils = { path = "../dojo-test-utils", features = [ "build-examples" ] } + +cainome = { git = "https://github.com/cartridge-gg/cainome", tag = "v0.2.2" } diff --git a/crates/dojo-bindgen/README.md b/crates/dojo-bindgen/README.md new file mode 100644 index 0000000000..9fcbe27f6e --- /dev/null +++ b/crates/dojo-bindgen/README.md @@ -0,0 +1,19 @@ +# Dojo bindings generator + +This crate contains the Dojo bindings generator modules which leverage [cainome](https://github.com/cartridge-gg/cainome) to parse Cairo ABI. + +## Architecture + +`dojo-bindgen` aims at decoupling at most the knowledge required by `sozo` to output bindings along the contract artifacts. Cainome exposes the `parser` crate, which contains common functions to work with Cairo ABI and generate a list of tokens to have a intermediate representation of the ABI usable at runtime and build logic on top of it to generate the bindings. + +[PluginManager](./src/lib.rs): The `PluginManager` is the top level interface that `sozo` uses to request code generation. By providing the artifacts path and the list of plugins (more params in the future), `sozo` indicates which plugin must be invoke to generate the bindings. + +[BuiltinPlugin](./src/plugins/mod.rs): The `BuiltinPlugin` are a first lightweight and integrated plugins that are written in rust directly inside this crate. This also comes packaged into the dojo toolchain, ready to be used by developers. + +In the future, `dojo-bindgen` will expose a `Plugin` interface similar to protobuf to communicate with a user defined plugin using `stdin` for greater flexibility. 
+ +## Builtin Plugins + +[Typescript](./src/plugins/typescript/mod.rs) + +[Unity](./src/plugins/unity/mod.rs) diff --git a/crates/dojo-bindgen/src/error.rs b/crates/dojo-bindgen/src/error.rs new file mode 100644 index 0000000000..95553874b8 --- /dev/null +++ b/crates/dojo-bindgen/src/error.rs @@ -0,0 +1,16 @@ +use cainome::parser::Error as CainomeError; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum Error { + #[error(transparent)] + IO(#[from] std::io::Error), + #[error(transparent)] + SerdeJson(#[from] serde_json::Error), + #[error(transparent)] + Cainome(#[from] CainomeError), + #[error("Format error: {0}")] + Format(String), +} + +pub type BindgenResult = Result; diff --git a/crates/dojo-bindgen/src/lib.rs b/crates/dojo-bindgen/src/lib.rs new file mode 100644 index 0000000000..d2a23e1994 --- /dev/null +++ b/crates/dojo-bindgen/src/lib.rs @@ -0,0 +1,346 @@ +use std::collections::HashMap; +use std::fs; + +use cainome::parser::tokens::Token; +use cainome::parser::{AbiParser, TokenizedAbi}; +use camino::Utf8PathBuf; +use convert_case::{Case, Casing}; + +pub mod error; +use error::{BindgenResult, Error}; + +mod plugins; +use plugins::typescript::TypescriptPlugin; +use plugins::unity::UnityPlugin; +use plugins::BuiltinPlugin; +pub use plugins::BuiltinPlugins; + +#[derive(Debug, PartialEq)] +pub struct DojoModel { + /// PascalCase name of the model. + pub name: String, + /// Fully qualified path of the model type in cairo code. + pub qualified_path: String, + /// List of tokens found in the model contract ABI. + /// Only structs and enums are currently used. + pub tokens: TokenizedAbi, +} + +#[derive(Debug, PartialEq)] +pub struct DojoContract { + /// Contract's name. + pub contract_file_name: String, + /// Full ABI of the contract in case the plugin wants to make extra checks, + /// or generated other functions than the systems. + pub tokens: TokenizedAbi, + /// Functions that are identified as systems. + pub systems: Vec, +} + +#[derive(Debug)] +pub struct DojoData { + /// All contracts found in the project. + pub contracts: HashMap, + /// All the models contracts found in the project. + pub models: HashMap, +} + +// TODO: include the manifest to have more metadata when new manifest is available. +#[derive(Debug)] +pub struct PluginManager { + /// Path of contracts artifacts. + pub artifacts_path: Utf8PathBuf, + /// A list of builtin plugins to invoke. + pub builtin_plugins: Vec, + /// A list of custom plugins to invoke. + pub plugins: Vec, +} + +impl PluginManager { + /// Generates the bindings for all the given Plugin. + pub async fn generate(&self) -> BindgenResult<()> { + if self.builtin_plugins.is_empty() && self.plugins.is_empty() { + return Ok(()); + } + + let data = gather_dojo_data(&self.artifacts_path)?; + + for plugin in &self.builtin_plugins { + // Get the plugin builder from the plugin enum. + let builder: Box = match plugin { + BuiltinPlugins::Typescript => Box::new(TypescriptPlugin::new()), + BuiltinPlugins::Unity => Box::new(UnityPlugin::new()), + }; + + builder.generate_code(&data).await?; + } + Ok(()) + } +} + +/// Gathers dojo data from artifacts. +/// TODO: this should be modified later to use the new manifest structure. +/// it's currently done from the artifacts to decouple from the manifest. +/// +/// # Arguments +/// +/// * `artifacts_path` - Artifacts path where contracts were generated. 
+fn gather_dojo_data(artifacts_path: &Utf8PathBuf) -> BindgenResult { + let mut models = HashMap::new(); + let mut contracts = HashMap::new(); + + for entry in fs::read_dir(artifacts_path)? { + let entry = entry?; + let path = entry.path(); + + if path.is_file() { + if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) { + let file_content = fs::read_to_string(&path)?; + + // Models and Contracts must have a valid ABI. + if let Ok(tokens) = + AbiParser::tokens_from_abi_string(&file_content, &HashMap::new()) + { + // Contract. + if is_systems_contract(file_name, &file_content) { + // Identify the systems -> for now only take the functions from the + // interfaces. + let mut systems = vec![]; + let interface_blacklist = [ + "dojo::world::IWorldProvider", + "dojo::components::upgradeable::IUpgradeable", + ]; + + for (interface, funcs) in &tokens.interfaces { + if !interface_blacklist.contains(&interface.as_str()) { + systems.extend(funcs.clone()); + } + } + + contracts.insert( + file_name.to_string(), + DojoContract { + contract_file_name: file_name.to_string(), + tokens: tokens.clone(), + systems, + }, + ); + } + + // Model. + if is_model_contract(&tokens) { + if let Some(model_name) = model_name_from_artifact_filename(file_name) { + let model_pascal_case = + model_name.from_case(Case::Snake).to_case(Case::Pascal); + + let model = DojoModel { + name: model_pascal_case.clone(), + qualified_path: file_name + .replace(&model_name, &model_pascal_case) + .trim_end_matches(".json") + .to_string(), + tokens: filter_model_tokens(&tokens), + }; + + models.insert(model_pascal_case, model); + } else { + return Err(Error::Format(format!( + "Could not extract model name from file name `{file_name}`" + ))); + } + } + } + } + } + } + + Ok(DojoData { models, contracts }) +} + +/// Identifies if the given contract contains systems. +/// +/// For now the identification is very naive and don't use the manifest +/// as the manifest format will change soon. +/// TODO: use the new manifest files once available. +/// +/// # Arguments +/// +/// * `file_name` - Name of the contract file. +/// * `file_content` - Content of the contract artifact. +fn is_systems_contract(file_name: &str, file_content: &str) -> bool { + if file_name.starts_with("dojo::") || file_name == "manifest.json" { + return false; + } + + file_content.contains("IWorldDispatcher") +} + +/// Filters the model ABI to keep relevant types +/// to be generated for bindings. +fn filter_model_tokens(tokens: &TokenizedAbi) -> TokenizedAbi { + let mut structs = vec![]; + let mut enums = vec![]; + + // All types from introspect module can also be removed as the clients does not rely on them. + // Events are also always empty at model contract level. + fn skip_token(token: &Token) -> bool { + if token.type_path().starts_with("dojo::database::introspect") { + return true; + } + + if let Token::Composite(c) = token { + if c.is_event { + return true; + } + } + + false + } + + for s in &tokens.structs { + if !skip_token(s) { + structs.push(s.clone()); + } + } + + for e in &tokens.enums { + if !skip_token(e) { + enums.push(e.clone()); + } + } + + TokenizedAbi { structs, enums, ..Default::default() } +} + +/// Extracts a model name from the artifact file name. +/// +/// # Example +/// +/// The file name "dojo_examples::models::position.json" should return "position". +/// +/// # Arguments +/// +/// * `file_name` - Artifact file name. 
+fn model_name_from_artifact_filename(file_name: &str) -> Option { + let parts: Vec<&str> = file_name.split("::").collect(); + + if let Some(last_part) = parts.last() { + // TODO: for now, we always reconstruct with PascalCase. + // Once manifest data are available, use the exact name instead. + // We may have errors here is the struct is named like myStruct and not MyStruct. + // Plugin dev should consider case insensitive comparison. + last_part.split_once(".json").map(|m_ext| m_ext.0.to_string()) + } else { + None + } +} + +/// Identifies if the given contract contains a model. +/// +/// The identification is based on the methods name. This must +/// be adjusted if the model attribute expansion change in the future. +/// +/// +/// # Arguments +/// +/// * `file_name` - Name of the contract file. +/// * `file_content` - Content of the contract artifact. +fn is_model_contract(tokens: &TokenizedAbi) -> bool { + let expected_funcs = ["name", "layout", "packed_size", "unpacked_size", "schema"]; + + let mut funcs_counts = 0; + + // This hashmap is not that good at devex level.. one must check the + // code to know the keys. + for f in &tokens.functions { + if expected_funcs.contains(&f.to_function().expect("Function expected").name.as_str()) { + funcs_counts += 1; + } + } + + funcs_counts == expected_funcs.len() +} + +// #[cfg(test)] +// mod tests { +// use super::*; +// +// #[test] +// fn is_system_contract_ok() { +// let file_name = "dojo_examples::actions::actions.json"; +// let file_content = include_str!( +// "test_data/spawn-and-move/target/dev/dojo_examples::actions::actions.json" +// ); +// +// assert!(is_systems_contract(file_name, file_content)); +// } +// +// #[test] +// fn is_system_contract_ignore_dojo_files() { +// let file_name = "dojo::world::world.json"; +// let file_content = ""; +// assert!(!is_systems_contract(file_name, file_content)); +// +// let file_name = "manifest.json"; +// assert!(!is_systems_contract(file_name, file_content)); +// } +// +// #[test] +// fn test_is_system_contract_ignore_models() { +// let file_name = "dojo_examples::models::position.json"; +// let file_content = include_str!( +// "test_data/spawn-and-move/target/dev/dojo_examples::models::position.json" +// ); +// assert!(!is_systems_contract(file_name, file_content)); +// } +// +// #[test] +// fn model_name_from_artifact_filename_ok() { +// let file_name = "dojo_examples::models::position.json"; +// assert_eq!(model_name_from_artifact_filename(file_name), Some("position".to_string())); +// } +// +// #[test] +// fn is_model_contract_ok() { +// let file_content = +// include_str!("test_data/spawn-and-move/target/dev/dojo_examples::models::moves.json"); +// let tokens = AbiParser::tokens_from_abi_string(file_content, &HashMap::new()).unwrap(); +// +// assert!(is_model_contract(&tokens)); +// } +// +// #[test] +// fn is_model_contract_ignore_systems() { +// let file_content = include_str!( +// "test_data/spawn-and-move/target/dev/dojo_examples::actions::actions.json" +// ); +// let tokens = AbiParser::tokens_from_abi_string(file_content, &HashMap::new()).unwrap(); +// +// assert!(!is_model_contract(&tokens)); +// } +// +// #[test] +// fn is_model_contract_ignore_dojo_files() { +// let file_content = +// include_str!("test_data/spawn-and-move/target/dev/dojo::world::world.json"); +// let tokens = AbiParser::tokens_from_abi_string(file_content, &HashMap::new()).unwrap(); +// +// assert!(!is_model_contract(&tokens)); +// } +// +// #[test] +// fn gather_data_ok() { +// let data = 
gather_dojo_data(&Utf8PathBuf::from("src/test_data/spawn-and-move/target/dev")) +// .unwrap(); +// +// assert_eq!(data.models.len(), 2); +// +// let pos = data.models.get("Position").unwrap(); +// assert_eq!(pos.name, "Position"); +// assert_eq!(pos.qualified_path, "dojo_examples::models::Position"); +// +// let moves = data.models.get("Moves").unwrap(); +// assert_eq!(moves.name, "Moves"); +// assert_eq!(moves.qualified_path, "dojo_examples::models::Moves"); +// } +// } diff --git a/crates/dojo-bindgen/src/plugins/mod.rs b/crates/dojo-bindgen/src/plugins/mod.rs new file mode 100644 index 0000000000..d1cd14ceef --- /dev/null +++ b/crates/dojo-bindgen/src/plugins/mod.rs @@ -0,0 +1,23 @@ +use async_trait::async_trait; + +use crate::error::BindgenResult; +use crate::DojoData; + +pub mod typescript; +pub mod unity; + +#[derive(Debug)] +pub enum BuiltinPlugins { + Typescript, + Unity, +} + +#[async_trait] +pub trait BuiltinPlugin { + /// Generates code by executing the plugin. + /// + /// # Arguments + /// + /// * `data` - Dojo data gathered from the compiled project. + async fn generate_code(&self, data: &DojoData) -> BindgenResult<()>; +} diff --git a/crates/dojo-bindgen/src/plugins/typescript/mod.rs b/crates/dojo-bindgen/src/plugins/typescript/mod.rs new file mode 100644 index 0000000000..65ca4e4b41 --- /dev/null +++ b/crates/dojo-bindgen/src/plugins/typescript/mod.rs @@ -0,0 +1,32 @@ +use async_trait::async_trait; + +use crate::error::BindgenResult; +use crate::plugins::BuiltinPlugin; +use crate::DojoData; + +pub struct TypescriptPlugin; + +impl TypescriptPlugin { + pub fn new() -> Self { + Self {} + } +} + +#[async_trait] +impl BuiltinPlugin for TypescriptPlugin { + async fn generate_code(&self, data: &DojoData) -> BindgenResult<()> { + println!("-> Typescript models bindings\n"); + + for (name, model) in &data.models { + println!("## Model: {}", name); + println!("{:?}\n", model); + } + + for (file_name, contract) in &data.contracts { + println!("## Contract: {}", file_name); + println!("{:?}\n", contract); + } + + Ok(()) + } +} diff --git a/crates/dojo-bindgen/src/plugins/unity/mod.rs b/crates/dojo-bindgen/src/plugins/unity/mod.rs new file mode 100644 index 0000000000..1560e1c45f --- /dev/null +++ b/crates/dojo-bindgen/src/plugins/unity/mod.rs @@ -0,0 +1,32 @@ +use async_trait::async_trait; + +use crate::error::BindgenResult; +use crate::plugins::BuiltinPlugin; +use crate::DojoData; + +pub struct UnityPlugin; + +impl UnityPlugin { + pub fn new() -> Self { + Self {} + } +} + +#[async_trait] +impl BuiltinPlugin for UnityPlugin { + async fn generate_code(&self, data: &DojoData) -> BindgenResult<()> { + println!("-> Unity models bindings\n"); + + for (name, model) in &data.models { + println!("## Model: {}", name); + println!("{:?}\n", model); + } + + for (file_name, contract) in &data.contracts { + println!("## Contract: {}", file_name); + println!("{:?}\n", contract); + } + + Ok(()) + } +} diff --git a/crates/dojo-bindgen/src/test_data/spawn-and-move b/crates/dojo-bindgen/src/test_data/spawn-and-move new file mode 120000 index 0000000000..0b85d0755f --- /dev/null +++ b/crates/dojo-bindgen/src/test_data/spawn-and-move @@ -0,0 +1 @@ +../../../../examples/spawn-and-move \ No newline at end of file diff --git a/crates/dojo-lang/src/scarb_internal/mod.rs b/crates/dojo-lang/src/scarb_internal/mod.rs index ec8cd9857a..a6c37d6dbd 100644 --- a/crates/dojo-lang/src/scarb_internal/mod.rs +++ b/crates/dojo-lang/src/scarb_internal/mod.rs @@ -12,6 +12,7 @@ use cairo_lang_project::{AllCratesConfig, 
SingleCrateConfig}; use cairo_lang_starknet::starknet_plugin_suite; use cairo_lang_test_plugin::test_plugin_suite; use cairo_lang_utils::ordered_hash_map::OrderedHashMap; +use camino::Utf8PathBuf; use scarb::compiler::CompilationUnit; use scarb::core::Config; use scarb::ops::CompileOpts; @@ -20,6 +21,10 @@ use tracing::trace; use crate::plugin::dojo_plugin_suite; +pub struct CompileInfo { + pub target_dir: Utf8PathBuf, +} + pub fn crates_config_for_compilation_unit(unit: &CompilationUnit) -> AllCratesConfig { let crates_config: OrderedHashMap = unit .components @@ -52,7 +57,7 @@ pub fn build_scarb_root_database(unit: &CompilationUnit) -> Result /// This function is an alternative to `ops::compile`, it's doing the same job. /// However, we can control the injection of the plugins, required to have dojo plugin present /// for each compilation. -pub fn compile_workspace(config: &Config, opts: CompileOpts) -> Result<()> { +pub fn compile_workspace(config: &Config, opts: CompileOpts) -> Result { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; let packages: Vec = ws.members().map(|p| p.id).collect(); let resolve = scarb::ops::resolve_workspace(&ws)?; @@ -73,7 +78,10 @@ pub fn compile_workspace(config: &Config, opts: CompileOpts) -> Result<()> { } } - Ok(()) + let target_dir = ws.target_dir().path_existent().unwrap(); + let target_dir = target_dir.join(ws.config().profile().as_str()); + + Ok(CompileInfo { target_dir }) } fn build_project_config(unit: &CompilationUnit) -> Result { diff --git a/crates/sozo/Cargo.toml b/crates/sozo/Cargo.toml index da7b252cc5..e5fce987c0 100644 --- a/crates/sozo/Cargo.toml +++ b/crates/sozo/Cargo.toml @@ -24,6 +24,7 @@ clap-verbosity-flag = "2.0.1" clap.workspace = true clap_complete.workspace = true console.workspace = true +dojo-bindgen = { path = "../dojo-bindgen" } dojo-lang = { path = "../dojo-lang" } dojo-types = { path = "../dojo-types" } dojo-world = { path = "../dojo-world", features = [ "contracts", "metadata", "migration" ] } diff --git a/crates/sozo/src/commands/build.rs b/crates/sozo/src/commands/build.rs index 6c5695fd0f..31107a2b2b 100644 --- a/crates/sozo/src/commands/build.rs +++ b/crates/sozo/src/commands/build.rs @@ -1,17 +1,49 @@ use anyhow::Result; use clap::Args; +use dojo_bindgen::{BuiltinPlugins, PluginManager}; use dojo_lang::scarb_internal::compile_workspace; use scarb::core::{Config, TargetKind}; use scarb::ops::CompileOpts; #[derive(Args, Debug)] -pub struct BuildArgs; +pub struct BuildArgs { + #[arg(long)] + #[arg(help = "Generate Typescript bindings.")] + pub typescript: bool, + + #[arg(long)] + #[arg(help = "Generate Unity bindings.")] + pub unity: bool, +} impl BuildArgs { pub fn run(self, config: &Config) -> Result<()> { - compile_workspace( + let compile_info = compile_workspace( config, CompileOpts { include_targets: vec![], exclude_targets: vec![TargetKind::TEST] }, - ) + )?; + + let mut builtin_plugins = vec![]; + if self.typescript { + builtin_plugins.push(BuiltinPlugins::Typescript); + } + + if self.unity { + builtin_plugins.push(BuiltinPlugins::Unity); + } + + // Custom plugins are always empty for now. 
+        let bindgen = PluginManager {
+            artifacts_path: compile_info.target_dir,
+            plugins: vec![],
+            builtin_plugins,
+        };
+
+        tokio::runtime::Runtime::new()
+            .unwrap()
+            .block_on(bindgen.generate())
+            .expect("Error generating bindings");
+
+        Ok(())
     }
 }

From e02e430894b5a28e73034a6d358e5511378ae759 Mon Sep 17 00:00:00 2001
From: Ammar Arif
Date: Fri, 19 Jan 2024 00:25:02 +0900
Subject: [PATCH 28/33] Increase `katana` RPC request timeout from 2 -> 20
 secs (#1455)

Refer to #1456 for full context. The new timeout value has been chosen
arbitrarily; it just seemed reasonable enough.
---
 crates/katana/rpc/src/lib.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/katana/rpc/src/lib.rs b/crates/katana/rpc/src/lib.rs
index 4c3aaa1c83..16135096ad 100644
--- a/crates/katana/rpc/src/lib.rs
+++ b/crates/katana/rpc/src/lib.rs
@@ -50,7 +50,7 @@ pub async fn spawn(sequencer: Arc, config: ServerConfig) -> Res
     let middleware = tower::ServiceBuilder::new()
         .layer(cors)
         .layer(ProxyGetRequestLayer::new("/", "health")?)
-        .timeout(Duration::from_secs(2));
+        .timeout(Duration::from_secs(20));

     let server = ServerBuilder::new()
         .set_logger(RpcLogger)

From 1d86e1121d793b672d5ddf5f4c48e6dfb8c19afa Mon Sep 17 00:00:00 2001
From: Ammar Arif
Date: Fri, 19 Jan 2024 00:27:28 +0900
Subject: [PATCH 29/33] perf(katana-rpc): spawn blocking tasks (#1456)

Resolves https://github.com/dojoengine/dojo/issues/1448

Creates a new crate under `katana`, `katana-tasks`, for managing the
spawning of blocking tasks. RPC calls that mostly perform blocking work
are now sent to their designated thread pools and won't block the async
threads.

- `TokioTaskSpawner`: mainly for spawning blocking IO-bound tasks (i.e.
reading from storage)
- `BlockingTaskPool`: mainly for spawning expensive CPU-bound tasks

Depends on #1455 because the RPC requests (that used to block the
thread) now have to wait for the blocking tasks to finish and thus may
idle for more than 2 seconds, which would result in a connection
timeout. Doing `sozo migrate` with the 2s timeout would fail when
calling `/estimateFee` to estimate the World contract declare tx, with
the error `connection closed before message completed`.
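
The actual helpers live in the new `crates/katana/tasks` crate added further
down in this patch. The snippet below is only a rough, standalone sketch of the
two offloading routes described above: tokio's built-in blocking pool for
IO-bound work, and a rayon pool bridged back to async through a oneshot channel
for CPU-bound work. The function names, file path, and toy computation are made
up for illustration, and the sketch assumes `tokio` (with the `rt` and `macros`
features), `rayon`, and `futures` as dependencies.

```rust
use futures::channel::oneshot;

/// IO-bound work (e.g. reading from storage): hand it to tokio's blocking
/// thread pool so the async worker threads stay free.
async fn read_from_storage_offloaded(path: &'static str) -> std::io::Result<Vec<u8>> {
    tokio::task::spawn_blocking(move || std::fs::read(path))
        .await
        .expect("blocking task panicked")
}

/// CPU-bound work (e.g. fee estimation): run it on a rayon thread pool and
/// await the result over a oneshot channel so the async task is not blocked.
async fn sum_on_rayon(pool: &rayon::ThreadPool, n: u64) -> u64 {
    let (tx, rx) = oneshot::channel();
    pool.spawn(move || {
        // Stand-in for an expensive computation.
        let result = (0..n).sum::<u64>();
        let _ = tx.send(result);
    });
    rx.await.expect("worker dropped the result sender")
}

#[tokio::main]
async fn main() {
    let pool = rayon::ThreadPoolBuilder::new().build().expect("failed to build rayon pool");

    let sum = sum_on_rayon(&pool, 10).await;
    println!("cpu-bound result: {sum}");

    // This read is expected to fail unless the file exists; the point is only
    // that it runs off the async threads.
    let bytes = read_from_storage_offloaded("some_file.bin").await;
    println!("io-bound read result: {:?}", bytes.map(|b| b.len()));
}
```

Keeping CPU-heavy work on a separate rayon pool avoids tying up tokio's
blocking threads, which are primarily meant for IO that merely waits.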
Raw error message from `sozo` against the new changes: ```console Caused by: Failed to deploy world: Failed to migrate world: Migrator(Provider(Other(TransportError(Reqwest(reqwest::Error { kind: Request, url: Url { scheme: "http", cannot_be_a_base: false, username: "", password: None, host: Some(Domain("localhost")), port: Some(5050), path: "/", query: None, fragment: None }, source: hyper::Error(IncompleteMessage) }))))) ``` --- Cargo.lock | 11 + crates/katana/core/src/sequencer.rs | 9 +- crates/katana/rpc/Cargo.toml | 1 + crates/katana/rpc/src/starknet.rs | 798 ++++++++++++++++------------ crates/katana/tasks/Cargo.toml | 12 + crates/katana/tasks/src/lib.rs | 181 +++++++ 6 files changed, 660 insertions(+), 352 deletions(-) create mode 100644 crates/katana/tasks/Cargo.toml create mode 100644 crates/katana/tasks/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 1bb69e2124..34a59a1668 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5686,6 +5686,7 @@ dependencies = [ "katana-provider", "katana-rpc-types", "katana-rpc-types-builder", + "katana-tasks", "serde", "serde_json", "serde_with", @@ -5736,6 +5737,16 @@ dependencies = [ "url", ] +[[package]] +name = "katana-tasks" +version = "0.5.0" +dependencies = [ + "futures", + "rayon", + "thiserror", + "tokio", +] + [[package]] name = "keccak" version = "0.1.4" diff --git a/crates/katana/core/src/sequencer.rs b/crates/katana/core/src/sequencer.rs index 1f2b85dbde..16c3ca9b7e 100644 --- a/crates/katana/core/src/sequencer.rs +++ b/crates/katana/core/src/sequencer.rs @@ -267,8 +267,9 @@ impl KatanaSequencer { self.backend.chain_id } - pub fn block_number(&self) -> BlockNumber { - BlockNumberProvider::latest_number(&self.backend.blockchain.provider()).unwrap() + pub fn block_number(&self) -> SequencerResult { + let num = BlockNumberProvider::latest_number(&self.backend.blockchain.provider())?; + Ok(num) } pub fn block_tx_count(&self, block_id: BlockIdOrTag) -> SequencerResult> { @@ -300,7 +301,7 @@ impl KatanaSequencer { Ok(count) } - pub async fn nonce_at( + pub fn nonce_at( &self, block_id: BlockIdOrTag, contract_address: ContractAddress, @@ -352,7 +353,7 @@ impl KatanaSequencer { Ok(tx) } - pub async fn events( + pub fn events( &self, from_block: BlockIdOrTag, to_block: BlockIdOrTag, diff --git a/crates/katana/rpc/Cargo.toml b/crates/katana/rpc/Cargo.toml index 33d21f9570..91450e7971 100644 --- a/crates/katana/rpc/Cargo.toml +++ b/crates/katana/rpc/Cargo.toml @@ -12,6 +12,7 @@ katana-primitives = { path = "../primitives" } katana-provider = { path = "../storage/provider" } katana-rpc-types = { path = "rpc-types" } katana-rpc-types-builder = { path = "rpc-types-builder" } +katana-tasks = { path = "../tasks" } anyhow.workspace = true cairo-lang-starknet = "2.3.1" diff --git a/crates/katana/rpc/src/starknet.rs b/crates/katana/rpc/src/starknet.rs index 737139d262..6c2f9e82bd 100644 --- a/crates/katana/rpc/src/starknet.rs +++ b/crates/katana/rpc/src/starknet.rs @@ -29,23 +29,50 @@ use katana_rpc_types::transaction::{ }; use katana_rpc_types::{ContractClass, FeeEstimate, FeltAsHex, FunctionCall}; use katana_rpc_types_builder::ReceiptBuilder; +use katana_tasks::{BlockingTaskPool, TokioTaskSpawner}; use starknet::core::types::{BlockTag, TransactionExecutionStatus, TransactionStatus}; use crate::api::starknet::{StarknetApiError, StarknetApiServer}; +#[derive(Clone)] pub struct StarknetApi { + inner: Arc, +} + +struct StarknetApiInner { sequencer: Arc, + blocking_task_pool: BlockingTaskPool, } impl StarknetApi { pub fn new(sequencer: Arc) -> Self { - Self 
{ sequencer } + let blocking_task_pool = + BlockingTaskPool::new().expect("failed to create blocking task pool"); + Self { inner: Arc::new(StarknetApiInner { sequencer, blocking_task_pool }) } + } + + async fn on_cpu_blocking_task(&self, func: F) -> T + where + F: FnOnce(Self) -> T + Send + 'static, + T: Send + 'static, + { + let this = self.clone(); + self.inner.blocking_task_pool.spawn(move || func(this)).await.unwrap() + } + + async fn on_io_blocking_task(&self, func: F) -> T + where + F: FnOnce(Self) -> T + Send + 'static, + T: Send + 'static, + { + let this = self.clone(); + TokioTaskSpawner::new().unwrap().spawn_blocking(move || func(this)).await.unwrap() } } #[async_trait] impl StarknetApiServer for StarknetApi { async fn chain_id(&self) -> Result { - Ok(FieldElement::from(self.sequencer.chain_id()).into()) + Ok(FieldElement::from(self.inner.sequencer.chain_id()).into()) } async fn nonce( @@ -53,36 +80,51 @@ impl StarknetApiServer for StarknetApi { block_id: BlockIdOrTag, contract_address: FieldElement, ) -> Result { - let nonce = self - .sequencer - .nonce_at(block_id, contract_address.into()) - .await - .map_err(StarknetApiError::from)? - .ok_or(StarknetApiError::ContractNotFound)?; - - Ok(nonce.into()) + self.on_io_blocking_task(move |this| { + let nonce = this + .inner + .sequencer + .nonce_at(block_id, contract_address.into()) + .map_err(StarknetApiError::from)? + .ok_or(StarknetApiError::ContractNotFound)?; + Ok(nonce.into()) + }) + .await } async fn block_number(&self) -> Result { - Ok(self.sequencer.block_number()) + self.on_io_blocking_task(move |this| { + let block_number = + this.inner.sequencer.block_number().map_err(StarknetApiError::from)?; + Ok(block_number) + }) + .await } async fn transaction_by_hash(&self, transaction_hash: FieldElement) -> Result { - let tx = self - .sequencer - .transaction(&transaction_hash) - .map_err(StarknetApiError::from)? - .ok_or(StarknetApiError::TxnHashNotFound)?; - Ok(tx.into()) + self.on_io_blocking_task(move |this| { + let tx = this + .inner + .sequencer + .transaction(&transaction_hash) + .map_err(StarknetApiError::from)? + .ok_or(StarknetApiError::TxnHashNotFound)?; + Ok(tx.into()) + }) + .await } async fn block_transaction_count(&self, block_id: BlockIdOrTag) -> Result { - let count = self - .sequencer - .block_tx_count(block_id) - .map_err(StarknetApiError::from)? - .ok_or(StarknetApiError::BlockNotFound)?; - Ok(count) + self.on_io_blocking_task(move |this| { + let count = this + .inner + .sequencer + .block_tx_count(block_id) + .map_err(StarknetApiError::from)? + .ok_or(StarknetApiError::BlockNotFound)?; + Ok(count) + }) + .await } async fn class_at( @@ -91,17 +133,22 @@ impl StarknetApiServer for StarknetApi { contract_address: FieldElement, ) -> Result { let class_hash = self - .sequencer - .class_hash_at(block_id, contract_address.into()) - .map_err(StarknetApiError::from)? - .ok_or(StarknetApiError::ContractNotFound)?; - + .on_io_blocking_task(move |this| { + this.inner + .sequencer + .class_hash_at(block_id, contract_address.into()) + .map_err(StarknetApiError::from)? 
+ .ok_or(StarknetApiError::ContractNotFound) + }) + .await?; self.class(block_id, class_hash).await } async fn block_hash_and_number(&self) -> Result { - let hash_and_num_pair = - self.sequencer.block_hash_and_number().map_err(StarknetApiError::from)?; + let hash_and_num_pair = self + .on_io_blocking_task(move |this| this.inner.sequencer.block_hash_and_number()) + .await + .map_err(StarknetApiError::from)?; Ok(hash_and_num_pair.into()) } @@ -109,51 +156,53 @@ impl StarknetApiServer for StarknetApi { &self, block_id: BlockIdOrTag, ) -> Result { - let provider = self.sequencer.backend.blockchain.provider(); - - if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { - if let Some(pending_state) = self.sequencer.pending_state() { - let block_env = pending_state.block_envs.read().0.clone(); - let latest_hash = - BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; - - let gas_prices = GasPrices { - eth: block_env.l1_gas_prices.eth, - strk: block_env.l1_gas_prices.strk, - }; + self.on_io_blocking_task(move |this| { + let provider = this.inner.sequencer.backend.blockchain.provider(); + + if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { + if let Some(pending_state) = this.inner.sequencer.pending_state() { + let block_env = pending_state.block_envs.read().0.clone(); + let latest_hash = + BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; + + let gas_prices = GasPrices { + eth: block_env.l1_gas_prices.eth, + strk: block_env.l1_gas_prices.strk, + }; + + let header = PartialHeader { + gas_prices, + parent_hash: latest_hash, + version: CURRENT_STARKNET_VERSION, + timestamp: block_env.timestamp, + sequencer_address: block_env.sequencer_address, + }; + + let transactions = pending_state + .executed_txs + .read() + .iter() + .map(|(tx, _)| tx.hash) + .collect::>(); - let header = PartialHeader { - gas_prices, - parent_hash: latest_hash, - version: CURRENT_STARKNET_VERSION, - timestamp: block_env.timestamp, - sequencer_address: block_env.sequencer_address, - }; + return Ok(MaybePendingBlockWithTxHashes::Pending( + PendingBlockWithTxHashes::new(header, transactions), + )); + } + } - let transactions = pending_state - .executed_txs - .read() - .iter() - .map(|(tx, _)| tx.hash) - .collect::>(); + let block_num = BlockIdReader::convert_block_id(provider, block_id) + .map_err(StarknetApiError::from)? + .map(BlockHashOrNumber::Num) + .ok_or(StarknetApiError::BlockNotFound)?; - return Ok(MaybePendingBlockWithTxHashes::Pending(PendingBlockWithTxHashes::new( - header, - transactions, - ))); - } - } - - let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(StarknetApiError::from)? - .map(BlockHashOrNumber::Num) - .ok_or(StarknetApiError::BlockNotFound)?; - - katana_rpc_types_builder::BlockBuilder::new(block_num, provider) - .build_with_tx_hash() - .map_err(StarknetApiError::from)? - .map(MaybePendingBlockWithTxHashes::Block) - .ok_or(Error::from(StarknetApiError::BlockNotFound)) + katana_rpc_types_builder::BlockBuilder::new(block_num, provider) + .build_with_tx_hash() + .map_err(StarknetApiError::from)? 
+ .map(MaybePendingBlockWithTxHashes::Block) + .ok_or(Error::from(StarknetApiError::BlockNotFound)) + }) + .await } async fn transaction_by_block_id_and_index( @@ -161,133 +210,145 @@ impl StarknetApiServer for StarknetApi { block_id: BlockIdOrTag, index: u64, ) -> Result { - // TEMP: have to handle pending tag independently for now - let tx = if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { - let Some(pending_state) = self.sequencer.pending_state() else { - return Err(StarknetApiError::BlockNotFound.into()); - }; + self.on_io_blocking_task(move |this| { + // TEMP: have to handle pending tag independently for now + let tx = if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { + let Some(pending_state) = this.inner.sequencer.pending_state() else { + return Err(StarknetApiError::BlockNotFound.into()); + }; - let pending_txs = pending_state.executed_txs.read(); - pending_txs.iter().nth(index as usize).map(|(tx, _)| tx.clone()) - } else { - let provider = &self.sequencer.backend.blockchain.provider(); + let pending_txs = pending_state.executed_txs.read(); + pending_txs.iter().nth(index as usize).map(|(tx, _)| tx.clone()) + } else { + let provider = &this.inner.sequencer.backend.blockchain.provider(); - let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(StarknetApiError::from)? - .map(BlockHashOrNumber::Num) - .ok_or(StarknetApiError::BlockNotFound)?; + let block_num = BlockIdReader::convert_block_id(provider, block_id) + .map_err(StarknetApiError::from)? + .map(BlockHashOrNumber::Num) + .ok_or(StarknetApiError::BlockNotFound)?; - TransactionProvider::transaction_by_block_and_idx(provider, block_num, index) - .map_err(StarknetApiError::from)? - }; + TransactionProvider::transaction_by_block_and_idx(provider, block_num, index) + .map_err(StarknetApiError::from)? 
+ }; - Ok(tx.ok_or(StarknetApiError::InvalidTxnIndex)?.into()) + Ok(tx.ok_or(StarknetApiError::InvalidTxnIndex)?.into()) + }) + .await } async fn block_with_txs( &self, block_id: BlockIdOrTag, ) -> Result { - let provider = self.sequencer.backend.blockchain.provider(); - - if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { - if let Some(pending_state) = self.sequencer.pending_state() { - let block_env = pending_state.block_envs.read().0.clone(); - let latest_hash = - BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; - - let gas_prices = GasPrices { - eth: block_env.l1_gas_prices.eth, - strk: block_env.l1_gas_prices.strk, - }; - - let header = PartialHeader { - gas_prices, - parent_hash: latest_hash, - version: CURRENT_STARKNET_VERSION, - timestamp: block_env.timestamp, - sequencer_address: block_env.sequencer_address, - }; + self.on_io_blocking_task(move |this| { + let provider = this.inner.sequencer.backend.blockchain.provider(); + + if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { + if let Some(pending_state) = this.inner.sequencer.pending_state() { + let block_env = pending_state.block_envs.read().0.clone(); + let latest_hash = + BlockHashProvider::latest_hash(provider).map_err(StarknetApiError::from)?; + + let gas_prices = GasPrices { + eth: block_env.l1_gas_prices.eth, + strk: block_env.l1_gas_prices.strk, + }; + + let header = PartialHeader { + gas_prices, + parent_hash: latest_hash, + version: CURRENT_STARKNET_VERSION, + timestamp: block_env.timestamp, + sequencer_address: block_env.sequencer_address, + }; + + let transactions = pending_state + .executed_txs + .read() + .iter() + .map(|(tx, _)| tx.clone()) + .collect::>(); + + return Ok(MaybePendingBlockWithTxs::Pending(PendingBlockWithTxs::new( + header, + transactions, + ))); + } + } - let transactions = pending_state - .executed_txs - .read() - .iter() - .map(|(tx, _)| tx.clone()) - .collect::>(); + let block_num = BlockIdReader::convert_block_id(provider, block_id) + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? + .map(BlockHashOrNumber::Num) + .ok_or(StarknetApiError::BlockNotFound)?; - return Ok(MaybePendingBlockWithTxs::Pending(PendingBlockWithTxs::new( - header, - transactions, - ))); - } - } - - let block_num = BlockIdReader::convert_block_id(provider, block_id) - .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? - .map(BlockHashOrNumber::Num) - .ok_or(StarknetApiError::BlockNotFound)?; - - katana_rpc_types_builder::BlockBuilder::new(block_num, provider) - .build() - .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? - .map(MaybePendingBlockWithTxs::Block) - .ok_or(Error::from(StarknetApiError::BlockNotFound)) + katana_rpc_types_builder::BlockBuilder::new(block_num, provider) + .build() + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? 
+ .map(MaybePendingBlockWithTxs::Block) + .ok_or(Error::from(StarknetApiError::BlockNotFound)) + }) + .await } async fn state_update(&self, block_id: BlockIdOrTag) -> Result { - let provider = self.sequencer.backend.blockchain.provider(); + self.on_io_blocking_task(move |this| { + let provider = this.inner.sequencer.backend.blockchain.provider(); - let block_id = match block_id { - BlockIdOrTag::Number(num) => BlockHashOrNumber::Num(num), - BlockIdOrTag::Hash(hash) => BlockHashOrNumber::Hash(hash), + let block_id = match block_id { + BlockIdOrTag::Number(num) => BlockHashOrNumber::Num(num), + BlockIdOrTag::Hash(hash) => BlockHashOrNumber::Hash(hash), - BlockIdOrTag::Tag(BlockTag::Latest) => BlockNumberProvider::latest_number(provider) - .map(BlockHashOrNumber::Num) - .map_err(|_| StarknetApiError::BlockNotFound)?, + BlockIdOrTag::Tag(BlockTag::Latest) => BlockNumberProvider::latest_number(provider) + .map(BlockHashOrNumber::Num) + .map_err(|_| StarknetApiError::BlockNotFound)?, - BlockIdOrTag::Tag(BlockTag::Pending) => { - return Err(StarknetApiError::BlockNotFound.into()); - } - }; + BlockIdOrTag::Tag(BlockTag::Pending) => { + return Err(StarknetApiError::BlockNotFound.into()); + } + }; - katana_rpc_types_builder::StateUpdateBuilder::new(block_id, provider) - .build() - .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? - .ok_or(Error::from(StarknetApiError::BlockNotFound)) + katana_rpc_types_builder::StateUpdateBuilder::new(block_id, provider) + .build() + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })? + .ok_or(Error::from(StarknetApiError::BlockNotFound)) + }) + .await } async fn transaction_receipt( &self, transaction_hash: FieldElement, ) -> Result { - let provider = self.sequencer.backend.blockchain.provider(); - let receipt = ReceiptBuilder::new(transaction_hash, provider) - .build() - .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })?; - - match receipt { - Some(receipt) => Ok(MaybePendingTxReceipt::Receipt(receipt)), - - None => { - let pending_receipt = self.sequencer.pending_state().and_then(|s| { - s.executed_txs - .read() - .iter() - .find(|(tx, _)| tx.hash == transaction_hash) - .map(|(_, rct)| rct.receipt.clone()) - }); - - let Some(pending_receipt) = pending_receipt else { - return Err(StarknetApiError::TxnHashNotFound.into()); - }; - - Ok(MaybePendingTxReceipt::Pending(PendingTxReceipt::new( - transaction_hash, - pending_receipt, - ))) + self.on_io_blocking_task(move |this| { + let provider = this.inner.sequencer.backend.blockchain.provider(); + let receipt = ReceiptBuilder::new(transaction_hash, provider) + .build() + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })?; + + match receipt { + Some(receipt) => Ok(MaybePendingTxReceipt::Receipt(receipt)), + + None => { + let pending_receipt = this.inner.sequencer.pending_state().and_then(|s| { + s.executed_txs + .read() + .iter() + .find(|(tx, _)| tx.hash == transaction_hash) + .map(|(_, rct)| rct.receipt.clone()) + }); + + let Some(pending_receipt) = pending_receipt else { + return Err(StarknetApiError::TxnHashNotFound.into()); + }; + + Ok(MaybePendingTxReceipt::Pending(PendingTxReceipt::new( + transaction_hash, + pending_receipt, + ))) + } } - } + }) + .await } async fn class_hash_at( @@ -295,13 +356,16 @@ impl StarknetApiServer for StarknetApi { block_id: BlockIdOrTag, contract_address: FieldElement, ) -> Result { - let hash = self - .sequencer - .class_hash_at(block_id, contract_address.into()) - 
.map_err(StarknetApiError::from)? - .ok_or(StarknetApiError::ContractNotFound)?; - - Ok(hash.into()) + self.on_io_blocking_task(move |this| { + let hash = this + .inner + .sequencer + .class_hash_at(block_id, contract_address.into()) + .map_err(StarknetApiError::from)? + .ok_or(StarknetApiError::ContractNotFound)?; + Ok(hash.into()) + }) + .await } async fn class( @@ -309,40 +373,48 @@ impl StarknetApiServer for StarknetApi { block_id: BlockIdOrTag, class_hash: FieldElement, ) -> Result { - let class = self.sequencer.class(block_id, class_hash).map_err(StarknetApiError::from)?; - let Some(class) = class else { return Err(StarknetApiError::ClassHashNotFound.into()) }; - - match class { - StarknetContract::Legacy(c) => { - let contract = legacy_inner_to_rpc_class(c) - .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })?; - Ok(contract) + self.on_io_blocking_task(move |this| { + let class = + this.inner.sequencer.class(block_id, class_hash).map_err(StarknetApiError::from)?; + let Some(class) = class else { return Err(StarknetApiError::ClassHashNotFound.into()) }; + + match class { + StarknetContract::Legacy(c) => { + let contract = legacy_inner_to_rpc_class(c) + .map_err(|e| StarknetApiError::UnexpectedError { reason: e.to_string() })?; + Ok(contract) + } + StarknetContract::Sierra(c) => Ok(ContractClass::Sierra(c)), } - StarknetContract::Sierra(c) => Ok(ContractClass::Sierra(c)), - } + }) + .await } async fn events(&self, filter: EventFilterWithPage) -> Result { - let from_block = filter.event_filter.from_block.unwrap_or(BlockIdOrTag::Number(0)); - let to_block = filter.event_filter.to_block.unwrap_or(BlockIdOrTag::Tag(BlockTag::Latest)); - - let keys = filter.event_filter.keys; - let keys = keys.filter(|keys| !(keys.len() == 1 && keys.is_empty())); - - let events = self - .sequencer - .events( - from_block, - to_block, - filter.event_filter.address.map(|f| f.into()), - keys, - filter.result_page_request.continuation_token, - filter.result_page_request.chunk_size, - ) - .await - .map_err(StarknetApiError::from)?; - - Ok(events) + self.on_io_blocking_task(move |this| { + let from_block = filter.event_filter.from_block.unwrap_or(BlockIdOrTag::Number(0)); + let to_block = + filter.event_filter.to_block.unwrap_or(BlockIdOrTag::Tag(BlockTag::Latest)); + + let keys = filter.event_filter.keys; + let keys = keys.filter(|keys| !(keys.len() == 1 && keys.is_empty())); + + let events = this + .inner + .sequencer + .events( + from_block, + to_block, + filter.event_filter.address.map(|f| f.into()), + keys, + filter.result_page_request.continuation_token, + filter.result_page_request.chunk_size, + ) + .map_err(StarknetApiError::from)?; + + Ok(events) + }) + .await } async fn call( @@ -350,15 +422,18 @@ impl StarknetApiServer for StarknetApi { request: FunctionCall, block_id: BlockIdOrTag, ) -> Result, Error> { - let request = EntryPointCall { - calldata: request.calldata, - contract_address: request.contract_address.into(), - entry_point_selector: request.entry_point_selector, - }; - - let res = self.sequencer.call(request, block_id).map_err(StarknetApiError::from)?; + self.on_io_blocking_task(move |this| { + let request = EntryPointCall { + calldata: request.calldata, + contract_address: request.contract_address.into(), + entry_point_selector: request.entry_point_selector, + }; - Ok(res.into_iter().map(|v| v.into()).collect()) + let res = + this.inner.sequencer.call(request, block_id).map_err(StarknetApiError::from)?; + Ok(res.into_iter().map(|v| v.into()).collect()) + }) + .await } 
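
Every handler in this diff follows the same shape: the whole method body is
moved into a closure and handed to the `on_io_blocking_task` /
`on_cpu_blocking_task` helpers added near the top of this file, which run it on
a blocking pool with a cheap clone of the API handle. The sketch below is a
condensed, self-contained illustration of that closure-capture pattern, not
katana's actual types: `Api`, `Inner`, and the use of plain
`tokio::task::spawn_blocking` are stand-ins for the real `StarknetApi`,
`StarknetApiInner`, and task-spawning helpers.

```rust
use std::sync::Arc;

/// Stand-in for the shared sequencer state held behind `Arc` in the real API.
struct Inner {
    chain_id: u64,
}

#[derive(Clone)]
struct Api {
    inner: Arc<Inner>,
}

impl Api {
    /// Runs `func` on a blocking thread, handing it a cheap clone of `self` so
    /// the closure can be `'static` instead of borrowing the handler's `&self`.
    async fn on_io_blocking_task<F, T>(&self, func: F) -> T
    where
        F: FnOnce(Self) -> T + Send + 'static,
        T: Send + 'static,
    {
        let this = self.clone();
        tokio::task::spawn_blocking(move || func(this)).await.expect("blocking task panicked")
    }

    /// Example handler: the entire body moves into the closure, mirroring how
    /// `storage_at`, `call`, and the other methods are rewritten in this diff.
    async fn chain_id(&self) -> u64 {
        self.on_io_blocking_task(move |this| this.inner.chain_id).await
    }
}

#[tokio::main]
async fn main() {
    let api = Api { inner: Arc::new(Inner { chain_id: 1 }) };
    println!("chain id: {}", api.chain_id().await);
}
```

Cloning the handle costs only an `Arc` reference-count bump, which is what lets
each `async fn` avoid lifetime issues while the work runs on another thread.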
async fn storage_at( @@ -367,33 +442,40 @@ impl StarknetApiServer for StarknetApi { key: FieldElement, block_id: BlockIdOrTag, ) -> Result { - let value = self - .sequencer - .storage_at(contract_address.into(), key, block_id) - .map_err(StarknetApiError::from)?; - - Ok(value.into()) + self.on_io_blocking_task(move |this| { + let value = this + .inner + .sequencer + .storage_at(contract_address.into(), key, block_id) + .map_err(StarknetApiError::from)?; + + Ok(value.into()) + }) + .await } async fn add_deploy_account_transaction( &self, deploy_account_transaction: BroadcastedDeployAccountTx, ) -> Result { - if deploy_account_transaction.is_query { - return Err(StarknetApiError::UnsupportedTransactionVersion.into()); - } + self.on_io_blocking_task(move |this| { + if deploy_account_transaction.is_query { + return Err(StarknetApiError::UnsupportedTransactionVersion.into()); + } - let chain_id = self.sequencer.chain_id(); + let chain_id = this.inner.sequencer.chain_id(); - let tx = deploy_account_transaction.into_tx_with_chain_id(chain_id); - let contract_address = tx.contract_address; + let tx = deploy_account_transaction.into_tx_with_chain_id(chain_id); + let contract_address = tx.contract_address; - let tx = ExecutableTxWithHash::new(ExecutableTx::DeployAccount(tx)); - let tx_hash = tx.hash; + let tx = ExecutableTxWithHash::new(ExecutableTx::DeployAccount(tx)); + let tx_hash = tx.hash; - self.sequencer.add_transaction_to_pool(tx); + this.inner.sequencer.add_transaction_to_pool(tx); - Ok((tx_hash, contract_address).into()) + Ok((tx_hash, contract_address).into()) + }) + .await } async fn estimate_fee( @@ -401,38 +483,44 @@ impl StarknetApiServer for StarknetApi { request: Vec, block_id: BlockIdOrTag, ) -> Result, Error> { - let chain_id = self.sequencer.chain_id(); - - let transactions = request - .into_iter() - .map(|tx| { - let tx = match tx { - BroadcastedTx::Invoke(tx) => { - let tx = tx.into_tx_with_chain_id(chain_id); - ExecutableTxWithHash::new_query(ExecutableTx::Invoke(tx)) - } - - BroadcastedTx::DeployAccount(tx) => { - let tx = tx.into_tx_with_chain_id(chain_id); - ExecutableTxWithHash::new_query(ExecutableTx::DeployAccount(tx)) - } - - BroadcastedTx::Declare(tx) => { - let tx = tx - .try_into_tx_with_chain_id(chain_id) - .map_err(|_| StarknetApiError::InvalidContractClass)?; - ExecutableTxWithHash::new_query(ExecutableTx::Declare(tx)) - } - }; - - Result::::Ok(tx) - }) - .collect::, _>>()?; - - let res = - self.sequencer.estimate_fee(transactions, block_id).map_err(StarknetApiError::from)?; - - Ok(res) + self.on_cpu_blocking_task(move |this| { + let chain_id = this.inner.sequencer.chain_id(); + + let transactions = request + .into_iter() + .map(|tx| { + let tx = match tx { + BroadcastedTx::Invoke(tx) => { + let tx = tx.into_tx_with_chain_id(chain_id); + ExecutableTxWithHash::new_query(ExecutableTx::Invoke(tx)) + } + + BroadcastedTx::DeployAccount(tx) => { + let tx = tx.into_tx_with_chain_id(chain_id); + ExecutableTxWithHash::new_query(ExecutableTx::DeployAccount(tx)) + } + + BroadcastedTx::Declare(tx) => { + let tx = tx + .try_into_tx_with_chain_id(chain_id) + .map_err(|_| StarknetApiError::InvalidContractClass)?; + ExecutableTxWithHash::new_query(ExecutableTx::Declare(tx)) + } + }; + + Result::::Ok(tx) + }) + .collect::, _>>()?; + + let res = this + .inner + .sequencer + .estimate_fee(transactions, block_id) + .map_err(StarknetApiError::from)?; + + Ok(res) + }) + .await } async fn estimate_message_fee( @@ -440,129 +528,143 @@ impl StarknetApiServer for StarknetApi { message: 
MsgFromL1, block_id: BlockIdOrTag, ) -> Result { - let chain_id = self.sequencer.chain_id(); + self.on_cpu_blocking_task(move |this| { + let chain_id = this.inner.sequencer.chain_id(); - let tx = message.into_tx_with_chain_id(chain_id); - let hash = tx.calculate_hash(); - let tx: ExecutableTxWithHash = ExecutableTxWithHash { hash, transaction: tx.into() }; + let tx = message.into_tx_with_chain_id(chain_id); + let hash = tx.calculate_hash(); + let tx: ExecutableTxWithHash = ExecutableTxWithHash { hash, transaction: tx.into() }; - let res = self - .sequencer - .estimate_fee(vec![tx], block_id) - .map_err(StarknetApiError::from)? - .pop() - .expect("should have estimate result"); + let res = this + .inner + .sequencer + .estimate_fee(vec![tx], block_id) + .map_err(StarknetApiError::from)? + .pop() + .expect("should have estimate result"); - Ok(res) + Ok(res) + }) + .await } async fn add_declare_transaction( &self, declare_transaction: BroadcastedDeclareTx, ) -> Result { - if declare_transaction.is_query() { - return Err(StarknetApiError::UnsupportedTransactionVersion.into()); - } + self.on_io_blocking_task(move |this| { + if declare_transaction.is_query() { + return Err(StarknetApiError::UnsupportedTransactionVersion.into()); + } - let chain_id = self.sequencer.chain_id(); + let chain_id = this.inner.sequencer.chain_id(); - // // validate compiled class hash - // let is_valid = declare_transaction - // .validate_compiled_class_hash() - // .map_err(|_| StarknetApiError::InvalidContractClass)?; + // // validate compiled class hash + // let is_valid = declare_transaction + // .validate_compiled_class_hash() + // .map_err(|_| StarknetApiError::InvalidContractClass)?; - // if !is_valid { - // return Err(StarknetApiError::CompiledClassHashMismatch.into()); - // } + // if !is_valid { + // return Err(StarknetApiError::CompiledClassHashMismatch.into()); + // } - let tx = declare_transaction - .try_into_tx_with_chain_id(chain_id) - .map_err(|_| StarknetApiError::InvalidContractClass)?; + let tx = declare_transaction + .try_into_tx_with_chain_id(chain_id) + .map_err(|_| StarknetApiError::InvalidContractClass)?; - let class_hash = tx.class_hash(); - let tx = ExecutableTxWithHash::new(ExecutableTx::Declare(tx)); - let tx_hash = tx.hash; + let class_hash = tx.class_hash(); + let tx = ExecutableTxWithHash::new(ExecutableTx::Declare(tx)); + let tx_hash = tx.hash; - self.sequencer.add_transaction_to_pool(tx); + this.inner.sequencer.add_transaction_to_pool(tx); - Ok((tx_hash, class_hash).into()) + Ok((tx_hash, class_hash).into()) + }) + .await } async fn add_invoke_transaction( &self, invoke_transaction: BroadcastedInvokeTx, ) -> Result { - if invoke_transaction.is_query { - return Err(StarknetApiError::UnsupportedTransactionVersion.into()); - } + self.on_io_blocking_task(move |this| { + if invoke_transaction.is_query { + return Err(StarknetApiError::UnsupportedTransactionVersion.into()); + } - let chain_id = self.sequencer.chain_id(); + let chain_id = this.inner.sequencer.chain_id(); - let tx = invoke_transaction.into_tx_with_chain_id(chain_id); - let tx = ExecutableTxWithHash::new(ExecutableTx::Invoke(tx)); - let tx_hash = tx.hash; + let tx = invoke_transaction.into_tx_with_chain_id(chain_id); + let tx = ExecutableTxWithHash::new(ExecutableTx::Invoke(tx)); + let tx_hash = tx.hash; - self.sequencer.add_transaction_to_pool(tx); + this.inner.sequencer.add_transaction_to_pool(tx); - Ok(tx_hash.into()) + Ok(tx_hash.into()) + }) + .await } async fn transaction_status( &self, transaction_hash: TxHash, ) -> Result { 
- let provider = self.sequencer.backend.blockchain.provider(); + self.on_io_blocking_task(move |this| { + let provider = this.inner.sequencer.backend.blockchain.provider(); + + let tx_status = + TransactionStatusProvider::transaction_status(provider, transaction_hash) + .map_err(StarknetApiError::from)?; + + if let Some(status) = tx_status { + if let Some(receipt) = ReceiptProvider::receipt_by_hash(provider, transaction_hash) + .map_err(StarknetApiError::from)? + { + let execution_status = if receipt.is_reverted() { + TransactionExecutionStatus::Reverted + } else { + TransactionExecutionStatus::Succeeded + }; + + return Ok(match status { + FinalityStatus::AcceptedOnL1 => { + TransactionStatus::AcceptedOnL1(execution_status) + } + FinalityStatus::AcceptedOnL2 => { + TransactionStatus::AcceptedOnL2(execution_status) + } + }); + } + } - let tx_status = TransactionStatusProvider::transaction_status(provider, transaction_hash) - .map_err(StarknetApiError::from)?; + let pending_state = this.inner.sequencer.pending_state(); + let state = pending_state.ok_or(StarknetApiError::TxnHashNotFound)?; + let executed_txs = state.executed_txs.read(); - if let Some(status) = tx_status { - if let Some(receipt) = ReceiptProvider::receipt_by_hash(provider, transaction_hash) - .map_err(StarknetApiError::from)? + // attemps to find in the valid transactions list first (executed_txs) + // if not found, then search in the rejected transactions list (rejected_txs) + if let Some(is_reverted) = executed_txs + .iter() + .find(|(tx, _)| tx.hash == transaction_hash) + .map(|(_, rct)| rct.receipt.is_reverted()) { - let execution_status = if receipt.is_reverted() { + let exec_status = if is_reverted { TransactionExecutionStatus::Reverted } else { TransactionExecutionStatus::Succeeded }; - return Ok(match status { - FinalityStatus::AcceptedOnL1 => { - TransactionStatus::AcceptedOnL1(execution_status) - } - FinalityStatus::AcceptedOnL2 => { - TransactionStatus::AcceptedOnL2(execution_status) - } - }); - } - } - - let pending_state = self.sequencer.pending_state(); - let state = pending_state.ok_or(StarknetApiError::TxnHashNotFound)?; - let executed_txs = state.executed_txs.read(); - - // attemps to find in the valid transactions list first (executed_txs) - // if not found, then search in the rejected transactions list (rejected_txs) - if let Some(is_reverted) = executed_txs - .iter() - .find(|(tx, _)| tx.hash == transaction_hash) - .map(|(_, rct)| rct.receipt.is_reverted()) - { - let exec_status = if is_reverted { - TransactionExecutionStatus::Reverted + Ok(TransactionStatus::AcceptedOnL2(exec_status)) } else { - TransactionExecutionStatus::Succeeded - }; + let rejected_txs = state.rejected_txs.read(); - Ok(TransactionStatus::AcceptedOnL2(exec_status)) - } else { - let rejected_txs = state.rejected_txs.read(); - - rejected_txs - .iter() - .find(|(tx, _)| tx.hash == transaction_hash) - .map(|_| TransactionStatus::Rejected) - .ok_or(Error::from(StarknetApiError::TxnHashNotFound)) - } + rejected_txs + .iter() + .find(|(tx, _)| tx.hash == transaction_hash) + .map(|_| TransactionStatus::Rejected) + .ok_or(Error::from(StarknetApiError::TxnHashNotFound)) + } + }) + .await } } diff --git a/crates/katana/tasks/Cargo.toml b/crates/katana/tasks/Cargo.toml new file mode 100644 index 0000000000..fd03a40729 --- /dev/null +++ b/crates/katana/tasks/Cargo.toml @@ -0,0 +1,12 @@ +[package] +edition.workspace = true +name = "katana-tasks" +version.workspace = true + +# See more keys and their definitions at 
https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +futures.workspace = true +rayon.workspace = true +thiserror.workspace = true +tokio.workspace = true diff --git a/crates/katana/tasks/src/lib.rs b/crates/katana/tasks/src/lib.rs new file mode 100644 index 0000000000..148630ade0 --- /dev/null +++ b/crates/katana/tasks/src/lib.rs @@ -0,0 +1,181 @@ +use std::any::Any; +use std::future::Future; +use std::panic::{self, AssertUnwindSafe}; +use std::pin::Pin; +use std::sync::Arc; +use std::task::Poll; + +use futures::channel::oneshot; +use rayon::ThreadPoolBuilder; +use tokio::runtime::Handle; +use tokio::task::JoinHandle; + +/// This `struct` is created by the [TokioTaskSpawner::new] method. +#[derive(Debug, thiserror::Error)] +#[error("Failed to initialize task spawner: {0}")] +pub struct TaskSpawnerInitError(tokio::runtime::TryCurrentError); + +/// A task spawner for spawning tasks on a tokio runtime. This is simple wrapper around a tokio's +/// runtime [Handle] to easily spawn tasks on the runtime. +/// +/// For running expensive CPU-bound tasks, use [BlockingTaskPool] instead. +#[derive(Debug, Clone)] +pub struct TokioTaskSpawner { + /// Handle to the tokio runtime. + tokio_handle: Handle, +} + +impl TokioTaskSpawner { + /// Creates a new [TokioTaskSpawner] over the currently running tokio runtime. + /// + /// ## Errors + /// + /// Returns an error if no tokio runtime has been started. + pub fn new() -> Result { + Ok(Self { tokio_handle: Handle::try_current().map_err(TaskSpawnerInitError)? }) + } + + /// Creates a new [TokioTaskSpawner] with the given tokio runtime [Handle]. + pub fn new_with_handle(tokio_handle: Handle) -> Self { + Self { tokio_handle } + } +} + +impl TokioTaskSpawner { + pub fn spawn(&self, future: F) -> JoinHandle + where + F: Future + Send + 'static, + F::Output: Send + 'static, + { + self.tokio_handle.spawn(future) + } + + pub fn spawn_blocking(&self, func: F) -> JoinHandle + where + F: FnOnce() -> R + Send + 'static, + R: Send + 'static, + { + self.tokio_handle.spawn_blocking(func) + } +} + +/// This `struct` is created by the [BlockingTaskPool::new] method. +#[derive(Debug, thiserror::Error)] +#[error("Failed to initialize blocking thread pool: {0}")] +pub struct BlockingTaskPoolInitError(rayon::ThreadPoolBuildError); + +type BlockingTaskResult = Result>; + +#[derive(Debug)] +#[must_use = "BlockingTaskHandle does nothing unless polled"] +pub struct BlockingTaskHandle(oneshot::Receiver>); + +impl Future for BlockingTaskHandle { + type Output = BlockingTaskResult; + + fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll { + match Pin::new(&mut self.get_mut().0).poll(cx) { + Poll::Ready(Ok(res)) => Poll::Ready(res), + Poll::Ready(Err(_)) => panic!("blocking task cancelled"), + Poll::Pending => Poll::Pending, + } + } +} + +/// A thread-pool for spawning blocking tasks . This is a simple wrapper around *rayon*'s +/// thread-pool. This is mainly for executing expensive CPU-bound tasks. For spawing blocking +/// IO-bound tasks, use [TokioTaskSpawner::spawn_blocking] instead. +/// +/// Refer to the [CPU-bound tasks and blocking code] section of the *tokio* docs and this [blog +/// post] for more information. 
+/// +/// [CPU-bound tasks and blocking code]: https://docs.rs/tokio/latest/tokio/index.html#cpu-bound-tasks-and-blocking-code +/// [blog post]: https://ryhl.io/blog/async-what-is-blocking/ +#[derive(Debug, Clone)] +pub struct BlockingTaskPool { + pool: Arc, +} + +impl BlockingTaskPool { + /// Returns *rayon*'s [ThreadPoolBuilder] which can be used to build a new [BlockingTaskPool]. + pub fn build() -> ThreadPoolBuilder { + ThreadPoolBuilder::new().thread_name(|i| format!("blocking-thread-pool-{i}")) + } + + /// Creates a new [BlockingTaskPool] with the default configuration. + pub fn new() -> Result { + Self::build() + .build() + .map(|pool| Self { pool: Arc::new(pool) }) + .map_err(BlockingTaskPoolInitError) + } + + /// Creates a new [BlockingTaskPool] with the given *rayon* thread pool. + pub fn new_with_pool(rayon_pool: rayon::ThreadPool) -> Self { + Self { pool: Arc::new(rayon_pool) } + } + + /// Spawns an asynchronous task in this thread-pool, returning a handle for waiting on the + /// result asynchronously. + pub fn spawn(&self, func: F) -> BlockingTaskHandle + where + F: FnOnce() -> R + Send + 'static, + R: Send + 'static, + { + let (tx, rx) = oneshot::channel(); + self.pool.spawn(move || { + let _ = tx.send(panic::catch_unwind(AssertUnwindSafe(func))); + }); + BlockingTaskHandle(rx) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn tokio_task_spawner() { + let rt = tokio::runtime::Runtime::new().unwrap(); + + { + rt.block_on(async { + assert!( + TokioTaskSpawner::new().is_ok(), + "TokioTaskSpawner::new() should return Ok if within a tokio runtime" + ) + }); + } + + { + let tokio_handle = rt.handle().clone(); + rt.block_on(async move { + let spawner = TokioTaskSpawner::new_with_handle(tokio_handle); + let res = spawner.spawn(async { 1 + 1 }).await; + assert!(res.is_ok()); + }) + } + + { + assert!( + TokioTaskSpawner::new() + .unwrap_err() + .to_string() + .contains("Failed to initialize task spawner:"), + "TokioTaskSpawner::new() should return an error if not within a tokio runtime" + ); + } + } + + #[test] + fn blocking_task_pool() { + let rt = tokio::runtime::Runtime::new().unwrap(); + let blocking_pool = BlockingTaskPool::new().unwrap(); + rt.block_on(async { + let res = blocking_pool.spawn(|| 1 + 1).await; + assert!(res.is_ok()); + let res = blocking_pool.spawn(|| panic!("test")).await; + assert!(res.is_err(), "panic'd task should be caught"); + }) + } +} From 30b900b1db759f233c948774d44de4c91e52233a Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Sat, 20 Jan 2024 18:21:12 +0900 Subject: [PATCH 30/33] Refactor `katana` RPC components into smaller crates (#1461) --- Cargo.lock | 136 ++++++++++++- Cargo.toml | 4 +- crates/dojo-test-utils/Cargo.toml | 3 +- crates/dojo-test-utils/src/sequencer.rs | 2 +- crates/katana/Cargo.toml | 3 +- crates/katana/core/src/accounts.rs | 4 +- crates/katana/rpc/rpc-api/Cargo.toml | 18 ++ crates/katana/rpc/rpc-api/src/katana.rs | 31 +++ .../{src/api/mod.rs => rpc-api/src/lib.rs} | 0 crates/katana/rpc/rpc-api/src/starknet.rs | 181 ++++++++++++++++++ crates/katana/rpc/rpc-types/Cargo.toml | 4 + crates/katana/rpc/rpc-types/src/block.rs | 16 +- .../katana/rpc/rpc-types/src/error/katana.rs | 20 ++ crates/katana/rpc/rpc-types/src/error/mod.rs | 2 + .../api => rpc-types/src/error}/starknet.rs | 173 +---------------- crates/katana/rpc/rpc-types/src/lib.rs | 11 ++ crates/katana/rpc/rpc-types/src/message.rs | 4 +- crates/katana/rpc/rpc-types/src/receipt.rs | 8 +- .../katana/rpc/rpc-types/src/state_update.rs | 8 +- 
.../katana/rpc/rpc-types/src/transaction.rs | 16 +- crates/katana/rpc/{ => rpc}/Cargo.toml | 17 +- crates/katana/rpc/{ => rpc}/src/config.rs | 2 +- crates/katana/rpc/{ => rpc}/src/katana.rs | 4 +- crates/katana/rpc/{ => rpc}/src/lib.rs | 7 +- crates/katana/rpc/{ => rpc}/src/starknet.rs | 58 +++--- crates/katana/rpc/{ => rpc}/tests/starknet.rs | 0 .../tests/test_data/cairo0_contract.json | 0 .../tests/test_data/cairo1_contract.json | 0 crates/katana/rpc/src/api/katana.rs | 49 ----- crates/katana/src/args.rs | 2 +- 30 files changed, 487 insertions(+), 296 deletions(-) create mode 100644 crates/katana/rpc/rpc-api/Cargo.toml create mode 100644 crates/katana/rpc/rpc-api/src/katana.rs rename crates/katana/rpc/{src/api/mod.rs => rpc-api/src/lib.rs} (100%) create mode 100644 crates/katana/rpc/rpc-api/src/starknet.rs create mode 100644 crates/katana/rpc/rpc-types/src/error/katana.rs create mode 100644 crates/katana/rpc/rpc-types/src/error/mod.rs rename crates/katana/rpc/{src/api => rpc-types/src/error}/starknet.rs (52%) rename crates/katana/rpc/{ => rpc}/Cargo.toml (65%) rename crates/katana/rpc/{ => rpc}/src/config.rs (90%) rename crates/katana/rpc/{ => rpc}/src/katana.rs (94%) rename crates/katana/rpc/{ => rpc}/src/lib.rs (96%) rename crates/katana/rpc/{ => rpc}/src/starknet.rs (94%) rename crates/katana/rpc/{ => rpc}/tests/starknet.rs (100%) rename crates/katana/rpc/{ => rpc}/tests/test_data/cairo0_contract.json (100%) rename crates/katana/rpc/{ => rpc}/tests/test_data/cairo1_contract.json (100%) delete mode 100644 crates/katana/rpc/src/api/katana.rs diff --git a/Cargo.lock b/Cargo.lock index 34a59a1668..745ad8d2ef 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1168,7 +1168,7 @@ dependencies = [ "quote", "serde_json", "starknet", - "syn 2.0.41", + "syn 2.0.47", "thiserror", ] @@ -1196,7 +1196,7 @@ dependencies = [ "quote", "serde_json", "starknet", - "syn 2.0.41", + "syn 2.0.47", "thiserror", ] @@ -2722,7 +2722,7 @@ checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" [[package]] name = "dojo-bindgen" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "async-trait", "cainome 0.1.5 (git+https://github.com/cartridge-gg/cainome?tag=v0.2.2)", @@ -2830,6 +2830,7 @@ dependencies = [ "katana-core", "katana-primitives", "katana-rpc", + "katana-rpc-api", "scarb", "scarb-ui", "serde", @@ -4623,6 +4624,26 @@ dependencies = [ "walkdir", ] +[[package]] +name = "gloo-net" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9902a044653b26b99f7e3693a42f171312d9be8b26b5697bd1e43ad1f8a35e10" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "gloo-timers" version = "0.2.6" @@ -4635,6 +4656,19 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "gloo-utils" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "good_lp" version = "1.7.0" @@ -4957,9 +4991,12 @@ dependencies = [ "futures-util", "http", "hyper", + "log", "rustls 0.21.10", + "rustls-native-certs", "tokio", "tokio-rustls 0.24.1", + "webpki-roots", ] [[package]] @@ -5389,11 +5426,40 @@ version = "0.16.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "367a292944c07385839818bb71c8d76611138e2dedb0677d035b8da21d29c78b" dependencies = [ + "jsonrpsee-client-transport", "jsonrpsee-core", + "jsonrpsee-http-client", "jsonrpsee-proc-macros", "jsonrpsee-server", "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", + "tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8b3815d9f5d5de348e5f162b316dc9cdf4548305ebb15b4eb9328e66cf27d7a" +dependencies = [ + "anyhow", + "futures-channel", + "futures-timer", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-util", "tracing", + "webpki-roots", ] [[package]] @@ -5404,9 +5470,11 @@ checksum = "2b5dde66c53d6dcdc8caea1874a45632ec0fcf5b437789f1e45766a1512ce803" dependencies = [ "anyhow", "arrayvec", + "async-lock 2.8.0", "async-trait", "beef", "futures-channel", + "futures-timer", "futures-util", "globset", "hyper", @@ -5420,6 +5488,26 @@ dependencies = [ "thiserror", "tokio", "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e5f9fabdd5d79344728521bb65e3106b49ec405a78b66fbff073b72b389fa43" +dependencies = [ + "async-trait", + "hyper", + "hyper-rustls 0.24.2", + "jsonrpsee-core", + "jsonrpsee-types", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", ] [[package]] @@ -5471,6 +5559,29 @@ dependencies = [ "tracing", ] +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5df77c8f625d36e4cfb583c5a674eccebe32403fcfe42f7ceff7fac9324dd" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e1b3975ed5d73f456478681a417128597acd6a2487855fdb7b4a3d4d195bf5e" +dependencies = [ + "http", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", +] + [[package]] name = "jsonwebtoken" version = "8.3.0" @@ -5520,6 +5631,7 @@ dependencies = [ "katana-core", "katana-primitives", "katana-rpc", + "katana-rpc-api", "metrics 0.5.1-alpha.0", "metrics-process", "serde_json", @@ -5684,6 +5796,7 @@ dependencies = [ "katana-executor", "katana-primitives", "katana-provider", + "katana-rpc-api", "katana-rpc-types", "katana-rpc-types-builder", "katana-tasks", @@ -5700,17 +5813,32 @@ dependencies = [ "url", ] +[[package]] +name = "katana-rpc-api" +version = "0.5.1-alpha.0" +dependencies = [ + "jsonrpsee", + "katana-core", + "katana-primitives", + "katana-rpc-types", + "starknet", +] + [[package]] name = "katana-rpc-types" version = "0.5.1-alpha.0" dependencies = [ "anyhow", "derive_more", + "jsonrpsee", + "katana-core", "katana-primitives", + "katana-provider", "serde", "serde_json", "serde_with", "starknet", + "thiserror", ] [[package]] @@ -5739,7 +5867,7 @@ dependencies = [ [[package]] name = "katana-tasks" -version = "0.5.0" +version = "0.5.1-alpha.0" dependencies = [ "futures", "rayon", diff --git a/Cargo.toml b/Cargo.toml index b0a8ef8aba..76576ea096 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,7 +15,8 @@ members = [ "crates/katana/core", 
"crates/katana/executor", "crates/katana/primitives", - "crates/katana/rpc", + "crates/katana/rpc/rpc", + "crates/katana/rpc/rpc-api", "crates/katana/rpc/rpc-types", "crates/katana/rpc/rpc-types-builder", "crates/katana/runner", @@ -89,6 +90,7 @@ futures = "0.3.28" hex = "0.4.3" indoc = "1.0.7" itertools = "0.10.3" +jsonrpsee = { version = "0.16.2", default-features = false } lazy_static = "1.4.0" metrics-process = "1.0.9" num-bigint = "0.4" diff --git a/crates/dojo-test-utils/Cargo.toml b/crates/dojo-test-utils/Cargo.toml index 5e549eb37c..a69649fd02 100644 --- a/crates/dojo-test-utils/Cargo.toml +++ b/crates/dojo-test-utils/Cargo.toml @@ -19,7 +19,8 @@ dojo-world = { path = "../dojo-world", features = [ "manifest", "migration" ] } jsonrpsee = { version = "0.16.2", features = [ "server" ] } katana-core = { path = "../katana/core" } katana-primitives = { path = "../katana/primitives" } -katana-rpc = { path = "../katana/rpc" } +katana-rpc = { path = "../katana/rpc/rpc" } +katana-rpc-api = { path = "../katana/rpc/rpc-api" } scarb-ui.workspace = true scarb.workspace = true serde.workspace = true diff --git a/crates/dojo-test-utils/src/sequencer.rs b/crates/dojo-test-utils/src/sequencer.rs index 9d44119b75..4726363183 100644 --- a/crates/dojo-test-utils/src/sequencer.rs +++ b/crates/dojo-test-utils/src/sequencer.rs @@ -5,9 +5,9 @@ pub use katana_core::backend::config::{Environment, StarknetConfig}; use katana_core::sequencer::KatanaSequencer; pub use katana_core::sequencer::SequencerConfig; use katana_primitives::chain::ChainId; -use katana_rpc::api::ApiKind; use katana_rpc::config::ServerConfig; use katana_rpc::{spawn, NodeHandle}; +use katana_rpc_api::ApiKind; use starknet::accounts::{ExecutionEncoding, SingleOwnerAccount}; use starknet::core::chain_id; use starknet::core::types::FieldElement; diff --git a/crates/katana/Cargo.toml b/crates/katana/Cargo.toml index 5196c53e50..992199c119 100644 --- a/crates/katana/Cargo.toml +++ b/crates/katana/Cargo.toml @@ -12,7 +12,8 @@ clap_complete.workspace = true console.workspace = true katana-core = { path = "core" } katana-primitives = { path = "primitives" } -katana-rpc = { path = "rpc" } +katana-rpc = { path = "rpc/rpc" } +katana-rpc-api = { path = "rpc/rpc-api" } metrics = { path = "../metrics" } metrics-process.workspace = true serde_json.workspace = true diff --git a/crates/katana/core/src/accounts.rs b/crates/katana/core/src/accounts.rs index f602a38e34..4f6c3d82f4 100644 --- a/crates/katana/core/src/accounts.rs +++ b/crates/katana/core/src/accounts.rs @@ -6,7 +6,7 @@ use katana_primitives::FieldElement; use katana_provider::traits::state::StateWriter; use rand::rngs::SmallRng; use rand::{RngCore, SeedableRng}; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use serde_with::serde_as; use starknet::core::serde::unsigned_field_element::UfeHex; use starknet::core::utils::{get_contract_address, get_storage_var_address}; @@ -15,7 +15,7 @@ use starknet::signers::SigningKey; use crate::constants::{FEE_TOKEN_ADDRESS, OZ_V1_ACCOUNT_CONTRACT_CLASS_HASH}; #[serde_as] -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct Account { #[serde_as(as = "UfeHex")] pub balance: FieldElement, diff --git a/crates/katana/rpc/rpc-api/Cargo.toml b/crates/katana/rpc/rpc-api/Cargo.toml new file mode 100644 index 0000000000..f9fac4650f --- /dev/null +++ b/crates/katana/rpc/rpc-api/Cargo.toml @@ -0,0 +1,18 @@ +[package] +description = "Katana RPC APIs" +edition.workspace = true +name = "katana-rpc-api" 
+version.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +katana-core = { path = "../../core" } +katana-primitives = { path = "../../primitives" } +katana-rpc-types = { path = "../rpc-types" } + +jsonrpsee = { workspace = true, features = [ "macros", "server" ] } +starknet.workspace = true + +[features] +client = [ "jsonrpsee/client" ] diff --git a/crates/katana/rpc/rpc-api/src/katana.rs b/crates/katana/rpc/rpc-api/src/katana.rs new file mode 100644 index 0000000000..9ef3dc99b5 --- /dev/null +++ b/crates/katana/rpc/rpc-api/src/katana.rs @@ -0,0 +1,31 @@ +use jsonrpsee::core::RpcResult; +use jsonrpsee::proc_macros::rpc; +use katana_core::accounts::Account; +use katana_primitives::FieldElement; + +#[cfg_attr(not(feature = "client"), rpc(server, namespace = "katana"))] +#[cfg_attr(feature = "client", rpc(client, server, namespace = "katana"))] +pub trait KatanaApi { + #[method(name = "generateBlock")] + async fn generate_block(&self) -> RpcResult<()>; + + #[method(name = "nextBlockTimestamp")] + async fn next_block_timestamp(&self) -> RpcResult; + + #[method(name = "setNextBlockTimestamp")] + async fn set_next_block_timestamp(&self, timestamp: u64) -> RpcResult<()>; + + #[method(name = "increaseNextBlockTimestamp")] + async fn increase_next_block_timestamp(&self, timestamp: u64) -> RpcResult<()>; + + #[method(name = "predeployedAccounts")] + async fn predeployed_accounts(&self) -> RpcResult>; + + #[method(name = "setStorageAt")] + async fn set_storage_at( + &self, + contract_address: FieldElement, + key: FieldElement, + value: FieldElement, + ) -> RpcResult<()>; +} diff --git a/crates/katana/rpc/src/api/mod.rs b/crates/katana/rpc/rpc-api/src/lib.rs similarity index 100% rename from crates/katana/rpc/src/api/mod.rs rename to crates/katana/rpc/rpc-api/src/lib.rs diff --git a/crates/katana/rpc/rpc-api/src/starknet.rs b/crates/katana/rpc/rpc-api/src/starknet.rs new file mode 100644 index 0000000000..cc9090014b --- /dev/null +++ b/crates/katana/rpc/rpc-api/src/starknet.rs @@ -0,0 +1,181 @@ +use jsonrpsee::core::RpcResult; +use jsonrpsee::proc_macros::rpc; +use katana_primitives::block::{BlockIdOrTag, BlockNumber}; +use katana_primitives::transaction::TxHash; +use katana_primitives::FieldElement; +use katana_rpc_types::block::{ + BlockHashAndNumber, BlockTxCount, MaybePendingBlockWithTxHashes, MaybePendingBlockWithTxs, +}; +use katana_rpc_types::event::{EventFilterWithPage, EventsPage}; +use katana_rpc_types::message::MsgFromL1; +use katana_rpc_types::receipt::MaybePendingTxReceipt; +use katana_rpc_types::state_update::StateUpdate; +use katana_rpc_types::transaction::{ + BroadcastedDeclareTx, BroadcastedDeployAccountTx, BroadcastedInvokeTx, BroadcastedTx, + DeclareTxResult, DeployAccountTxResult, InvokeTxResult, Tx, +}; +use katana_rpc_types::{ContractClass, FeeEstimate, FeltAsHex, FunctionCall, SyncingStatus}; +use starknet::core::types::TransactionStatus; + +/// Starknet JSON-RPC APIs: +#[cfg_attr(not(feature = "client"), rpc(server, namespace = "starknet"))] +#[cfg_attr(feature = "client", rpc(client, server, namespace = "starknet"))] +pub trait StarknetApi { + /// Returns the version of the Starknet JSON-RPC specification being used. + #[method(name = "specVersion")] + async fn spec_version(&self) -> RpcResult { + Ok("0.5.1".into()) + } + + /// Get block information with transaction hashes given the block id. 
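// With the `client` feature enabled, the `rpc(client, server, ...)` attributes in this
// crate should also generate `KatanaApiClient` / `StarknetApiClient` extension traits
// for any jsonrpsee client. A minimal usage sketch (assuming `jsonrpsee/client` pulls in
// the HTTP client; the URL, port and function name below are illustrative only):
//
//     use jsonrpsee::http_client::HttpClientBuilder;
//     use katana_rpc_api::katana::KatanaApiClient;
//     use katana_rpc_api::starknet::StarknetApiClient;
//
//     async fn ping_node() -> anyhow::Result<()> {
//         let client = HttpClientBuilder::default().build("http://127.0.0.1:5050")?;
//         println!("latest block: {}", client.block_number().await?); // starknet_blockNumber
//         client.generate_block().await?;                             // katana_generateBlock
//         Ok(())
//     }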
+ #[method(name = "getBlockWithTxHashes")] + async fn block_with_tx_hashes( + &self, + block_id: BlockIdOrTag, + ) -> RpcResult; + + /// Get block information with full transactions given the block id. + #[method(name = "getBlockWithTxs")] + async fn block_with_txs(&self, block_id: BlockIdOrTag) -> RpcResult; + + /// Get the information about the result of executing the requested block. + #[method(name = "getStateUpdate")] + async fn state_update(&self, block_id: BlockIdOrTag) -> RpcResult; + + /// Get the value of the storage at the given address and key + #[method(name = "getStorageAt")] + async fn storage_at( + &self, + contract_address: FieldElement, + key: FieldElement, + block_id: BlockIdOrTag, + ) -> RpcResult; + + /// Gets the transaction status (possibly reflecting that the tx is still in the mempool, or + /// dropped from it). + #[method(name = "getTransactionStatus")] + async fn transaction_status(&self, transaction_hash: TxHash) -> RpcResult; + + /// Get the details and status of a submitted transaction. + #[method(name = "getTransactionByHash")] + async fn transaction_by_hash(&self, transaction_hash: TxHash) -> RpcResult; + + /// Get the details of a transaction by a given block id and index. + #[method(name = "getTransactionByBlockIdAndIndex")] + async fn transaction_by_block_id_and_index( + &self, + block_id: BlockIdOrTag, + index: u64, + ) -> RpcResult; + + /// Get the transaction receipt by the transaction hash. + #[method(name = "getTransactionReceipt")] + async fn transaction_receipt( + &self, + transaction_hash: TxHash, + ) -> RpcResult; + + /// Get the contract class definition in the given block associated with the given hash. + #[method(name = "getClass")] + async fn class( + &self, + block_id: BlockIdOrTag, + class_hash: FieldElement, + ) -> RpcResult; + + /// Get the contract class hash in the given block for the contract deployed at the given + /// address. + #[method(name = "getClassHashAt")] + async fn class_hash_at( + &self, + block_id: BlockIdOrTag, + contract_address: FieldElement, + ) -> RpcResult; + + /// Get the contract class definition in the given block at the given address. + #[method(name = "getClassAt")] + async fn class_at( + &self, + block_id: BlockIdOrTag, + contract_address: FieldElement, + ) -> RpcResult; + + /// Get the number of transactions in a block given a block id. + #[method(name = "getBlockTransactionCount")] + async fn block_transaction_count(&self, block_id: BlockIdOrTag) -> RpcResult; + + /// Call a starknet function without creating a StarkNet transaction. + #[method(name = "call")] + async fn call( + &self, + request: FunctionCall, + block_id: BlockIdOrTag, + ) -> RpcResult>; + + /// Estimate the fee for of StarkNet transactions. + #[method(name = "estimateFee")] + async fn estimate_fee( + &self, + request: Vec, + block_id: BlockIdOrTag, + ) -> RpcResult>; + + /// Estimate the L2 fee of a message sent on L1. + #[method(name = "estimateMessageFee")] + async fn estimate_message_fee( + &self, + message: MsgFromL1, + block_id: BlockIdOrTag, + ) -> RpcResult; + + /// Get the most recent accepted block number. + #[method(name = "blockNumber")] + async fn block_number(&self) -> RpcResult; + + /// Get the most recent accepted block hash and number. + #[method(name = "blockHashAndNumber")] + async fn block_hash_and_number(&self) -> RpcResult; + + /// Return the currently configured StarkNet chain id. 
+ #[method(name = "chainId")] + async fn chain_id(&self) -> RpcResult; + + /// Returns an object about the sync status, or false if the node is not synching. + #[method(name = "syncing")] + async fn syncing(&self) -> RpcResult { + Ok(SyncingStatus::False) + } + + /// Returns all event objects matching the conditions in the provided filter. + #[method(name = "getEvents")] + async fn events(&self, filter: EventFilterWithPage) -> RpcResult; + + /// Get the nonce associated with the given address in the given block. + #[method(name = "getNonce")] + async fn nonce( + &self, + block_id: BlockIdOrTag, + contract_address: FieldElement, + ) -> RpcResult; + + /// Submit a new transaction to be added to the chain. + #[method(name = "addInvokeTransaction")] + async fn add_invoke_transaction( + &self, + invoke_transaction: BroadcastedInvokeTx, + ) -> RpcResult; + + /// Submit a new class declaration transaction. + #[method(name = "addDeclareTransaction")] + async fn add_declare_transaction( + &self, + declare_transaction: BroadcastedDeclareTx, + ) -> RpcResult; + + /// Submit a new deploy account transaction. + #[method(name = "addDeployAccountTransaction")] + async fn add_deploy_account_transaction( + &self, + deploy_account_transaction: BroadcastedDeployAccountTx, + ) -> RpcResult; +} diff --git a/crates/katana/rpc/rpc-types/Cargo.toml b/crates/katana/rpc/rpc-types/Cargo.toml index 792d2cccc2..a8760330b0 100644 --- a/crates/katana/rpc/rpc-types/Cargo.toml +++ b/crates/katana/rpc/rpc-types/Cargo.toml @@ -7,13 +7,17 @@ version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +katana-core = { path = "../../core" } katana-primitives = { path = "../../primitives" } +katana-provider = { path = "../../storage/provider" } anyhow.workspace = true derive_more.workspace = true +jsonrpsee = { workspace = true, features = [ "macros", "server" ] } serde.workspace = true serde_with.workspace = true starknet.workspace = true +thiserror.workspace = true [dev-dependencies] serde_json.workspace = true diff --git a/crates/katana/rpc/rpc-types/src/block.rs b/crates/katana/rpc/rpc-types/src/block.rs index bbfc2d7f30..9d5b49725b 100644 --- a/crates/katana/rpc/rpc-types/src/block.rs +++ b/crates/katana/rpc/rpc-types/src/block.rs @@ -1,11 +1,11 @@ use katana_primitives::block::{Block, BlockHash, BlockNumber, FinalityStatus, PartialHeader}; use katana_primitives::transaction::{TxHash, TxWithHash}; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use starknet::core::types::{BlockStatus, ResourcePrice}; pub type BlockTxCount = u64; -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct BlockWithTxs(starknet::core::types::BlockWithTxs); @@ -37,7 +37,7 @@ impl BlockWithTxs { } } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct PendingBlockWithTxs(starknet::core::types::PendingBlockWithTxs); @@ -62,14 +62,14 @@ impl PendingBlockWithTxs { } } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum MaybePendingBlockWithTxs { Pending(PendingBlockWithTxs), Block(BlockWithTxs), } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct BlockWithTxHashes(starknet::core::types::BlockWithTxHashes); @@ -102,7 +102,7 @@ impl BlockWithTxHashes { } } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, 
Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct PendingBlockWithTxHashes(starknet::core::types::PendingBlockWithTxHashes); @@ -124,14 +124,14 @@ impl PendingBlockWithTxHashes { } } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum MaybePendingBlockWithTxHashes { Pending(PendingBlockWithTxHashes), Block(BlockWithTxHashes), } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct BlockHashAndNumber(starknet::core::types::BlockHashAndNumber); diff --git a/crates/katana/rpc/rpc-types/src/error/katana.rs b/crates/katana/rpc/rpc-types/src/error/katana.rs new file mode 100644 index 0000000000..24013845bb --- /dev/null +++ b/crates/katana/rpc/rpc-types/src/error/katana.rs @@ -0,0 +1,20 @@ +use jsonrpsee::core::Error; +use jsonrpsee::types::error::CallError; +use jsonrpsee::types::ErrorObject; + +#[derive(thiserror::Error, Clone, Copy, Debug)] +#[allow(clippy::enum_variant_names)] +pub enum KatanaApiError { + #[error("Failed to change next block timestamp.")] + FailedToChangeNextBlockTimestamp = 1, + #[error("Failed to dump state.")] + FailedToDumpState = 2, + #[error("Failed to update storage.")] + FailedToUpdateStorage = 3, +} + +impl From for Error { + fn from(err: KatanaApiError) -> Self { + Error::Call(CallError::Custom(ErrorObject::owned(err as i32, err.to_string(), None::<()>))) + } +} diff --git a/crates/katana/rpc/rpc-types/src/error/mod.rs b/crates/katana/rpc/rpc-types/src/error/mod.rs new file mode 100644 index 0000000000..608feeefd5 --- /dev/null +++ b/crates/katana/rpc/rpc-types/src/error/mod.rs @@ -0,0 +1,2 @@ +pub mod katana; +pub mod starknet; diff --git a/crates/katana/rpc/src/api/starknet.rs b/crates/katana/rpc/rpc-types/src/error/starknet.rs similarity index 52% rename from crates/katana/rpc/src/api/starknet.rs rename to crates/katana/rpc/rpc-types/src/error/starknet.rs index b3833488ed..564f704bb3 100644 --- a/crates/katana/rpc/src/api/starknet.rs +++ b/crates/katana/rpc/rpc-types/src/error/starknet.rs @@ -1,26 +1,12 @@ use jsonrpsee::core::Error; -use jsonrpsee::proc_macros::rpc; -use jsonrpsee::types::error::{CallError, ErrorObject}; +use jsonrpsee::types::error::CallError; +use jsonrpsee::types::ErrorObject; use katana_core::sequencer_error::SequencerError; -use katana_primitives::block::{BlockIdOrTag, BlockNumber}; -use katana_primitives::transaction::TxHash; -use katana_primitives::FieldElement; use katana_provider::error::ProviderError; -use katana_rpc_types::block::{ - BlockHashAndNumber, BlockTxCount, MaybePendingBlockWithTxHashes, MaybePendingBlockWithTxs, -}; -use katana_rpc_types::event::{EventFilterWithPage, EventsPage}; -use katana_rpc_types::message::MsgFromL1; -use katana_rpc_types::receipt::MaybePendingTxReceipt; -use katana_rpc_types::state_update::StateUpdate; -use katana_rpc_types::transaction::{ - BroadcastedDeclareTx, BroadcastedDeployAccountTx, BroadcastedInvokeTx, BroadcastedTx, - DeclareTxResult, DeployAccountTxResult, InvokeTxResult, Tx, -}; -use katana_rpc_types::{ContractClass, FeeEstimate, FeltAsHex, FunctionCall}; -use starknet::core::types::{ContractErrorData, TransactionStatus}; +use starknet::core::types::ContractErrorData; -#[derive(thiserror::Error, Clone, Debug)] +/// Possible list of errors that can be returned by the Starknet API according to the spec: . 
+#[derive(Debug, thiserror::Error, Clone)] #[repr(i32)] pub enum StarknetApiError { #[error("Failed to write transaction")] @@ -119,6 +105,11 @@ impl StarknetApiError { } } +#[derive(serde::Serialize, serde::Deserialize)] +struct UnexpectedError { + reason: String, +} + impl From for StarknetApiError { fn from(value: ProviderError) -> Self { StarknetApiError::UnexpectedError { reason: value.to_string() } @@ -136,11 +127,6 @@ impl From for Error { } StarknetApiError::UnexpectedError { reason } => { - #[derive(serde::Serialize, serde::Deserialize)] - struct UnexpectedError { - reason: String, - } - ErrorObject::owned(code, message, Some(UnexpectedError { reason })) } @@ -166,142 +152,3 @@ impl From for StarknetApiError { } } } - -#[rpc(server, namespace = "starknet")] -pub trait StarknetApi { - // Read API - - #[method(name = "specVersion")] - async fn spec_version(&self) -> Result { - Ok("0.5.1".into()) - } - - #[method(name = "chainId")] - async fn chain_id(&self) -> Result; - - #[method(name = "getNonce")] - async fn nonce( - &self, - block_id: BlockIdOrTag, - contract_address: FieldElement, - ) -> Result; - - #[method(name = "blockNumber")] - async fn block_number(&self) -> Result; - - #[method(name = "getTransactionByHash")] - async fn transaction_by_hash(&self, transaction_hash: TxHash) -> Result; - - #[method(name = "getBlockTransactionCount")] - async fn block_transaction_count(&self, block_id: BlockIdOrTag) -> Result; - - #[method(name = "getClassAt")] - async fn class_at( - &self, - block_id: BlockIdOrTag, - contract_address: FieldElement, - ) -> Result; - - #[method(name = "blockHashAndNumber")] - async fn block_hash_and_number(&self) -> Result; - - #[method(name = "getBlockWithTxHashes")] - async fn block_with_tx_hashes( - &self, - block_id: BlockIdOrTag, - ) -> Result; - - #[method(name = "getTransactionByBlockIdOrTagAndIndex")] - async fn transaction_by_block_id_and_index( - &self, - block_id: BlockIdOrTag, - index: u64, - ) -> Result; - - #[method(name = "getBlockWithTxs")] - async fn block_with_txs( - &self, - block_id: BlockIdOrTag, - ) -> Result; - - #[method(name = "getStateUpdate")] - async fn state_update(&self, block_id: BlockIdOrTag) -> Result; - - #[method(name = "getTransactionReceipt")] - async fn transaction_receipt( - &self, - transaction_hash: TxHash, - ) -> Result; - - #[method(name = "getTransactionStatus")] - async fn transaction_status( - &self, - transaction_hash: TxHash, - ) -> Result; - - #[method(name = "getClassHashAt")] - async fn class_hash_at( - &self, - block_id: BlockIdOrTag, - contract_address: FieldElement, - ) -> Result; - - #[method(name = "getClass")] - async fn class( - &self, - block_id: BlockIdOrTag, - class_hash: FieldElement, - ) -> Result; - - #[method(name = "getEvents")] - async fn events(&self, filter: EventFilterWithPage) -> Result; - - #[method(name = "estimateFee")] - async fn estimate_fee( - &self, - request: Vec, - block_id: BlockIdOrTag, - ) -> Result, Error>; - - #[method(name = "estimateMessageFee")] - async fn estimate_message_fee( - &self, - message: MsgFromL1, - block_id: BlockIdOrTag, - ) -> Result; - - #[method(name = "call")] - async fn call( - &self, - request: FunctionCall, - block_id: BlockIdOrTag, - ) -> Result, Error>; - - #[method(name = "getStorageAt")] - async fn storage_at( - &self, - contract_address: FieldElement, - key: FieldElement, - block_id: BlockIdOrTag, - ) -> Result; - - // Write API - - #[method(name = "addDeployAccountTransaction")] - async fn add_deploy_account_transaction( - &self, - 
deploy_account_transaction: BroadcastedDeployAccountTx, - ) -> Result; - - #[method(name = "addDeclareTransaction")] - async fn add_declare_transaction( - &self, - declare_transaction: BroadcastedDeclareTx, - ) -> Result; - - #[method(name = "addInvokeTransaction")] - async fn add_invoke_transaction( - &self, - invoke_transaction: BroadcastedInvokeTx, - ) -> Result; -} diff --git a/crates/katana/rpc/rpc-types/src/lib.rs b/crates/katana/rpc/rpc-types/src/lib.rs index e5035af7ed..5afb4e5184 100644 --- a/crates/katana/rpc/rpc-types/src/lib.rs +++ b/crates/katana/rpc/rpc-types/src/lib.rs @@ -4,6 +4,7 @@ //! `starknet-rs`. pub mod block; +pub mod error; pub mod event; pub mod message; pub mod receipt; @@ -15,6 +16,7 @@ use std::ops::Deref; use serde::{Deserialize, Serialize}; use serde_with::serde_as; use starknet::core::serde::unsigned_field_element::UfeHex; +use starknet::core::types::SyncStatus; /// A wrapper around [`FieldElement`](katana_primitives::FieldElement) that serializes to hex as /// default. @@ -47,6 +49,15 @@ pub type FeeEstimate = starknet::core::types::FeeEstimate; pub type ContractClass = starknet::core::types::ContractClass; +/// The state of the node synchronization. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum SyncingStatus { + #[serde(rename = "FALSE")] + False, + #[serde(untagged)] + Status(SyncStatus), +} + #[cfg(test)] mod tests { use serde_json::json; diff --git a/crates/katana/rpc/rpc-types/src/message.rs b/crates/katana/rpc/rpc-types/src/message.rs index 3b6c37b446..e9f1e2981b 100644 --- a/crates/katana/rpc/rpc-types/src/message.rs +++ b/crates/katana/rpc/rpc-types/src/message.rs @@ -2,9 +2,9 @@ use katana_primitives::chain::ChainId; use katana_primitives::transaction::L1HandlerTx; use katana_primitives::utils::transaction::compute_l1_message_hash; use katana_primitives::FieldElement; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct MsgFromL1(starknet::core::types::MsgFromL1); impl MsgFromL1 { diff --git a/crates/katana/rpc/rpc-types/src/receipt.rs b/crates/katana/rpc/rpc-types/src/receipt.rs index 0d7ac49838..8c6f86fad3 100644 --- a/crates/katana/rpc/rpc-types/src/receipt.rs +++ b/crates/katana/rpc/rpc-types/src/receipt.rs @@ -1,7 +1,7 @@ use katana_primitives::block::{BlockHash, BlockNumber, FinalityStatus}; use katana_primitives::receipt::{MessageToL1, Receipt, TxExecutionResources}; use katana_primitives::transaction::TxHash; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use starknet::core::types::{ DeclareTransactionReceipt, DeployAccountTransactionReceipt, ExecutionResult, Hash256, InvokeTransactionReceipt, L1HandlerTransactionReceipt, PendingDeclareTransactionReceipt, @@ -10,7 +10,7 @@ use starknet::core::types::{ TransactionReceipt, }; -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct TxReceipt(starknet::core::types::TransactionReceipt); @@ -123,7 +123,7 @@ impl TxReceipt { } } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct PendingTxReceipt(starknet::core::types::PendingTransactionReceipt); @@ -213,7 +213,7 @@ impl PendingTxReceipt { } } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum MaybePendingTxReceipt { Receipt(TxReceipt), diff --git a/crates/katana/rpc/rpc-types/src/state_update.rs 
b/crates/katana/rpc/rpc-types/src/state_update.rs index 456cbf7f99..a14c61e4b6 100644 --- a/crates/katana/rpc/rpc-types/src/state_update.rs +++ b/crates/katana/rpc/rpc-types/src/state_update.rs @@ -1,20 +1,20 @@ -use serde::Serialize; +use serde::{Deserialize, Serialize}; use starknet::core::types::{ ContractStorageDiffItem, DeclaredClassItem, DeployedContractItem, NonceUpdate, StorageEntry, }; -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum MaybePendingStateUpdate { Pending(PendingStateUpdate), Update(StateUpdate), } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct StateUpdate(starknet::core::types::StateUpdate); -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct PendingStateUpdate(starknet::core::types::PendingStateUpdate); diff --git a/crates/katana/rpc/rpc-types/src/transaction.rs b/crates/katana/rpc/rpc-types/src/transaction.rs index 00aab586bc..a7445ffcaf 100644 --- a/crates/katana/rpc/rpc-types/src/transaction.rs +++ b/crates/katana/rpc/rpc-types/src/transaction.rs @@ -21,7 +21,7 @@ use starknet::core::types::{ }; use starknet::core::utils::get_contract_address; -#[derive(Debug, Clone, Deserialize, Deref)] +#[derive(Debug, Clone, Serialize, Deserialize, Deref)] #[serde(transparent)] pub struct BroadcastedInvokeTx(BroadcastedInvokeTransaction); @@ -39,7 +39,7 @@ impl BroadcastedInvokeTx { } } -#[derive(Debug, Clone, Deserialize, Deref)] +#[derive(Debug, Clone, Serialize, Deserialize, Deref)] #[serde(transparent)] pub struct BroadcastedDeclareTx(BroadcastedDeclareTransaction); @@ -108,7 +108,7 @@ impl BroadcastedDeclareTx { } } -#[derive(Debug, Clone, Deserialize, Deref)] +#[derive(Debug, Clone, Serialize, Deserialize, Deref)] #[serde(transparent)] pub struct BroadcastedDeployAccountTx(BroadcastedDeployAccountTransaction); @@ -135,7 +135,7 @@ impl BroadcastedDeployAccountTx { } } -#[derive(Debug, Clone, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] pub enum BroadcastedTx { Invoke(BroadcastedInvokeTx), @@ -143,19 +143,19 @@ pub enum BroadcastedTx { DeployAccount(BroadcastedDeployAccountTx), } -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct Tx(pub starknet::core::types::Transaction); -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct DeployAccountTxResult(DeployAccountTransactionResult); -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct DeclareTxResult(DeclareTransactionResult); -#[derive(Debug, Clone, Serialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(transparent)] pub struct InvokeTxResult(InvokeTransactionResult); diff --git a/crates/katana/rpc/Cargo.toml b/crates/katana/rpc/rpc/Cargo.toml similarity index 65% rename from crates/katana/rpc/Cargo.toml rename to crates/katana/rpc/rpc/Cargo.toml index 91450e7971..99051c2006 100644 --- a/crates/katana/rpc/Cargo.toml +++ b/crates/katana/rpc/rpc/Cargo.toml @@ -7,12 +7,14 @@ repository.workspace = true version.workspace = true [dependencies] -katana-executor = { path = "../executor" } -katana-primitives = { path = "../primitives" } -katana-provider = { path = "../storage/provider" } -katana-rpc-types = { path = "rpc-types" } -katana-rpc-types-builder = { path = "rpc-types-builder" } 
-katana-tasks = { path = "../tasks" } +katana-core = { path = "../../core" } +katana-executor = { path = "../../executor" } +katana-primitives = { path = "../../primitives" } +katana-provider = { path = "../../storage/provider" } +katana-rpc-api = { path = "../rpc-api" } +katana-rpc-types = { path = "../rpc-types" } +katana-rpc-types-builder = { path = "../rpc-types-builder" } +katana-tasks = { path = "../../tasks" } anyhow.workspace = true cairo-lang-starknet = "2.3.1" @@ -21,7 +23,6 @@ futures.workspace = true hex = { version = "0.4.3", default-features = false } hyper = "0.14.20" jsonrpsee = { version = "0.16.2", features = [ "macros", "server" ] } -katana-core = { path = "../core" } serde.workspace = true serde_json.workspace = true serde_with.workspace = true @@ -35,5 +36,5 @@ tracing.workspace = true [dev-dependencies] assert_matches = "1.5.0" -dojo-test-utils = { path = "../../dojo-test-utils" } +dojo-test-utils = { path = "../../../dojo-test-utils" } url.workspace = true diff --git a/crates/katana/rpc/src/config.rs b/crates/katana/rpc/rpc/src/config.rs similarity index 90% rename from crates/katana/rpc/src/config.rs rename to crates/katana/rpc/rpc/src/config.rs index 8fe1fc58a1..fd8c0848b2 100644 --- a/crates/katana/rpc/src/config.rs +++ b/crates/katana/rpc/rpc/src/config.rs @@ -1,4 +1,4 @@ -use crate::api::ApiKind; +use katana_rpc_api::ApiKind; #[derive(Debug, Clone)] pub struct ServerConfig { diff --git a/crates/katana/rpc/src/katana.rs b/crates/katana/rpc/rpc/src/katana.rs similarity index 94% rename from crates/katana/rpc/src/katana.rs rename to crates/katana/rpc/rpc/src/katana.rs index 43be36fcec..ffcb1ca3f7 100644 --- a/crates/katana/rpc/src/katana.rs +++ b/crates/katana/rpc/rpc/src/katana.rs @@ -4,8 +4,8 @@ use jsonrpsee::core::{async_trait, Error}; use katana_core::accounts::Account; use katana_core::sequencer::KatanaSequencer; use katana_primitives::FieldElement; - -use crate::api::katana::{KatanaApiError, KatanaApiServer}; +use katana_rpc_api::katana::KatanaApiServer; +use katana_rpc_types::error::katana::KatanaApiError; pub struct KatanaApi { sequencer: Arc, diff --git a/crates/katana/rpc/src/lib.rs b/crates/katana/rpc/rpc/src/lib.rs similarity index 96% rename from crates/katana/rpc/src/lib.rs rename to crates/katana/rpc/rpc/src/lib.rs index 16135096ad..68383be7c6 100644 --- a/crates/katana/rpc/src/lib.rs +++ b/crates/katana/rpc/rpc/src/lib.rs @@ -1,4 +1,3 @@ -pub mod api; pub mod config; pub mod katana; pub mod starknet; @@ -8,7 +7,6 @@ use std::sync::Arc; use std::time::{Duration, Instant}; use anyhow::Result; -use api::ApiKind; use config::ServerConfig; use hyper::Method; use jsonrpsee::server::logger::{Logger, MethodKind, TransportProtocol}; @@ -18,10 +16,11 @@ use jsonrpsee::tracing::debug; use jsonrpsee::types::Params; use jsonrpsee::RpcModule; use katana_core::sequencer::KatanaSequencer; +use katana_rpc_api::katana::KatanaApiServer; +use katana_rpc_api::starknet::StarknetApiServer; +use katana_rpc_api::ApiKind; use tower_http::cors::{Any, CorsLayer}; -use crate::api::katana::KatanaApiServer; -use crate::api::starknet::StarknetApiServer; use crate::katana::KatanaApi; use crate::starknet::StarknetApi; diff --git a/crates/katana/rpc/src/starknet.rs b/crates/katana/rpc/rpc/src/starknet.rs similarity index 94% rename from crates/katana/rpc/src/starknet.rs rename to crates/katana/rpc/rpc/src/starknet.rs index 6c2f9e82bd..85b273176a 100644 --- a/crates/katana/rpc/src/starknet.rs +++ b/crates/katana/rpc/rpc/src/starknet.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use 
jsonrpsee::core::{async_trait, Error}; +use jsonrpsee::core::{async_trait, Error, RpcResult}; use katana_core::backend::contract::StarknetContract; use katana_core::sequencer::KatanaSequencer; use katana_executor::blockifier::utils::EntryPointCall; @@ -15,10 +15,12 @@ use katana_provider::traits::block::{BlockHashProvider, BlockIdReader, BlockNumb use katana_provider::traits::transaction::{ ReceiptProvider, TransactionProvider, TransactionStatusProvider, }; +use katana_rpc_api::starknet::StarknetApiServer; use katana_rpc_types::block::{ BlockHashAndNumber, MaybePendingBlockWithTxHashes, MaybePendingBlockWithTxs, PendingBlockWithTxHashes, PendingBlockWithTxs, }; +use katana_rpc_types::error::starknet::StarknetApiError; use katana_rpc_types::event::{EventFilterWithPage, EventsPage}; use katana_rpc_types::message::MsgFromL1; use katana_rpc_types::receipt::{MaybePendingTxReceipt, PendingTxReceipt}; @@ -32,8 +34,6 @@ use katana_rpc_types_builder::ReceiptBuilder; use katana_tasks::{BlockingTaskPool, TokioTaskSpawner}; use starknet::core::types::{BlockTag, TransactionExecutionStatus, TransactionStatus}; -use crate::api::starknet::{StarknetApiError, StarknetApiServer}; - #[derive(Clone)] pub struct StarknetApi { inner: Arc, @@ -71,7 +71,7 @@ impl StarknetApi { } #[async_trait] impl StarknetApiServer for StarknetApi { - async fn chain_id(&self) -> Result { + async fn chain_id(&self) -> RpcResult { Ok(FieldElement::from(self.inner.sequencer.chain_id()).into()) } @@ -79,7 +79,7 @@ impl StarknetApiServer for StarknetApi { &self, block_id: BlockIdOrTag, contract_address: FieldElement, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { let nonce = this .inner @@ -92,7 +92,7 @@ impl StarknetApiServer for StarknetApi { .await } - async fn block_number(&self) -> Result { + async fn block_number(&self) -> RpcResult { self.on_io_blocking_task(move |this| { let block_number = this.inner.sequencer.block_number().map_err(StarknetApiError::from)?; @@ -101,7 +101,7 @@ impl StarknetApiServer for StarknetApi { .await } - async fn transaction_by_hash(&self, transaction_hash: FieldElement) -> Result { + async fn transaction_by_hash(&self, transaction_hash: FieldElement) -> RpcResult { self.on_io_blocking_task(move |this| { let tx = this .inner @@ -114,7 +114,7 @@ impl StarknetApiServer for StarknetApi { .await } - async fn block_transaction_count(&self, block_id: BlockIdOrTag) -> Result { + async fn block_transaction_count(&self, block_id: BlockIdOrTag) -> RpcResult { self.on_io_blocking_task(move |this| { let count = this .inner @@ -131,7 +131,7 @@ impl StarknetApiServer for StarknetApi { &self, block_id: BlockIdOrTag, contract_address: FieldElement, - ) -> Result { + ) -> RpcResult { let class_hash = self .on_io_blocking_task(move |this| { this.inner @@ -144,7 +144,7 @@ impl StarknetApiServer for StarknetApi { self.class(block_id, class_hash).await } - async fn block_hash_and_number(&self) -> Result { + async fn block_hash_and_number(&self) -> RpcResult { let hash_and_num_pair = self .on_io_blocking_task(move |this| this.inner.sequencer.block_hash_and_number()) .await @@ -155,7 +155,7 @@ impl StarknetApiServer for StarknetApi { async fn block_with_tx_hashes( &self, block_id: BlockIdOrTag, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { let provider = this.inner.sequencer.backend.blockchain.provider(); @@ -209,7 +209,7 @@ impl StarknetApiServer for StarknetApi { &self, block_id: BlockIdOrTag, index: u64, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move 
|this| { // TEMP: have to handle pending tag independently for now let tx = if BlockIdOrTag::Tag(BlockTag::Pending) == block_id { @@ -236,10 +236,7 @@ impl StarknetApiServer for StarknetApi { .await } - async fn block_with_txs( - &self, - block_id: BlockIdOrTag, - ) -> Result { + async fn block_with_txs(&self, block_id: BlockIdOrTag) -> RpcResult { self.on_io_blocking_task(move |this| { let provider = this.inner.sequencer.backend.blockchain.provider(); @@ -290,7 +287,7 @@ impl StarknetApiServer for StarknetApi { .await } - async fn state_update(&self, block_id: BlockIdOrTag) -> Result { + async fn state_update(&self, block_id: BlockIdOrTag) -> RpcResult { self.on_io_blocking_task(move |this| { let provider = this.inner.sequencer.backend.blockchain.provider(); @@ -318,7 +315,7 @@ impl StarknetApiServer for StarknetApi { async fn transaction_receipt( &self, transaction_hash: FieldElement, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { let provider = this.inner.sequencer.backend.blockchain.provider(); let receipt = ReceiptBuilder::new(transaction_hash, provider) @@ -355,7 +352,7 @@ impl StarknetApiServer for StarknetApi { &self, block_id: BlockIdOrTag, contract_address: FieldElement, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { let hash = this .inner @@ -372,7 +369,7 @@ impl StarknetApiServer for StarknetApi { &self, block_id: BlockIdOrTag, class_hash: FieldElement, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { let class = this.inner.sequencer.class(block_id, class_hash).map_err(StarknetApiError::from)?; @@ -390,7 +387,7 @@ impl StarknetApiServer for StarknetApi { .await } - async fn events(&self, filter: EventFilterWithPage) -> Result { + async fn events(&self, filter: EventFilterWithPage) -> RpcResult { self.on_io_blocking_task(move |this| { let from_block = filter.event_filter.from_block.unwrap_or(BlockIdOrTag::Number(0)); let to_block = @@ -421,7 +418,7 @@ impl StarknetApiServer for StarknetApi { &self, request: FunctionCall, block_id: BlockIdOrTag, - ) -> Result, Error> { + ) -> RpcResult> { self.on_io_blocking_task(move |this| { let request = EntryPointCall { calldata: request.calldata, @@ -441,7 +438,7 @@ impl StarknetApiServer for StarknetApi { contract_address: FieldElement, key: FieldElement, block_id: BlockIdOrTag, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { let value = this .inner @@ -457,7 +454,7 @@ impl StarknetApiServer for StarknetApi { async fn add_deploy_account_transaction( &self, deploy_account_transaction: BroadcastedDeployAccountTx, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { if deploy_account_transaction.is_query { return Err(StarknetApiError::UnsupportedTransactionVersion.into()); @@ -482,7 +479,7 @@ impl StarknetApiServer for StarknetApi { &self, request: Vec, block_id: BlockIdOrTag, - ) -> Result, Error> { + ) -> RpcResult> { self.on_cpu_blocking_task(move |this| { let chain_id = this.inner.sequencer.chain_id(); @@ -527,7 +524,7 @@ impl StarknetApiServer for StarknetApi { &self, message: MsgFromL1, block_id: BlockIdOrTag, - ) -> Result { + ) -> RpcResult { self.on_cpu_blocking_task(move |this| { let chain_id = this.inner.sequencer.chain_id(); @@ -551,7 +548,7 @@ impl StarknetApiServer for StarknetApi { async fn add_declare_transaction( &self, declare_transaction: BroadcastedDeclareTx, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { if declare_transaction.is_query() { return 
Err(StarknetApiError::UnsupportedTransactionVersion.into()); @@ -586,7 +583,7 @@ impl StarknetApiServer for StarknetApi { async fn add_invoke_transaction( &self, invoke_transaction: BroadcastedInvokeTx, - ) -> Result { + ) -> RpcResult { self.on_io_blocking_task(move |this| { if invoke_transaction.is_query { return Err(StarknetApiError::UnsupportedTransactionVersion.into()); @@ -605,10 +602,7 @@ impl StarknetApiServer for StarknetApi { .await } - async fn transaction_status( - &self, - transaction_hash: TxHash, - ) -> Result { + async fn transaction_status(&self, transaction_hash: TxHash) -> RpcResult { self.on_io_blocking_task(move |this| { let provider = this.inner.sequencer.backend.blockchain.provider(); diff --git a/crates/katana/rpc/tests/starknet.rs b/crates/katana/rpc/rpc/tests/starknet.rs similarity index 100% rename from crates/katana/rpc/tests/starknet.rs rename to crates/katana/rpc/rpc/tests/starknet.rs diff --git a/crates/katana/rpc/tests/test_data/cairo0_contract.json b/crates/katana/rpc/rpc/tests/test_data/cairo0_contract.json similarity index 100% rename from crates/katana/rpc/tests/test_data/cairo0_contract.json rename to crates/katana/rpc/rpc/tests/test_data/cairo0_contract.json diff --git a/crates/katana/rpc/tests/test_data/cairo1_contract.json b/crates/katana/rpc/rpc/tests/test_data/cairo1_contract.json similarity index 100% rename from crates/katana/rpc/tests/test_data/cairo1_contract.json rename to crates/katana/rpc/rpc/tests/test_data/cairo1_contract.json diff --git a/crates/katana/rpc/src/api/katana.rs b/crates/katana/rpc/src/api/katana.rs deleted file mode 100644 index 4414494e92..0000000000 --- a/crates/katana/rpc/src/api/katana.rs +++ /dev/null @@ -1,49 +0,0 @@ -use jsonrpsee::core::Error; -use jsonrpsee::proc_macros::rpc; -use jsonrpsee::types::error::CallError; -use jsonrpsee::types::ErrorObject; -use katana_core::accounts::Account; -use starknet::core::types::FieldElement; - -#[derive(thiserror::Error, Clone, Copy, Debug)] -#[allow(clippy::enum_variant_names)] -pub enum KatanaApiError { - #[error("Failed to change next block timestamp.")] - FailedToChangeNextBlockTimestamp = 1, - #[error("Failed to dump state.")] - FailedToDumpState = 2, - #[error("Failed to update storage.")] - FailedToUpdateStorage = 3, -} - -impl From for Error { - fn from(err: KatanaApiError) -> Self { - Error::Call(CallError::Custom(ErrorObject::owned(err as i32, err.to_string(), None::<()>))) - } -} - -#[rpc(server, namespace = "katana")] -pub trait KatanaApi { - #[method(name = "generateBlock")] - async fn generate_block(&self) -> Result<(), Error>; - - #[method(name = "nextBlockTimestamp")] - async fn next_block_timestamp(&self) -> Result; - - #[method(name = "setNextBlockTimestamp")] - async fn set_next_block_timestamp(&self, timestamp: u64) -> Result<(), Error>; - - #[method(name = "increaseNextBlockTimestamp")] - async fn increase_next_block_timestamp(&self, timestamp: u64) -> Result<(), Error>; - - #[method(name = "predeployedAccounts")] - async fn predeployed_accounts(&self) -> Result, Error>; - - #[method(name = "setStorageAt")] - async fn set_storage_at( - &self, - contract_address: FieldElement, - key: FieldElement, - value: FieldElement, - ) -> Result<(), Error>; -} diff --git a/crates/katana/src/args.rs b/crates/katana/src/args.rs index aa5e345866..f8dc48623c 100644 --- a/crates/katana/src/args.rs +++ b/crates/katana/src/args.rs @@ -21,8 +21,8 @@ use katana_core::constants::{ }; use katana_core::sequencer::SequencerConfig; use katana_primitives::chain::ChainId; -use 
katana_rpc::api::ApiKind; use katana_rpc::config::ServerConfig; +use katana_rpc_api::ApiKind; use metrics::utils::parse_socket_address; use tracing::Subscriber; use tracing_subscriber::{fmt, EnvFilter}; From 3620d7294398e48d6c5d7c30fdd4cdbc4e108d98 Mon Sep 17 00:00:00 2001 From: Ammar Arif Date: Tue, 23 Jan 2024 14:51:31 +0800 Subject: [PATCH 31/33] Move binary crates into a dedicated dir (#1471) refactor --- Cargo.lock | 43 +++++++++++++++ Cargo.toml | 42 +++++++++++++-- .../dojo-language-server/Cargo.toml | 5 +- .../dojo-language-server/src/main.rs | 0 {crates => bin}/katana/Cargo.toml | 10 ++-- {crates => bin}/katana/src/args.rs | 0 {crates => bin}/katana/src/main.rs | 0 {crates => bin}/sozo/Cargo.toml | 12 ++--- {crates => bin}/sozo/README.md | 0 {crates => bin}/sozo/src/args.rs | 0 {crates => bin}/sozo/src/commands/auth.rs | 0 {crates => bin}/sozo/src/commands/build.rs | 0 .../sozo/src/commands/completions.rs | 0 {crates => bin}/sozo/src/commands/dev.rs | 0 {crates => bin}/sozo/src/commands/events.rs | 0 {crates => bin}/sozo/src/commands/execute.rs | 0 {crates => bin}/sozo/src/commands/init.rs | 0 {crates => bin}/sozo/src/commands/migrate.rs | 0 {crates => bin}/sozo/src/commands/mod.rs | 0 {crates => bin}/sozo/src/commands/model.rs | 0 .../sozo/src/commands/options/account.rs | 0 .../sozo/src/commands/options/mod.rs | 0 .../sozo/src/commands/options/starknet.rs | 0 .../sozo/src/commands/options/transaction.rs | 0 .../sozo/src/commands/options/world.rs | 0 {crates => bin}/sozo/src/commands/register.rs | 0 {crates => bin}/sozo/src/commands/test.rs | 0 {crates => bin}/sozo/src/lib.rs | 0 {crates => bin}/sozo/src/main.rs | 0 {crates => bin}/sozo/src/ops/auth.rs | 0 {crates => bin}/sozo/src/ops/events.rs | 0 {crates => bin}/sozo/src/ops/execute.rs | 0 .../sozo/src/ops/migration/migration_test.rs | 0 {crates => bin}/sozo/src/ops/migration/mod.rs | 0 {crates => bin}/sozo/src/ops/migration/ui.rs | 0 {crates => bin}/sozo/src/ops/mod.rs | 0 {crates => bin}/sozo/src/ops/model.rs | 0 {crates => bin}/sozo/src/ops/register.rs | 0 .../sozo/tests/fixtures/stdout/init.stdout | 0 .../stdout/wrong_cairo_version.stdout | 0 {crates => bin}/sozo/tests/test_build.rs | 0 .../invalid_cairo_version/Scarb.toml | 0 .../test_data/invalid_cairo_version/lib.cairo | 0 .../sozo/tests/test_data/keystore/test.json | 0 {crates => bin}/sozo/tests/test_init.rs | 0 {crates => bin}/sozo/tests/utils/mod.rs | 0 {crates => bin}/sozo/tests/utils/snapbox.rs | 0 {crates => bin}/sozo/tests/utils/stdout.rs | 0 bin/torii/Cargo.toml | 53 +++++++++++++++++++ bin/torii/README.md | 7 +++ .../src/cli.rs => bin/torii/src/main.rs | 7 +-- crates/benches/Cargo.toml | 2 +- crates/katana/README.md | 9 ---- crates/torii/core/Cargo.toml | 2 +- crates/torii/graphql/Cargo.toml | 2 +- crates/torii/server/Cargo.toml | 4 -- crates/torii/server/src/lib.rs | 1 + 57 files changed, 160 insertions(+), 39 deletions(-) rename {crates => bin}/dojo-language-server/Cargo.toml (86%) rename crates/dojo-language-server/src/bin/language_server.rs => bin/dojo-language-server/src/main.rs (100%) rename {crates => bin}/katana/Cargo.toml (79%) rename {crates => bin}/katana/src/args.rs (100%) rename {crates => bin}/katana/src/main.rs (100%) rename {crates => bin}/sozo/Cargo.toml (77%) rename {crates => bin}/sozo/README.md (100%) rename {crates => bin}/sozo/src/args.rs (100%) rename {crates => bin}/sozo/src/commands/auth.rs (100%) rename {crates => bin}/sozo/src/commands/build.rs (100%) rename {crates => bin}/sozo/src/commands/completions.rs (100%) rename {crates => 
bin}/sozo/src/commands/dev.rs (100%) rename {crates => bin}/sozo/src/commands/events.rs (100%) rename {crates => bin}/sozo/src/commands/execute.rs (100%) rename {crates => bin}/sozo/src/commands/init.rs (100%) rename {crates => bin}/sozo/src/commands/migrate.rs (100%) rename {crates => bin}/sozo/src/commands/mod.rs (100%) rename {crates => bin}/sozo/src/commands/model.rs (100%) rename {crates => bin}/sozo/src/commands/options/account.rs (100%) rename {crates => bin}/sozo/src/commands/options/mod.rs (100%) rename {crates => bin}/sozo/src/commands/options/starknet.rs (100%) rename {crates => bin}/sozo/src/commands/options/transaction.rs (100%) rename {crates => bin}/sozo/src/commands/options/world.rs (100%) rename {crates => bin}/sozo/src/commands/register.rs (100%) rename {crates => bin}/sozo/src/commands/test.rs (100%) rename {crates => bin}/sozo/src/lib.rs (100%) rename {crates => bin}/sozo/src/main.rs (100%) rename {crates => bin}/sozo/src/ops/auth.rs (100%) rename {crates => bin}/sozo/src/ops/events.rs (100%) rename {crates => bin}/sozo/src/ops/execute.rs (100%) rename {crates => bin}/sozo/src/ops/migration/migration_test.rs (100%) rename {crates => bin}/sozo/src/ops/migration/mod.rs (100%) rename {crates => bin}/sozo/src/ops/migration/ui.rs (100%) rename {crates => bin}/sozo/src/ops/mod.rs (100%) rename {crates => bin}/sozo/src/ops/model.rs (100%) rename {crates => bin}/sozo/src/ops/register.rs (100%) rename {crates => bin}/sozo/tests/fixtures/stdout/init.stdout (100%) rename {crates => bin}/sozo/tests/fixtures/stdout/wrong_cairo_version.stdout (100%) rename {crates => bin}/sozo/tests/test_build.rs (100%) rename {crates => bin}/sozo/tests/test_data/invalid_cairo_version/Scarb.toml (100%) rename {crates => bin}/sozo/tests/test_data/invalid_cairo_version/lib.cairo (100%) rename {crates => bin}/sozo/tests/test_data/keystore/test.json (100%) rename {crates => bin}/sozo/tests/test_init.rs (100%) rename {crates => bin}/sozo/tests/utils/mod.rs (100%) rename {crates => bin}/sozo/tests/utils/snapbox.rs (100%) rename {crates => bin}/sozo/tests/utils/stdout.rs (100%) create mode 100644 bin/torii/Cargo.toml create mode 100644 bin/torii/README.md rename crates/torii/server/src/cli.rs => bin/torii/src/main.rs (98%) delete mode 100644 crates/katana/README.md create mode 100644 crates/torii/server/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 745ad8d2ef..010d92135a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10027,6 +10027,49 @@ dependencies = [ "web-sys", ] +[[package]] +name = "torii" +version = "0.5.1-alpha.0" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.21.5", + "camino", + "chrono", + "clap", + "ctrlc", + "dojo-types", + "dojo-world", + "either", + "futures", + "http", + "http-body", + "hyper", + "hyper-reverse-proxy", + "indexmap 1.9.3", + "lazy_static", + "metrics 0.5.1-alpha.0", + "metrics-process", + "scarb", + "serde", + "serde_json", + "sqlx", + "starknet", + "starknet-crypto 0.6.1", + "tokio", + "tokio-stream", + "tokio-util", + "torii-core", + "torii-graphql", + "torii-grpc", + "torii-server", + "tower", + "tower-http", + "tracing", + "tracing-subscriber", + "url", +] + [[package]] name = "torii-client" version = "0.5.1-alpha.0" diff --git a/Cargo.toml b/Cargo.toml index 76576ea096..96cb127805 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,16 +2,18 @@ resolver = "2" members = [ + "bin/dojo-language-server", + "bin/katana", + "bin/sozo", + "bin/torii", "crates/benches", "crates/dojo-bindgen", "crates/dojo-core", "crates/dojo-lang", - "crates/dojo-language-server", 
"crates/dojo-test-utils", "crates/dojo-types", "crates/dojo-world", "crates/dojo-world/abigen", - "crates/katana", "crates/katana/core", "crates/katana/executor", "crates/katana/primitives", @@ -25,7 +27,6 @@ members = [ "crates/katana/storage/db", "crates/katana/storage/provider", "crates/metrics", - "crates/sozo", "crates/sozo/signers", "crates/torii/client", "crates/torii/server", @@ -47,6 +48,41 @@ inherits = "release" lto = "fat" [workspace.dependencies] +# metrics +metrics = { path = "crates/metrics" } + +# dojo-lang +dojo-bindgen = { path = "crates/dojo-bindgen" } +dojo-core = { path = "crates/dojo-core" } +dojo-lang = { path = "crates/dojo-lang" } +dojo-test-utils = { path = "crates/dojo-test-utils" } +dojo-types = { path = "crates/dojo-types" } +dojo-world = { path = "crates/dojo-world" } + +# katana +katana-codecs = { path = "crates/katana/storage/codecs" } +katana-codecs-derive = { path = "crates/katana/storage/codecs/derive" } +katana-core = { path = "crates/katana/core" } +katana-db = { path = "crates/katana/storage/db" } +katana-executor = { path = "crates/katana/executor" } +katana-primitives = { path = "crates/katana/primitives" } +katana-provider = { path = "crates/katana/storage/provider" } +katana-rpc = { path = "crates/katana/rpc/rpc" } +katana-rpc-api = { path = "crates/katana/rpc/rpc-api" } +katana-rpc-types = { path = "crates/katana/rpc/rpc-types" } +katana-rpc-types-builder = { path = "crates/katana/rpc/rpc-types-builder" } +katana-runner = { path = "crates/katana/runner" } + +# torii +torii-client = { path = "crates/torii/client" } +torii-core = { path = "crates/torii/core" } +torii-graphql = { path = "crates/torii/graphql" } +torii-grpc = { path = "crates/torii/grpc" } +torii-server = { path = "crates/torii/server" } + +# sozo +sozo-signers = { path = "crates/sozo/signers" } + anyhow = "1.0.75" assert_matches = "1.5.0" async-trait = "0.1.68" diff --git a/crates/dojo-language-server/Cargo.toml b/bin/dojo-language-server/Cargo.toml similarity index 86% rename from crates/dojo-language-server/Cargo.toml rename to bin/dojo-language-server/Cargo.toml index 5d742aa035..dbbb76b48e 100644 --- a/crates/dojo-language-server/Cargo.toml +++ b/bin/dojo-language-server/Cargo.toml @@ -4,9 +4,6 @@ name = "dojo-language-server" version.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[[bin]] -name = "dojo-language-server" -path = "src/bin/language_server.rs" [dependencies] anyhow.workspace = true @@ -19,7 +16,7 @@ cairo-lang-starknet.workspace = true cairo-lang-test-plugin.workspace = true cairo-lang-utils.workspace = true clap.workspace = true -dojo-lang = { path = "../dojo-lang" } +dojo-lang.workspace = true log = "0.4.14" salsa = "0.16.1" smol_str.workspace = true diff --git a/crates/dojo-language-server/src/bin/language_server.rs b/bin/dojo-language-server/src/main.rs similarity index 100% rename from crates/dojo-language-server/src/bin/language_server.rs rename to bin/dojo-language-server/src/main.rs diff --git a/crates/katana/Cargo.toml b/bin/katana/Cargo.toml similarity index 79% rename from crates/katana/Cargo.toml rename to bin/katana/Cargo.toml index 992199c119..5d53699e1f 100644 --- a/crates/katana/Cargo.toml +++ b/bin/katana/Cargo.toml @@ -10,12 +10,12 @@ version.workspace = true clap.workspace = true clap_complete.workspace = true console.workspace = true -katana-core = { path = "core" } -katana-primitives = { path = "primitives" } -katana-rpc = { path = "rpc/rpc" } -katana-rpc-api = { path = 
"rpc/rpc-api" } -metrics = { path = "../metrics" } +katana-core.workspace = true +katana-primitives.workspace = true +katana-rpc-api.workspace = true +katana-rpc.workspace = true metrics-process.workspace = true +metrics.workspace = true serde_json.workspace = true starknet_api.workspace = true tokio.workspace = true diff --git a/crates/katana/src/args.rs b/bin/katana/src/args.rs similarity index 100% rename from crates/katana/src/args.rs rename to bin/katana/src/args.rs diff --git a/crates/katana/src/main.rs b/bin/katana/src/main.rs similarity index 100% rename from crates/katana/src/main.rs rename to bin/katana/src/main.rs diff --git a/crates/sozo/Cargo.toml b/bin/sozo/Cargo.toml similarity index 77% rename from crates/sozo/Cargo.toml rename to bin/sozo/Cargo.toml index e5fce987c0..752f36e41f 100644 --- a/crates/sozo/Cargo.toml +++ b/bin/sozo/Cargo.toml @@ -24,10 +24,10 @@ clap-verbosity-flag = "2.0.1" clap.workspace = true clap_complete.workspace = true console.workspace = true -dojo-bindgen = { path = "../dojo-bindgen" } -dojo-lang = { path = "../dojo-lang" } -dojo-types = { path = "../dojo-types" } -dojo-world = { path = "../dojo-world", features = [ "contracts", "metadata", "migration" ] } +dojo-bindgen.workspace = true +dojo-lang.workspace = true +dojo-types.workspace = true +dojo-world = { workspace = true, features = [ "contracts", "metadata", "migration" ] } notify = "6.0.1" notify-debouncer-mini = "0.3.0" scarb-ui.workspace = true @@ -46,6 +46,6 @@ url.workspace = true [dev-dependencies] assert_fs = "1.0.10" -dojo-test-utils = { path = "../dojo-test-utils", features = [ "build-examples" ] } -katana-runner = { path = "../katana/runner" } +dojo-test-utils = { workspace = true, features = [ "build-examples" ] } +katana-runner.workspace = true snapbox = "0.4.6" diff --git a/crates/sozo/README.md b/bin/sozo/README.md similarity index 100% rename from crates/sozo/README.md rename to bin/sozo/README.md diff --git a/crates/sozo/src/args.rs b/bin/sozo/src/args.rs similarity index 100% rename from crates/sozo/src/args.rs rename to bin/sozo/src/args.rs diff --git a/crates/sozo/src/commands/auth.rs b/bin/sozo/src/commands/auth.rs similarity index 100% rename from crates/sozo/src/commands/auth.rs rename to bin/sozo/src/commands/auth.rs diff --git a/crates/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs similarity index 100% rename from crates/sozo/src/commands/build.rs rename to bin/sozo/src/commands/build.rs diff --git a/crates/sozo/src/commands/completions.rs b/bin/sozo/src/commands/completions.rs similarity index 100% rename from crates/sozo/src/commands/completions.rs rename to bin/sozo/src/commands/completions.rs diff --git a/crates/sozo/src/commands/dev.rs b/bin/sozo/src/commands/dev.rs similarity index 100% rename from crates/sozo/src/commands/dev.rs rename to bin/sozo/src/commands/dev.rs diff --git a/crates/sozo/src/commands/events.rs b/bin/sozo/src/commands/events.rs similarity index 100% rename from crates/sozo/src/commands/events.rs rename to bin/sozo/src/commands/events.rs diff --git a/crates/sozo/src/commands/execute.rs b/bin/sozo/src/commands/execute.rs similarity index 100% rename from crates/sozo/src/commands/execute.rs rename to bin/sozo/src/commands/execute.rs diff --git a/crates/sozo/src/commands/init.rs b/bin/sozo/src/commands/init.rs similarity index 100% rename from crates/sozo/src/commands/init.rs rename to bin/sozo/src/commands/init.rs diff --git a/crates/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs similarity index 100% rename from 
crates/sozo/src/commands/migrate.rs rename to bin/sozo/src/commands/migrate.rs diff --git a/crates/sozo/src/commands/mod.rs b/bin/sozo/src/commands/mod.rs similarity index 100% rename from crates/sozo/src/commands/mod.rs rename to bin/sozo/src/commands/mod.rs diff --git a/crates/sozo/src/commands/model.rs b/bin/sozo/src/commands/model.rs similarity index 100% rename from crates/sozo/src/commands/model.rs rename to bin/sozo/src/commands/model.rs diff --git a/crates/sozo/src/commands/options/account.rs b/bin/sozo/src/commands/options/account.rs similarity index 100% rename from crates/sozo/src/commands/options/account.rs rename to bin/sozo/src/commands/options/account.rs diff --git a/crates/sozo/src/commands/options/mod.rs b/bin/sozo/src/commands/options/mod.rs similarity index 100% rename from crates/sozo/src/commands/options/mod.rs rename to bin/sozo/src/commands/options/mod.rs diff --git a/crates/sozo/src/commands/options/starknet.rs b/bin/sozo/src/commands/options/starknet.rs similarity index 100% rename from crates/sozo/src/commands/options/starknet.rs rename to bin/sozo/src/commands/options/starknet.rs diff --git a/crates/sozo/src/commands/options/transaction.rs b/bin/sozo/src/commands/options/transaction.rs similarity index 100% rename from crates/sozo/src/commands/options/transaction.rs rename to bin/sozo/src/commands/options/transaction.rs diff --git a/crates/sozo/src/commands/options/world.rs b/bin/sozo/src/commands/options/world.rs similarity index 100% rename from crates/sozo/src/commands/options/world.rs rename to bin/sozo/src/commands/options/world.rs diff --git a/crates/sozo/src/commands/register.rs b/bin/sozo/src/commands/register.rs similarity index 100% rename from crates/sozo/src/commands/register.rs rename to bin/sozo/src/commands/register.rs diff --git a/crates/sozo/src/commands/test.rs b/bin/sozo/src/commands/test.rs similarity index 100% rename from crates/sozo/src/commands/test.rs rename to bin/sozo/src/commands/test.rs diff --git a/crates/sozo/src/lib.rs b/bin/sozo/src/lib.rs similarity index 100% rename from crates/sozo/src/lib.rs rename to bin/sozo/src/lib.rs diff --git a/crates/sozo/src/main.rs b/bin/sozo/src/main.rs similarity index 100% rename from crates/sozo/src/main.rs rename to bin/sozo/src/main.rs diff --git a/crates/sozo/src/ops/auth.rs b/bin/sozo/src/ops/auth.rs similarity index 100% rename from crates/sozo/src/ops/auth.rs rename to bin/sozo/src/ops/auth.rs diff --git a/crates/sozo/src/ops/events.rs b/bin/sozo/src/ops/events.rs similarity index 100% rename from crates/sozo/src/ops/events.rs rename to bin/sozo/src/ops/events.rs diff --git a/crates/sozo/src/ops/execute.rs b/bin/sozo/src/ops/execute.rs similarity index 100% rename from crates/sozo/src/ops/execute.rs rename to bin/sozo/src/ops/execute.rs diff --git a/crates/sozo/src/ops/migration/migration_test.rs b/bin/sozo/src/ops/migration/migration_test.rs similarity index 100% rename from crates/sozo/src/ops/migration/migration_test.rs rename to bin/sozo/src/ops/migration/migration_test.rs diff --git a/crates/sozo/src/ops/migration/mod.rs b/bin/sozo/src/ops/migration/mod.rs similarity index 100% rename from crates/sozo/src/ops/migration/mod.rs rename to bin/sozo/src/ops/migration/mod.rs diff --git a/crates/sozo/src/ops/migration/ui.rs b/bin/sozo/src/ops/migration/ui.rs similarity index 100% rename from crates/sozo/src/ops/migration/ui.rs rename to bin/sozo/src/ops/migration/ui.rs diff --git a/crates/sozo/src/ops/mod.rs b/bin/sozo/src/ops/mod.rs similarity index 100% rename from crates/sozo/src/ops/mod.rs 
rename to bin/sozo/src/ops/mod.rs diff --git a/crates/sozo/src/ops/model.rs b/bin/sozo/src/ops/model.rs similarity index 100% rename from crates/sozo/src/ops/model.rs rename to bin/sozo/src/ops/model.rs diff --git a/crates/sozo/src/ops/register.rs b/bin/sozo/src/ops/register.rs similarity index 100% rename from crates/sozo/src/ops/register.rs rename to bin/sozo/src/ops/register.rs diff --git a/crates/sozo/tests/fixtures/stdout/init.stdout b/bin/sozo/tests/fixtures/stdout/init.stdout similarity index 100% rename from crates/sozo/tests/fixtures/stdout/init.stdout rename to bin/sozo/tests/fixtures/stdout/init.stdout diff --git a/crates/sozo/tests/fixtures/stdout/wrong_cairo_version.stdout b/bin/sozo/tests/fixtures/stdout/wrong_cairo_version.stdout similarity index 100% rename from crates/sozo/tests/fixtures/stdout/wrong_cairo_version.stdout rename to bin/sozo/tests/fixtures/stdout/wrong_cairo_version.stdout diff --git a/crates/sozo/tests/test_build.rs b/bin/sozo/tests/test_build.rs similarity index 100% rename from crates/sozo/tests/test_build.rs rename to bin/sozo/tests/test_build.rs diff --git a/crates/sozo/tests/test_data/invalid_cairo_version/Scarb.toml b/bin/sozo/tests/test_data/invalid_cairo_version/Scarb.toml similarity index 100% rename from crates/sozo/tests/test_data/invalid_cairo_version/Scarb.toml rename to bin/sozo/tests/test_data/invalid_cairo_version/Scarb.toml diff --git a/crates/sozo/tests/test_data/invalid_cairo_version/lib.cairo b/bin/sozo/tests/test_data/invalid_cairo_version/lib.cairo similarity index 100% rename from crates/sozo/tests/test_data/invalid_cairo_version/lib.cairo rename to bin/sozo/tests/test_data/invalid_cairo_version/lib.cairo diff --git a/crates/sozo/tests/test_data/keystore/test.json b/bin/sozo/tests/test_data/keystore/test.json similarity index 100% rename from crates/sozo/tests/test_data/keystore/test.json rename to bin/sozo/tests/test_data/keystore/test.json diff --git a/crates/sozo/tests/test_init.rs b/bin/sozo/tests/test_init.rs similarity index 100% rename from crates/sozo/tests/test_init.rs rename to bin/sozo/tests/test_init.rs diff --git a/crates/sozo/tests/utils/mod.rs b/bin/sozo/tests/utils/mod.rs similarity index 100% rename from crates/sozo/tests/utils/mod.rs rename to bin/sozo/tests/utils/mod.rs diff --git a/crates/sozo/tests/utils/snapbox.rs b/bin/sozo/tests/utils/snapbox.rs similarity index 100% rename from crates/sozo/tests/utils/snapbox.rs rename to bin/sozo/tests/utils/snapbox.rs diff --git a/crates/sozo/tests/utils/stdout.rs b/bin/sozo/tests/utils/stdout.rs similarity index 100% rename from crates/sozo/tests/utils/stdout.rs rename to bin/sozo/tests/utils/stdout.rs diff --git a/bin/torii/Cargo.toml b/bin/torii/Cargo.toml new file mode 100644 index 0000000000..e003c4a635 --- /dev/null +++ b/bin/torii/Cargo.toml @@ -0,0 +1,53 @@ +[package] +edition.workspace = true +name = "torii" +version.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +base64 = "0.21.2" +camino.workspace = true +chrono.workspace = true +clap.workspace = true +ctrlc = { version = "3.4", features = [ "termination" ] } +dojo-types.workspace = true +dojo-world.workspace = true +either = "1.9.0" +futures.workspace = true +http = "0.2.9" +http-body = "0.4.5" +hyper-reverse-proxy = { git = "https://github.com/tarrencev/hyper-reverse-proxy" } +hyper.workspace = true +indexmap = "1.9.3" +lazy_static.workspace = true 
+metrics-process.workspace = true +metrics.workspace = true +scarb.workspace = true +serde.workspace = true +serde_json.workspace = true +sqlx.workspace = true +starknet-crypto.workspace = true +starknet.workspace = true +tokio-stream = "0.1.11" +tokio-util = "0.7.7" +tokio.workspace = true +torii-core.workspace = true +torii-graphql.workspace = true +torii-grpc = { workspace = true, features = [ "server" ] } +torii-server.workspace = true +tower = "0.4.13" +tower-http = "0.4.4" +tracing-subscriber.workspace = true +tracing.workspace = true +url.workspace = true + +[dev-dependencies] +camino.workspace = true + +[features] +default = [ "jemalloc", "sqlite" ] +jemalloc = [ "metrics/jemalloc" ] +sqlite = [ "sqlx/sqlite" ] diff --git a/bin/torii/README.md b/bin/torii/README.md new file mode 100644 index 0000000000..6a3ebc8bbe --- /dev/null +++ b/bin/torii/README.md @@ -0,0 +1,7 @@ +# `dojoup` + +```sh +curl -L https://install.dojoengine.org | bash +``` + +[Documentation](https://book.dojoengine.org/toolchain/torii/overview.html) \ No newline at end of file diff --git a/crates/torii/server/src/cli.rs b/bin/torii/src/main.rs similarity index 98% rename from crates/torii/server/src/cli.rs rename to bin/torii/src/main.rs index 04e99ac062..0bcee8f72d 100644 --- a/crates/torii/server/src/cli.rs +++ b/bin/torii/src/main.rs @@ -10,8 +10,6 @@ //! documentation for usage details. This is **not recommended on Windows**. See [here](https://rust-lang.github.io/rfcs/1974-global-allocators.html#jemalloc) //! for more info. -mod proxy; - use std::net::SocketAddr; use std::str::FromStr; use std::sync::Arc; @@ -36,12 +34,11 @@ use torii_core::processors::store_transaction::StoreTransactionProcessor; use torii_core::simple_broker::SimpleBroker; use torii_core::sql::Sql; use torii_core::types::Model; +use torii_server::proxy::Proxy; use tracing::info; use tracing_subscriber::{fmt, EnvFilter}; use url::Url; -use crate::proxy::Proxy; - /// Dojo World Indexer #[derive(Parser, Debug)] #[command(name = "torii", author, version, about, long_about = None)] @@ -115,7 +112,7 @@ async fn main() -> anyhow::Result<()> { .connect_with(options) .await?; - sqlx::migrate!("../migrations").run(&pool).await?; + sqlx::migrate!("../../crates/torii/migrations").run(&pool).await?; let provider: Arc<_> = JsonRpcClient::new(HttpTransport::new(format!("http://{}", args.rpc).parse::()?)) diff --git a/crates/benches/Cargo.toml b/crates/benches/Cargo.toml index 4a8720e2bd..a318701d9f 100644 --- a/crates/benches/Cargo.toml +++ b/crates/benches/Cargo.toml @@ -14,6 +14,6 @@ anyhow.workspace = true futures.workspace = true hex.workspace = true lazy_static.workspace = true -sozo = { path = "../sozo" } +sozo = { path = "../../bin/sozo" } starknet.workspace = true tokio.workspace = true diff --git a/crates/katana/README.md b/crates/katana/README.md deleted file mode 100644 index 336dd917e5..0000000000 --- a/crates/katana/README.md +++ /dev/null @@ -1,9 +0,0 @@ -![katana](../../.github/katana-mark.svg) - -`katana` is a _blazingly fast_ local Starknet node, designed to support local development with Dojo. 
- -[Documentation](https://book.dojoengine.org/toolchain/katana/overview.html) - -## Features - -- [Starknet JSON-RPC v0.3.0](https://github.com/starkware-libs/starknet-specs/tree/v0.3.0) support
diff --git a/crates/torii/core/Cargo.toml b/crates/torii/core/Cargo.toml index 9ad2757856..8baed226c5 100644 --- a/crates/torii/core/Cargo.toml +++ b/crates/torii/core/Cargo.toml @@ -40,4 +40,4 @@ tracing.workspace = true camino.workspace = true dojo-test-utils = { path = "../../dojo-test-utils" } scarb.workspace = true -sozo = { path = "../../sozo" } +sozo = { path = "../../../bin/sozo" }
diff --git a/crates/torii/graphql/Cargo.toml b/crates/torii/graphql/Cargo.toml index 2b7854c59a..fed37b5f4a 100644 --- a/crates/torii/graphql/Cargo.toml +++ b/crates/torii/graphql/Cargo.toml @@ -41,6 +41,6 @@ dojo-test-utils = { path = "../../dojo-test-utils", features = [ "build-examples dojo-world = { path = "../../dojo-world" } scarb.workspace = true serial_test = "2.0.0" -sozo = { path = "../../sozo" } +sozo = { path = "../../../bin/sozo" } starknet-crypto.workspace = true starknet.workspace = true
diff --git a/crates/torii/server/Cargo.toml b/crates/torii/server/Cargo.toml index 2f88105b32..314bfc6bfb 100644 --- a/crates/torii/server/Cargo.toml +++ b/crates/torii/server/Cargo.toml @@ -50,7 +50,3 @@ camino.workspace = true default = [ "jemalloc", "sqlite" ] jemalloc = [ "metrics/jemalloc" ] sqlite = [ "sqlx/sqlite" ] - -[[bin]] -name = "torii" -path = "src/cli.rs"
diff --git a/crates/torii/server/src/lib.rs b/crates/torii/server/src/lib.rs new file mode 100644 index 0000000000..44dcc92d61 --- /dev/null +++ b/crates/torii/server/src/lib.rs @@ -0,0 +1 @@ +pub mod proxy;
From 25fbb7fc973cff4ce1273625c4664545d9b088e9 Mon Sep 17 00:00:00 2001
From: lambda-0x <0xlambda@protonmail.com>
Date: Tue, 23 Jan 2024 23:12:17 +0530
Subject: [PATCH 32/33] dev(sozo): improve test coverage and a few other changes (#1462)

* add tests for options/account.rs and some other minor improvements
* add tests for commands/build.rs
* add tests for commands/events.rs
* fix formatting
* fix clippy
* update comments
* fix build after rebase
* enable test for BuildArgs with typescript and unity bindings
* WorldOption already falls back to reading from Scarb.toml
* rename plan to dry_run and revert name related changes
* add DOJO_KEYSTORE_PATH env variable
* fix formatting
---
 bin/sozo/src/commands/auth.rs | 1 -
 bin/sozo/src/commands/build.rs | 16 +
 bin/sozo/src/commands/events.rs | 10 +-
 bin/sozo/src/commands/execute.rs | 1 -
 bin/sozo/src/commands/migrate.rs | 4 +-
 bin/sozo/src/commands/model.rs | 1 -
 bin/sozo/src/commands/options/account.rs | 37 +-
 bin/sozo/src/commands/options/mod.rs | 2 +
 bin/sozo/src/commands/options/world.rs | 76 +-
 bin/sozo/src/commands/register.rs | 1 -
 bin/sozo/tests/test_data/manifest.json | 1572 ++++++++++++++++++++++
 11 files changed, 1694 insertions(+), 27 deletions(-)
 create mode 100644 bin/sozo/tests/test_data/manifest.json
diff --git a/bin/sozo/src/commands/auth.rs b/bin/sozo/src/commands/auth.rs index b6584a840b..76b45137c7 100644 --- a/bin/sozo/src/commands/auth.rs +++ b/bin/sozo/src/commands/auth.rs @@ -45,7 +45,6 @@ impl AuthArgs { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - // TODO: Check the updated scarb way to read profile specific values dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) } else { None
diff --git a/bin/sozo/src/commands/build.rs b/bin/sozo/src/commands/build.rs index 31107a2b2b..dcd3b147ae 100644 --- a/bin/sozo/src/commands/build.rs +++ b/bin/sozo/src/commands/build.rs @@ -47,3 +47,19 @@ impl BuildArgs { Ok(()) } } + +#[cfg(test)] +mod tests { + use dojo_test_utils::compiler::build_test_config; + + use super::BuildArgs; + + #[test] + fn build_example_with_typescript_and_unity_bindings() { + let config = build_test_config("../../examples/spawn-and-move/Scarb.toml").unwrap(); + + let build_args = BuildArgs { typescript: true, unity: true }; + let result = build_args.run(&config); + assert!(result.is_ok()); + } +}
diff --git a/bin/sozo/src/commands/events.rs b/bin/sozo/src/commands/events.rs index 7916a66d6a..ca9edd556d 100644 --- a/bin/sozo/src/commands/events.rs +++ b/bin/sozo/src/commands/events.rs @@ -64,7 +64,6 @@ impl EventsArgs { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - // TODO: Check the updated scarb way to read profile specific values dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) } else { None @@ -132,4 +131,13 @@ mod test { assert!(arg.to_block.is_none()); assert!(arg.chunk_size == 1); } + + #[test] + fn extract_events_work_as_expected() { + let manifest = Manifest::load_from_path("./tests/test_data/manifest.json").unwrap(); + let result = extract_events(&manifest); + + // we are just collecting all events from the manifest file, so just verifying the count should work + assert!(result.len() == 13); + } }
diff --git a/bin/sozo/src/commands/execute.rs b/bin/sozo/src/commands/execute.rs index 6eeeada678..0a494b1333 100644 --- a/bin/sozo/src/commands/execute.rs +++ b/bin/sozo/src/commands/execute.rs @@ -40,7 +40,6 @@ impl ExecuteArgs { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - // TODO: Check the updated scarb way to read profile specific values dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) } else { None
diff --git a/bin/sozo/src/commands/migrate.rs b/bin/sozo/src/commands/migrate.rs index 7664d5bb8d..b34f000da0 100644 --- a/bin/sozo/src/commands/migrate.rs +++ b/bin/sozo/src/commands/migrate.rs @@ -14,7 +14,7 @@ use crate::ops::migration; pub struct MigrateArgs { #[arg(short, long)] #[arg(help = "Perform a dry run and outputs the plan to be executed.")] - pub plan: bool, + pub dry_run: bool, #[arg(long)] #[arg(help = "Name of the World.")] @@ -56,8 +56,6 @@ impl MigrateArgs { )?; } - // TODO: Check the updated scarb way to read profile specific values - ws.config().tokio_handle().block_on(migration::execute(&ws, self, target_dir))?; Ok(())
diff --git a/bin/sozo/src/commands/model.rs b/bin/sozo/src/commands/model.rs index c6aa3ad3fa..2056ae9157 100644 --- a/bin/sozo/src/commands/model.rs +++ b/bin/sozo/src/commands/model.rs @@ -67,7 +67,6 @@ impl ModelArgs { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - // TODO: Check the updated scarb way to read profile specific values dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) } else { None
diff --git a/bin/sozo/src/commands/options/account.rs b/bin/sozo/src/commands/options/account.rs index 52b41c4a93..07e1877842 100644 --- a/bin/sozo/src/commands/options/account.rs +++ b/bin/sozo/src/commands/options/account.rs @@ -9,7 +9,8 @@ use starknet::providers::Provider; use starknet::signers::{LocalWallet, SigningKey}; use super::{ - DOJO_ACCOUNT_ADDRESS_ENV_VAR, DOJO_KEYSTORE_PASSWORD_ENV_VAR,
DOJO_PRIVATE_KEY_ENV_VAR, + DOJO_ACCOUNT_ADDRESS_ENV_VAR, DOJO_KEYSTORE_PASSWORD_ENV_VAR, DOJO_KEYSTORE_PATH_ENV_VAR, + DOJO_PRIVATE_KEY_ENV_VAR, }; #[derive(Debug, Args)] @@ -29,7 +30,7 @@ pub struct AccountOptions { #[arg(help = "The raw private key associated with the account contract.")] pub private_key: Option, - #[arg(long = "keystore")] + #[arg(long = "keystore", env = DOJO_KEYSTORE_PATH_ENV_VAR)] #[arg(value_name = "PATH")] #[arg(help_heading = "Signer options - KEYSTORE")] #[arg(help = "Use the keystore in the given folder or file.")] @@ -160,11 +161,9 @@ mod tests { #[test] fn account_address_from_args() { - let env_metadata = dojo_world::metadata::Environment::default(); - let cmd = Command::parse_from(["sozo", "--account-address", "0x0"]); assert_eq!( - cmd.account.account_address(Some(&env_metadata)).unwrap(), + cmd.account.account_address(None).unwrap(), FieldElement::from_hex_be("0x0").unwrap() ); } @@ -199,20 +198,17 @@ mod tests { #[test] fn account_address_from_neither() { - let env_metadata = dojo_world::metadata::Environment::default(); - let cmd = Command::parse_from([""]); - assert!(cmd.account.account_address(Some(&env_metadata)).is_err()); + assert!(cmd.account.account_address(None).is_err()); } #[tokio::test] async fn private_key_from_args() { - let env_metadata = dojo_world::metadata::Environment::default(); let private_key = "0x1"; let cmd = Command::parse_from(["sozo", "--account-address", "0x0", "--private-key", private_key]); - let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); + let result_wallet = cmd.account.signer(None).unwrap(); let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( FieldElement::from_str(private_key).unwrap(), )); @@ -246,7 +242,6 @@ mod tests { let keystore_path = "./tests/test_data/keystore/test.json"; let keystore_password = "dojoftw"; let private_key = "0x1"; - let env_metadata = dojo_world::metadata::Environment::default(); let cmd = Command::parse_from([ "sozo", @@ -255,7 +250,7 @@ mod tests { "--password", keystore_password, ]); - let result_wallet = cmd.account.signer(Some(&env_metadata)).unwrap(); + let result_wallet = cmd.account.signer(None).unwrap(); let expected_wallet = LocalWallet::from_signing_key(SigningKey::from_secret_scalar( FieldElement::from_str(private_key).unwrap(), )); @@ -373,4 +368,22 @@ mod tests { // 0x2 is the Calldata len. 
assert!(*result.get(3).unwrap() == FieldElement::from_hex_be("0x2").unwrap()); } + + #[test] + fn keystore_path_without_keystore_password() { + let keystore_path = "./tests/test_data/keystore/test.json"; + + let cmd = Command::parse_from(["sozo", "--keystore", keystore_path]); + let result = cmd.account.signer(None); + + assert!(result.is_err()); + } + + #[test] + fn signer_without_pk_or_keystore() { + let cmd = Command::parse_from(["sozo"]); + let result = cmd.account.signer(None); + + assert!(result.is_err()); + } } diff --git a/bin/sozo/src/commands/options/mod.rs b/bin/sozo/src/commands/options/mod.rs index 40ec922b9f..0bd599bcc1 100644 --- a/bin/sozo/src/commands/options/mod.rs +++ b/bin/sozo/src/commands/options/mod.rs @@ -5,5 +5,7 @@ pub mod world; const STARKNET_RPC_URL_ENV_VAR: &str = "STARKNET_RPC_URL"; const DOJO_PRIVATE_KEY_ENV_VAR: &str = "DOJO_PRIVATE_KEY"; +const DOJO_KEYSTORE_PATH_ENV_VAR: &str = "DOJO_KEYSTORE_PATH"; const DOJO_KEYSTORE_PASSWORD_ENV_VAR: &str = "DOJO_KEYSTORE_PASSWORD"; const DOJO_ACCOUNT_ADDRESS_ENV_VAR: &str = "DOJO_ACCOUNT_ADDRESS"; +const DOJO_WORLD_ADDRESS_ENV_VAR: &str = "DOJO_WORLD_ADDRESS"; diff --git a/bin/sozo/src/commands/options/world.rs b/bin/sozo/src/commands/options/world.rs index cc511ce6f3..7a54617e7a 100644 --- a/bin/sozo/src/commands/options/world.rs +++ b/bin/sozo/src/commands/options/world.rs @@ -5,11 +5,13 @@ use clap::Args; use dojo_world::metadata::Environment; use starknet::core::types::FieldElement; +use super::DOJO_WORLD_ADDRESS_ENV_VAR; + #[derive(Debug, Args)] #[command(next_help_heading = "World options")] pub struct WorldOptions { - #[arg(long = "world")] #[arg(help = "The address of the World contract.")] + #[arg(long = "world", env = DOJO_WORLD_ADDRESS_ENV_VAR)] pub world_address: Option, } @@ -17,16 +19,76 @@ impl WorldOptions { pub fn address(&self, env_metadata: Option<&Environment>) -> Result { if let Some(world_address) = self.world_address { Ok(world_address) - } else if let Some(world_address) = env_metadata - .and_then(|env| env.world_address()) - .or(std::env::var("DOJO_WORLD_ADDRESS").ok().as_deref()) - { + } else if let Some(world_address) = env_metadata.and_then(|env| env.world_address()) { Ok(FieldElement::from_str(world_address)?) } else { Err(anyhow!( - "Could not find World address. Please specify it with --world or in the world \ - config." + "Could not find World address. Please specify it with --world, environment \ + variable or in the world config." 
)) } } } + +#[cfg(test)] +mod tests { + + use clap::Parser; + use starknet_crypto::FieldElement; + + use super::{WorldOptions, DOJO_WORLD_ADDRESS_ENV_VAR}; + + #[derive(clap::Parser, Debug)] + struct Command { + #[clap(flatten)] + pub inner: WorldOptions, + } + #[test] + + fn world_address_read_from_env_variable() { + std::env::set_var(DOJO_WORLD_ADDRESS_ENV_VAR, "0x0"); + + let cmd = Command::parse_from([""]); + assert_eq!(cmd.inner.world_address, Some(FieldElement::from_hex_be("0x0").unwrap())); + } + + #[test] + fn world_address_from_args() { + let cmd = Command::parse_from(["sozo", "--world", "0x0"]); + assert_eq!(cmd.inner.address(None).unwrap(), FieldElement::from_hex_be("0x0").unwrap()); + } + + #[test] + fn world_address_from_env_metadata() { + let env_metadata = dojo_world::metadata::Environment { + world_address: Some("0x0".to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from([""]); + assert_eq!( + cmd.inner.address(Some(&env_metadata)).unwrap(), + FieldElement::from_hex_be("0x0").unwrap() + ); + } + + #[test] + fn world_address_from_both() { + let env_metadata = dojo_world::metadata::Environment { + world_address: Some("0x0".to_owned()), + ..Default::default() + }; + + let cmd = Command::parse_from(["sozo", "--world", "0x1"]); + assert_eq!( + cmd.inner.address(Some(&env_metadata)).unwrap(), + FieldElement::from_hex_be("0x1").unwrap() + ); + } + + #[test] + fn world_address_from_neither() { + let cmd = Command::parse_from([""]); + assert!(cmd.inner.address(None).is_err()); + } +} diff --git a/bin/sozo/src/commands/register.rs b/bin/sozo/src/commands/register.rs index 6ff53cdfd6..a1df25a833 100644 --- a/bin/sozo/src/commands/register.rs +++ b/bin/sozo/src/commands/register.rs @@ -45,7 +45,6 @@ impl RegisterArgs { let env_metadata = if config.manifest_path().exists() { let ws = scarb::ops::read_workspace(config.manifest_path(), config)?; - // TODO: Check the updated scarb way to read profile specific values dojo_metadata_from_workspace(&ws).and_then(|inner| inner.env().cloned()) } else { None diff --git a/bin/sozo/tests/test_data/manifest.json b/bin/sozo/tests/test_data/manifest.json new file mode 100644 index 0000000000..d6ec585683 --- /dev/null +++ b/bin/sozo/tests/test_data/manifest.json @@ -0,0 +1,1572 @@ +{ + "world": { + "name": "dojo::world::world", + "address": null, + "class_hash": "0x5ac623f0c96059936bd2d0904bdd31799e430fe08a0caff7a5f497260b16497", + "abi": [ + { + "type": "impl", + "name": "World", + "interface_name": "dojo::world::IWorld" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "enum", + "name": "core::option::Option::", + "variants": [ + { + "name": "Some", + "type": "core::felt252" + }, + { + "name": "None", + "type": "()" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "enum", + "name": "core::bool", + "variants": [ + { + "name": "False", + "type": "()" + }, + { + "name": "True", + "type": "()" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorld", + "items": [ + { + "type": "function", + "name": "metadata_uri", + "inputs": [ + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } 
+ ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_metadata_uri", + "inputs": [ + { + "name": "resource", + "type": "core::felt252" + }, + { + "name": "uri", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "model", + "inputs": [ + { + "name": "name", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "register_model", + "inputs": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "deploy_contract", + "inputs": [ + { + "name": "salt", + "type": "core::felt252" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "upgrade_contract", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "uuid", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "external" + }, + { + "type": "function", + "name": "emit", + "inputs": [ + { + "name": "keys", + "type": "core::array::Array::" + }, + { + "name": "values", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "view" + }, + { + "type": "function", + "name": "entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "offset", + "type": "core::integer::u8" + }, + { + "name": "length", + "type": "core::integer::u32" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "offset", + "type": "core::integer::u8" + }, + { + "name": "values", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "entities", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "index", + "type": "core::option::Option::" + }, + { + "name": "values", + "type": "core::array::Span::" + }, + { + "name": "values_length", + "type": "core::integer::u32" + }, + { + "name": "values_layout", + "type": "core::array::Span::" + } + ], + "outputs": [ + { + "type": "(core::array::Span::, core::array::Span::>)" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "entity_ids", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "set_executor", + "inputs": [ + { + "name": "contract_address", + "type": 
"core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "executor", + "inputs": [], + "outputs": [ + { + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "base", + "inputs": [], + "outputs": [ + { + "type": "core::starknet::class_hash::ClassHash" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "delete_entity", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "keys", + "type": "core::array::Span::" + }, + { + "name": "layout", + "type": "core::array::Span::" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_owner", + "inputs": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "resource", + "type": "core::felt252" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "is_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [ + { + "type": "core::bool" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "grant_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + }, + { + "type": "function", + "name": "revoke_writer", + "inputs": [ + { + "name": "model", + "type": "core::felt252" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "impl", + "name": "UpgradeableWorld", + "interface_name": "dojo::world::IUpgradeableWorld" + }, + { + "type": "interface", + "name": "dojo::world::IUpgradeableWorld", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "constructor", + "name": "constructor", + "inputs": [ + { + "name": "executor", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "contract_base", + "type": "core::starknet::class_hash::ClassHash" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldSpawned", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "creator", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractDeployed", + "kind": 
"struct", + "members": [ + { + "name": "salt", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ContractUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WorldUpgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::MetadataUpdate", + "kind": "struct", + "members": [ + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "uri", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ModelRegistered", + "kind": "struct", + "members": [ + { + "name": "name", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + }, + { + "name": "prev_class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreSetRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + }, + { + "name": "offset", + "type": "core::integer::u8", + "kind": "data" + }, + { + "name": "values", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::StoreDelRecord", + "kind": "struct", + "members": [ + { + "name": "table", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "keys", + "type": "core::array::Span::", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::WriterUpdated", + "kind": "struct", + "members": [ + { + "name": "model", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "system", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::OwnerUpdated", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "resource", + "type": "core::felt252", + "kind": "data" + }, + { + "name": "value", + "type": "core::bool", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::ExecutorUpdated", + "kind": "struct", + "members": [ + { + "name": "address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "prev_address", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::world::world::Event", + "kind": "enum", + "variants": [ + { + "name": "WorldSpawned", + "type": "dojo::world::world::WorldSpawned", + "kind": "nested" + }, + { + "name": "ContractDeployed", + "type": "dojo::world::world::ContractDeployed", + "kind": 
"nested" + }, + { + "name": "ContractUpgraded", + "type": "dojo::world::world::ContractUpgraded", + "kind": "nested" + }, + { + "name": "WorldUpgraded", + "type": "dojo::world::world::WorldUpgraded", + "kind": "nested" + }, + { + "name": "MetadataUpdate", + "type": "dojo::world::world::MetadataUpdate", + "kind": "nested" + }, + { + "name": "ModelRegistered", + "type": "dojo::world::world::ModelRegistered", + "kind": "nested" + }, + { + "name": "StoreSetRecord", + "type": "dojo::world::world::StoreSetRecord", + "kind": "nested" + }, + { + "name": "StoreDelRecord", + "type": "dojo::world::world::StoreDelRecord", + "kind": "nested" + }, + { + "name": "WriterUpdated", + "type": "dojo::world::world::WriterUpdated", + "kind": "nested" + }, + { + "name": "OwnerUpdated", + "type": "dojo::world::world::OwnerUpdated", + "kind": "nested" + }, + { + "name": "ExecutorUpdated", + "type": "dojo::world::world::ExecutorUpdated", + "kind": "nested" + } + ] + } + ], + "reads": [], + "writes": [], + "computed": [] + }, + "executor": { + "name": "dojo::executor::executor", + "address": null, + "class_hash": "0x585507fa2818fe78e66da6ea4c5915376739f4abf509d41153f60a16cb1f68d", + "abi": [ + { + "type": "impl", + "name": "Executor", + "interface_name": "dojo::executor::IExecutor" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "interface", + "name": "dojo::executor::IExecutor", + "items": [ + { + "type": "function", + "name": "call", + "inputs": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash" + }, + { + "name": "entrypoint", + "type": "core::felt252" + }, + { + "name": "calldata", + "type": "core::array::Span::" + } + ], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "event", + "name": "dojo::executor::executor::Event", + "kind": "enum", + "variants": [] + } + ], + "reads": [], + "writes": [], + "computed": [] + }, + "base": { + "name": "dojo::base::base", + "class_hash": "0x6c458453d35753703ad25632deec20a29faf8531942ec109e6eb0650316a2bc", + "abi": [ + { + "type": "impl", + "name": "WorldProviderImpl", + "interface_name": "dojo::world::IWorldProvider" + }, + { + "type": "struct", + "name": "dojo::world::IWorldDispatcher", + "members": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorldProvider", + "items": [ + { + "type": "function", + "name": "world", + "inputs": [], + "outputs": [ + { + "type": "dojo::world::IWorldDispatcher" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "UpgradableImpl", + "interface_name": "dojo::components::upgradeable::IUpgradeable" + }, + { + "type": "interface", + "name": "dojo::components::upgradeable::IUpgradeable", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "constructor", + "name": "constructor", + "inputs": [] + }, + { + "type": "event", + "name": "dojo::components::upgradeable::upgradeable::Upgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": 
"dojo::components::upgradeable::upgradeable::Event", + "kind": "enum", + "variants": [ + { + "name": "Upgraded", + "type": "dojo::components::upgradeable::upgradeable::Upgraded", + "kind": "nested" + } + ] + }, + { + "type": "event", + "name": "dojo::base::base::Event", + "kind": "enum", + "variants": [ + { + "name": "UpgradeableEvent", + "type": "dojo::components::upgradeable::upgradeable::Event", + "kind": "nested" + } + ] + } + ] + }, + "contracts": [ + { + "name": "dojo_examples::actions::actions", + "address": null, + "class_hash": "0x69c6bec7de74fc2404fe6b68ad8ece7be81ad6d861b38a8ba8fa583bfc3666b", + "abi": [ + { + "type": "impl", + "name": "WorldProviderImpl", + "interface_name": "dojo::world::IWorldProvider" + }, + { + "type": "struct", + "name": "dojo::world::IWorldDispatcher", + "members": [ + { + "name": "contract_address", + "type": "core::starknet::contract_address::ContractAddress" + } + ] + }, + { + "type": "interface", + "name": "dojo::world::IWorldProvider", + "items": [ + { + "type": "function", + "name": "world", + "inputs": [], + "outputs": [ + { + "type": "dojo::world::IWorldDispatcher" + } + ], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "ActionsImpl", + "interface_name": "dojo_examples::actions::IActions" + }, + { + "type": "enum", + "name": "dojo_examples::models::Direction", + "variants": [ + { + "name": "None", + "type": "()" + }, + { + "name": "Left", + "type": "()" + }, + { + "name": "Right", + "type": "()" + }, + { + "name": "Up", + "type": "()" + }, + { + "name": "Down", + "type": "()" + } + ] + }, + { + "type": "interface", + "name": "dojo_examples::actions::IActions", + "items": [ + { + "type": "function", + "name": "spawn", + "inputs": [], + "outputs": [], + "state_mutability": "view" + }, + { + "type": "function", + "name": "move", + "inputs": [ + { + "name": "direction", + "type": "dojo_examples::models::Direction" + } + ], + "outputs": [], + "state_mutability": "view" + } + ] + }, + { + "type": "impl", + "name": "UpgradableImpl", + "interface_name": "dojo::components::upgradeable::IUpgradeable" + }, + { + "type": "interface", + "name": "dojo::components::upgradeable::IUpgradeable", + "items": [ + { + "type": "function", + "name": "upgrade", + "inputs": [ + { + "name": "new_class_hash", + "type": "core::starknet::class_hash::ClassHash" + } + ], + "outputs": [], + "state_mutability": "external" + } + ] + }, + { + "type": "function", + "name": "dojo_resource", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "dojo_examples::models::Vec2", + "members": [ + { + "name": "x", + "type": "core::integer::u32" + }, + { + "name": "y", + "type": "core::integer::u32" + } + ] + }, + { + "type": "function", + "name": "tile_terrain", + "inputs": [ + { + "name": "vec", + "type": "dojo_examples::models::Vec2" + } + ], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "dojo_examples::models::Position", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress" + }, + { + "name": "vec", + "type": "dojo_examples::models::Vec2" + } + ] + }, + { + "type": "function", + "name": "quadrant", + "inputs": [ + { + "name": "pos", + "type": "dojo_examples::models::Position" + } + ], + "outputs": [ + { + "type": "core::integer::u8" + } + ], + "state_mutability": "view" + }, + { + "type": "event", + "name": 
"dojo::components::upgradeable::upgradeable::Upgraded", + "kind": "struct", + "members": [ + { + "name": "class_hash", + "type": "core::starknet::class_hash::ClassHash", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo::components::upgradeable::upgradeable::Event", + "kind": "enum", + "variants": [ + { + "name": "Upgraded", + "type": "dojo::components::upgradeable::upgradeable::Upgraded", + "kind": "nested" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::actions::actions::Moved", + "kind": "struct", + "members": [ + { + "name": "player", + "type": "core::starknet::contract_address::ContractAddress", + "kind": "data" + }, + { + "name": "direction", + "type": "dojo_examples::models::Direction", + "kind": "data" + } + ] + }, + { + "type": "event", + "name": "dojo_examples::actions::actions::Event", + "kind": "enum", + "variants": [ + { + "name": "UpgradeableEvent", + "type": "dojo::components::upgradeable::upgradeable::Event", + "kind": "nested" + }, + { + "name": "Moved", + "type": "dojo_examples::actions::actions::Moved", + "kind": "nested" + } + ] + } + ], + "reads": [], + "writes": [], + "computed": [ + { + "contract": "dojo_examples::actions::actions", + "entrypoint": "tile_terrain", + "model": null + }, + { + "contract": "dojo_examples::actions::actions", + "entrypoint": "quadrant", + "model": "Position" + } + ] + } + ], + "models": [ + { + "name": "dojo_examples::models::moves", + "members": [ + { + "name": "player", + "type": "ContractAddress", + "key": true + }, + { + "name": "remaining", + "type": "u8", + "key": false + }, + { + "name": "last_direction", + "type": "Direction", + "key": false + } + ], + "class_hash": "0x64495ca6dc1dc328972697b30468cea364bcb7452bbb6e4aaad3e4b3f190147", + "abi": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": 
"core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + }, + { + "type": "event", + "name": "dojo_examples::models::moves::Event", + "kind": "enum", + "variants": [] + } + ] + }, + { + "name": "dojo_examples::models::position", + "members": [ + { + "name": "player", + "type": "ContractAddress", + "key": true + }, + { + "name": "vec", + "type": "Vec2", + "key": false + } + ], + "class_hash": "0x4cd20d231b04405a77b184c115dc60637e186504fad7f0929bd76cbd09c10b", + "abi": [ + { + "type": "function", + "name": "name", + "inputs": [], + "outputs": [ + { + "type": "core::felt252" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "unpacked_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "function", + "name": "packed_size", + "inputs": [], + "outputs": [ + { + "type": "core::integer::u32" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "function", + "name": "layout", + "inputs": [], + "outputs": [ + { + "type": "core::array::Span::" + } + ], + "state_mutability": "view" + }, + { + "type": "struct", + "name": "core::array::Span::", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Struct", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "struct", + "name": "core::array::Span::<(core::felt252, core::array::Span::)>", + "members": [ + { + "name": "snapshot", + "type": "@core::array::Array::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "struct", + "name": "dojo::database::introspect::Enum", + "members": [ + { + "name": "name", + "type": "core::felt252" + }, + { + "name": "attrs", + "type": "core::array::Span::" + }, + { + "name": "children", + "type": "core::array::Span::<(core::felt252, core::array::Span::)>" + } + ] + }, + { + "type": "enum", + "name": "dojo::database::introspect::Ty", + "variants": [ + { + "name": "Primitive", + "type": "core::felt252" + }, + { + "name": "Struct", + "type": "dojo::database::introspect::Struct" + }, + { + "name": "Enum", + "type": "dojo::database::introspect::Enum" + }, + { + "name": "Tuple", + "type": "core::array::Span::>" + } + ] + }, + { + "type": "function", + "name": "schema", + "inputs": [], + "outputs": [ + { + "type": "dojo::database::introspect::Ty" + } + ], + "state_mutability": "view" + }, + { + "type": "event", + "name": "dojo_examples::models::position::Event", + "kind": "enum", + "variants": [] + } + ] + } + ] +} \ No newline at end of file 
From df684366366daf5d1b34099c33adacb11457e04c Mon Sep 17 00:00:00 2001 From: Loaf <90423308+ponderingdemocritus@users.noreply.github.com> Date: Wed, 24 Jan 2024 12:45:14 +1100 Subject: [PATCH 33/33] typos and links (#1473) --- README.md | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 354ff142ef..8817c0ad41 100644 --- a/README.md +++ b/README.md @@ -10,19 +10,13 @@ Dojo provides a developer friendly framework for developing and scaling onchain ## 🚀 Quick Start -See the [installation guide](https://book.dojoengine.org/getting-started/quick-start.html) in the Dojo book. +See the [installation guide](https://book.dojoengine.org/getting-started/quick-start) in the Dojo book. ## ⛩ī¸ Built with Dojo - [Awesome Dojo](https://github.com/dojoengine/awesome-dojo) - [Origami](https://github.com/dojoengine/origami) -## 📚 Examples in 30s - -- [Dojo starter react](https://github.com/dojoengine/dojo-starter-react-app) -- [Dojo starter phaser](https://github.com/dojoengine/dojo-starter-phaser) -- [Dojo starter unity](https://github.com/dojoengine/dojo-starter-unity) - ## 🗒ī¸ Documentation You can find more detailed documentation in the Dojo Book [here](https://book.dojoengine.org/). @@ -35,9 +29,9 @@ If you encounter issues or have questions, you can [submit an issue on GitHub](h We welcome contributions of all kinds from anyone. See our [Contribution Guide](/CONTRIBUTING.md) for more information on how to get involved. -## ✏ī¸ Enviroment +## ✏ī¸ Environment -See our [Enviroment setup](https://book.dojoengine.org/getting-started/setup.html) for more information. +See our [Environment setup](https://book.dojoengine.org/getting-started) for more information. ## Releasing