From 666fe5c53b7bc5550bc2173ce6c8220f5132e8dc Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Mon, 26 Jun 2023 16:52:16 +0200 Subject: [PATCH 01/71] Handle native and bn254 signatures --- Cargo.lock | 88 +++++++++++++++++--- massa-async-pool/Cargo.toml | 2 +- massa-client/Cargo.toml | 2 +- massa-execution-exports/Cargo.toml | 4 +- massa-execution-worker/Cargo.toml | 4 +- massa-execution-worker/src/interface_impl.rs | 50 +++++++++++ massa-final-state/Cargo.toml | 2 +- massa-grpc/Cargo.toml | 2 +- massa-ledger-exports/Cargo.toml | 2 +- massa-models/Cargo.toml | 2 +- massa-module-cache/Cargo.toml | 2 +- massa-sdk/Cargo.toml | 2 +- massa-signature/Cargo.toml | 1 + massa-signature/src/signature_impl.rs | 33 ++++++++ massa-versioning/Cargo.toml | 2 +- 15 files changed, 174 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f0dc8139a5a..a94fd77b4d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -373,6 +373,19 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bn254" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694a2df5ffc5f2e385503b2987d281abeb80e02f3c30acd7de0d3db18794e73" +dependencies = [ + "byteorder", + "rand 0.8.5", + "sha2 0.10.6", + "substrate-bn", + "thiserror", +] + [[package]] name = "borsh" version = "0.10.3" @@ -912,6 +925,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "crypto-common" version = "0.1.6" @@ -2167,6 +2186,9 @@ name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +dependencies = [ + "spin", +] [[package]] name = "lazycell" @@ -2407,7 +2429,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = 
"git+https://github.com/massalabs/massa-proto-rs?rev=18ec02f#18ec02ff5bccedc64b3788964e9d78ccb9559567" +source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#a1afa1ece8933cd717ff14f2562162b10873aa46" dependencies = [ "glob", "prost", @@ -2420,7 +2442,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=main#45b3cc115fc3c610a4bba24ea9c3a4a375fc515b" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1_thomas2#2a0a0c0f958bfb97fb5bbfa486f86aa56c8c6a92" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2428,13 +2450,12 @@ dependencies = [ "chrono", "displaydoc", "function_name", - "glob", "loupe", + "massa-proto-rs", "more-asserts 0.3.1", - "num_enum", + "num_enum 0.6.1", "parking_lot", "prost", - "prost-build", "prost-types", "rand 0.8.5", "regex", @@ -2558,7 +2579,7 @@ dependencies = [ "mio", "mockall", "nom", - "num_enum", + "num_enum 0.5.11", "parking_lot", "rand 0.8.5", "serde", @@ -2708,8 +2729,10 @@ name = "massa_execution_worker" version = "0.23.0" dependencies = [ "anyhow", + "bn254", "criterion", "hex-literal", + "massa-proto-rs", "massa-sc-runtime", "massa_async_pool", "massa_channel", @@ -2872,7 +2895,7 @@ dependencies = [ "massa_models", "massa_serialization", "nom", - "num_enum", + "num_enum 0.5.11", "serde", "serde_json", "tempfile", @@ -2932,7 +2955,7 @@ dependencies = [ "massa_time", "nom", "num", - "num_enum", + "num_enum 0.5.11", "rust_decimal", "serde", "serde_with", @@ -2952,7 +2975,7 @@ dependencies = [ "massa_models", "massa_serialization", "nom", - "num_enum", + "num_enum 0.5.11", "rand 0.8.5", "rocksdb", "schnellru", @@ -3076,7 +3099,7 @@ dependencies = [ "massa_time", "massa_versioning", "nom", - "num_enum", + "num_enum 0.5.11", "parking_lot", "peernet", "rand 0.8.5", @@ -3122,6 +3145,7 @@ dependencies = [ "bs58", "displaydoc", "ed25519-dalek", + 
"massa-proto-rs", "massa_hash", "massa_serialization", "nom", @@ -3171,7 +3195,7 @@ dependencies = [ "massa_time", "more-asserts 0.3.1", "nom", - "num_enum", + "num_enum 0.5.11", "parking_lot", "tempfile", "thiserror", @@ -3496,7 +3520,16 @@ version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" dependencies = [ - "num_enum_derive", + "num_enum_derive 0.5.11", +] + +[[package]] +name = "num_enum" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" +dependencies = [ + "num_enum_derive 0.6.1", ] [[package]] @@ -3511,6 +3544,18 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "num_enum_derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2 1.0.60", + "quote 1.0.28", + "syn 2.0.18", +] + [[package]] name = "object" version = "0.28.4" @@ -4396,6 +4441,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + [[package]] name = "rustix" version = "0.36.14" @@ -4945,6 +4996,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "substrate-bn" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b5bbfa79abbae15dd642ea8176a21a635ff3c00059961d1ea27ad04e5b441c" +dependencies = [ + "byteorder", + "crunchy", + "lazy_static", + "rand 0.8.5", + "rustc-hex", +] + [[package]] name = "substruct" version = "0.1.0" diff --git 
a/massa-async-pool/Cargo.toml b/massa-async-pool/Cargo.toml index 69799d7c1ad..2fa1d451eb5 100644 --- a/massa-async-pool/Cargo.toml +++ b/massa-async-pool/Cargo.toml @@ -19,7 +19,7 @@ massa_serialization = { path = "../massa-serialization" } massa_signature = { path = "../massa-signature" } massa_db_exports = { path = "../massa-db-exports" } massa_time = { path = "../massa-time" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } [dev-dependencies] tempfile = "3.3" diff --git a/massa-client/Cargo.toml b/massa-client/Cargo.toml index 9fa435d2262..9e70147e7c4 100644 --- a/massa-client/Cargo.toml +++ b/massa-client/Cargo.toml @@ -28,7 +28,7 @@ massa_signature = { path = "../massa-signature" } massa_time = { path = "../massa-time" } massa_sdk = { path = "../massa-sdk" } massa_wallet = { path = "../massa-wallet" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } [dev-dependencies] toml_edit = "0.19" diff --git a/massa-execution-exports/Cargo.toml b/massa-execution-exports/Cargo.toml index d5cc345d9f8..7d99ee8861a 100644 --- a/massa-execution-exports/Cargo.toml +++ b/massa-execution-exports/Cargo.toml @@ -18,7 +18,7 @@ tokio = { version = "1.23", features = ["sync"] } mockall = { version = "0.11.4", optional = true} # custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } massa_hash = { path = "../massa-hash" } massa_models = { path = 
"../massa-models" } massa_time = { path = "../massa-time" } @@ -27,7 +27,7 @@ massa_final_state = { path = "../massa-final-state" } massa_ledger_exports = { path = "../massa-ledger-exports", optional = true } massa_module_cache = { path = "../massa-module-cache" } massa_versioning = { path = "../massa-versioning" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "main" } +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1_thomas2" } [dev-dependencies] mockall = "0.11.4" diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index efe30afb543..be28d200697 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -28,7 +28,7 @@ massa_execution_exports = { path = "../massa-execution-exports" } massa_models = { path = "../massa-models" } massa_storage = { path = "../massa-storage" } massa_hash = { path = "../massa-hash" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "main" } +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1_thomas2" } massa_metrics = { path = "../massa-metrics" } massa_module_cache = { path = "../massa-module-cache" } massa_signature = { path = "../massa-signature" } @@ -40,6 +40,8 @@ massa_versioning = { path = "../massa-versioning" } massa_db_exports = { path = "../massa-db-exports" } massa_db_worker = { path = "../massa-db-worker", optional = true } massa_wallet = { path = "../massa-wallet" } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +bn254 = { version = "0.0.1" } [dev-dependencies] tokio = { version = "1.23", features = ["sync"] } diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 16182c0d7ef..11f9f9ebb3d 100644 
--- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -15,6 +15,8 @@ use massa_models::config::MAX_DATASTORE_KEY_LENGTH; use massa_models::{ address::Address, amount::Amount, slot::Slot, timeslots::get_block_slot_timestamp, }; +use massa_proto_rs::massa::model::v1::NativePubKey; +use massa_proto_rs::massa::model::v1::NativeSig; use massa_sc_runtime::RuntimeModule; use massa_sc_runtime::{Interface, InterfaceClone}; use parking_lot::Mutex; @@ -603,6 +605,54 @@ impl Interface for InterfaceImpl { Ok(public_key.verify_signature(&h, &signature).is_ok()) } + /// Verify a massa native signature + fn verify_native_signature( + &self, + signature: NativeSig, + message: &[u8], + public_key: NativePubKey, + ) -> Result { + let signature = match massa_signature::Signature::from_native_sig(&signature) { + Ok(sig) => sig, + Err(_) => return Ok(false), + }; + let public_key = match massa_signature::PublicKey::from_native_public_key(&public_key) { + Ok(pubk) => pubk, + Err(_) => return Ok(false), + }; + let h = massa_hash::Hash::compute_from(message); + Ok(public_key.verify_signature(&h, &signature).is_ok()) + } + + /// TODO + fn verify_evm_signature( + &self, + _signature: &[u8], + _message: &[u8], + _public_key: &[u8], + ) -> Result { + Ok(false) + } + + /// Verify a bn254 signature + /// + /// Important information: + /// * Provided signature and public key must be in compressed format + /// * Signature can be the result of a multi signature aggregation + /// * If there is a multi signature, public key must be the result of public key pairing + fn verify_bn254_signature( + &self, + signature: &[u8], + message: &[u8], + public_key: &[u8], + ) -> Result { + let sig = bn254::Signature::from_compressed(signature)?; + let pk = bn254::PublicKey::from_compressed(public_key)?; + Ok(bn254::ECDSA::verify(message, &sig, &pk) + .map_err(|_| anyhow!("Signature verification failed")) + .is_ok()) + } + /// Transfer coins from the current 
address (top of the call stack) towards a target address. /// /// # Arguments diff --git a/massa-final-state/Cargo.toml b/massa-final-state/Cargo.toml index 87c8e59e52a..06ae57df8b5 100644 --- a/massa-final-state/Cargo.toml +++ b/massa-final-state/Cargo.toml @@ -21,7 +21,7 @@ massa_async_pool = { path = "../massa-async-pool" } massa_serialization = { path = "../massa-serialization" } massa_pos_exports = { path = "../massa-pos-exports" } massa_db_exports = { path = "../massa-db-exports" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } massa_versioning = { path = "../massa-versioning" } massa_time = { path = "../massa-time" } diff --git a/massa-grpc/Cargo.toml b/massa-grpc/Cargo.toml index bf228de1b05..2ca2e10da8f 100644 --- a/massa-grpc/Cargo.toml +++ b/massa-grpc/Cargo.toml @@ -10,7 +10,7 @@ homepage = "https://massa.net" documentation = "https://docs.massa.net/" [dependencies] -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } displaydoc = "0.2" thiserror = "1.0" tonic = { version = "0.9.2", features = ["gzip", "tls"] } diff --git a/massa-ledger-exports/Cargo.toml b/massa-ledger-exports/Cargo.toml index c36711da842..9cb1665e296 100644 --- a/massa-ledger-exports/Cargo.toml +++ b/massa-ledger-exports/Cargo.toml @@ -14,7 +14,7 @@ nom = "=7.1" num_enum = "0.5.10" # custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } massa_hash = { path = 
"../massa-hash" } massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } diff --git a/massa-models/Cargo.toml b/massa-models/Cargo.toml index 69df7c240fb..2bbe8ab8b19 100644 --- a/massa-models/Cargo.toml +++ b/massa-models/Cargo.toml @@ -18,7 +18,7 @@ config = "0.13" bs58 = { version = "=0.4", features = ["check"] } bitvec = { version = "=1.0", features = ["serde"] } nom = "=7.1" -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } # custom modules massa_hash = { path = "../massa-hash" } diff --git a/massa-module-cache/Cargo.toml b/massa-module-cache/Cargo.toml index 507b6ad3e83..92de9675983 100644 --- a/massa-module-cache/Cargo.toml +++ b/massa-module-cache/Cargo.toml @@ -18,7 +18,7 @@ rocksdb = "0.20" massa_hash = { path = "../massa-hash" } massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "main", features = [ +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1_thomas2", features = [ "testing", ] } diff --git a/massa-sdk/Cargo.toml b/massa-sdk/Cargo.toml index 7aab4f95e15..1279e6589a9 100644 --- a/massa-sdk/Cargo.toml +++ b/massa-sdk/Cargo.toml @@ -14,4 +14,4 @@ tracing = {version = "0.1", features = ["log"]} massa_api_exports = { path = "../massa-api-exports" } massa_models = { path = "../massa-models" } massa_time = { path = "../massa-time" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } diff --git 
a/massa-signature/Cargo.toml b/massa-signature/Cargo.toml index 5a6ea1684bb..6217ee8827a 100644 --- a/massa-signature/Cargo.toml +++ b/massa-signature/Cargo.toml @@ -16,6 +16,7 @@ nom = "=7.1" rand = "0.7" # TODO tag transition crate with a version number transition = { git = "https://github.com/massalabs/transition.git", rev = "93fa3bf82f9f5ff421c78536879b7fd1b948ca75" } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } # custom modules massa_hash = { path = "../massa-hash" } diff --git a/massa-signature/src/signature_impl.rs b/massa-signature/src/signature_impl.rs index 748e55d33b7..ed30f55cb07 100644 --- a/massa-signature/src/signature_impl.rs +++ b/massa-signature/src/signature_impl.rs @@ -5,6 +5,7 @@ use crate::error::MassaSignatureError; use ed25519_dalek::{Signer, Verifier}; use massa_hash::Hash; +use massa_proto_rs::massa::model::v1::{NativePubKey, NativeSig}; use massa_serialization::{ DeserializeError, Deserializer, Serializer, U64VarIntDeserializer, U64VarIntSerializer, }; @@ -647,6 +648,22 @@ impl PublicKey { ))), } } + + /// Create a massa Public Key from a proto NativePubKey + pub fn from_native_public_key(pubkey: &NativePubKey) -> Result { + match pubkey.version { + ::VERSION => Ok(PublicKeyVariant!["0"]( + ::from_bytes(&pubkey.content)?, + )), + ::VERSION => Ok(PublicKeyVariant!["1"]( + ::from_bytes(&pubkey.content)?, + )), + _ => Err(MassaSignatureError::InvalidVersionError(format!( + "Unknown PublicKey version: {}", + pubkey.version + ))), + } + } } #[transition::impl_version(versions("0", "1"))] @@ -1020,6 +1037,22 @@ impl Signature { ))), } } + + /// Create a massa Signature from a proto NativeSig + pub fn from_native_sig(sig: &NativeSig) -> Result { + match sig.version { + ::VERSION => Ok(SignatureVariant!["0"]( + ::from_bytes(&sig.content)?, + )), + ::VERSION => Ok(SignatureVariant!["1"]( + ::from_bytes(&sig.content)?, + )), + _ => 
Err(MassaSignatureError::InvalidVersionError(format!( + "Unknown signature version: {}", + sig.version + ))), + } + } } #[transition::impl_version(versions("0", "1"))] diff --git a/massa-versioning/Cargo.toml b/massa-versioning/Cargo.toml index c9435f28c61..009b43aeb41 100644 --- a/massa-versioning/Cargo.toml +++ b/massa-versioning/Cargo.toml @@ -19,7 +19,7 @@ massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } massa_hash = { path = "../massa-hash" } massa_signature = { path = "../massa-signature" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } massa_db_exports = { path = "../massa-db-exports" } [dev-dependencies] From 480bc8729a43c57a2ad3470d47abfb9724125a5e Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Tue, 27 Jun 2023 10:51:28 +0200 Subject: [PATCH 02/71] bn254 test save --- massa-execution-worker/src/interface_impl.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 11f9f9ebb3d..b9207f5393c 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -7,6 +7,7 @@ use crate::context::ExecutionContext; use anyhow::{anyhow, bail, Result}; +use hex_literal::hex; use massa_async_pool::{AsyncMessage, AsyncMessageTrigger}; use massa_execution_exports::ExecutionConfig; use massa_execution_exports::ExecutionStackElement; @@ -891,3 +892,13 @@ impl Interface for InterfaceImpl { Ok(hash) } } + +#[test] +fn test_verify_bn254() { + let pubkey = hex!("0315a09cefca423b88e3a8e44f010f6a55576bfd4a877681d61d6484f383f6b972007c42b6a3c34ca60752e6e31099aa03c818006b096be59d821c6c7b7062a25d"); + let signature = 
hex!("02210745fb2b594720e0c38fc9bfda18909b9a72aa00f4515436e90becd4f1b950"); + let sig = bn254::Signature::from_compressed(pubkey).unwrap(); + let pk = bn254::PublicKey::from_compressed(signature).unwrap(); + let message = b"test"; + bn254::ECDSA::verify(message, &sig, &pk).unwrap(); +} From 3ac2541f726fd18f62c3c13b4da238d80cad60f2 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Tue, 27 Jun 2023 10:52:57 +0200 Subject: [PATCH 03/71] Remove bn254 implementation --- Cargo.lock | 42 -------------------- massa-execution-worker/Cargo.toml | 1 - massa-execution-worker/src/interface_impl.rs | 30 -------------- 3 files changed, 73 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a94fd77b4d8..9790b92471c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -373,19 +373,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "bn254" -version = "0.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7694a2df5ffc5f2e385503b2987d281abeb80e02f3c30acd7de0d3db18794e73" -dependencies = [ - "byteorder", - "rand 0.8.5", - "sha2 0.10.6", - "substrate-bn", - "thiserror", -] - [[package]] name = "borsh" version = "0.10.3" @@ -925,12 +912,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - [[package]] name = "crypto-common" version = "0.1.6" @@ -2186,9 +2167,6 @@ name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -dependencies = [ - "spin", -] [[package]] name = "lazycell" @@ -2729,7 +2707,6 @@ name = "massa_execution_worker" version = "0.23.0" dependencies = [ "anyhow", - "bn254", "criterion", "hex-literal", "massa-proto-rs", @@ -4441,12 +4418,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - [[package]] name = "rustix" version = "0.36.14" @@ -4996,19 +4967,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "substrate-bn" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b5bbfa79abbae15dd642ea8176a21a635ff3c00059961d1ea27ad04e5b441c" -dependencies = [ - "byteorder", - "crunchy", - "lazy_static", - "rand 0.8.5", - "rustc-hex", -] - [[package]] name = "substruct" version = "0.1.0" diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index be28d200697..35ad2b522d4 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -41,7 +41,6 @@ massa_db_exports = { path = "../massa-db-exports" } massa_db_worker = { path = "../massa-db-worker", optional = true } massa_wallet = { path = "../massa-wallet" } massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } -bn254 = { version = "0.0.1" } [dev-dependencies] tokio = { version = "1.23", features = ["sync"] } diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index b9207f5393c..4446af6544d 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -7,7 +7,6 @@ use crate::context::ExecutionContext; use anyhow::{anyhow, bail, Result}; -use hex_literal::hex; use massa_async_pool::{AsyncMessage, AsyncMessageTrigger}; use massa_execution_exports::ExecutionConfig; use massa_execution_exports::ExecutionStackElement; @@ -635,25 +634,6 @@ impl Interface for InterfaceImpl { Ok(false) } - /// Verify a bn254 signature - /// - /// Important information: - /// 
* Provided signature and public key must be in compressed format - /// * Signature can be the result of a multi signature aggregation - /// * If there is a multi signature, public key must be the result of public key pairing - fn verify_bn254_signature( - &self, - signature: &[u8], - message: &[u8], - public_key: &[u8], - ) -> Result { - let sig = bn254::Signature::from_compressed(signature)?; - let pk = bn254::PublicKey::from_compressed(public_key)?; - Ok(bn254::ECDSA::verify(message, &sig, &pk) - .map_err(|_| anyhow!("Signature verification failed")) - .is_ok()) - } - /// Transfer coins from the current address (top of the call stack) towards a target address. /// /// # Arguments @@ -892,13 +872,3 @@ impl Interface for InterfaceImpl { Ok(hash) } } - -#[test] -fn test_verify_bn254() { - let pubkey = hex!("0315a09cefca423b88e3a8e44f010f6a55576bfd4a877681d61d6484f383f6b972007c42b6a3c34ca60752e6e31099aa03c818006b096be59d821c6c7b7062a25d"); - let signature = hex!("02210745fb2b594720e0c38fc9bfda18909b9a72aa00f4515436e90becd4f1b950"); - let sig = bn254::Signature::from_compressed(pubkey).unwrap(); - let pk = bn254::PublicKey::from_compressed(signature).unwrap(); - let message = b"test"; - bn254::ECDSA::verify(message, &sig, &pk).unwrap(); -} From ed1d4f680eaf11e2f4e5d0da221345d2aa23b8bd Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Tue, 27 Jun 2023 12:13:53 +0200 Subject: [PATCH 04/71] verify_evm_signature and hash_keccak256 --- Cargo.lock | 351 +++++++++++++------ massa-execution-worker/Cargo.toml | 2 + massa-execution-worker/src/interface_impl.rs | 35 +- 3 files changed, 267 insertions(+), 121 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9790b92471c..aaca6c65ec6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -176,9 +176,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 
1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -187,9 +187,9 @@ version = "0.1.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -308,7 +308,7 @@ dependencies = [ "lazy_static", "lazycell", "peeking_take_while", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "regex", "rustc-hash", @@ -324,9 +324,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" [[package]] name = "bitvec" @@ -392,7 +392,7 @@ dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", "proc-macro-crate 0.1.5", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "syn 1.0.109", ] @@ -402,7 +402,7 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -413,7 +413,7 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -460,7 +460,7 @@ version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -608,7 +608,7 @@ 
checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "bitflags 1.3.2", "clap_lex", - "indexmap", + "indexmap 1.9.3", "textwrap 0.16.0", ] @@ -771,7 +771,7 @@ dependencies = [ "cranelift-entity", "fxhash", "hashbrown 0.12.3", - "indexmap", + "indexmap 1.9.3", "log", "smallvec", ] @@ -912,6 +912,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "crypto-common" version = "0.1.6" @@ -923,6 +929,16 @@ dependencies = [ "typenum", ] +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array", + "subtle", +] + [[package]] name = "ctr" version = "0.9.2" @@ -983,7 +999,7 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "strsim 0.10.0", "syn 1.0.109", @@ -997,10 +1013,10 @@ checksum = "ab8bfa2e259f8ee1ce5e97824a3c55ec4404a0d772ca7fa96bf19f0752a046eb" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "strsim 0.10.0", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1022,7 +1038,7 @@ checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a" dependencies = [ "darling_core 0.20.1", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1044,7 +1060,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -1134,9 +1150,9 @@ version = "0.2.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1161,7 +1177,7 @@ dependencies = [ "byteorder", "lazy_static", "proc-macro-error", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -1234,7 +1250,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -1246,7 +1262,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8ea75f31022cba043afe037940d73684327e915f88f62478e778c3de914cd0a" dependencies = [ "enum_delegate_lib", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -1257,7 +1273,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e1f6c3800b304a6be0012039e2a45a322a093539c45ab818d9e6895a39c90fe" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "rand 0.8.5", "syn 1.0.109", @@ -1279,11 +1295,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08b6c6ab82d70f08844964ba10c7babb716de2ecaeab9be5717918a5177d3af" dependencies = [ "darling 0.20.1", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] +[[package]] +name = "equivalent" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" + [[package]] name = "erased-serde" version = "0.3.25" @@ -1477,9 +1499,9 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1580,7 +1602,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" dependencies = [ "fallible-iterator", - "indexmap", + "indexmap 1.9.3", "stable_deref_trait", ] @@ -1656,9 +1678,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d357c7ae988e7d2182f7d7871d0b963962420b0678b0997ce7de72001aeab782" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" dependencies = [ "bytes", "fnv", @@ -1666,7 +1688,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap", + "indexmap 1.9.3", "slab", "tokio", "tokio-util", @@ -1706,6 +1728,12 @@ dependencies = [ "ahash 0.8.3", ] +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" + [[package]] name = "hdrhistogram" version = "7.5.2" @@ -1767,6 +1795,16 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0" +[[package]] +name = "hmac" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" +dependencies = [ + "crypto-mac", + "digest 0.9.0", +] + [[package]] name = "hmac" version = "0.12.1" @@ -1776,6 +1814,17 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "hmac-drbg" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" +dependencies = [ + "digest 0.9.0", + "generic-array", + "hmac 0.8.1", +] + [[package]] name = "http" version = "0.2.9" @@ -1824,9 +1873,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.26" +version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab302d72a6f11a3b910431ff93aae7e773078c769f0a3ef15fb9ec692ed147d4" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ "bytes", "futures-channel", @@ -1924,6 +1973,16 @@ dependencies = [ "serde", ] +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", +] + [[package]] name = "inout" version = "0.1.3" @@ -2091,7 +2150,7 @@ checksum = "c6027ac0b197ce9543097d02a290f550ce1d9432bf301524b013053c0b75cc94" dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -2182,9 +2241,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.146" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libloading" @@ -2218,6 +2277,54 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "libsecp256k1" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95b09eff1b35ed3b33b877ced3a691fc7a481919c7e29c53c906226fcf55e2a1" +dependencies = [ + "arrayref", + "base64 0.13.1", + "digest 0.9.0", + "hmac-drbg", + "libsecp256k1-core", 
+ "libsecp256k1-gen-ecmult", + "libsecp256k1-gen-genmult", + "rand 0.8.5", + "serde", + "sha2 0.9.9", + "typenum", +] + +[[package]] +name = "libsecp256k1-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" +dependencies = [ + "crunchy", + "digest 0.9.0", + "subtle", +] + +[[package]] +name = "libsecp256k1-gen-ecmult" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3038c808c55c87e8a172643a7d87187fc6c4174468159cb3090659d55bcb4809" +dependencies = [ + "libsecp256k1-core", +] + +[[package]] +name = "libsecp256k1-gen-genmult" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db8d6ba2cec9eacc40e6e8ccc98931840301f1006e95647ceb2dd5c3aa06f7c" +dependencies = [ + "libsecp256k1-core", +] + [[package]] name = "libz-sys" version = "1.1.9" @@ -2420,7 +2527,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1_thomas2#2a0a0c0f958bfb97fb5bbfa486f86aa56c8c6a92" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1_thomas2#4cb3c7871bef77113cc3ed99d08eab6e77f8114f" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2709,6 +2816,7 @@ dependencies = [ "anyhow", "criterion", "hex-literal", + "libsecp256k1", "massa-proto-rs", "massa-sc-runtime", "massa_async_pool", @@ -2738,6 +2846,7 @@ dependencies = [ "serde_json", "serial_test 1.0.0", "sha2 0.10.6", + "sha3", "tempfile", "tokio", "tracing", @@ -3319,7 +3428,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -3516,7 +3625,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -3528,9 +3637,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -3541,7 +3650,7 @@ checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424" dependencies = [ "crc32fast", "hashbrown 0.11.2", - "indexmap", + "indexmap 1.9.3", "memchr", ] @@ -3671,7 +3780,7 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f35583365be5d148e959284f42526841917b7bfa09e2d1a7ad5dde2cf0eaa39" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -3689,7 +3798,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ "digest 0.10.7", - "hmac", + "hmac 0.12.1", "password-hash", "sha2 0.10.6", ] @@ -3724,9 +3833,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16833386b02953ca926d19f64af613b9bf742c48dcd5e09b32fbfc9740bf84e2" +checksum = "f73935e4d55e2abf7f130186537b19e7a4abc886a0252380b59248af473a3fc9" dependencies = [ "thiserror", "ucd-trie", @@ -3734,9 +3843,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7763190f9406839f99e5197afee8c9e759969f7dbfa40ad3b8dbee8757b745b5" 
+checksum = "aef623c9bbfa0eedf5a0efba11a5ee83209c326653ca31ff019bec3a95bfff2b" dependencies = [ "pest", "pest_generator", @@ -3744,22 +3853,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "249061b22e99973da1f5f5f1410284419e283bb60b79255bf5f42a94b66a2e00" +checksum = "b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] name = "pest_meta" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "457c310cfc9cf3f22bc58901cc7f0d3410ac5d6298e432a4f9a6138565cb6df6" +checksum = "a01f71cb40bd8bb94232df14b946909e14660e33fc05db3e50ae2a82d7ea0ca0" dependencies = [ "once_cell", "pest", @@ -3773,7 +3882,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 1.9.3", ] [[package]] @@ -3791,9 +3900,9 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39407670928234ebc5e6e580247dd567ad73a3578460c5990f9503df207e8f07" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -3896,7 +4005,7 @@ version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "syn 1.0.109", ] @@ -3926,7 +4035,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.60", + "proc-macro2 
1.0.63", "quote 1.0.28", "syn 1.0.109", "version_check", @@ -3938,7 +4047,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "version_check", ] @@ -3954,9 +4063,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.60" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" +checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" dependencies = [ "unicode-ident", ] @@ -4031,7 +4140,7 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", "itertools", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -4066,7 +4175,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -4103,7 +4212,7 @@ version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", ] [[package]] @@ -4337,7 +4446,7 @@ dependencies = [ "bitvec", "bytecheck", "hashbrown 0.12.3", - "indexmap", + "indexmap 1.9.3", "ptr_meta", "rend", "rkyv_derive", @@ -4352,7 +4461,7 @@ version = "0.7.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -4524,7 +4633,7 @@ version = "0.7.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "107c3d5d7f370ac09efa62a78375f94d94b8a33c61d8c278b96683fb4dbf2d8d" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -4641,16 +4750,16 @@ version = "1.0.164" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] name = "serde_json" -version = "1.0.97" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a" +checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3" dependencies = [ "itoa", "ryu", @@ -4677,7 +4786,7 @@ dependencies = [ "base64 0.13.1", "chrono", "hex", - "indexmap", + "indexmap 1.9.3", "serde", "serde_json", "serde_with_macros", @@ -4691,9 +4800,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ "darling 0.20.1", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -4730,7 +4839,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "079a83df15f85d89a68d64ae1238f142f172b1fa915d0d76b26a7cba1b659a69" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -4741,9 +4850,9 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -4783,6 +4892,16 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "sha3" 
+version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest 0.10.7", + "keccak", +] + [[package]] name = "sharded-slab" version = "0.1.4" @@ -4940,7 +5059,7 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -4961,7 +5080,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "rustversion", "syn 1.0.109", @@ -4999,18 +5118,18 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.18" +version = "2.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "2efbeae7acf4eabd6bcdcbd11c92f45231ddda7539edc7806bd1a04a03b24616" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "unicode-ident", ] @@ -5083,9 +5202,9 @@ version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -5196,9 +5315,9 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ - "proc-macro2 
1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -5249,17 +5368,17 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.10" +version = "0.19.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739" +checksum = "266f016b7f039eec8a1a80dfe6156b633d208b9fccca5e4db1d6775b0c4e34a7" dependencies = [ - "indexmap", + "indexmap 2.0.0", "toml_datetime", "winnow", ] @@ -5303,7 +5422,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" dependencies = [ "prettyplease", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "prost-build", "quote 1.0.28", "syn 1.0.109", @@ -5364,7 +5483,7 @@ dependencies = [ "futures-core", "futures-util", "hdrhistogram", - "indexmap", + "indexmap 1.9.3", "pin-project", "pin-project-lite", "rand 0.8.5", @@ -5382,7 +5501,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8bd22a874a2d0b70452d5597b12c537331d49060824a95f49f108994f94aa4c" dependencies = [ - "bitflags 2.3.2", + "bitflags 2.3.3", "bytes", "futures-core", "futures-util", @@ -5421,13 +5540,13 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.25" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8803eee176538f94ae9a14b55b2804eb7e1441f8210b1c31290b3bccdccff73b" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 
2.0.22", ] [[package]] @@ -5479,7 +5598,7 @@ version = "0.1.0" source = "git+https://github.com/massalabs/transition.git?rev=93fa3bf82f9f5ff421c78536879b7fd1b948ca75#93fa3bf82f9f5ff421c78536879b7fd1b948ca75" dependencies = [ "darling 0.14.4", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", "unsigned-varint", @@ -5585,9 +5704,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.3.4" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa2982af2eec27de306107c027578ff7f423d65f7250e40ce0fea8f45248b81" +checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" [[package]] name = "valuable" @@ -5663,9 +5782,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-shared", ] @@ -5687,7 +5806,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", "syn 1.0.109", ] @@ -5720,9 +5839,9 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5751,7 +5870,7 @@ dependencies = [ "bytes", "cfg-if", "derivative", - "indexmap", + "indexmap 1.9.3", "js-sys", "more-asserts 0.2.2", "rustc-demangle", @@ -5839,7 +5958,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97901fdbaae383dbb90ea162cc3a76a9fa58ac39aec7948b4c0b9bbef9307738" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", 
"syn 1.0.109", ] @@ -5875,7 +5994,7 @@ dependencies = [ "bytecheck", "enum-iterator", "enumset", - "indexmap", + "indexmap 1.9.3", "more-asserts 0.2.2", "rkyv", "target-lexicon", @@ -5896,7 +6015,7 @@ dependencies = [ "derivative", "enum-iterator", "fnv", - "indexmap", + "indexmap 1.9.3", "lazy_static", "libc", "mach", @@ -5915,7 +6034,7 @@ version = "0.95.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2ea896273ea99b15132414be1da01ab0d8836415083298ecaffbe308eaac87a" dependencies = [ - "indexmap", + "indexmap 1.9.3", "url", ] @@ -6242,9 +6361,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.60", + "proc-macro2 1.0.63", "quote 1.0.28", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index 35ad2b522d4..b8779c81e12 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -15,6 +15,8 @@ tracing = "0.1" serde_json = "1.0" num = { version = "0.4", features = ["serde"] } sha2 = "0.10.6" +sha3 = "0.10.8" +libsecp256k1 = "0.7.1" # use with features criterion = { version = "0.4", optional = true } tempfile = { version = "3.3", optional = true } diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 4446af6544d..b00598888b1 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -624,14 +624,39 @@ impl Interface for InterfaceImpl { Ok(public_key.verify_signature(&h, &signature).is_ok()) } - /// TODO + /// Verify an EVM signature + /// + /// Information: + /// * Expects a standard SECP256K1 signature. + /// The signature is required to be within 0 and curve order. + /// Returns error if it overflows. + /// Expected length is 64 bytes. 
+ /// * Expects a standard uncompressed raw public key. + /// Expected length is 64 bytes. fn verify_evm_signature( &self, - _signature: &[u8], - _message: &[u8], - _public_key: &[u8], + signature: &[u8], + message: &[u8], + public_key: &[u8], ) -> Result { - Ok(false) + // format is: secp256k1_sign(Keccak256("\x19Ethereum Signed Message:\n32" + Keccak256(message)); + let message_hash = sha3::Keccak256::digest(&message); + let prefix = b"\x19Ethereum Signed Message:\n32"; + let to_hash = [&prefix[..], message_hash.as_slice()].concat(); + let full_hash = sha3::Keccak256::digest(&to_hash); + + let m = libsecp256k1::Message::parse_slice(&full_hash)?; + let s = libsecp256k1::Signature::parse_standard_slice(signature)?; + let k = libsecp256k1::PublicKey::parse_slice( + public_key, + Some(libsecp256k1::PublicKeyFormat::Raw), + )?; + Ok(libsecp256k1::verify(&m, &s, &k)) + } + + /// Keccak256 hash function + fn hash_keccak256(&self, bytes: &[u8]) -> Result<[u8; 32]> { + Ok(sha3::Keccak256::digest(bytes).into()) } /// Transfer coins from the current address (top of the call stack) towards a target address. 
From bea6049e75cd1749491c1a610d8bb9b4d0860432 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Tue, 27 Jun 2023 16:32:48 +0200 Subject: [PATCH 05/71] test_evm_verify --- massa-execution-worker/src/interface_impl.rs | 34 ++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index b00598888b1..be5c5c4075e 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -897,3 +897,37 @@ impl Interface for InterfaceImpl { Ok(hash) } } + +#[test] +fn test_evm_verify() { + use hex_literal::hex; + + let message_ = b"test"; + let public_key_ = hex!("807a7Bb5193eDf9898b9092c1597bB966fe52514"); + let signature_ = hex!("d0d05c35080635b5e865006c6c4f5b5d457ec342564d8fc67ce40edc264ccdab3f2f366b5bd1e38582538fed7fa6282148e86af97970a10cb3302896f5d68ef51b"); + + println!("public key len: {}", public_key_.len()); + println!("public key: {:?}", public_key_); + println!("signature value: {:?}", signature_); + + // build the message + let message_hash = sha3::Keccak256::digest(&message_); + let prefix = b"\x19Ethereum Signed Message:\n32"; + let to_hash = [&prefix[..], message_hash.as_slice()].concat(); + let full_hash = sha3::Keccak256::digest(&to_hash); + let message = libsecp256k1::Message::parse_slice(&full_hash).unwrap(); + + // parse the signature as being (r, s, v) + // r is the R.x value of the signature's R point (32 bytes) + // s is the signature proof for R.x (32 bytes) + // v is a recovery parameter used to ease the signature verification (1 byte) + let signature = libsecp256k1::Signature::parse_standard_slice(&signature_[..64]).unwrap(); + let recovery_id = libsecp256k1::RecoveryId::parse_rpc(signature_[64]).unwrap(); + + // recover the public key using v + let recovered = libsecp256k1::recover(&message, &signature, &recovery_id).unwrap(); + println!("recovered public key: {:?}", recovered.serialize()); + + // verify + 
assert!(libsecp256k1::verify(&message, &signature, &recovered)); +} From d32a9e3582f550f5246ded889f4be4143ba6931e Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Tue, 27 Jun 2023 17:07:39 +0200 Subject: [PATCH 06/71] extra info for test_evm_verify --- massa-execution-worker/src/interface_impl.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index be5c5c4075e..f6297d2a8e0 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -903,13 +903,19 @@ fn test_evm_verify() { use hex_literal::hex; let message_ = b"test"; - let public_key_ = hex!("807a7Bb5193eDf9898b9092c1597bB966fe52514"); + let address_ = hex!("807a7Bb5193eDf9898b9092c1597bB966fe52514"); + let private_key_ = hex!("ed6602758bdd68dc9df67a6936ed69807a74b8cc89bdc18f3939149d02db17f3"); let signature_ = hex!("d0d05c35080635b5e865006c6c4f5b5d457ec342564d8fc67ce40edc264ccdab3f2f366b5bd1e38582538fed7fa6282148e86af97970a10cb3302896f5d68ef51b"); - println!("public key len: {}", public_key_.len()); - println!("public key: {:?}", public_key_); + println!("address len: {}", address_.len()); + println!("address: {:?}", address_); println!("signature value: {:?}", signature_); + // build original public key + let private_key = libsecp256k1::SecretKey::parse_slice(&private_key_).unwrap(); + let public_key = libsecp256k1::PublicKey::from_secret_key(&private_key); + println!("orginal public key: {:?}", public_key.serialize()); + // build the message let message_hash = sha3::Keccak256::digest(&message_); let prefix = b"\x19Ethereum Signed Message:\n32"; From 3869edc30b0c67b3234cab5f30ce02796b708daf Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Tue, 27 Jun 2023 21:21:32 +0200 Subject: [PATCH 07/71] improve and cleanup test_evm_verify --- massa-execution-worker/src/interface_impl.rs | 29 ++++++++++---------- 1 file changed, 15 
insertions(+), 14 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index f6297d2a8e0..ece48062af6 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -902,24 +902,18 @@ impl Interface for InterfaceImpl { fn test_evm_verify() { use hex_literal::hex; + // corresponding address is 0x807a7Bb5193eDf9898b9092c1597bB966fe52514 let message_ = b"test"; - let address_ = hex!("807a7Bb5193eDf9898b9092c1597bB966fe52514"); - let private_key_ = hex!("ed6602758bdd68dc9df67a6936ed69807a74b8cc89bdc18f3939149d02db17f3"); let signature_ = hex!("d0d05c35080635b5e865006c6c4f5b5d457ec342564d8fc67ce40edc264ccdab3f2f366b5bd1e38582538fed7fa6282148e86af97970a10cb3302896f5d68ef51b"); - - println!("address len: {}", address_.len()); - println!("address: {:?}", address_); - println!("signature value: {:?}", signature_); + let private_key_ = hex!("ed6602758bdd68dc9df67a6936ed69807a74b8cc89bdc18f3939149d02db17f3"); // build original public key let private_key = libsecp256k1::SecretKey::parse_slice(&private_key_).unwrap(); let public_key = libsecp256k1::PublicKey::from_secret_key(&private_key); - println!("orginal public key: {:?}", public_key.serialize()); // build the message - let message_hash = sha3::Keccak256::digest(&message_); - let prefix = b"\x19Ethereum Signed Message:\n32"; - let to_hash = [&prefix[..], message_hash.as_slice()].concat(); + let prefix = format!("\x19Ethereum Signed Message:\n{}", message_.len()); + let to_hash = [prefix.as_bytes(), message_].concat(); let full_hash = sha3::Keccak256::digest(&to_hash); let message = libsecp256k1::Message::parse_slice(&full_hash).unwrap(); @@ -930,10 +924,17 @@ fn test_evm_verify() { let signature = libsecp256k1::Signature::parse_standard_slice(&signature_[..64]).unwrap(); let recovery_id = libsecp256k1::RecoveryId::parse_rpc(signature_[64]).unwrap(); - // recover the public key using v + // check 1 + // verify 
the signature + assert!(libsecp256k1::verify(&message, &signature, &public_key)); + + // check 2 + // recover the public key using v and match it with the derived one let recovered = libsecp256k1::recover(&message, &signature, &recovery_id).unwrap(); - println!("recovered public key: {:?}", recovered.serialize()); + assert_eq!(public_key, recovered); - // verify - assert!(libsecp256k1::verify(&message, &signature, &recovered)); + // check 3 + // sign the message and match it with the original signature + let (second_signature, _) = libsecp256k1::sign(&message, &private_key); + assert_eq!(signature, second_signature); } From 54cdb4a56d66df6629e473b79a12318e5a4d20cb Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Wed, 28 Jun 2023 12:17:13 +0200 Subject: [PATCH 08/71] Update verify_evm_signature and revome verify_native_signature --- Cargo.lock | 1 - massa-execution-worker/Cargo.toml | 1 - massa-execution-worker/src/interface_impl.rs | 71 ++++++++------------ 3 files changed, 29 insertions(+), 44 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aaca6c65ec6..771da3d6756 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2817,7 +2817,6 @@ dependencies = [ "criterion", "hex-literal", "libsecp256k1", - "massa-proto-rs", "massa-sc-runtime", "massa_async_pool", "massa_channel", diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index b8779c81e12..094336ed39f 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -42,7 +42,6 @@ massa_versioning = { path = "../massa-versioning" } massa_db_exports = { path = "../massa-db-exports" } massa_db_worker = { path = "../massa-db-worker", optional = true } massa_wallet = { path = "../massa-wallet" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } [dev-dependencies] tokio = { version = "1.23", features = ["sync"] } diff --git a/massa-execution-worker/src/interface_impl.rs 
b/massa-execution-worker/src/interface_impl.rs index ece48062af6..ab30b10c70c 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -15,8 +15,6 @@ use massa_models::config::MAX_DATASTORE_KEY_LENGTH; use massa_models::{ address::Address, amount::Amount, slot::Slot, timeslots::get_block_slot_timestamp, }; -use massa_proto_rs::massa::model::v1::NativePubKey; -use massa_proto_rs::massa::model::v1::NativeSig; use massa_sc_runtime::RuntimeModule; use massa_sc_runtime::{Interface, InterfaceClone}; use parking_lot::Mutex; @@ -605,53 +603,42 @@ impl Interface for InterfaceImpl { Ok(public_key.verify_signature(&h, &signature).is_ok()) } - /// Verify a massa native signature - fn verify_native_signature( - &self, - signature: NativeSig, - message: &[u8], - public_key: NativePubKey, - ) -> Result { - let signature = match massa_signature::Signature::from_native_sig(&signature) { - Ok(sig) => sig, - Err(_) => return Ok(false), - }; - let public_key = match massa_signature::PublicKey::from_native_public_key(&public_key) { - Ok(pubk) => pubk, - Err(_) => return Ok(false), - }; - let h = massa_hash::Hash::compute_from(message); - Ok(public_key.verify_signature(&h, &signature).is_ok()) - } - /// Verify an EVM signature /// /// Information: - /// * Expects a standard SECP256K1 signature. - /// The signature is required to be within 0 and curve order. - /// Returns error if it overflows. - /// Expected length is 64 bytes. - /// * Expects a standard uncompressed raw public key. - /// Expected length is 64 bytes. + /// * Expects a SECP256K1 signature in full ETH format. + /// Format: (r, s, v) v will be ignored + /// Length: 65 bytes + /// * Expects a public key in full ETH format. 
+ /// Length: 65 bytes fn verify_evm_signature( &self, - signature: &[u8], - message: &[u8], - public_key: &[u8], + signature_: &[u8], + message_: &[u8], + public_key_: &[u8], ) -> Result { - // format is: secp256k1_sign(Keccak256("\x19Ethereum Signed Message:\n32" + Keccak256(message)); - let message_hash = sha3::Keccak256::digest(&message); - let prefix = b"\x19Ethereum Signed Message:\n32"; - let to_hash = [&prefix[..], message_hash.as_slice()].concat(); - let full_hash = sha3::Keccak256::digest(&to_hash); - - let m = libsecp256k1::Message::parse_slice(&full_hash)?; - let s = libsecp256k1::Signature::parse_standard_slice(signature)?; - let k = libsecp256k1::PublicKey::parse_slice( - public_key, - Some(libsecp256k1::PublicKeyFormat::Raw), + // parse the public key + let public_key = libsecp256k1::PublicKey::parse_slice( + public_key_, + Some(libsecp256k1::PublicKeyFormat::Full), )?; - Ok(libsecp256k1::verify(&m, &s, &k)) + + // build the message + let prefix = format!("\x19Ethereum Signed Message:\n{}", message_.len()); + let to_hash = [prefix.as_bytes(), message_].concat(); + let full_hash = sha3::Keccak256::digest(&to_hash); + let message = libsecp256k1::Message::parse_slice(&full_hash).unwrap(); + + // parse the signature as being (r, s, v) + // r is the R.x value of the signature's R point (32 bytes) + // s is the signature proof for R.x (32 bytes) + // v is a recovery parameter used to ease the signature verification (1 byte) + // we ignore the recovery parameter here + // see test_evm_verify for an example of its usage + let signature = libsecp256k1::Signature::parse_standard_slice(&signature_[..64]).unwrap(); + + // verify the signature + Ok(libsecp256k1::verify(&message, &signature, &public_key)) } /// Keccak256 hash function From 5292982cdc564d52c999767b7199b3dbe8f8a8e1 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Fri, 30 Jun 2023 11:27:36 +0200 Subject: [PATCH 09/71] Remove unused native conversion functions --- Cargo.lock | 1 - 
massa-signature/Cargo.toml | 1 - massa-signature/src/signature_impl.rs | 33 --------------------------- 3 files changed, 35 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 771da3d6756..db1f52ce839 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3230,7 +3230,6 @@ dependencies = [ "bs58", "displaydoc", "ed25519-dalek", - "massa-proto-rs", "massa_hash", "massa_serialization", "nom", diff --git a/massa-signature/Cargo.toml b/massa-signature/Cargo.toml index 6217ee8827a..5a6ea1684bb 100644 --- a/massa-signature/Cargo.toml +++ b/massa-signature/Cargo.toml @@ -16,7 +16,6 @@ nom = "=7.1" rand = "0.7" # TODO tag transition crate with a version number transition = { git = "https://github.com/massalabs/transition.git", rev = "93fa3bf82f9f5ff421c78536879b7fd1b948ca75" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } # custom modules massa_hash = { path = "../massa-hash" } diff --git a/massa-signature/src/signature_impl.rs b/massa-signature/src/signature_impl.rs index ed30f55cb07..748e55d33b7 100644 --- a/massa-signature/src/signature_impl.rs +++ b/massa-signature/src/signature_impl.rs @@ -5,7 +5,6 @@ use crate::error::MassaSignatureError; use ed25519_dalek::{Signer, Verifier}; use massa_hash::Hash; -use massa_proto_rs::massa::model::v1::{NativePubKey, NativeSig}; use massa_serialization::{ DeserializeError, Deserializer, Serializer, U64VarIntDeserializer, U64VarIntSerializer, }; @@ -648,22 +647,6 @@ impl PublicKey { ))), } } - - /// Create a massa Public Key from a proto NativePubKey - pub fn from_native_public_key(pubkey: &NativePubKey) -> Result { - match pubkey.version { - ::VERSION => Ok(PublicKeyVariant!["0"]( - ::from_bytes(&pubkey.content)?, - )), - ::VERSION => Ok(PublicKeyVariant!["1"]( - ::from_bytes(&pubkey.content)?, - )), - _ => Err(MassaSignatureError::InvalidVersionError(format!( - "Unknown PublicKey version: {}", - pubkey.version - ))), - } - } } 
#[transition::impl_version(versions("0", "1"))] @@ -1037,22 +1020,6 @@ impl Signature { ))), } } - - /// Create a massa Signature from a proto NativeSig - pub fn from_native_sig(sig: &NativeSig) -> Result { - match sig.version { - ::VERSION => Ok(SignatureVariant!["0"]( - ::from_bytes(&sig.content)?, - )), - ::VERSION => Ok(SignatureVariant!["1"]( - ::from_bytes(&sig.content)?, - )), - _ => Err(MassaSignatureError::InvalidVersionError(format!( - "Unknown signature version: {}", - sig.version - ))), - } - } } #[transition::impl_version(versions("0", "1"))] From 215d8efd99d6c7f6588281c4764dd3d5d3526968 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Fri, 30 Jun 2023 12:57:57 +0200 Subject: [PATCH 10/71] initial --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 9bd89250a06..38553833bb4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,7 +47,7 @@ resolver = "2" # From https://doc.rust-lang.org/cargo/reference/profiles.html#overrides [profile.dev.package."*"] -opt-level = 3 # Speed-up the CI +opt-level = 3 # Speed-up the # # Features # From 40bebe8730176fac915c4e5ae659482181388cc9 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Fri, 30 Jun 2023 13:09:50 +0200 Subject: [PATCH 11/71] Update packages and fix errors --- Cargo.lock | 6 ++++-- massa-async-pool/Cargo.toml | 2 +- massa-client/Cargo.toml | 2 +- massa-execution-exports/Cargo.toml | 4 ++-- massa-execution-worker/Cargo.toml | 2 +- massa-final-state/Cargo.toml | 2 +- massa-grpc/Cargo.toml | 2 +- massa-grpc/src/api.rs | 2 ++ massa-ledger-exports/Cargo.toml | 2 +- massa-models/Cargo.toml | 2 +- massa-module-cache/Cargo.toml | 2 +- massa-sdk/Cargo.toml | 2 +- massa-versioning/Cargo.toml | 2 +- 13 files changed, 18 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ecaecd9c7c0..10adeca9783 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2514,7 +2514,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = 
"git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#a1afa1ece8933cd717ff14f2562162b10873aa46" +source = "git+https://github.com/massalabs/massa-proto-rs?rev=8873f96#8873f969375834904044965cb1b0bcb9ec0fafdd" dependencies = [ "glob", "prost", @@ -2527,7 +2527,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1_thomas2#4cb3c7871bef77113cc3ed99d08eab6e77f8114f" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#42d0fafef13572a4ecd5f61483e88afbbdeef940" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2548,6 +2548,7 @@ dependencies = [ "serde_json", "serial_test 1.0.0", "sha2 0.10.6", + "sha3", "thiserror", "tracing", "wasmer", @@ -2555,6 +2556,7 @@ dependencies = [ "wasmer-compiler-singlepass", "wasmer-middlewares", "wasmer-types", + "which", ] [[package]] diff --git a/massa-async-pool/Cargo.toml b/massa-async-pool/Cargo.toml index aa420256f3f..9face209379 100644 --- a/massa-async-pool/Cargo.toml +++ b/massa-async-pool/Cargo.toml @@ -19,7 +19,7 @@ massa_serialization = { path = "../massa-serialization" } massa_signature = { path = "../massa-signature" } massa_db_exports = { path = "../massa-db-exports" } massa_time = { path = "../massa-time" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} [dev-dependencies] tempfile = "3.3" diff --git a/massa-client/Cargo.toml b/massa-client/Cargo.toml index 563fa605705..2f60e6a70da 100644 --- a/massa-client/Cargo.toml +++ b/massa-client/Cargo.toml @@ -28,7 +28,7 @@ massa_signature = { path = "../massa-signature" } massa_time = { path = "../massa-time" } massa_sdk = { path = "../massa-sdk" } massa_wallet = { 
path = "../massa-wallet" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} [dev-dependencies] toml_edit = "0.19" diff --git a/massa-execution-exports/Cargo.toml b/massa-execution-exports/Cargo.toml index da7284d9208..0e9f941ce33 100644 --- a/massa-execution-exports/Cargo.toml +++ b/massa-execution-exports/Cargo.toml @@ -18,7 +18,7 @@ tokio = { version = "1.23", features = ["sync"] } mockall = { version = "0.11.4", optional = true} # custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} massa_hash = { path = "../massa-hash" } massa_models = { path = "../massa-models" } massa_time = { path = "../massa-time" } @@ -27,7 +27,7 @@ massa_final_state = { path = "../massa-final-state" } massa_ledger_exports = { path = "../massa-ledger-exports", optional = true } massa_module_cache = { path = "../massa-module-cache" } massa_versioning = { path = "../massa-versioning" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1_thomas2" } +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1" } [dev-dependencies] mockall = "0.11.4" diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index 9e7b45f3487..ea2eae9364c 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -30,7 +30,7 @@ massa_execution_exports = { path = "../massa-execution-exports" } massa_models = { path = "../massa-models" } massa_storage = { path = "../massa-storage" } massa_hash = { path = 
"../massa-hash" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1_thomas2" } +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1" } massa_metrics = { path = "../massa-metrics" } massa_module_cache = { path = "../massa-module-cache" } massa_signature = { path = "../massa-signature" } diff --git a/massa-final-state/Cargo.toml b/massa-final-state/Cargo.toml index 43c2084a33a..d8acbc931e7 100644 --- a/massa-final-state/Cargo.toml +++ b/massa-final-state/Cargo.toml @@ -21,7 +21,7 @@ massa_async_pool = { path = "../massa-async-pool" } massa_serialization = { path = "../massa-serialization" } massa_pos_exports = { path = "../massa-pos-exports" } massa_db_exports = { path = "../massa-db-exports" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} massa_versioning = { path = "../massa-versioning" } massa_time = { path = "../massa-time" } diff --git a/massa-grpc/Cargo.toml b/massa-grpc/Cargo.toml index fbcf499e297..0d1636409b5 100644 --- a/massa-grpc/Cargo.toml +++ b/massa-grpc/Cargo.toml @@ -10,7 +10,7 @@ homepage = "https://massa.net" documentation = "https://docs.massa.net/" [dependencies] -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} displaydoc = "0.2" thiserror = "1.0" tonic = { version = "0.9.2", features = ["gzip", "tls"] } diff --git a/massa-grpc/src/api.rs b/massa-grpc/src/api.rs index 105014adae9..02b270eb8b0 100644 --- a/massa-grpc/src/api.rs +++ b/massa-grpc/src/api.rs @@ -275,6 +275,8 @@ pub(crate) fn 
get_largest_stakers( // Create the context for the response. let context = Some(grpc_api::LargestStakersContext { slot: Some(cur_slot.into()), + // IMPORTANT TODO: tmp value because testnet_24 and massa proto latest are not synced atm + in_downtime: false, }); // Get the list of stakers, filtered by the specified minimum and maximum roll counts. diff --git a/massa-ledger-exports/Cargo.toml b/massa-ledger-exports/Cargo.toml index 21d6f4d6ffc..1b4672fcc05 100644 --- a/massa-ledger-exports/Cargo.toml +++ b/massa-ledger-exports/Cargo.toml @@ -14,7 +14,7 @@ nom = "=7.1" num_enum = "0.5.10" # custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} massa_hash = { path = "../massa-hash" } massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } diff --git a/massa-models/Cargo.toml b/massa-models/Cargo.toml index 6263ff52901..d56422512db 100644 --- a/massa-models/Cargo.toml +++ b/massa-models/Cargo.toml @@ -18,7 +18,7 @@ config = "0.13" bs58 = { version = "=0.4", features = ["check"] } bitvec = { version = "=1.0", features = ["serde"] } nom = "=7.1" -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} # custom modules massa_hash = { path = "../massa-hash" } diff --git a/massa-module-cache/Cargo.toml b/massa-module-cache/Cargo.toml index 6eeb3e7ce78..7ade4032dd6 100644 --- a/massa-module-cache/Cargo.toml +++ b/massa-module-cache/Cargo.toml @@ -18,7 +18,7 @@ rocksdb = "0.20" massa_hash = { path = "../massa-hash" } massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } 
-massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1_thomas2", features = [ +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1", features = [ "testing", ] } diff --git a/massa-sdk/Cargo.toml b/massa-sdk/Cargo.toml index 474fcd4b22e..6e14cd6becb 100644 --- a/massa-sdk/Cargo.toml +++ b/massa-sdk/Cargo.toml @@ -14,4 +14,4 @@ tracing = {version = "0.1", features = ["log"]} massa_api_exports = { path = "../massa-api-exports" } massa_models = { path = "../massa-models" } massa_time = { path = "../massa-time" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} diff --git a/massa-versioning/Cargo.toml b/massa-versioning/Cargo.toml index 3994f3353b6..a41ebf96d69 100644 --- a/massa-versioning/Cargo.toml +++ b/massa-versioning/Cargo.toml @@ -19,7 +19,7 @@ massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } massa_hash = { path = "../massa-hash" } massa_signature = { path = "../massa-signature" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"] } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} massa_db_exports = { path = "../massa-db-exports" } [dev-dependencies] From 6502b59be60bc22017521261a935da50209feb6d Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Fri, 30 Jun 2023 13:10:37 +0200 Subject: [PATCH 12/71] minor correction --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 38553833bb4..9bd89250a06 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,7 +47,7 @@ resolver = 
"2" # From https://doc.rust-lang.org/cargo/reference/profiles.html#overrides [profile.dev.package."*"] -opt-level = 3 # Speed-up the +opt-level = 3 # Speed-up the CI # # Features # From d6339405c716845f24d7c1f91de4d5e51adae4ab Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Fri, 30 Jun 2023 13:47:32 +0200 Subject: [PATCH 13/71] cargo clippy fix --- massa-execution-worker/src/interface_impl.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index ab30b10c70c..727594939bc 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -626,7 +626,7 @@ impl Interface for InterfaceImpl { // build the message let prefix = format!("\x19Ethereum Signed Message:\n{}", message_.len()); let to_hash = [prefix.as_bytes(), message_].concat(); - let full_hash = sha3::Keccak256::digest(&to_hash); + let full_hash = sha3::Keccak256::digest(to_hash); let message = libsecp256k1::Message::parse_slice(&full_hash).unwrap(); // parse the signature as being (r, s, v) From b967703c40bad7b400889c524417141a8001a070 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Fri, 30 Jun 2023 14:21:41 +0200 Subject: [PATCH 14/71] Change the interface to sync with ABI changes (#4170) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Update interface impl for leo's changes Depends on feature/Improve_ABI_types_in_wasmv1 branch for massa-sc-runtime and massa-proto-rs Signed-off-by: Jean-François Morcillo Update target branch Implemented the interface for my new ABIs Added prefix to get_op_keys_wasmv1 Update target branch Update in_downtime context value Add some unit tests Use helper function to get address Added comments and deprecation notice Target new runtime rev to have consistant proto-rs targets Simplified a bit the prefix logic Based on review comments * Fix after rebase --------- Co-authored-by: 
Jean-François Morcillo --- Cargo.lock | 1 + massa-execution-worker/Cargo.toml | 1 + massa-execution-worker/src/interface_impl.rs | 452 ++++++++++++++++--- massa-grpc/src/api.rs | 9 +- 4 files changed, 404 insertions(+), 59 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 10adeca9783..feed3acc3b1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2819,6 +2819,7 @@ dependencies = [ "criterion", "hex-literal", "libsecp256k1", + "massa-proto-rs", "massa-sc-runtime", "massa_async_pool", "massa_channel", diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index ea2eae9364c..ffc65f50ea8 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -31,6 +31,7 @@ massa_models = { path = "../massa-models" } massa_storage = { path = "../massa-storage" } massa_hash = { path = "../massa-hash" } massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1" } +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96"} massa_metrics = { path = "../massa-metrics" } massa_module_cache = { path = "../massa-module-cache" } massa_signature = { path = "../massa-signature" } diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 727594939bc..c9b4f37fc89 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -15,8 +15,10 @@ use massa_models::config::MAX_DATASTORE_KEY_LENGTH; use massa_models::{ address::Address, amount::Amount, slot::Slot, timeslots::get_block_slot_timestamp, }; +use massa_proto_rs::massa::model::v1::NativeAmount; use massa_sc_runtime::RuntimeModule; use massa_sc_runtime::{Interface, InterfaceClone}; + use parking_lot::Mutex; use rand::Rng; use sha2::{Digest, Sha256}; @@ -140,6 +142,18 @@ impl InterfaceClone for InterfaceImpl { } } +/// Helper function to get the address from the option given as argument to some 
ABIs +/// Fallback to the current context address if not provided. +fn get_address_from_opt_or_context( + context: &ExecutionContext, + option_address_string: Option, +) -> Result
{ + match option_address_string { + Some(address_string) => Address::from_str(&address_string).map_err(|e| e.into()), + None => context.get_current_address().map_err(|e| e.into()), + } +} + /// Implementation of the Interface trait providing functions for massa-sc-runtime to call /// in order to interact with the execution context during bytecode execution. /// See the massa-sc-runtime crate for a functional description of the trait and its methods. @@ -244,6 +258,8 @@ impl Interface for InterfaceImpl { /// # Returns /// The raw representation (no decimal factor) of the balance of the address, /// or zero if the address is not found in the ledger. + /// + /// [DeprecatedByNewRuntime] Replaced by `get_balance_wasmv1` fn get_balance(&self) -> Result { let context = context_guard!(self); let address = context.get_current_address()?; @@ -258,6 +274,8 @@ impl Interface for InterfaceImpl { /// # Returns /// The raw representation (no decimal factor) of the balance of the address, /// or zero if the address is not found in the ledger. + /// + /// [DeprecatedByNewRuntime] Replaced by `get_balance_wasmv1` fn get_balance_for(&self, address: &str) -> Result { let address = massa_models::address::Address::from_str(address)?; Ok(context_guard!(self) @@ -266,6 +284,33 @@ impl Interface for InterfaceImpl { .to_raw()) } + /// Gets the balance of arbitrary address passed as argument, or the balance of the current address if no argument is passed. + /// + /// # Arguments + /// * address: string representation of the address for which to get the balance + /// + /// # Returns + /// The raw representation (no decimal factor) of the balance of the address, + /// or zero if the address is not found in the ledger. 
+ fn get_balance_wasmv1( + &self, + address: Option, + ) -> Result { + let context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + let amount = context.get_balance(&address).unwrap_or_default(); + let (mantissa, scale) = self + .amount_to_mantissa_scale(amount.to_raw()) + .unwrap_or_default(); + let native_amount = NativeAmount { + mandatory_mantissa: Some(mantissa), + mandatory_scale: Some(scale), + }; + + Ok(native_amount) + } + /// Creates a new ledger entry with the initial bytecode given as argument. /// A new unique address is generated for that entry and returned. /// @@ -285,6 +330,8 @@ impl Interface for InterfaceImpl { /// /// # Returns /// A list of keys (keys are byte arrays) + /// + /// [DeprecatedByNewRuntime] Replaced by `get_keys_wasmv1` fn get_keys(&self, prefix_opt: Option<&[u8]>) -> Result>> { let context = context_guard!(self); let addr = context.get_current_address()?; @@ -302,6 +349,8 @@ impl Interface for InterfaceImpl { /// /// # Returns /// A list of keys (keys are byte arrays) + /// + /// [DeprecatedByNewRuntime] Replaced by `get_keys_wasmv1` fn get_keys_for(&self, address: &str, prefix_opt: Option<&[u8]>) -> Result>> { let addr = &Address::from_str(address)?; let context = context_guard!(self); @@ -315,6 +364,42 @@ impl Interface for InterfaceImpl { } } + /// Get the datastore keys (aka entries) for a given address, or the current address if none is provided + /// + /// # Returns + /// A list of keys (keys are byte arrays) + fn get_keys_wasmv1(&self, prefix: &[u8], address: Option) -> Result>> { + let context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + match (context.get_keys(&address), prefix) { + (Some(mut value), prefix) if !prefix.is_empty() => { + value.retain(|key| key.iter().zip(prefix.iter()).all(|(k, p)| k == p)); + Ok(value) + } + (Some(value), _) => Ok(value), + _ => bail!("data entry not found"), + } + } + + /// Gets a 
datastore value by key for the current address (top of the call stack). + /// + /// # Arguments + /// * key: string key of the datastore entry to retrieve + /// + /// # Returns + /// The datastore value matching the provided key, if found, otherwise an error. + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_get_data_wasmv1` + fn raw_get_data(&self, key: &[u8]) -> Result> { + let context = context_guard!(self); + let addr = context.get_current_address()?; + match context.get_data_entry(&addr, key) { + Some(data) => Ok(data), + _ => bail!("data entry not found"), + } + } + /// Gets a datastore value by key for a given address. /// /// # Arguments @@ -323,6 +408,8 @@ impl Interface for InterfaceImpl { /// /// # Returns /// The datastore value matching the provided key, if found, otherwise an error. + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_get_data_wasmv1` fn raw_get_data_for(&self, address: &str, key: &[u8]) -> Result> { let addr = &massa_models::address::Address::from_str(address)?; let context = context_guard!(self); @@ -332,114 +419,156 @@ impl Interface for InterfaceImpl { } } - /// Sets a datastore entry for a given address. + /// Gets a datastore value by key for a given address, or the current address if none is provided. + /// + /// # Arguments + /// * address: string representation of the address + /// * key: string key of the datastore entry to retrieve + /// + /// # Returns + /// The datastore value matching the provided key, if found, otherwise an error. + fn raw_get_data_wasmv1(&self, key: &[u8], address: Option) -> Result> { + let context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + match context.get_data_entry(&address, key) { + Some(value) => Ok(value), + _ => bail!("data entry not found"), + } + } + + /// Sets a datastore entry for the current address (top of the call stack). /// Fails if the address does not exist. - /// Creates the entry if it does not exist. 
+ /// Creates the entry if does not exist. /// /// # Arguments /// * address: string representation of the address /// * key: string key of the datastore entry to set /// * value: new value to set - fn raw_set_data_for(&self, address: &str, key: &[u8], value: &[u8]) -> Result<()> { - let addr = massa_models::address::Address::from_str(address)?; + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_set_data_wasmv1` + fn raw_set_data(&self, key: &[u8], value: &[u8]) -> Result<()> { let mut context = context_guard!(self); + let addr = context.get_current_address()?; context.set_data_entry(&addr, key.to_vec(), value.to_vec())?; Ok(()) } - /// Appends a value to a datastore entry for a given address. - /// Fails if the entry or address does not exist. + /// Sets a datastore entry for a given address. + /// Fails if the address does not exist. + /// Creates the entry if it does not exist. /// /// # Arguments /// * address: string representation of the address - /// * key: string key of the datastore entry - /// * value: value to append - fn raw_append_data_for(&self, address: &str, key: &[u8], value: &[u8]) -> Result<()> { + /// * key: string key of the datastore entry to set + /// * value: new value to set + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_set_data_wasmv1` + fn raw_set_data_for(&self, address: &str, key: &[u8], value: &[u8]) -> Result<()> { let addr = massa_models::address::Address::from_str(address)?; - context_guard!(self).append_data_entry(&addr, key.to_vec(), value.to_vec())?; + let mut context = context_guard!(self); + context.set_data_entry(&addr, key.to_vec(), value.to_vec())?; Ok(()) } - /// Deletes a datastore entry by key for a given address. 
+ fn raw_set_data_wasmv1(&self, key: &[u8], value: &[u8], address: Option) -> Result<()> { + let mut context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + context.set_data_entry(&address, key.to_vec(), value.to_vec())?; + Ok(()) + } + + /// Appends data to a datastore entry for the current address (top of the call stack). /// Fails if the address or entry does not exist. /// /// # Arguments /// * address: string representation of the address - /// * key: string key of the datastore entry to delete - fn raw_delete_data_for(&self, address: &str, key: &[u8]) -> Result<()> { - let addr = &massa_models::address::Address::from_str(address)?; - context_guard!(self).delete_data_entry(addr, key)?; + /// * key: string key of the datastore entry + /// * value: value to append + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_append_data_wasmv1` + fn raw_append_data(&self, key: &[u8], value: &[u8]) -> Result<()> { + let mut context = context_guard!(self); + let addr = context.get_current_address()?; + context.append_data_entry(&addr, key.to_vec(), value.to_vec())?; Ok(()) } - /// Checks if a datastore entry exists for a given address. + /// Appends a value to a datastore entry for a given address. + /// Fails if the entry or address does not exist. 
/// /// # Arguments /// * address: string representation of the address - /// * key: string key of the datastore entry to retrieve + /// * key: string key of the datastore entry + /// * value: value to append /// - /// # Returns - /// true if the address exists and has the entry matching the provided key in its datastore, otherwise false - fn has_data_for(&self, address: &str, key: &[u8]) -> Result { + /// [DeprecatedByNewRuntime] Replaced by `raw_append_data_wasmv1` + fn raw_append_data_for(&self, address: &str, key: &[u8], value: &[u8]) -> Result<()> { let addr = massa_models::address::Address::from_str(address)?; - let context = context_guard!(self); - Ok(context.has_data_entry(&addr, key)) + context_guard!(self).append_data_entry(&addr, key.to_vec(), value.to_vec())?; + Ok(()) } - /// Gets a datastore value by key for the current address (top of the call stack). + /// Appends a value to a datastore entry for a given address, or the current address if none is provided + /// Fails if the entry or address does not exist. /// /// # Arguments - /// * key: string key of the datastore entry to retrieve - /// - /// # Returns - /// The datastore value matching the provided key, if found, otherwise an error. - fn raw_get_data(&self, key: &[u8]) -> Result> { - let context = context_guard!(self); - let addr = context.get_current_address()?; - match context.get_data_entry(&addr, key) { - Some(data) => Ok(data), - _ => bail!("data entry not found"), - } + /// * address: string representation of the address + /// * key: string key of the datastore entry + /// * value: value to append + fn raw_append_data_wasmv1( + &self, + key: &[u8], + value: &[u8], + address: Option, + ) -> Result<()> { + let mut context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + context.append_data_entry(&address, key.to_vec(), value.to_vec())?; + Ok(()) } - /// Sets a datastore entry for the current address (top of the call stack). 
- /// Fails if the address does not exist. - /// Creates the entry if does not exist. + /// Deletes a datastore entry by key for the current address (top of the call stack). + /// Fails if the address or entry does not exist. /// /// # Arguments - /// * address: string representation of the address - /// * key: string key of the datastore entry to set - /// * value: new value to set - fn raw_set_data(&self, key: &[u8], value: &[u8]) -> Result<()> { + /// * key: string key of the datastore entry to delete + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_delete_data_wasmv1` + fn raw_delete_data(&self, key: &[u8]) -> Result<()> { let mut context = context_guard!(self); let addr = context.get_current_address()?; - context.set_data_entry(&addr, key.to_vec(), value.to_vec())?; + context.delete_data_entry(&addr, key)?; Ok(()) } - /// Appends data to a datastore entry for the current address (top of the call stack). + /// Deletes a datastore entry by key for a given address. /// Fails if the address or entry does not exist. /// /// # Arguments /// * address: string representation of the address - /// * key: string key of the datastore entry - /// * value: value to append - fn raw_append_data(&self, key: &[u8], value: &[u8]) -> Result<()> { - let mut context = context_guard!(self); - let addr = context.get_current_address()?; - context.append_data_entry(&addr, key.to_vec(), value.to_vec())?; + /// * key: string key of the datastore entry to delete + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_delete_data_wasmv1` + fn raw_delete_data_for(&self, address: &str, key: &[u8]) -> Result<()> { + let addr = &massa_models::address::Address::from_str(address)?; + context_guard!(self).delete_data_entry(addr, key)?; Ok(()) } - /// Deletes a datastore entry by key for the current address (top of the call stack). + /// Deletes a datastore entry by key for a given address, or the current address if none is provided. /// Fails if the address or entry does not exist. 
/// /// # Arguments + /// * address: string representation of the address /// * key: string key of the datastore entry to delete - fn raw_delete_data(&self, key: &[u8]) -> Result<()> { + fn raw_delete_data_wasmv1(&self, key: &[u8], address: Option) -> Result<()> { let mut context = context_guard!(self); - let addr = context.get_current_address()?; - context.delete_data_entry(&addr, key)?; + let address = get_address_from_opt_or_context(&context, address)?; + + context.delete_data_entry(&address, key)?; Ok(()) } @@ -450,12 +579,45 @@ impl Interface for InterfaceImpl { /// /// # Returns /// true if the address exists and has the entry matching the provided key in its datastore, otherwise false + /// + /// [DeprecatedByNewRuntime] Replaced by `has_data_wasmv1` fn has_data(&self, key: &[u8]) -> Result { let context = context_guard!(self); let addr = context.get_current_address()?; Ok(context.has_data_entry(&addr, key)) } + /// Checks if a datastore entry exists for a given address. + /// + /// # Arguments + /// * address: string representation of the address + /// * key: string key of the datastore entry to retrieve + /// + /// # Returns + /// true if the address exists and has the entry matching the provided key in its datastore, otherwise false + /// + /// [DeprecatedByNewRuntime] Replaced by `has_data_wasmv1` + fn has_data_for(&self, address: &str, key: &[u8]) -> Result { + let addr = massa_models::address::Address::from_str(address)?; + let context = context_guard!(self); + Ok(context.has_data_entry(&addr, key)) + } + + /// Checks if a datastore entry exists for a given address, or the current address if none is provided. 
+ /// + /// # Arguments + /// * address: string representation of the address + /// * key: string key of the datastore entry to retrieve + /// + /// # Returns + /// true if the address exists and has the entry matching the provided key in its datastore, otherwise false + fn has_data_wasmv1(&self, key: &[u8], address: Option) -> Result { + let context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + Ok(context.has_data_entry(&address, key)) + } + /// Check whether or not the caller has write access in the current context /// /// # Returns @@ -477,6 +639,8 @@ impl Interface for InterfaceImpl { } /// Returns bytecode of the current address + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_get_bytecode_wasmv1` fn raw_get_bytecode(&self) -> Result> { let context = context_guard!(self); let address = context.get_current_address()?; @@ -487,6 +651,8 @@ impl Interface for InterfaceImpl { } /// Returns bytecode of the target address + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_get_bytecode_wasmv1` fn raw_get_bytecode_for(&self, address: &str) -> Result> { let context = context_guard!(self); let address = Address::from_str(address)?; @@ -496,11 +662,24 @@ impl Interface for InterfaceImpl { } } + /// Returns bytecode of the target address, or the current address if not provided + fn raw_get_bytecode_wasmv1(&self, address: Option) -> Result> { + let context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + match context.get_bytecode(&address) { + Some(bytecode) => Ok(bytecode.0), + _ => bail!("bytecode not found"), + } + } + /// Get the operation datastore keys (aka entries). /// Note that the datastore is only accessible to the initial caller level. 
/// /// # Returns /// A list of keys (keys are byte arrays) + /// + /// [DeprecatedByNewRuntime] Replaced by `get_op_keys_wasmv1` fn get_op_keys(&self) -> Result>> { let context = context_guard!(self); let stack = context.stack.last().ok_or_else(|| anyhow!("No stack"))?; @@ -512,6 +691,48 @@ impl Interface for InterfaceImpl { Ok(keys) } + /// Get the operation datastore keys (aka entries). + /// Note that the datastore is only accessible to the initial caller level. + /// + /// # Returns + /// A list of keys (keys are byte arrays) that match the given prefix + fn get_op_keys_wasmv1(&self, prefix: &[u8]) -> Result>> { + // compute prefix range + let prefix_range = if !prefix.is_empty() { + // compute end of prefix range + let mut prefix_end = prefix.to_vec(); + while let Some(255) = prefix_end.last() { + prefix_end.pop(); + } + if let Some(v) = prefix_end.last_mut() { + *v += 1; + } + ( + std::ops::Bound::Included(prefix.to_vec()), + if prefix_end.is_empty() { + std::ops::Bound::Unbounded + } else { + std::ops::Bound::Excluded(prefix_end) + }, + ) + } else { + (std::ops::Bound::Unbounded, std::ops::Bound::Unbounded) + }; + let range_ref = (prefix_range.0.as_ref(), prefix_range.1.as_ref()); + + let context = context_guard!(self); + let stack = context.stack.last().ok_or_else(|| anyhow!("No stack"))?; + let datastore = stack + .operation_datastore + .as_ref() + .ok_or_else(|| anyhow!("No datastore in stack"))?; + let keys = datastore + .range::, _>(range_ref) + .map(|(k, _v)| k.clone()) + .collect(); + Ok(keys) + } + /// Checks if an operation datastore entry exists in the operation datastore. /// Note that the datastore is only accessible to the initial caller level. 
/// @@ -651,6 +872,8 @@ impl Interface for InterfaceImpl { /// # Arguments /// * `to_address`: string representation of the address to which the coins are sent /// * `raw_amount`: raw representation (no decimal factor) of the amount of coins to transfer + /// + /// [DeprecatedByNewRuntime] Replaced by `transfer_coins_wasmv1` fn transfer_coins(&self, to_address: &str, raw_amount: u64) -> Result<()> { let to_address = Address::from_str(to_address)?; let amount = Amount::from_raw(raw_amount); @@ -666,6 +889,8 @@ impl Interface for InterfaceImpl { /// * `from_address`: string representation of the address that is sending the coins /// * `to_address`: string representation of the address to which the coins are sent /// * `raw_amount`: raw representation (no decimal factor) of the amount of coins to transfer + /// + /// [DeprecatedByNewRuntime] Replaced by `transfer_coins_wasmv1` fn transfer_coins_for( &self, from_address: &str, @@ -680,6 +905,36 @@ impl Interface for InterfaceImpl { Ok(()) } + /// Transfer coins from a given address (or the current address if not specified) towards a target address. 
+ /// + /// # Arguments + /// * `to_address`: string representation of the address to which the coins are sent + /// * `raw_amount`: raw representation (no decimal factor) of the amount of coins to transfer + /// * `from_address`: string representation of the address that is sending the coins + fn transfer_coins_wasmv1( + &self, + to_address: String, + raw_amount: NativeAmount, + from_address: Option, + ) -> Result<()> { + let to_address = Address::from_str(&to_address)?; + let amount = Amount::from_mantissa_scale( + raw_amount + .mandatory_mantissa + .ok_or(anyhow!("No mantissa provided"))?, + raw_amount + .mandatory_scale + .ok_or(anyhow!("No scale provided"))?, + )?; + let mut context = context_guard!(self); + let from_address = match from_address { + Some(from_address) => Address::from_str(&from_address)?, + None => context.get_current_address()?, + }; + context.transfer_coins(Some(from_address), Some(to_address), amount, true)?; + Ok(()) + } + /// Returns the list of owned addresses (top of the call stack). 
/// Those addresses are the ones the current execution context has write access to, /// typically it includes the current address itself, @@ -838,18 +1093,30 @@ impl Interface for InterfaceImpl { } /// Returns the period of the current execution slot + /// + /// [DeprecatedByNewRuntime] Replaced by `get_current_slot` fn get_current_period(&self) -> Result { let slot = context_guard!(self).slot; Ok(slot.period) } /// Returns the thread of the current execution slot + /// + /// [DeprecatedByNewRuntime] Replaced by `get_current_slot` fn get_current_thread(&self) -> Result { let slot = context_guard!(self).slot; Ok(slot.thread) } + /// Returns the current execution slot + fn get_current_slot(&self) -> Result { + let slot_models = context_guard!(self).slot; + Ok(slot_models.into()) + } + /// Sets the bytecode of the current address + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_set_bytecode_wasmv1` fn raw_set_bytecode(&self, bytecode: &[u8]) -> Result<()> { let mut execution_context = context_guard!(self); let address = execution_context.get_current_address()?; @@ -861,8 +1128,10 @@ impl Interface for InterfaceImpl { /// Sets the bytecode of an arbitrary address. /// Fails if the address does not exist, is an user address, or if the context doesn't have write access rights on it. + /// + /// [DeprecatedByNewRuntime] Replaced by `raw_set_bytecode_wasmv1` fn raw_set_bytecode_for(&self, address: &str, bytecode: &[u8]) -> Result<()> { - let address = massa_models::address::Address::from_str(address)?; + let address: Address = massa_models::address::Address::from_str(address)?; let mut execution_context = context_guard!(self); match execution_context.set_bytecode(&address, Bytecode(bytecode.to_vec())) { Ok(()) => Ok(()), @@ -870,19 +1139,92 @@ impl Interface for InterfaceImpl { } } + /// Sets the bytecode of an arbitrary address, or the current address if not provided. 
+ /// Fails if the address does not exist, is an user address, or if the context doesn't have write access rights on it. + fn raw_set_bytecode_wasmv1(&self, bytecode: &[u8], address: Option) -> Result<()> { + let mut context = context_guard!(self); + let address = get_address_from_opt_or_context(&context, address)?; + + match context.set_bytecode(&address, Bytecode(bytecode.to_vec())) { + Ok(()) => Ok(()), + Err(err) => bail!("couldn't set address {} bytecode: {}", address, err), + } + } + /// Hashes givens byte array with sha256 /// /// # Arguments /// * bytes: byte array to hash /// /// # Returns - /// The vector of bytes representation of the resulting hash + /// The byte array of the resulting hash fn hash_sha256(&self, bytes: &[u8]) -> Result<[u8; 32]> { let mut hasher = Sha256::new(); hasher.update(bytes); let hash = hasher.finalize().into(); Ok(hash) } + + /// Hashes givens byte array with blake3 + /// + /// # Arguments + /// * bytes: byte array to hash + /// + /// # Returns + /// The byte array of the resulting hash + fn blake3_hash(&self, bytes: &[u8]) -> Result<[u8; 32]> { + let hash = massa_hash::Hash::compute_from(bytes); + Ok(hash.into_bytes()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use massa_models::address::Address; + use massa_signature::KeyPair; + + // Tests the get_keys_wasmv1 interface method used by the updated get_keys abi. 
+ #[test] + fn test_get_keys() { + let sender_addr = Address::from_public_key(&KeyPair::generate(0).unwrap().get_public_key()); + let interface = InterfaceImpl::new_default(sender_addr, None); + + interface + .raw_set_data_wasmv1(b"k1", b"v1", Some(sender_addr.to_string())) + .unwrap(); + interface + .raw_set_data_wasmv1(b"k2", b"v2", Some(sender_addr.to_string())) + .unwrap(); + interface + .raw_set_data_wasmv1(b"l3", b"v3", Some(sender_addr.to_string())) + .unwrap(); + + let keys = interface.get_keys_wasmv1(b"k", None).unwrap(); + + assert_eq!(keys.len(), 2); + assert!(keys.contains(b"k1".as_slice())); + assert!(keys.contains(b"k2".as_slice())); + } + + // Tests the get_op_keys_wasmv1 interface method used by the updated get_op_keys abi. + #[test] + fn test_get_op_keys() { + let sender_addr = Address::from_public_key(&KeyPair::generate(0).unwrap().get_public_key()); + + let mut operation_datastore = Datastore::new(); + operation_datastore.insert(b"k1".to_vec(), b"v1".to_vec()); + operation_datastore.insert(b"k2".to_vec(), b"v2".to_vec()); + operation_datastore.insert(b"l3".to_vec(), b"v3".to_vec()); + + let interface = InterfaceImpl::new_default(sender_addr, Some(operation_datastore)); + + let op_keys = interface.get_op_keys_wasmv1(b"k").unwrap(); + + assert_eq!(op_keys.len(), 2); + assert!(op_keys.contains(&b"k1".to_vec())); + assert!(op_keys.contains(&b"k2".to_vec())); + } } #[test] diff --git a/massa-grpc/src/api.rs b/massa-grpc/src/api.rs index 02b270eb8b0..fb295153ac2 100644 --- a/massa-grpc/src/api.rs +++ b/massa-grpc/src/api.rs @@ -258,25 +258,26 @@ pub(crate) fn get_largest_stakers( now, ); - let (cur_cycle, cur_slot) = match latest_block_slot_at_timestamp_result { + let (cur_cycle, cur_slot, in_downtime) = match latest_block_slot_at_timestamp_result { Ok(Some(cur_slot)) if cur_slot.period <= grpc.grpc_config.last_start_period => ( Slot::new(grpc.grpc_config.last_start_period, 0) .get_cycle(grpc.grpc_config.periods_per_cycle), cur_slot, + true, ), 
Ok(Some(cur_slot)) => ( cur_slot.get_cycle(grpc.grpc_config.periods_per_cycle), cur_slot, + false, ), - Ok(None) => (0, Slot::new(0, 0)), + Ok(None) => (0, Slot::new(0, 0), false), Err(e) => return Err(GrpcError::ModelsError(e)), }; // Create the context for the response. let context = Some(grpc_api::LargestStakersContext { slot: Some(cur_slot.into()), - // IMPORTANT TODO: tmp value because testnet_24 and massa proto latest are not synced atm - in_downtime: false, + in_downtime, }); // Get the list of stakers, filtered by the specified minimum and maximum roll counts. From 821c9a75a7804b9ab7ca1a326fe081c62f17e5d0 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Fri, 30 Jun 2023 15:45:46 +0200 Subject: [PATCH 15/71] improve verify_evm_signature --- massa-execution-worker/src/interface_impl.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index c9b4f37fc89..def894bfca4 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -838,6 +838,11 @@ impl Interface for InterfaceImpl { message_: &[u8], public_key_: &[u8], ) -> Result { + // check the signature length + if signature_.len() != 65 { + return Err(anyhow!("invalid signature length")); + } + // parse the public key let public_key = libsecp256k1::PublicKey::parse_slice( public_key_, @@ -847,8 +852,9 @@ impl Interface for InterfaceImpl { // build the message let prefix = format!("\x19Ethereum Signed Message:\n{}", message_.len()); let to_hash = [prefix.as_bytes(), message_].concat(); - let full_hash = sha3::Keccak256::digest(to_hash); - let message = libsecp256k1::Message::parse_slice(&full_hash).unwrap(); + let full_hash = sha3::Keccak256::digest(&to_hash); + let message = libsecp256k1::Message::parse_slice(&full_hash) + .expect("message could not be parsed from a hash slice"); // parse the signature as being (r, s, v) // r is the R.x 
value of the signature's R point (32 bytes) @@ -856,7 +862,7 @@ impl Interface for InterfaceImpl { // v is a recovery parameter used to ease the signature verification (1 byte) // we ignore the recovery parameter here // see test_evm_verify for an example of its usage - let signature = libsecp256k1::Signature::parse_standard_slice(&signature_[..64]).unwrap(); + let signature = libsecp256k1::Signature::parse_standard_slice(&signature_[..64])?; // verify the signature Ok(libsecp256k1::verify(&message, &signature, &public_key)) From 332520b90211e9d3e0c4b32e34f832a715c9f93c Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Fri, 30 Jun 2023 18:08:25 +0200 Subject: [PATCH 16/71] Fix after merge --- Cargo.lock | 210 ++++++++++--------- massa-execution-worker/src/interface_impl.rs | 19 +- 2 files changed, 112 insertions(+), 117 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8e2327281b4..b77a9fa741a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" dependencies = [ "gimli 0.27.3", ] @@ -177,7 +177,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -188,7 +188,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -256,16 +256,16 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.68" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ "addr2line", "cc", "cfg-if", "libc", - "miniz_oxide 0.6.2", - "object 0.30.4", + "miniz_oxide", + "object 0.31.1", "rustc-demangle", ] @@ -309,7 +309,7 @@ dependencies = [ "lazycell", "peeking_take_while", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "regex", "rustc-hash", "shlex", @@ -403,7 +403,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -414,7 +414,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -461,7 +461,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -1000,7 +1000,7 @@ dependencies = [ "fnv", "ident_case", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "strsim 0.10.0", "syn 1.0.109", ] @@ -1014,7 +1014,7 @@ dependencies = [ "fnv", "ident_case", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "strsim 0.10.0", "syn 2.0.22", ] @@ -1026,7 +1026,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core 0.14.4", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -1037,7 +1037,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a" dependencies = [ "darling_core 
0.20.1", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -1061,7 +1061,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -1151,7 +1151,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -1178,7 +1178,7 @@ dependencies = [ "lazy_static", "proc-macro-error", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -1251,7 +1251,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -1263,7 +1263,7 @@ checksum = "a8ea75f31022cba043afe037940d73684327e915f88f62478e778c3de914cd0a" dependencies = [ "enum_delegate_lib", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -1274,7 +1274,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e1f6c3800b304a6be0012039e2a45a322a093539c45ab818d9e6895a39c90fe" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "rand 0.8.5", "syn 1.0.109", ] @@ -1296,7 +1296,7 @@ checksum = "e08b6c6ab82d70f08844964ba10c7babb716de2ecaeab9be5717918a5177d3af" dependencies = [ "darling 0.20.1", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -1369,12 +1369,12 @@ dependencies = [ [[package]] name = "fd-lock" -version = "3.0.12" +version = "3.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ae6b3d9530211fb3b12a95374b8b0823be812f53d09e18c5675c0146b09642" +checksum = 
"ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ "cfg-if", - "rustix 0.37.20", + "rustix 0.38.1", "windows-sys 0.48.0", ] @@ -1391,7 +1391,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" dependencies = [ "crc32fast", - "miniz_oxide 0.7.1", + "miniz_oxide", ] [[package]] @@ -1500,7 +1500,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -1768,15 +1768,6 @@ dependencies = [ "libc", ] -[[package]] -name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.1" @@ -2151,7 +2142,7 @@ dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -2354,6 +2345,12 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + [[package]] name = "lock_api" version = "0.4.10" @@ -2386,7 +2383,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0fbfc88337168279f2e9ae06e157cfed4efd3316e14dc96ed074d4f2e6c5952" dependencies = [ - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -2526,7 +2523,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = 
"git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#42d0fafef13572a4ecd5f61483e88afbbdeef940" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#2757addf909d95c0bf537024592fa9859c3a12fe" dependencies = [ "anyhow", "as-ffi-bindings", @@ -3384,15 +3381,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" -[[package]] -name = "miniz_oxide" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" -dependencies = [ - "adler", -] - [[package]] name = "miniz_oxide" version = "0.7.1" @@ -3437,7 +3425,7 @@ checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -3600,11 +3588,11 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi 0.3.1", "libc", ] @@ -3634,7 +3622,7 @@ checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -3646,7 +3634,7 @@ checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -3664,9 +3652,9 @@ dependencies = [ [[package]] name = "object" -version = "0.30.4" +version = "0.31.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" dependencies = [ "memchr", ] @@ -3752,7 +3740,7 @@ dependencies = [ "redox_syscall 0.3.5", "smallvec", "thread-id", - "windows-targets 0.48.0", + "windows-targets 0.48.1", ] [[package]] @@ -3789,7 +3777,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f35583365be5d148e959284f42526841917b7bfa09e2d1a7ad5dde2cf0eaa39" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -3868,7 +3856,7 @@ dependencies = [ "pest", "pest_meta", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -3895,21 +3883,21 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c95a7476719eab1e366eaf73d0260af3021184f18177925b07f54b30089ceead" +checksum = "6e138fdd8263907a2b0e1b4e80b7e58c721126479b6e6eedfb1b402acea7b9bd" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39407670928234ebc5e6e580247dd567ad73a3578460c5990f9503df207e8f07" +checksum = "d1fef411b303e3e12d534fb6e7852de82da56edd937d895125821fb7c09436c7" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -4044,7 +4032,7 @@ checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", "version_check", ] @@ -4056,7 +4044,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ "proc-macro2 
1.0.63", - "quote 1.0.28", + "quote 1.0.29", "version_check", ] @@ -4149,7 +4137,7 @@ dependencies = [ "anyhow", "itertools", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -4184,7 +4172,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -4216,9 +4204,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.28" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ "proc-macro2 1.0.63", ] @@ -4470,7 +4458,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -4551,9 +4539,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.20" +version = "0.37.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b96e891d04aa506a6d1f318d2771bcb1c7dfda84e126660ace067c9b474bb2c0" +checksum = "62f25693a73057a1b4cb56179dd3c7ea21a7c6c5ee7d85781f5749b46f34b79c" dependencies = [ "bitflags 1.3.2", "errno", @@ -4563,6 +4551,19 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "rustix" +version = "0.38.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc6396159432b5c8490d4e301d8c705f61860b8b6c863bf79942ce5401968f3" +dependencies = [ + "bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys 0.4.3", + "windows-sys 0.48.0", +] + [[package]] name = "rustls" version = "0.21.2" @@ -4589,9 +4590,9 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.2" +version = "1.0.3" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ "base64 0.21.2", ] @@ -4642,7 +4643,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "107c3d5d7f370ac09efa62a78375f94d94b8a33c61d8c278b96683fb4dbf2d8d" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -4759,7 +4760,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -4809,7 +4810,7 @@ checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ "darling 0.20.1", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -4848,7 +4849,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "079a83df15f85d89a68d64ae1238f142f172b1fa915d0d76b26a7cba1b659a69" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -4859,7 +4860,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -5068,7 +5069,7 @@ dependencies = [ "heck 0.3.3", "proc-macro-error", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -5089,7 +5090,7 @@ checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "rustversion", "syn 1.0.109", ] @@ -5099,7 +5100,7 @@ name = "substruct" version = "0.1.0" source = 
"git+https://github.com/sydhds/substruct#2fb3ae0dc9d913a0566ce6415eaa7a7ca1690fe1" dependencies = [ - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -5127,7 +5128,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "unicode-ident", ] @@ -5138,7 +5139,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2efbeae7acf4eabd6bcdcbd11c92f45231ddda7539edc7806bd1a04a03b24616" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "unicode-ident", ] @@ -5170,7 +5171,7 @@ dependencies = [ "cfg-if", "fastrand", "redox_syscall 0.3.5", - "rustix 0.37.20", + "rustix 0.37.21", "windows-sys 0.48.0", ] @@ -5211,7 +5212,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -5290,11 +5291,12 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.28.2" +version = "1.29.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94d7b1cfd2aa4011f2de74c2c4c63665e27a71006b0a192dcd2710272e73dfa2" +checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" dependencies = [ "autocfg", + "backtrace", "bytes", "libc", "mio", @@ -5324,7 +5326,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -5432,7 +5434,7 @@ dependencies = [ "prettyplease", "proc-macro2 1.0.63", "prost-build", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -5553,7 +5555,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] @@ -5607,7 +5609,7 @@ source = "git+https://github.com/massalabs/transition.git?rev=93fa3bf82f9f5ff421 dependencies = [ "darling 0.14.4", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", "unsigned-varint", ] @@ -5791,7 +5793,7 @@ dependencies = [ "log", "once_cell", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", "wasm-bindgen-shared", ] @@ -5815,7 +5817,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -5837,7 +5839,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ - "quote 1.0.28", + "quote 1.0.29", "wasm-bindgen-macro-support", ] @@ -5848,7 +5850,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", "wasm-bindgen-backend", "wasm-bindgen-shared", @@ -5967,7 +5969,7 @@ checksum = "97901fdbaae383dbb90ea162cc3a76a9fa58ac39aec7948b4c0b9bbef9307738" dependencies = [ "proc-macro-error", "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 1.0.109", ] @@ -6134,7 +6136,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.0", + "windows-targets 0.48.1", ] [[package]] @@ -6180,7 +6182,7 @@ version = "0.48.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.0", + "windows-targets 0.48.1", ] [[package]] @@ -6200,9 +6202,9 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.48.0" +version = "0.48.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" dependencies = [ "windows_aarch64_gnullvm 0.48.0", "windows_aarch64_msvc 0.48.0", @@ -6370,7 +6372,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2 1.0.63", - "quote 1.0.28", + "quote 1.0.29", "syn 2.0.22", ] diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 688135207ef..a15f516a383 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -301,13 +301,10 @@ impl Interface for InterfaceImpl { let address = get_address_from_opt_or_context(&context, address)?; let amount = context.get_balance(&address).unwrap_or_default(); - let (mantissa, scale) = self - .amount_to_mantissa_scale(amount.to_raw()) + + let native_amount = self + .native_amount_from_str_wasmv1(&amount.to_string()) .unwrap_or_default(); - let native_amount = NativeAmount { - mandatory_mantissa: Some(mantissa), - mandatory_scale: Some(scale), - }; Ok(native_amount) } @@ -365,12 +362,8 @@ impl Interface for InterfaceImpl { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; - match (context.get_keys(&address), prefix) { - (Some(mut value), prefix) if !prefix.is_empty() => { - value.retain(|key| key.iter().zip(prefix.iter()).all(|(k, p)| k == p)); - Ok(value) - } - 
(Some(value), _) => Ok(value), + match context.get_keys(&address, prefix) { + Some(value) => Ok(value), _ => bail!("data entry not found"), } } @@ -856,7 +849,7 @@ impl Interface for InterfaceImpl { // build the message let prefix = format!("\x19Ethereum Signed Message:\n{}", message_.len()); let to_hash = [prefix.as_bytes(), message_].concat(); - let full_hash = sha3::Keccak256::digest(&to_hash); + let full_hash = sha3::Keccak256::digest(to_hash); let message = libsecp256k1::Message::parse_slice(&full_hash) .expect("message could not be parsed from a hash slice"); From 5614d6944809f524556af7e30e48c5228c310a11 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Fri, 30 Jun 2023 18:17:39 +0200 Subject: [PATCH 17/71] Use get_prefix_bounds() helper function --- massa-execution-worker/src/interface_impl.rs | 22 +------------------- 1 file changed, 1 insertion(+), 21 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index a15f516a383..4ece604a78b 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -694,27 +694,7 @@ impl Interface for InterfaceImpl { /// # Returns /// A list of keys (keys are byte arrays) that match the given prefix fn get_op_keys_wasmv1(&self, prefix: &[u8]) -> Result>> { - // compute prefix range - let prefix_range = if !prefix.is_empty() { - // compute end of prefix range - let mut prefix_end = prefix.to_vec(); - while let Some(255) = prefix_end.last() { - prefix_end.pop(); - } - if let Some(v) = prefix_end.last_mut() { - *v += 1; - } - ( - std::ops::Bound::Included(prefix.to_vec()), - if prefix_end.is_empty() { - std::ops::Bound::Unbounded - } else { - std::ops::Bound::Excluded(prefix_end) - }, - ) - } else { - (std::ops::Bound::Unbounded, std::ops::Bound::Unbounded) - }; + let prefix_range = get_prefix_bounds(prefix); let range_ref = (prefix_range.0.as_ref(), prefix_range.1.as_ref()); let context = context_guard!(self); From 
943142cd5c1048e3faf0ea35700544b6bcc21419 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Fri, 30 Jun 2023 19:17:29 +0200 Subject: [PATCH 18/71] Added the implementation of generate_event_wasmv1 --- massa-execution-worker/src/interface_impl.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 4ece604a78b..c221da0ecc6 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -959,6 +959,8 @@ impl Interface for InterfaceImpl { /// /// # Arguments: /// data: the string data that is the payload of the event + /// + /// [DeprecatedByNewRuntime] Replaced by `get_current_slot` fn generate_event(&self, data: String) -> Result<()> { let mut context = context_guard!(self); let event = context.event_create(data, false); @@ -966,6 +968,20 @@ impl Interface for InterfaceImpl { Ok(()) } + /// Emits an execution event to be stored. + /// + /// # Arguments: + /// data: the bytes_array data that is the payload of the event + fn generate_event_wasmv1(&self, data: Vec) -> Result<()> { + let data_str = String::from_utf8(data.clone()).unwrap_or(format!("{:?}", data)); + + let mut context = context_guard!(self); + let event = context.event_create(data_str, false); + context.event_emit(event); + + Ok(()) + } + /// Returns the current time (millisecond UNIX timestamp) /// Note that in order to ensure determinism, this is actually the time of the context slot. 
fn get_time(&self) -> Result { From 910ed5ec2b70d485014aafac354c18c9bdac8be7 Mon Sep 17 00:00:00 2001 From: JF Date: Wed, 5 Jul 2023 11:26:52 +0200 Subject: [PATCH 19/71] NativeAmount arithmetic implementation (#4199) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Authored-by: Jean-François Morcillo --- Cargo.lock | 38 ++-- massa-async-pool/Cargo.toml | 2 +- massa-client/Cargo.toml | 2 +- massa-execution-exports/Cargo.toml | 2 +- massa-execution-worker/Cargo.toml | 2 +- massa-execution-worker/src/interface_impl.rs | 204 ++++++++++++++++++- massa-final-state/Cargo.toml | 2 +- massa-grpc/Cargo.toml | 2 +- massa-ledger-exports/Cargo.toml | 2 +- massa-models/Cargo.toml | 2 +- massa-models/src/amount.rs | 38 ++++ massa-sdk/Cargo.toml | 2 +- massa-versioning/Cargo.toml | 2 +- 13 files changed, 256 insertions(+), 44 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9250573fbda..f9ff987a531 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2438,7 +2438,7 @@ dependencies = [ "dialoguer", "erased-serde", "lazy_static", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_api_exports", "massa_models", "massa_sdk", @@ -2511,7 +2511,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = "git+https://github.com/massalabs/massa-proto-rs?rev=8873f96#8873f969375834904044965cb1b0bcb9ec0fafdd" +source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#988019fe50dd093f4793ec7b43a1d5e1b1952ae4" dependencies = [ "glob", "prost", @@ -2521,22 +2521,10 @@ dependencies = [ "tonic-build", ] -[[package]] -name = "massa-proto-rs" -version = "0.1.0" -source = "git+https://github.com/massalabs/massa-proto-rs?rev=988019f#988019fe50dd093f4793ec7b43a1d5e1b1952ae4" -dependencies = [ - "glob", - "prost", - "prost-build", - "prost-types", - "tonic-build", -] - [[package]] name = "massa-sc-runtime" version = "0.10.0" -source 
= "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#4bc7a62f0eef3ca404e75a51f68f5e98ab797dad" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#5594a48a5097fbcc78b06c168f5cb38a0b0dff13" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2545,7 +2533,7 @@ dependencies = [ "displaydoc", "function_name", "loupe", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=988019f)", + "massa-proto-rs", "more-asserts 0.3.1", "num_enum 0.6.1", "parking_lot", @@ -2628,7 +2616,7 @@ dependencies = [ name = "massa_async_pool" version = "0.24.0" dependencies = [ - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_db_exports", "massa_hash", "massa_ledger_exports", @@ -2809,7 +2797,7 @@ name = "massa_execution_exports" version = "0.24.0" dependencies = [ "displaydoc", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa-sc-runtime", "massa_final_state", "massa_hash", @@ -2836,7 +2824,7 @@ dependencies = [ "criterion", "hex-literal", "libsecp256k1", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa-sc-runtime", "massa_async_pool", "massa_channel", @@ -2916,7 +2904,7 @@ version = "0.24.0" dependencies = [ "bs58", "displaydoc", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_async_pool", "massa_db_exports", "massa_db_worker", @@ -2947,7 +2935,7 @@ dependencies = [ "h2", "hyper", "itertools", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_channel", "massa_consensus_exports", "massa_execution_exports", @@ -2995,7 +2983,7 @@ name = "massa_ledger_exports" version = "0.24.0" dependencies = [ "displaydoc", - "massa-proto-rs 0.1.0 
(git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_db_exports", "massa_hash", "massa_models", @@ -3054,7 +3042,7 @@ dependencies = [ "directories", "displaydoc", "lazy_static", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_hash", "massa_serialization", "massa_signature", @@ -3225,7 +3213,7 @@ dependencies = [ "jsonrpsee", "jsonrpsee-http-client", "jsonrpsee-ws-client", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_api_exports", "massa_models", "massa_time", @@ -3291,7 +3279,7 @@ name = "massa_versioning" version = "0.24.0" dependencies = [ "machine", - "massa-proto-rs 0.1.0 (git+https://github.com/massalabs/massa-proto-rs?rev=8873f96)", + "massa-proto-rs", "massa_db_exports", "massa_db_worker", "massa_hash", diff --git a/massa-async-pool/Cargo.toml b/massa-async-pool/Cargo.toml index 9face209379..963b5a6780b 100644 --- a/massa-async-pool/Cargo.toml +++ b/massa-async-pool/Cargo.toml @@ -19,7 +19,7 @@ massa_serialization = { path = "../massa-serialization" } massa_signature = { path = "../massa-signature" } massa_db_exports = { path = "../massa-db-exports" } massa_time = { path = "../massa-time" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} [dev-dependencies] tempfile = "3.3" diff --git a/massa-client/Cargo.toml b/massa-client/Cargo.toml index 2f60e6a70da..fad9f381d3a 100644 --- a/massa-client/Cargo.toml +++ b/massa-client/Cargo.toml @@ -28,7 +28,7 @@ massa_signature = { path = "../massa-signature" } massa_time = { path = "../massa-time" } massa_sdk = { path = "../massa-sdk" } massa_wallet = { path = "../massa-wallet" } -massa-proto-rs = { git = 
"https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} [dev-dependencies] toml_edit = "0.19" diff --git a/massa-execution-exports/Cargo.toml b/massa-execution-exports/Cargo.toml index e8446808b5e..c25975edb77 100644 --- a/massa-execution-exports/Cargo.toml +++ b/massa-execution-exports/Cargo.toml @@ -18,7 +18,7 @@ tokio = { version = "1.23", features = ["sync"] } mockall = { version = "0.11.4", optional = true} # custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} massa_hash = { path = "../massa-hash" } massa_models = { path = "../massa-models" } massa_time = { path = "../massa-time" } diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index ffc65f50ea8..2176dd2d60b 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -31,7 +31,7 @@ massa_models = { path = "../massa-models" } massa_storage = { path = "../massa-storage" } massa_hash = { path = "../massa-hash" } massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "feature/Improve_ABI_types_in_wasmv1" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96"} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} massa_metrics = { path = "../massa-metrics" } massa_module_cache = { path = "../massa-module-cache" } massa_signature = { path = "../massa-signature" } diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index e0fda52920c..d2be4b849fc 100644 --- 
a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -140,6 +140,20 @@ impl InterfaceClone for InterfaceImpl { } } +/// Helper function that creates an amount from a NativeAmount +fn amount_from_native_amount(amount: &NativeAmount) -> Result { + let amount = Amount::from_mantissa_scale(amount.mantissa, amount.scale) + .map_err(|err| anyhow!(format!("{}", err)))?; + + Ok(amount) +} + +/// Helper function that creates a NativeAmount from the amount internal representation +fn amount_to_native_amount(amount: &Amount) -> NativeAmount { + let (mantissa, scale) = amount.to_mantissa_scale(); + NativeAmount { mantissa, scale } +} + /// Helper function to get the address from the option given as argument to some ABIs /// Fallback to the current context address if not provided. fn get_address_from_opt_or_context( @@ -293,7 +307,7 @@ impl Interface for InterfaceImpl { fn get_balance_wasmv1( &self, address: Option, - ) -> Result { + ) -> Result { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -898,14 +912,7 @@ impl Interface for InterfaceImpl { from_address: Option, ) -> Result<()> { let to_address = Address::from_str(&to_address)?; - let amount = Amount::from_mantissa_scale( - raw_amount - .mandatory_mantissa - .ok_or(anyhow!("No mantissa provided"))?, - raw_amount - .mandatory_scale - .ok_or(anyhow!("No scale provided"))?, - )?; + let amount = Amount::from_mantissa_scale(raw_amount.mantissa, raw_amount.scale)?; let mut context = context_guard!(self); let from_address = match from_address { Some(from_address) => Address::from_str(&from_address)?, @@ -1172,6 +1179,115 @@ impl Interface for InterfaceImpl { let hash = massa_hash::Hash::compute_from(bytes); Ok(hash.into_bytes()) } + + fn init_call_wasmv1(&self, address: &str, raw_coins: NativeAmount) -> Result> { + unimplemented!("init_call") + } + + /// Returns a NativeAmount from a string + fn 
native_amount_from_str_wasmv1(&self, amount: &str) -> Result { + let amount = Amount::from_str(amount).map_err(|err| anyhow!(format!("{}", err)))?; + Ok(amount_to_native_amount(&amount)) + } + + /// Returns a string from a NativeAmount + fn native_amount_to_string_wasmv1(&self, amount: &NativeAmount) -> Result { + let amount = amount_from_native_amount(amount) + .map_err(|err| anyhow!(format!("Couldn't convert native amount to Amount: {}", err)))?; + + Ok(amount.to_string()) + } + + /// Checks if the given native amount is valid + fn check_native_amount_wasmv1(&self, amount: &NativeAmount) -> Result { + Ok(amount_from_native_amount(amount).is_ok()) + } + + /// Adds two native amounts, saturating at the numeric bounds instead of overflowing. + fn add_native_amounts_wasmv1( + &self, + amount1: &NativeAmount, + amount2: &NativeAmount, + ) -> Result { + let amount1 = amount_from_native_amount(amount1)?; + let amount2 = amount_from_native_amount(amount2)?; + let sum = amount1.saturating_add(amount2); + Ok(amount_to_native_amount(&sum)) + } + + /// Subtracts two native amounts, saturating at the numeric bounds instead of overflowing. + fn sub_native_amounts_wasmv1( + &self, + amount1: &NativeAmount, + amount2: &NativeAmount, + ) -> Result { + let amount1 = amount_from_native_amount(amount1)?; + let amount2 = amount_from_native_amount(amount2)?; + let sub = amount1.saturating_sub(amount2); + Ok(amount_to_native_amount(&sub)) + } + + /// Multiplies a native amount by a factor, saturating at the numeric bounds instead of overflowing. + fn mul_native_amount_wasmv1(&self, amount: &NativeAmount, factor: u64) -> Result { + let amount = amount_from_native_amount(amount)?; + let mul = amount.saturating_mul_u64(factor); + Ok(amount_to_native_amount(&mul)) + } + + /// Divides a native amount by a divisor, return an error if the divisor is 0. 
+ fn div_rem_native_amount_wasmv1( + &self, + dividend: &NativeAmount, + divisor: u64, + ) -> Result<(NativeAmount, NativeAmount)> { + let dividend = amount_from_native_amount(dividend)?; + + let quotient = dividend + .checked_div_u64(divisor) + .ok_or_else(|| anyhow!(format!("Couldn't div_rem native amount")))?; + // we can unwrap, we + let remainder = dividend + .checked_rem_u64(divisor) + .ok_or_else(|| anyhow!(format!("Couldn't checked_rem_u64 native amount")))?; + + return Ok(( + amount_to_native_amount("ient), + amount_to_native_amount(&remainder), + )); + } + + /// Divides a native amount by a divisor, return an error if the divisor is 0. + fn div_rem_native_amounts_wasmv1( + &self, + dividend: &NativeAmount, + divisor: &NativeAmount, + ) -> Result<(u64, NativeAmount)> { + let dividend = amount_from_native_amount(dividend)?; + let divisor = amount_from_native_amount(divisor)?; + + let quotient = dividend + .checked_div(divisor) + .ok_or_else(|| anyhow!(format!("Couldn't div_rem native amount")))?; + + let remainder = dividend + .checked_rem(&divisor) + .ok_or_else(|| anyhow!(format!("Couldn't checked_rem native amount")))?; + let remainder = amount_to_native_amount(&remainder); + + return Ok((quotient, remainder)); + } + + fn get_call_coins_wasmv1(&self) -> Result { + unimplemented!("get_call_coins_wasmv1"); + } + + fn base58_check_to_bytes_wasmv1(&self, s: &str) -> Result> { + unimplemented!("get_call_coins_wasmv1"); + } + + fn bytes_to_base58_check_wasmv1(&self, bytes: &[u8]) -> String { + unimplemented!("get_call_coins_wasmv1"); + } } #[cfg(test)] @@ -1221,6 +1337,76 @@ mod tests { assert!(op_keys.contains(&b"k1".to_vec())); assert!(op_keys.contains(&b"k2".to_vec())); } + + #[test] + fn test_native_amount() { + let sender_addr = Address::from_public_key(&KeyPair::generate(0).unwrap().get_public_key()); + let interface = InterfaceImpl::new_default(sender_addr, None); + + let amount1 = interface.native_amount_from_str_wasmv1("100").unwrap(); + let amount2 = 
interface.native_amount_from_str_wasmv1("100").unwrap(); + let amount3 = interface.native_amount_from_str_wasmv1("200").unwrap(); + + let sum = interface + .add_native_amounts_wasmv1(&amount1, &amount2) + .unwrap(); + + assert_eq!(amount3, sum); + println!( + "sum: {}", + interface.native_amount_to_string_wasmv1(&sum).unwrap() + ); + assert_eq!( + "200", + interface.native_amount_to_string_wasmv1(&sum).unwrap() + ); + + let diff = interface.sub_native_amounts_wasmv1(&sum, &amount2).unwrap(); + assert_eq!(amount1, diff); + + let amount4 = NativeAmount { + mantissa: 1, + scale: 9, + }; + + let is_valid = interface.check_native_amount_wasmv1(&amount4).unwrap(); + assert_eq!(is_valid, true); + + let mul = interface.mul_native_amount_wasmv1(&amount1, 2).unwrap(); + assert_eq!(mul, amount3); + + let (quotient, remainder) = interface.div_rem_native_amount_wasmv1(&amount1, 2).unwrap(); + let quotient_res_50 = interface.native_amount_from_str_wasmv1("50").unwrap(); + let remainder_res_0 = interface.native_amount_from_str_wasmv1("0").unwrap(); + assert_eq!(quotient, quotient_res_50); + assert_eq!(remainder, remainder_res_0); + + let (quotient, remainder) = interface.div_rem_native_amount_wasmv1(&amount1, 3).unwrap(); + let verif_div = interface.mul_native_amount_wasmv1("ient, 3).unwrap(); + let verif_dif = interface + .add_native_amounts_wasmv1(&verif_div, &remainder) + .unwrap(); + assert_eq!(verif_dif, amount1); + + let amount5 = interface.native_amount_from_str_wasmv1("2").unwrap(); + let (quotient, remainder) = interface + .div_rem_native_amounts_wasmv1(&amount1, &amount5) + .unwrap(); + assert_eq!(quotient, 50); + assert_eq!(remainder, remainder_res_0); + + let amount6 = interface.native_amount_from_str_wasmv1("3").unwrap(); + let (quotient, remainder) = interface + .div_rem_native_amounts_wasmv1(&amount1, &amount6) + .unwrap(); + let verif_div = interface + .mul_native_amount_wasmv1(&amount6, quotient) + .unwrap(); + let verif_dif = interface + 
.add_native_amounts_wasmv1(&verif_div, &remainder) + .unwrap(); + assert_eq!(verif_dif, amount1); + } } #[test] diff --git a/massa-final-state/Cargo.toml b/massa-final-state/Cargo.toml index 37b60afc898..eef12188919 100644 --- a/massa-final-state/Cargo.toml +++ b/massa-final-state/Cargo.toml @@ -21,7 +21,7 @@ massa_async_pool = { path = "../massa-async-pool" } massa_serialization = { path = "../massa-serialization" } massa_pos_exports = { path = "../massa-pos-exports" } massa_db_exports = { path = "../massa-db-exports" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} massa_versioning = { path = "../massa-versioning" } massa_time = { path = "../massa-time" } massa_hash = { path = "../massa-hash" } diff --git a/massa-grpc/Cargo.toml b/massa-grpc/Cargo.toml index 0d1636409b5..5a79da24631 100644 --- a/massa-grpc/Cargo.toml +++ b/massa-grpc/Cargo.toml @@ -10,7 +10,7 @@ homepage = "https://massa.net" documentation = "https://docs.massa.net/" [dependencies] -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} displaydoc = "0.2" thiserror = "1.0" tonic = { version = "0.9.2", features = ["gzip", "tls"] } diff --git a/massa-ledger-exports/Cargo.toml b/massa-ledger-exports/Cargo.toml index 1b4672fcc05..5f1e6fb0c84 100644 --- a/massa-ledger-exports/Cargo.toml +++ b/massa-ledger-exports/Cargo.toml @@ -14,7 +14,7 @@ nom = "=7.1" num_enum = "0.5.10" # custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = 
"feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} massa_hash = { path = "../massa-hash" } massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } diff --git a/massa-models/Cargo.toml b/massa-models/Cargo.toml index d56422512db..b49e5293458 100644 --- a/massa-models/Cargo.toml +++ b/massa-models/Cargo.toml @@ -18,7 +18,7 @@ config = "0.13" bs58 = { version = "=0.4", features = ["check"] } bitvec = { version = "=1.0", features = ["serde"] } nom = "=7.1" -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} # custom modules massa_hash = { path = "../massa-hash" } diff --git a/massa-models/src/amount.rs b/massa-models/src/amount.rs index 898daa6e484..dfa2fbe9c44 100644 --- a/massa-models/src/amount.rs +++ b/massa-models/src/amount.rs @@ -217,6 +217,44 @@ impl Amount { pub fn checked_div_u64(self, factor: u64) -> Option { self.0.checked_div(factor).map(Amount) } + + /// safely divide self by an amount, returning None if the divisor is zero + /// ``` + /// # use massa_models::amount::Amount; + /// # use std::str::FromStr; + /// let amount_1 : Amount = Amount::from_str("42").unwrap(); + /// let amount_2 : Amount = Amount::from_str("7").unwrap(); + /// let res : u64 = amount_1.checked_div(amount_2).unwrap(); + /// assert_eq!(res, 6); + /// ``` + pub fn checked_div(self, divisor: Self) -> Option { + self.0.checked_div(divisor.0) + } + + /// compute self % divisor, return None if divisor is zero + /// ``` + /// # use massa_models::amount::Amount; + /// # use std::str::FromStr; + /// let amount_1 : Amount = Amount::from_str("42").unwrap(); + /// let amount_2 : Amount = Amount::from_str("10").unwrap(); + /// let res : Amount = amount_1.checked_rem(&amount_2).unwrap(); + /// assert_eq!(res, Amount::from_str("2").unwrap()); + 
/// ``` + pub fn checked_rem(&self, divisor: &Amount) -> Option { + Some(Amount(self.0.checked_rem(divisor.0)?)) + } + + /// compute self % divisor, return None if divisor is zero + /// ``` + /// # use massa_models::amount::Amount; + /// # use std::str::FromStr; + /// let amount_1 : Amount = Amount::from_str("42").unwrap(); + /// let res : Amount = amount_1.checked_rem_u64(40000000000).unwrap(); + /// assert_eq!(res, Amount::from_str("2").unwrap()); + /// ``` + pub fn checked_rem_u64(&self, divisor: u64) -> Option { + Some(Amount(self.0.checked_rem(divisor)?)) + } } /// display an Amount in decimal string form (like "10.33") diff --git a/massa-sdk/Cargo.toml b/massa-sdk/Cargo.toml index 6e14cd6becb..c6af2b88c1f 100644 --- a/massa-sdk/Cargo.toml +++ b/massa-sdk/Cargo.toml @@ -14,4 +14,4 @@ tracing = {version = "0.1", features = ["log"]} massa_api_exports = { path = "../massa-api-exports" } massa_models = { path = "../massa-models" } massa_time = { path = "../massa-time" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} diff --git a/massa-versioning/Cargo.toml b/massa-versioning/Cargo.toml index fdff48f49aa..98aa5bc9923 100644 --- a/massa-versioning/Cargo.toml +++ b/massa-versioning/Cargo.toml @@ -20,7 +20,7 @@ massa_models = { path = "../massa-models" } massa_serialization = { path = "../massa-serialization" } massa_hash = { path = "../massa-hash" } massa_signature = { path = "../massa-signature" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "8873f96", features = ["tonic"]} +massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", branch = "feature/Improve_ABI_types_in_wasmv1", features = ["tonic"]} massa_db_exports = { path = "../massa-db-exports" } [dev-dependencies] From 173278f57f57788a17592af3fe1d05ec98dc488a 
Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Wed, 5 Jul 2023 17:13:50 +0200 Subject: [PATCH 20/71] Runtime changes common leo (#4217) * fmt * Update branch targeting * Implement abis * Update interface (clippy, implem some abis..) * fmt * Changed back branch targeting --- Cargo.lock | 44 +++-- massa-execution-worker/src/interface_impl.rs | 187 +++++++++++++++++-- 2 files changed, 195 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f9ff987a531..4cc3c72ca47 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -710,9 +710,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03e69e28e9f7f77debdedbaafa2866e1de9ba56df55a8bd7cfc724c25a09987c" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -1374,7 +1374,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ "cfg-if", - "rustix 0.38.2", + "rustix 0.38.3", "windows-sys 0.48.0", ] @@ -2524,7 +2524,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#5594a48a5097fbcc78b06c168f5cb38a0b0dff13" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#8ad3296d211976b912f909dca8372fd5ef074638" dependencies = [ "anyhow", "as-ffi-bindings", @@ -4081,7 +4081,7 @@ dependencies = [ "byteorder", "hex", "lazy_static", - "rustix 0.36.14", + "rustix 0.36.15", ] [[package]] @@ -4387,9 +4387,21 @@ dependencies = [ [[package]] name = "regex" -version = "1.8.4" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89089e897c013b3deb627116ae56a6955a72b8bed395c9526af31c9fe528b484" +dependencies = [ + "aho-corasick 1.0.2", + 
"memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" +checksum = "fa250384981ea14565685dea16a9ccc4d1c541a13f82b9c168572264d1df8c56" dependencies = [ "aho-corasick 1.0.2", "memchr", @@ -4398,9 +4410,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78" +checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" [[package]] name = "region" @@ -4530,9 +4542,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustix" -version = "0.36.14" +version = "0.36.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14e4d67015953998ad0eb82887a0eb0129e18a7e2f3b7b0f6c422fddcd503d62" +checksum = "c37f1bd5ef1b5422177b7646cba67430579cfe2ace80f284fee876bca52ad941" dependencies = [ "bitflags 1.3.2", "errno", @@ -4544,9 +4556,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.22" +version = "0.37.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8818fa822adcc98b18fedbb3632a6a33213c070556b5aa7c4c8cc21cff565c4c" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" dependencies = [ "bitflags 1.3.2", "errno", @@ -4558,9 +4570,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.2" +version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aabcb0461ebd01d6b79945797c27f8529082226cb630a9865a71870ff63532a4" +checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" dependencies = [ "bitflags 2.3.3", "errno", @@ -5170,7 +5182,7 @@ dependencies = [ "cfg-if", "fastrand", 
"redox_syscall 0.3.5", - "rustix 0.37.22", + "rustix 0.37.23", "windows-sys 0.48.0", ] diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index d2be4b849fc..3fde30c22ea 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -14,12 +14,20 @@ use massa_models::bytecode::Bytecode; use massa_models::config::MAX_DATASTORE_KEY_LENGTH; use massa_models::datastore::get_prefix_bounds; use massa_models::{ - address::Address, amount::Amount, slot::Slot, timeslots::get_block_slot_timestamp, + address::{Address, SCAddress, UserAddress}, + amount::Amount, + slot::Slot, + timeslots::get_block_slot_timestamp, }; +use massa_proto_rs::massa::model::v1::AddressCategory; use massa_proto_rs::massa::model::v1::NativeAmount; +use massa_proto_rs::massa::model::v1::NativeTime; use massa_sc_runtime::RuntimeModule; use massa_sc_runtime::{Interface, InterfaceClone}; +use massa_signature::PublicKey; +use massa_signature::Signature; +use massa_time::MassaTime; use parking_lot::Mutex; use rand::Rng; use sha2::{Digest, Sha256}; @@ -154,6 +162,18 @@ fn amount_to_native_amount(amount: &Amount) -> NativeAmount { NativeAmount { mantissa, scale } } +/// Helper function that creates an MassaTime from a NativeTime +fn massa_time_from_native_time(time: &NativeTime) -> Result { + let time = MassaTime::from_millis(time.milliseconds); + Ok(time) +} + +/// Helper function that creates a NativeTime from the MassaTime internal representation +fn massa_time_to_native_time(time: &MassaTime) -> NativeTime { + let milliseconds = time.to_millis(); + NativeTime { milliseconds } +} + /// Helper function to get the address from the option given as argument to some ABIs /// Fallback to the current context address if not provided. 
fn get_address_from_opt_or_context( @@ -304,18 +324,12 @@ impl Interface for InterfaceImpl { /// # Returns /// The raw representation (no decimal factor) of the balance of the address, /// or zero if the address is not found in the ledger. - fn get_balance_wasmv1( - &self, - address: Option, - ) -> Result { + fn get_balance_wasmv1(&self, address: Option) -> Result { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; let amount = context.get_balance(&address).unwrap_or_default(); - - let native_amount = self - .native_amount_from_str_wasmv1(&amount.to_string()) - .unwrap_or_default(); + let native_amount = amount_to_native_amount(&amount); Ok(native_amount) } @@ -912,7 +926,8 @@ impl Interface for InterfaceImpl { from_address: Option, ) -> Result<()> { let to_address = Address::from_str(&to_address)?; - let amount = Amount::from_mantissa_scale(raw_amount.mantissa, raw_amount.scale)?; + let amount = amount_from_native_amount(&raw_amount)?; + let mut context = context_guard!(self); let from_address = match from_address { Some(from_address) => Address::from_str(&from_address)?, @@ -955,10 +970,22 @@ impl Interface for InterfaceImpl { /// /// # Returns /// The raw representation (no decimal factor) of the amount of coins + /// + /// [DeprecatedByNewRuntime] Replaced by `get_call_coins_wasmv1` fn get_call_coins(&self) -> Result { Ok(context_guard!(self).get_current_call_coins()?.to_raw()) } + /// Gets the amount of coins that have been transferred at the beginning of the call. + /// See the `init_call` method. + /// + /// # Returns + /// The amount of coins + fn get_call_coins_wasmv1(&self) -> Result { + let amount = context_guard!(self).get_current_call_coins()?; + Ok(amount_to_native_amount(&amount)) + } + /// Emits an execution event to be stored. 
/// /// # Arguments: @@ -1180,6 +1207,7 @@ impl Interface for InterfaceImpl { Ok(hash.into_bytes()) } + #[allow(unused_variables)] fn init_call_wasmv1(&self, address: &str, raw_coins: NativeAmount) -> Result> { unimplemented!("init_call") } @@ -1194,7 +1222,6 @@ impl Interface for InterfaceImpl { fn native_amount_to_string_wasmv1(&self, amount: &NativeAmount) -> Result { let amount = amount_from_native_amount(amount) .map_err(|err| anyhow!(format!("Couldn't convert native amount to Amount: {}", err)))?; - Ok(amount.to_string()) } @@ -1250,10 +1277,10 @@ impl Interface for InterfaceImpl { .checked_rem_u64(divisor) .ok_or_else(|| anyhow!(format!("Couldn't checked_rem_u64 native amount")))?; - return Ok(( + Ok(( amount_to_native_amount("ient), amount_to_native_amount(&remainder), - )); + )) } /// Divides a native amount by a divisor, return an error if the divisor is 0. @@ -1274,19 +1301,139 @@ impl Interface for InterfaceImpl { .ok_or_else(|| anyhow!(format!("Couldn't checked_rem native amount")))?; let remainder = amount_to_native_amount(&remainder); - return Ok((quotient, remainder)); - } - - fn get_call_coins_wasmv1(&self) -> Result { - unimplemented!("get_call_coins_wasmv1"); + Ok((quotient, remainder)) } + #[allow(unused_variables)] fn base58_check_to_bytes_wasmv1(&self, s: &str) -> Result> { - unimplemented!("get_call_coins_wasmv1"); + unimplemented!("base58_check_to_bytes_wasmv1"); } + #[allow(unused_variables)] fn bytes_to_base58_check_wasmv1(&self, bytes: &[u8]) -> String { - unimplemented!("get_call_coins_wasmv1"); + unimplemented!("bytes_to_base58_check_wasmv1"); + } + + fn check_address_wasmv1(&self, to_check: &String) -> Result { + Ok(Address::from_str(to_check).is_ok()) + } + + fn check_pubkey_wasmv1(&self, to_check: &String) -> Result { + Ok(PublicKey::from_str(to_check).is_ok()) + } + + fn check_signature_wasmv1(&self, to_check: &String) -> Result { + Ok(Signature::from_str(to_check).is_ok()) + } + + fn get_address_category_wasmv1(&self, to_check: 
&String) -> Result { + let addr = Address::from_str(to_check)?; + match addr { + Address::User(_) => Ok(AddressCategory::UserAddress), + Address::SC(_) => Ok(AddressCategory::ScAddress), + #[allow(unreachable_patterns)] + _ => Ok(AddressCategory::Unspecified), + } + } + + fn get_address_version_wasmv1(&self, address: &String) -> Result { + let address = Address::from_str(address)?; + match address { + Address::User(UserAddress::UserAddressV0(_)) => Ok(0), + Address::User(UserAddress::UserAddressV1(_)) => Ok(1), + Address::SC(SCAddress::SCAddressV0(_)) => Ok(0), + Address::SC(SCAddress::SCAddressV1(_)) => Ok(1), + #[allow(unreachable_patterns)] + _ => bail!("Unknown address version"), + } + } + + fn get_pubkey_version_wasmv1(&self, pubkey: &String) -> Result { + let pubkey = PublicKey::from_str(pubkey)?; + match pubkey { + PublicKey::PublicKeyV0(_) => Ok(0), + PublicKey::PublicKeyV1(_) => Ok(1), + #[allow(unreachable_patterns)] + _ => bail!("Unknown pubkey version"), + } + } + + fn get_signature_version_wasmv1(&self, signature: &String) -> Result { + let signature = Signature::from_str(signature)?; + match signature { + Signature::SignatureV0(_) => Ok(0), + Signature::SignatureV1(_) => Ok(1), + #[allow(unreachable_patterns)] + _ => bail!("Unknown signature version"), + } + } + + fn checked_add_native_time_wasmv1( + &self, + time1: &NativeTime, + time2: &NativeTime, + ) -> Result { + let time1 = massa_time_from_native_time(time1)?; + let time2 = massa_time_from_native_time(time2)?; + let sum = time1.checked_add(time2)?; + Ok(massa_time_to_native_time(&sum)) + } + + fn checked_sub_native_time_wasmv1( + &self, + time1: &NativeTime, + time2: &NativeTime, + ) -> Result { + let time1 = massa_time_from_native_time(time1)?; + let time2 = massa_time_from_native_time(time2)?; + let sub = time1.checked_sub(time2)?; + Ok(massa_time_to_native_time(&sub)) + } + + fn checked_mul_native_time_wasmv1(&self, time: &NativeTime, factor: u64) -> Result { + let time1 = 
massa_time_from_native_time(time)?; + let mul = time1.checked_mul(factor)?; + Ok(massa_time_to_native_time(&mul)) + } + + fn checked_scalar_div_native_time_wasmv1( + &self, + dividend: &NativeTime, + divisor: u64, + ) -> Result<(NativeTime, NativeTime)> { + let dividend = massa_time_from_native_time(dividend)?; + + let quotient = dividend + .checked_div_u64(divisor) + .or_else(|_| bail!(format!("Couldn't div_rem native time")))?; + let remainder = dividend + .checked_rem_u64(divisor) + .or_else(|_| bail!(format!("Couldn't checked_rem_u64 native time")))?; + + Ok(( + massa_time_to_native_time("ient), + massa_time_to_native_time(&remainder), + )) + } + + fn checked_div_native_time_wasmv1( + &self, + dividend: &NativeTime, + divisor: &NativeTime, + ) -> Result<(u64, NativeTime)> { + let dividend = massa_time_from_native_time(dividend)?; + let divisor = massa_time_from_native_time(divisor)?; + + let quotient = dividend + .checked_div_time(divisor) + .or_else(|_| bail!(format!("Couldn't div_rem native time")))?; + + let remainder = dividend + .checked_rem_time(divisor) + .or_else(|_| bail!(format!("Couldn't checked_rem native time")))?; + let remainder = massa_time_to_native_time(&remainder); + + Ok((quotient, remainder)) } } From 60bdee07c2040f7a9a44ad5ba42eb66505eb12b5 Mon Sep 17 00:00:00 2001 From: JF Date: Thu, 6 Jul 2023 13:33:40 +0200 Subject: [PATCH 21/71] Implement bs58 to/from bytes (#4223) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Jean-François Morcillo --- Cargo.lock | 39 ++- massa-execution-worker/Cargo.toml | 1 + massa-execution-worker/src/interface_impl.rs | 285 ++++++++++++++++++- 3 files changed, 303 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4cc3c72ca47..10588effc52 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -183,9 +183,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.70" +version = "0.1.71" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "79fa67157abdfd688a259b6648808757db9347af834624f27ec646da976aee5d" +checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" dependencies = [ "proc-macro2 1.0.63", "quote 1.0.29", @@ -429,9 +429,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a246e68bb43f6cd9db24bea052a53e40405417c5fb372e3d1a8a7f770a564ef5" +checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", "serde", @@ -2511,7 +2511,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#988019fe50dd093f4793ec7b43a1d5e1b1952ae4" +source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#941e20707e6a9c8ef07e3d3f6f2945342aef27b3" dependencies = [ "glob", "prost", @@ -2524,7 +2524,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#8ad3296d211976b912f909dca8372fd5ef074638" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#8312366a4e4614de7e664c00764b1afc6238dfb0" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2821,6 +2821,7 @@ name = "massa_execution_worker" version = "0.24.0" dependencies = [ "anyhow", + "bs58", "criterion", "hex-literal", "libsecp256k1", @@ -4583,13 +4584,13 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.2" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f" +checksum = "b19faa85ecb5197342b54f987b142fb3e30d0c90da40f80ef4fa9a726e6676ed" dependencies = [ "log", "ring", - 
"rustls-webpki", + "rustls-webpki 0.101.1", "sct", ] @@ -4624,6 +4625,16 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustls-webpki" +version = "0.101.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f36a6828982f422756984e47912a7a51dcbc2a197aa791158f8ca61cd8204e" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.13" @@ -4987,9 +4998,9 @@ checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "socket2" @@ -5395,9 +5406,9 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.11" +version = "0.19.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266f016b7f039eec8a1a80dfe6156b633d208b9fccca5e4db1d6775b0c4e34a7" +checksum = "c500344a19072298cd05a7224b3c0c629348b78692bf48466c5238656e315a78" dependencies = [ "indexmap 2.0.0", "toml_datetime", @@ -6096,7 +6107,7 @@ version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" dependencies = [ - "rustls-webpki", + "rustls-webpki 0.100.1", ] [[package]] diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index 2176dd2d60b..35b04479a79 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -8,6 +8,7 @@ edition = "2021" [dependencies] anyhow = "1.0" +bs58 = { version = "=0.4", features = ["check"] } rand = "0.8" rand_xoshiro = "0.6" parking_lot = { version = "0.12", features = ["deadlock_detection"] } diff --git 
a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 3fde30c22ea..517fe03ee1b 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -19,9 +19,9 @@ use massa_models::{ slot::Slot, timeslots::get_block_slot_timestamp, }; -use massa_proto_rs::massa::model::v1::AddressCategory; -use massa_proto_rs::massa::model::v1::NativeAmount; -use massa_proto_rs::massa::model::v1::NativeTime; +use massa_proto_rs::massa::model::v1::{ + AddressCategory, ComparisonResult, NativeAmount, NativeTime, +}; use massa_sc_runtime::RuntimeModule; use massa_sc_runtime::{Interface, InterfaceClone}; @@ -1304,14 +1304,15 @@ impl Interface for InterfaceImpl { Ok((quotient, remainder)) } - #[allow(unused_variables)] fn base58_check_to_bytes_wasmv1(&self, s: &str) -> Result> { - unimplemented!("base58_check_to_bytes_wasmv1"); + bs58::decode(s) + .with_check(None) + .into_vec() + .map_err(|err| anyhow!(format!("bs58 parsing error: {}", err))) } - #[allow(unused_variables)] - fn bytes_to_base58_check_wasmv1(&self, bytes: &[u8]) -> String { - unimplemented!("bytes_to_base58_check_wasmv1"); + fn bytes_to_base58_check_wasmv1(&self, data: &[u8]) -> String { + bs58::encode(data).with_check().into_string() } fn check_address_wasmv1(&self, to_check: &String) -> Result { @@ -1435,6 +1436,66 @@ impl Interface for InterfaceImpl { Ok((quotient, remainder)) } + + fn compare_address_wasmv1(&self, left: &str, right: &str) -> Result { + let left = Address::from_str(left)?; + let right = Address::from_str(right)?; + + let res = match left.cmp(&right) { + std::cmp::Ordering::Less => ComparisonResult::Lower, + std::cmp::Ordering::Equal => ComparisonResult::Equal, + std::cmp::Ordering::Greater => ComparisonResult::Greater, + }; + + Ok(res) + } + + fn compare_native_amount_wasmv1( + &self, + left: &NativeAmount, + right: &NativeAmount, + ) -> Result { + let left = amount_from_native_amount(left)?; + let right = 
amount_from_native_amount(right)?; + + let res = match left.cmp(&right) { + std::cmp::Ordering::Less => ComparisonResult::Lower, + std::cmp::Ordering::Equal => ComparisonResult::Equal, + std::cmp::Ordering::Greater => ComparisonResult::Greater, + }; + + Ok(res) + } + + fn compare_native_time_wasmv1( + &self, + left: &NativeTime, + right: &NativeTime, + ) -> Result { + let left = massa_time_from_native_time(left)?; + let right = massa_time_from_native_time(right)?; + + let res = match left.cmp(&right) { + std::cmp::Ordering::Less => ComparisonResult::Lower, + std::cmp::Ordering::Equal => ComparisonResult::Equal, + std::cmp::Ordering::Greater => ComparisonResult::Greater, + }; + + Ok(res) + } + + fn compare_pub_key_wasmv1(&self, left: &str, right: &str) -> Result { + let left = PublicKey::from_str(left)?; + let right = PublicKey::from_str(right)?; + + let res = match left.cmp(&right) { + std::cmp::Ordering::Less => ComparisonResult::Lower, + std::cmp::Ordering::Equal => ComparisonResult::Equal, + std::cmp::Ordering::Greater => ComparisonResult::Greater, + }; + + Ok(res) + } } #[cfg(test)] @@ -1554,6 +1615,214 @@ mod tests { .unwrap(); assert_eq!(verif_dif, amount1); } + + #[test] + fn test_base58_check_to_form() { + let sender_addr = Address::from_public_key(&KeyPair::generate(0).unwrap().get_public_key()); + let interface = InterfaceImpl::new_default(sender_addr, None); + + let data = "helloworld"; + let encoded = interface.bytes_to_base58_check_wasmv1(data.as_bytes()); + let decoded = interface.base58_check_to_bytes_wasmv1(&encoded).unwrap(); + + assert_eq!(data.as_bytes(), decoded); + } + #[test] + fn test_comparison_function() { + let sender_addr = Address::from_public_key(&KeyPair::generate(0).unwrap().get_public_key()); + let interface = InterfaceImpl::new_default(sender_addr, None); + + // address + let addr1 = + Address::from_public_key(&KeyPair::generate(0).unwrap().get_public_key()).to_string(); + let addr2 = + 
Address::from_public_key(&KeyPair::generate(0).unwrap().get_public_key()).to_string(); + + let cmp_res = interface.compare_address_wasmv1(&addr1, &addr1).unwrap(); + println!("compare_address_wasmv1(: {}", cmp_res.as_str_name()); + assert_eq!( + cmp_res, + ComparisonResult::Equal, + " > Error: compare_address_wasmv1((addr1, addr1) should return EQUAL" + ); + + let cmp_res1 = interface.compare_address_wasmv1(&addr1, &addr2).unwrap(); + println!( + "compare_address_wasmv1((addr1, addr2): {}", + cmp_res1.as_str_name() + ); + + let cmp_res2 = interface.compare_address_wasmv1(&addr2, &addr1).unwrap(); + println!( + "compare_address_wasmv1((addr2, addr1): {}", + cmp_res2.as_str_name() + ); + + if cmp_res1 == ComparisonResult::Lower { + assert_eq!( + cmp_res2, + ComparisonResult::Greater, + " > Error: compare_address_wasmv1((addr2, addr1) should return GREATER" + ); + } else if cmp_res1 == ComparisonResult::Greater { + assert_eq!( + cmp_res2, + ComparisonResult::Lower, + " > Error: compare_address_wasmv1((addr2, addr1) should return LOWER" + ); + } else { + assert_eq!( + cmp_res1, cmp_res2, + " > Error: compare_address_wasmv1((addr2, addr1) should return EQUAL" + ); + } + + //amount + let amount1 = interface.native_amount_from_str_wasmv1("1").unwrap(); + let amount2 = interface.native_amount_from_str_wasmv1("2").unwrap(); + println!("do some compare with amount1 = 1, amount2 = 2"); + + let cmp_res = interface + .compare_native_amount_wasmv1(&amount1, &amount1) + .unwrap(); + println!( + "compare_native_amount_wasmv1(amount1, amount1): {}", + cmp_res.as_str_name() + ); + assert_eq!( + cmp_res, + ComparisonResult::Equal, + " > Error: compare_native_amount_wasmv1(amount1, amount1) should return EQUAL" + ); + + let cmp_res = interface + .compare_native_amount_wasmv1(&amount1, &amount2) + .unwrap(); + println!( + "compare_native_amount_wasmv1(amount1, amount2): {}", + cmp_res.as_str_name() + ); + assert_eq!( + cmp_res, + ComparisonResult::Lower, + " > Error: 
compare_native_amount_wasmv1(amount1, amount2) should return LOWER" + ); + + let cmp_res = interface + .compare_native_amount_wasmv1(&amount2, &amount1) + .unwrap(); + println!( + "compare_native_amount_wasmv1(amount2, amount1): {}", + cmp_res.as_str_name() + ); + assert_eq!( + cmp_res, + ComparisonResult::Greater, + " > Error: compare_native_amount_wasmv1(amount2, amount1) should return GREATER" + ); + + //time + let time1 = massa_time_to_native_time(&MassaTime::from_str("1").unwrap()); + let time2 = massa_time_to_native_time(&MassaTime::from_str("2").unwrap()); + println!( + "do some compare with time1 = {}, time2 = {}", + time1.milliseconds.to_string(), + time2.milliseconds.to_string() + ); + + let cmp_res = interface + .compare_native_time_wasmv1(&time1, &time1) + .unwrap(); + println!( + "compare_native_time_wasmv1(time1, time1): {}", + cmp_res.as_str_name() + ); + assert_eq!( + cmp_res, + ComparisonResult::Equal, + " > Error:compare_native_time_wasmv1(time1, time1) should return EQUAL" + ); + + let cmp_res = interface + .compare_native_time_wasmv1(&time1, &time2) + .unwrap(); + println!( + "compare_native_time_wasmv1(time1, time2): {}", + cmp_res.as_str_name() + ); + assert_eq!( + cmp_res, + ComparisonResult::Lower, + " > Error: compare_native_time_wasmv1(time1, time2) should return LOWER" + ); + + let cmp_res = interface + .compare_native_time_wasmv1(&time2, &time1) + .unwrap(); + println!( + "compare_native_time_wasmv1(time2, time1): {}", + cmp_res.as_str_name() + ); + assert_eq!( + cmp_res, + ComparisonResult::Greater, + " > Error: compare_native_time_wasmv1(time2, time1) should return GREATER" + ); + + //pub_key + let pub_key1 = KeyPair::generate(0).unwrap().get_public_key().to_string(); + let pub_key2 = KeyPair::generate(0).unwrap().get_public_key().to_string(); + + println!( + "do some compare with pub_key1 = {}, pub_key2 = {}", + pub_key1, pub_key2 + ); + + let cmp_res = interface + .compare_pub_key_wasmv1(&pub_key1, &pub_key1) + .unwrap(); + println!( 
+ "compare_pub_key_wasmv1(pub_key1, pub_key1): {}", + cmp_res.as_str_name() + ); + assert_eq!( + cmp_res, + ComparisonResult::Equal, + " > Error: compare_pub_key_wasmv1(pub_key1, pub_key1) should return EQUAL" + ); + let cmp_res1 = interface + .compare_pub_key_wasmv1(&pub_key1, &pub_key2) + .unwrap(); + println!( + "compare_pub_key_wasmv1(pub_key1, pub_key2): {}", + cmp_res1.as_str_name() + ); + let cmp_res2 = interface + .compare_pub_key_wasmv1(&pub_key2, &pub_key1) + .unwrap(); + println!( + "compare_pub_key_wasmv1(pub_key2, pub_key1): {}", + cmp_res2.as_str_name() + ); + if cmp_res1 == ComparisonResult::Lower { + assert_eq!( + cmp_res2, + ComparisonResult::Greater, + " > Error: compare_pub_key_wasmv1((pub_key2, pub_key1) should return GREATER" + ); + } else if cmp_res1 == ComparisonResult::Greater { + assert_eq!( + cmp_res2, + ComparisonResult::Lower, + " > Error: compare_pub_key_wasmv1((pub_key2, pub_key1) should return LOWER" + ); + } else { + assert_eq!( + cmp_res1, cmp_res2, + " > Error: compare_pub_key_wasmv1((pub_key2, pub_key1) should return EQUAL" + ); + } + } } #[test] From d723f8416861faaa85a5fe4ea93c740651aea7bb Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Thu, 6 Jul 2023 18:42:38 +0200 Subject: [PATCH 22/71] get_origin_operation_id interface impl & abi cost --- Cargo.lock | 8 ++++---- massa-execution-worker/src/interface_impl.rs | 8 ++++++++ massa-node/base_config/gas_costs/abi_gas_costs.json | 1 + 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 10588effc52..f2adf8353db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2511,7 +2511,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#941e20707e6a9c8ef07e3d3f6f2945342aef27b3" +source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#99b34bb3baac949cc208c1dd788a61e1d19076b8" dependencies = 
[ "glob", "prost", @@ -2524,7 +2524,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#8312366a4e4614de7e664c00764b1afc6238dfb0" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#938f84a0bc874a3cc2532ec3cff4da48b6083895" dependencies = [ "anyhow", "as-ffi-bindings", @@ -6338,9 +6338,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca0ace3845f0d96209f0375e6d367e3eb87eb65d27d445bdc9f1843a26f39448" +checksum = "a9482fe6ceabdf32f3966bfdd350ba69256a97c30253dc616fe0005af24f164e" dependencies = [ "memchr", ] diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 517fe03ee1b..627e2a38431 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -1122,6 +1122,14 @@ impl Interface for InterfaceImpl { Ok(()) } + // Returns the operation id that originated the current execution if there is one + fn get_origin_operation_id(&self) -> Result> { + let operation_id = context_guard!(self) + .origin_operation_id + .map(|op_id| op_id.to_string()); + Ok(operation_id) + } + /// Returns the period of the current execution slot /// /// [DeprecatedByNewRuntime] Replaced by `get_current_slot` diff --git a/massa-node/base_config/gas_costs/abi_gas_costs.json b/massa-node/base_config/gas_costs/abi_gas_costs.json index a96e9b6619e..f4ad518c142 100644 --- a/massa-node/base_config/gas_costs/abi_gas_costs.json +++ b/massa-node/base_config/gas_costs/abi_gas_costs.json @@ -39,6 +39,7 @@ "assembly_script_print": 171, "assembly_script_seed": 72, "assembly_script_send_message": 286, + "assembly_script_get_origin_operation_id": 157, 
"assembly_script_set_bytecode": 187, "assembly_script_set_bytecode_for": 227, "assembly_script_set_data": 188, From d4fc02be3fa5dd8193c31873db942dec9f3a10b4 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Fri, 7 Jul 2023 15:00:44 +0200 Subject: [PATCH 23/71] Implement unsafe_random_wasmv1 in interface (#4231) * Implement unsafe_random_wasmv1 * Added doc comments and deprecation notes --- massa-execution-worker/src/interface_impl.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 627e2a38431..03c627bd0a1 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -30,6 +30,7 @@ use massa_signature::Signature; use massa_time::MassaTime; use parking_lot::Mutex; use rand::Rng; +use rand::RngCore; use sha2::{Digest, Sha256}; use std::collections::BTreeSet; use std::str::FromStr; @@ -1031,6 +1032,8 @@ impl Interface for InterfaceImpl { /// # Warning /// This random number generator is unsafe: /// it can be both predicted and manipulated before the execution + /// + /// [DeprecatedByNewRuntime] Replaced by `unsafe_random_wasmv1` fn unsafe_random(&self) -> Result { let distr = rand::distributions::Uniform::new_inclusive(i64::MIN, i64::MAX); Ok(context_guard!(self).unsafe_rng.sample(distr)) @@ -1041,11 +1044,24 @@ impl Interface for InterfaceImpl { /// # Warning /// This random number generator is unsafe: /// it can be both predicted and manipulated before the execution + /// + /// [DeprecatedByNewRuntime] Replaced by `unsafe_random_wasmv1` fn unsafe_random_f64(&self) -> Result { let distr = rand::distributions::Uniform::new(0f64, 1f64); Ok(context_guard!(self).unsafe_rng.sample(distr)) } + /// Returns a pseudo-random deterministic byte array, with the given number of bytes + /// + /// # Warning + /// This random number generator is unsafe: + /// it can be both predicted and manipulated before the 
execution + fn unsafe_random_wasmv1(&self, num_bytes: u64) -> Result> { + let mut arr = vec![0u8; num_bytes as usize]; + context_guard!(self).unsafe_rng.try_fill_bytes(&mut arr)?; + Ok(arr) + } + /// Adds an asynchronous message to the context speculative asynchronous pool /// /// # Arguments From a4d0e2714a2a821d4c1779ccf117112e03714c73 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Fri, 7 Jul 2023 16:24:53 +0200 Subject: [PATCH 24/71] Fix after merge --- Cargo.lock | 26 ++++++++++---------- massa-execution-worker/src/interface_impl.rs | 6 ++--- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38f3bf522bc..11a63352f7e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2525,7 +2525,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#938f84a0bc874a3cc2532ec3cff4da48b6083895" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#46857ecd9aac5d20c9cb4b560229a7225b3ccb89" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2665,7 +2665,7 @@ dependencies = [ "mockall", "nom", "num", - "num_enum", + "num_enum 0.5.11", "parking_lot", "rand 0.8.5", "serde", @@ -3199,7 +3199,7 @@ dependencies = [ "massa_versioning", "nom", "num", - "num_enum", + "num_enum 0.5.11", "parking_lot", "peernet", "rand 0.8.5", @@ -4767,9 +4767,9 @@ checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" [[package]] name = "serde" -version = "1.0.166" +version = "1.0.167" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8" +checksum = "7daf513456463b42aa1d94cff7e0c24d682b429f020b9afa4f5ba5c40a22b237" dependencies = [ "serde_derive", ] @@ -4787,9 +4787,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.166" +version = "1.0.167" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6" +checksum = "b69b106b68bc8054f0e974e70d19984040f8a5cf9215ca82626ea4853f82c4b9" dependencies = [ "proc-macro2 1.0.63", "quote 1.0.29", @@ -5218,18 +5218,18 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.41" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c16a64ba9387ef3fdae4f9c1a7f07a0997fce91985c0336f1ddc1822b3b37802" +checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.41" +version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d14928354b01c4d6a4f0e549069adef399a284e7995c7ccca94e8a07a5346c59" +checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ "proc-macro2 1.0.63", "quote 1.0.29", @@ -5648,9 +5648,9 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "ucd-trie" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" [[package]] name = "unicode-bidi" diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index d99fd03588c..4e0ec925ce3 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -877,7 +877,7 @@ impl Interface for InterfaceImpl { fn hash_keccak256(&self, bytes: &[u8]) -> Result<[u8; 32]> { Ok(sha3::Keccak256::digest(bytes).into()) } - + /// Transfer coins from the current address (top of the call stack) towards a target address. 
/// /// # Arguments @@ -1034,7 +1034,7 @@ impl Interface for InterfaceImpl { /// # Warning /// This random number generator is unsafe: /// it can be both predicted and manipulated before the execution - /// + /// /// [DeprecatedByNewRuntime] Replaced by `unsafe_random_wasmv1` fn unsafe_random(&self) -> Result { let distr = rand::distributions::Uniform::new_inclusive(i64::MIN, i64::MAX); @@ -1046,7 +1046,7 @@ impl Interface for InterfaceImpl { /// # Warning /// This random number generator is unsafe: /// it can be both predicted and manipulated before the execution - /// + /// /// [DeprecatedByNewRuntime] Replaced by `unsafe_random_wasmv1` fn unsafe_random_f64(&self) -> Result { let distr = rand::distributions::Uniform::new(0f64, 1f64); From 29868c6c7bca870db72b073dfbec21a57918152e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20Morcillo?= Date: Mon, 10 Jul 2023 15:18:29 +0200 Subject: [PATCH 25/71] Sync with massa-sc-runtime#283 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Jean-François Morcillo --- massa-execution-worker/src/interface_impl.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 33633305e84..031fef1863f 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -445,7 +445,7 @@ impl Interface for InterfaceImpl { /// /// # Returns /// The datastore value matching the provided key, if found, otherwise an error. 
- fn raw_get_data_wasmv1(&self, key: &[u8], address: Option) -> Result> { + fn get_ds_value_wasmv1(&self, key: &[u8], address: Option) -> Result> { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -489,7 +489,7 @@ impl Interface for InterfaceImpl { Ok(()) } - fn raw_set_data_wasmv1(&self, key: &[u8], value: &[u8], address: Option) -> Result<()> { + fn set_ds_value_wasmv1(&self, key: &[u8], value: &[u8], address: Option) -> Result<()> { let mut context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -534,7 +534,7 @@ impl Interface for InterfaceImpl { /// * address: string representation of the address /// * key: string key of the datastore entry /// * value: value to append - fn raw_append_data_wasmv1( + fn append_ds_value_wasmv1( &self, key: &[u8], value: &[u8], @@ -581,7 +581,7 @@ impl Interface for InterfaceImpl { /// # Arguments /// * address: string representation of the address /// * key: string key of the datastore entry to delete - fn raw_delete_data_wasmv1(&self, key: &[u8], address: Option) -> Result<()> { + fn delete_ds_entry_wasmv1(&self, key: &[u8], address: Option) -> Result<()> { let mut context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -628,7 +628,7 @@ impl Interface for InterfaceImpl { /// /// # Returns /// true if the address exists and has the entry matching the provided key in its datastore, otherwise false - fn has_data_wasmv1(&self, key: &[u8], address: Option) -> Result { + fn ds_entry_exists_wasmv1(&self, key: &[u8], address: Option) -> Result { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -1541,13 +1541,13 @@ mod tests { let interface = InterfaceImpl::new_default(sender_addr, None); interface - .raw_set_data_wasmv1(b"k1", b"v1", Some(sender_addr.to_string())) + .set_ds_value_wasmv1(b"k1", b"v1", Some(sender_addr.to_string())) 
.unwrap(); interface - .raw_set_data_wasmv1(b"k2", b"v2", Some(sender_addr.to_string())) + .set_ds_value_wasmv1(b"k2", b"v2", Some(sender_addr.to_string())) .unwrap(); interface - .raw_set_data_wasmv1(b"l3", b"v3", Some(sender_addr.to_string())) + .set_ds_value_wasmv1(b"l3", b"v3", Some(sender_addr.to_string())) .unwrap(); let keys = interface.get_keys_wasmv1(b"k", None).unwrap(); From c3567fac298023766e4d58adad35ab117ee9bf59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20Morcillo?= Date: Mon, 10 Jul 2023 16:42:24 +0200 Subject: [PATCH 26/71] Rename blake3_hash to hash_blake3 and use blake3 directly (not via massa::hash) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Jean-François Morcillo --- massa-execution-worker/Cargo.toml | 1 + massa-execution-worker/src/interface_impl.rs | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index 7a4f280d16c..285c1a22fdd 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -8,6 +8,7 @@ edition = "2021" [dependencies] anyhow = "1.0" +blake3 = "=1.3" bs58 = { version = "=0.4", features = ["check"] } rand = "0.8" rand_xoshiro = "0.6" diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 031fef1863f..44005ab7ae2 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -1232,9 +1232,8 @@ impl Interface for InterfaceImpl { /// /// # Returns /// The byte array of the resulting hash - fn blake3_hash(&self, bytes: &[u8]) -> Result<[u8; 32]> { - let hash = massa_hash::Hash::compute_from(bytes); - Ok(hash.into_bytes()) + fn hash_blake3(&self, bytes: &[u8]) -> Result<[u8; 32]> { + Ok(blake3::hash(bytes).into()) } #[allow(unused_variables)] From 18cbacbefacc680526423ff95e9ab45f20c6ae90 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Jean-Fran=C3=A7ois=20Morcillo?= Date: Mon, 10 Jul 2023 17:15:26 +0200 Subject: [PATCH 27/71] Clean some name in massa-sc-runtime interface MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Jean-François Morcillo --- massa-execution-worker/src/interface_impl.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 44005ab7ae2..5ee313728ac 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -390,7 +390,7 @@ impl Interface for InterfaceImpl { /// /// # Returns /// A list of keys (keys are byte arrays) - fn get_keys_wasmv1(&self, prefix: &[u8], address: Option) -> Result>> { + fn get_ds_keys_wasmv1(&self, prefix: &[u8], address: Option) -> Result>> { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -680,7 +680,7 @@ impl Interface for InterfaceImpl { } /// Returns bytecode of the target address, or the current address if not provided - fn raw_get_bytecode_wasmv1(&self, address: Option) -> Result> { + fn get_bytecode_wasmv1(&self, address: Option) -> Result> { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -1201,7 +1201,7 @@ impl Interface for InterfaceImpl { /// Sets the bytecode of an arbitrary address, or the current address if not provided. /// Fails if the address does not exist, is an user address, or if the context doesn't have write access rights on it. 
- fn raw_set_bytecode_wasmv1(&self, bytecode: &[u8], address: Option) -> Result<()> { + fn set_bytecode_wasmv1(&self, bytecode: &[u8], address: Option) -> Result<()> { let mut context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -1549,7 +1549,7 @@ mod tests { .set_ds_value_wasmv1(b"l3", b"v3", Some(sender_addr.to_string())) .unwrap(); - let keys = interface.get_keys_wasmv1(b"k", None).unwrap(); + let keys = interface.get_ds_keys_wasmv1(b"k", None).unwrap(); assert_eq!(keys.len(), 2); assert!(keys.contains(b"k1".as_slice())); From cbd143e00fffb39fc4a32880f1385f068aeccd55 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Tue, 11 Jul 2023 14:58:38 +0200 Subject: [PATCH 28/71] Implement init_call_wasmv1 --- Cargo.lock | 209 ++++++++++--------- massa-execution-worker/src/interface_impl.rs | 55 ++++- 2 files changed, 158 insertions(+), 106 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 11a63352f7e..963f8bdd9c4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -176,9 +176,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -187,9 +187,9 @@ version = "0.1.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -308,7 +308,7 @@ dependencies = [ "lazy_static", "lazycell", "peeking_take_while", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "regex", "rustc-hash", @@ -392,7 +392,7 @@ dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", "proc-macro-crate 0.1.5", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "syn 1.0.109", ] @@ 
-402,7 +402,7 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -413,7 +413,7 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -460,7 +460,7 @@ version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -999,7 +999,7 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "strsim 0.10.0", "syn 1.0.109", @@ -1013,10 +1013,10 @@ checksum = "ab8bfa2e259f8ee1ce5e97824a3c55ec4404a0d772ca7fa96bf19f0752a046eb" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "strsim 0.10.0", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -1038,17 +1038,17 @@ checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a" dependencies = [ "darling_core 0.20.1", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] name = "dashmap" -version = "5.4.0" +version = "5.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +checksum = "6943ae99c34386c84a470c499d3414f66502a41340aa895406e0d2e4a207b91d" dependencies = [ "cfg-if", - "hashbrown 0.12.3", + "hashbrown 0.14.0", "lock_api", "once_cell", "parking_lot_core", @@ -1060,7 +1060,7 @@ version = "2.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -1150,9 +1150,9 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -1177,7 +1177,7 @@ dependencies = [ "byteorder", "lazy_static", "proc-macro-error", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -1250,7 +1250,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -1262,7 +1262,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8ea75f31022cba043afe037940d73684327e915f88f62478e778c3de914cd0a" dependencies = [ "enum_delegate_lib", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -1273,7 +1273,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e1f6c3800b304a6be0012039e2a45a322a093539c45ab818d9e6895a39c90fe" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "rand 0.8.5", "syn 1.0.109", @@ -1295,16 +1295,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08b6c6ab82d70f08844964ba10c7babb716de2ecaeab9be5717918a5177d3af" dependencies = [ "darling 0.20.1", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] name = "equivalent" -version = "1.0.0" +version = "1.0.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" @@ -1499,9 +1499,9 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -2142,7 +2142,7 @@ checksum = "c6027ac0b197ce9543097d02a290f550ce1d9432bf301524b013053c0b75cc94" dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -2512,7 +2512,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#99b34bb3baac949cc208c1dd788a61e1d19076b8" +source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#773b7a546a03a271ead1bb0e95ac3c14eb4fb466" dependencies = [ "glob", "prost", @@ -2525,7 +2525,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#46857ecd9aac5d20c9cb4b560229a7225b3ccb89" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#f6e907da258feebdbd7a62f0574c84d306beabac" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2822,6 +2822,7 @@ name = "massa_execution_worker" version = "0.24.0" dependencies = [ "anyhow", + "blake3", "bs58", "criterion", "hex-literal", @@ -3434,7 +3435,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", - 
"proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -3631,7 +3632,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -3643,9 +3644,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -3786,7 +3787,7 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f35583365be5d148e959284f42526841917b7bfa09e2d1a7ad5dde2cf0eaa39" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -3865,9 +3866,9 @@ checksum = "b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -3906,9 +3907,9 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -4011,7 +4012,7 @@ version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "syn 1.0.109", ] @@ -4041,7 +4042,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - 
"proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", "version_check", @@ -4053,7 +4054,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "version_check", ] @@ -4069,9 +4070,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.63" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" +checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" dependencies = [ "unicode-ident", ] @@ -4146,7 +4147,7 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", "itertools", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -4181,7 +4182,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -4218,7 +4219,7 @@ version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", ] [[package]] @@ -4392,9 +4393,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89089e897c013b3deb627116ae56a6955a72b8bed395c9526af31c9fe528b484" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" dependencies = [ "aho-corasick 1.0.2", "memchr", @@ -4404,9 +4405,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.0" +version = 
"0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa250384981ea14565685dea16a9ccc4d1c541a13f82b9c168572264d1df8c56" +checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" dependencies = [ "aho-corasick 1.0.2", "memchr", @@ -4415,9 +4416,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "region" @@ -4479,7 +4480,7 @@ version = "0.7.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -4588,9 +4589,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.3" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b19faa85ecb5197342b54f987b142fb3e30d0c90da40f80ef4fa9a726e6676ed" +checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" dependencies = [ "log", "ring", @@ -4674,7 +4675,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "107c3d5d7f370ac09efa62a78375f94d94b8a33c61d8c278b96683fb4dbf2d8d" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -4767,9 +4768,9 @@ checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" [[package]] name = "serde" -version = "1.0.167" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7daf513456463b42aa1d94cff7e0c24d682b429f020b9afa4f5ba5c40a22b237" +checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" dependencies = [ "serde_derive", ] @@ 
-4787,13 +4788,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.167" +version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b69b106b68bc8054f0e974e70d19984040f8a5cf9215ca82626ea4853f82c4b9" +checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -4841,9 +4842,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ "darling 0.20.1", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -4880,7 +4881,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "079a83df15f85d89a68d64ae1238f142f172b1fa915d0d76b26a7cba1b659a69" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -4891,9 +4892,9 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -5088,7 +5089,7 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -5109,7 +5110,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "rustversion", "syn 1.0.109", @@ -5147,18 +5148,18 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.23" +version = "2.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737" +checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "unicode-ident", ] @@ -5231,9 +5232,9 @@ version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -5259,9 +5260,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.22" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd" +checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" dependencies = [ "itoa", "serde", @@ -5277,9 +5278,9 @@ checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "372950940a5f07bf38dbe211d7283c9e6d7327df53794992d293e534c733d09b" +checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" dependencies = [ "time-core", ] @@ -5345,9 +5346,9 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -5452,7 +5453,7 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" dependencies = [ "prettyplease", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "prost-build", "quote 1.0.29", "syn 1.0.109", @@ -5574,9 +5575,9 @@ version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] @@ -5628,7 +5629,7 @@ version = "0.1.0" source = "git+https://github.com/massalabs/transition.git?rev=93fa3bf82f9f5ff421c78536879b7fd1b948ca75#93fa3bf82f9f5ff421c78536879b7fd1b948ca75" dependencies = [ "darling 0.14.4", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", "unsigned-varint", @@ -5812,9 +5813,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", "wasm-bindgen-shared", ] @@ -5836,7 +5837,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -5869,9 +5870,9 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5884,9 +5885,9 @@ checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-encoder" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"18c41dbd92eaebf3612a39be316540b8377c871cb9bde6b064af962984912881" +checksum = "b2f8e9778e04cbf44f58acc301372577375a666b966c50b03ef46144f80436a8" dependencies = [ "leb128", ] @@ -5988,7 +5989,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97901fdbaae383dbb90ea162cc3a76a9fa58ac39aec7948b4c0b9bbef9307738" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", "syn 1.0.109", ] @@ -6070,9 +6071,9 @@ dependencies = [ [[package]] name = "wast" -version = "60.0.0" +version = "61.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd06cc744b536e30387e72a48fdd492105b9c938bb4f415c39c616a7a0a697ad" +checksum = "dc6b347851b52fd500657d301155c79e8c67595501d179cef87b6f04ebd25ac4" dependencies = [ "leb128", "memchr", @@ -6082,9 +6083,9 @@ dependencies = [ [[package]] name = "wat" -version = "1.0.66" +version = "1.0.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5abe520f0ab205366e9ac7d3e6b2fc71de44e32a2b58f2ec871b6b575bdcea3b" +checksum = "459e764d27c3ab7beba1ebd617cc025c7e76dea6e7c5ce3189989a970aea3491" dependencies = [ "wast", ] @@ -6336,9 +6337,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9482fe6ceabdf32f3966bfdd350ba69256a97c30253dc616fe0005af24f164e" +checksum = "81a2094c43cc94775293eaa0e499fbc30048a6d824ac82c0351a8c0bf9112529" dependencies = [ "memchr", ] @@ -6376,9 +6377,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.64", "quote 1.0.29", - "syn 2.0.23", + "syn 2.0.25", ] [[package]] diff --git a/massa-execution-worker/src/interface_impl.rs 
b/massa-execution-worker/src/interface_impl.rs index 5ee313728ac..39395931f01 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -390,7 +390,11 @@ impl Interface for InterfaceImpl { /// /// # Returns /// A list of keys (keys are byte arrays) - fn get_ds_keys_wasmv1(&self, prefix: &[u8], address: Option) -> Result>> { + fn get_ds_keys_wasmv1( + &self, + prefix: &[u8], + address: Option, + ) -> Result>> { let context = context_guard!(self); let address = get_address_from_opt_or_context(&context, address)?; @@ -1238,7 +1242,54 @@ impl Interface for InterfaceImpl { #[allow(unused_variables)] fn init_call_wasmv1(&self, address: &str, raw_coins: NativeAmount) -> Result> { - unimplemented!("init_call") + // get target address + let to_address = Address::from_str(address)?; + + // check that the target address is an SC address + if !matches!(to_address, Address::SC(..)) { + bail!("called address {} is not an SC address", to_address); + } + + // write-lock context + let mut context = context_guard!(self); + + // get target bytecode + let bytecode = match context.get_bytecode(&to_address) { + Some(bytecode) => bytecode, + None => bail!("bytecode not found for address {}", to_address), + }; + + // get caller address + let from_address = match context.stack.last() { + Some(addr) => addr.address, + _ => bail!("failed to read call stack current address"), + }; + + // transfer coins from caller to target address + let coins = amount_from_native_amount(&raw_coins)?; + // note: rights are not checked here we checked that to_address is an SC address above + // and we know that the sender is at the top of the call stack + if let Err(err) = context.transfer_coins(Some(from_address), Some(to_address), coins, false) + { + bail!( + "error transferring {} coins from {} to {}: {}", + coins, + from_address, + to_address, + err + ); + } + + // push a new call stack element on top of the current call stack + 
context.stack.push(ExecutionStackElement { + address: to_address, + coins, + owned_addresses: vec![to_address], + operation_datastore: None, + }); + + // return the target bytecode + Ok(bytecode.0) } /// Returns a NativeAmount from a string From 232bdf7cafba93012f3bb8265a27acf0090e7dd2 Mon Sep 17 00:00:00 2001 From: Damir Vodenicarevic Date: Tue, 11 Jul 2023 17:33:57 +0200 Subject: [PATCH 29/71] incremental hash (#4197) * incremental hash * rebase * reformat * read/write to final state * factor prng * imrovements * remove comment * final_state_db corrrection * load initial trail hash * clippy * fix tests * debug tests * Fix bootstrap test * Fix execution tests * remove comment --------- Co-authored-by: Leo-Besancon --- massa-async-pool/src/message.rs | 4 +- massa-bootstrap/src/client.rs | 1 + massa-bootstrap/src/tests/scenarios.rs | 5 +- massa-bootstrap/src/tests/tools.rs | 18 ++- massa-db-exports/src/constants.rs | 3 +- massa-execution-worker/src/active_history.rs | 11 ++ massa-execution-worker/src/context.rs | 108 ++++++++++++------ massa-execution-worker/src/execution.rs | 14 ++- massa-execution-worker/src/interface_impl.rs | 1 + massa-execution-worker/src/tests/mock.rs | 1 + .../src/tests/tests_active_history.rs | 1 + massa-final-state/src/final_state.rs | 101 +++++++++++++--- massa-final-state/src/state_changes.rs | 40 +++++-- massa-hash/src/hash.rs | 16 +++ 14 files changed, 251 insertions(+), 73 deletions(-) diff --git a/massa-async-pool/src/message.rs b/massa-async-pool/src/message.rs index 393dd8c1b0c..b2dbfca4274 100644 --- a/massa-async-pool/src/message.rs +++ b/massa-async-pool/src/message.rs @@ -321,7 +321,7 @@ impl Default for AsyncMessage { destination: genesis_address, validity_start: slot_zero, validity_end: slot_zero, - hash: Hash::from_bytes(&[0; 32]), + hash: Hash::zero(), ..Default::default() } } @@ -362,7 +362,7 @@ impl AsyncMessage { can_be_executed: can_be_executed.unwrap_or(trigger.is_none()), trigger, // placeholder hash to serialize the 
message, replaced below - hash: Hash::from_bytes(&[0; 32]), + hash: Hash::zero(), }; async_message_ser .serialize(&message, &mut buffer) diff --git a/massa-bootstrap/src/client.rs b/massa-bootstrap/src/client.rs index da06eb429f1..0279f09c321 100644 --- a/massa-bootstrap/src/client.rs +++ b/massa-bootstrap/src/client.rs @@ -429,6 +429,7 @@ pub fn get_state( err )) })?; + final_state_guard.init_execution_trail_hash(); } // create the initial cycle of PoS cycle_history diff --git a/massa-bootstrap/src/tests/scenarios.rs b/massa-bootstrap/src/tests/scenarios.rs index bf8430da292..1da9f9b86f2 100644 --- a/massa-bootstrap/src/tests/scenarios.rs +++ b/massa-bootstrap/src/tests/scenarios.rs @@ -6,7 +6,8 @@ use super::tools::{ use crate::listener::PollEvent; use crate::tests::tools::{ assert_eq_bootstrap_graph, get_random_async_pool_changes, get_random_executed_de_changes, - get_random_executed_ops_changes, get_random_pos_changes, + get_random_executed_ops_changes, get_random_execution_trail_hash_change, + get_random_pos_changes, }; use crate::BootstrapError; use crate::{ @@ -307,6 +308,7 @@ fn test_bootstrap_server() { async_pool_changes: get_random_async_pool_changes(10, thread_count), executed_ops_changes: get_random_executed_ops_changes(10), executed_denunciations_changes: get_random_executed_de_changes(10), + execution_trail_hash_change: get_random_execution_trail_hash_change(true), }; let next = current_slot.get_next_slot(thread_count).unwrap(); @@ -452,6 +454,7 @@ fn test_bootstrap_server() { async_pool_changes: get_random_async_pool_changes(10, thread_count), executed_ops_changes: get_random_executed_ops_changes(10), executed_denunciations_changes: get_random_executed_de_changes(10), + execution_trail_hash_change: get_random_execution_trail_hash_change(true), }; let mut batch = DBBatch::new(); diff --git a/massa-bootstrap/src/tests/tools.rs b/massa-bootstrap/src/tests/tools.rs index 79fec33a010..7eba6fcbab7 100644 --- a/massa-bootstrap/src/tests/tools.rs +++ 
b/massa-bootstrap/src/tests/tools.rs @@ -18,7 +18,7 @@ use massa_executed_ops::{ use massa_final_state::test_exports::create_final_state; use massa_final_state::{FinalState, FinalStateConfig}; use massa_hash::Hash; -use massa_ledger_exports::{LedgerChanges, LedgerEntry, SetUpdateOrDelete}; +use massa_ledger_exports::{LedgerChanges, LedgerEntry, SetOrKeep, SetUpdateOrDelete}; use massa_ledger_worker::test_exports::create_final_ledger; use massa_models::block::BlockDeserializerArgs; use massa_models::bytecode::Bytecode; @@ -287,6 +287,15 @@ pub fn get_random_executed_de_changes(r_limit: u64) -> ExecutedDenunciationsChan de_changes } +/// generates a random execution trail hash change +pub fn get_random_execution_trail_hash_change(always_set: bool) -> SetOrKeep { + if always_set || rand::thread_rng().gen() { + SetOrKeep::Set(Hash::compute_from(&get_some_random_bytes())) + } else { + SetOrKeep::Keep + } +} + /// generates a random bootstrap state for the final state pub fn get_random_final_state_bootstrap( pos: PoSFinalState, @@ -344,7 +353,7 @@ pub fn get_random_final_state_bootstrap( )) .unwrap(); - create_final_state( + let mut final_state = create_final_state( config, Box::new(final_ledger), async_pool, @@ -353,7 +362,10 @@ pub fn get_random_final_state_bootstrap( executed_denunciations, mip_store, db, - ) + ); + + final_state.init_execution_trail_hash(); + final_state } pub fn get_dummy_block_id(s: &str) -> BlockId { diff --git a/massa-db-exports/src/constants.rs b/massa-db-exports/src/constants.rs index 05a1d634b46..e57c2e902d6 100644 --- a/massa-db-exports/src/constants.rs +++ b/massa-db-exports/src/constants.rs @@ -1,6 +1,4 @@ // Commons -pub const LSMTREE_NODES_CF: &str = "lsmtree_nodes"; -pub const LSMTREE_VALUES_CF: &str = "lsmtree_values"; pub const METADATA_CF: &str = "metadata"; pub const STATE_CF: &str = "state"; pub const VERSIONING_CF: &str = "versioning"; @@ -31,6 +29,7 @@ pub const EXECUTED_DENUNCIATIONS_PREFIX: &str = "executed_denunciations/"; pub 
const LEDGER_PREFIX: &str = "ledger/"; pub const MIP_STORE_PREFIX: &str = "versioning/"; pub const MIP_STORE_STATS_PREFIX: &str = "versioning_stats/"; +pub const EXECUTION_TRAIL_HASH_PREFIX: &str = "execution_trail_hash/"; // Async Pool pub const MESSAGE_DESER_ERROR: &str = "critical: message deserialization failed"; diff --git a/massa-execution-worker/src/active_history.rs b/massa-execution-worker/src/active_history.rs index 945687c33fa..e24a40c92ba 100644 --- a/massa-execution-worker/src/active_history.rs +++ b/massa-execution-worker/src/active_history.rs @@ -231,6 +231,17 @@ impl ActiveHistory { None } + /// Gets the execution trail hash + pub fn get_execution_trail_hash(&self) -> HistorySearchResult { + for history_element in self.0.iter().rev() { + if let SetOrKeep::Set(hash) = history_element.state_changes.execution_trail_hash_change + { + return HistorySearchResult::Present(hash); + } + } + HistorySearchResult::NoInfo + } + /// Gets the index of a slot in history pub fn get_slot_index(&self, slot: &Slot, thread_count: u8) -> SlotIndexPosition { let first_slot = match self.0.front() { diff --git a/massa-execution-worker/src/context.rs b/massa-execution-worker/src/context.rs index cf403d1c9b2..802109b0aa7 100644 --- a/massa-execution-worker/src/context.rs +++ b/massa-execution-worker/src/context.rs @@ -7,6 +7,7 @@ //! More generally, the context acts only on its own state //! and does not write anything persistent to the consensus state. 
+use crate::active_history::HistorySearchResult; use crate::speculative_async_pool::SpeculativeAsyncPool; use crate::speculative_executed_denunciations::SpeculativeExecutedDenunciations; use crate::speculative_executed_ops::SpeculativeExecutedOps; @@ -21,7 +22,7 @@ use massa_execution_exports::{ }; use massa_final_state::{FinalState, StateChanges}; use massa_hash::Hash; -use massa_ledger_exports::LedgerChanges; +use massa_ledger_exports::{LedgerChanges, SetOrKeep}; use massa_models::address::ExecutionAddressCycleInfo; use massa_models::bytecode::Bytecode; use massa_models::denunciation::DenunciationIndex; @@ -155,13 +156,16 @@ pub struct ExecutionContext { /// operation id that originally caused this execution (if any) pub origin_operation_id: Option, - // cache of compiled runtime modules + /// Execution trail hash + pub execution_trail_hash: Hash, + + /// cache of compiled runtime modules pub module_cache: Arc>, - // Vesting Manager + /// Vesting Manager pub vesting_manager: Arc, - // Address factory + /// Address factory pub address_factory: AddressFactory, } @@ -183,6 +187,7 @@ impl ExecutionContext { module_cache: Arc>, vesting_manager: Arc, mip_store: MipStore, + execution_trail_hash: massa_hash::Hash, ) -> Self { ExecutionContext { speculative_ledger: SpeculativeLedger::new( @@ -219,13 +224,14 @@ impl ExecutionContext { stack: Default::default(), read_only: Default::default(), events: Default::default(), - unsafe_rng: Xoshiro256PlusPlus::from_seed([0u8; 32]), + unsafe_rng: init_prng(&execution_trail_hash), creator_address: Default::default(), origin_operation_id: Default::default(), module_cache, config, vesting_manager, address_factory: AddressFactory { mip_store }, + execution_trail_hash, } } @@ -306,20 +312,14 @@ impl ExecutionContext { vesting_manager: Arc, mip_store: MipStore, ) -> Self { - // Deterministically seed the unsafe RNG to allow the bytecode to use it. - // Note that consecutive read-only calls for the same slot will get the same random seed. 
- - // Add the current slot to the seed to ensure different draws at every slot - let mut seed: Vec = slot.to_bytes_key().to_vec(); - // Add a marker to the seed indicating that we are in read-only mode - // to prevent random draw collisions with active executions - seed.push(0u8); // 0u8 = read-only - let seed = massa_hash::Hash::compute_from(&seed).into_bytes(); - // We use Xoshiro256PlusPlus because it is very fast, - // has a period long enough to ensure no repetitions will ever happen, - // of decent quality (given the unsafe constraints) - // but not cryptographically secure (and that's ok because the internal state is exposed anyways) - let unsafe_rng = Xoshiro256PlusPlus::from_seed(seed); + // Get the execution hash trail + let prev_execution_trail_hash = active_history.read().get_execution_trail_hash(); + let prev_execution_trail_hash = match prev_execution_trail_hash { + HistorySearchResult::Present(h) => h, + _ => final_state.read().get_execution_trail_hash(), + }; + let execution_trail_hash = + generate_execution_trail_hash(&prev_execution_trail_hash, &slot, None, true); // return readonly context ExecutionContext { @@ -327,7 +327,6 @@ impl ExecutionContext { slot, stack: call_stack, read_only: true, - unsafe_rng, ..ExecutionContext::new( config, final_state, @@ -335,6 +334,7 @@ impl ExecutionContext { module_cache, vesting_manager, mip_store, + execution_trail_hash, ) } } @@ -380,26 +380,23 @@ impl ExecutionContext { vesting_manager: Arc, mip_store: MipStore, ) -> Self { - // Deterministically seed the unsafe RNG to allow the bytecode to use it. 
- - // Add the current slot to the seed to ensure different draws at every slot - let mut seed: Vec = slot.to_bytes_key().to_vec(); - // Add a marker to the seed indicating that we are in active mode - // to prevent random draw collisions with read-only executions - seed.push(1u8); // 1u8 = active - - // For more deterministic entropy, seed with the block ID if any - if let Some(block_id) = &opt_block_id { - seed.extend(block_id.to_bytes()); // append block ID - } - let seed = massa_hash::Hash::compute_from(&seed).into_bytes(); - let unsafe_rng = Xoshiro256PlusPlus::from_seed(seed); + // Get the execution hash trail + let prev_execution_trail_hash = active_history.read().get_execution_trail_hash(); + let prev_execution_trail_hash = match prev_execution_trail_hash { + HistorySearchResult::Present(h) => h, + _ => final_state.read().get_execution_trail_hash(), + }; + let execution_trail_hash = generate_execution_trail_hash( + &prev_execution_trail_hash, + &slot, + opt_block_id.as_ref(), + false, + ); // return active slot execution context ExecutionContext { slot, opt_block_id, - unsafe_rng, ..ExecutionContext::new( config, final_state, @@ -407,6 +404,7 @@ impl ExecutionContext { module_cache, vesting_manager, mip_store, + execution_trail_hash, ) } } @@ -936,6 +934,7 @@ impl ExecutionContext { pos_changes: self.speculative_roll_state.take(), executed_ops_changes: self.speculative_executed_ops.take(), executed_denunciations_changes: self.speculative_executed_denunciations.take(), + execution_trail_hash_change: SetOrKeep::Set(self.execution_trail_hash), }; std::mem::take(&mut self.opt_block_id); @@ -1074,3 +1073,42 @@ impl ExecutionContext { .get_address_deferred_credits(address, min_slot) } } + +/// Generate the execution trail hash +fn generate_execution_trail_hash( + previous_execution_trail_hash: &massa_hash::Hash, + slot: &Slot, + opt_block_id: Option<&BlockId>, + read_only: bool, +) -> massa_hash::Hash { + match opt_block_id { + None => 
massa_hash::Hash::compute_from_tuple(&[ + previous_execution_trail_hash.to_bytes(), + &slot.to_bytes_key(), + &[if read_only { 1u8 } else { 0u8 }, 0u8], + ]), + Some(block_id) => massa_hash::Hash::compute_from_tuple(&[ + previous_execution_trail_hash.to_bytes(), + &slot.to_bytes_key(), + &[if read_only { 1u8 } else { 0u8 }, 1u8], + block_id.to_bytes(), + ]), + } +} + +/// Initializes and seeds the PRNG with the given execution trail hash. +fn init_prng(execution_trail_hash: &massa_hash::Hash) -> Xoshiro256PlusPlus { + // Deterministically seed the unsafe RNG to allow the bytecode to use it. + // Note that consecutive read-only calls for the same slot will get the same random seed. + let seed = massa_hash::Hash::compute_from_tuple(&[ + "PRNG_SEED".as_bytes(), + execution_trail_hash.to_bytes(), + ]) + .into_bytes(); + + // We use Xoshiro256PlusPlus because it is very fast, + // has a period long enough to ensure no repetitions will ever happen, + // of decent quality (given the unsafe constraints) + // but not cryptographically secure (and that's ok because the internal state is exposed anyways) + Xoshiro256PlusPlus::from_seed(seed) +} diff --git a/massa-execution-worker/src/execution.rs b/massa-execution-worker/src/execution.rs index 55693205a6f..cf7392424c0 100644 --- a/massa-execution-worker/src/execution.rs +++ b/massa-execution-worker/src/execution.rs @@ -118,12 +118,13 @@ impl ExecutionState { ) -> ExecutionState { // Get the slot at the output of which the final state is attached. // This should be among the latest final slots. 
- let last_final_slot = final_state - .read() - .db - .read() - .get_change_id() - .expect("Critical error: Final state has no slot attached"); + let last_final_slot; + let execution_trail_hash; + { + let final_state_read = final_state.read(); + last_final_slot = final_state_read.get_slot(); + execution_trail_hash = final_state_read.get_execution_trail_hash(); + } // Create default active history let active_history: Arc> = Default::default(); @@ -160,6 +161,7 @@ impl ExecutionState { module_cache.clone(), vesting_manager.clone(), mip_store.clone(), + execution_trail_hash, ))); // Instantiate the interface providing ABI access to the VM, share the execution context with it diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index b53f4a00fba..cecaf174ed3 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -119,6 +119,7 @@ impl InterfaceImpl { module_cache, vesting_manager, mip_store, + massa_hash::Hash::zero(), ); execution_context.stack = vec![ExecutionStackElement { address: sender_addr, diff --git a/massa-execution-worker/src/tests/mock.rs b/massa-execution-worker/src/tests/mock.rs index a99a6be5d34..e4ac566ee96 100644 --- a/massa-execution-worker/src/tests/mock.rs +++ b/massa-execution-worker/src/tests/mock.rs @@ -144,6 +144,7 @@ pub fn get_sample_state( ) .unwrap() }; + final_state.init_execution_trail_hash(); let mut batch: BTreeMap, Option>> = DBBatch::new(); final_state.pos_state.create_initial_cycle(&mut batch); final_state diff --git a/massa-execution-worker/src/tests/tests_active_history.rs b/massa-execution-worker/src/tests/tests_active_history.rs index 7291570af24..2f383c60bcc 100644 --- a/massa-execution-worker/src/tests/tests_active_history.rs +++ b/massa-execution-worker/src/tests/tests_active_history.rs @@ -55,6 +55,7 @@ mod tests { }, executed_ops_changes: Default::default(), executed_denunciations_changes: Default::default(), + 
execution_trail_hash_change: Default::default(), }, events: Default::default(), }; diff --git a/massa-final-state/src/final_state.rs b/massa-final-state/src/final_state.rs index 244892893ab..48a62cf0c0f 100644 --- a/massa-final-state/src/final_state.rs +++ b/massa-final-state/src/final_state.rs @@ -8,6 +8,7 @@ use crate::{config::FinalStateConfig, error::FinalStateError, state_changes::StateChanges}; use massa_async_pool::AsyncPool; +use massa_db_exports::EXECUTION_TRAIL_HASH_PREFIX; use massa_db_exports::{ DBBatch, MassaIteratorMode, ShareableMassaDBController, ASYNC_POOL_PREFIX, CHANGE_ID_DESER_ERROR, CYCLE_HISTORY_PREFIX, DEFERRED_CREDITS_PREFIX, @@ -16,6 +17,7 @@ use massa_db_exports::{ use massa_executed_ops::ExecutedDenunciations; use massa_executed_ops::ExecutedOps; use massa_ledger_exports::LedgerController; +use massa_ledger_exports::SetOrKeep; use massa_models::slot::Slot; use massa_pos_exports::{PoSFinalState, SelectorController}; use massa_versioning::versioning::MipStore; @@ -117,6 +119,11 @@ impl FinalState { }; if reset_final_state { + // delete the execution trail hash + final_state + .db + .write() + .delete_prefix(EXECUTION_TRAIL_HASH_PREFIX, STATE_CF, None); final_state.async_pool.reset(); final_state.pos_state.reset(); final_state.executed_ops.reset(); @@ -141,6 +148,30 @@ impl FinalState { massa_hash::Hash::compute_from(internal_hash.to_bytes()) } + /// Get the slot at the end of which the final state is attached + pub fn get_slot(&self) -> Slot { + self.db + .read() + .get_change_id() + .expect("Critical error: Final state has no slot attached") + } + + /// Gets the hash of the execution trail + pub fn get_execution_trail_hash(&self) -> massa_hash::Hash { + let hash_bytes = self + .db + .read() + .get_cf(STATE_CF, EXECUTION_TRAIL_HASH_PREFIX.as_bytes().to_vec()) + .expect("could not read execution trail hash from state DB") + .expect("could not find execution trail hash in state DB"); + massa_hash::Hash::from_bytes( + hash_bytes + .as_slice() 
+ .try_into() + .expect("invalid execution trail hash in state DB"), + ) + } + /// Initializes a `FinalState` from a snapshot. Currently, we do not use the final_state from the ledger, /// we just create a new one. This will be changed in the follow-up. /// @@ -535,6 +566,10 @@ impl FinalState { self.executed_ops.reset(); self.executed_denunciations.reset(); self.mip_store.reset_db(self.db.clone()); + // delete the execution trail hash + self.db + .write() + .delete_prefix(EXECUTION_TRAIL_HASH_PREFIX, STATE_CF, None); } /// Performs the initial draws. @@ -616,6 +651,14 @@ impl FinalState { ) }); + // Update execution trail hash + if let SetOrKeep::Set(new_hash) = changes.execution_trail_hash_change { + db_batch.insert( + EXECUTION_TRAIL_HASH_PREFIX.as_bytes().to_vec(), + Some(new_hash.to_bytes().to_vec()), + ); + } + self.db .write() .write_batch(db_batch, db_versioning_batch, Some(slot)); @@ -666,23 +709,29 @@ impl FinalState { pub fn is_db_valid(&self) -> bool { let db = self.db.read(); - for (serialized_key, serialized_value) in db.iterator_cf(STATE_CF, MassaIteratorMode::Start) + // check if the execution trial hash is present and valid { - if !serialized_key.starts_with(CYCLE_HISTORY_PREFIX.as_bytes()) - && !serialized_key.starts_with(DEFERRED_CREDITS_PREFIX.as_bytes()) - && !serialized_key.starts_with(ASYNC_POOL_PREFIX.as_bytes()) - && !serialized_key.starts_with(EXECUTED_OPS_PREFIX.as_bytes()) - && !serialized_key.starts_with(EXECUTED_DENUNCIATIONS_PREFIX.as_bytes()) - && !serialized_key.starts_with(LEDGER_PREFIX.as_bytes()) - && !serialized_key.starts_with(MIP_STORE_PREFIX.as_bytes()) - { - warn!( - "Key/value does not correspond to any prefix: serialized_key: {:?}, serialized_value: {:?}", - serialized_key, serialized_value - ); + let execution_trail_hash_serialized = + match db.get_cf(STATE_CF, EXECUTION_TRAIL_HASH_PREFIX.as_bytes().to_vec()) { + Ok(Some(v)) => v, + Ok(None) => { + warn!("No execution trail hash found in DB"); + return false; + } + 
Err(err) => { + warn!("Error reading execution trail hash from DB: {}", err); + return false; + } + }; + if let Err(err) = massa_hash::Hash::try_from(&execution_trail_hash_serialized[..]) { + warn!("Invalid execution trail hash found in DB: {}", err); return false; } + } + for (serialized_key, serialized_value) in db.iterator_cf(STATE_CF, MassaIteratorMode::Start) + { + #[allow(clippy::if_same_then_else)] if serialized_key.starts_with(CYCLE_HISTORY_PREFIX.as_bytes()) { if !self .pos_state @@ -735,13 +784,21 @@ impl FinalState { warn!("Wrong key/value for EXECUTED_DENUNCIATIONS PREFIX serialized_key: {:?}, serialized_value: {:?}", serialized_key, serialized_value); return false; } - } else if serialized_key.starts_with(LEDGER_PREFIX.as_bytes()) - && !self + } else if serialized_key.starts_with(LEDGER_PREFIX.as_bytes()) { + if !self .ledger .is_key_value_valid(&serialized_key, &serialized_value) - { + { + warn!("Wrong key/value for LEDGER PREFIX serialized_key: {:?}, serialized_value: {:?}", serialized_key, serialized_value); + return false; + } + } else if serialized_key.starts_with(MIP_STORE_PREFIX.as_bytes()) { + // TODO: check MIP_STORE_PREFIX + } else if serialized_key.starts_with(EXECUTION_TRAIL_HASH_PREFIX.as_bytes()) { + // no checks here as they are performed above by direct reading + } else { warn!( - "Wrong key/value for LEDGER PREFIX serialized_key: {:?}, serialized_value: {:?}", + "Key/value does not correspond to any prefix: serialized_key: {:?}, serialized_value: {:?}", serialized_key, serialized_value ); return false; @@ -750,4 +807,14 @@ impl FinalState { true } + + /// Initialize the execution trail hash to zero. 
+ pub fn init_execution_trail_hash(&mut self) { + let mut db_batch = DBBatch::new(); + db_batch.insert( + EXECUTION_TRAIL_HASH_PREFIX.as_bytes().to_vec(), + Some(massa_hash::Hash::zero().to_bytes().to_vec()), + ); + self.db.write().write_batch(db_batch, DBBatch::new(), None); + } } diff --git a/massa-final-state/src/state_changes.rs b/massa-final-state/src/state_changes.rs index d5bd67ffc3d..fa23b214af3 100644 --- a/massa-final-state/src/state_changes.rs +++ b/massa-final-state/src/state_changes.rs @@ -10,7 +10,11 @@ use massa_executed_ops::{ ExecutedDenunciationsChangesSerializer, ExecutedOpsChanges, ExecutedOpsChangesDeserializer, ExecutedOpsChangesSerializer, }; -use massa_ledger_exports::{LedgerChanges, LedgerChangesDeserializer, LedgerChangesSerializer}; +use massa_hash::{HashDeserializer, HashSerializer}; +use massa_ledger_exports::{ + LedgerChanges, LedgerChangesDeserializer, LedgerChangesSerializer, SetOrKeep, + SetOrKeepDeserializer, SetOrKeepSerializer, +}; use massa_pos_exports::{PoSChanges, PoSChangesDeserializer, PoSChangesSerializer}; use massa_serialization::{Deserializer, SerializeError, Serializer}; use nom::{ @@ -33,6 +37,8 @@ pub struct StateChanges { pub executed_ops_changes: ExecutedOpsChanges, /// executed denunciations changes pub executed_denunciations_changes: ExecutedDenunciationsChanges, + /// execution trail hash change + pub execution_trail_hash_change: SetOrKeep, } /// Basic `StateChanges` serializer. 
@@ -42,6 +48,7 @@ pub struct StateChangesSerializer { pos_changes_serializer: PoSChangesSerializer, ops_changes_serializer: ExecutedOpsChangesSerializer, de_changes_serializer: ExecutedDenunciationsChangesSerializer, + execution_trail_hash_change_serializer: SetOrKeepSerializer, } impl Default for StateChangesSerializer { @@ -59,6 +66,7 @@ impl StateChangesSerializer { pos_changes_serializer: PoSChangesSerializer::new(), ops_changes_serializer: ExecutedOpsChangesSerializer::new(), de_changes_serializer: ExecutedDenunciationsChangesSerializer::new(), + execution_trail_hash_change_serializer: SetOrKeepSerializer::new(HashSerializer::new()), } } } @@ -123,6 +131,8 @@ impl Serializer for StateChangesSerializer { .serialize(&value.executed_ops_changes, buffer)?; self.de_changes_serializer .serialize(&value.executed_denunciations_changes, buffer)?; + self.execution_trail_hash_change_serializer + .serialize(&value.execution_trail_hash_change, buffer)?; Ok(()) } } @@ -134,6 +144,8 @@ pub struct StateChangesDeserializer { pos_changes_deserializer: PoSChangesDeserializer, ops_changes_deserializer: ExecutedOpsChangesDeserializer, de_changes_deserializer: ExecutedDenunciationsChangesDeserializer, + execution_trail_hash_change_deserializer: + SetOrKeepDeserializer, } impl StateChangesDeserializer { @@ -182,6 +194,9 @@ impl StateChangesDeserializer { endorsement_count, max_de_changes_length, ), + execution_trail_hash_change_deserializer: SetOrKeepDeserializer::new( + HashDeserializer::new(), + ), } } } @@ -261,21 +276,30 @@ impl Deserializer for StateChangesDeserializer { context("Failed de_changes deserialization", |input| { self.de_changes_deserializer.deserialize(input) }), + context( + "Failed execution_trail_hash_change deserialization", + |input| { + self.execution_trail_hash_change_deserializer + .deserialize(input) + }, + ), )), ) .map( |( ledger_changes, async_pool_changes, - roll_state_changes, - executed_ops, - executed_denunciations, + pos_changes, + 
executed_ops_changes, + executed_denunciations_changes, + execution_trail_hash_change, )| StateChanges { ledger_changes, async_pool_changes, - pos_changes: roll_state_changes, - executed_ops_changes: executed_ops, - executed_denunciations_changes: executed_denunciations, + pos_changes, + executed_ops_changes, + executed_denunciations_changes, + execution_trail_hash_change, }, ) .parse(buffer) @@ -291,5 +315,7 @@ impl StateChanges { self.pos_changes.extend(changes.pos_changes); self.executed_ops_changes .extend(changes.executed_ops_changes); + self.execution_trail_hash_change + .apply(changes.execution_trail_hash_change); } } diff --git a/massa-hash/src/hash.rs b/massa-hash/src/hash.rs index a663691a8e9..ac9dd0bafbf 100644 --- a/massa-hash/src/hash.rs +++ b/massa-hash/src/hash.rs @@ -70,6 +70,11 @@ impl std::fmt::Debug for Hash { } impl Hash { + /// Creates a hash full of zeros bytes. + pub fn zero() -> Self { + Hash(blake3::Hash::from([0; HASH_SIZE_BYTES])) + } + /// Compute a hash from data. /// /// # Example @@ -179,6 +184,17 @@ impl Hash { } } +impl TryFrom<&[u8]> for Hash { + type Error = MassaHashError; + + /// Try parsing from byte slice. 
+ fn try_from(value: &[u8]) -> Result { + Ok(Hash::from_bytes(value.try_into().map_err(|err| { + MassaHashError::ParsingError(format!("{}", err)) + })?)) + } +} + /// Serializer for `Hash` #[derive(Default, Clone)] pub struct HashSerializer; From 1e6fb956b8d25434bb2616502add5d24d69bd8e6 Mon Sep 17 00:00:00 2001 From: Damir Vodenicarevic Date: Thu, 13 Jul 2023 11:53:13 +0200 Subject: [PATCH 30/71] correct op propagation towards disconnected peers (#4249) --- .../src/handlers/operation_handler/retrieval.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs index 1f83e123ba9..d3799eea469 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs @@ -287,6 +287,15 @@ impl RetrievalThread { mut op_batch: OperationPrefixIds, peer_id: &PeerId, ) -> Result<(), ProtocolError> { + // ignore announcements from disconnected peers + if !self + .active_connections + .get_peer_ids_connected() + .contains(peer_id) + { + return Ok(()); + } + // mark sender as knowing the ops 'write_cache: { let mut cache_write = self.cache.write(); From 9a1c2d0ce8ac88f39d00f15d8dff19822d6d2945 Mon Sep 17 00:00:00 2001 From: Sydhds Date: Thu, 13 Jul 2023 15:57:08 +0200 Subject: [PATCH 31/71] Feature/ledger editor versioning 1 (#4239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add update_for_network_shutdown to MIP store (was only on Mip store raw) * Set update_for_network_shutdown as public * Call extend_from_db when creating a FinalState object from a snapshot * Add some debug! calls * Add debug info * Add debug info (round 2° * Add debug info (round 3) * Add debug info (round 4) * Add try_from_db method in MIP store * Code cleanup + new error after network shutdown * Add some documentation about MIP store init. 
--------- Co-authored-by: sydhds --- massa-final-state/src/error.rs | 6 +- massa-final-state/src/final_state.rs | 9 +- massa-node/src/main.rs | 77 +++++++++------ massa-versioning/src/versioning.rs | 140 +++++++++++++++++++++------ 4 files changed, 164 insertions(+), 68 deletions(-) diff --git a/massa-final-state/src/error.rs b/massa-final-state/src/error.rs index 06fcf02d5f0..a2d46f97893 100644 --- a/massa-final-state/src/error.rs +++ b/massa-final-state/src/error.rs @@ -5,7 +5,7 @@ use displaydoc::Display; use thiserror::Error; -use massa_versioning::versioning::ExtendFromDbError; +use massa_versioning::versioning::{ExtendFromDbError, IsConsistentWithShutdownPeriodError}; /// Final state error #[non_exhaustive] @@ -20,5 +20,7 @@ pub enum FinalStateError { /// Snapshot error: {0} SnapshotError(String), /// ExtendFromDbError - MipStoreError(#[from] ExtendFromDbError), + ExtendFromDbError(#[from] ExtendFromDbError), + /// IsConsistentWithShutdownPeriodError + NonConsistentWithShutdownPeriodError(#[from] IsConsistentWithShutdownPeriodError), } diff --git a/massa-final-state/src/final_state.rs b/massa-final-state/src/final_state.rs index 48a62cf0c0f..a90eeda2394 100644 --- a/massa-final-state/src/final_state.rs +++ b/massa-final-state/src/final_state.rs @@ -233,7 +233,7 @@ impl FinalState { shutdown_start, shutdown_end ); - if !final_state + final_state .mip_store .is_consistent_with_shutdown_period( shutdown_start, @@ -242,12 +242,7 @@ impl FinalState { config.t0, config.genesis_timestamp, ) - .unwrap_or(false) - { - return Err(FinalStateError::InvalidSlot( - "MIP store is Not consistent".to_string(), - )); - } + .map_err(FinalStateError::from)?; debug!( "Latest consistent slot found in snapshot data: {}", diff --git a/massa-node/src/main.rs b/massa-node/src/main.rs index 6c4ebf8e099..bbe22232771 100644 --- a/massa-node/src/main.rs +++ b/massa-node/src/main.rs @@ -285,35 +285,55 @@ async fn launch( }; // 
Ratio::new_raw(*SETTINGS.versioning.warn_announced_version_ratio, 100), - let mip_list = get_mip_list(); - debug!("MIP list: {:?}", mip_list); - let mip_store = - MipStore::try_from((mip_list, mip_stats_config)).expect("mip store creation failed"); - // Create final state, either from a snapshot, or from scratch let final_state = Arc::new(parking_lot::RwLock::new( match args.restart_from_snapshot_at_period { - Some(last_start_period) => FinalState::new_derived_from_snapshot( - db.clone(), - final_state_config, - Box::new(ledger), - selector_controller.clone(), - mip_store.clone(), - last_start_period, - ) - .expect("could not init final state"), - None => FinalState::new( - db.clone(), - final_state_config, - Box::new(ledger), - selector_controller.clone(), - mip_store.clone(), - true, - ) - .expect("could not init final state"), + Some(last_start_period) => { + // The node is restarted from a snapshot: + // MIP store by reading from the db as it must have been updated by the massa ledger editor + // (to shift transitions that might have happened during the network shutdown) + // Note that FinalState::new_derived_from_snapshot will check if MIP store is consistent + // No Bootstrap are expected after this + let mip_store: MipStore = MipStore::try_from_db(db.clone(), mip_stats_config) + .expect("MIP store creation failed"); + debug!("After read from db, Mip store: {:?}", mip_store); + + FinalState::new_derived_from_snapshot( + db.clone(), + final_state_config, + Box::new(ledger), + selector_controller.clone(), + mip_store, + last_start_period, + ) + .expect("could not init final state") + } + None => { + // The node is started in a normal way + // Read the mip list supported by the current software + // The resulting MIP store will likely be updated by the boostrap process in order + // to get the latest information for the MIP store (new states, votes...) 
+ + let mip_list = get_mip_list(); + debug!("MIP list: {:?}", mip_list); + let mip_store = MipStore::try_from((mip_list, mip_stats_config)) + .expect("mip store creation failed"); + + FinalState::new( + db.clone(), + final_state_config, + Box::new(ledger), + selector_controller.clone(), + mip_store, + true, + ) + .expect("could not init final state") + } }, )); + let mip_store = final_state.read().mip_store.clone(); + let bootstrap_config: BootstrapConfig = BootstrapConfig { bootstrap_list: SETTINGS.bootstrap.bootstrap_list.clone(), bootstrap_protocol: SETTINGS.bootstrap.bootstrap_protocol, @@ -411,7 +431,8 @@ async fn launch( let last_shutdown_end = Slot::new(final_state.read().last_start_period, 0) .get_prev_slot(THREAD_COUNT) .unwrap(); - if !final_state + + final_state .read() .mip_store .is_consistent_with_shutdown_period( @@ -421,13 +442,7 @@ async fn launch( T0, *GENESIS_TIMESTAMP, ) - .unwrap_or(false) - { - panic!( - "MIP store is not consistent with last shutdown period ({} - {})", - last_shutdown_start, last_shutdown_end - ); - } + .expect("Mip store is not consistent with shutdown period") } // Storage costs constants diff --git a/massa-versioning/src/versioning.rs b/massa-versioning/src/versioning.rs index 854e46467b7..de532a105b6 100644 --- a/massa-versioning/src/versioning.rs +++ b/massa-versioning/src/versioning.rs @@ -677,7 +677,7 @@ impl MipStore { thread_count: u8, t0: MassaTime, genesis_timestamp: MassaTime, - ) -> Result { + ) -> Result<(), IsConsistentWithShutdownPeriodError> { let guard = self.0.read(); guard.is_consistent_with_shutdown_period( shutdown_start, @@ -688,6 +688,25 @@ impl MipStore { ) } + #[allow(dead_code)] + pub fn update_for_network_shutdown( + &mut self, + shutdown_start: Slot, + shutdown_end: Slot, + thread_count: u8, + t0: MassaTime, + genesis_timestamp: MassaTime, + ) -> Result<(), ModelsError> { + let mut guard = self.0.write(); + guard.update_for_network_shutdown( + shutdown_start, + shutdown_end, + thread_count, + t0, 
+ genesis_timestamp, + ) + } + // DB pub fn update_batches( @@ -716,6 +735,20 @@ impl MipStore { guard.delete_prefix(MIP_STORE_STATS_PREFIX, VERSIONING_CF, None); } } + + /// Create a MIP store with what is written on the disk + pub fn try_from_db( + db: ShareableMassaDBController, + cfg: MipStatsConfig, + ) -> Result { + MipStoreRaw::try_from_db(db, cfg).map(|store_raw| Self(Arc::new(RwLock::new(store_raw)))) + } + + // debug + // pub fn len(&self) -> usize { + // let guard = self.0.read(); + // guard.store.len() + // } } impl TryFrom<([(MipInfo, MipState); N], MipStatsConfig)> for MipStore { @@ -792,6 +825,15 @@ pub enum ExtendFromDbError { Deserialize(String), } +/// Error returned by 'is_consistent_with_shutdown_period` +#[derive(Error, Debug)] +pub enum IsConsistentWithShutdownPeriodError { + #[error("{0}")] + Update(#[from] ModelsError), + #[error("MipInfo: {0:?} (state: {1:?}) is not consistent with shutdown: {2} {3}")] + NonConsistent(MipInfo, ComponentState, MassaTime, MassaTime), +} + /// Store of all versioning info #[derive(Debug, Clone, PartialEq)] pub struct MipStoreRaw { @@ -1118,8 +1160,9 @@ impl MipStoreRaw { thread_count: u8, t0: MassaTime, genesis_timestamp: MassaTime, - ) -> Result { - let mut is_consistent = true; + ) -> Result<(), IsConsistentWithShutdownPeriodError> { + // let mut is_consistent = true; + let mut has_error: Result<(), IsConsistentWithShutdownPeriodError> = Ok(()); let shutdown_start_ts = get_block_slot_timestamp(thread_count, t0, genesis_timestamp, shutdown_start)?; @@ -1134,13 +1177,24 @@ impl MipStoreRaw { if shutdown_range.contains(&mip_info.start) || shutdown_range.contains(&mip_info.timeout) { - is_consistent = false; + // is_consistent = false; + has_error = Err(IsConsistentWithShutdownPeriodError::NonConsistent( + mip_info.clone(), + mip_state.state, + shutdown_start_ts, + shutdown_end_ts, + )); break; } } - ComponentState::Started(..) => { + ComponentState::Started(..) | ComponentState::LockedIn(..) 
=> { // assume this should have been reset - is_consistent = false; + has_error = Err(IsConsistentWithShutdownPeriodError::NonConsistent( + mip_info.clone(), + mip_state.state, + shutdown_start_ts, + shutdown_end_ts, + )); break; } _ => { @@ -1150,10 +1204,9 @@ impl MipStoreRaw { } } - Ok(is_consistent) + has_error } - #[allow(dead_code)] fn update_for_network_shutdown( &mut self, shutdown_start: Slot, @@ -1404,6 +1457,25 @@ impl MipStoreRaw { Ok((updated, added)) } + + /// Create a MIP store raw with what is written on the disk + fn try_from_db( + db: ShareableMassaDBController, + cfg: MipStatsConfig, + ) -> Result { + let mut store_raw = MipStoreRaw { + store: Default::default(), + stats: MipStoreStats { + config: cfg, + latest_announcements: Default::default(), + network_version_counters: Default::default(), + }, + }; + + let (_updated, mut added) = store_raw.extend_from_db(db)?; + store_raw.store.append(&mut added); + Ok(store_raw) + } } impl TryFrom<([(MipInfo, MipState); N], MipStatsConfig)> for MipStoreRaw { @@ -2161,22 +2233,17 @@ mod test { let genesis_timestamp = MassaTime::from_millis(0); - let shutdown_start = Slot::new(2, 0); - let shutdown_end = Slot::new(8, 0); - // helper functions so the test code is easy to read let get_slot_ts = |slot| get_block_slot_timestamp(THREAD_COUNT, T0, genesis_timestamp, slot).unwrap(); let is_consistent = |store: &MipStoreRaw, shutdown_start, shutdown_end| { - store - .is_consistent_with_shutdown_period( - shutdown_start, - shutdown_end, - THREAD_COUNT, - T0, - genesis_timestamp, - ) - .unwrap() + store.is_consistent_with_shutdown_period( + shutdown_start, + shutdown_end, + THREAD_COUNT, + T0, + genesis_timestamp, + ) }; let update_store = |store: &mut MipStoreRaw, shutdown_start, shutdown_end| { store @@ -2214,6 +2281,9 @@ mod test { }; // end helpers + let shutdown_start = Slot::new(2, 0); + let shutdown_end = Slot::new(8, 0); + let mip_stats_cfg = MipStatsConfig { block_count_considered: 10, 
warn_announced_version_ratio: Ratio::new_raw(30, 100), @@ -2250,9 +2320,15 @@ mod test { )) .unwrap(); - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), false); + match is_consistent(&store, shutdown_start, shutdown_end) { + Err(IsConsistentWithShutdownPeriodError::NonConsistent(mi, ..)) => { + assert_eq!(mi, mi_1); + } + _ => panic!("is_consistent expects a non consistent error"), + } + update_store(&mut store, shutdown_start, shutdown_end); - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), true); + assert!(is_consistent(&store, shutdown_start, shutdown_end).is_ok()); // _dump_store(&store); } @@ -2273,10 +2349,10 @@ mod test { let store_orig = store.clone(); // Already ok even with a shutdown but let's check it - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), true); + assert!(is_consistent(&store, shutdown_start, shutdown_end).is_ok()); // _dump_store(&store); update_store(&mut store, shutdown_start, shutdown_end); - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), true); + assert!(is_consistent(&store, shutdown_start, shutdown_end).is_ok()); // _dump_store(&store); // Check that nothing has changed @@ -2298,9 +2374,12 @@ mod test { )) .unwrap(); - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), false); + assert_matches!( + is_consistent(&store, shutdown_start, shutdown_end), + Err(IsConsistentWithShutdownPeriodError::NonConsistent(..)) + ); update_store(&mut store, shutdown_start, shutdown_end); - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), true); + assert!(is_consistent(&store, shutdown_start, shutdown_end).is_ok()); // _dump_store(&store); } @@ -2334,10 +2413,15 @@ mod test { )) .unwrap(); - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), false); + match is_consistent(&store, shutdown_start, shutdown_end) { + Err(IsConsistentWithShutdownPeriodError::NonConsistent(mi, ..)) => { + assert_eq!(mi, mi_1); + } + _ => panic!("is_consistent expects a non 
consistent error"), + } // _dump_store(&store); update_store(&mut store, shutdown_start, shutdown_end); - assert_eq!(is_consistent(&store, shutdown_start, shutdown_end), true); + assert!(is_consistent(&store, shutdown_start, shutdown_end).is_ok()); // _dump_store(&store); // Update stats - so should force transitions if any From 3b4b7025864de9d7e77bbaed9bebadb68881b267 Mon Sep 17 00:00:00 2001 From: Damir Vodenicarevic Date: Thu, 13 Jul 2023 18:19:58 +0200 Subject: [PATCH 32/71] debug op propagation (#4251) --- massa-protocol-worker/src/connectivity.rs | 3 +- .../src/handlers/block_handler/retrieval.rs | 30 ++++---- .../src/handlers/operation_handler/cache.rs | 40 +++++------ .../handlers/operation_handler/propagation.rs | 44 ++++++------ .../handlers/operation_handler/retrieval.rs | 72 ++++++++----------- 5 files changed, 84 insertions(+), 105 deletions(-) diff --git a/massa-protocol-worker/src/connectivity.rs b/massa-protocol-worker/src/connectivity.rs index 3afdd28c68b..60a87253c2c 100644 --- a/massa-protocol-worker/src/connectivity.rs +++ b/massa-protocol-worker/src/connectivity.rs @@ -103,8 +103,7 @@ pub(crate) fn start_connectivity_thread( let total_out_slots = config.peers_categories.values().map(| v| v.target_out_connections).sum::() + config.default_category_info.target_out_connections + 1; let operation_cache = Arc::new(RwLock::new(OperationCache::new( config.max_known_ops_size.try_into().unwrap(), - config.max_node_known_ops_size.try_into().unwrap(), - (total_in_slots + total_out_slots).try_into().unwrap(), + config.max_node_known_ops_size.try_into().unwrap() ))); let endorsement_cache = Arc::new(RwLock::new(EndorsementCache::new( config.max_known_endorsements_size.try_into().unwrap(), diff --git a/massa-protocol-worker/src/handlers/block_handler/retrieval.rs b/massa-protocol-worker/src/handlers/block_handler/retrieval.rs index f66312a5f94..e15530336a0 100644 --- a/massa-protocol-worker/src/handlers/block_handler/retrieval.rs +++ 
b/massa-protocol-worker/src/handlers/block_handler/retrieval.rs @@ -812,22 +812,19 @@ impl RetrievalThread { let operation_ids_set: PreHashSet = operation_ids.iter().cloned().collect(); // add to known ops - 'write_cache: { + { let mut cache_write = self.operation_cache.write(); - let Ok(known_ops) = cache_write + let known_ops = cache_write .ops_known_by_peer - .get_or_insert(from_peer_id.clone(), || { + .entry(from_peer_id.clone()) + .or_insert_with(|| { LruMap::new(ByLength::new( self.config .max_node_known_ops_size .try_into() - .expect("max_node_known_ops_size in config should be > 0"), + .expect("max_node_known_ops_size in config must fit in u32"), )) - }) - .ok_or(()) else { - warn!("ops_known_by_peer limitation reached"); - break 'write_cache; - }; + }); for op_id in operation_ids_set.iter() { known_ops.insert(op_id.prefix(), ()); } @@ -1171,28 +1168,27 @@ impl RetrievalThread { .collect::>(), )?; - 'write_cache: { + { // add to checked operations let mut cache_write = self.operation_cache.write(); + + // add checked operations for op_id in new_operations.keys().copied() { cache_write.insert_checked_operation(op_id); } // add to known ops - let Ok(known_ops) = cache_write + let known_ops = cache_write .ops_known_by_peer - .get_or_insert(source_peer_id.clone(), || { + .entry(source_peer_id.clone()) + .or_insert_with(|| { LruMap::new(ByLength::new( self.config .max_node_known_ops_size .try_into() .expect("max_node_known_ops_size in config must be > 0"), )) - }) - .ok_or(()) else { - warn!("ops_known_by_peer limitation reached"); - break 'write_cache; - }; + }); for id in all_received_ids { known_ops.insert(id.prefix(), ()); } diff --git a/massa-protocol-worker/src/handlers/operation_handler/cache.rs b/massa-protocol-worker/src/handlers/operation_handler/cache.rs index e19633d256b..1a1ec1cd92e 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/cache.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/cache.rs @@ -1,4 +1,7 @@ -use 
std::{collections::HashSet, sync::Arc}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; use massa_models::operation::{OperationId, OperationPrefixId}; use massa_protocol_exports::PeerId; @@ -8,16 +11,16 @@ use schnellru::{ByLength, LruMap}; pub struct OperationCache { pub checked_operations: LruMap, pub checked_operations_prefix: LruMap, - pub ops_known_by_peer: LruMap>, + pub ops_known_by_peer: HashMap>, pub max_known_ops_by_peer: u32, } impl OperationCache { - pub fn new(max_known_ops: u32, max_known_ops_by_peer: u32, max_peers: u32) -> Self { + pub fn new(max_known_ops: u32, max_known_ops_by_peer: u32) -> Self { Self { checked_operations: LruMap::new(ByLength::new(max_known_ops)), checked_operations_prefix: LruMap::new(ByLength::new(max_known_ops)), - ops_known_by_peer: LruMap::new(ByLength::new(max_peers)), + ops_known_by_peer: HashMap::new(), max_known_ops_by_peer, } } @@ -28,27 +31,18 @@ impl OperationCache { .insert(operation_id.prefix(), ()); } - pub fn update_cache(&mut self, peers_connected: HashSet) { - let peers: Vec = self - .ops_known_by_peer - .iter() - .map(|(id, _)| id.clone()) - .collect(); + pub fn update_cache(&mut self, peers_connected: &HashSet) { + // Remove disconnected peers from cache + self.ops_known_by_peer + .retain(|peer_id, _| !peers_connected.contains(peer_id)); - // Clean shared cache if peers do not exist anymore - for peer_id in peers { - if !peers_connected.contains(&peer_id) { - self.ops_known_by_peer.remove(&peer_id); - } - } - - // Add new potential peers + // Add new connected peers to cache for peer_id in peers_connected { - if self.ops_known_by_peer.peek(&peer_id).is_none() { - self.ops_known_by_peer.insert( - peer_id.clone(), - LruMap::new(ByLength::new(self.max_known_ops_by_peer)), - ); + match self.ops_known_by_peer.entry(peer_id.clone()) { + std::collections::hash_map::Entry::Occupied(_) => {} + std::collections::hash_map::Entry::Vacant(entry) => { + 
entry.insert(LruMap::new(ByLength::new(self.max_known_ops_by_peer))); + } } } } diff --git a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs index 8612a191c98..1f441ab2e7f 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs @@ -5,10 +5,9 @@ use crossbeam::channel::RecvTimeoutError; use massa_channel::receiver::MassaReceiver; use massa_logging::massa_trace; use massa_metrics::MassaMetrics; -use massa_models::operation::OperationId; +use massa_models::operation::{OperationId, OperationPrefixId}; use massa_models::prehash::CapacityAllocator; use massa_models::prehash::PreHashSet; -use massa_protocol_exports::PeerId; use massa_protocol_exports::ProtocolConfig; use massa_storage::Storage; use tracing::{debug, info, log::warn}; @@ -132,45 +131,46 @@ impl PropagationThread { if self.next_batch.is_empty() { return; } - let operation_ids = mem::take(&mut self.next_batch); + let operation_id_prefixes = mem::take(&mut self.next_batch) + .into_iter() + .map(|id| id.prefix()) + .collect::>(); massa_trace!("protocol.protocol_worker.announce_ops.begin", { - "operation_ids": operation_ids + "operation_id_prefixes": operation_id_prefixes }); + let peers_connected = self.active_connections.get_peer_ids_connected(); { let mut cache_write = self.cache.write(); - let peers_connected = self.active_connections.get_peer_ids_connected(); - cache_write.update_cache(peers_connected); + cache_write.update_cache(&peers_connected); // Propagate to peers - let all_keys: Vec = cache_write - .ops_known_by_peer - .iter() - .map(|(k, _)| k) - .cloned() - .collect(); - for peer_id in all_keys { - let ops = cache_write.ops_known_by_peer.peek_mut(&peer_id).unwrap(); - let new_ops: Vec = operation_ids + for peer_id in peers_connected { + let peer_known_ops = cache_write + .ops_known_by_peer + .get_mut(&peer_id) + 
.expect("expected update_cache to insert all available peers in ops_known_by_peer but one is absent"); + let ops_unknown_to_peer: Vec = operation_id_prefixes .iter() - .filter(|id| ops.peek(&id.prefix()).is_none()) + .filter(|&id_prefix| peer_known_ops.peek(id_prefix).is_none()) .copied() .collect(); - if !new_ops.is_empty() { - for id in &new_ops { - ops.insert(id.prefix(), ()); + if !ops_unknown_to_peer.is_empty() { + for id_prefix in &ops_unknown_to_peer { + peer_known_ops.insert(*id_prefix, ()); } debug!( "Send operations announcement of len {} to {}", - new_ops.len(), + ops_unknown_to_peer.len(), peer_id ); - for sub_list in new_ops.chunks(self.config.max_operations_per_message as usize) + for sub_list in + ops_unknown_to_peer.chunks(self.config.max_operations_per_message as usize) { if let Err(err) = self.active_connections.send_to_peer( &peer_id, &self.operation_message_serializer, OperationMessage::OperationsAnnouncement( - sub_list.iter().map(|id| id.into_prefix()).collect(), + sub_list.iter().copied().collect(), ) .into(), false, diff --git a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs index d3799eea469..e7734b7a8d4 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs @@ -217,28 +217,27 @@ impl RetrievalThread { .collect::>(), )?; - 'write_cache: { + { // add to checked operations let mut cache_write = self.cache.write(); + + // add checked operations for op_id in new_operations.keys().copied() { cache_write.insert_checked_operation(op_id); } // add to known ops - let Ok(known_ops) = cache_write + let known_ops = cache_write .ops_known_by_peer - .get_or_insert(source_peer_id.clone(), || { + .entry(source_peer_id.clone()) + .or_insert_with(|| { LruMap::new(ByLength::new( self.config .max_node_known_ops_size .try_into() - .expect("max_node_known_ops_size in config must be 
> 0"), + .expect("max_node_known_ops_size in config must fit in u32"), )) - }) - .ok_or(()) else { - warn!("ops_known_by_peer limitation reached"); - break 'write_cache; - }; + }); for id in all_received_ids { known_ops.insert(id.prefix(), ()); } @@ -249,6 +248,7 @@ impl RetrievalThread { let mut ops = self.storage.clone_without_refs(); ops.store_operations(new_operations.into_values().collect()); + // propagate new operations self.internal_sender .try_send(OperationHandlerPropagationCommand::PropagateOperations( ops.clone(), @@ -287,7 +287,7 @@ impl RetrievalThread { mut op_batch: OperationPrefixIds, peer_id: &PeerId, ) -> Result<(), ProtocolError> { - // ignore announcements from disconnected peers + // ignore announcement from disconnected peers if !self .active_connections .get_peer_ids_connected() @@ -297,24 +297,21 @@ impl RetrievalThread { } // mark sender as knowing the ops - 'write_cache: { + { let mut cache_write = self.cache.write(); - let Ok(known_ops) = cache_write + let known_ops = cache_write .ops_known_by_peer - .get_or_insert(peer_id.clone(), || { + .entry(peer_id.clone()) + .or_insert_with(|| { LruMap::new(ByLength::new( self.config .max_node_known_ops_size .try_into() - .expect("max_node_known_ops_size in config must be > 0"), + .expect("max_node_known_ops_size in config must fit in u32"), )) - }) - .ok_or(()) else { - warn!("ops_known_by_peer limitation reached"); - break 'write_cache; - }; - for prefix in &op_batch { - known_ops.insert(*prefix, ()); + }); + for prefix_id in &op_batch { + known_ops.insert(*prefix_id, ()); } } @@ -330,32 +327,33 @@ impl RetrievalThread { let now = Instant::now(); let mut count_reask = 0; for op_id in op_batch { - let wish = match self.asked_operations.get(&op_id) { - Some(wish) => { - if wish.1.contains(peer_id) { - continue; // already asked to the `peer_id` + let opt_previous_ask = match self.asked_operations.get(&op_id) { + Some(previous_ask) => { + if previous_ask.1.contains(peer_id) { + continue; // already 
asked to the origin `peer_id` => ignore } else { - Some(wish) // already asked but at someone else + Some(previous_ask) // already asked but to someone else } } None => None, }; - if let Some(wish) = wish { + if let Some((previous_ask_time, previous_ask_peers)) = opt_previous_ask { // Ask now if latest ask instant < now - operation_batch_proc_period // otherwise add in future_set - if wish.0 - < now - .checked_sub(self.config.operation_batch_proc_period.into()) - .ok_or(TimeError::TimeOverflowError)? + if now + .checked_duration_since(*previous_ask_time) + .unwrap_or_default() + > self.config.operation_batch_proc_period.to_duration() { count_reask += 1; ask_set.insert(op_id); - wish.0 = now; - wish.1.push(peer_id.clone()); + *previous_ask_time = now; + previous_ask_peers.push(peer_id.clone()); } else { future_set.insert(op_id); } } else { + // the same peer announced this op for a second time, ask them immediately ask_set.insert(op_id); self.asked_operations .insert(op_id, (now, vec![peer_id.clone()])); @@ -397,10 +395,6 @@ impl RetrievalThread { false, ) { warn!("Failed to send AskForOperations message to peer: {}", err); - { - let mut cache_write = self.cache.write(); - cache_write.ops_known_by_peer.remove(peer_id); - } } } } @@ -461,10 +455,6 @@ impl RetrievalThread { false, ) { warn!("Failed to send Operations message to peer: {}", err); - { - let mut cache_write = self.cache.write(); - cache_write.ops_known_by_peer.remove(peer_id); - } } } Ok(()) From c1930f22eadf8656ba99c4daa1fa1fbc6e85c937 Mon Sep 17 00:00:00 2001 From: AurelienFT <32803821+AurelienFT@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:16:20 +0200 Subject: [PATCH 33/71] Fix op cache (#4255) --- massa-protocol-worker/src/handlers/operation_handler/cache.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/massa-protocol-worker/src/handlers/operation_handler/cache.rs b/massa-protocol-worker/src/handlers/operation_handler/cache.rs index 1a1ec1cd92e..d91d13c121a 100644 --- 
a/massa-protocol-worker/src/handlers/operation_handler/cache.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/cache.rs @@ -34,7 +34,7 @@ impl OperationCache { pub fn update_cache(&mut self, peers_connected: &HashSet) { // Remove disconnected peers from cache self.ops_known_by_peer - .retain(|peer_id, _| !peers_connected.contains(peer_id)); + .retain(|peer_id, _| peers_connected.contains(peer_id)); // Add new connected peers to cache for peer_id in peers_connected { From aa4d3d0506ea0d2bdd2442a89f4de00d058e4dda Mon Sep 17 00:00:00 2001 From: "Litchi Pi (Tim)" Date: Mon, 17 Jul 2023 10:19:57 +0200 Subject: [PATCH 34/71] Bom workspace (#4200) * Add script Signed-off-by: Litchi Pi * update script Signed-off-by: Litchi Pi * Update Cargo.toml Signed-off-by: Litchi Pi * Remove script Signed-off-by: Litchi Pi --------- Signed-off-by: Litchi Pi --- Cargo.lock | 47 +++------- Cargo.toml | 130 +++++++++++++++++++++++++++ massa-api-exports/Cargo.toml | 45 +++++----- massa-api/Cargo.toml | 57 ++++++------ massa-async-pool/Cargo.toml | 41 ++++----- massa-bootstrap/Cargo.toml | 97 +++++++++----------- massa-channel/Cargo.toml | 8 +- massa-cipher/Cargo.toml | 16 ++-- massa-client/Cargo.toml | 48 +++++----- massa-consensus-exports/Cargo.toml | 47 +++++----- massa-consensus-worker/Cargo.toml | 56 ++++++------ massa-db-exports/Cargo.toml | 16 ++-- massa-db-worker/Cargo.toml | 18 ++-- massa-executed-ops/Cargo.toml | 19 ++-- massa-execution-exports/Cargo.toml | 52 +++++------ massa-execution-worker/Cargo.toml | 140 +++++++++++------------------ massa-factory-exports/Cargo.toml | 30 +++---- massa-factory-worker/Cargo.toml | 51 +++++------ massa-final-state/Cargo.toml | 71 +++++++-------- massa-grpc/Cargo.toml | 65 +++++++------- massa-hash/Cargo.toml | 26 +++--- massa-ledger-exports/Cargo.toml | 31 +++---- massa-ledger-worker/Cargo.toml | 33 ++++--- massa-logging/Cargo.toml | 6 +- massa-metrics/Cargo.toml | 19 ++-- massa-models/Cargo.toml | 52 +++++------ 
massa-module-cache/Cargo.toml | 39 ++++---- massa-node/Cargo.toml | 105 ++++++++++------------ massa-pool-exports/Cargo.toml | 27 +++--- massa-pool-worker/Cargo.toml | 38 ++++---- massa-pos-exports/Cargo.toml | 47 +++++----- massa-pos-worker/Cargo.toml | 28 +++--- massa-protocol-exports/Cargo.toml | 43 +++++---- massa-protocol-worker/Cargo.toml | 65 +++++++------- massa-sdk/Cargo.toml | 22 ++--- massa-serialization/Cargo.toml | 17 ++-- massa-signature/Cargo.toml | 27 +++--- massa-storage/Cargo.toml | 17 ++-- massa-time/Cargo.toml | 16 ++-- massa-versioning/Cargo.toml | 43 +++++---- massa-wallet/Cargo.toml | 28 +++--- massa-xtask/Cargo.toml | 8 +- 42 files changed, 855 insertions(+), 936 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 208497e2c4c..99e67014ce7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2544,7 +2544,7 @@ dependencies = [ "regex", "serde", "serde_json", - "serial_test 1.0.0", + "serial_test", "sha2 0.10.6", "thiserror", "tracing", @@ -2606,7 +2606,7 @@ dependencies = [ "massa_wallet", "paginate", "serde", - "serial_test 1.0.0", + "serial_test", "strum", "thiserror", ] @@ -2668,7 +2668,7 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "serial_test 2.0.0", + "serial_test", "socket2", "substruct", "tempfile", @@ -2849,7 +2849,7 @@ dependencies = [ "rand 0.8.5", "rand_xoshiro", "serde_json", - "serial_test 1.0.0", + "serial_test", "sha2 0.10.6", "sha3", "tempfile", @@ -2975,7 +2975,7 @@ dependencies = [ "nom", "serde", "serde_json", - "serial_test 1.0.0", + "serial_test", "thiserror", ] @@ -3054,7 +3054,7 @@ dependencies = [ "rust_decimal", "serde", "serde_with", - "serial_test 1.0.0", + "serial_test", "thiserror", "transition", ] @@ -3074,7 +3074,7 @@ dependencies = [ "rand 0.8.5", "rocksdb", "schnellru", - "serial_test 1.0.0", + "serial_test", "tempfile", "thiserror", "tracing", @@ -3202,7 +3202,7 @@ dependencies = [ "rayon", "schnellru", "serde_json", - "serial_test 2.0.0", + "serial_test", "tempfile", "tracing", ] @@ -3248,7 
+3248,7 @@ dependencies = [ "rand 0.7.3", "serde", "serde_json", - "serial_test 1.0.0", + "serial_test", "thiserror", "transition", ] @@ -4822,21 +4822,7 @@ dependencies = [ "lazy_static", "log", "parking_lot", - "serial_test_derive 1.0.0", -] - -[[package]] -name = "serial_test" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" -dependencies = [ - "dashmap", - "futures", - "lazy_static", - "log", - "parking_lot", - "serial_test_derive 2.0.0", + "serial_test_derive", ] [[package]] @@ -4850,17 +4836,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "serial_test_derive" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" -dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", -] - [[package]] name = "sha-1" version = "0.9.8" @@ -5083,7 +5058,7 @@ dependencies = [ [[package]] name = "substruct" version = "0.1.0" -source = "git+https://github.com/sydhds/substruct#2fb3ae0dc9d913a0566ce6415eaa7a7ca1690fe1" +source = "git+https://github.com/sydhds/substruct?branch=main#2fb3ae0dc9d913a0566ce6415eaa7a7ca1690fe1" dependencies = [ "quote 1.0.29", "syn 1.0.109", diff --git a/Cargo.toml b/Cargo.toml index 9bd89250a06..cb7ddec48c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -57,3 +57,133 @@ opt-level = 3 # Speed-up the CI # # * sandbox: for testing purpose, genesis timestamps is set as now + 9 seconds. # The saved snapshot can then be used to restart the network from the snapshot. 
+[workspace.dependencies] + +# Internal packages +massa_api = {path = "./massa-api"} +massa_api_exports = {path = "./massa-api-exports"} +massa_async_pool = {path = "./massa-async-pool"} +massa_bootstrap = {path = "./massa-bootstrap"} +massa_channel = {path = "./massa-channel"} +massa_cipher = {path = "./massa-cipher"} +massa_consensus_exports = {path = "./massa-consensus-exports"} +massa_consensus_worker = {path = "./massa-consensus-worker"} +massa_db_exports = {path = "./massa-db-exports"} +massa_db_worker = {path = "./massa-db-worker"} +massa_executed_ops = {path = "./massa-executed-ops"} +massa_execution_exports = {path = "./massa-execution-exports"} +massa_execution_worker = {path = "./massa-execution-worker"} +massa_factory_exports = {path = "./massa-factory-exports"} +massa_factory_worker = {path = "./massa-factory-worker"} +massa_final_state = {path = "./massa-final-state"} +massa_grpc = {path = "./massa-grpc"} +massa_hash = {path = "./massa-hash"} +massa_ledger_exports = {path = "./massa-ledger-exports"} +massa_ledger_worker = {path = "./massa-ledger-worker"} +massa_logging = {path = "./massa-logging"} +massa_metrics = {path = "./massa-metrics"} +massa_models = {path = "./massa-models"} +massa_module_cache = {path = "./massa-module-cache"} +massa_pool_exports = {path = "./massa-pool-exports"} +massa_pool_worker = {path = "./massa-pool-worker"} +massa_pos_exports = {path = "./massa-pos-exports"} +massa_pos_worker = {path = "./massa-pos-worker"} +massa_protocol_exports = {path = "./massa-protocol-exports"} +massa_protocol_worker = {path = "./massa-protocol-worker"} +massa_sdk = {path = "./massa-sdk"} +massa_serialization = {path = "./massa-serialization"} +massa_signature = {path = "./massa-signature"} +massa_storage = {path = "./massa-storage"} +massa_time = {path = "./massa-time"} +massa_versioning = {path = "./massa-versioning"} +massa_wallet = {path = "./massa-wallet"} + +# Massa projects dependencies +massa-proto-rs = {git = 
"https://github.com/massalabs/massa-proto-rs", "rev" = "18ec02f"} +massa-sc-runtime = {git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "main"} +peernet = {git = "https://github.com/massalabs/PeerNet", "branch" = "deactivate_stream_limiter"} + +# Common dependencies +transition = {git = "https://github.com/massalabs/transition.git", "rev" = "93fa3bf82f9f5ff421c78536879b7fd1b948ca75"} +unsigned-varint = {version = "0.7.1", "git" = "https://github.com/cyphar/unsigned-varint.git", "branch" = "nom6-errors"} +substruct = {git = "https://github.com/sydhds/substruct", branch = "main"} +machine = {git = "https://github.com/antifuchs/machine", "branch" = "fix-workspace-build"} +aes-gcm = "0.10" +anyhow = "1.0" +async-trait = "0.1.58" +atty = "0.2" +bitvec = "1.0" +blake3 = "=1.3" +bs58 = "=0.4" +config = "0.13" +console = "0.15" +criterion = "0.4" +crossbeam = "0.8" +crossbeam-channel = "0.5" +ctrlc = "3.2.5" +dialoguer = "0.10" +directories = "4.0" +displaydoc = "0.2" +ed25519-dalek = "=1.0" +erased-serde = "0.3" +futures = "0.3.18" +futures-util = "0.3.25" +generic-array = "0.14.7" +h2 = "0.3.17" +hex-literal = "0.3.4" +http = "0.2.8" +humantime = "2.1.0" +hyper = "0.14.25" +itertools = "0.10" +jsonrpsee = "0.18.2" +jsonrpsee-http-client = "0.18.2" +jsonrpsee-ws-client = "0.18.2" +lazy_static = "1.4.0" +libsecp256k1 = "0.7.1" +lsmtree = "=0.1.1" +mio = "0.8" +mockall = "0.11.4" +more-asserts = "0.3" +nom = "=7.1" +num = "0.4" +num_enum = "0.5" +paginate = "1.1.11" +parking_lot = "0.12" +paw = "1.0" +pbkdf2 = "0.11" +prometheus = "0.13.3" +rand = "0.8" +rand_distr = "=0.4.3" +rand_xoshiro = "0.6" +rayon = "1.7.0" +rocksdb = "0.20" +rust_decimal = "1.26" +rustyline = "10.0.0" +rustyline-derive = "0.7.0" +schnellru = "0.2.0" +serde = "1.0" +serde_json = "1.0.87" +serde_qs = "0.11" +serde_with = "2.1.0" +serial_test = "1.0.0" +sha2 = "0.10.6" +sha3 = "0.10.8" +socket2 = "0.4.7" +structopt = "0.3" +strum = "0.24" +strum_macros = "0.24" +tempfile = "3.5" 
+thiserror = "1.0" +time = "0.3" +tokio = "1.23" +tokio-stream = "0.1" +toml_edit = "0.19" +tonic = "0.9.2" +tonic-health = "0.9.2" +tonic-reflection = "0.9.2" +tonic-web = "0.9.2" +tower = "0.4.13" +tower-http = "0.4.0" +tracing = "0.1" +tracing-subscriber = "0.3" +walkdir = "2.3.3" diff --git a/massa-api-exports/Cargo.toml b/massa-api-exports/Cargo.toml index 1aea6b0f2d7..c6ac899c849 100644 --- a/massa-api-exports/Cargo.toml +++ b/massa-api-exports/Cargo.toml @@ -4,30 +4,27 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -paginate = "1.1.11" -displaydoc = "0.2" -thiserror = "1.0" -jsonrpsee = { version = "0.18.2", features = ["jsonrpsee-core", "jsonrpsee-types"] } -serde = { version = "1.0", features = ["derive"] } -strum = { version = "0.24", features = ["derive"] } - -# custom modules -massa_signature = { path = "../massa-signature" } -massa_time = { path = "../massa-time" } -massa_models = { path = "../massa-models" } -massa_final_state = { path = "../massa-final-state" } -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_hash = { path = "../massa-hash" } -massa_protocol_exports = { path = "../massa-protocol-exports" } -massa_execution_exports = { path = "../massa-execution-exports" } -massa_wallet = { path = "../massa-wallet" } -massa_versioning = { path = "../massa-versioning" } - -[dev-dependencies] -serial_test = "1.0.0" - -# for more information on what are the following features used for, see the cargo.toml at workspace level [features] sandbox = [] testing = [] + +[dependencies] +paginate = {workspace = true} +displaydoc = {workspace = true} +thiserror = {workspace = true} +jsonrpsee = {workspace = true, "features" = ["jsonrpsee-core", "jsonrpsee-types"]} +serde = {workspace = true, "features" = ["derive"]} +strum = {workspace = true, "features" = ["derive"]} # BOM UPGRADE Revert to {"version": "0.24", "features": ["derive"]} if problem +massa_signature = {workspace = true} +massa_time = {workspace 
= true} +massa_models = {workspace = true} +massa_final_state = {workspace = true} +massa_consensus_exports = {workspace = true} +massa_hash = {workspace = true} +massa_protocol_exports = {workspace = true} +massa_execution_exports = {workspace = true} +massa_wallet = {workspace = true} +massa_versioning = {workspace = true} + +[dev-dependencies] +serial_test = {workspace = true} diff --git a/massa-api/Cargo.toml b/massa-api/Cargo.toml index 3216e174d71..d4c0d29d167 100644 --- a/massa-api/Cargo.toml +++ b/massa-api/Cargo.toml @@ -3,34 +3,31 @@ name = "massa_api" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -jsonrpsee = { version = "0.18.2", features = ["server", "macros"] } -futures = "0.3.18" -async-trait = "0.1.58" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0.87" -tower-http = { version = "0.4.0", features = ["cors"] } -tower = { version = "0.4.13", features = ["full"] } -hyper = "0.14.25" -tokio = { version = "1.23", features = ["full"] } -tokio-stream = { version = "0.1", features = ["sync"] } -tracing = "0.1" -itertools = "0.10" -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -# custom modules -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_api_exports = { path = "../massa-api-exports" } -massa_models = { path = "../massa-models" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_protocol_exports = { path = "../massa-protocol-exports" } -massa_execution_exports = { path = "../massa-execution-exports" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_storage = { path = "../massa-storage" } -massa_serialization = { path = "../massa-serialization" } -massa_signature = { path = "../massa-signature" } -massa_time = { path = "../massa-time" } -massa_versioning = { path = "../massa-versioning" } -massa_hash = { path = "../massa-hash" } -massa_wallet = { path = 
"../massa-wallet" } +jsonrpsee = {workspace = true, "features" = ["server", "macros"]} +futures = {workspace = true} +async-trait = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} +tower-http = {workspace = true, "features" = ["cors"]} +tower = {workspace = true, "features" = ["full"]} +hyper = {workspace = true} +tokio = {workspace = true, "features" = ["full"]} +tokio-stream = {workspace = true, "features" = ["sync"]} +tracing = {workspace = true} +itertools = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +massa_consensus_exports = {workspace = true} +massa_api_exports = {workspace = true} +massa_models = {workspace = true} +massa_pool_exports = {workspace = true} +massa_protocol_exports = {workspace = true} +massa_execution_exports = {workspace = true} +massa_pos_exports = {workspace = true} +massa_storage = {workspace = true} +massa_serialization = {workspace = true} +massa_signature = {workspace = true} +massa_time = {workspace = true} +massa_versioning = {workspace = true} +massa_hash = {workspace = true} +massa_wallet = {workspace = true} diff --git a/massa-async-pool/Cargo.toml b/massa-async-pool/Cargo.toml index f63dfc3867b..cebb1a96174 100644 --- a/massa-async-pool/Cargo.toml +++ b/massa-async-pool/Cargo.toml @@ -4,28 +4,25 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -nom = "=7.1" -num = "0.4" -serde = { version = "1.0", features = ["derive"] } -rand = "0.8" -parking_lot = { version = "0.12", features = ["deadlock_detection"] } - -# custom modules -massa_hash = { path = "../massa-hash" } -massa_ledger_exports = { path = "../massa-ledger-exports" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } -massa_signature = { path = "../massa-signature" } -massa_db_exports = { path = "../massa-db-exports" } -massa_time = { path = "../massa-time" } -massa-proto-rs = { git = 
"https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } - -[dev-dependencies] -tempfile = "3.3" -massa_ledger_worker = { path = "../massa-ledger-worker" } - -# for more information on what are the following features used for, see the cargo.toml at workspace level [features] testing = [] sandbox = [] + +[dependencies] +nom = {workspace = true} +num = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +rand = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +massa_hash = {workspace = true} +massa_ledger_exports = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} +massa_signature = {workspace = true} +massa_db_exports = {workspace = true} +massa_time = {workspace = true} +massa-proto-rs = {workspace = true, "features" = ["tonic"]} + +[dev-dependencies] +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem +massa_ledger_worker = {workspace = true} diff --git a/massa-bootstrap/Cargo.toml b/massa-bootstrap/Cargo.toml index 3c8a80f79d5..0adba879a21 100644 --- a/massa-bootstrap/Cargo.toml +++ b/massa-bootstrap/Cargo.toml @@ -4,60 +4,51 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +testing = ["massa_final_state/testing", "massa_ledger_worker/testing", "massa_consensus_exports/testing", "massa_async_pool/testing"] +sandbox = ["massa_async_pool/sandbox", "massa_final_state/sandbox", "massa_models/sandbox"] [dependencies] -displaydoc = "0.2" -num_enum = "0.5" -nom = "=7.1" -rand = "0.8" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -humantime = "2.1.0" -thiserror = "1.0" -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -tracing = "0.1" -substruct = { git = "https://github.com/sydhds/substruct" } -socket2 = "0.4.7" -crossbeam = "0.8.2" -mio = { 
version = "0.8", features = ["net", "os-poll"] } - -# custom modules -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_final_state = { path = "../massa-final-state" } -massa_hash = { path = "../massa-hash" } -massa_logging = { path = "../massa-logging" } -massa_models = { path = "../massa-models" } -massa_protocol_exports = { path = "../massa-protocol-exports" } -massa_serialization = { path = "../massa-serialization" } -massa_signature = { path = "../massa-signature" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_time = { path = "../massa-time" } -massa_db_exports = { path = "../massa-db-exports" } -massa_versioning = { path = "../massa-versioning" } +displaydoc = {workspace = true} +num_enum = {workspace = true} +nom = {workspace = true} +rand = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +humantime = {workspace = true} +thiserror = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +tracing = {workspace = true} +substruct = {workspace = true} +socket2 = {workspace = true} +crossbeam = {workspace = true} # BOM UPGRADE Revert to "0.8.2" if problem +mio = {workspace = true, "features" = ["net", "os-poll"]} +massa_consensus_exports = {workspace = true} +massa_final_state = {workspace = true} +massa_hash = {workspace = true} +massa_logging = {workspace = true} +massa_models = {workspace = true} +massa_protocol_exports = {workspace = true} +massa_serialization = {workspace = true} +massa_signature = {workspace = true} +massa_pos_exports = {workspace = true} +massa_time = {workspace = true} +massa_db_exports = {workspace = true} +massa_versioning = {workspace = true} [dev-dependencies] -mockall = "0.11.4" -bitvec = { version = "1.0", features = ["serde"] } -lazy_static = "1.4" -tempfile = "3.3" -serial_test = "2.0.0" -num = "0.4" -massa_final_state = { path = "../massa-final-state", 
features = ["testing"] } -massa_async_pool = { path = "../massa-async-pool", features = ["testing"] } -massa_ledger_exports = { path = "../massa-ledger-exports" } -massa_ledger_worker = { path = "../massa-ledger-worker", features = [ - "testing", -] } -massa_executed_ops = { path = "../massa-executed-ops" } -massa_pos_worker = { path = "../massa-pos-worker", features = ["testing"] } -massa_pos_exports = { path = "../massa-pos-exports", features = ["testing"] } -massa_consensus_exports = { path = "../massa-consensus-exports", features = [ - "testing", -] } -massa_db_worker = { path = "../massa-db-worker" } - -# for more information on what are the following features used for, see the cargo.toml at workspace level -[features] -testing = ["massa_final_state/testing", "massa_ledger_worker/testing", "massa_consensus_exports/testing", "massa_async_pool/testing"] -sandbox = ["massa_async_pool/sandbox", "massa_final_state/sandbox", "massa_models/sandbox"] +mockall = {workspace = true} +bitvec = {workspace = true, "features" = ["serde"]} +lazy_static = {workspace = true} # BOM UPGRADE Revert to "1.4" if problem +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem +serial_test = {workspace = true} # BOM UPGRADE Revert to "2.0.0" if problem +num = {workspace = true} +massa_final_state = {workspace = true, "features" = ["testing"]} +massa_async_pool = {workspace = true, "features" = ["testing"]} +massa_ledger_exports = {workspace = true} +massa_ledger_worker = {workspace = true, "features" = ["testing"]} +massa_executed_ops = {workspace = true} +massa_pos_worker = {workspace = true, "features" = ["testing"]} +massa_pos_exports = {workspace = true, "features" = ["testing"]} +massa_consensus_exports = {workspace = true, "features" = ["testing"]} +massa_db_worker = {workspace = true} diff --git a/massa-channel/Cargo.toml b/massa-channel/Cargo.toml index 896cef12a9f..d631b4f8a20 100644 --- a/massa-channel/Cargo.toml +++ b/massa-channel/Cargo.toml @@ -3,9 +3,7 @@ 
name = "massa_channel" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -prometheus = { version = "0.13.3"} -crossbeam = "0.8.0" -tracing = "0.1" +prometheus = {workspace = true} +crossbeam = {workspace = true} # BOM UPGRADE Revert to "0.8.0" if problem +tracing = {workspace = true} diff --git a/massa-cipher/Cargo.toml b/massa-cipher/Cargo.toml index b2a0e82e8cd..54c71261b48 100644 --- a/massa-cipher/Cargo.toml +++ b/massa-cipher/Cargo.toml @@ -3,14 +3,10 @@ name = "massa_cipher" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -displaydoc = "0.2" -thiserror = "1.0" -aes-gcm = "0.10" -pbkdf2 = "0.11" -rand = "0.8" - -# custom modules -massa_serialization = { path = "../massa-serialization" } +displaydoc = {workspace = true} +thiserror = {workspace = true} +aes-gcm = {workspace = true} +pbkdf2 = {workspace = true} +rand = {workspace = true} +massa_serialization = {workspace = true} diff --git a/massa-client/Cargo.toml b/massa-client/Cargo.toml index c421644765f..cc7d6dc9730 100644 --- a/massa-client/Cargo.toml +++ b/massa-client/Cargo.toml @@ -3,32 +3,28 @@ name = "massa-client" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -anyhow = "1.0" -atty = "0.2" -console = "0.15" -dialoguer = "0.10" -rustyline = "10.0.0" -rustyline-derive = "0.7.0" -erased-serde = "0.3" -lazy_static = "1.4" -paw = "1.0" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -structopt = { version = "0.3", features = ["paw"] } -strum = "0.24" -strum_macros = "0.24" -tokio = { version = "1.23", features = ["full"] } -# custom modules -massa_api_exports = { path = "../massa-api-exports" } -massa_models = { path = "../massa-models" } -massa_signature = { path = 
"../massa-signature" } -massa_time = { path = "../massa-time" } -massa_sdk = { path = "../massa-sdk" } -massa_wallet = { path = "../massa-wallet" } -# massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +anyhow = {workspace = true} +atty = {workspace = true} +console = {workspace = true} +dialoguer = {workspace = true} +rustyline = {workspace = true} +rustyline-derive = {workspace = true} +erased-serde = {workspace = true} +lazy_static = {workspace = true} # BOM UPGRADE Revert to "1.4" if problem +paw = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +structopt = {workspace = true, "features" = ["paw"]} +strum = {workspace = true} +strum_macros = {workspace = true} +tokio = {workspace = true, "features" = ["full"]} +massa_api_exports = {workspace = true} +massa_models = {workspace = true} +massa_signature = {workspace = true} +massa_time = {workspace = true} +massa_sdk = {workspace = true} +massa_wallet = {workspace = true} [dev-dependencies] -toml_edit = "0.19" +toml_edit = {workspace = true} diff --git a/massa-consensus-exports/Cargo.toml b/massa-consensus-exports/Cargo.toml index 904d3268717..388f2570ccd 100644 --- a/massa-consensus-exports/Cargo.toml +++ b/massa-consensus-exports/Cargo.toml @@ -4,30 +4,27 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -crossbeam-channel = "0.5.6" -displaydoc = "0.2" -nom = "=7.1" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -thiserror = "1.0" -jsonrpsee = { version = "0.18.2", features = ["server"] } -tokio = { version = "1.23", features = ["sync"] } -mockall = {version = "0.11.4", features = ["nightly"], optional = true} -#custom modules -massa_channel = { path = "../massa-channel" } -massa_hash = { path = 
"../massa-hash"} -massa_execution_exports = { path = "../massa-execution-exports" } -massa_models = { path = "../massa-models" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_protocol_exports ={ path = "../massa-protocol-exports" } -massa_storage = { path = "../massa-storage" } -massa_serialization = { path = "../massa-serialization" } -massa_time = { path = "../massa-time" } -massa_signature = { path = "../massa-signature" } - [features] testing = ["massa_models/testing", "massa_execution_exports/testing", "massa_pool_exports/testing", "massa_pos_exports/testing", "massa_protocol_exports/testing", "massa_storage/testing", "dep:mockall"] + +[dependencies] +crossbeam-channel = {workspace = true} # BOM UPGRADE Revert to "0.5.6" if problem +displaydoc = {workspace = true} +nom = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +thiserror = {workspace = true} +jsonrpsee = {workspace = true, "features" = ["server"]} +tokio = {workspace = true, "features" = ["sync"]} +mockall = {workspace = true, "features" = ["nightly"], "optional" = true} # BOM UPGRADE Revert to {"version": "0.11.4", "features": ["nightly"], "optional": true} if problem +massa_channel = {workspace = true} +massa_hash = {workspace = true} +massa_execution_exports = {workspace = true} +massa_models = {workspace = true} +massa_pool_exports = {workspace = true} +massa_pos_exports = {workspace = true} +massa_protocol_exports = {workspace = true} +massa_storage = {workspace = true} +massa_serialization = {workspace = true} +massa_time = {workspace = true} +massa_signature = {workspace = true} diff --git a/massa-consensus-worker/Cargo.toml b/massa-consensus-worker/Cargo.toml index 355049a7aeb..8648f68a822 100644 --- a/massa-consensus-worker/Cargo.toml +++ b/massa-consensus-worker/Cargo.toml @@ -4,37 +4,33 @@ version = "0.24.0" authors = 
["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +sandbox = [] +bootstrap_server = [] +testing = ["tokio", "crossbeam-channel", "massa_execution_exports/testing", "massa_pos_worker/testing", "massa_protocol_exports/testing", "massa_consensus_exports/testing", "massa_pos_exports/testing", "massa_pool_exports/testing"] [dependencies] -num = { version = "0.4", features = ["serde"] } -tracing = { version = "0.1", features = ["log"] } -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -crossbeam = "0.8" -#custom modules -massa_channel = { path = "../massa-channel" } -massa_metrics = { path = "../massa-metrics"} -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_models = { path = "../massa-models" } -massa_storage = { path = "../massa-storage" } -massa_signature = { path = "../massa-signature" } -massa_time = { path = "../massa-time" } -massa_hash = { path = "../massa-hash" } -massa_logging = { path = "../massa-logging" } -massa_execution_exports = { path = "../massa-execution-exports", optional = true} -massa_protocol_exports = { path = "../massa-protocol-exports", optional = true} -massa_pos_worker = { path = "../massa-pos-worker", optional = true} -massa_pos_exports = { path = "../massa-pos-exports", optional = true} -massa_pool_exports = { path = "../massa-pool-exports", optional = true} -tokio = { version = "1.0", optional = true } -crossbeam-channel = { version = "0.5.6", optional = true } - +num = {workspace = true, "features" = ["serde"]} # BOM UPGRADE Revert to {"version": "0.4", "features": ["serde"]} if problem +tracing = {workspace = true, "features" = ["log"]} # BOM UPGRADE Revert to {"version": "0.1", "features": ["log"]} if problem +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +crossbeam = {workspace = true} +massa_channel = {workspace = true} +massa_metrics = {workspace = true} 
+massa_consensus_exports = {workspace = true} +massa_models = {workspace = true} +massa_storage = {workspace = true} +massa_signature = {workspace = true} +massa_time = {workspace = true} +massa_hash = {workspace = true} +massa_logging = {workspace = true} +massa_execution_exports = {workspace = true, "optional" = true} +massa_protocol_exports = {workspace = true, "optional" = true} +massa_pos_worker = {workspace = true, "optional" = true} +massa_pos_exports = {workspace = true, "optional" = true} +massa_pool_exports = {workspace = true, "optional" = true} +tokio = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "1.0", "optional": true} if problem +crossbeam-channel = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "0.5.6", "optional": true} if problem [dev-dependencies] -rand= "0.8" -itertools = "0.10" - -[features] -sandbox = [] -bootstrap_server = [] -testing = ["tokio", "crossbeam-channel", "massa_execution_exports/testing", "massa_pos_worker/testing", "massa_protocol_exports/testing", "massa_consensus_exports/testing", "massa_pos_exports/testing", "massa_pool_exports/testing"] +rand = {workspace = true} +itertools = {workspace = true} diff --git a/massa-db-exports/Cargo.toml b/massa-db-exports/Cargo.toml index 754b0f2ab3d..35d50b9fe5d 100644 --- a/massa-db-exports/Cargo.toml +++ b/massa-db-exports/Cargo.toml @@ -3,14 +3,10 @@ name = "massa_db_exports" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -displaydoc = "0.2" -thiserror = "1.0" -parking_lot = { version = "0.12", features = ["deadlock_detection"] } - -# Custom modules -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } +displaydoc = {workspace = true} +thiserror = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} 
+massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} diff --git a/massa-db-worker/Cargo.toml b/massa-db-worker/Cargo.toml index 16f24fcdb54..4303e93584e 100644 --- a/massa-db-worker/Cargo.toml +++ b/massa-db-worker/Cargo.toml @@ -3,15 +3,11 @@ name = "massa_db_worker" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -rocksdb = "0.20" -lsmtree = "=0.1.1" - -# Custom modules -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } -massa_db_exports = { path = "../massa-db-exports" } +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +rocksdb = {workspace = true} +lsmtree = {workspace = true} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} +massa_db_exports = {workspace = true} diff --git a/massa-executed-ops/Cargo.toml b/massa-executed-ops/Cargo.toml index 0562027a40c..3664fa00746 100644 --- a/massa-executed-ops/Cargo.toml +++ b/massa-executed-ops/Cargo.toml @@ -5,15 +5,14 @@ authors = ["Massa Labs "] edition = "2021" [dependencies] -nom = "=7.1" -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -massa_models = { path = "../massa-models" } -massa_hash = { path = "../massa-hash" } -massa_serialization = { path = "../massa-serialization" } - -massa_db_exports = { path = "../massa-db-exports" } +nom = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +massa_models = {workspace = true} +massa_hash = {workspace = true} +massa_serialization = {workspace = true} +massa_db_exports = {workspace = true} [dev-dependencies] -tempfile = "3.3" -massa_ledger_worker = { path = "../massa-ledger-worker" } -massa_db_worker = { path = 
"../massa-db-worker" } +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem +massa_ledger_worker = {workspace = true} +massa_db_worker = {workspace = true} diff --git a/massa-execution-exports/Cargo.toml b/massa-execution-exports/Cargo.toml index 3c3305bf9ae..8c265967228 100644 --- a/massa-execution-exports/Cargo.toml +++ b/massa-execution-exports/Cargo.toml @@ -4,37 +4,29 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +gas_calibration = ["massa_ledger_exports/testing", "parking_lot", "tempfile"] +testing = ["massa_models/testing", "massa_ledger_exports/testing", "parking_lot", "tempfile", "mockall"] [dependencies] -displaydoc = "0.2" -thiserror = "1.0" -num = { version = "0.4", features = ["serde"] } -parking_lot = { version = "0.12", features = [ - "deadlock_detection", -], optional = true } -tempfile = { version = "3.3", optional = true } -tokio = { version = "1.23", features = ["sync"] } -mockall = { version = "0.11.4", optional = true} - -# custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_time = { path = "../massa-time" } -massa_storage = { path = "../massa-storage" } -massa_final_state = { path = "../massa-final-state" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_ledger_exports = { path = "../massa-ledger-exports", optional = true } -massa_module_cache = { path = "../massa-module-cache" } -massa_versioning = { path = "../massa-versioning" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "main" } +displaydoc = {workspace = true} +thiserror = {workspace = true} +num = {workspace = true, "features" = ["serde"]} # BOM UPGRADE Revert to {"version": "0.4", "features": ["serde"]} if problem 
+parking_lot = {workspace = true, "features" = ["deadlock_detection"], "optional" = true} +tempfile = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "3.3", "optional": true} if problem +tokio = {workspace = true, "features" = ["sync"]} +mockall = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "0.11.4", "optional": true} if problem +massa-proto-rs = {workspace = true, "features" = ["tonic"]} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_time = {workspace = true} +massa_storage = {workspace = true} +massa_final_state = {workspace = true} +massa_pos_exports = {workspace = true} +massa_ledger_exports = {workspace = true, "optional" = true} +massa_module_cache = {workspace = true} +massa_versioning = {workspace = true} +massa-sc-runtime = {workspace = true} [dev-dependencies] -mockall = "0.11.4" - -# for more information on what are the following features used for, see the cargo.toml at workspace level - -[features] -gas_calibration = ["massa_ledger_exports/testing", "parking_lot", "tempfile"] -testing = ["massa_models/testing", "massa_ledger_exports/testing", "parking_lot", "tempfile", "mockall"] +mockall = {workspace = true} diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index 142e49a296f..d941457d6d4 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -1,99 +1,61 @@ +[[bench]] +name = "basic" +harness = false + [package] name = "massa_execution_worker" version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -anyhow = "1.0" -rand = "0.8" -rand_xoshiro = "0.6" -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -tracing = "0.1" -serde_json = "1.0" -num = { version = "0.4", features = ["serde"] } -sha2 = "0.10.6" -sha3 = "0.10.8" -libsecp256k1 = "0.7.1" -# use with features 
-criterion = { version = "0.4", optional = true } -tempfile = { version = "3.3", optional = true } -massa_ledger_worker = { path = "../massa-ledger-worker", optional = true } -massa_pos_worker = { path = "../massa-pos-worker", optional = true } -# custom modules -massa_async_pool = { path = "../massa-async-pool" } -massa_channel = { path = "../massa-channel" } -massa_executed_ops = { path = "../massa-executed-ops" } -massa_execution_exports = { path = "../massa-execution-exports" } -massa_models = { path = "../massa-models" } -massa_storage = { path = "../massa-storage" } -massa_hash = { path = "../massa-hash" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "main" } -massa_metrics = { path = "../massa-metrics" } -massa_module_cache = { path = "../massa-module-cache" } -massa_signature = { path = "../massa-signature" } -massa_time = { path = "../massa-time" } -massa_ledger_exports = { path = "../massa-ledger-exports" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_final_state = { path = "../massa-final-state" } -massa_versioning = { path = "../massa-versioning" } -massa_db_exports = { path = "../massa-db-exports" } -massa_db_worker = { path = "../massa-db-worker", optional = true } -massa_wallet = { path = "../massa-wallet" } - -[dev-dependencies] -tokio = { version = "1.23", features = ["sync"] } -massa_pos_worker = { path = "../massa-pos-worker" } -massa_ledger_worker = { path = "../massa-ledger-worker" } -serial_test = "1.0.0" -tempfile = "3.2" -hex-literal = "0.3.4" -# custom modules with testing enabled -massa_execution_exports = { path = "../massa-execution-exports", features = [ - "testing", -] } -massa_final_state = { path = "../massa-final-state", features = ["testing"] } - -[[bench]] -name = "basic" -harness = false - [features] sandbox = ["massa_async_pool/sandbox"] -gas_calibration = [ - "massa-sc-runtime/gas_calibration", - "massa_execution_exports/gas_calibration", - "massa_final_state/testing", - 
"massa_pos_worker", - "massa_ledger_worker", - "massa_db_worker", - "tempfile", -] -testing = [ - "massa_execution_exports/testing", - "massa_ledger_exports/testing", - "massa_pos_exports/testing", - "massa_final_state/testing", - "massa-sc-runtime/testing", - "massa_wallet/testing", - "tempfile", - "massa_pos_worker", - "massa_ledger_worker", - "massa_metrics/testing", - "massa_db_worker", - "massa_metrics/testing" -] +gas_calibration = ["massa-sc-runtime/gas_calibration", "massa_execution_exports/gas_calibration", "massa_final_state/testing", "massa_pos_worker", "massa_ledger_worker", "massa_db_worker", "tempfile"] +testing = ["massa_execution_exports/testing", "massa_ledger_exports/testing", "massa_pos_exports/testing", "massa_final_state/testing", "massa-sc-runtime/testing", "massa_wallet/testing", "tempfile", "massa_pos_worker", "massa_ledger_worker", "massa_metrics/testing", "massa_db_worker", "massa_metrics/testing"] +benchmarking = ["massa-sc-runtime/gas_calibration", "criterion", "massa_pos_worker", "massa_ledger_worker", "massa_db_worker", "tempfile"] +metrics = [] -# This feature is useful as we want to have code that is compiled only when running benchmarks -benchmarking = [ - "massa-sc-runtime/gas_calibration", - "criterion", - "massa_pos_worker", - "massa_ledger_worker", - "massa_db_worker", - "tempfile", -] +[dependencies] +anyhow = {workspace = true} +rand = {workspace = true} +rand_xoshiro = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +tracing = {workspace = true} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +num = {workspace = true, "features" = ["serde"]} # BOM UPGRADE Revert to {"version": "0.4", "features": ["serde"]} if problem +sha2 = {workspace = true} +sha3 = {workspace = true} +libsecp256k1 = {workspace = true} +criterion = {workspace = true, "optional" = true} +tempfile = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "3.3", "optional": 
true} if problem +massa_ledger_worker = {workspace = true, "optional" = true} +massa_pos_worker = {workspace = true, "optional" = true} +massa_async_pool = {workspace = true} +massa_channel = {workspace = true} +massa_executed_ops = {workspace = true} +massa_execution_exports = {workspace = true} +massa_models = {workspace = true} +massa_storage = {workspace = true} +massa_hash = {workspace = true} +massa-sc-runtime = {workspace = true} +massa_metrics = {workspace = true} +massa_module_cache = {workspace = true} +massa_signature = {workspace = true} +massa_time = {workspace = true} +massa_ledger_exports = {workspace = true} +massa_pos_exports = {workspace = true} +massa_final_state = {workspace = true} +massa_versioning = {workspace = true} +massa_db_exports = {workspace = true} +massa_db_worker = {workspace = true, "optional" = true} +massa_wallet = {workspace = true} -metrics = [] +[dev-dependencies] +tokio = {workspace = true, "features" = ["sync"]} +massa_pos_worker = {workspace = true} +massa_ledger_worker = {workspace = true} +serial_test = {workspace = true} +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.2" if problem +hex-literal = {workspace = true} +massa_execution_exports = {workspace = true, "features" = ["testing"]} +massa_final_state = {workspace = true, "features" = ["testing"]} diff --git a/massa-factory-exports/Cargo.toml b/massa-factory-exports/Cargo.toml index c55863c50d6..6d0ec24c44d 100644 --- a/massa-factory-exports/Cargo.toml +++ b/massa-factory-exports/Cargo.toml @@ -4,23 +4,19 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -displaydoc = "0.2" -thiserror = "1.0" -# custom modules -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_time = { path = "../massa-time" } -massa_signature = { path = "../massa-signature" } -massa_pos_exports = { path = 
"../massa-pos-exports" } -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_protocol_exports = { path = "../massa-protocol-exports" } -massa_storage = { path = "../massa-storage" } - -# for more information on what are the following features used for, see the cargo.toml at workspace level [features] testing = ["massa_models/testing", "massa_protocol_exports/testing", "massa_pool_exports/testing"] sandbox = [] + +[dependencies] +displaydoc = {workspace = true} +thiserror = {workspace = true} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_time = {workspace = true} +massa_signature = {workspace = true} +massa_pos_exports = {workspace = true} +massa_consensus_exports = {workspace = true} +massa_pool_exports = {workspace = true} +massa_protocol_exports = {workspace = true} +massa_storage = {workspace = true} diff --git a/massa-factory-worker/Cargo.toml b/massa-factory-worker/Cargo.toml index bd56b9002b1..1baa1129b45 100644 --- a/massa-factory-worker/Cargo.toml +++ b/massa-factory-worker/Cargo.toml @@ -4,34 +4,31 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +sandbox = [] +testing = ["massa_factory_exports/testing", "massa_pos_exports/testing", "massa_pool_exports/testing", "massa_protocol_exports/testing", "massa_wallet/testing"] [dependencies] -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -crossbeam-channel = "0.5" -tracing = "0.1" -# custom modules -massa_channel = { path = "../massa-channel" } -massa_models = { path = "../massa-models" } -massa_factory_exports = { path = "../massa-factory-exports" } -massa_signature = { path = "../massa-signature" } -massa_storage = { path = "../massa-storage" } -massa_time = { path = "../massa-time" } -massa_wallet = { path = "../massa-wallet" } -massa_hash = { path = 
"../massa-hash" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_versioning = { path = "../massa-versioning" } +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +crossbeam-channel = {workspace = true} +tracing = {workspace = true} +massa_channel = {workspace = true} +massa_models = {workspace = true} +massa_factory_exports = {workspace = true} +massa_signature = {workspace = true} +massa_storage = {workspace = true} +massa_time = {workspace = true} +massa_wallet = {workspace = true} +massa_hash = {workspace = true} +massa_pos_exports = {workspace = true} +massa_pool_exports = {workspace = true} +massa_versioning = {workspace = true} [dev-dependencies] -num = "0.4" -massa_protocol_exports = { path = "../massa-protocol-exports", features=["testing"] } -massa_consensus_exports = { path = "../massa-consensus-exports", features = ["testing"] } -massa_factory_exports = { path = "../massa-factory-exports", features=["testing"] } -massa_wallet = { path = "../massa-wallet", features=["testing"] } -massa_pos_exports = { path = "../massa-pos-exports", features=["testing"] } -massa_pool_exports = { path = "../massa-pool-exports", features=["testing"] } - -[features] -sandbox = [] -testing = ["massa_factory_exports/testing", "massa_pos_exports/testing", "massa_pool_exports/testing", "massa_protocol_exports/testing", "massa_wallet/testing"] +num = {workspace = true} +massa_protocol_exports = {workspace = true, "features" = ["testing"]} +massa_consensus_exports = {workspace = true, "features" = ["testing"]} +massa_factory_exports = {workspace = true, "features" = ["testing"]} +massa_wallet = {workspace = true, "features" = ["testing"]} +massa_pos_exports = {workspace = true, "features" = ["testing"]} +massa_pool_exports = {workspace = true, "features" = ["testing"]} diff --git a/massa-final-state/Cargo.toml b/massa-final-state/Cargo.toml index f0d76c6a27c..3ff04a7a6a3 100644 --- 
a/massa-final-state/Cargo.toml +++ b/massa-final-state/Cargo.toml @@ -4,46 +4,37 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -displaydoc = "0.2" -serde = { version = "1.0", features = ["derive"] } -nom = "=7.1" -bs58 = { version = "0.4", features = ["check"] } -thiserror = "1.0" -tracing = "0.1" -num = "0.4" - -# custom modules -massa_ledger_exports = { path = "../massa-ledger-exports" } -massa_ledger_worker = { path = "../massa-ledger-worker", optional = true } -massa_executed_ops = { path = "../massa-executed-ops" } -massa_models = { path = "../massa-models" } -massa_async_pool = { path = "../massa-async-pool" } -massa_serialization = { path = "../massa-serialization" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_db_exports = { path = "../massa-db-exports" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } -massa_versioning = { path = "../massa-versioning" } -massa_time = { path = "../massa-time" } -massa_hash = { path = "../massa-hash" } - -[dev-dependencies] -massa_async_pool = { path = "../massa-async-pool", features = ["testing"] } -massa_ledger_worker = { path = "../massa-ledger-worker", features = [ - "testing", -] } -massa_pos_worker = { path = "../massa-pos-worker", features = ["testing"] } -massa_pos_exports = { path = "../massa-pos-exports", features = ["testing"] } -massa_db_worker = { path = "../massa-db-worker"} -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -tempfile = "3.3" - -# for more information on what are the following features used for, see the cargo.toml at workspace level [features] -testing = [ - "massa_ledger_worker/testing", - "massa_async_pool/testing", - "massa_pos_exports/testing", -] +testing = ["massa_ledger_worker/testing", "massa_async_pool/testing", "massa_pos_exports/testing"] sandbox = ["massa_async_pool/sandbox"] bootstrap_server = [] + +[dependencies] +displaydoc = {workspace = true} 
+serde = {workspace = true, "features" = ["derive"]} +nom = {workspace = true} +bs58 = {workspace = true, "features" = ["check"]} # BOM UPGRADE Revert to {"version": "0.4", "features": ["check"]} if problem +thiserror = {workspace = true} +tracing = {workspace = true} +num = {workspace = true} +massa_ledger_exports = {workspace = true} +massa_ledger_worker = {workspace = true, "optional" = true} +massa_executed_ops = {workspace = true} +massa_models = {workspace = true} +massa_async_pool = {workspace = true} +massa_serialization = {workspace = true} +massa_pos_exports = {workspace = true} +massa_db_exports = {workspace = true} +massa-proto-rs = {workspace = true, "features" = ["tonic"]} +massa_versioning = {workspace = true} +massa_time = {workspace = true} +massa_hash = {workspace = true} + +[dev-dependencies] +massa_async_pool = {workspace = true, "features" = ["testing"]} +massa_ledger_worker = {workspace = true, "features" = ["testing"]} +massa_pos_worker = {workspace = true, "features" = ["testing"]} +massa_pos_exports = {workspace = true, "features" = ["testing"]} +massa_db_worker = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem diff --git a/massa-grpc/Cargo.toml b/massa-grpc/Cargo.toml index b3735bb5a62..97037a37b9b 100644 --- a/massa-grpc/Cargo.toml +++ b/massa-grpc/Cargo.toml @@ -1,5 +1,3 @@ -# Copyright (c) 2023 MASSA LABS - [package] name = "massa_grpc" version = "0.24.0" @@ -10,37 +8,36 @@ homepage = "https://massa.net" documentation = "https://docs.massa.net/" [dependencies] -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } -displaydoc = "0.2" -thiserror = "1.0" -tonic = { version = "0.9.2", features = ["gzip", "tls"] } -tonic-web = "0.9.2" -tonic-reflection = "0.9.2" -tonic-health = "0.9.2" -tower-http = { version = "0.4.0", features = ["cors"] } -hyper = "0.14.25" 
-futures-util = "0.3.25" -serde = { version = "1.0", features = ["derive"] } -tokio = { version = "1.23", features = ["rt-multi-thread", "macros"] } -tokio-stream = "0.1.12" -itertools = "0.10" -h2 = "0.3.17" -tracing = "0.1" -# custom modules -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_protocol_exports = { path = "../massa-protocol-exports" } -massa_execution_exports = { path = "../massa-execution-exports" } -massa_storage = { path = "../massa-storage" } -massa_time = { path = "../massa-time" } -massa_wallet = { path = "../massa-wallet" } -massa_serialization = { path = "../massa-serialization" } -massa_versioning = { path = "../massa-versioning" } +massa-proto-rs = {workspace = true, "features" = ["tonic"]} +displaydoc = {workspace = true} +thiserror = {workspace = true} +tonic = {workspace = true, "features" = ["gzip", "tls"]} +tonic-web = {workspace = true} +tonic-reflection = {workspace = true} +tonic-health = {workspace = true} +tower-http = {workspace = true, "features" = ["cors"]} +hyper = {workspace = true} +futures-util = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +tokio = {workspace = true, "features" = ["rt-multi-thread", "macros"]} +tokio-stream = {workspace = true} # BOM UPGRADE Revert to "0.1.12" if problem +itertools = {workspace = true} +h2 = {workspace = true} +tracing = {workspace = true} +massa_consensus_exports = {workspace = true} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_pos_exports = {workspace = true} +massa_pool_exports = {workspace = true} +massa_protocol_exports = {workspace = true} +massa_execution_exports = {workspace = true} +massa_storage = {workspace = true} +massa_time = {workspace = true} +massa_wallet = {workspace = true} +massa_serialization = 
{workspace = true} +massa_versioning = {workspace = true} [dev-dependencies] -num = "0.4" -massa_consensus_exports = { path = "../massa-consensus-exports", features = ["testing"] } -massa_channel = { path = "../massa-channel" } +num = {workspace = true} +massa_consensus_exports = {workspace = true, "features" = ["testing"]} +massa_channel = {workspace = true} diff --git a/massa-hash/Cargo.toml b/massa-hash/Cargo.toml index 62871b36527..5783d6e4eb3 100644 --- a/massa-hash/Cargo.toml +++ b/massa-hash/Cargo.toml @@ -4,21 +4,17 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -blake3 = "=1.3" -bs58 = { version = "=0.4", features = ["check"] } -displaydoc = "0.2" -serde = { version = "1.0", features = ["derive"] } -thiserror = "1.0" -nom = "=7.1" -lsmtree = "=0.1.1" -generic-array = "0.14.7" - -# custom modules -massa_serialization = { path = "../massa-serialization" } +blake3 = {workspace = true} +bs58 = {workspace = true, "features" = ["check"]} +displaydoc = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +thiserror = {workspace = true} +nom = {workspace = true} +lsmtree = {workspace = true} +generic-array = {workspace = true} +massa_serialization = {workspace = true} [dev-dependencies] -serde_json = "1.0" -serial_test = "1.0" +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +serial_test = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem diff --git a/massa-ledger-exports/Cargo.toml b/massa-ledger-exports/Cargo.toml index 56b612c25ec..4131a6124c4 100644 --- a/massa-ledger-exports/Cargo.toml +++ b/massa-ledger-exports/Cargo.toml @@ -4,22 +4,19 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -displaydoc = "0.2" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -tempfile = { version = "3.3", optional = true } # use with testing 
feature -thiserror = "1.0" -nom = "=7.1" -num_enum = "0.5.10" - -# custom modules -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } -massa_db_exports = { path = "../massa-db-exports" } - -# for more information on what are the following features used for, see the cargo.toml at workspace level [features] testing = ["tempfile", "massa_models/testing"] + +[dependencies] +displaydoc = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +tempfile = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "3.3", "optional": true} if problem +thiserror = {workspace = true} +nom = {workspace = true} +num_enum = {workspace = true} # BOM UPGRADE Revert to "0.5.10" if problem +massa-proto-rs = {workspace = true, "features" = ["tonic"]} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} +massa_db_exports = {workspace = true} diff --git a/massa-ledger-worker/Cargo.toml b/massa-ledger-worker/Cargo.toml index cb88bbb4a35..363387a3b57 100644 --- a/massa-ledger-worker/Cargo.toml +++ b/massa-ledger-worker/Cargo.toml @@ -4,24 +4,21 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -serde_json = "1.0" -tempfile = { version = "3.3", optional = true } # use with testing feature +[features] +testing = ["tempfile", "massa_models/testing", "massa_ledger_exports/testing", "massa_db_worker", "parking_lot"] -# custom modules -massa_ledger_exports = { path = "../massa-ledger-exports" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } -massa_db_exports = { path = "../massa-db-exports" } -massa_db_worker = { path = "../massa-db-worker", 
optional = true} -parking_lot = { version = "0.12", features = ["deadlock_detection"], optional = true } +[dependencies] +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +tempfile = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "3.3", "optional": true} if problem +massa_ledger_exports = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} +massa_db_exports = {workspace = true} +massa_db_worker = {workspace = true, "optional" = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"], "optional" = true} [dev-dependencies] -massa_signature = { path = "../massa-signature" } -massa_hash = { path = "../massa-hash" } -massa_db_worker = { path = "../massa-db-worker" } -tempfile = "3.3" - -# for more information on what are the following features used for, see the cargo.toml at workspace level -[features] -testing = ["tempfile", "massa_models/testing", "massa_ledger_exports/testing", "massa_db_worker", "parking_lot"] +massa_signature = {workspace = true} +massa_hash = {workspace = true} +massa_db_worker = {workspace = true} +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem diff --git a/massa-logging/Cargo.toml b/massa-logging/Cargo.toml index 98260bc5588..45eba5299b4 100644 --- a/massa-logging/Cargo.toml +++ b/massa-logging/Cargo.toml @@ -4,8 +4,6 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -serde_json = "1.0" -tracing = "0.1" +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +tracing = {workspace = true} diff --git a/massa-metrics/Cargo.toml b/massa-metrics/Cargo.toml index c24ecdc8cfe..92fed20f733 100644 --- a/massa-metrics/Cargo.toml +++ b/massa-metrics/Cargo.toml @@ -3,17 +3,14 @@ name = "massa_metrics" version = "0.24.0" edition = "2021" -# See more keys and their definitions 
at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -prometheus = { version = "0.13.3", features = ["process"] } -hyper = { version = "0.14.26", features = ["server", "tcp", "http1"] } -tokio = { version = "1.28.0", features = ["full"] } -lazy_static = "1.4.0" -tracing = "0.1" -massa_channel = { path = "../massa-channel" } - - [features] testing = [] sandbox = [] + +[dependencies] +prometheus = {workspace = true, "features" = ["process"]} +hyper = {workspace = true, "features" = ["server", "tcp", "http1"]} # BOM UPGRADE Revert to {"version": "0.14.26", "features": ["server", "tcp", "http1"]} if problem +tokio = {workspace = true, "features" = ["full"]} # BOM UPGRADE Revert to {"version": "1.28.0", "features": ["full"]} if problem +lazy_static = {workspace = true} +tracing = {workspace = true} +massa_channel = {workspace = true} diff --git a/massa-models/Cargo.toml b/massa-models/Cargo.toml index b4f6c2fbe9f..f80bfbe6e75 100644 --- a/massa-models/Cargo.toml +++ b/massa-models/Cargo.toml @@ -4,34 +4,30 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -displaydoc = "0.2" -lazy_static = "1.4" -num_enum = "0.5" -rust_decimal = "1.26" -serde = { version = "1.0", features = ["derive"] } -serde_with = "2.1.0" -thiserror = "1.0" -num = { version = "0.4", features = ["serde"] } -directories = "4.0" -config = "0.13" -bs58 = { version = "=0.4", features = ["check"] } -bitvec = { version = "=1.0", features = ["serde"] } -nom = "=7.1" -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } - -# custom modules -massa_hash = { path = "../massa-hash" } -massa_serialization = { path = "../massa-serialization" } -massa_signature = { path = "../massa-signature" } -massa_time = { path = "../massa-time" } -# TODO tag transition crate with a version number -transition = { git = "https://github.com/massalabs/transition.git", rev = "93fa3bf82f9f5ff421c78536879b7fd1b948ca75" } - 
-[dev-dependencies] -serial_test = "1.0" - -# for more information on what are the following features used for, see the cargo.toml at workspace level [features] sandbox = [] testing = [] + +[dependencies] +displaydoc = {workspace = true} +lazy_static = {workspace = true} # BOM UPGRADE Revert to "1.4" if problem +num_enum = {workspace = true} +rust_decimal = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_with = {workspace = true} +thiserror = {workspace = true} +num = {workspace = true, "features" = ["serde"]} # BOM UPGRADE Revert to {"version": "0.4", "features": ["serde"]} if problem +directories = {workspace = true} +config = {workspace = true} +bs58 = {workspace = true, "features" = ["check"]} +bitvec = {workspace = true, "features" = ["serde"]} # BOM UPGRADE Revert to {"version": "=1.0", "features": ["serde"]} if problem +nom = {workspace = true} +massa-proto-rs = {workspace = true, "features" = ["tonic"]} +massa_hash = {workspace = true} +massa_serialization = {workspace = true} +massa_signature = {workspace = true} +massa_time = {workspace = true} +transition = {workspace = true} + +[dev-dependencies] +serial_test = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem diff --git a/massa-module-cache/Cargo.toml b/massa-module-cache/Cargo.toml index ce31488be51..795ad664a6f 100644 --- a/massa-module-cache/Cargo.toml +++ b/massa-module-cache/Cargo.toml @@ -3,27 +3,24 @@ name = "massa_module_cache" version = "0.24.0" edition = "2021" +[features] +testing = ["massa-sc-runtime/testing"] + [dependencies] -schnellru = "0.2.0" -serial_test = "1.0.0" -rand = "0.8.5" -num_enum = "0.5" -nom = "=7.1" -displaydoc = "0.2" -thiserror = "1.0" -anyhow = "1.0" -tracing = "0.1" -rocksdb = "0.20" -# massa -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", branch = "main", 
features = [ - "testing", -] } +schnellru = {workspace = true} +serial_test = {workspace = true} +rand = {workspace = true} # BOM UPGRADE Revert to "0.8.5" if problem +num_enum = {workspace = true} +nom = {workspace = true} +displaydoc = {workspace = true} +thiserror = {workspace = true} +anyhow = {workspace = true} +tracing = {workspace = true} +rocksdb = {workspace = true} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} +massa-sc-runtime = {workspace = true, "features" = ["testing"]} [dev-dependencies] -tempfile = "3.3" - -[features] -testing = ["massa-sc-runtime/testing"] +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem diff --git a/massa-node/Cargo.toml b/massa-node/Cargo.toml index b28c6ff6a48..a47ba6bc960 100644 --- a/massa-node/Cargo.toml +++ b/massa-node/Cargo.toml @@ -4,68 +4,59 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -crossbeam-channel = "0.5.6" -anyhow = "1.0" -lazy_static = "1.4" -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -serde = { version = "1.0", features = ["derive"] } -tokio = { version = "1.23", features = ["full"] } -num = "0.4" -tracing = { version = "0.1", features = [ - "max_level_debug", - "release_max_level_debug", -] } -tracing-subscriber = "0.3" -paw = "1.0" -rand = { version = "0.8.5", optional = true } -structopt = { version = "0.3", features = ["paw"] } -dialoguer = "0.10" -ctrlc = "3.2.5" -# custom modules -massa_api_exports = { path = "../massa-api-exports" } -massa_api = { path = "../massa-api" } -massa_async_pool = { path = "../massa-async-pool" } -massa_bootstrap = { path = "../massa-bootstrap" } -massa_channel = { path = "../massa-channel" } -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_consensus_worker = { path = "../massa-consensus-worker" } 
-massa_executed_ops = { path = "../massa-executed-ops" } -massa_execution_exports = { path = "../massa-execution-exports" } -massa_execution_worker = { path = "../massa-execution-worker" } -massa_logging = { path = "../massa-logging" } -massa_final_state = { path = "../massa-final-state" } -massa_ledger_exports = { path = "../massa-ledger-exports" } -massa_ledger_worker = { path = "../massa-ledger-worker" } -massa_metrics = { path = "../massa-metrics" } -massa_models = { path = "../massa-models" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_pool_worker = { path = "../massa-pool-worker" } -massa_protocol_exports = { path = "../massa-protocol-exports" } -massa_protocol_worker = { path = "../massa-protocol-worker" } -massa_pos_worker = { path = "../massa-pos-worker" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_storage = { path = "../massa-storage" } -massa_time = { path = "../massa-time" } -massa_wallet = { path = "../massa-wallet" } -massa_factory_exports = { path = "../massa-factory-exports" } -massa_factory_worker = { path = "../massa-factory-worker" } -massa_grpc = { path = "../massa-grpc" } -massa_versioning = { path = "../massa-versioning" } -massa_signature = { path = "../massa-signature", optional = true } -massa_db_exports = { path = "../massa-db-exports" } -massa_db_worker = { path = "../massa-db-worker" } -#massa_signature = { path = "../massa-signature", optional = true } - -# for more information on what are the following features used for, see the cargo.toml at workspace level [features] beta = [] -# 10s after initiating the first launch, will re-launch as if the node was signalled with `NeedsResync` resync_check = [] deadlock_detection = [] op_spammer = ["rand", "massa_signature"] bootstrap_server = ["massa_consensus_worker/bootstrap_server", "massa_final_state/bootstrap_server"] sandbox = ["massa_bootstrap/sandbox", "massa_consensus_worker/sandbox", "massa_execution_worker/sandbox", "massa_factory_worker/sandbox", 
"massa_final_state/sandbox", "massa_models/sandbox", "massa_metrics/sandbox"] testing = ["massa_metrics/testing"] + +[dependencies] +crossbeam-channel = {workspace = true} # BOM UPGRADE Revert to "0.5.6" if problem +anyhow = {workspace = true} +lazy_static = {workspace = true} # BOM UPGRADE Revert to "1.4" if problem +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +serde = {workspace = true, "features" = ["derive"]} +tokio = {workspace = true, "features" = ["full"]} +num = {workspace = true} +tracing = {workspace = true, "features" = ["max_level_debug", "release_max_level_debug"]} # BOM UPGRADE Revert to {"version": "0.1", "features": ["max_level_debug", "release_max_level_debug"]} if problem +tracing-subscriber = {workspace = true} +paw = {workspace = true} +rand = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "0.8.5", "optional": true} if problem +structopt = {workspace = true, "features" = ["paw"]} +dialoguer = {workspace = true} +ctrlc = {workspace = true} +massa_api_exports = {workspace = true} +massa_api = {workspace = true} +massa_async_pool = {workspace = true} +massa_bootstrap = {workspace = true} +massa_channel = {workspace = true} +massa_consensus_exports = {workspace = true} +massa_consensus_worker = {workspace = true} +massa_executed_ops = {workspace = true} +massa_execution_exports = {workspace = true} +massa_execution_worker = {workspace = true} +massa_logging = {workspace = true} +massa_final_state = {workspace = true} +massa_ledger_exports = {workspace = true} +massa_ledger_worker = {workspace = true} +massa_metrics = {workspace = true} +massa_models = {workspace = true} +massa_pool_exports = {workspace = true} +massa_pool_worker = {workspace = true} +massa_protocol_exports = {workspace = true} +massa_protocol_worker = {workspace = true} +massa_pos_worker = {workspace = true} +massa_pos_exports = {workspace = true} +massa_storage = {workspace = true} +massa_time = {workspace = true} +massa_wallet = 
{workspace = true} +massa_factory_exports = {workspace = true} +massa_factory_worker = {workspace = true} +massa_grpc = {workspace = true} +massa_versioning = {workspace = true} +massa_signature = {workspace = true, "optional" = true} +massa_db_exports = {workspace = true} +massa_db_worker = {workspace = true} diff --git a/massa-pool-exports/Cargo.toml b/massa-pool-exports/Cargo.toml index 1904b5b4bcb..f3e7ea736b9 100644 --- a/massa-pool-exports/Cargo.toml +++ b/massa-pool-exports/Cargo.toml @@ -4,22 +4,17 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" +[features] +testing = ["crossbeam-channel"] + [dependencies] -serde = { version = "1.0", features = ["derive"] } -tokio = { version = "1.23", features = ["sync"] } -crossbeam-channel = { version="0.5", optional=true } -# custom modules -massa_models = { path = "../massa-models" } -massa_storage = { path = "../massa-storage" } -massa_time = { path = "../massa-time"} -massa_pos_exports = { path = "../massa-pos-exports" } -massa_execution_exports = { path = "../massa-execution-exports" } +serde = {workspace = true, "features" = ["derive"]} +tokio = {workspace = true, "features" = ["sync"]} +crossbeam-channel = {workspace = true, "optional" = true} +massa_models = {workspace = true} +massa_storage = {workspace = true} +massa_time = {workspace = true} +massa_pos_exports = {workspace = true} +massa_execution_exports = {workspace = true} [dev-dependencies] - - -# for more information on what are the following features used for, see the cargo.toml at workspace level -[features] -testing = [ - "crossbeam-channel" -] diff --git a/massa-pool-worker/Cargo.toml b/massa-pool-worker/Cargo.toml index a2f06928ef6..8a0004d7778 100644 --- a/massa-pool-worker/Cargo.toml +++ b/massa-pool-worker/Cargo.toml @@ -4,26 +4,24 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" +[features] +testing = ["massa_pool_exports/testing", "massa_execution_exports/testing", "massa_pos_exports/testing", 
"massa_wallet/testing"] + [dependencies] -tracing = "0.1" -# custom modules -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -massa_models = { path = "../massa-models" } -massa_storage = { path = "../massa-storage" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_time = { path = "../massa-time" } -massa_wallet = { path = "../massa-wallet" } +tracing = {workspace = true} +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +massa_models = {workspace = true} +massa_storage = {workspace = true} +massa_pool_exports = {workspace = true} +massa_time = {workspace = true} +massa_wallet = {workspace = true} [dev-dependencies] -tokio = { version = "1.23", features = ["sync"] } -mockall = "0.11.4" -massa_signature = { path = "../massa-signature" } -massa_hash = { path = "../massa-hash" } -massa_pos_exports = { path = "../massa-pos-exports", features = [ "testing" ] } -massa_pool_exports = { path = "../massa-pool-exports", features = [ "testing" ] } -massa_execution_exports = { path = "../massa-execution-exports", features = [ "testing" ] } -crossbeam-channel = { version = "0.5" } - -# for more information on what are the following features used for, see the cargo.toml at workspace level -[features] -testing = ["massa_pool_exports/testing", "massa_execution_exports/testing", "massa_pos_exports/testing", "massa_wallet/testing"] +tokio = {workspace = true, "features" = ["sync"]} +mockall = {workspace = true} +massa_signature = {workspace = true} +massa_hash = {workspace = true} +massa_pos_exports = {workspace = true, "features" = ["testing"]} +massa_pool_exports = {workspace = true, "features" = ["testing"]} +massa_execution_exports = {workspace = true, "features" = ["testing"]} +crossbeam-channel = {workspace = true} diff --git a/massa-pos-exports/Cargo.toml b/massa-pos-exports/Cargo.toml index 0934f92a0b8..5858c94381b 100644 --- a/massa-pos-exports/Cargo.toml +++ b/massa-pos-exports/Cargo.toml @@ -4,33 +4,28 @@ version = 
"0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +testing = ["crossbeam-channel", "massa_models/testing", "mockall"] [dependencies] -bitvec = { version = "1.0", features = ["serde"] } -displaydoc = "0.2" -nom = "=7.1" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -thiserror = "1.0" -tracing = "0.1" -num = { version = "0.4", features = ["serde"] } -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -crossbeam-channel = { version = "0.5", optional = true } -mockall = { version = "0.11.4", optional = true } - -# custom modules -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } -massa_signature = { path = "../massa-signature" } -massa_db_exports = { path = "../massa-db-exports" } +bitvec = {workspace = true, "features" = ["serde"]} +displaydoc = {workspace = true} +nom = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +thiserror = {workspace = true} +tracing = {workspace = true} +num = {workspace = true, "features" = ["serde"]} # BOM UPGRADE Revert to {"version": "0.4", "features": ["serde"]} if problem +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +crossbeam-channel = {workspace = true, "optional" = true} +mockall = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "0.11.4", "optional": true} if problem +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} +massa_signature = {workspace = true} +massa_db_exports = {workspace = true} [dev-dependencies] -mockall = "0.11.4" -tempfile = "3.3" -massa_db_worker = { path = "../massa-db-worker"} - -# for more information on what are the following features used for, see the 
cargo.toml at workspace level -[features] -testing = ["crossbeam-channel", "massa_models/testing", "mockall"] +mockall = {workspace = true} +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem +massa_db_worker = {workspace = true} diff --git a/massa-pos-worker/Cargo.toml b/massa-pos-worker/Cargo.toml index e47a722a688..9cdd0893122 100644 --- a/massa-pos-worker/Cargo.toml +++ b/massa-pos-worker/Cargo.toml @@ -4,23 +4,19 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] +sandbox = [] +testing = ["massa_pos_exports/testing"] [dependencies] -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -rand = "=0.8.5" -rand_distr = "=0.4.3" -rand_xoshiro = "=0.6" -tracing = "0.1" -# custom modules -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_pos_exports = { path = "../massa-pos-exports" } +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +rand = {workspace = true} # BOM UPGRADE Revert to "=0.8.5" if problem +rand_distr = {workspace = true} +rand_xoshiro = {workspace = true} # BOM UPGRADE Revert to "=0.6" if problem +tracing = {workspace = true} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_pos_exports = {workspace = true} [dev-dependencies] -# custom modules with testing enabled -massa_pos_exports = { path = "../massa-pos-exports", features = ["testing"] } - -[features] -sandbox = [] -testing = ["massa_pos_exports/testing"] +massa_pos_exports = {workspace = true, "features" = ["testing"]} diff --git a/massa-protocol-exports/Cargo.toml b/massa-protocol-exports/Cargo.toml index 7f7ed203ef2..a7dbc0559f6 100644 --- a/massa-protocol-exports/Cargo.toml +++ b/massa-protocol-exports/Cargo.toml @@ -4,29 +4,26 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -displaydoc = "0.2" -thiserror = "1.0" -nom 
= "=7.1" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -# TODO tag peernet version -peernet = { git = "https://github.com/massalabs/PeerNet", branch = "deactivate_stream_limiter" } #rev = "7b2a1a9" } -tempfile = { version = "3.3", optional = true } # use with testing feature -mockall = "0.11.4" +[features] +testing = ["tempfile"] -# Custom modules -massa_models = { path = "../massa-models" } -massa_time = { path = "../massa-time" } -massa_storage = { path = "../massa-storage" } -massa_serialization = { path = "../massa-serialization" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_signature = { path = "../massa-signature"} -massa_versioning = { path = "../massa-versioning" } -massa_hash = { path = "../massa-hash"} +[dependencies] +displaydoc = {workspace = true} +thiserror = {workspace = true} +nom = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +peernet = {workspace = true} +tempfile = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "3.3", "optional": true} if problem +mockall = {workspace = true} +massa_models = {workspace = true} +massa_time = {workspace = true} +massa_storage = {workspace = true} +massa_serialization = {workspace = true} +massa_pos_exports = {workspace = true} +massa_signature = {workspace = true} +massa_versioning = {workspace = true} +massa_hash = {workspace = true} [dev-dependencies] -tempfile = "3.3" - -[features] -testing = ["tempfile"] +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem diff --git a/massa-protocol-worker/Cargo.toml b/massa-protocol-worker/Cargo.toml index ff588d21dac..e8a09e3ee0f 100644 --- a/massa-protocol-worker/Cargo.toml +++ b/massa-protocol-worker/Cargo.toml @@ -4,40 +4,37 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -tracing = {version = "0.1", features = ["log"]} -rand = "0.8" -parking_lot = 
"0.12" -crossbeam = "0.8" -serde_json = "1.0" -nom = "=7.1" -num_enum = "0.5" -# TODO tag peernet version -peernet = { git = "https://github.com/massalabs/PeerNet", branch = "deactivate_stream_limiter" } #rev = "7b2a1a9" } -tempfile = { version = "3.3", optional = true } # use with testing feature -rayon = "1.7.0" -schnellru = "0.2.1" +[features] +testing = ["massa_protocol_exports/testing", "tempfile", "massa_pool_exports/testing", "massa_consensus_exports/testing", "massa_metrics/testing"] -# modules Custom -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_logging = { path = "../massa-logging" } -massa_channel = { path = "../massa-channel" } -massa_protocol_exports = { path = "../massa-protocol-exports" } -massa_consensus_exports = { path = "../massa-consensus-exports" } -massa_metrics = { path = "../massa-metrics" } -massa_pool_exports = { path = "../massa-pool-exports" } -massa_pos_exports = { path = "../massa-pos-exports" } -massa_storage = { path = "../massa-storage" } -massa_serialization = { path = "../massa-serialization" } -massa_signature = { path = "../massa-signature" } -massa_time = { path = "../massa-time" } -massa_versioning = { path = "../massa-versioning" } +[dependencies] +tracing = {workspace = true, "features" = ["log"]} # BOM UPGRADE Revert to {"version": "0.1", "features": ["log"]} if problem +rand = {workspace = true} +parking_lot = {workspace = true} +crossbeam = {workspace = true} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +nom = {workspace = true} +num_enum = {workspace = true} +peernet = {workspace = true} +tempfile = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "3.3", "optional": true} if problem +rayon = {workspace = true} +schnellru = {workspace = true} # BOM UPGRADE Revert to "0.2.1" if problem +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_logging = {workspace = true} +massa_channel = {workspace = true} 
+massa_protocol_exports = {workspace = true} +massa_consensus_exports = {workspace = true} +massa_metrics = {workspace = true} +massa_pool_exports = {workspace = true} +massa_pos_exports = {workspace = true} +massa_storage = {workspace = true} +massa_serialization = {workspace = true} +massa_signature = {workspace = true} +massa_time = {workspace = true} +massa_versioning = {workspace = true} [dev-dependencies] -tempfile = "3.3" -serial_test = "2.0.0" -num = "0.4" - -[features] -testing = ["massa_protocol_exports/testing", "tempfile", "massa_pool_exports/testing", "massa_consensus_exports/testing", "massa_metrics/testing"] +tempfile = {workspace = true} # BOM UPGRADE Revert to "3.3" if problem +serial_test = {workspace = true} # BOM UPGRADE Revert to "2.0.0" if problem +num = {workspace = true} diff --git a/massa-sdk/Cargo.toml b/massa-sdk/Cargo.toml index 6597a23e917..d10537b8f9e 100644 --- a/massa-sdk/Cargo.toml +++ b/massa-sdk/Cargo.toml @@ -4,14 +4,14 @@ version = "0.24.0" edition = "2021" [dependencies] -jsonrpsee = { version = "0.18.2", features = ["client"] } -jsonrpsee-http-client = { version = "0.18.2", features = ["webpki-tls"] } -jsonrpsee-ws-client = { version = "0.18.2", features = ["webpki-tls"] } -http = "0.2.8" -tonic = { version = "0.9.1", features = ["gzip"] } -thiserror = "1.0" -tracing = {version = "0.1", features = ["log"]} -massa_api_exports = { path = "../massa-api-exports" } -massa_models = { path = "../massa-models" } -massa_time = { path = "../massa-time" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } +jsonrpsee = {workspace = true, "features" = ["client"]} +jsonrpsee-http-client = {workspace = true, "features" = ["webpki-tls"]} +jsonrpsee-ws-client = {workspace = true, "features" = ["webpki-tls"]} +http = {workspace = true} +tonic = {workspace = true, "features" = ["gzip"]} # BOM UPGRADE Revert to {"version": "0.9.1", "features": ["gzip"]} if problem +thiserror = 
{workspace = true} +tracing = {workspace = true, "features" = ["log"]} # BOM UPGRADE Revert to {"version": "0.1", "features": ["log"]} if problem +massa_api_exports = {workspace = true} +massa_models = {workspace = true} +massa_time = {workspace = true} +massa-proto-rs = {workspace = true, "features" = ["tonic"]} diff --git a/massa-serialization/Cargo.toml b/massa-serialization/Cargo.toml index 39c9009b56c..25d0db688f7 100644 --- a/massa-serialization/Cargo.toml +++ b/massa-serialization/Cargo.toml @@ -4,16 +4,11 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[features] [dependencies] -displaydoc = "0.2" -thiserror = "1.0" -nom = "=7.1" -unsigned-varint = { version = "0.7.1", features = [ - "nom", -], git = "https://github.com/cyphar/unsigned-varint.git", branch = "nom6-errors" } -num = "0.4" - -# for more information on what are the following features used for, see the cargo.toml at workspace level -[features] +displaydoc = {workspace = true} +thiserror = {workspace = true} +nom = {workspace = true} +unsigned-varint = {workspace = true, "features" = ["nom"]} +num = {workspace = true} diff --git a/massa-signature/Cargo.toml b/massa-signature/Cargo.toml index b3c9c1586a8..361885055ad 100644 --- a/massa-signature/Cargo.toml +++ b/massa-signature/Cargo.toml @@ -4,23 +4,18 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -bs58 = { version = "=0.4", features = ["check"] } -displaydoc = "0.2" -ed25519-dalek = { version = "=1.0", features = ["batch"] } -serde = { version = "1.0", features = ["derive"] } -thiserror = "1.0" -nom = "=7.1" +bs58 = {workspace = true, "features" = ["check"]} +displaydoc = {workspace = true} +ed25519-dalek = {workspace = true, "features" = ["batch"]} +serde = {workspace = true, "features" = ["derive"]} 
+thiserror = {workspace = true} +nom = {workspace = true} rand = "0.7" -# TODO tag transition crate with a version number -transition = { git = "https://github.com/massalabs/transition.git", rev = "93fa3bf82f9f5ff421c78536879b7fd1b948ca75" } - -# custom modules -massa_hash = { path = "../massa-hash" } -massa_serialization = { path = "../massa-serialization" } +transition = {workspace = true} +massa_hash = {workspace = true} +massa_serialization = {workspace = true} [dev-dependencies] -serial_test = "1.0.0" -serde_json = "1.0" +serial_test = {workspace = true} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem diff --git a/massa-storage/Cargo.toml b/massa-storage/Cargo.toml index bd3408e419d..0903d5bdfbe 100644 --- a/massa-storage/Cargo.toml +++ b/massa-storage/Cargo.toml @@ -4,15 +4,14 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -parking_lot = { version = "0.12", features = ["deadlock_detection"] } -massa_models = { path = "../massa-models" } -massa_metrics = { path = "../massa-metrics" } - -[dev-dependencies] -massa_factory_exports = { path = "../massa-factory-exports", features=["testing"] } -massa_signature = { path = "../massa-signature" } - [features] testing = ["massa_factory_exports/testing", "massa_metrics/testing"] +[dependencies] +parking_lot = {workspace = true, "features" = ["deadlock_detection"]} +massa_models = {workspace = true} +massa_metrics = {workspace = true} + +[dev-dependencies] +massa_factory_exports = {workspace = true, "features" = ["testing"]} +massa_signature = {workspace = true} diff --git a/massa-time/Cargo.toml b/massa-time/Cargo.toml index 66fbf573f46..98e8085fbe2 100644 --- a/massa-time/Cargo.toml +++ b/massa-time/Cargo.toml @@ -4,14 +4,10 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -time = { version = "0.3", features = ["serde", 
"formatting"] } -displaydoc = "0.2" -serde = { version = "1.0", features = ["derive"] } -thiserror = "1.0" -nom = "=7.1" - -# Custom dependencies -massa_serialization = { path = "../massa-serialization" } +time = {workspace = true, "features" = ["serde", "formatting"]} +displaydoc = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +thiserror = {workspace = true} +nom = {workspace = true} +massa_serialization = {workspace = true} diff --git a/massa-versioning/Cargo.toml b/massa-versioning/Cargo.toml index 2045cc49bc4..d19209206ac 100644 --- a/massa-versioning/Cargo.toml +++ b/massa-versioning/Cargo.toml @@ -4,29 +4,26 @@ version = "0.24.0" authors = ["Massa Labs "] edition = "2021" -[dependencies] -# This is from: https://github.com/rust-bakery/machine/pull/22 -machine = { git = "https://github.com/antifuchs/machine", branch = "fix-workspace-build" } -parking_lot = "0.12" -thiserror = "1.0" -num = "0.4" -num_enum = "0.5" -nom = "=7.1" -tracing = "0.1" +[features] +testing = [] -# custom module -massa_time = { path = "../massa-time" } -massa_models = { path = "../massa-models" } -massa_serialization = { path = "../massa-serialization" } -massa_hash = { path = "../massa-hash" } -massa_signature = { path = "../massa-signature" } -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", rev = "18ec02f", features = ["tonic"] } -massa_db_exports = { path = "../massa-db-exports" } +[dependencies] +machine = {workspace = true} +parking_lot = {workspace = true} +thiserror = {workspace = true} +num = {workspace = true} +num_enum = {workspace = true} +nom = {workspace = true} +tracing = {workspace = true} +massa_time = {workspace = true} +massa_models = {workspace = true} +massa_serialization = {workspace = true} +massa_hash = {workspace = true} +massa_signature = {workspace = true} +massa-proto-rs = {workspace = true, "features" = ["tonic"]} +massa_db_exports = {workspace = true} [dev-dependencies] -more-asserts = "0.3" -tempfile = "3.5" 
-massa_db_worker = { path = "../massa-db-worker" } - -[features] -testing = [] +more-asserts = {workspace = true} +tempfile = {workspace = true} +massa_db_worker = {workspace = true} diff --git a/massa-wallet/Cargo.toml b/massa-wallet/Cargo.toml index e0edca706e7..6e048e90be6 100644 --- a/massa-wallet/Cargo.toml +++ b/massa-wallet/Cargo.toml @@ -3,21 +3,17 @@ name = "massa_wallet" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -displaydoc = "0.2" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -serde_qs = "0.11" -thiserror = "1.0" -tempfile = { version = "3.3", optional = true } # use with testing feature - -# custom modules -massa_cipher = { path = "../massa-cipher" } -massa_hash = { path = "../massa-hash" } -massa_models = { path = "../massa-models" } -massa_signature = { path = "../massa-signature" } - [features] testing = ["tempfile", "massa_models/testing"] + +[dependencies] +displaydoc = {workspace = true} +serde = {workspace = true, "features" = ["derive"]} +serde_json = {workspace = true} # BOM UPGRADE Revert to "1.0" if problem +serde_qs = {workspace = true} +thiserror = {workspace = true} +tempfile = {workspace = true, "optional" = true} # BOM UPGRADE Revert to {"version": "3.3", "optional": true} if problem +massa_cipher = {workspace = true} +massa_hash = {workspace = true} +massa_models = {workspace = true} +massa_signature = {workspace = true} diff --git a/massa-xtask/Cargo.toml b/massa-xtask/Cargo.toml index 18b490350c1..532cde4aa95 100644 --- a/massa-xtask/Cargo.toml +++ b/massa-xtask/Cargo.toml @@ -3,9 +3,7 @@ name = "massa_xtask" version = "0.24.0" edition = "2021" -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] -massa_models = { path = "../massa-models" } -toml_edit = "0.19.8" -walkdir = "2.3.3" +massa_models = {workspace = true} +toml_edit = {workspace = 
true} # BOM UPGRADE Revert to "0.19.8" if problem +walkdir = {workspace = true} From a2f51f534f57a2e100d1953ad0efabf5d638bb7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20Morcillo?= Date: Thu, 13 Jul 2023 15:02:49 +0200 Subject: [PATCH 35/71] Sync with massa-sc-runtime#287 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Jean-François Morcillo --- massa-execution-worker/src/interface_impl.rs | 60 ++++++++++++-------- 1 file changed, 36 insertions(+), 24 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 39395931f01..69aaaf288b1 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -754,7 +754,7 @@ impl Interface for InterfaceImpl { /// /// # Returns /// true if the entry is matching the provided key in its operation datastore, otherwise false - fn has_op_key(&self, key: &[u8]) -> Result { + fn op_entry_exists(&self, key: &[u8]) -> Result { let context = context_guard!(self); let stack = context.stack.last().ok_or_else(|| anyhow!("No stack"))?; let datastore = stack @@ -1311,7 +1311,7 @@ impl Interface for InterfaceImpl { } /// Adds two native amounts, saturating at the numeric bounds instead of overflowing. - fn add_native_amounts_wasmv1( + fn add_native_amount_wasmv1( &self, amount1: &NativeAmount, amount2: &NativeAmount, @@ -1323,7 +1323,7 @@ impl Interface for InterfaceImpl { } /// Subtracts two native amounts, saturating at the numeric bounds instead of overflowing. - fn sub_native_amounts_wasmv1( + fn sub_native_amount_wasmv1( &self, amount1: &NativeAmount, amount2: &NativeAmount, @@ -1335,14 +1335,18 @@ impl Interface for InterfaceImpl { } /// Multiplies a native amount by a factor, saturating at the numeric bounds instead of overflowing. 
- fn mul_native_amount_wasmv1(&self, amount: &NativeAmount, factor: u64) -> Result { + fn scalar_mul_native_amount_wasmv1( + &self, + amount: &NativeAmount, + factor: u64, + ) -> Result { let amount = amount_from_native_amount(amount)?; let mul = amount.saturating_mul_u64(factor); Ok(amount_to_native_amount(&mul)) } /// Divides a native amount by a divisor, return an error if the divisor is 0. - fn div_rem_native_amount_wasmv1( + fn scalar_div_rem_native_amount_wasmv1( &self, dividend: &NativeAmount, divisor: u64, @@ -1364,7 +1368,7 @@ impl Interface for InterfaceImpl { } /// Divides a native amount by a divisor, return an error if the divisor is 0. - fn div_rem_native_amounts_wasmv1( + fn div_rem_native_amount_wasmv1( &self, dividend: &NativeAmount, divisor: &NativeAmount, @@ -1395,19 +1399,19 @@ impl Interface for InterfaceImpl { bs58::encode(data).with_check().into_string() } - fn check_address_wasmv1(&self, to_check: &String) -> Result { + fn check_address_wasmv1(&self, to_check: &str) -> Result { Ok(Address::from_str(to_check).is_ok()) } - fn check_pubkey_wasmv1(&self, to_check: &String) -> Result { + fn check_pubkey_wasmv1(&self, to_check: &str) -> Result { Ok(PublicKey::from_str(to_check).is_ok()) } - fn check_signature_wasmv1(&self, to_check: &String) -> Result { + fn check_signature_wasmv1(&self, to_check: &str) -> Result { Ok(Signature::from_str(to_check).is_ok()) } - fn get_address_category_wasmv1(&self, to_check: &String) -> Result { + fn get_address_category_wasmv1(&self, to_check: &str) -> Result { let addr = Address::from_str(to_check)?; match addr { Address::User(_) => Ok(AddressCategory::ScAddress), @@ -1417,7 +1421,7 @@ impl Interface for InterfaceImpl { } } - fn get_address_version_wasmv1(&self, address: &String) -> Result { + fn get_address_version_wasmv1(&self, address: &str) -> Result { let address = Address::from_str(address)?; match address { Address::User(UserAddress::UserAddressV0(_)) => Ok(0), @@ -1429,7 +1433,7 @@ impl Interface for 
InterfaceImpl { } } - fn get_pubkey_version_wasmv1(&self, pubkey: &String) -> Result { + fn get_pubkey_version_wasmv1(&self, pubkey: &str) -> Result { let pubkey = PublicKey::from_str(pubkey)?; match pubkey { PublicKey::PublicKeyV0(_) => Ok(0), @@ -1439,7 +1443,7 @@ impl Interface for InterfaceImpl { } } - fn get_signature_version_wasmv1(&self, signature: &String) -> Result { + fn get_signature_version_wasmv1(&self, signature: &str) -> Result { let signature = Signature::from_str(signature)?; match signature { Signature::SignatureV0(_) => Ok(0), @@ -1636,7 +1640,7 @@ mod tests { let amount3 = interface.native_amount_from_str_wasmv1("200").unwrap(); let sum = interface - .add_native_amounts_wasmv1(&amount1, &amount2) + .add_native_amount_wasmv1(&amount1, &amount2) .unwrap(); assert_eq!(amount3, sum); @@ -1649,7 +1653,7 @@ mod tests { interface.native_amount_to_string_wasmv1(&sum).unwrap() ); - let diff = interface.sub_native_amounts_wasmv1(&sum, &amount2).unwrap(); + let diff = interface.sub_native_amount_wasmv1(&sum, &amount2).unwrap(); assert_eq!(amount1, diff); let amount4 = NativeAmount { @@ -1660,38 +1664,46 @@ mod tests { let is_valid = interface.check_native_amount_wasmv1(&amount4).unwrap(); assert_eq!(is_valid, true); - let mul = interface.mul_native_amount_wasmv1(&amount1, 2).unwrap(); + let mul = interface + .scalar_mul_native_amount_wasmv1(&amount1, 2) + .unwrap(); assert_eq!(mul, amount3); - let (quotient, remainder) = interface.div_rem_native_amount_wasmv1(&amount1, 2).unwrap(); + let (quotient, remainder) = interface + .scalar_div_rem_native_amount_wasmv1(&amount1, 2) + .unwrap(); let quotient_res_50 = interface.native_amount_from_str_wasmv1("50").unwrap(); let remainder_res_0 = interface.native_amount_from_str_wasmv1("0").unwrap(); assert_eq!(quotient, quotient_res_50); assert_eq!(remainder, remainder_res_0); - let (quotient, remainder) = interface.div_rem_native_amount_wasmv1(&amount1, 3).unwrap(); - let verif_div = 
interface.mul_native_amount_wasmv1(&quotient, 3).unwrap(); + let (quotient, remainder) = interface + .scalar_div_rem_native_amount_wasmv1(&amount1, 3) + .unwrap(); + let verif_div = interface + .scalar_mul_native_amount_wasmv1(&quotient, 3) + .unwrap(); let verif_dif = interface - .add_native_amounts_wasmv1(&verif_div, &remainder) + .add_native_amount_wasmv1(&verif_div, &remainder) .unwrap(); assert_eq!(verif_dif, amount1); let amount5 = interface.native_amount_from_str_wasmv1("2").unwrap(); let (quotient, remainder) = interface - .div_rem_native_amounts_wasmv1(&amount1, &amount5) + .div_rem_native_amount_wasmv1(&amount1, &amount5) .unwrap(); assert_eq!(quotient, 50); assert_eq!(remainder, remainder_res_0); let amount6 = interface.native_amount_from_str_wasmv1("3").unwrap(); let (quotient, remainder) = interface - .div_rem_native_amounts_wasmv1(&amount1, &amount6) + .div_rem_native_amount_wasmv1(&amount1, &amount6) .unwrap(); let verif_div = interface - .mul_native_amount_wasmv1(&amount6, quotient) + .scalar_mul_native_amount_wasmv1(&amount6, quotient) .unwrap(); let verif_dif = interface - .add_native_amounts_wasmv1(&verif_div, &remainder) + .add_native_amount_wasmv1(&verif_div, &remainder) .unwrap(); assert_eq!(verif_dif, amount1); } From eea278d87345e0211dca8aa91a99aadc89e8ef97 Mon Sep 17 00:00:00 2001 From: AurelienFT Date: Mon, 17 Jul 2023 12:06:53 +0200 Subject: [PATCH 36/71] Try to give some space on CI. 
--- .github/workflows/ci.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a417ef83738..5b5bb385e49 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,6 +109,12 @@ jobs: - macOS-latest # - windows-latest steps: + - name: Maximize build space + uses: easimon/maximize-build-space@master + with: + root-reserve-mb: 512 + swap-size-mb: 1024 + remove-dotnet: 'true' - uses: ilammy/setup-nasm@v1 - uses: actions/checkout@v3 with: From e47f8753f7d988978471e17e8e07b75102dbd887 Mon Sep 17 00:00:00 2001 From: AurelienFT Date: Mon, 17 Jul 2023 13:02:13 +0200 Subject: [PATCH 37/71] fix build space ci --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5b5bb385e49..7e3c01697bb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -110,6 +110,7 @@ jobs: # - windows-latest steps: - name: Maximize build space + runs-on: ubuntu-latest uses: easimon/maximize-build-space@master with: root-reserve-mb: 512 From 32ed0de5606055a8388e0ed55d0acaa3d19c490b Mon Sep 17 00:00:00 2001 From: AurelienFT Date: Mon, 17 Jul 2023 13:21:51 +0200 Subject: [PATCH 38/71] Fix CI syntax on condition over an OS. --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7e3c01697bb..cd286943d48 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -110,7 +110,7 @@ jobs: # - windows-latest steps: - name: Maximize build space - runs-on: ubuntu-latest + if: runner.os == 'Linux' uses: easimon/maximize-build-space@master with: root-reserve-mb: 512 From 94c3f51a6950ce8fe918f3d6d1177814a1c494e5 Mon Sep 17 00:00:00 2001 From: AurelienFT Date: Mon, 17 Jul 2023 14:14:13 +0200 Subject: [PATCH 39/71] Try a new package to free CI space. 
--- .github/workflows/ci.yml | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cd286943d48..4dfcc2a21f8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,13 +109,21 @@ jobs: - macOS-latest # - windows-latest steps: - - name: Maximize build space - if: runner.os == 'Linux' - uses: easimon/maximize-build-space@master - with: - root-reserve-mb: 512 - swap-size-mb: 1024 - remove-dotnet: 'true' + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + # this might remove tools that are actually needed, + # if set to "true" but frees about 6 GB + tool-cache: false + + # all of these default to true, but feel free to set to + # "false" if necessary for your workflow + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true - uses: ilammy/setup-nasm@v1 - uses: actions/checkout@v3 with: From 20661de744db8ef5fb686f57935ab72c097a01c5 Mon Sep 17 00:00:00 2001 From: AurelienFT Date: Mon, 17 Jul 2023 14:14:59 +0200 Subject: [PATCH 40/71] Fix indent in CI. 
--- .github/workflows/ci.yml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4dfcc2a21f8..5c9499e22cb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -109,21 +109,21 @@ jobs: - macOS-latest # - windows-latest steps: - - name: Free Disk Space (Ubuntu) - uses: jlumbroso/free-disk-space@main - with: - # this might remove tools that are actually needed, - # if set to "true" but frees about 6 GB - tool-cache: false - - # all of these default to true, but feel free to set to - # "false" if necessary for your workflow - android: true - dotnet: true - haskell: true - large-packages: true - docker-images: true - swap-storage: true + - name: Free Disk Space (Ubuntu) + uses: jlumbroso/free-disk-space@main + with: + # this might remove tools that are actually needed, + # if set to "true" but frees about 6 GB + tool-cache: false + + # all of these default to true, but feel free to set to + # "false" if necessary for your workflow + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: true + swap-storage: true - uses: ilammy/setup-nasm@v1 - uses: actions/checkout@v3 with: From af94205b9cde631b9091abde3d99609ed6ed9abc Mon Sep 17 00:00:00 2001 From: AurelienFT Date: Mon, 17 Jul 2023 14:16:43 +0200 Subject: [PATCH 41/71] Run job that free space only on linux. 
--- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5c9499e22cb..89b46df9991 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -110,6 +110,7 @@ jobs: # - windows-latest steps: - name: Free Disk Space (Ubuntu) + if: runner.os == 'Linux' uses: jlumbroso/free-disk-space@main with: # this might remove tools that are actually needed, From 735076dfba5d1b08313bb05dbc4977db47071c37 Mon Sep 17 00:00:00 2001 From: AurelienFT Date: Mon, 17 Jul 2023 14:28:05 +0200 Subject: [PATCH 42/71] Remove cleaning of large packages --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 89b46df9991..e3584db0d66 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -122,7 +122,7 @@ jobs: android: true dotnet: true haskell: true - large-packages: true + large-packages: false docker-images: true swap-storage: true - uses: ilammy/setup-nasm@v1 From de74bbfa02d981775db9e7eef2fe2215f0aff3d2 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Mon, 17 Jul 2023 16:51:46 +0200 Subject: [PATCH 43/71] Sync --- Cargo.lock | 421 ++++++++++++++++++++++++++--------------------------- 1 file changed, 206 insertions(+), 215 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 963f8bdd9c4..16fa2699842 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -75,15 +75,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "aho-corasick" -version = "0.7.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" -dependencies = [ - "memchr", -] - [[package]] name = "aho-corasick" version = "1.0.2" @@ -125,9 +116,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.71" +version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" [[package]] name = "arrayref" @@ -144,7 +135,7 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "as-ffi-bindings" version = "0.2.5" -source = "git+https://github.com/massalabs/as-ffi-bindings.git?tag=v0.4.0#7767634dfc22407bd2b0fa0e4fd7432231b10dd7" +source = "git+https://github.com/massalabs/as-ffi-bindings.git?tag=0.5.0#512792515055573e600c0054a4b44bbcbfeb779a" dependencies = [ "anyhow", "wasmer", @@ -176,9 +167,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -187,9 +178,9 @@ version = "0.1.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -211,9 +202,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" -version = "0.6.18" +version = "0.6.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" +checksum = "a6a1de45611fdb535bfde7b7de4fd54f4fd2b17b1737c0a59b69bf9b92074b8c" dependencies = [ "async-trait", "axum-core", @@ -308,8 +299,8 @@ dependencies = [ "lazy_static", "lazycell", "peeking_take_while", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "regex", "rustc-hash", "shlex", @@ -392,7 +383,7 @@ dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", 
"proc-macro-crate 0.1.5", - "proc-macro2 1.0.64", + "proc-macro2 1.0.66", "syn 1.0.109", ] @@ -402,8 +393,8 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -413,8 +404,8 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -460,8 +451,8 @@ version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -983,12 +974,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0558d22a7b463ed0241e993f76f09f30b126687447751a8638587b864e4b3944" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" dependencies = [ - "darling_core 0.20.1", - "darling_macro 0.20.1", + "darling_core 0.20.3", + "darling_macro 0.20.3", ] [[package]] @@ -999,24 +990,24 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "strsim 0.10.0", "syn 1.0.109", ] [[package]] name = "darling_core" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab8bfa2e259f8ee1ce5e97824a3c55ec4404a0d772ca7fa96bf19f0752a046eb" +checksum = 
"177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "strsim 0.10.0", - "syn 2.0.25", + "syn 2.0.26", ] [[package]] @@ -1026,19 +1017,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core 0.14.4", - "quote 1.0.29", + "quote 1.0.31", "syn 1.0.109", ] [[package]] name = "darling_macro" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ - "darling_core 0.20.1", - "quote 1.0.29", - "syn 2.0.25", + "darling_core 0.20.3", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -1060,8 +1051,8 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -1150,9 +1141,9 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -1177,8 +1168,8 @@ dependencies = [ "byteorder", "lazy_static", "proc-macro-error", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -1250,8 +1241,8 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ - "proc-macro2 
1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -1262,8 +1253,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8ea75f31022cba043afe037940d73684327e915f88f62478e778c3de914cd0a" dependencies = [ "enum_delegate_lib", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -1273,8 +1264,8 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e1f6c3800b304a6be0012039e2a45a322a093539c45ab818d9e6895a39c90fe" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "rand 0.8.5", "syn 1.0.109", ] @@ -1294,10 +1285,10 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08b6c6ab82d70f08844964ba10c7babb716de2ecaeab9be5717918a5177d3af" dependencies = [ - "darling 0.20.1", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "darling 0.20.3", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -1308,9 +1299,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f94c0e13118e7d7533271f754a168ae8400e6a1cc043f2bfd53cc7290f1a1de3" +checksum = "da96524cc884f6558f1769b6c46686af2fe8e8b4cd253bd5a3cdba8181b8e070" dependencies = [ "serde", ] @@ -1374,7 +1365,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ "cfg-if", - "rustix 0.38.3", + "rustix 0.38.4", "windows-sys 0.48.0", ] @@ -1499,9 +1490,9 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", 
- "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -1620,11 +1611,11 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" +checksum = "1391ab1f92ffcc08911957149833e682aa3fe252b9f45f966d2ef972274c97df" dependencies = [ - "aho-corasick 0.7.20", + "aho-corasick", "bstr", "fnv", "log", @@ -2015,9 +2006,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jobserver" @@ -2142,8 +2133,8 @@ checksum = "c6027ac0b197ce9543097d02a290f550ce1d9432bf301524b013053c0b75cc94" dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -2384,7 +2375,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0fbfc88337168279f2e9ae06e157cfed4efd3316e14dc96ed074d4f2e6c5952" dependencies = [ - "quote 1.0.29", + "quote 1.0.31", "syn 1.0.109", ] @@ -2512,7 +2503,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#773b7a546a03a271ead1bb0e95ac3c14eb4fb466" +source = "git+https://github.com/massalabs/massa-proto-rs?branch=feature/Improve_ABI_types_in_wasmv1#b1c4d97e905b2abad8e4fe3867789e8d482a8791" dependencies = [ "glob", "prost", @@ -2525,7 +2516,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = 
"git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#f6e907da258feebdbd7a62f0574c84d306beabac" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#b791965d87abc9f964ce3c68240fa997c44e6392" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2544,7 +2535,7 @@ dependencies = [ "regex", "serde", "serde_json", - "serial_test 1.0.0", + "serial_test 2.0.0", "sha2 0.10.6", "sha3", "thiserror", @@ -3435,8 +3426,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -3519,9 +3510,9 @@ dependencies = [ [[package]] name = "num" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" +checksum = "b05180d69e3da0e530ba2a1dae5110317e49e3b7f3d41be227dc5f92e49ee7af" dependencies = [ "num-bigint", "num-complex", @@ -3632,8 +3623,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -3644,9 +3635,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -3787,8 +3778,8 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f35583365be5d148e959284f42526841917b7bfa09e2d1a7ad5dde2cf0eaa39" dependencies = [ - 
"proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -3840,9 +3831,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.7.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73935e4d55e2abf7f130186537b19e7a4abc886a0252380b59248af473a3fc9" +checksum = "0d2d1d55045829d65aad9d389139882ad623b33b904e7c9f1b10c5b8927298e5" dependencies = [ "thiserror", "ucd-trie", @@ -3850,9 +3841,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aef623c9bbfa0eedf5a0efba11a5ee83209c326653ca31ff019bec3a95bfff2b" +checksum = "5f94bca7e7a599d89dea5dfa309e217e7906c3c007fb9c3299c40b10d6a315d3" dependencies = [ "pest", "pest_generator", @@ -3860,22 +3851,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190" +checksum = "99d490fe7e8556575ff6911e45567ab95e71617f43781e5c05490dc8d75c965c" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] name = "pest_meta" -version = "2.7.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01f71cb40bd8bb94232df14b946909e14660e33fc05db3e50ae2a82d7ea0ca0" +checksum = "2674c66ebb4b4d9036012091b537aae5878970d6999f81a265034d85b136b341" dependencies = [ "once_cell", "pest", @@ -3907,9 +3898,9 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + 
"proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -4012,7 +4003,7 @@ version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" dependencies = [ - "proc-macro2 1.0.64", + "proc-macro2 1.0.66", "syn 1.0.109", ] @@ -4042,8 +4033,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", "version_check", ] @@ -4054,8 +4045,8 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "version_check", ] @@ -4070,9 +4061,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.64" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] @@ -4147,8 +4138,8 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", "itertools", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -4182,8 +4173,8 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -4215,11 +4206,11 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.29" +version = "1.0.31" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" +checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" dependencies = [ - "proc-macro2 1.0.64", + "proc-macro2 1.0.66", ] [[package]] @@ -4397,7 +4388,7 @@ version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" dependencies = [ - "aho-corasick 1.0.2", + "aho-corasick", "memchr", "regex-automata", "regex-syntax", @@ -4405,11 +4396,11 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" +checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" dependencies = [ - "aho-corasick 1.0.2", + "aho-corasick", "memchr", "regex-syntax", ] @@ -4480,8 +4471,8 @@ version = "0.7.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -4576,9 +4567,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.3" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" +checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" dependencies = [ "bitflags 2.3.3", "errno", @@ -4642,9 +4633,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" +checksum = 
"7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "rustyline" @@ -4675,16 +4666,16 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "107c3d5d7f370ac09efa62a78375f94d94b8a33c61d8c278b96683fb4dbf2d8d" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] [[package]] name = "ryu" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "same-file" @@ -4717,9 +4708,9 @@ dependencies = [ [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" @@ -4792,16 +4783,16 @@ version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] name = "serde_json" -version = "1.0.100" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" +checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" dependencies = [ "itoa", "ryu", @@ -4841,10 +4832,10 @@ version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ - "darling 0.20.1", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 
2.0.25", + "darling 0.20.3", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -4881,8 +4872,8 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "079a83df15f85d89a68d64ae1238f142f172b1fa915d0d76b26a7cba1b659a69" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -4892,9 +4883,9 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -5089,8 +5080,8 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -5110,8 +5101,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "rustversion", "syn 1.0.109", ] @@ -5121,7 +5112,7 @@ name = "substruct" version = "0.1.0" source = "git+https://github.com/sydhds/substruct#2fb3ae0dc9d913a0566ce6415eaa7a7ca1690fe1" dependencies = [ - "quote 1.0.29", + "quote 1.0.31", "syn 1.0.109", ] @@ -5148,19 +5139,19 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.25" +version = "2.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" +checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "unicode-ident", ] @@ -5178,9 +5169,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.8" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1c7f239eb94671427157bd93b3694320f3668d4e1eff08c7285366fd777fac" +checksum = "df8e77cb757a61f51b947ec4a7e3646efd825b73561db1c232a8ccb639e611a0" [[package]] name = "tempfile" @@ -5232,9 +5223,9 @@ version = "1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -5346,9 +5337,9 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -5405,9 +5396,9 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.12" +version = "0.19.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c500344a19072298cd05a7224b3c0c629348b78692bf48466c5238656e315a78" +checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" dependencies = [ "indexmap 2.0.0", "toml_datetime", @@ -5453,9 +5444,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" dependencies = [ 
"prettyplease", - "proc-macro2 1.0.64", + "proc-macro2 1.0.66", "prost-build", - "quote 1.0.29", + "quote 1.0.31", "syn 1.0.109", ] @@ -5575,9 +5566,9 @@ version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] @@ -5629,8 +5620,8 @@ version = "0.1.0" source = "git+https://github.com/massalabs/transition.git?rev=93fa3bf82f9f5ff421c78536879b7fd1b948ca75#93fa3bf82f9f5ff421c78536879b7fd1b948ca75" dependencies = [ "darling 0.14.4", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", "unsigned-varint", ] @@ -5661,9 +5652,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" [[package]] name = "unicode-normalization" @@ -5735,9 +5726,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" [[package]] name = "valuable" @@ -5813,9 +5804,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", "wasm-bindgen-shared", ] @@ -5837,8 +5828,8 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] @@ -5860,7 +5851,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ - "quote 1.0.29", + "quote 1.0.31", "wasm-bindgen-macro-support", ] @@ -5870,9 +5861,9 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5894,9 +5885,9 @@ dependencies = [ [[package]] name = "wasmer" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78caedecd8cb71ed47ccca03b68d69414a3d278bb031e6f93f15759344efdd52" +checksum = "ea790bcdfb4e6e9d1e5ddf75b4699aac62b078fcc9f27f44e1748165ceea67bf" dependencies = [ "bytes", "cfg-if", @@ -5922,9 +5913,9 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "726a8450541af4a57c34af7b6973fdbfc79f896cc7e733429577dfd1d1687180" +checksum = "f093937725e242e5529fed27e08ff836c011a9ecc22e6819fb818c2ac6ff5f88" dependencies = [ "backtrace", "cfg-if", @@ -5946,9 +5937,9 @@ dependencies = [ [[package]] name = "wasmer-compiler-cranelift" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e5633f90f372563ebbdf3f9799c7b29ba11c90e56cf9b54017112d2e656c95" +checksum = "3b27b1670d27158789ebe14e4da3902c72132174884a1c6a3533ce4fd9dd83db" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -5965,9 +5956,9 @@ 
dependencies = [ [[package]] name = "wasmer-compiler-singlepass" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d38957de6f452115c0af3ff08cec268ee248d665b54d4bbf7da60b7453cb97" +checksum = "02509aaab7e302fc551ff03512807514b379ba32136892e96fcaa5b62a3228de" dependencies = [ "byteorder", "dynasm", @@ -5984,21 +5975,21 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97901fdbaae383dbb90ea162cc3a76a9fa58ac39aec7948b4c0b9bbef9307738" +checksum = "13ae8286cba2acb10065a4dac129c7c7f7bcd24acd6538555d96616eea16bc27" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.64", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.31", "syn 1.0.109", ] [[package]] name = "wasmer-middlewares" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e028013811035111beb768074b6ccc09eabd77811b1e01fd099b5471924ca16" +checksum = "3bc8cafe02112ecf21a7fc1d090ac0f0220a86a8d4f561c24e382a4c2ee4addc" dependencies = [ "wasmer", "wasmer-types", @@ -6007,9 +5998,9 @@ dependencies = [ [[package]] name = "wasmer-object" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6a25e04fdd0f2173bebfce2804ac1fba5e45827afed76876bf414e74244aae" +checksum = "bda85dbe7e313fa98a22d11045885db631871e1d17382297da401f2be7e4c957" dependencies = [ "object 0.28.4", "thiserror", @@ -6018,9 +6009,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67f1f2839f4f61509550e4ddcd0e658e19f3af862b51c79fda15549d735d659b" +checksum = "918d2f0bb5eaa95a80c06be33f21dee92f40f12cd0982da34490d121a99d244b" dependencies = [ "bytecheck", "enum-iterator", @@ -6034,9 +6025,9 @@ dependencies = [ [[package]] 
name = "wasmer-vm" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "043118ec4f16d1714fed3aab758b502b864bd865e1d5188626c9ad290100563f" +checksum = "a1e000c2cbd4f9805427af5f3b3446574caf89ab3a1e66c2f3579fbde22b072b" dependencies = [ "backtrace", "cc", @@ -6337,9 +6328,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.9" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81a2094c43cc94775293eaa0e499fbc30048a6d824ac82c0351a8c0bf9112529" +checksum = "81fac9742fd1ad1bd9643b991319f72dd031016d44b77039a26977eb667141e7" dependencies = [ "memchr", ] @@ -6377,9 +6368,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", + "proc-macro2 1.0.66", + "quote 1.0.31", + "syn 2.0.26", ] [[package]] From ff9a431591818461caa816af92e208f022a0040f Mon Sep 17 00:00:00 2001 From: AurelienFT <32803821+AurelienFT@users.noreply.github.com> Date: Fri, 21 Jul 2023 12:15:34 +0200 Subject: [PATCH 44/71] Add versioning of all object id and some tests (#4226) * Add unit test to ensure two addresses with the same version doesn't give the same thread * Add version to operation id * Format & clippy * Add version on all ids. * Add version on operations * Revert "Add version on operations" This reverts commit dca6ef206db7e1514427293fb533ff6c06f3c798. * Fix comparaison hashes operation merkle root * Update fromstr of block id and operation id * Fix compil error. 
* Add comment for length verification * remove from_bytes_without_version * fmt * Update serialization endorsement id --------- Co-authored-by: Thomas Plisson --- Cargo.lock | 3 + massa-bootstrap/src/tests/tools.rs | 2 +- massa-consensus-exports/src/error.rs | 2 + .../src/export_active_block.rs | 21 +- massa-consensus-worker/Cargo.toml | 1 + .../src/state/clique_computation.rs | 16 +- massa-consensus-worker/src/state/graph.rs | 15 +- massa-executed-ops/src/executed_ops.rs | 6 +- massa-execution-worker/Cargo.toml | 1 + massa-execution-worker/src/context.rs | 19 +- massa-factory-worker/Cargo.toml | 1 + massa-factory-worker/src/block_factory.rs | 11 +- massa-models/src/address.rs | 55 ++-- massa-models/src/block.rs | 43 +-- massa-models/src/block_header.rs | 24 +- massa-models/src/block_id.rs | 182 ++++++++++--- massa-models/src/clique.rs | 19 +- massa-models/src/denunciation.rs | 8 +- massa-models/src/endorsement.rs | 162 ++++++++--- massa-models/src/operation.rs | 254 ++++++++++++++---- massa-models/src/secure_share.rs | 3 +- massa-models/src/test_exports/data.rs | 14 +- massa-pool-worker/src/denunciation_pool.rs | 2 +- massa-pool-worker/src/tests/tools.rs | 2 +- .../src/test_exports/tools.rs | 24 +- .../src/handlers/block_handler/messages.rs | 15 +- .../src/handlers/block_handler/retrieval.rs | 17 +- .../src/tests/endorsements_scenarios.rs | 4 +- .../tests/in_block_operations_scenarios.rs | 4 +- 29 files changed, 658 insertions(+), 272 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 99e67014ce7..ea27d383800 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2741,6 +2741,7 @@ dependencies = [ "massa_pos_exports", "massa_pos_worker", "massa_protocol_exports", + "massa_serialization", "massa_signature", "massa_storage", "massa_time", @@ -2839,6 +2840,7 @@ dependencies = [ "massa_module_cache", "massa_pos_exports", "massa_pos_worker", + "massa_serialization", "massa_signature", "massa_storage", "massa_time", @@ -2887,6 +2889,7 @@ dependencies = [ 
"massa_pool_exports", "massa_pos_exports", "massa_protocol_exports", + "massa_serialization", "massa_signature", "massa_storage", "massa_time", diff --git a/massa-bootstrap/src/tests/tools.rs b/massa-bootstrap/src/tests/tools.rs index 7eba6fcbab7..21546aa263c 100644 --- a/massa-bootstrap/src/tests/tools.rs +++ b/massa-bootstrap/src/tests/tools.rs @@ -369,7 +369,7 @@ pub fn get_random_final_state_bootstrap( } pub fn get_dummy_block_id(s: &str) -> BlockId { - BlockId(Hash::compute_from(s.as_bytes())) + BlockId::generate_from_hash(Hash::compute_from(s.as_bytes())) } pub fn get_random_address() -> Address { diff --git a/massa-consensus-exports/src/error.rs b/massa-consensus-exports/src/error.rs index 294ee5b5b42..57ab44d771e 100644 --- a/massa-consensus-exports/src/error.rs +++ b/massa-consensus-exports/src/error.rs @@ -15,6 +15,8 @@ pub enum ConsensusError { ExecutionError(#[from] ExecutionError), /// models error: {0} ModelsError(#[from] ModelsError), + /// Serialization error: {0} + SerializationError(String), /// Could not create genesis block {0} GenesisCreationError(String), /// missing block {0} diff --git a/massa-consensus-exports/src/export_active_block.rs b/massa-consensus-exports/src/export_active_block.rs index 823b959aa40..5e30633aa6f 100644 --- a/massa-consensus-exports/src/export_active_block.rs +++ b/massa-consensus-exports/src/export_active_block.rs @@ -1,9 +1,8 @@ use crate::error::ConsensusError; -use massa_hash::HashDeserializer; use massa_models::{ active_block::ActiveBlock, block::{Block, BlockDeserializer, BlockDeserializerArgs, SecureShareBlock}, - block_id::BlockId, + block_id::{BlockId, BlockIdDeserializer, BlockIdSerializer}, prehash::PreHashMap, secure_share::{SecureShareDeserializer, SecureShareSerializer}, }; @@ -92,6 +91,7 @@ impl ExportActiveBlock { pub struct ExportActiveBlockSerializer { sec_share_serializer: SecureShareSerializer, period_serializer: U64VarIntSerializer, + block_id_serializer: BlockIdSerializer, } impl 
ExportActiveBlockSerializer { @@ -100,6 +100,7 @@ impl ExportActiveBlockSerializer { ExportActiveBlockSerializer { sec_share_serializer: SecureShareSerializer::new(), period_serializer: U64VarIntSerializer::new(), + block_id_serializer: BlockIdSerializer::new(), } } } @@ -117,7 +118,7 @@ impl Serializer for ExportActiveBlockSerializer { // note: there should be no parents for genesis blocks buffer.push(u8::from(!value.parents.is_empty())); for (hash, period) in value.parents.iter() { - buffer.extend(hash.0.to_bytes()); + self.block_id_serializer.serialize(hash, buffer)?; self.period_serializer.serialize(period, buffer)?; } @@ -131,7 +132,7 @@ impl Serializer for ExportActiveBlockSerializer { /// Basic deserializer of `ExportActiveBlock` pub struct ExportActiveBlockDeserializer { sec_share_block_deserializer: SecureShareDeserializer, - hash_deserializer: HashDeserializer, + block_id_deserializer: BlockIdDeserializer, period_deserializer: U64VarIntDeserializer, thread_count: u8, } @@ -146,7 +147,7 @@ impl ExportActiveBlockDeserializer { sec_share_block_deserializer: SecureShareDeserializer::new(BlockDeserializer::new( block_der_args, )), - hash_deserializer: HashDeserializer::new(), + block_id_deserializer: BlockIdDeserializer::new(), period_deserializer: U64VarIntDeserializer::new(Included(0), Included(u64::MAX)), thread_count, } @@ -168,7 +169,7 @@ impl Deserializer for ExportActiveBlockDeserializer { /// /// let keypair = KeyPair::generate(0).unwrap(); /// let parents = (0..THREAD_COUNT) - /// .map(|i| BlockId(Hash::compute_from(&[i]))) + /// .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) /// .collect(); /// /// // create block header @@ -184,7 +185,7 @@ impl Deserializer for ExportActiveBlockDeserializer { /// Endorsement { /// slot: Slot::new(1, 1), /// index: 1, - /// endorsed_block: BlockId(Hash::compute_from(&[1])), + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from(&[1])), /// }, /// EndorsementSerializer::new(), /// 
&keypair, @@ -194,7 +195,7 @@ impl Deserializer for ExportActiveBlockDeserializer { /// Endorsement { /// slot: Slot::new(1, 1), /// index: 3, - /// endorsed_block: BlockId(Hash::compute_from(&[1])), + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from(&[1])), /// }, /// EndorsementSerializer::new(), /// &keypair, @@ -250,9 +251,7 @@ impl Deserializer for ExportActiveBlockDeserializer { count( tuple(( context("Failed block_id deserialization", |input| { - self.hash_deserializer - .deserialize(input) - .map(|(rest, hash)| (rest, BlockId(hash))) + self.block_id_deserializer.deserialize(input) }), context("Failed period deserialization", |input| { self.period_deserializer.deserialize(input) diff --git a/massa-consensus-worker/Cargo.toml b/massa-consensus-worker/Cargo.toml index 8648f68a822..6bf5cfe6bc0 100644 --- a/massa-consensus-worker/Cargo.toml +++ b/massa-consensus-worker/Cargo.toml @@ -18,6 +18,7 @@ massa_channel = {workspace = true} massa_metrics = {workspace = true} massa_consensus_exports = {workspace = true} massa_models = {workspace = true} +massa_serialization = {workspace = true} massa_storage = {workspace = true} massa_signature = {workspace = true} massa_time = {workspace = true} diff --git a/massa-consensus-worker/src/state/clique_computation.rs b/massa-consensus-worker/src/state/clique_computation.rs index 3064ba10080..23008a7d5b1 100644 --- a/massa-consensus-worker/src/state/clique_computation.rs +++ b/massa-consensus-worker/src/state/clique_computation.rs @@ -85,9 +85,7 @@ mod tests { let mut gi_head = PreHashMap::default(); for i in 0..size { gi_head.insert( - BlockId::from_bytes( - massa_hash::Hash::compute_from(&i.to_be_bytes()).to_bytes(), - ), + BlockId::generate_from_hash(massa_hash::Hash::compute_from(&i.to_be_bytes())), PreHashSet::default(), ); } @@ -97,12 +95,12 @@ mod tests { let is_compatible = rng.gen_bool(0.5); if !is_compatible { - let i_id = BlockId::from_bytes( - 
massa_hash::Hash::compute_from(&i.to_be_bytes()).to_bytes(), - ); - let j_id = BlockId::from_bytes( - massa_hash::Hash::compute_from(&j.to_be_bytes()).to_bytes(), - ); + let i_id = BlockId::generate_from_hash(massa_hash::Hash::compute_from( + &i.to_be_bytes(), + )); + let j_id = BlockId::generate_from_hash(massa_hash::Hash::compute_from( + &j.to_be_bytes(), + )); // Add the incompatibility relationship to gi_head gi_head.entry(i_id).or_default().insert(j_id); gi_head.entry(j_id).or_default().insert(i_id); diff --git a/massa-consensus-worker/src/state/graph.rs b/massa-consensus-worker/src/state/graph.rs index ecb5d52d5f5..c56e59087be 100644 --- a/massa-consensus-worker/src/state/graph.rs +++ b/massa-consensus-worker/src/state/graph.rs @@ -5,7 +5,13 @@ use massa_consensus_exports::{ error::ConsensusError, }; use massa_logging::massa_trace; -use massa_models::{block_id::BlockId, clique::Clique, prehash::PreHashSet, slot::Slot}; +use massa_models::{ + block_id::{BlockId, BlockIdSerializer}, + clique::Clique, + prehash::PreHashSet, + slot::Slot, +}; +use massa_serialization::Serializer; use super::ConsensusState; @@ -49,6 +55,7 @@ impl ConsensusState { &mut self, add_block_id: &BlockId, ) -> Result { + let block_id_serializer = BlockIdSerializer::new(); let mut blockclique_i = 0usize; let mut max_clique_fitness = (0u64, num::BigInt::default()); for (clique_i, clique) in self.max_cliques.iter_mut().enumerate() { @@ -64,7 +71,11 @@ impl ConsensusState { .fitness .checked_add(fitness) .ok_or(ConsensusError::FitnessOverflow)?; - sum_hash -= num::BigInt::from_bytes_be(num::bigint::Sign::Plus, block_h.to_bytes()); + let mut bytes = Vec::new(); + block_id_serializer + .serialize(block_h, &mut bytes) + .map_err(|err| ConsensusError::SerializationError(err.to_string()))?; + sum_hash -= num::BigInt::from_bytes_be(num::bigint::Sign::Plus, &bytes); } let cur_fit = (clique.fitness, sum_hash); if cur_fit > max_clique_fitness { diff --git a/massa-executed-ops/src/executed_ops.rs 
b/massa-executed-ops/src/executed_ops.rs index 5273998ac5b..d30bb0b72fa 100644 --- a/massa-executed-ops/src/executed_ops.rs +++ b/massa-executed-ops/src/executed_ops.rs @@ -240,7 +240,7 @@ impl ExecutedOps { return false; } - let Ok((rest, _id)) = self.operation_id_deserializer.deserialize::(&serialized_key[EXECUTED_OPS_PREFIX.len()..]) else { + let Ok((rest, _id)): Result<(&[u8], OperationId), nom::Err> = self.operation_id_deserializer.deserialize::(&serialized_key[EXECUTED_OPS_PREFIX.len()..]) else { return false; }; if !rest.is_empty() { @@ -377,6 +377,7 @@ fn test_executed_ops_hash_computing() { pub struct ExecutedOpsSerializer { slot_serializer: SlotSerializer, u64_serializer: U64VarIntSerializer, + op_id_serializer: OperationIdSerializer, } impl Default for ExecutedOpsSerializer { @@ -391,6 +392,7 @@ impl ExecutedOpsSerializer { ExecutedOpsSerializer { slot_serializer: SlotSerializer::new(), u64_serializer: U64VarIntSerializer::new(), + op_id_serializer: OperationIdSerializer::new(), } } } @@ -412,7 +414,7 @@ impl Serializer>> for ExecutedOpsSerializ self.u64_serializer.serialize(&(ids.len() as u64), buffer)?; // slots ids for op_id in ids { - buffer.extend(op_id.to_bytes()); + self.op_id_serializer.serialize(op_id, buffer)?; } } Ok(()) diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index d941457d6d4..3f905f1e91b 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -40,6 +40,7 @@ massa_hash = {workspace = true} massa-sc-runtime = {workspace = true} massa_metrics = {workspace = true} massa_module_cache = {workspace = true} +massa_serialization = {workspace = true} massa_signature = {workspace = true} massa_time = {workspace = true} massa_ledger_exports = {workspace = true} diff --git a/massa-execution-worker/src/context.rs b/massa-execution-worker/src/context.rs index 802109b0aa7..9d96cdaf4ea 100644 --- a/massa-execution-worker/src/context.rs +++ b/massa-execution-worker/src/context.rs @@ 
-24,6 +24,7 @@ use massa_final_state::{FinalState, StateChanges}; use massa_hash::Hash; use massa_ledger_exports::{LedgerChanges, SetOrKeep}; use massa_models::address::ExecutionAddressCycleInfo; +use massa_models::block_id::BlockIdSerializer; use massa_models::bytecode::Bytecode; use massa_models::denunciation::DenunciationIndex; use massa_models::timeslots::get_block_slot_timestamp; @@ -37,6 +38,7 @@ use massa_models::{ }; use massa_module_cache::controller::ModuleCache; use massa_pos_exports::PoSChanges; +use massa_serialization::Serializer; use massa_versioning::address_factory::{AddressArgs, AddressFactory}; use massa_versioning::versioning::MipStore; use massa_versioning::versioning_factory::{FactoryStrategy, VersioningFactory}; @@ -1087,12 +1089,17 @@ fn generate_execution_trail_hash( &slot.to_bytes_key(), &[if read_only { 1u8 } else { 0u8 }, 0u8], ]), - Some(block_id) => massa_hash::Hash::compute_from_tuple(&[ - previous_execution_trail_hash.to_bytes(), - &slot.to_bytes_key(), - &[if read_only { 1u8 } else { 0u8 }, 1u8], - block_id.to_bytes(), - ]), + Some(block_id) => { + let mut bytes = Vec::new(); + let block_id_serializer = BlockIdSerializer::new(); + block_id_serializer.serialize(block_id, &mut bytes).unwrap(); + massa_hash::Hash::compute_from_tuple(&[ + previous_execution_trail_hash.to_bytes(), + &slot.to_bytes_key(), + &[if read_only { 1u8 } else { 0u8 }, 1u8], + &bytes, + ]) + } } } diff --git a/massa-factory-worker/Cargo.toml b/massa-factory-worker/Cargo.toml index 1baa1129b45..df65f1ba969 100644 --- a/massa-factory-worker/Cargo.toml +++ b/massa-factory-worker/Cargo.toml @@ -15,6 +15,7 @@ tracing = {workspace = true} massa_channel = {workspace = true} massa_models = {workspace = true} massa_factory_exports = {workspace = true} +massa_serialization = {workspace = true} massa_signature = {workspace = true} massa_storage = {workspace = true} massa_time = {workspace = true} diff --git a/massa-factory-worker/src/block_factory.rs 
b/massa-factory-worker/src/block_factory.rs index 2478b5caec9..89068261327 100644 --- a/massa-factory-worker/src/block_factory.rs +++ b/massa-factory-worker/src/block_factory.rs @@ -8,11 +8,13 @@ use massa_models::{ block_header::{BlockHeader, BlockHeaderSerializer, SecuredHeader}, block_id::BlockId, endorsement::SecureShareEndorsement, + operation::OperationIdSerializer, prehash::PreHashSet, secure_share::SecureShareContent, slot::Slot, timeslots::{get_block_slot_timestamp, get_closest_slot_to_timestamp}, }; +use massa_serialization::Serializer; use massa_time::MassaTime; use massa_versioning::versioning::MipStore; use massa_wallet::Wallet; @@ -27,6 +29,7 @@ pub(crate) struct BlockFactoryWorker { channels: FactoryChannels, factory_receiver: MassaReceiver<()>, mip_store: MipStore, + op_id_serializer: OperationIdSerializer, } impl BlockFactoryWorker { @@ -48,6 +51,7 @@ impl BlockFactoryWorker { channels, factory_receiver, mip_store, + op_id_serializer: OperationIdSerializer::new(), }; this.run(); }) @@ -223,7 +227,12 @@ impl BlockFactoryWorker { let global_operations_hash = Hash::compute_from( &op_ids .iter() - .flat_map(|op_id| *op_id.to_bytes()) + .flat_map(|op_id| { + let mut buffer = Vec::new(); + //It was a to_bytes() there before, we know the op is valid because it comes from the pool + self.op_id_serializer.serialize(op_id, &mut buffer).unwrap(); + buffer + }) .collect::>(), ); diff --git a/massa-models/src/address.rs b/massa-models/src/address.rs index 2e12cf7bb76..f5e36b106bd 100644 --- a/massa-models/src/address.rs +++ b/massa-models/src/address.rs @@ -307,10 +307,10 @@ impl UserAddress { match version { ::VERSION => Ok(UserAddressVariant!["0"]( - ::from_bytes_without_version(rest)?, + ::from_bytes(rest)?, )), ::VERSION => Ok(UserAddressVariant!["1"]( - ::from_bytes_without_version(rest)?, + ::from_bytes(rest)?, )), unhandled_version => Err(ModelsError::AddressParseError(format!( "version {} is not handled for UserAddress", @@ -358,7 +358,7 @@ impl 
UserAddress { } /// Deserialize the address without considering the version byte - fn from_bytes_without_version(data: &[u8]) -> Result { + fn from_bytes(data: &[u8]) -> Result { Ok(UserAddress(Hash::from_bytes(&data.try_into().map_err( |_| { ModelsError::BufferError(format!( @@ -404,12 +404,12 @@ impl SCAddress { })?; match version { - ::VERSION => Ok(SCAddressVariant!["0"]( - ::from_bytes_without_version(rest)?, - )), - ::VERSION => Ok(SCAddressVariant!["1"]( - ::from_bytes_without_version(rest)?, - )), + ::VERSION => { + Ok(SCAddressVariant!["0"](::from_bytes(rest)?)) + } + ::VERSION => { + Ok(SCAddressVariant!["1"](::from_bytes(rest)?)) + } unhandled_version => Err(ModelsError::AddressParseError(format!( "version {} is not handled for SCAddress", unhandled_version @@ -424,22 +424,6 @@ impl SCAddress { SCAddress::SCAddressV1(addr) => addr.to_prefixed_bytes(), } } - - /// Deserialize the address without considering the version byte - pub fn from_bytes_without_version(version: u64, data: &[u8]) -> Result { - match version { - ::VERSION => Ok(SCAddressVariant!["0"]( - ::from_bytes_without_version(data)?, - )), - ::VERSION => Ok(SCAddressVariant!["1"]( - ::from_bytes_without_version(data)?, - )), - unhandled_version => Err(ModelsError::AddressParseError(format!( - "version {} is not handled for SCAddress", - unhandled_version - ))), - } - } } #[transition::impl_version(versions("0", "1"))] @@ -468,7 +452,7 @@ impl SCAddress { } /// Deserialize the address without considering the version byte - fn from_bytes_without_version(data: &[u8]) -> Result { + fn from_bytes(data: &[u8]) -> Result { Ok(SCAddress(Hash::from_bytes(&data.try_into().map_err( |_| { ModelsError::BufferError(format!( @@ -580,6 +564,7 @@ impl Deserializer
for AddressDeserializer { &self, buffer: &'a [u8], ) -> IResult<&'a [u8], Address, E> { + // Verify that we at least have a version and something else if buffer.len() < 2 { return Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))); } @@ -608,6 +593,7 @@ impl Deserializer for AddressDeserializer { &self, buffer: &'a [u8], ) -> IResult<&'a [u8], UserAddress, E> { + // Verify that we at least have a version and something else if buffer.len() < 2 { return Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))); } @@ -650,6 +636,7 @@ impl Deserializer for AddressDeserializer { &self, buffer: &'a [u8], ) -> IResult<&'a [u8], SCAddress, E> { + // Verify that we at least have a version and something else if buffer.len() < 2 { return Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))); } @@ -704,6 +691,8 @@ pub struct ExecutionAddressCycleInfo { #[cfg(test)] mod test { + use crate::config::THREAD_COUNT; + use super::*; #[test] @@ -732,4 +721,18 @@ mod test { assert_eq!(addr, addr2); } + + #[test] + fn test_address_get_thread() { + let hash = massa_hash::Hash::compute_from(&"ADDR".as_bytes()); + + let user_addr_0 = Address::User(UserAddress::UserAddressV0(UserAddressV0(hash))); + let thread_addr_0 = user_addr_0.get_thread(THREAD_COUNT); + let hash = massa_hash::Hash::compute_from(&"ADDR2".as_bytes()); + + let user_addr_1 = Address::User(UserAddress::UserAddressV0(UserAddressV0(hash))); + let thread_addr_1 = user_addr_1.get_thread(THREAD_COUNT); + + assert_ne!(thread_addr_0, thread_addr_1); + } } diff --git a/massa-models/src/block.rs b/massa-models/src/block.rs index 5cf973db17f..bf68b899ebb 100644 --- a/massa-models/src/block.rs +++ b/massa-models/src/block.rs @@ -152,7 +152,7 @@ impl Serializer for BlockSerializer { /// use massa_serialization::{Serializer, Deserializer, DeserializeError}; /// let keypair = KeyPair::generate(0).unwrap(); /// let parents = (0..THREAD_COUNT) - /// .map(|i| BlockId(Hash::compute_from(&[i]))) + /// .map(|i| 
BlockId::generate_from_hash(Hash::compute_from(&[i]))) /// .collect(); /// /// // create block header @@ -168,7 +168,7 @@ impl Serializer for BlockSerializer { /// Endorsement { /// slot: Slot::new(1, 1), /// index: 1, - /// endorsed_block: BlockId(Hash::compute_from("blk1".as_bytes())), + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk1".as_bytes())), /// }, /// EndorsementSerializer::new(), /// &keypair, @@ -178,7 +178,7 @@ impl Serializer for BlockSerializer { /// Endorsement { /// slot: Slot::new(4, 0), /// index: 3, - /// endorsed_block: BlockId(Hash::compute_from("blk2".as_bytes())), + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk2".as_bytes())), /// }, /// EndorsementSerializer::new(), /// &keypair, @@ -255,7 +255,7 @@ impl Deserializer for BlockDeserializer { /// use massa_serialization::{Serializer, Deserializer, DeserializeError}; /// let keypair = KeyPair::generate(0).unwrap(); /// let parents: Vec = (0..THREAD_COUNT) - /// .map(|i| BlockId(Hash::compute_from(&[i]))) + /// .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) /// .collect(); /// /// // create block header @@ -438,7 +438,7 @@ mod test { KeyPair::from_str("S1bXjyPwrssNmG4oUG5SEqaUhQkVArQi7rzQDWpCprTSmEgZDGG").unwrap(); let parents = (0..THREAD_COUNT) .map(|_i| { - BlockId( + BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ) @@ -449,7 +449,7 @@ mod test { Endorsement { slot: Slot::new(1, 0), index: 0, - endorsed_block: BlockId( + endorsed_block: BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ), @@ -462,7 +462,7 @@ mod test { Endorsement { slot: Slot::new(1, 0), index: ENDORSEMENT_COUNT - 1, - endorsed_block: BlockId( + endorsed_block: BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ), @@ -641,7 +641,7 @@ mod test { let endorsement = 
Endorsement { slot: Slot::new(0, 1), index: 1, - endorsed_block: BlockId(Hash::compute_from(&[1])), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from(&[1])), }; // create block header @@ -703,7 +703,7 @@ mod test { fn test_invalid_genesis_block_serialization_with_parents() { let keypair = KeyPair::generate(0).unwrap(); let parents = (0..THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i]))) + .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) .collect(); // create block header @@ -810,7 +810,7 @@ mod test { let keypair = KeyPair::generate(0).unwrap(); // Non genesis block must have THREAD_COUNT parents let parents = (0..=THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i]))) + .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) .collect(); // create block header @@ -865,10 +865,11 @@ mod test { fn test_block_serialization_max_endo_count() { let keypair = KeyPair::from_str("S1bXjyPwrssNmG4oUG5SEqaUhQkVArQi7rzQDWpCprTSmEgZDGG").unwrap(); - let endorsed = BlockId( + let endorsed = BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf").unwrap(), ); - let fillers = (1..THREAD_COUNT).map(|i| BlockId(Hash::compute_from(&[i]))); + let fillers = + (1..THREAD_COUNT).map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))); let parents = std::iter::once(endorsed).chain(fillers).collect(); let endorsements = (0..ENDORSEMENT_COUNT) @@ -877,7 +878,7 @@ mod test { Endorsement { slot: Slot::new(1, 0), index: i, - endorsed_block: BlockId( + endorsed_block: BlockId::generate_from_hash( Hash::from_bs58_check( "bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf", ) @@ -944,7 +945,7 @@ mod test { let keypair = KeyPair::generate(0).unwrap(); // Non genesis block must have THREAD_COUNT parents let parents = (1..THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i]))) + .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) .collect(); // create block header @@ -999,7 
+1000,7 @@ mod test { let keypair = KeyPair::generate(0).unwrap(); // Non genesis block must have THREAD_COUNT parents let parents = (0..THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i]))) + .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) .collect(); let endorsements = (0..=ENDORSEMENT_COUNT) @@ -1008,7 +1009,7 @@ mod test { Endorsement { slot: Slot::new(0, 1), index: i, - endorsed_block: BlockId(Hash::compute_from(&[i as u8])), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from(&[i as u8])), }, EndorsementSerializer::new(), &keypair, @@ -1069,7 +1070,7 @@ mod test { KeyPair::from_str("S1bXjyPwrssNmG4oUG5SEqaUhQkVArQi7rzQDWpCprTSmEgZDGG").unwrap(); let parents = (0..THREAD_COUNT) .map(|_i| { - BlockId( + BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ) @@ -1080,7 +1081,7 @@ mod test { Endorsement { slot: Slot::new(1, 0), index: ENDORSEMENT_COUNT, - endorsed_block: BlockId( + endorsed_block: BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ), @@ -1142,7 +1143,7 @@ mod test { KeyPair::from_str("S1bXjyPwrssNmG4oUG5SEqaUhQkVArQi7rzQDWpCprTSmEgZDGG").unwrap(); let parents = (0..THREAD_COUNT) .map(|_i| { - BlockId( + BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ) @@ -1153,7 +1154,7 @@ mod test { Endorsement { slot: Slot::new(1, 0), index: 0, - endorsed_block: BlockId( + endorsed_block: BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ), @@ -1166,7 +1167,7 @@ mod test { Endorsement { slot: Slot::new(1, 0), index: 0, - endorsed_block: BlockId( + endorsed_block: BlockId::generate_from_hash( Hash::from_bs58_check("bq1NsaCBAfseMKSjNBYLhpK7M5eeef2m277MYS2P2k424GaDf") .unwrap(), ), diff --git a/massa-models/src/block_header.rs b/massa-models/src/block_header.rs index 
d9fd73a0f51..d2ccba1b14c 100644 --- a/massa-models/src/block_header.rs +++ b/massa-models/src/block_header.rs @@ -1,4 +1,4 @@ -use crate::block_id::BlockId; +use crate::block_id::{BlockId, BlockIdDeserializer, BlockIdSerializer}; use crate::denunciation::{Denunciation, DenunciationDeserializer, DenunciationSerializer}; use crate::endorsement::{ Endorsement, EndorsementDeserializerLW, EndorsementId, EndorsementSerializer, @@ -141,6 +141,7 @@ pub struct BlockHeaderSerializer { denunciation_serializer: DenunciationSerializer, u32_serializer: U32VarIntSerializer, opt_serializer: OptionSerializer, + block_id_serializer: BlockIdSerializer, } impl BlockHeaderSerializer { @@ -153,6 +154,7 @@ impl BlockHeaderSerializer { opt_serializer: OptionSerializer::new(U32VarIntSerializer), endorsement_content_serializer: EndorsementSerializerLW::new(), denunciation_serializer: DenunciationSerializer::new(), + block_id_serializer: BlockIdSerializer::new(), } } } @@ -176,7 +178,7 @@ impl Serializer for BlockHeaderSerializer { /// /// let keypair = KeyPair::generate(0).unwrap(); /// let parents = (0..THREAD_COUNT) - /// .map(|i| BlockId(Hash::compute_from(&[i]))) + /// .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) /// .collect(); /// let header = BlockHeader { /// current_version: 0, @@ -189,7 +191,7 @@ impl Serializer for BlockHeaderSerializer { /// Endorsement { /// slot: Slot::new(1, 1), /// index: 1, - /// endorsed_block: BlockId(Hash::compute_from("blk1".as_bytes())), + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk1".as_bytes())), /// }, /// EndorsementSerializer::new(), /// &keypair, @@ -199,7 +201,7 @@ impl Serializer for BlockHeaderSerializer { /// Endorsement { /// slot: Slot::new(4, 0), /// index: 3, - /// endorsed_block: BlockId(Hash::compute_from("blk2".as_bytes())), + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk2".as_bytes())), /// }, /// EndorsementSerializer::new(), /// &keypair, @@ -228,7 +230,7 @@ 
impl Serializer for BlockHeaderSerializer { buffer.push(1); } for parent_h in value.parents.iter() { - buffer.extend(parent_h.0.to_bytes()); + self.block_id_serializer.serialize(parent_h, buffer)?; } // operations merkle root @@ -276,12 +278,13 @@ pub struct BlockHeaderDeserializer { denunciation_deserializer: DenunciationDeserializer, network_versions_deserializer: U32VarIntDeserializer, opt_deserializer: OptionDeserializer, + block_id_deserializer: BlockIdDeserializer, } impl BlockHeaderDeserializer { /// Creates a new `BlockHeaderDeserializer` /// If last_start_period is Some(lsp), then the deserializer will check for valid (non)-genesis blocks - pub const fn new( + pub fn new( thread_count: u8, endorsement_count: u32, max_denunciations_in_block_header: u32, @@ -314,6 +317,7 @@ impl BlockHeaderDeserializer { thread_count, endorsement_count, ), + block_id_deserializer: BlockIdDeserializer::new(), thread_count, endorsement_count, last_start_period, @@ -334,7 +338,7 @@ impl Deserializer for BlockHeaderDeserializer { /// /// let keypair = KeyPair::generate(0).unwrap(); /// let parents: Vec = (0..THREAD_COUNT) - /// .map(|i| BlockId(Hash::compute_from(&[i]))) + /// .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) /// .collect(); /// let header = BlockHeader { /// current_version: 0, @@ -401,9 +405,7 @@ impl Deserializer for BlockHeaderDeserializer { tag(&[1]), count( context("Failed block_id deserialization", |input| { - self.hash_deserializer - .deserialize(input) - .map(|(rest, hash)| (rest, BlockId(hash))) + self.block_id_deserializer.deserialize(input) }), self.thread_count as usize, ), @@ -629,7 +631,7 @@ mod test { let slot = Slot::new(7, 1); let parents_1: Vec = (0..THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i]))) + .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) .collect(); let endorsement_1 = Endorsement { diff --git a/massa-models/src/block_id.rs b/massa-models/src/block_id.rs index 3ca096760da..e532caad302 100644 
--- a/massa-models/src/block_id.rs +++ b/massa-models/src/block_id.rs @@ -7,18 +7,17 @@ use massa_serialization::{ U64VarIntSerializer, }; use nom::{ - error::{context, ContextError, ParseError}, + error::{context, ContextError, ErrorKind, ParseError}, IResult, }; use serde_with::{DeserializeFromStr, SerializeDisplay}; use std::collections::Bound::Included; -use std::convert::TryInto; use std::str::FromStr; - -/// Size in bytes of a serialized block ID -const BLOCK_ID_SIZE_BYTES: usize = massa_hash::HASH_SIZE_BYTES; +use transition::Versioned; /// block id +#[allow(missing_docs)] +#[transition::versioned(versions("0"))] #[derive( Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, SerializeDisplay, DeserializeFromStr, )] @@ -28,24 +27,73 @@ impl PreHashed for BlockId {} impl Id for BlockId { fn new(hash: Hash) -> Self { - BlockId(hash) + BlockId::BlockIdV0(BlockIdV0(hash)) + } + + fn get_hash(&self) -> &Hash { + match self { + BlockId::BlockIdV0(block_id) => block_id.get_hash(), + } + } +} + +impl BlockId { + /// first bit of the hashed block id + pub fn get_first_bit(&self) -> bool { + match self { + BlockId::BlockIdV0(block_id) => block_id.get_first_bit(), + } + } + + /// version of the block id + pub fn get_version(&self) -> u64 { + match self { + BlockId::BlockIdV0(block_id) => block_id.get_version(), + } } + /// Generate a version 0 block id from an hash used only for tests + #[cfg(any(test, feature = "testing"))] + pub fn generate_from_hash(hash: Hash) -> BlockId { + BlockId::BlockIdV0(BlockIdV0(hash)) + } +} + +#[transition::impl_version(versions("0"))] +impl BlockId { fn get_hash(&self) -> &Hash { &self.0 } + + /// first bit of the hashed block id + pub fn get_first_bit(&self) -> bool { + self.0.to_bytes()[0] >> 7 == 1 + } + + /// version of the block id + pub fn get_version(&self) -> u64 { + Self::VERSION + } } const BLOCKID_PREFIX: char = 'B'; -const BLOCKID_VERSION: u64 = 0; +impl std::fmt::Display for BlockId { + fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + BlockId::BlockIdV0(block_id) => write!(f, "{}", block_id), + } + } +} + +#[transition::impl_version(versions("0"))] impl std::fmt::Display for BlockId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let u64_serializer = U64VarIntSerializer::new(); // might want to allocate the vector with capacity in order to avoid re-allocation let mut bytes: Vec = Vec::new(); u64_serializer - .serialize(&BLOCKID_VERSION, &mut bytes) + .serialize(&Self::VERSION, &mut bytes) .map_err(|_| std::fmt::Error)?; bytes.extend(self.0.to_bytes()); write!( @@ -70,8 +118,9 @@ impl FromStr for BlockId { /// # use massa_hash::Hash; /// # use std::str::FromStr; /// # use massa_models::block_id::BlockId; + /// # use crate::massa_models::secure_share::Id; /// # let hash = Hash::compute_from(b"test"); - /// # let block_id = BlockId(hash); + /// # let block_id = BlockId::new(hash); /// let ser = block_id.to_string(); /// let res_block_id = BlockId::from_str(&ser).unwrap(); /// assert_eq!(block_id, res_block_id); @@ -85,64 +134,92 @@ impl FromStr for BlockId { .with_check(None) .into_vec() .map_err(|_| ModelsError::BlockIdParseError)?; - let u64_deserializer = U64VarIntDeserializer::new(Included(0), Included(u64::MAX)); - let (rest, _version) = u64_deserializer + let block_id_deserializer = BlockIdDeserializer::new(); + let (rest, block_id) = block_id_deserializer .deserialize::(&decoded_bs58_check[..]) - .map_err(|_| ModelsError::BlockIdParseError)?; - Ok(BlockId(Hash::from_bytes( - rest.try_into() - .map_err(|_| ModelsError::BlockIdParseError)?, - ))) + .map_err(|_| ModelsError::OperationIdParseError)?; + if rest.is_empty() { + Ok(block_id) + } else { + Err(ModelsError::OperationIdParseError) + } } _ => Err(ModelsError::BlockIdParseError), } } } -impl BlockId { - /// block id to bytes - pub fn to_bytes(&self) -> &[u8; BLOCK_ID_SIZE_BYTES] { - self.0.to_bytes() - } - - /// block id into bytes - pub fn into_bytes(self) 
-> [u8; BLOCK_ID_SIZE_BYTES] { - self.0.into_bytes() - } - - /// block id from bytes - pub fn from_bytes(data: &[u8; BLOCK_ID_SIZE_BYTES]) -> BlockId { - BlockId(Hash::from_bytes(data)) - } - - /// first bit of the hashed block id - pub fn get_first_bit(&self) -> bool { - self.to_bytes()[0] >> 7 == 1 +#[transition::impl_version(versions("0"))] +impl FromStr for BlockId { + type Err = ModelsError; + fn from_str(s: &str) -> Result { + let mut chars = s.chars(); + match chars.next() { + Some(prefix) if prefix == BLOCKID_PREFIX => { + let data = chars.collect::(); + let decoded_bs58_check = bs58::decode(data) + .with_check(None) + .into_vec() + .map_err(|_| ModelsError::BlockIdParseError)?; + let block_id_deserializer = BlockIdDeserializer::new(); + let (rest, block_id) = block_id_deserializer + .deserialize::(&decoded_bs58_check[..]) + .map_err(|_| ModelsError::OperationIdParseError)?; + if rest.is_empty() { + Ok(block_id) + } else { + Err(ModelsError::OperationIdParseError) + } + } + _ => Err(ModelsError::BlockIdParseError), + } } } /// Serializer for `BlockId` #[derive(Default, Clone)] -pub struct BlockIdSerializer; +pub struct BlockIdSerializer { + version_serializer: U64VarIntSerializer, +} impl BlockIdSerializer { /// Creates a new serializer for `BlockId` pub fn new() -> Self { - Self + Self { + version_serializer: U64VarIntSerializer::new(), + } } } impl Serializer for BlockIdSerializer { fn serialize(&self, value: &BlockId, buffer: &mut Vec) -> Result<(), SerializeError> { - buffer.extend(value.to_bytes()); + self.version_serializer + .serialize(&value.get_version(), buffer)?; + match value { + BlockId::BlockIdV0(block_id) => self.serialize(block_id, buffer), + } + } +} + +#[transition::impl_version(versions("0"), structures("BlockId"))] +impl Serializer for BlockIdSerializer { + fn serialize(&self, value: &BlockId, buffer: &mut Vec) -> Result<(), SerializeError> { + buffer.extend(value.0.to_bytes()); Ok(()) } } /// Deserializer for `BlockId` 
-#[derive(Default, Clone)] +#[derive(Clone)] pub struct BlockIdDeserializer { hash_deserializer: HashDeserializer, + version_deserializer: U64VarIntDeserializer, +} + +impl Default for BlockIdDeserializer { + fn default() -> Self { + Self::new() + } } impl BlockIdDeserializer { @@ -150,10 +227,37 @@ impl BlockIdDeserializer { pub fn new() -> Self { Self { hash_deserializer: HashDeserializer::new(), + version_deserializer: U64VarIntDeserializer::new(Included(0), Included(u64::MAX)), + } + } +} + +impl Deserializer for BlockIdDeserializer { + fn deserialize<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( + &self, + buffer: &'a [u8], + ) -> IResult<&'a [u8], BlockId, E> { + // Verify that we at least have a version and something else + if buffer.len() < 2 { + return Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))); + } + let (rest, op_id_version) = + self.version_deserializer + .deserialize(buffer) + .map_err(|_: nom::Err| { + nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof)) + })?; + match op_id_version { + ::VERSION => { + let (rest, op_id) = self.deserialize(rest)?; + Ok((rest, BlockIdVariant!["0"](op_id))) + } + _ => Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))), } } } +#[transition::impl_version(versions("0"), structures("BlockId"))] impl Deserializer for BlockIdDeserializer { fn deserialize<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( &self, diff --git a/massa-models/src/clique.rs b/massa-models/src/clique.rs index 946084c766b..3fa76c31849 100644 --- a/massa-models/src/clique.rs +++ b/massa-models/src/clique.rs @@ -2,7 +2,6 @@ use core::usize; -use massa_hash::HashDeserializer; use massa_serialization::{ Deserializer, SerializeError, Serializer, U32VarIntDeserializer, U32VarIntSerializer, U64VarIntDeserializer, U64VarIntSerializer, @@ -17,7 +16,7 @@ use nom::sequence::tuple; use nom::{IResult, Parser}; use serde::{Deserialize, Serialize}; -use crate::block_id::BlockId; +use crate::block_id::{BlockId, 
BlockIdDeserializer, BlockIdSerializer}; use crate::prehash::PreHashSet; use std::ops::Bound::{Excluded, Included}; @@ -48,6 +47,7 @@ impl Default for Clique { pub struct CliqueSerializer { block_ids_length_serializer: U32VarIntSerializer, fitness_serializer: U64VarIntSerializer, + block_id_serializer: BlockIdSerializer, } impl CliqueSerializer { @@ -56,6 +56,7 @@ impl CliqueSerializer { Self { block_ids_length_serializer: U32VarIntSerializer::new(), fitness_serializer: U64VarIntSerializer::new(), + block_id_serializer: BlockIdSerializer::new(), } } } @@ -69,7 +70,7 @@ impl Serializer for CliqueSerializer { /// # use std::str::FromStr; /// # use massa_serialization::Serializer; /// # pub fn get_dummy_block_id(s: &str) -> BlockId { - /// # BlockId(Hash::compute_from(s.as_bytes())) + /// # BlockId::generate_from_hash(Hash::compute_from(s.as_bytes())) /// # } /// let clique = Clique { /// block_ids: vec![get_dummy_block_id("parent1"), get_dummy_block_id("parent2")].into_iter().collect(), @@ -84,7 +85,7 @@ impl Serializer for CliqueSerializer { self.block_ids_length_serializer .serialize(&(value.block_ids.len() as u32), buffer)?; for block_id in &value.block_ids { - buffer.extend(block_id.0.to_bytes()) + self.block_id_serializer.serialize(block_id, buffer)?; } self.fitness_serializer.serialize(&value.fitness, buffer)?; buffer.push(u8::from(value.is_blockclique)); @@ -95,7 +96,7 @@ impl Serializer for CliqueSerializer { /// Basic deserializer for `Clique` pub struct CliqueDeserializer { block_ids_length_deserializer: U32VarIntDeserializer, - block_id_deserializer: HashDeserializer, + block_id_deserializer: BlockIdDeserializer, fitness_deserializer: U64VarIntDeserializer, } @@ -107,7 +108,7 @@ impl CliqueDeserializer { Included(0), Excluded(max_bootstrap_blocks), ), - block_id_deserializer: HashDeserializer::new(), + block_id_deserializer: BlockIdDeserializer::new(), fitness_deserializer: U64VarIntDeserializer::new(Included(0), Included(u64::MAX)), } } @@ -122,7 +123,7 
@@ impl Deserializer for CliqueDeserializer { /// # use std::str::FromStr; /// # use massa_serialization::{Serializer, Deserializer, DeserializeError}; /// # pub fn get_dummy_block_id(s: &str) -> BlockId { - /// # BlockId(Hash::compute_from(s.as_bytes())) + /// # BlockId::generate_from_hash(Hash::compute_from(s.as_bytes())) /// # } /// let clique = Clique { /// block_ids: vec![get_dummy_block_id("parent1"), get_dummy_block_id("parent2")].into_iter().collect(), @@ -150,9 +151,7 @@ impl Deserializer for CliqueDeserializer { self.block_ids_length_deserializer.deserialize(input) }), context("Failed block_id deserialization", |input| { - self.block_id_deserializer - .deserialize(input) - .map(|(rest, hash)| (rest, BlockId(hash))) + self.block_id_deserializer.deserialize(input) }), ), context("Failed fitness deserialization", |input| { diff --git a/massa-models/src/denunciation.rs b/massa-models/src/denunciation.rs index a579dd85cb5..23bcc55a938 100644 --- a/massa-models/src/denunciation.rs +++ b/massa-models/src/denunciation.rs @@ -1221,7 +1221,7 @@ mod tests { let endorsement_4 = Endorsement { slot, index: 9, - endorsed_block: BlockId(Hash::compute_from("foo".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("foo".as_bytes())), }; let s_endorsement_4 = Endorsement::new_verifiable(endorsement_4, EndorsementSerializer::new(), &keypair) @@ -1257,7 +1257,7 @@ mod tests { let endorsement_4 = Endorsement { slot, index: 9, - endorsed_block: BlockId(Hash::compute_from("foo".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("foo".as_bytes())), }; let s_endorsement_4 = Endorsement::new_verifiable(endorsement_4, EndorsementSerializer::new(), &keypair) @@ -1304,7 +1304,7 @@ mod tests { let endorsement_1 = Endorsement { slot: slot_1, index: 0, - endorsed_block: BlockId(Hash::compute_from("blk1".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk1".as_bytes())), }; let s_endorsement_1: 
SecureShareEndorsement = @@ -1314,7 +1314,7 @@ mod tests { let endorsement_2 = Endorsement { slot: slot_2, index: 0, - endorsed_block: BlockId(Hash::compute_from("blk2".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk2".as_bytes())), }; let s_endorsement_2: SecureShareEndorsement = diff --git a/massa-models/src/endorsement.rs b/massa-models/src/endorsement.rs index 83772d1d361..307bbe5a023 100644 --- a/massa-models/src/endorsement.rs +++ b/massa-models/src/endorsement.rs @@ -1,5 +1,6 @@ // Copyright (c) 2022 MASSA LABS +use crate::block_id::{BlockIdDeserializer, BlockIdSerializer}; use crate::prehash::PreHashed; use crate::secure_share::{Id, SecureShare, SecureShareContent}; use crate::slot::{Slot, SlotDeserializer, SlotSerializer}; @@ -10,7 +11,7 @@ use massa_serialization::{ U32VarIntSerializer, U64VarIntDeserializer, U64VarIntSerializer, }; use massa_signature::PublicKey; -use nom::error::context; +use nom::error::{context, ErrorKind}; use nom::sequence::tuple; use nom::Parser; use nom::{ @@ -19,13 +20,17 @@ use nom::{ }; use serde::{Deserialize, Serialize}; use serde_with::{DeserializeFromStr, SerializeDisplay}; +use std::fmt::Formatter; use std::ops::Bound::{Excluded, Included}; use std::{fmt::Display, str::FromStr}; +use transition::Versioned; /// Endorsement ID size in bytes pub const ENDORSEMENT_ID_SIZE_BYTES: usize = massa_hash::HASH_SIZE_BYTES; /// endorsement id +#[allow(missing_docs)] +#[transition::versioned(versions("0"))] #[derive( Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, SerializeDisplay, DeserializeFromStr, )] @@ -38,14 +43,32 @@ impl PreHashed for EndorsementId {} impl Id for EndorsementId { fn new(hash: Hash) -> Self { - EndorsementId(hash) + EndorsementId::EndorsementIdV0(EndorsementIdV0(hash)) } + fn get_hash(&self) -> &Hash { + match self { + EndorsementId::EndorsementIdV0(endorsement_id) => endorsement_id.get_hash(), + } + } +} + +#[transition::impl_version(versions("0"))] +impl EndorsementId { fn 
get_hash(&self) -> &Hash { &self.0 } } +impl std::fmt::Display for EndorsementId { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + EndorsementId::EndorsementIdV0(id) => write!(f, "{}", id), + } + } +} + +#[transition::impl_version(versions("0"))] impl std::fmt::Display for EndorsementId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let u64_serializer = U64VarIntSerializer::new(); @@ -77,7 +100,8 @@ impl FromStr for EndorsementId { /// # use massa_hash::Hash; /// # use std::str::FromStr; /// # use massa_models::endorsement::EndorsementId; - /// # let endo_id = EndorsementId::from_bytes(&[0; 32]); + /// # use crate::massa_models::secure_share::Id; + /// # let endo_id = EndorsementId::new(Hash::compute_from("endo_id".as_bytes())); /// let ser = endo_id.to_string(); /// let res_endo_id = EndorsementId::from_str(&ser).unwrap(); /// assert_eq!(endo_id, res_endo_id); @@ -91,37 +115,100 @@ impl FromStr for EndorsementId { .with_check(None) .into_vec() .map_err(|_| ModelsError::EndorsementIdParseError)?; - let u64_deserializer = U64VarIntDeserializer::new(Included(0), Included(u64::MAX)); - let (rest, _version) = u64_deserializer + let endorsement_id_deserializer = EndorsementIdDeserializer::new(); + let (rest, endorsement_id) = endorsement_id_deserializer .deserialize::(&decoded_bs58_check[..]) .map_err(|_| ModelsError::EndorsementIdParseError)?; - Ok(EndorsementId(Hash::from_bytes( - rest.try_into() - .map_err(|_| ModelsError::EndorsementIdParseError)?, - ))) + if rest.is_empty() { + Ok(endorsement_id) + } else { + Err(ModelsError::EndorsementIdParseError) + } } _ => Err(ModelsError::EndorsementIdParseError), } } } -impl EndorsementId { - /// endorsement id to bytes - pub fn to_bytes(&self) -> &[u8; ENDORSEMENT_ID_SIZE_BYTES] { - self.0.to_bytes() +#[transition::impl_version(versions("0"))] +impl FromStr for EndorsementId { + type Err = ModelsError; + fn from_str(s: &str) -> Result { + let mut chars = s.chars(); + match 
chars.next() { + Some(prefix) if prefix == ENDORSEMENTID_PREFIX => { + let data = chars.collect::(); + let decoded_bs58_check = bs58::decode(data) + .with_check(None) + .into_vec() + .map_err(|_| ModelsError::EndorsementIdParseError)?; + let endorsement_id_deserializer = EndorsementIdDeserializer::new(); + let (rest, endorsement_id) = endorsement_id_deserializer + .deserialize::(&decoded_bs58_check[..]) + .map_err(|_| ModelsError::EndorsementIdParseError)?; + if rest.is_empty() { + Ok(endorsement_id) + } else { + Err(ModelsError::EndorsementIdParseError) + } + } + _ => Err(ModelsError::EndorsementIdParseError), + } } +} - /// endorsement id into bytes - pub fn into_bytes(self) -> [u8; ENDORSEMENT_ID_SIZE_BYTES] { - self.0.into_bytes() +struct EndorsementIdDeserializer { + version_deserializer: U64VarIntDeserializer, + hash_deserializer: HashDeserializer, +} + +impl EndorsementIdDeserializer { + pub fn new() -> Self { + Self { + version_deserializer: U64VarIntDeserializer::new(Included(0), Included(u64::MAX)), + hash_deserializer: HashDeserializer::new(), + } } +} - /// endorsement id from bytes - pub fn from_bytes(data: &[u8; ENDORSEMENT_ID_SIZE_BYTES]) -> EndorsementId { - EndorsementId(Hash::from_bytes(data)) +impl Deserializer for EndorsementIdDeserializer { + fn deserialize<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( + &self, + buffer: &'a [u8], + ) -> IResult<&'a [u8], EndorsementId, E> { + // Verify that we at least have a version and something else + if buffer.len() < 2 { + return Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))); + } + let (rest, endorsement_id_version) = self + .version_deserializer + .deserialize(buffer) + .map_err(|_: nom::Err| { + nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof)) + })?; + match endorsement_id_version { + ::VERSION => { + let (rest, endorsement_id) = self.deserialize(rest)?; + Ok((rest, EndorsementIdVariant!["0"](endorsement_id))) + } + _ => 
Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))), + } } } +#[transition::impl_version(versions("0"), structures("EndorsementId"))] +impl Deserializer for EndorsementIdDeserializer { + fn deserialize<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( + &self, + buffer: &'a [u8], + ) -> IResult<&'a [u8], EndorsementId, E> { + context("Failed OperationId deserialization", |input| { + self.hash_deserializer.deserialize(input) + }) + .map(EndorsementId) + .parse(buffer) + } +} impl Display for Endorsement { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { writeln!( @@ -183,14 +270,16 @@ impl SecureShareContent for Endorsement { pub struct EndorsementSerializer { slot_serializer: SlotSerializer, u32_serializer: U32VarIntSerializer, + block_id_serializer: BlockIdSerializer, } impl EndorsementSerializer { /// Creates a new `EndorsementSerializer` - pub const fn new() -> Self { + pub fn new() -> Self { EndorsementSerializer { slot_serializer: SlotSerializer::new(), u32_serializer: U32VarIntSerializer::new(), + block_id_serializer: BlockIdSerializer::new(), } } } @@ -211,7 +300,7 @@ impl Serializer for EndorsementSerializer { /// let endorsement = Endorsement { /// slot: Slot::new(1, 2), /// index: 0, - /// endorsed_block: BlockId(Hash::compute_from("test".as_bytes())) + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("test".as_bytes())) /// }; /// let mut buffer = Vec::new(); /// EndorsementSerializer::new().serialize(&endorsement, &mut buffer).unwrap(); @@ -219,7 +308,8 @@ impl Serializer for EndorsementSerializer { fn serialize(&self, value: &Endorsement, buffer: &mut Vec) -> Result<(), SerializeError> { self.slot_serializer.serialize(&value.slot, buffer)?; self.u32_serializer.serialize(&value.index, buffer)?; - buffer.extend(value.endorsed_block.0.to_bytes()); + self.block_id_serializer + .serialize(&value.endorsed_block, buffer)?; Ok(()) } } @@ -228,12 +318,12 @@ impl Serializer for EndorsementSerializer { pub 
struct EndorsementDeserializer { slot_deserializer: SlotDeserializer, index_deserializer: U32VarIntDeserializer, - hash_deserializer: HashDeserializer, + block_id_deserializer: BlockIdDeserializer, } impl EndorsementDeserializer { /// Creates a new `EndorsementDeserializer` - pub const fn new(thread_count: u8, endorsement_count: u32) -> Self { + pub fn new(thread_count: u8, endorsement_count: u32) -> Self { EndorsementDeserializer { slot_deserializer: SlotDeserializer::new( (Included(0), Included(u64::MAX)), @@ -243,7 +333,7 @@ impl EndorsementDeserializer { Included(0), Excluded(endorsement_count), ), - hash_deserializer: HashDeserializer::new(), + block_id_deserializer: BlockIdDeserializer::new(), } } } @@ -258,7 +348,7 @@ impl Deserializer for EndorsementDeserializer { /// let endorsement = Endorsement { /// slot: Slot::new(1, 2), /// index: 0, - /// endorsed_block: BlockId(Hash::compute_from("test".as_bytes())) + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("test".as_bytes())) /// }; /// let mut buffer = Vec::new(); /// EndorsementSerializer::new().serialize(&endorsement, &mut buffer).unwrap(); @@ -282,14 +372,14 @@ impl Deserializer for EndorsementDeserializer { self.index_deserializer.deserialize(input) }), context("Failed endorsed_block deserialization", |input| { - self.hash_deserializer.deserialize(input) + self.block_id_deserializer.deserialize(input) }), )), ) - .map(|(slot, index, hash_block_id)| Endorsement { + .map(|(slot, index, endorsed_block)| Endorsement { slot, index, - endorsed_block: BlockId::new(hash_block_id), + endorsed_block, }) .parse(buffer) } @@ -327,7 +417,7 @@ impl Serializer for EndorsementSerializerLW { /// let endorsement = Endorsement { /// slot: Slot::new(1, 2), /// index: 0, - /// endorsed_block: BlockId(Hash::compute_from("test".as_bytes())) + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("test".as_bytes())) /// }; /// let mut buffer = Vec::new(); /// 
EndorsementSerializerLW::new().serialize(&endorsement, &mut buffer).unwrap(); @@ -367,7 +457,7 @@ impl Deserializer for EndorsementDeserializerLW { /// use massa_hash::Hash; /// /// let slot = Slot::new(1, 2); - /// let endorsed_block = BlockId(Hash::compute_from("test".as_bytes())); + /// let endorsed_block = BlockId::generate_from_hash(Hash::compute_from("test".as_bytes())); /// let endorsement = Endorsement { /// slot: slot, /// index: 0, @@ -434,7 +524,7 @@ mod tests { let content = Endorsement { slot: Slot::new(10, 1), index: 0, - endorsed_block: BlockId(Hash::compute_from("blk".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk".as_bytes())), }; let endorsement: SecureShareEndorsement = Endorsement::new_verifiable(content, EndorsementSerializer::new(), &sender_keypair) @@ -459,7 +549,7 @@ mod tests { let content = Endorsement { slot: Slot::new(10, 1), index: 0, - endorsed_block: BlockId(Hash::compute_from("blk".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk".as_bytes())), }; let endorsement: SecureShareEndorsement = Endorsement::new_verifiable(content, EndorsementSerializerLW::new(), &sender_keypair) @@ -471,7 +561,7 @@ mod tests { .serialize(&endorsement, &mut ser_endorsement) .unwrap(); - let parent = BlockId(Hash::compute_from("blk".as_bytes())); + let parent = BlockId::generate_from_hash(Hash::compute_from("blk".as_bytes())); let (_, res_endorsement): (&[u8], SecureShareEndorsement) = SecureShareDeserializer::new( EndorsementDeserializerLW::new(1, Slot::new(10, 1), parent), @@ -490,7 +580,7 @@ mod tests { let content_1 = Endorsement { slot: Slot::new(10, 1), index: 0, - endorsed_block: BlockId(Hash::compute_from("blk1".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk1".as_bytes())), }; let s_endorsement_1: SecureShareEndorsement = Endorsement::new_verifiable(content_1, EndorsementSerializer::new(), &sender_keypair) @@ -507,7 +597,7 @@ mod tests { let 
content_2 = Endorsement { slot: Slot::new(2, 5), index: 0, - endorsed_block: BlockId(Hash::compute_from("blk2".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk2".as_bytes())), }; let s_endorsement_2: SecureShareEndorsement = Endorsement::new_verifiable(content_2, EndorsementSerializerLW::new(), &sender_keypair) diff --git a/massa-models/src/operation.rs b/massa-models/src/operation.rs index 94c337e8aa7..4d922d9e4b1 100644 --- a/massa-models/src/operation.rs +++ b/massa-models/src/operation.rs @@ -18,7 +18,7 @@ use massa_serialization::{ U16VarIntSerializer, U32VarIntDeserializer, U32VarIntSerializer, U64VarIntDeserializer, U64VarIntSerializer, }; -use nom::error::context; +use nom::error::{context, ErrorKind}; use nom::multi::length_count; use nom::sequence::tuple; use nom::AsBytes; @@ -33,33 +33,83 @@ use serde_with::{serde_as, DeserializeFromStr, SerializeDisplay}; use std::convert::TryInto; use std::fmt::Formatter; use std::{ops::Bound::Included, ops::RangeInclusive, str::FromStr}; +use transition::Versioned; /// Size in bytes of the serialized operation ID -pub const OPERATION_ID_SIZE_BYTES: usize = massa_hash::HASH_SIZE_BYTES; /// Size in bytes of the serialized operation ID prefix pub const OPERATION_ID_PREFIX_SIZE_BYTES: usize = 17; /// operation id +#[allow(missing_docs)] +#[transition::versioned(versions("0"))] #[derive( Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, SerializeDisplay, DeserializeFromStr, )] pub struct OperationId(Hash); const OPERATIONID_PREFIX: char = 'O'; -const OPERATIONID_VERSION: u64 = 0; /// Left part of the operation id hash stored in a vector of size [`OPERATION_ID_PREFIX_SIZE_BYTES`] +#[allow(missing_docs)] +#[transition::versioned(versions("0"))] #[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] +#[allow(unused_macros)] pub struct OperationPrefixId([u8; OPERATION_ID_PREFIX_SIZE_BYTES]); +impl std::fmt::Display for OperationPrefixId { + fn fmt(&self, f: &mut 
Formatter<'_>) -> std::fmt::Result { + match self { + OperationPrefixId::OperationPrefixIdV0(prefix) => write!(f, "{}", prefix), + } + } +} + +#[transition::impl_version(versions("0"))] +impl std::fmt::Display for OperationPrefixId { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + let u64_serializer = U64VarIntSerializer::new(); + // might want to allocate the vector with capacity in order to avoid re-allocation + let mut bytes: Vec = Vec::new(); + u64_serializer + .serialize(&Self::VERSION, &mut bytes) + .map_err(|_| std::fmt::Error)?; + bytes.extend(self.0.as_bytes()); + write!(f, "{}", bs58::encode(bytes).into_string()) + } +} + +impl std::fmt::Debug for OperationPrefixId { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + OperationPrefixId::OperationPrefixIdV0(prefix) => write!(f, "{:?}", prefix), + } + } +} + +#[transition::impl_version(versions("0"))] +impl std::fmt::Debug for OperationPrefixId { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}", self) + } +} + +impl std::fmt::Display for OperationId { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + OperationId::OperationIdV0(id) => write!(f, "{}", id), + } + } +} + +#[transition::impl_version(versions("0"))] impl std::fmt::Display for OperationId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let u64_serializer = U64VarIntSerializer::new(); // might want to allocate the vector with capacity in order to avoid re-allocation let mut bytes: Vec = Vec::new(); u64_serializer - .serialize(&OPERATIONID_VERSION, &mut bytes) + .serialize(&Self::VERSION, &mut bytes) .map_err(|_| std::fmt::Error)?; bytes.extend(self.0.to_bytes()); write!( @@ -72,20 +122,17 @@ impl std::fmt::Display for OperationId { } impl std::fmt::Debug for OperationId { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "{}", self) - } -} - -impl std::fmt::Display for OperationPrefixId { - fn 
fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "{}", bs58::encode(self.0.as_bytes()).into_string()) + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + OperationId::OperationIdV0(id) => write!(f, "{:?}", id), + } } } -impl std::fmt::Debug for OperationPrefixId { +#[transition::impl_version(versions("0"))] +impl std::fmt::Debug for OperationId { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - write!(f, "{}", bs58::encode(self.0.as_bytes()).into_string()) + write!(f, "{}", self) } } @@ -95,8 +142,10 @@ impl FromStr for OperationId { /// ```rust /// # use massa_hash::Hash; /// # use std::str::FromStr; - /// # use massa_models::operation::OperationId; - /// # let op_id = OperationId::from_bytes(&[0; 32]); + /// # use massa_serialization::{Deserializer, DeserializeError}; + /// # use massa_models::operation::{OperationId, OperationIdDeserializer}; + /// # let op_id_deserializer = OperationIdDeserializer::new(); + /// # let (_, op_id): (&[u8], OperationId) = op_id_deserializer.deserialize::(&[0; 33]).unwrap(); /// let ser = op_id.to_string(); /// let res_op_id = OperationId::from_str(&ser).unwrap(); /// assert_eq!(op_id, res_op_id); @@ -110,30 +159,59 @@ impl FromStr for OperationId { .with_check(None) .into_vec() .map_err(|_| ModelsError::OperationIdParseError)?; - let u64_deserializer = U64VarIntDeserializer::new(Included(0), Included(u64::MAX)); - let (rest, _version) = u64_deserializer + let operation_id_deserializer = OperationIdDeserializer::new(); + let (rest, op_id) = operation_id_deserializer .deserialize::(&decoded_bs58_check[..]) .map_err(|_| ModelsError::OperationIdParseError)?; - Ok(OperationId(Hash::from_bytes( - rest.try_into() - .map_err(|_| ModelsError::OperationIdParseError)?, - ))) + if rest.is_empty() { + Ok(op_id) + } else { + Err(ModelsError::OperationIdParseError) + } + } + _ => Err(ModelsError::OperationIdParseError), + } + } +} + +#[transition::impl_version(versions("0"))] 
+impl FromStr for OperationId { + type Err = ModelsError; + fn from_str(s: &str) -> Result { + let mut chars = s.chars(); + match chars.next() { + Some(prefix) if prefix == OPERATIONID_PREFIX => { + let data = chars.collect::(); + let decoded_bs58_check = bs58::decode(data) + .with_check(None) + .into_vec() + .map_err(|_| ModelsError::OperationIdParseError)?; + let operation_id_deserializer = OperationIdDeserializer::new(); + let (rest, op_id) = operation_id_deserializer + .deserialize::(&decoded_bs58_check[..]) + .map_err(|_| ModelsError::OperationIdParseError)?; + if rest.is_empty() { + Ok(op_id) + } else { + Err(ModelsError::OperationIdParseError) + } } _ => Err(ModelsError::OperationIdParseError), } } } -// note: would be probably unused after the merge of -// prefix impl PreHashed for OperationId {} + impl Id for OperationId { fn new(hash: Hash) -> Self { - OperationId(hash) + OperationId::OperationIdV0(OperationIdV0(hash)) } fn get_hash(&self) -> &Hash { - &self.0 + match self { + OperationId::OperationIdV0(op_id) => op_id.get_hash(), + } } } @@ -142,73 +220,108 @@ impl PreHashed for OperationPrefixId {} impl From<&[u8; OPERATION_ID_PREFIX_SIZE_BYTES]> for OperationPrefixId { /// get prefix of the operation id of size `OPERATION_ID_PREFIX_SIZE_BIT` fn from(bytes: &[u8; OPERATION_ID_PREFIX_SIZE_BYTES]) -> Self { - Self(*bytes) + OperationPrefixIdVariant!["0"](OperationPrefixId!["0"](*bytes)) } } impl From<&OperationPrefixId> for Vec { fn from(prefix: &OperationPrefixId) -> Self { - prefix.0.to_vec() + match prefix { + OperationPrefixId::OperationPrefixIdV0(prefix) => prefix.0.to_vec(), + } } } impl OperationId { - /// op id to bytes - pub fn to_bytes(&self) -> &[u8; OPERATION_ID_SIZE_BYTES] { - self.0.to_bytes() + /// convert the [`OperationId`] into a [`OperationPrefixId`] + pub fn into_prefix(self) -> OperationPrefixId { + match self { + OperationId::OperationIdV0(op_id) => op_id.into_prefix(), + } } - /// op id into bytes - pub fn into_bytes(self) -> [u8; 
OPERATION_ID_SIZE_BYTES] { - self.0.into_bytes() + /// get a prefix from the [`OperationId`] by copying it + pub fn prefix(&self) -> OperationPrefixId { + match self { + OperationId::OperationIdV0(op_id) => op_id.prefix(), + } } - /// op id from bytes - pub fn from_bytes(data: &[u8; OPERATION_ID_SIZE_BYTES]) -> OperationId { - OperationId(Hash::from_bytes(data)) + /// Get the version of the operation by looking at the first bytes of the id + pub fn get_version(&self) -> u64 { + match self { + OperationId::OperationIdV0(op_id) => op_id.get_version(), + } } +} +#[transition::impl_version(versions("0"))] +impl OperationId { /// convert the [`OperationId`] into a [`OperationPrefixId`] pub fn into_prefix(self) -> OperationPrefixId { - OperationPrefixId( + OperationPrefixId::OperationPrefixIdV0(OperationPrefixIdV0( self.0.into_bytes()[..OPERATION_ID_PREFIX_SIZE_BYTES] .try_into() .expect("failed to truncate prefix from OperationId"), - ) + )) } /// get a prefix from the [`OperationId`] by copying it pub fn prefix(&self) -> OperationPrefixId { - OperationPrefixId( + OperationPrefixId::OperationPrefixIdV0(OperationPrefixIdV0( self.0.to_bytes()[..OPERATION_ID_PREFIX_SIZE_BYTES] .try_into() .expect("failed to truncate prefix from OperationId"), - ) + )) + } + + fn get_hash(&self) -> &Hash { + &self.0 + } + + fn get_version(&self) -> u64 { + Self::VERSION } } /// Serializer for `OperationId` #[derive(Default, Clone)] -pub struct OperationIdSerializer; +pub struct OperationIdSerializer { + version_serializer: U64VarIntSerializer, +} impl OperationIdSerializer { /// Creates a new serializer for `OperationId` pub fn new() -> Self { - Self + Self { + version_serializer: U64VarIntSerializer::new(), + } + } +} + +impl Serializer for OperationIdSerializer { + fn serialize(&self, value: &OperationId, buffer: &mut Vec) -> Result<(), SerializeError> { + self.version_serializer + .serialize(&value.get_version(), buffer)?; + match value { + OperationId::OperationIdV0(id) => 
self.serialize(id, buffer), + } } } +#[transition::impl_version(versions("0"), structures("OperationId"))] impl Serializer for OperationIdSerializer { fn serialize(&self, value: &OperationId, buffer: &mut Vec) -> Result<(), SerializeError> { - buffer.extend(value.to_bytes()); + buffer.extend(value.0.to_bytes()); Ok(()) } } /// Deserializer for `OperationId` -#[derive(Default, Clone)] +#[derive(Clone)] pub struct OperationIdDeserializer { hash_deserializer: HashDeserializer, + version_deserializer: U64VarIntDeserializer, } impl OperationIdDeserializer { @@ -216,19 +329,53 @@ impl OperationIdDeserializer { pub fn new() -> Self { Self { hash_deserializer: HashDeserializer::new(), + version_deserializer: U64VarIntDeserializer::new(Included(0), Included(u64::MAX)), + } + } +} + +impl Default for OperationIdDeserializer { + fn default() -> Self { + Self::new() + } +} + +impl Deserializer for OperationIdDeserializer { + fn deserialize<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( + &self, + buffer: &'a [u8], + ) -> IResult<&'a [u8], OperationId, E> { + // Verify that we at least have a version and something else + if buffer.len() < 2 { + return Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))); + } + let (rest, op_id_version) = + self.version_deserializer + .deserialize(buffer) + .map_err(|_: nom::Err| { + nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof)) + })?; + match op_id_version { + ::VERSION => { + let (rest, op_id) = self.deserialize(rest)?; + Ok((rest, OperationIdVariant!["0"](op_id))) + } + _ => Err(nom::Err::Error(E::from_error_kind(buffer, ErrorKind::Eof))), } } } +#[transition::impl_version(versions("0"), structures("OperationId"))] impl Deserializer for OperationIdDeserializer { fn deserialize<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( &self, buffer: &'a [u8], ) -> IResult<&'a [u8], OperationId, E> { context("Failed OperationId deserialization", |input| { - let (rest, hash) = 
self.hash_deserializer.deserialize(input)?; - Ok((rest, OperationId(hash))) - })(buffer) + self.hash_deserializer.deserialize(input) + }) + .map(OperationId) + .parse(buffer) } } @@ -895,6 +1042,7 @@ pub type OperationPrefixIds = PreHashSet; /// Serializer for `Vec` pub struct OperationIdsSerializer { u32_serializer: U32VarIntSerializer, + op_id_serializer: OperationIdSerializer, } impl OperationIdsSerializer { @@ -902,6 +1050,7 @@ impl OperationIdsSerializer { pub fn new() -> Self { Self { u32_serializer: U32VarIntSerializer::new(), + op_id_serializer: OperationIdSerializer::new(), } } } @@ -937,7 +1086,7 @@ impl Serializer> for OperationIdsSerializer { })?; self.u32_serializer.serialize(&list_len, buffer)?; for hash in value { - buffer.extend(hash.into_bytes()); + self.op_id_serializer.serialize(hash, buffer)?; } Ok(()) } @@ -946,7 +1095,7 @@ impl Serializer> for OperationIdsSerializer { /// Deserializer for `Vec` pub struct OperationIdsDeserializer { length_deserializer: U32VarIntDeserializer, - hash_deserializer: HashDeserializer, + op_id_deserializer: OperationIdDeserializer, } impl OperationIdsDeserializer { @@ -957,7 +1106,7 @@ impl OperationIdsDeserializer { Included(0), Included(max_operations_per_message), ), - hash_deserializer: HashDeserializer::new(), + op_id_deserializer: OperationIdDeserializer::new(), } } } @@ -989,11 +1138,10 @@ impl Deserializer> for OperationIdsDeserializer { self.length_deserializer.deserialize(input) }), context("Failed OperationId deserialization", |input| { - self.hash_deserializer.deserialize(input) + self.op_id_deserializer.deserialize(input) }), ), ) - .map(|hashes| hashes.into_iter().map(OperationId).collect()) .parse(buffer) } } diff --git a/massa-models/src/secure_share.rs b/massa-models/src/secure_share.rs index de9d7a3771b..e1e283bae11 100644 --- a/massa-models/src/secure_share.rs +++ b/massa-models/src/secure_share.rs @@ -43,6 +43,7 @@ where } /// Used by signed structure +/// TODO: Make this trait use versions pub 
trait Id { /// New id from hash fn new(hash: Hash) -> Self; @@ -370,7 +371,7 @@ where /// let content = Endorsement { /// slot: Slot::new(10, 1), /// index: 0, - /// endorsed_block: BlockId(Hash::compute_from("blk".as_bytes())), + /// endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk".as_bytes())), /// }; /// let keypair = KeyPair::generate(0).unwrap(); /// let secured: SecureShare = Endorsement::new_verifiable( diff --git a/massa-models/src/test_exports/data.rs b/massa-models/src/test_exports/data.rs index df1bf95b2a2..9d207821567 100644 --- a/massa-models/src/test_exports/data.rs +++ b/massa-models/src/test_exports/data.rs @@ -27,7 +27,7 @@ pub fn gen_endorsements_for_denunciation( let endorsement_1 = Endorsement { slot, index: 0, - endorsed_block: BlockId(Hash::compute_from("blk1".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk1".as_bytes())), }; let v_endorsement1 = @@ -36,7 +36,7 @@ pub fn gen_endorsements_for_denunciation( let endorsement_2 = Endorsement { slot, index: 0, - endorsed_block: BlockId(Hash::compute_from("blk2".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk2".as_bytes())), }; let v_endorsement2 = @@ -45,7 +45,7 @@ pub fn gen_endorsements_for_denunciation( let endorsement_3 = Endorsement { slot, index: 0, - endorsed_block: BlockId(Hash::compute_from("blk3".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk3".as_bytes())), }; let v_endorsement_3 = Endorsement::new_verifiable(endorsement_3, EndorsementSerializer::new(), &keypair).unwrap(); @@ -68,19 +68,19 @@ pub fn gen_block_headers_for_denunciation( let slot = with_slot.unwrap_or(Slot::new(2, 1)); let parents_1: Vec = (0..THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i]))) + .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i]))) .collect(); let parents_2: Vec = (0..THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i + 1]))) + .map(|i| 
BlockId::generate_from_hash(Hash::compute_from(&[i + 1]))) .collect(); let parents_3: Vec = (0..THREAD_COUNT) - .map(|i| BlockId(Hash::compute_from(&[i + 2]))) + .map(|i| BlockId::generate_from_hash(Hash::compute_from(&[i + 2]))) .collect(); let endorsement_1 = Endorsement { slot: Slot::new(1, 1), index: 1, - endorsed_block: BlockId(Hash::compute_from("blk1".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk1".as_bytes())), }; let s_endorsement_1 = Endorsement::new_verifiable(endorsement_1, EndorsementSerializerLW::new(), &keypair) diff --git a/massa-pool-worker/src/denunciation_pool.rs b/massa-pool-worker/src/denunciation_pool.rs index 330b245eb7e..46e02c59a16 100644 --- a/massa-pool-worker/src/denunciation_pool.rs +++ b/massa-pool-worker/src/denunciation_pool.rs @@ -347,7 +347,7 @@ mod tests { let endorsement_1 = Endorsement { slot: Slot::new(u64::from(i), 0), index: i % ENDORSEMENT_COUNT, - endorsed_block: BlockId(Hash::compute_from("blk1".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blk1".as_bytes())), }; let s_endorsement1 = diff --git a/massa-pool-worker/src/tests/tools.rs b/massa-pool-worker/src/tests/tools.rs index 3e307705835..3c7bffc783d 100644 --- a/massa-pool-worker/src/tests/tools.rs +++ b/massa-pool-worker/src/tests/tools.rs @@ -170,7 +170,7 @@ pub fn _create_endorsement(slot: Slot) -> SecureShareEndorsement { let content = Endorsement { slot, index: 0, - endorsed_block: BlockId(Hash::compute_from("blabla".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from("blabla".as_bytes())), }; Endorsement::new_verifiable(content, EndorsementSerializer::new(), &sender_keypair).unwrap() } diff --git a/massa-protocol-exports/src/test_exports/tools.rs b/massa-protocol-exports/src/test_exports/tools.rs index 83fffd65841..557e581440d 100644 --- a/massa-protocol-exports/src/test_exports/tools.rs +++ b/massa-protocol-exports/src/test_exports/tools.rs @@ -2,7 +2,7 @@ use 
massa_hash::Hash; use massa_models::endorsement::EndorsementSerializer; -use massa_models::operation::OperationSerializer; +use massa_models::operation::{OperationIdSerializer, OperationSerializer}; use massa_models::secure_share::SecureShareContent; use massa_models::{ address::Address, @@ -14,6 +14,7 @@ use massa_models::{ operation::{Operation, OperationType, SecureShareOperation}, slot::Slot, }; +use massa_serialization::Serializer; use massa_signature::KeyPair; /// Creates a block for use in protocol, @@ -26,8 +27,8 @@ pub fn create_block(keypair: &KeyPair) -> SecureShareBlock { announced_version: None, slot: Slot::new(1, 0), parents: vec![ - BlockId(Hash::compute_from("Genesis 0".as_bytes())), - BlockId(Hash::compute_from("Genesis 1".as_bytes())), + BlockId::generate_from_hash(Hash::compute_from("Genesis 0".as_bytes())), + BlockId::generate_from_hash(Hash::compute_from("Genesis 1".as_bytes())), ], operation_merkle_root: Hash::compute_from(&Vec::new()), endorsements: Vec::new(), @@ -59,9 +60,14 @@ pub fn create_block_with_operations( slot: Slot, operations: Vec, ) -> SecureShareBlock { + let operation_id_serializer = OperationIdSerializer::new(); let operation_merkle_root = Hash::compute_from( &operations.iter().fold(Vec::new(), |acc, v| { - [acc, v.id.to_bytes().to_vec()].concat() + let mut bytes = Vec::new(); + operation_id_serializer + .serialize(&v.id, &mut bytes) + .unwrap(); + [acc, bytes].concat() })[..], ); let header = BlockHeader::new_verifiable( @@ -70,8 +76,8 @@ pub fn create_block_with_operations( announced_version: None, slot, parents: vec![ - BlockId(Hash::compute_from("Genesis 0".as_bytes())), - BlockId(Hash::compute_from("Genesis 1".as_bytes())), + BlockId::generate_from_hash(Hash::compute_from("Genesis 0".as_bytes())), + BlockId::generate_from_hash(Hash::compute_from("Genesis 1".as_bytes())), ], operation_merkle_root, endorsements: Vec::new(), @@ -110,8 +116,8 @@ pub fn create_block_with_endorsements( announced_version: None, slot, parents: 
vec![ - BlockId(Hash::compute_from("Genesis 0".as_bytes())), - BlockId(Hash::compute_from("Genesis 1".as_bytes())), + BlockId::generate_from_hash(Hash::compute_from("Genesis 0".as_bytes())), + BlockId::generate_from_hash(Hash::compute_from("Genesis 1".as_bytes())), ], operation_merkle_root: Hash::compute_from(&Vec::new()), endorsements, @@ -141,7 +147,7 @@ pub fn create_endorsement() -> SecureShareEndorsement { let content = Endorsement { slot: Slot::new(10, 1), index: 0, - endorsed_block: BlockId(Hash::compute_from(&[])), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from(&[])), }; Endorsement::new_verifiable(content, EndorsementSerializer::new(), &keypair).unwrap() } diff --git a/massa-protocol-worker/src/handlers/block_handler/messages.rs b/massa-protocol-worker/src/handlers/block_handler/messages.rs index c315be802be..b44c6aebaa6 100644 --- a/massa-protocol-worker/src/handlers/block_handler/messages.rs +++ b/massa-protocol-worker/src/handlers/block_handler/messages.rs @@ -1,7 +1,6 @@ -use massa_hash::HashDeserializer; use massa_models::{ block_header::{BlockHeader, BlockHeaderDeserializer, SecuredHeader}, - block_id::{BlockId, BlockIdSerializer}, + block_id::{BlockId, BlockIdDeserializer, BlockIdSerializer}, operation::{ OperationId, OperationIdSerializer, OperationIdsDeserializer, OperationsDeserializer, SecureShareOperation, @@ -195,7 +194,7 @@ pub struct BlockMessageDeserializer { id_deserializer: U64VarIntDeserializer, block_header_deserializer: SecureShareDeserializer, block_infos_length_deserializer: U64VarIntDeserializer, - hash_deserializer: HashDeserializer, + block_id_deserializer: BlockIdDeserializer, operation_ids_deserializer: OperationIdsDeserializer, operations_deserializer: OperationsDeserializer, } @@ -229,7 +228,7 @@ impl BlockMessageDeserializer { Included(0), Included(args.block_infos_length_max), ), - hash_deserializer: HashDeserializer::new(), + block_id_deserializer: BlockIdDeserializer::new(), operation_ids_deserializer: 
OperationIdsDeserializer::new( args.max_operations_per_block, ), @@ -277,9 +276,7 @@ impl Deserializer for BlockMessageDeserializer { "Failed Block infos deserialization", tuple(( context("Failed BlockId deserialization", |input| { - self.hash_deserializer - .deserialize(input) - .map(|(rest, id)| (rest, BlockId(id))) + self.block_id_deserializer.deserialize(input) }), context("Failed infos deserialization", |input| { let (rest, raw_id) = self.id_deserializer.deserialize(input)?; @@ -325,9 +322,7 @@ impl Deserializer for BlockMessageDeserializer { "Failed block infos deserialization", tuple(( context("Failed BlockId deserialization", |input| { - self.hash_deserializer - .deserialize(input) - .map(|(rest, id)| (rest, BlockId(id))) + self.block_id_deserializer.deserialize(input) }), context("Failed infos deserialization", |input| { let (rest, raw_id) = self.id_deserializer.deserialize(input)?; diff --git a/massa-protocol-worker/src/handlers/block_handler/retrieval.rs b/massa-protocol-worker/src/handlers/block_handler/retrieval.rs index e15530336a0..bd89b90eaaf 100644 --- a/massa-protocol-worker/src/handlers/block_handler/retrieval.rs +++ b/massa-protocol-worker/src/handlers/block_handler/retrieval.rs @@ -25,7 +25,7 @@ use crossbeam::{ }; use massa_channel::{receiver::MassaReceiver, sender::MassaSender}; use massa_consensus_exports::ConsensusController; -use massa_hash::{Hash, HASH_SIZE_BYTES}; +use massa_hash::Hash; use massa_logging::massa_trace; use massa_metrics::MassaMetrics; use massa_models::{ @@ -33,7 +33,7 @@ use massa_models::{ block_header::SecuredHeader, block_id::BlockId, endorsement::SecureShareEndorsement, - operation::{OperationId, SecureShareOperation}, + operation::{OperationId, OperationIdSerializer, SecureShareOperation}, prehash::{CapacityAllocator, PreHashMap, PreHashSet}, secure_share::{Id, SecureShare}, slot::Slot, @@ -872,12 +872,13 @@ impl RetrievalThread { } return Ok(()); } - let mut total_hash: Vec = - 
Vec::with_capacity(operation_ids.len().saturating_mul(HASH_SIZE_BYTES)); - operation_ids.iter().for_each(|op_id| { - let op_hash = op_id.get_hash().into_bytes(); - total_hash.extend(op_hash); - }); + let mut total_hash: Vec = Vec::new(); + let op_id_serializer = OperationIdSerializer::new(); + for op_id in operation_ids.iter() { + op_id_serializer + .serialize(op_id, &mut total_hash) + .map_err(|err| ProtocolError::GeneralProtocolError(err.to_string()))?; + } // Check operation_list against expected operations hash from header. if header.content.operation_merkle_root == Hash::compute_from(&total_hash) { diff --git a/massa-protocol-worker/src/tests/endorsements_scenarios.rs b/massa-protocol-worker/src/tests/endorsements_scenarios.rs index 704016b0074..b67c11b9737 100644 --- a/massa-protocol-worker/src/tests/endorsements_scenarios.rs +++ b/massa-protocol-worker/src/tests/endorsements_scenarios.rs @@ -288,7 +288,9 @@ fn test_protocol_propagates_endorsements_only_to_nodes_that_dont_know_about_it_b let content = Endorsement { slot: Slot::new(1, 1), index: 0, - endorsed_block: BlockId(Hash::compute_from("Genesis 1".as_bytes())), + endorsed_block: BlockId::generate_from_hash(Hash::compute_from( + "Genesis 1".as_bytes(), + )), }; let endorsement = Endorsement::new_verifiable(content, EndorsementSerializer::new(), &node_a_keypair) diff --git a/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs b/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs index b7ab0b26e48..aeef472d66d 100644 --- a/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs +++ b/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs @@ -199,8 +199,8 @@ fn test_protocol_does_propagate_operations_received_in_blocks() { // current_version: 0, // slot: Slot::new(1, op_thread), // parents: vec![ -// BlockId(Hash::compute_from("Genesis 0".as_bytes())), -// BlockId(Hash::compute_from("Genesis 1".as_bytes())), +// 
BlockId::generate_from_hash(Hash::compute_from("Genesis 0".as_bytes())), +// BlockId::generate_from_hash(Hash::compute_from("Genesis 1".as_bytes())), // ], // denunciations: Vec::new(), // operation_merkle_root, From d935fc8a7140de059bc297567d55fdab5e9f738e Mon Sep 17 00:00:00 2001 From: Damir Vodenicarevic Date: Mon, 24 Jul 2023 08:31:22 +0200 Subject: [PATCH 45/71] improve block propagation (#4252) * improve block propagation (init) * improve block propagation * improve block propagation * improve block propagation: config and some errors * improved block retrieval * improve block propag * block propagation updates * undo merkle renaming * unite operation note_operations_from_peer * markers * update ask blocks * add max peer load * comments * simplify retrieval * fix retrieval * improve caches * debug endorsement propagation * improve endorsement propag * integrate endorsement handler * formatting * make tests compile * fix tests * clippy * correct root hash --- massa-factory-worker/src/block_factory.rs | 17 +- massa-final-state/src/tests/scenarios.rs | 22 +- massa-models/src/address.rs | 6 +- massa-models/src/block.rs | 8 +- massa-models/src/block_header.rs | 10 +- massa-models/src/config/constants.rs | 2 - massa-models/src/denunciation.rs | 53 +- massa-models/src/operation.rs | 23 + massa-models/src/test_exports/data.rs | 12 +- massa-module-cache/src/hd_cache.rs | 4 +- massa-node/base_config/config.toml | 6 + massa-node/src/main.rs | 32 +- massa-node/src/settings.rs | 10 +- massa-node/src/tests/config.toml | 1 - massa-pos-exports/src/pos_final_state.rs | 4 +- massa-protocol-exports/src/error.rs | 2 + massa-protocol-exports/src/settings.rs | 10 +- .../src/test_exports/config.rs | 5 +- .../src/test_exports/tools.rs | 18 +- massa-protocol-worker/src/connectivity.rs | 1 - .../src/handlers/block_handler/cache.rs | 84 +- .../src/handlers/block_handler/messages.rs | 349 ++- .../src/handlers/block_handler/mod.rs | 3 +- .../src/handlers/block_handler/propagation.rs | 
251 ++- .../src/handlers/block_handler/retrieval.rs | 1863 +++++++---------- .../src/handlers/endorsement_handler/cache.rs | 66 +- .../src/handlers/endorsement_handler/mod.rs | 12 +- .../endorsement_handler/propagation.rs | 229 +- .../handlers/endorsement_handler/retrieval.rs | 364 ++-- .../src/handlers/operation_handler/cache.rs | 19 + .../src/handlers/operation_handler/mod.rs | 1 + .../handlers/operation_handler/propagation.rs | 37 +- .../handlers/operation_handler/retrieval.rs | 243 +-- .../src/handlers/peer_handler/models.rs | 1 - .../src/tests/ban_nodes_scenarios.rs | 175 +- .../src/tests/block_scenarios.rs | 138 +- .../src/tests/cache_scenarios.rs | 53 +- .../src/tests/endorsements_scenarios.rs | 2 +- .../tests/in_block_operations_scenarios.rs | 2 +- .../src/tests/operations_scenarios.rs | 155 +- massa-protocol-worker/src/tests/tools.rs | 51 +- 41 files changed, 1966 insertions(+), 2378 deletions(-) diff --git a/massa-factory-worker/src/block_factory.rs b/massa-factory-worker/src/block_factory.rs index 89068261327..1c56d543555 100644 --- a/massa-factory-worker/src/block_factory.rs +++ b/massa-factory-worker/src/block_factory.rs @@ -2,19 +2,17 @@ use massa_channel::receiver::MassaReceiver; use massa_factory_exports::{FactoryChannels, FactoryConfig}; -use massa_hash::Hash; use massa_models::{ block::{Block, BlockSerializer}, block_header::{BlockHeader, BlockHeaderSerializer, SecuredHeader}, block_id::BlockId, endorsement::SecureShareEndorsement, - operation::OperationIdSerializer, + operation::{compute_operations_hash, OperationIdSerializer}, prehash::PreHashSet, secure_share::SecureShareContent, slot::Slot, timeslots::{get_block_slot_timestamp, get_closest_slot_to_timestamp}, }; -use massa_serialization::Serializer; use massa_time::MassaTime; use massa_versioning::versioning::MipStore; use massa_wallet::Wallet; @@ -224,17 +222,6 @@ impl BlockFactoryWorker { } block_storage.extend(op_storage); - let global_operations_hash = Hash::compute_from( - &op_ids - 
.iter() - .flat_map(|op_id| { - let mut buffer = Vec::new(); - //It was a to_bytes() there before, we know the op is valid because it comes from the pool - self.op_id_serializer.serialize(op_id, &mut buffer).unwrap(); - buffer - }) - .collect::>(), - ); // create header let current_version = self.mip_store.get_network_version_current(); @@ -245,7 +232,7 @@ impl BlockFactoryWorker { announced_version, slot, parents: parents.into_iter().map(|(id, _period)| id).collect(), - operation_merkle_root: global_operations_hash, + operation_merkle_root: compute_operations_hash(&op_ids, &self.op_id_serializer), endorsements, denunciations: self.channels.pool.get_block_denunciations(&slot), }, diff --git a/massa-final-state/src/tests/scenarios.rs b/massa-final-state/src/tests/scenarios.rs index ace75f0b66d..115e1291cbd 100644 --- a/massa-final-state/src/tests/scenarios.rs +++ b/massa-final-state/src/tests/scenarios.rs @@ -96,9 +96,8 @@ fn create_final_state(temp_dir: &TempDir, reset_final_state: bool) -> Arc Arc(&buffer).unwrap(); - assert_eq!(rem.is_empty(), true); + assert!(rem.is_empty()); assert_eq!(block_header_1, block_header_der); } diff --git a/massa-models/src/config/constants.rs b/massa-models/src/config/constants.rs index 5537baeeab9..dae8844edbd 100644 --- a/massa-models/src/config/constants.rs +++ b/massa-models/src/config/constants.rs @@ -34,8 +34,6 @@ pub const SIGNATURE_DESER_SIZE: usize = 64 + 1; pub const MAX_ADVERTISE_LENGTH: u32 = 10000; /// Maximum message length in bytes pub const MAX_MESSAGE_SIZE: u32 = 1048576000; -/// Max number of hash in the message `AskForBlocks` -pub const MAX_ASK_BLOCKS_PER_MESSAGE: u32 = 128; /// Max number of operations per message pub const MAX_OPERATIONS_PER_MESSAGE: u32 = 1024; /// Length of the handshake random signature diff --git a/massa-models/src/denunciation.rs b/massa-models/src/denunciation.rs index 23bcc55a938..9d0939fced1 100644 --- a/massa-models/src/denunciation.rs +++ b/massa-models/src/denunciation.rs @@ -1208,8 
+1208,8 @@ mod tests { gen_endorsements_for_denunciation(None, None); let denunciation: Denunciation = (&s_endorsement_1, &s_endorsement_2).try_into().unwrap(); - assert_eq!(denunciation.is_for_endorsement(), true); - assert_eq!(denunciation.is_valid(), true); + assert!(denunciation.is_for_endorsement()); + assert!(denunciation.is_valid()); } #[test] @@ -1250,8 +1250,8 @@ mod tests { let denunciation: Denunciation = (&s_endorsement_1, &s_endorsement_2).try_into().unwrap(); - assert_eq!(denunciation.is_for_endorsement(), true); - assert_eq!(denunciation.is_valid(), true); + assert!(denunciation.is_for_endorsement()); + assert!(denunciation.is_valid()); // Try to create a denunciation from 2 endorsements @ != index let endorsement_4 = Endorsement { @@ -1263,19 +1263,13 @@ mod tests { Endorsement::new_verifiable(endorsement_4, EndorsementSerializer::new(), &keypair) .unwrap(); - assert_eq!( - denunciation - .is_also_for_endorsement(&s_endorsement_4) - .unwrap(), - false - ); - assert_eq!( - denunciation - .is_also_for_endorsement(&s_endorsement_3) - .unwrap(), - true - ); - assert_eq!(denunciation.is_valid(), true); + assert!(!denunciation + .is_also_for_endorsement(&s_endorsement_4) + .unwrap()); + assert!(denunciation + .is_also_for_endorsement(&s_endorsement_3) + .unwrap()); + assert!(denunciation.is_valid()); } #[test] @@ -1285,14 +1279,11 @@ mod tests { gen_block_headers_for_denunciation(None, None); let denunciation: Denunciation = (&s_block_header_1, &s_block_header_2).try_into().unwrap(); - assert_eq!(denunciation.is_for_block_header(), true); - assert_eq!(denunciation.is_valid(), true); - assert_eq!( - denunciation - .is_also_for_block_header(&s_block_header_3) - .unwrap(), - true - ); + assert!(denunciation.is_for_block_header()); + assert!(denunciation.is_valid()); + assert!(denunciation + .is_also_for_block_header(&s_block_header_3) + .unwrap()); } #[test] @@ -1333,7 +1324,7 @@ mod tests { }); // hash_1 == hash_2 -> this is invalid - 
assert_eq!(de_forged_1.is_valid(), false); + assert!(!de_forged_1.is_valid()); // from an attacker - building manually a Denunciation object let de_forged_2 = Denunciation::Endorsement(EndorsementDenunciation { @@ -1348,7 +1339,7 @@ mod tests { // An attacker uses an old s_endorsement_1 to forge a Denunciation object @ slot_2 // This has to be detected if Denunciation are send via the network - assert_eq!(de_forged_2.is_valid(), false); + assert!(!de_forged_2.is_valid()); } // SER / DER @@ -1370,7 +1361,7 @@ mod tests { let (rem, de_der_res) = de_der.deserialize::(&buffer).unwrap(); - assert_eq!(rem.is_empty(), true); + assert!(rem.is_empty()); assert_eq!(de, de_der_res); } Denunciation::BlockHeader(_) => { @@ -1398,7 +1389,7 @@ mod tests { let (rem, de_der_res) = de_der.deserialize::(&buffer).unwrap(); - assert_eq!(rem.is_empty(), true); + assert!(rem.is_empty()); assert_eq!(de, de_der_res); } } @@ -1418,7 +1409,7 @@ mod tests { let (rem, de_der_res) = de_der.deserialize::(&buffer).unwrap(); - assert_eq!(rem.is_empty(), true); + assert!(rem.is_empty()); assert_eq!(denunciation, de_der_res); let (_, _, s_endorsement_1, s_endorsement_2, _) = @@ -1428,7 +1419,7 @@ mod tests { de_ser.serialize(&denunciation, &mut buffer).unwrap(); let (rem, de_der_res) = de_der.deserialize::(&buffer).unwrap(); - assert_eq!(rem.is_empty(), true); + assert!(rem.is_empty()); assert_eq!(denunciation, de_der_res); } diff --git a/massa-models/src/operation.rs b/massa-models/src/operation.rs index 4d922d9e4b1..f8b608ff022 100644 --- a/massa-models/src/operation.rs +++ b/massa-models/src/operation.rs @@ -1439,6 +1439,29 @@ impl Deserializer> for OperationsDeserializer { } } +/// Compute the hash of a list of operations(used typically in block headers) +pub fn compute_operations_hash( + op_ids: &[OperationId], + op_id_serializer: &OperationIdSerializer, +) -> Hash { + let op_ids = op_ids + .iter() + .map(|op_id| { + let mut serialized = Vec::new(); + op_id_serializer + .serialize(op_id, &mut 
serialized) + .expect("serialization of operation id should not fail"); + serialized + }) + .collect::>>(); + massa_hash::Hash::compute_from_tuple( + &op_ids + .iter() + .map(|data| data.as_slice()) + .collect::>(), + ) +} + #[cfg(test)] mod tests { use crate::config::{ diff --git a/massa-models/src/test_exports/data.rs b/massa-models/src/test_exports/data.rs index 9d207821567..0d834bd42da 100644 --- a/massa-models/src/test_exports/data.rs +++ b/massa-models/src/test_exports/data.rs @@ -50,13 +50,13 @@ pub fn gen_endorsements_for_denunciation( let v_endorsement_3 = Endorsement::new_verifiable(endorsement_3, EndorsementSerializer::new(), &keypair).unwrap(); - return ( + ( slot, keypair, v_endorsement1, v_endorsement2, v_endorsement_3, - ); + ) } /// Helper to generate block headers ready for denunciation @@ -128,7 +128,7 @@ pub fn gen_block_headers_for_denunciation( slot, parents: parents_3, operation_merkle_root: Hash::compute_from("mno".as_bytes()), - endorsements: vec![s_endorsement_1.clone()], + endorsements: vec![s_endorsement_1], denunciations: vec![], }; @@ -140,11 +140,11 @@ pub fn gen_block_headers_for_denunciation( ) .expect("error while producing block header"); - return ( + ( slot, keypair, - s_block_header_1.clone(), + s_block_header_1, s_block_header_2, s_block_header_3, - ); + ) } diff --git a/massa-module-cache/src/hd_cache.rs b/massa-module-cache/src/hd_cache.rs index ed8c5825d21..7e8cff5b8f0 100644 --- a/massa-module-cache/src/hd_cache.rs +++ b/massa-module-cache/src/hd_cache.rs @@ -252,7 +252,7 @@ mod tests { let init_cost = 100; let gas_costs = GasCosts::default(); - cache.insert(hash, module.clone()); + cache.insert(hash, module); let cached_module_v1 = cache.get(hash, limit, gas_costs.clone()).unwrap(); assert!(matches!(cached_module_v1, ModuleInfo::Module(_))); @@ -280,7 +280,7 @@ mod tests { // insert one more entry let key = Hash::compute_from(cache.max_entry_count.to_string().as_bytes()); - cache.insert(key, module.clone()); + 
cache.insert(key, module); assert_eq!( cache.entry_count, cache.max_entry_count - cache.snip_amount + 1 diff --git a/massa-node/base_config/config.toml b/massa-node/base_config/config.toml index af80e3d41ff..0dd35c83559 100644 --- a/massa-node/base_config/config.toml +++ b/massa-node/base_config/config.toml @@ -164,6 +164,12 @@ message_timeout = 5000 # timeout after whick we consider a node does not have the block we asked for ask_block_timeout = 10000 + # Max known blocks we keep during their propagation + max_blocks_kept_for_propagation = 300 + # Time during which a block is expected to propagate (in milliseconds) + max_block_propagation_time = 40000 + # Block propagation tick interval, useful for propagating blocks quickly to newly connected peers (in milliseconds) + block_propagation_tick = 1000 # max cache size for which blocks our node knows about max_known_blocks_size = 1024 # max cache size for which blocks a foreign node knows about diff --git a/massa-node/src/main.rs b/massa-node/src/main.rs index bbe22232771..9affb7e6db0 100644 --- a/massa-node/src/main.rs +++ b/massa-node/src/main.rs @@ -46,20 +46,19 @@ use massa_models::config::constants::{ BLOCK_REWARD, BOOTSTRAP_RANDOMNESS_SIZE_BYTES, CHANNEL_SIZE, CONSENSUS_BOOTSTRAP_PART_SIZE, DELTA_F0, DENUNCIATION_EXPIRE_PERIODS, ENDORSEMENT_COUNT, END_TIMESTAMP, GENESIS_KEY, GENESIS_TIMESTAMP, INITIAL_DRAW_SEED, LEDGER_COST_PER_BYTE, LEDGER_ENTRY_BASE_COST, - LEDGER_ENTRY_DATASTORE_BASE_SIZE, MAX_ADVERTISE_LENGTH, MAX_ASK_BLOCKS_PER_MESSAGE, - MAX_ASYNC_GAS, MAX_ASYNC_MESSAGE_DATA, MAX_ASYNC_POOL_LENGTH, MAX_BLOCK_SIZE, - MAX_BOOTSTRAP_ASYNC_POOL_CHANGES, MAX_BOOTSTRAP_BLOCKS, MAX_BOOTSTRAP_ERROR_LENGTH, - MAX_BYTECODE_LENGTH, MAX_CONSENSUS_BLOCKS_IDS, MAX_DATASTORE_ENTRY_COUNT, - MAX_DATASTORE_KEY_LENGTH, MAX_DATASTORE_VALUE_LENGTH, MAX_DEFERRED_CREDITS_LENGTH, - MAX_DENUNCIATIONS_PER_BLOCK_HEADER, MAX_DENUNCIATION_CHANGES_LENGTH, - MAX_ENDORSEMENTS_PER_MESSAGE, MAX_EXECUTED_OPS_CHANGES_LENGTH, 
MAX_EXECUTED_OPS_LENGTH, - MAX_FUNCTION_NAME_LENGTH, MAX_GAS_PER_BLOCK, MAX_LEDGER_CHANGES_COUNT, MAX_LISTENERS_PER_PEER, - MAX_OPERATIONS_PER_BLOCK, MAX_OPERATIONS_PER_MESSAGE, MAX_OPERATION_DATASTORE_ENTRY_COUNT, - MAX_OPERATION_DATASTORE_KEY_LENGTH, MAX_OPERATION_DATASTORE_VALUE_LENGTH, - MAX_OPERATION_STORAGE_TIME, MAX_PARAMETERS_SIZE, MAX_PEERS_IN_ANNOUNCEMENT_LIST, - MAX_PRODUCTION_STATS_LENGTH, MAX_ROLLS_COUNT_LENGTH, MAX_SIZE_CHANNEL_COMMANDS_CONNECTIVITY, - MAX_SIZE_CHANNEL_COMMANDS_PEERS, MAX_SIZE_CHANNEL_COMMANDS_PEER_TESTERS, - MAX_SIZE_CHANNEL_COMMANDS_PROPAGATION_BLOCKS, + LEDGER_ENTRY_DATASTORE_BASE_SIZE, MAX_ADVERTISE_LENGTH, MAX_ASYNC_GAS, MAX_ASYNC_MESSAGE_DATA, + MAX_ASYNC_POOL_LENGTH, MAX_BLOCK_SIZE, MAX_BOOTSTRAP_ASYNC_POOL_CHANGES, MAX_BOOTSTRAP_BLOCKS, + MAX_BOOTSTRAP_ERROR_LENGTH, MAX_BYTECODE_LENGTH, MAX_CONSENSUS_BLOCKS_IDS, + MAX_DATASTORE_ENTRY_COUNT, MAX_DATASTORE_KEY_LENGTH, MAX_DATASTORE_VALUE_LENGTH, + MAX_DEFERRED_CREDITS_LENGTH, MAX_DENUNCIATIONS_PER_BLOCK_HEADER, + MAX_DENUNCIATION_CHANGES_LENGTH, MAX_ENDORSEMENTS_PER_MESSAGE, MAX_EXECUTED_OPS_CHANGES_LENGTH, + MAX_EXECUTED_OPS_LENGTH, MAX_FUNCTION_NAME_LENGTH, MAX_GAS_PER_BLOCK, MAX_LEDGER_CHANGES_COUNT, + MAX_LISTENERS_PER_PEER, MAX_OPERATIONS_PER_BLOCK, MAX_OPERATIONS_PER_MESSAGE, + MAX_OPERATION_DATASTORE_ENTRY_COUNT, MAX_OPERATION_DATASTORE_KEY_LENGTH, + MAX_OPERATION_DATASTORE_VALUE_LENGTH, MAX_OPERATION_STORAGE_TIME, MAX_PARAMETERS_SIZE, + MAX_PEERS_IN_ANNOUNCEMENT_LIST, MAX_PRODUCTION_STATS_LENGTH, MAX_ROLLS_COUNT_LENGTH, + MAX_SIZE_CHANNEL_COMMANDS_CONNECTIVITY, MAX_SIZE_CHANNEL_COMMANDS_PEERS, + MAX_SIZE_CHANNEL_COMMANDS_PEER_TESTERS, MAX_SIZE_CHANNEL_COMMANDS_PROPAGATION_BLOCKS, MAX_SIZE_CHANNEL_COMMANDS_PROPAGATION_ENDORSEMENTS, MAX_SIZE_CHANNEL_COMMANDS_PROPAGATION_OPERATIONS, MAX_SIZE_CHANNEL_COMMANDS_RETRIEVAL_BLOCKS, MAX_SIZE_CHANNEL_COMMANDS_RETRIEVAL_ENDORSEMENTS, @@ -565,6 +564,7 @@ async fn launch( ask_block_timeout: SETTINGS.protocol.ask_block_timeout, 
max_known_blocks_size: SETTINGS.protocol.max_known_blocks_size, max_node_known_blocks_size: SETTINGS.protocol.max_node_known_blocks_size, + max_block_propagation_time: SETTINGS.protocol.max_block_propagation_time, max_node_wanted_blocks_size: SETTINGS.protocol.max_node_wanted_blocks_size, max_known_ops_size: SETTINGS.protocol.max_known_ops_size, max_node_known_ops_size: SETTINGS.protocol.max_node_known_ops_size, @@ -598,7 +598,8 @@ async fn launch( initial_peers: SETTINGS.protocol.initial_peers_file.clone(), listeners, keypair_file: SETTINGS.protocol.keypair_file.clone(), - max_known_blocks_saved_size: SETTINGS.protocol.max_known_blocks_size, + max_blocks_kept_for_propagation: SETTINGS.protocol.max_blocks_kept_for_propagation, + block_propagation_tick: SETTINGS.protocol.block_propagation_tick, asked_operations_buffer_capacity: SETTINGS.protocol.asked_operations_buffer_capacity, thread_tester_count: SETTINGS.protocol.thread_tester_count, max_operation_storage_time: MAX_OPERATION_STORAGE_TIME, @@ -627,7 +628,6 @@ async fn launch( max_op_datastore_value_length: MAX_OPERATION_DATASTORE_VALUE_LENGTH, max_size_function_name: MAX_FUNCTION_NAME_LENGTH, max_size_call_sc_parameter: MAX_PARAMETERS_SIZE, - max_size_block_infos: MAX_ASK_BLOCKS_PER_MESSAGE as u64, max_size_listeners_per_peer: MAX_LISTENERS_PER_PEER, max_size_peers_announcement: MAX_PEERS_IN_ANNOUNCEMENT_LIST, read_write_limit_bytes_per_second: SETTINGS.protocol.read_write_limit_bytes_per_second diff --git a/massa-node/src/settings.rs b/massa-node/src/settings.rs index 7b0364613da..f1c41704849 100644 --- a/massa-node/src/settings.rs +++ b/massa-node/src/settings.rs @@ -189,9 +189,15 @@ pub struct MetricsSettings { pub struct ProtocolSettings { /// after `ask_block_timeout` milliseconds we try to ask a block to another node pub ask_block_timeout: MassaTime, - /// max known blocks of current nodes we keep in memory (by node) + /// Max known blocks we keep during their propagation + pub 
max_blocks_kept_for_propagation: usize, + /// Time during which a block is expected to propagate + pub max_block_propagation_time: MassaTime, + /// Block propagation tick interval, useful for propagating blocks quickly to newly connected peers. + pub block_propagation_tick: MassaTime, + /// max known blocks our node keeps in its knowledge cache pub max_known_blocks_size: usize, - /// max known blocks of foreign nodes we keep in memory (by node) + /// max cache size for which blocks a foreign node knows about pub max_node_known_blocks_size: usize, /// max wanted blocks per node kept in memory pub max_node_wanted_blocks_size: usize, diff --git a/massa-node/src/tests/config.toml b/massa-node/src/tests/config.toml index dc8dd91cb9f..ee1cd6485dd 100644 --- a/massa-node/src/tests/config.toml +++ b/massa-node/src/tests/config.toml @@ -50,7 +50,6 @@ message_timeout = 5000 ask_peer_list_interval = 30000 keypair_file = "../massa-node/config/node_privkey.key" - max_ask_blocks_per_message = 128 max_operations_per_message = 1024 max_endorsements_per_message = 1024 max_send_wait = 500 diff --git a/massa-pos-exports/src/pos_final_state.rs b/massa-pos-exports/src/pos_final_state.rs index e362bee0be8..d4dec2541a6 100644 --- a/massa-pos-exports/src/pos_final_state.rs +++ b/massa-pos-exports/src/pos_final_state.rs @@ -1521,7 +1521,7 @@ impl PoSFinalState { .deferred_credits_deserializer .credit_deserializer .address_deserializer - .deserialize::(&rest) + .deserialize::(rest) .expect(DEFERRED_CREDITS_DESER_ERROR); let (_, amount) = self @@ -1812,7 +1812,7 @@ mod tests { let cycles = pos_state.get_cycle_history_cycles(); assert_eq!(cycles.len(), 1, "wrong number of cycles"); assert_eq!(cycles[0].0, 0, "cycle should be the 1st one"); - assert_eq!(cycles[0].1, false, "cycle should not be complete yet"); + assert!(!cycles[0].1, "cycle should not be complete yet"); let cycle_info_a = pos_state.get_cycle_info(0).unwrap(); diff --git a/massa-protocol-exports/src/error.rs 
b/massa-protocol-exports/src/error.rs index ff612f9ddf4..54618de8586 100644 --- a/massa-protocol-exports/src/error.rs +++ b/massa-protocol-exports/src/error.rs @@ -15,6 +15,8 @@ pub enum ProtocolError { WrongSignature, /// Protocol error: {0} GeneralProtocolError(String), + /// Invalid block: {0} + InvalidBlock(String), /// An error occurred during channel communication: {0} ChannelError(String), /// Error during network connection: `{0:?}` diff --git a/massa-protocol-exports/src/settings.rs b/massa-protocol-exports/src/settings.rs index 87f07a20367..1a92893ed97 100644 --- a/massa-protocol-exports/src/settings.rs +++ b/massa-protocol-exports/src/settings.rs @@ -30,8 +30,12 @@ pub struct ProtocolConfig { pub initial_peers: PathBuf, /// after `ask_block_timeout` milliseconds we try to ask a block to another node pub ask_block_timeout: MassaTime, - /// Max known blocks we keep in block_handler - pub max_known_blocks_saved_size: usize, + /// Max known blocks we keep during their propagation + pub max_blocks_kept_for_propagation: usize, + /// Time during which a block is expected to propagate + pub max_block_propagation_time: MassaTime, + /// Block propagation tick interval, useful for propagating blocks quickly to newly connected peers. 
+ pub block_propagation_tick: MassaTime, /// max known blocks of current nodes we keep in memory pub max_known_blocks_size: usize, /// max known blocks of foreign nodes we keep in memory (by node) @@ -118,8 +122,6 @@ pub struct ProtocolConfig { pub endorsement_count: u32, /// running threads count pub thread_count: u8, - /// Max of block infos you can send - pub max_size_block_infos: u64, /// Maximum size of an value user datastore pub max_size_value_datastore: u64, /// Maximum size of a function name diff --git a/massa-protocol-exports/src/test_exports/config.rs b/massa-protocol-exports/src/test_exports/config.rs index 8370b89c7ad..b58b32b541b 100644 --- a/massa-protocol-exports/src/test_exports/config.rs +++ b/massa-protocol-exports/src/test_exports/config.rs @@ -13,7 +13,9 @@ impl Default for ProtocolConfig { .path() .to_path_buf(), ask_block_timeout: MassaTime::from_millis(500), - max_known_blocks_saved_size: 300, + max_blocks_kept_for_propagation: 300, + max_block_propagation_time: MassaTime::from_millis(40000), + block_propagation_tick: MassaTime::from_millis(1000), max_known_blocks_size: 100, max_node_known_blocks_size: 100, max_node_wanted_blocks_size: 100, @@ -61,7 +63,6 @@ impl Default for ProtocolConfig { max_size_channel_commands_peers: 300, max_message_size: MAX_MESSAGE_SIZE as usize, endorsement_count: ENDORSEMENT_COUNT, - max_size_block_infos: 200, max_size_value_datastore: 1_000_000, max_size_function_name: u16::MAX, max_size_call_sc_parameter: 10_000_000, diff --git a/massa-protocol-exports/src/test_exports/tools.rs b/massa-protocol-exports/src/test_exports/tools.rs index 557e581440d..0a1fc4fcf47 100644 --- a/massa-protocol-exports/src/test_exports/tools.rs +++ b/massa-protocol-exports/src/test_exports/tools.rs @@ -2,7 +2,9 @@ use massa_hash::Hash; use massa_models::endorsement::EndorsementSerializer; -use massa_models::operation::{OperationIdSerializer, OperationSerializer}; +use massa_models::operation::{ + compute_operations_hash, 
OperationIdSerializer, OperationSerializer, +}; use massa_models::secure_share::SecureShareContent; use massa_models::{ address::Address, @@ -14,7 +16,6 @@ use massa_models::{ operation::{Operation, OperationType, SecureShareOperation}, slot::Slot, }; -use massa_serialization::Serializer; use massa_signature::KeyPair; /// Creates a block for use in protocol, @@ -60,16 +61,11 @@ pub fn create_block_with_operations( slot: Slot, operations: Vec, ) -> SecureShareBlock { - let operation_id_serializer = OperationIdSerializer::new(); - let operation_merkle_root = Hash::compute_from( - &operations.iter().fold(Vec::new(), |acc, v| { - let mut bytes = Vec::new(); - operation_id_serializer - .serialize(&v.id, &mut bytes) - .unwrap(); - [acc, bytes].concat() - })[..], + let operation_merkle_root = compute_operations_hash( + &operations.iter().map(|op| op.id).collect::>(), + &OperationIdSerializer::new(), ); + let header = BlockHeader::new_verifiable( BlockHeader { current_version: 0, diff --git a/massa-protocol-worker/src/connectivity.rs b/massa-protocol-worker/src/connectivity.rs index 60a87253c2c..a41c225f73f 100644 --- a/massa-protocol-worker/src/connectivity.rs +++ b/massa-protocol-worker/src/connectivity.rs @@ -112,7 +112,6 @@ pub(crate) fn start_connectivity_thread( let block_cache = Arc::new(RwLock::new(BlockCache::new( config.max_known_blocks_size.try_into().unwrap(), - (total_in_slots + total_out_slots).try_into().unwrap(), config.max_node_known_blocks_size.try_into().unwrap(), ))); diff --git a/massa-protocol-worker/src/handlers/block_handler/cache.rs b/massa-protocol-worker/src/handlers/block_handler/cache.rs index ce6baadec9e..767071eaac6 100644 --- a/massa-protocol-worker/src/handlers/block_handler/cache.rs +++ b/massa-protocol-worker/src/handlers/block_handler/cache.rs @@ -1,81 +1,71 @@ -use std::{collections::HashSet, sync::Arc, time::Instant}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, + time::Instant, +}; use 
massa_models::{block_header::SecuredHeader, block_id::BlockId}; use massa_protocol_exports::PeerId; use parking_lot::RwLock; use schnellru::{ByLength, LruMap}; -use tracing::log::warn; +/// Cache on block knowledge by our node and its peers pub struct BlockCache { + /// cache of previously checked headers pub checked_headers: LruMap, - #[allow(clippy::type_complexity)] - pub blocks_known_by_peer: LruMap, Instant)>, + /// cache of blocks known by peers + pub blocks_known_by_peer: HashMap>, + /// max number of blocks known in peer knowledge cache pub max_known_blocks_by_peer: u32, } impl BlockCache { - pub fn insert_blocks_known( + /// Mark a given node's knowledge of a list of blocks + /// as either known or unknown. + /// + /// # Arguments + /// + /// * `from_peer_id` - The peer id of the peer to mark + /// * `block_ids` - The list of block ids to mark + /// * `known` - Whether the blocks are known or unknown by the peer + pub fn insert_peer_known_block( &mut self, from_peer_id: &PeerId, block_ids: &[BlockId], - val: bool, - timeout: Instant, + known: bool, ) { - let Ok((blocks, _)) = self + let now = Instant::now(); + let known_blocks = self .blocks_known_by_peer - .get_or_insert(from_peer_id.clone(), || { - ( - LruMap::new(ByLength::new(self.max_known_blocks_by_peer)), - Instant::now(), - ) - }) - .ok_or(()) else { - warn!("blocks_known_by_peer limit reached"); - return; - }; + .entry(from_peer_id.clone()) + .or_insert_with(|| LruMap::new(ByLength::new(self.max_known_blocks_by_peer))); for block_id in block_ids { - blocks.insert(*block_id, (val, timeout)); + known_blocks.insert(*block_id, (known, now)); } } } impl BlockCache { - pub fn new(max_known_blocks: u32, max_peers: u32, max_known_blocks_by_peer: u32) -> Self { + pub fn new(max_known_blocks: u32, max_known_blocks_by_peer: u32) -> Self { Self { checked_headers: LruMap::new(ByLength::new(max_known_blocks)), - blocks_known_by_peer: LruMap::new(ByLength::new(max_peers)), + blocks_known_by_peer: HashMap::new(), 
max_known_blocks_by_peer, } } - pub fn update_cache( - &mut self, - peers_connected: HashSet, - max_known_blocks_by_peer: u32, - ) { - let peers: Vec = self - .blocks_known_by_peer - .iter() - .map(|(id, _)| id.clone()) - .collect(); - - // Clean shared cache if peers do not exist anymore - for peer_id in peers { - if !peers_connected.contains(&peer_id) { - self.blocks_known_by_peer.remove(&peer_id); - } - } + pub fn update_cache(&mut self, peers_connected: &HashSet) { + // Remove disconnected peers from cache + self.blocks_known_by_peer + .retain(|peer_id, _| peers_connected.contains(peer_id)); - // Add new potential peers + // Add new connected peers to cache for peer_id in peers_connected { - if self.blocks_known_by_peer.peek(&peer_id).is_none() { - self.blocks_known_by_peer.insert( - peer_id.clone(), - ( - LruMap::new(ByLength::new(max_known_blocks_by_peer)), - Instant::now(), - ), - ); + match self.blocks_known_by_peer.entry(peer_id.clone()) { + std::collections::hash_map::Entry::Occupied(_) => {} + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(LruMap::new(ByLength::new(self.max_known_blocks_by_peer))); + } } } } diff --git a/massa-protocol-worker/src/handlers/block_handler/messages.rs b/massa-protocol-worker/src/handlers/block_handler/messages.rs index b44c6aebaa6..4dff7edca54 100644 --- a/massa-protocol-worker/src/handlers/block_handler/messages.rs +++ b/massa-protocol-worker/src/handlers/block_handler/messages.rs @@ -12,33 +12,33 @@ use massa_serialization::{ }; use nom::{ error::{context, ContextError, ParseError}, - multi::length_count, sequence::tuple, IResult, Parser, }; use num_enum::{IntoPrimitive, TryFromPrimitive}; use std::ops::Bound::Included; -/// Ask for the info about a block. +/// Request block data #[derive(Debug, Clone, Default, PartialEq, Eq)] -pub enum AskForBlocksInfo { +pub enum AskForBlockInfo { /// Ask header Header, - /// The info about the block is required(list of operations ids). 
+ /// Ask for the list of operation IDs of the block #[default] - Info, - /// The actual operations are required. + OperationIds, + /// Ask for a subset of operations of the block Operations(Vec), } +/// Reply to a block data request #[derive(Debug, Clone)] #[allow(clippy::large_enum_variant)] pub enum BlockInfoReply { /// Header Header(SecuredHeader), - /// The info about the block is required(list of operations ids). - Info(Vec), - /// The actual operations required. + /// List of operation IDs within the block + OperationIds(Vec), + /// Requested full operations of the block Operations(Vec), /// Block not found NotFound, @@ -49,27 +49,37 @@ pub enum BlockInfoReply { #[allow(clippy::large_enum_variant)] pub enum BlockMessage { /// Block header - BlockHeader(SecuredHeader), + Header(SecuredHeader), /// Message asking the peer for info on a list of blocks. - AskForBlocks(Vec<(BlockId, AskForBlocksInfo)>), + DataRequest { + /// ID of the block to ask info for. + block_id: BlockId, + /// Block info to ask for. + block_info: AskForBlockInfo, + }, /// Message replying with info on a list of blocks. - ReplyForBlocks(Vec<(BlockId, BlockInfoReply)>), + DataResponse { + /// ID of the block to reply info for. + block_id: BlockId, + /// Block info reply. + block_info: BlockInfoReply, + }, } #[derive(IntoPrimitive, Debug, Eq, PartialEq, TryFromPrimitive)] #[repr(u64)] pub enum MessageTypeId { - BlockHeader, - AskForBlocks, - ReplyForBlocks, + Header, + DataRequest, + DataResponse, } impl From<&BlockMessage> for MessageTypeId { fn from(value: &BlockMessage) -> Self { match value { - BlockMessage::BlockHeader(_) => MessageTypeId::BlockHeader, - BlockMessage::AskForBlocks(_) => MessageTypeId::AskForBlocks, - BlockMessage::ReplyForBlocks(_) => MessageTypeId::ReplyForBlocks, + BlockMessage::Header(_) => MessageTypeId::Header, + BlockMessage::DataRequest { .. } => MessageTypeId::DataRequest, + BlockMessage::DataResponse { .. 
} => MessageTypeId::DataResponse, } } } @@ -78,7 +88,7 @@ impl From<&BlockMessage> for MessageTypeId { #[repr(u64)] pub enum BlockInfoType { Header = 0, - Info = 1, + OperationIds = 1, Operations = 2, NotFound = 3, } @@ -117,72 +127,69 @@ impl Serializer for BlockMessageSerializer { buffer, )?; match value { - BlockMessage::BlockHeader(endorsements) => { - self.secure_share_serializer - .serialize(endorsements, buffer)?; + BlockMessage::Header(header) => { + self.secure_share_serializer.serialize(header, buffer)?; } - BlockMessage::AskForBlocks(ask_for_blocks) => { - self.length_serializer - .serialize(&(ask_for_blocks.len() as u64), buffer)?; - for (block_id, ask_for_block_info) in ask_for_blocks { - self.block_id_serializer.serialize(block_id, buffer)?; - match ask_for_block_info { - AskForBlocksInfo::Header => { - self.id_serializer - .serialize(&(BlockInfoType::Header as u64), buffer)?; - } - AskForBlocksInfo::Info => { - self.id_serializer - .serialize(&(BlockInfoType::Info as u64), buffer)?; - } - AskForBlocksInfo::Operations(operations_ids) => { - self.id_serializer - .serialize(&(BlockInfoType::Operations as u64), buffer)?; - self.length_serializer - .serialize(&(operations_ids.len() as u64), buffer)?; - for operation_id in operations_ids { - self.operation_id_serializer - .serialize(operation_id, buffer)?; - } + BlockMessage::DataRequest { + block_id, + block_info, + } => { + self.block_id_serializer.serialize(block_id, buffer)?; + match block_info { + AskForBlockInfo::Header => { + self.id_serializer + .serialize(&(BlockInfoType::Header as u64), buffer)?; + } + AskForBlockInfo::OperationIds => { + self.id_serializer + .serialize(&(BlockInfoType::OperationIds as u64), buffer)?; + } + AskForBlockInfo::Operations(operations_ids) => { + self.id_serializer + .serialize(&(BlockInfoType::Operations as u64), buffer)?; + self.length_serializer + .serialize(&(operations_ids.len() as u64), buffer)?; + for operation_id in operations_ids { + 
self.operation_id_serializer + .serialize(operation_id, buffer)?; } } } } - BlockMessage::ReplyForBlocks(reply_for_blocks) => { - self.length_serializer - .serialize(&(reply_for_blocks.len() as u64), buffer)?; - for (block_id, reply_for_block_info) in reply_for_blocks { - self.block_id_serializer.serialize(block_id, buffer)?; - match reply_for_block_info { - BlockInfoReply::Header(header) => { - self.id_serializer - .serialize(&(BlockInfoType::Header as u64), buffer)?; - self.secure_share_serializer.serialize(header, buffer)?; - } - BlockInfoReply::Info(operations_ids) => { - self.id_serializer - .serialize(&(BlockInfoType::Info as u64), buffer)?; - self.length_serializer - .serialize(&(operations_ids.len() as u64), buffer)?; - for operation_id in operations_ids { - self.operation_id_serializer - .serialize(operation_id, buffer)?; - } - } - BlockInfoReply::Operations(operations) => { - self.id_serializer - .serialize(&(BlockInfoType::Operations as u64), buffer)?; - self.length_serializer - .serialize(&(operations.len() as u64), buffer)?; - for operation in operations { - self.secure_share_serializer.serialize(operation, buffer)?; - } + BlockMessage::DataResponse { + block_id, + block_info, + } => { + self.block_id_serializer.serialize(block_id, buffer)?; + match block_info { + BlockInfoReply::Header(header) => { + self.id_serializer + .serialize(&(BlockInfoType::Header as u64), buffer)?; + self.secure_share_serializer.serialize(header, buffer)?; + } + BlockInfoReply::OperationIds(operations_ids) => { + self.id_serializer + .serialize(&(BlockInfoType::OperationIds as u64), buffer)?; + self.length_serializer + .serialize(&(operations_ids.len() as u64), buffer)?; + for operation_id in operations_ids { + self.operation_id_serializer + .serialize(operation_id, buffer)?; } - BlockInfoReply::NotFound => { - self.id_serializer - .serialize(&(BlockInfoType::NotFound as u64), buffer)?; + } + BlockInfoReply::Operations(operations) => { + self.id_serializer + 
.serialize(&(BlockInfoType::Operations as u64), buffer)?; + self.length_serializer + .serialize(&(operations.len() as u64), buffer)?; + for operation in operations { + self.secure_share_serializer.serialize(operation, buffer)?; } } + BlockInfoReply::NotFound => { + self.id_serializer + .serialize(&(BlockInfoType::NotFound as u64), buffer)?; + } } } } @@ -193,7 +200,6 @@ impl Serializer for BlockMessageSerializer { pub struct BlockMessageDeserializer { id_deserializer: U64VarIntDeserializer, block_header_deserializer: SecureShareDeserializer, - block_infos_length_deserializer: U64VarIntDeserializer, block_id_deserializer: BlockIdDeserializer, operation_ids_deserializer: OperationIdsDeserializer, operations_deserializer: OperationsDeserializer, @@ -202,7 +208,6 @@ pub struct BlockMessageDeserializer { pub struct BlockMessageDeserializerArgs { pub thread_count: u8, pub endorsement_count: u32, - pub block_infos_length_max: u64, pub max_operations_per_block: u32, pub max_datastore_value_length: u64, pub max_function_name_length: u16, @@ -224,10 +229,6 @@ impl BlockMessageDeserializer { args.max_denunciations_in_block_header, args.last_start_period, )), - block_infos_length_deserializer: U64VarIntDeserializer::new( - Included(0), - Included(args.block_infos_length_max), - ), block_id_deserializer: BlockIdDeserializer::new(), operation_ids_deserializer: OperationIdsDeserializer::new( args.max_operations_per_block, @@ -259,109 +260,95 @@ impl Deserializer for BlockMessageDeserializer { )) })?; match id { - MessageTypeId::BlockHeader => { - context("Failed BlockHeader deserialization", |input| { - self.block_header_deserializer.deserialize(input) - }) - .map(BlockMessage::BlockHeader) - .parse(buffer) - } - MessageTypeId::AskForBlocks => context( - "Failed AskForBlocks deserialization", - length_count( - context("Failed length deserialization", |input| { - self.block_infos_length_deserializer.deserialize(input) + MessageTypeId::Header => context("Failed BlockHeader 
deserialization", |input| { + self.block_header_deserializer.deserialize(input) + }) + .map(BlockMessage::Header) + .parse(buffer), + MessageTypeId::DataRequest => context( + "Failed BlockDataRequest deserialization", + tuple(( + context("Failed BlockId deserialization", |input| { + self.block_id_deserializer + .deserialize(input) + .map(|(rest, id)| (rest, id)) + }), + context("Failed infos deserialization", |input| { + let (rest, raw_id) = self.id_deserializer.deserialize(input)?; + let info_type: BlockInfoType = raw_id.try_into().map_err(|_| { + nom::Err::Error(ParseError::from_error_kind( + buffer, + nom::error::ErrorKind::Digit, + )) + })?; + match info_type { + BlockInfoType::Header => Ok((rest, AskForBlockInfo::Header)), + BlockInfoType::OperationIds => { + Ok((rest, AskForBlockInfo::OperationIds)) + } + BlockInfoType::Operations => self + .operation_ids_deserializer + .deserialize(rest) + .map(|(rest, operation_ids)| { + (rest, AskForBlockInfo::Operations(operation_ids)) + }), + BlockInfoType::NotFound => { + Err(nom::Err::Error(ParseError::from_error_kind( + buffer, + nom::error::ErrorKind::Digit, + ))) + } + } }), - context( - "Failed Block infos deserialization", - tuple(( - context("Failed BlockId deserialization", |input| { - self.block_id_deserializer.deserialize(input) - }), - context("Failed infos deserialization", |input| { - let (rest, raw_id) = self.id_deserializer.deserialize(input)?; - let info_type: BlockInfoType = - raw_id.try_into().map_err(|_| { - nom::Err::Error(ParseError::from_error_kind( - buffer, - nom::error::ErrorKind::Digit, - )) - })?; - match info_type { - BlockInfoType::Header => { - Ok((rest, AskForBlocksInfo::Header)) - } - BlockInfoType::Info => Ok((rest, AskForBlocksInfo::Info)), - BlockInfoType::Operations => self - .operation_ids_deserializer - .deserialize(rest) - .map(|(rest, operation_ids)| { - (rest, AskForBlocksInfo::Operations(operation_ids)) - }), - BlockInfoType::NotFound => { - 
Err(nom::Err::Error(ParseError::from_error_kind( - buffer, - nom::error::ErrorKind::Digit, - ))) - } - } - }), - )), - ), - ), + )), ) - .map(BlockMessage::AskForBlocks) + .map(|(block_id, block_info)| BlockMessage::DataRequest { + block_id, + block_info, + }) .parse(buffer), - MessageTypeId::ReplyForBlocks => context( - "Failed ReplyForBlocks deserialization", - length_count( - context("Failed length deserialization", |input| { - self.block_infos_length_deserializer.deserialize(input) + MessageTypeId::DataResponse => context( + "Failed BlockDataResponse deserialization", + tuple(( + context("Failed BlockId deserialization", |input| { + self.block_id_deserializer + .deserialize(input) + .map(|(rest, id)| (rest, id)) + }), + context("Failed infos deserialization", |input| { + let (rest, raw_id) = self.id_deserializer.deserialize(input)?; + let info_type: BlockInfoType = raw_id.try_into().map_err(|_| { + nom::Err::Error(ParseError::from_error_kind( + buffer, + nom::error::ErrorKind::Digit, + )) + })?; + match info_type { + BlockInfoType::Header => self + .block_header_deserializer + .deserialize(rest) + .map(|(rest, header)| (rest, BlockInfoReply::Header(header))), + BlockInfoType::OperationIds => self + .operation_ids_deserializer + .deserialize(rest) + .map(|(rest, operation_ids)| { + (rest, BlockInfoReply::OperationIds(operation_ids)) + }), + BlockInfoType::Operations => self + .operations_deserializer + .deserialize(rest) + .map(|(rest, operations)| { + (rest, BlockInfoReply::Operations(operations)) + }), + BlockInfoType::NotFound => Ok((rest, BlockInfoReply::NotFound)), + } }), - context( - "Failed block infos deserialization", - tuple(( - context("Failed BlockId deserialization", |input| { - self.block_id_deserializer.deserialize(input) - }), - context("Failed infos deserialization", |input| { - let (rest, raw_id) = self.id_deserializer.deserialize(input)?; - let info_type: BlockInfoType = - raw_id.try_into().map_err(|_| { - 
nom::Err::Error(ParseError::from_error_kind( - buffer, - nom::error::ErrorKind::Digit, - )) - })?; - match info_type { - BlockInfoType::Header => self - .block_header_deserializer - .deserialize(rest) - .map(|(rest, header)| { - (rest, BlockInfoReply::Header(header)) - }), - BlockInfoType::Info => self - .operation_ids_deserializer - .deserialize(rest) - .map(|(rest, operation_ids)| { - (rest, BlockInfoReply::Info(operation_ids)) - }), - BlockInfoType::Operations => self - .operations_deserializer - .deserialize(rest) - .map(|(rest, operations)| { - (rest, BlockInfoReply::Operations(operations)) - }), - BlockInfoType::NotFound => { - Ok((rest, BlockInfoReply::NotFound)) - } - } - }), - )), - ), - ), + )), ) - .map(BlockMessage::ReplyForBlocks) + .map(|(block_id, block_info)| BlockMessage::DataResponse { + block_id, + block_info, + }) .parse(buffer), } }) diff --git a/massa-protocol-worker/src/handlers/block_handler/mod.rs b/massa-protocol-worker/src/handlers/block_handler/mod.rs index 98bda24d6c5..9e5caf5f10a 100644 --- a/massa-protocol-worker/src/handlers/block_handler/mod.rs +++ b/massa-protocol-worker/src/handlers/block_handler/mod.rs @@ -28,7 +28,7 @@ pub(crate) use messages::{BlockMessage, BlockMessageSerializer}; #[cfg(feature = "testing")] pub use messages::{ - AskForBlocksInfo, BlockInfoReply, BlockMessageDeserializer, BlockMessageDeserializerArgs, + AskForBlockInfo, BlockInfoReply, BlockMessageDeserializer, BlockMessageDeserializerArgs, }; use super::{ @@ -95,7 +95,6 @@ impl BlockHandler { peer_cmd_sender, config, cache, - storage, ); Self { block_retrieval_thread: Some((sender_ext, block_retrieval_thread)), diff --git a/massa-protocol-worker/src/handlers/block_handler/propagation.rs b/massa-protocol-worker/src/handlers/block_handler/propagation.rs index f2cf68c88cc..ec6ac74b7cf 100644 --- a/massa-protocol-worker/src/handlers/block_handler/propagation.rs +++ b/massa-protocol-worker/src/handlers/block_handler/propagation.rs @@ -1,123 +1,131 @@ -use 
std::{collections::VecDeque, thread::JoinHandle}; +//! Copyright (c) 2023 MASSA LABS -use massa_channel::{receiver::MassaReceiver, sender::MassaSender}; -use massa_logging::massa_trace; -use massa_models::{block_id::BlockId, prehash::PreHashSet}; -use massa_protocol_exports::PeerId; -use massa_protocol_exports::{ProtocolConfig, ProtocolError}; -use massa_storage::Storage; -use tracing::{debug, info, warn}; +//! This file deals with the announcement of block headers to other nodes +//! in order to propagate the blocks from our node to other nodes. +//! It also manages peer banning for invalid blocks detected by consensus. +//! +//! The block propagation system works in the following way: +//! * a node announces the headers of blocks to its neighbor nodes +//! * the neighbor nodes that need that block then ask our Retrieval process for it +//! +//! Here we need to announce block headers to other nodes that haven't sene them, +//! and keep the blocks alive long enough for our peers to be able to retrieve them from us. 
+use super::{ + cache::SharedBlockCache, commands_propagation::BlockHandlerPropagationCommand, + BlockMessageSerializer, +}; use crate::{ handlers::{block_handler::BlockMessage, peer_handler::models::PeerManagementCmd}, messages::MessagesSerializer, wrap_network::ActiveConnectionsTrait, }; +use crossbeam::channel::RecvTimeoutError; +use massa_channel::{receiver::MassaReceiver, sender::MassaSender}; +use massa_logging::massa_trace; +use massa_models::block_header::SecuredHeader; +use massa_models::block_id::BlockId; +use massa_protocol_exports::PeerId; +use massa_protocol_exports::{ProtocolConfig, ProtocolError}; +use massa_storage::Storage; +use schnellru::{ByLength, LruMap}; +use std::thread::JoinHandle; +use std::time::Instant; +use tracing::{info, warn}; -use super::{ - cache::SharedBlockCache, commands_propagation::BlockHandlerPropagationCommand, - BlockMessageSerializer, -}; +#[derive(Debug)] +struct BlockPropagationData { + /// Time when propagation was initiated + pub time_added: Instant, + /// Storage holding the block and its dependencies during its propagation time + pub _storage: Storage, + /// Clone of the block header to avoid locking storage during propagation + pub header: SecuredHeader, +} pub struct PropagationThread { + /// Receiver for commands receiver: MassaReceiver, + /// Protocol config config: ProtocolConfig, + /// Shared access to the block cache cache: SharedBlockCache, - storage: Storage, - saved_blocks: VecDeque, + /// Blocks stored for propagation + stored_for_propagation: LruMap, + /// Shared access to the list of peers connected to us active_connections: Box, + /// Channel to send commands to the peer management system (for banning peers) peer_cmd_sender: MassaSender, + /// Serializer for block-related messages block_serializer: MessagesSerializer, } impl PropagationThread { fn run(&mut self) { + let tick_interval = self.config.block_propagation_tick.to_duration(); + let mut deadline = Instant::now() + .checked_add(tick_interval) + 
.expect("could not get time of next propagation tick"); loop { - match self.receiver.recv() { + match self.receiver.recv_deadline(deadline) { Ok(command) => { match command { + // Message: the block was integrated and should be propagated BlockHandlerPropagationCommand::IntegratedBlock { block_id, storage } => { massa_trace!( "protocol.protocol_worker.process_command.integrated_block.begin", { "block_id": block_id } ); - let header = { - let block = { - let blocks = storage.read_blocks(); - blocks.get(&block_id).cloned() - }; - if let Some(block) = block { - self.storage.store_block(block.clone()); - self.saved_blocks.push_back(block.id); - if self.saved_blocks.len() - > self.config.max_known_blocks_saved_size - { - let block_id = self.saved_blocks.pop_front().unwrap(); - let mut ids_to_delete = PreHashSet::default(); - ids_to_delete.insert(block_id); - self.storage.drop_block_refs(&ids_to_delete); - } - block.content.header.clone() - } else { - warn!("Block {} not found in storage", &block_id); + + // get the block header + let header = match storage + .read_blocks() + .get(&block_id) + .map(|block| block.content.header.clone()) + { + Some(h) => h, + None => { + warn!( + "claimed block {} absent from storage on propagation", + block_id + ); continue; } }; - let peers_connected = self.active_connections.get_peer_ids_connected(); - self.cache.write().update_cache( - peers_connected, - self.config - .max_node_known_blocks_size - .try_into() - .expect("max_node_known_blocks_size is too big"), + + // Add the block and its dependencies to the propagation LRU + // to ensure they are stored for the time of the propagation. 
+ self.stored_for_propagation.insert( + block_id, + BlockPropagationData { + time_added: Instant::now(), + _storage: storage, + header, + }, ); - { - let cache_read = self.cache.read(); - for (peer_id, (blocks_known, _)) in - cache_read.blocks_known_by_peer.iter() - { - // peer that isn't asking for that block - let cond = blocks_known.peek(&block_id); - // if we don't know if that peer knows that hash or if we know it doesn't - if !cond.map_or_else(|| false, |v| v.0) { - massa_trace!("protocol.protocol_worker.process_command.integrated_block.send_header", { "peer_id": peer_id, "block_id": block_id}); - debug!( - "Send block header for slot {} to peer {}", - peer_id, header.content.slot - ); - if let Err(err) = self.active_connections.send_to_peer( - peer_id, - &self.block_serializer, - BlockMessage::BlockHeader(header.clone()).into(), - true, - ) { - warn!("Error while sending block header to peer {} err: {:?}", peer_id, err); - } - } else { - massa_trace!("protocol.protocol_worker.process_command.integrated_block.do_not_send", { "peer_id": peer_id, "block_id": block_id }); - } - } - } + + // propagate everything that needs to be propagated + self.perform_propagations(); + + // renew tick because propagation propagations were updated + deadline = Instant::now() + .checked_add(tick_interval) + .expect("could not get time of next propagation tick"); } BlockHandlerPropagationCommand::AttackBlockDetected(block_id) => { - let to_ban: Vec = self + let peers_to_ban: Vec = self .cache .read() .blocks_known_by_peer .iter() - .filter_map(|(id, (block_known, _))| { - match block_known.peek(&block_id) { - Some((true, _)) => Some(id.clone()), + .filter_map(|(peer_id, knowledge)| { + match knowledge.peek(&block_id) { + Some((true, _)) => Some(peer_id.clone()), _ => None, } }) .collect(); - for id in to_ban.iter() { - massa_trace!("protocol.protocol_worker.process_command.attack_block_detected.ban_node", { "node": id, "block_id": block_id }); - if let Err(err) = 
self.ban_node(id) { - warn!("Error while banning peer {} err: {:?}", id, err); - } - } + self.ban_peers(&peers_to_ban); } BlockHandlerPropagationCommand::Stop => { info!("Stop block propagation thread"); @@ -125,7 +133,15 @@ impl PropagationThread { } } } - Err(_) => { + Err(RecvTimeoutError::Timeout) => { + // Propagation tick. This is useful to quickly propagate headers to newly connected nodes. + self.perform_propagations(); + // renew deadline of next tick + deadline = Instant::now() + .checked_add(tick_interval) + .expect("could not get time of next propagation tick"); + } + Err(RecvTimeoutError::Disconnected) => { info!("Stop block propagation thread"); return; } @@ -133,12 +149,70 @@ impl PropagationThread { } } - /// send a ban peer command to the peer handler - fn ban_node(&mut self, peer_id: &PeerId) -> Result<(), ProtocolError> { - massa_trace!("ban node from retrieval thread", { "peer_id": peer_id.to_string() }); - self.peer_cmd_sender - .try_send(PeerManagementCmd::Ban(vec![peer_id.clone()])) + /// Propagate blocks to peers that need them + fn perform_propagations(&mut self) { + let now = Instant::now(); + + // stop propagating blocks that have been propagating for too long + while let Some(time_added) = self + .stored_for_propagation + .peek_oldest() + .map(|(_, BlockPropagationData { time_added, .. })| *time_added) + { + if now.saturating_duration_since(time_added) + > self.config.max_block_propagation_time.to_duration() + { + self.stored_for_propagation.pop_oldest(); + } else { + break; + } + } + + // update caches based on currently connected peers + let peers_connected = self.active_connections.get_peer_ids_connected(); + let mut cache_lock = self.cache.write(); + cache_lock.update_cache(&peers_connected); + 'peer_loop: for (peer_id, known_by_peer) in cache_lock.blocks_known_by_peer.iter_mut() { + for (block_id, BlockPropagationData { header, .. 
}) in + self.stored_for_propagation.iter() + { + // if the peer already knows about the block, do not propagate it + if let Some((true, _)) = known_by_peer.peek(block_id) { + continue; + } + + // try to propagate + match self.active_connections.send_to_peer( + peer_id, + &self.block_serializer, + BlockMessage::Header(header.clone()).into(), + true, + ) { + Ok(()) => { + // mark the block as known by the peer + known_by_peer.insert(*block_id, (true, now)); + } + Err(err) => { + warn!( + "Error while sending block header to peer {} err: {:?}", + peer_id, err + ); + continue 'peer_loop; // try next peer + } + } + } + } + } + + /// try to ban a list of peers + fn ban_peers(&mut self, peer_ids: &[PeerId]) { + if let Err(err) = self + .peer_cmd_sender + .try_send(PeerManagementCmd::Ban(peer_ids.to_vec())) .map_err(|err| ProtocolError::SendError(err.to_string())) + { + warn!("could not send Ban command to peer manager: {}", err); + } } } @@ -148,7 +222,6 @@ pub fn start_propagation_thread( peer_cmd_sender: MassaSender, config: ProtocolConfig, cache: SharedBlockCache, - storage: Storage, ) -> JoinHandle<()> { std::thread::Builder::new() .name("protocol-block-handler-propagation".to_string()) @@ -156,14 +229,18 @@ pub fn start_propagation_thread( let block_serializer = MessagesSerializer::new() .with_block_message_serializer(BlockMessageSerializer::new()); let mut propagation_thread = PropagationThread { + stored_for_propagation: LruMap::new(ByLength::new( + config + .max_blocks_kept_for_propagation + .try_into() + .expect("max_blocks_kept_for_propagation does not fit in u32"), + )), receiver, config, cache, peer_cmd_sender, active_connections, block_serializer, - storage, - saved_blocks: VecDeque::default(), }; propagation_thread.run(); }) diff --git a/massa-protocol-worker/src/handlers/block_handler/retrieval.rs b/massa-protocol-worker/src/handlers/block_handler/retrieval.rs index bd89b90eaaf..6b678d769d2 100644 --- 
a/massa-protocol-worker/src/handlers/block_handler/retrieval.rs +++ b/massa-protocol-worker/src/handlers/block_handler/retrieval.rs @@ -1,5 +1,5 @@ use std::{ - collections::{hash_map::Entry, HashMap, HashSet}, + collections::{HashMap, HashSet}, thread::JoinHandle, time::Instant, }; @@ -9,14 +9,14 @@ use crate::{ endorsement_handler::{ cache::SharedEndorsementCache, commands_propagation::EndorsementHandlerPropagationCommand, + note_endorsements_from_peer, }, operation_handler::{ cache::SharedOperationCache, commands_propagation::OperationHandlerPropagationCommand, }, peer_handler::models::{PeerManagementCmd, PeerMessageTuple}, }, - messages::MessagesSerializer, - sig_verifier::verify_sigs_batch, + messages::{Message, MessagesSerializer}, wrap_network::ActiveConnectionsTrait, }; use crossbeam::{ @@ -25,18 +25,18 @@ use crossbeam::{ }; use massa_channel::{receiver::MassaReceiver, sender::MassaSender}; use massa_consensus_exports::ConsensusController; -use massa_hash::Hash; use massa_logging::massa_trace; use massa_metrics::MassaMetrics; use massa_models::{ block::{Block, BlockSerializer}, block_header::SecuredHeader, block_id::BlockId, - endorsement::SecureShareEndorsement, - operation::{OperationId, OperationIdSerializer, SecureShareOperation}, - prehash::{CapacityAllocator, PreHashMap, PreHashSet}, - secure_share::{Id, SecureShare}, - slot::Slot, + endorsement::EndorsementId, + operation::{ + compute_operations_hash, OperationId, OperationIdSerializer, SecureShareOperation, + }, + prehash::{PreHashMap, PreHashSet}, + secure_share::SecureShare, timeslots::get_block_slot_timestamp, }; use massa_pool_exports::PoolController; @@ -45,24 +45,24 @@ use massa_protocol_exports::PeerId; use massa_protocol_exports::{ProtocolConfig, ProtocolError}; use massa_serialization::{DeserializeError, Deserializer, Serializer}; use massa_storage::Storage; -use massa_time::{MassaTime, TimeError}; +use massa_time::TimeError; use massa_versioning::versioning::MipStore; -use 
schnellru::{ByLength, LruMap}; +use rand::thread_rng; +use rand::{seq::SliceRandom, Rng}; use tracing::{debug, info, warn}; use super::{ + super::operation_handler::note_operations_from_peer, cache::SharedBlockCache, commands_propagation::BlockHandlerPropagationCommand, commands_retrieval::BlockHandlerRetrievalCommand, messages::{ - AskForBlocksInfo, BlockInfoReply, BlockMessage, BlockMessageDeserializer, + AskForBlockInfo, BlockInfoReply, BlockMessage, BlockMessageDeserializer, BlockMessageDeserializerArgs, }, BlockMessageSerializer, }; -static BLOCK_HEADER: &str = "protocol.protocol_worker.on_network_event.received_block_header"; - /// Info about a block we've seen #[derive(Debug, Clone)] pub(crate) struct BlockInfo { @@ -73,8 +73,6 @@ pub(crate) struct BlockInfo { /// Operations and endorsements contained in the block, /// if we've received them already, and none otherwise. pub(crate) storage: Storage, - /// Full operations size in bytes - pub(crate) operations_size: usize, } impl BlockInfo { @@ -83,7 +81,6 @@ impl BlockInfo { header, operation_ids: None, storage, - operations_size: 0, } } } @@ -94,7 +91,7 @@ pub struct RetrievalThread { consensus_controller: Box, pool_controller: Box, receiver_network: MassaReceiver, - _internal_sender: MassaSender, + _announcement_sender: MassaSender, receiver: MassaReceiver, block_message_serializer: MessagesSerializer, block_wishlist: PreHashMap, @@ -110,6 +107,7 @@ pub struct RetrievalThread { storage: Storage, mip_store: MipStore, massa_metrics: MassaMetrics, + operation_id_serializer: OperationIdSerializer, } impl RetrievalThread { @@ -118,7 +116,6 @@ impl RetrievalThread { BlockMessageDeserializer::new(BlockMessageDeserializerArgs { thread_count: self.config.thread_count, endorsement_count: self.config.endorsement_count, - block_infos_length_max: self.config.max_size_block_infos, max_operations_per_block: self.config.max_operations_per_block, max_datastore_value_length: self.config.max_size_value_datastore, 
max_function_name_length: self.config.max_size_function_name, @@ -150,44 +147,16 @@ impl RetrievalThread { return; } match message { - BlockMessage::AskForBlocks(block_infos) => { - if let Err(err) = self.on_asked_for_blocks_received(peer_id.clone(), block_infos) { - warn!("Error in on_asked_for_blocks_received: {:?}", err); - } + BlockMessage::DataRequest{block_id, block_info} => { + self.on_ask_for_block_info_received(peer_id.clone(), block_id, block_info); } - BlockMessage::ReplyForBlocks(block_infos) => { - for (block_id, block_info) in block_infos.into_iter() { - if let Err(err) = self.on_block_info_received(peer_id.clone(), block_id, block_info) { - warn!("Error in on_block_info_received: {:?}", err); - } - } - if let Err(err) = self.update_ask_block() { - warn!("Error in update_ask_blocks: {:?}", err); - } + BlockMessage::DataResponse{block_id, block_info} => { + self.on_block_info_received(peer_id.clone(), block_id, block_info); + self.update_block_retrieval(); } - BlockMessage::BlockHeader(header) => { - massa_trace!(BLOCK_HEADER, { "peer_id": peer_id, "header": header}); - if let Ok(Some((block_id, is_new))) = - self.note_header_from_peer(&header, &peer_id) - { - if is_new { - self.consensus_controller - .register_block_header(block_id, header); - } - if let Err(err) = self.update_ask_block() { - warn!("Error in update_ask_blocks: {:?}", err); - } - } else { - warn!( - "peer {} sent us critically incorrect header, \ - which may be an attack attempt by the remote peer \ - or a loss of sync between us and the remote peer", - peer_id, - ); - if let Err(err) = self.ban_node(&peer_id) { - warn!("Error while banning peer {} err: {:?}", peer_id, err); - } - } + BlockMessage::Header(header) => { + self.on_block_header_received(peer_id.clone(), header); + self.update_block_retrieval(); } } }, @@ -210,24 +179,19 @@ impl RetrievalThread { BlockInfo::new(header, self.storage.clone_without_refs()), ); } - // Remove the knowledge that we asked this block to nodes. 
- self.remove_asked_blocks_of_node(&remove); + // Cleanup the knowledge that we asked this list of blocks to nodes. + self.remove_asked_blocks(&remove); // Remove from the wishlist. for block_id in remove.iter() { self.block_wishlist.remove(block_id); } - if let Err(err) = self.update_ask_block() { - warn!("Error in update_ask_blocks: {:?}", err); - } - massa_trace!( - "protocol.protocol_worker.process_command.wishlist_delta.end", - {} - ); + + // update block asking process + self.update_block_retrieval(); }, BlockHandlerRetrievalCommand::Stop => { - debug!("Received block message: command Stop"); - info!("Stop block retrieval thread from command receiver"); + info!("Stop block retrieval thread from command receiver (Stop)"); return; } } @@ -259,189 +223,224 @@ impl RetrievalThread { } } recv(at(self.next_timer_ask_block)) -> _ => { - if let Err(err) = self.update_ask_block() { - warn!("Error in ask_blocks: {:?}", err); - } + self.update_block_retrieval(); } } } } - /// Network ask the local node for blocks - /// - /// React on another node asking for blocks information. We can forward the operation ids if - /// the foreign node asked for `AskForBlocksInfo::Info` or the full operations if he asked for - /// the missing operations in his storage with `AskForBlocksInfo::Operations` + /// A remote node asked the local node for block data /// - /// Forward the reply to the network. - fn on_asked_for_blocks_received( + /// We send the block's operation ids if the foreign node asked for `AskForBlockInfo::Info` + /// or a subset of the full operations of the block if it asked for `AskForBlockInfo::Operations`. 
+ fn on_ask_for_block_info_received( &mut self, from_peer_id: PeerId, - list: Vec<(BlockId, AskForBlocksInfo)>, - ) -> Result<(), ProtocolError> { - let mut all_blocks_info = vec![]; - for (hash, info_wanted) in &list { - let (header, operations_ids) = match self.storage.read_blocks().get(hash) { - Some(signed_block) => ( - signed_block.content.header.clone(), - signed_block.content.operations.clone(), - ), - None => { - // let the node know we don't have the block. - all_blocks_info.push((*hash, BlockInfoReply::NotFound)); - continue; - } - }; - let block_info = match info_wanted { - AskForBlocksInfo::Header => BlockInfoReply::Header(header), - AskForBlocksInfo::Info => BlockInfoReply::Info(operations_ids), - AskForBlocksInfo::Operations(op_ids) => { - // Mark the node as having the block. - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known( - &from_peer_id, - &[*hash], - true, - Instant::now(), - ); - } - // Send only the missing operations that are in storage. 
- let needed_ops = { - let operations = self.storage.read_operations(); - operations_ids - .into_iter() - .filter_map(|id| { - if op_ids.contains(&id) { - operations.get(&id) - } else { - None - } - }) - .cloned() - .collect() - }; - BlockInfoReply::Operations(needed_ops) + block_id: BlockId, + info_requested: AskForBlockInfo, + ) { + // updates on the remote peer's knowledge on blocks, operations and endorsements + // only applied if the response is successfully sent to the peer + let mut block_knowledge_updates = PreHashSet::default(); + let mut operation_knowledge_updates = PreHashSet::default(); + let mut endorsement_knowledge_updates = PreHashSet::default(); + + // retrieve block data from storage + let stored_header_op_ids = self.storage.read_blocks().get(&block_id).map(|block| { + ( + block.content.header.clone(), + block.content.operations.clone(), + ) + }); + + let block_info_response = match (stored_header_op_ids, info_requested) { + (None, _) => BlockInfoReply::NotFound, + + (Some((header, _)), AskForBlockInfo::Header) => { + // the peer asked for a block header + + // once sent, the peer will know about that block, + // no need to announce this header to that peer anymore + block_knowledge_updates.insert(block_id); + + // once sent, the peer will know about the endorsements in that block, + // no need to announce those endorsements to that peer anymore + endorsement_knowledge_updates.extend( + header + .content + .endorsements + .iter() + .map(|e| e.id) + .collect::>(), + ); + + BlockInfoReply::Header(header) + } + (Some((_, block_op_ids)), AskForBlockInfo::OperationIds) => { + // the peer asked for the operation IDs of the block + + // once sent, the peer will know about those operations, + // no need to announce their IDs to that peer anymore + operation_knowledge_updates.extend(block_op_ids.iter().cloned()); + + BlockInfoReply::OperationIds(block_op_ids) + } + (Some((_, block_op_ids)), AskForBlockInfo::Operations(mut asked_ops)) => { + // the peer 
asked for a list of full operations from the block + + // retain only ops that belong to the block + { + let block_op_ids_set: PreHashSet = + block_op_ids.iter().copied().collect(); + asked_ops.retain(|id| block_op_ids_set.contains(id)); } - }; - all_blocks_info.push((*hash, block_info)); + + // Send the operations that are available in storage + let returned_ops: Vec<_> = { + let op_storage_lock = self.storage.read_operations(); + asked_ops + .into_iter() + .filter_map(|id| op_storage_lock.get(&id)) + .cloned() + .collect() + }; + + // mark the peer as knowing about those operations, + // no need to announce their IDs to them anymore + operation_knowledge_updates.extend( + returned_ops + .iter() + .map(|op| op.id) + .collect::>(), + ); + + BlockInfoReply::Operations(returned_ops) + } + }; + + debug!("Send reply for block info to {}", from_peer_id); + + // send response to peer + if let Err(err) = self.active_connections.send_to_peer( + &from_peer_id, + &self.block_message_serializer, + BlockMessage::DataResponse { + block_id, + block_info: block_info_response, + } + .into(), + true, + ) { + warn!( + "Error while sending reply for blocks to {}: {:?}", + from_peer_id, err + ); + return; } - debug!( - "Send reply for blocks of len {} to {}", - all_blocks_info.len(), - from_peer_id - ); - for sub_list in all_blocks_info.chunks(self.config.max_size_block_infos as usize) { - if let Err(err) = self.active_connections.send_to_peer( + + // here we know that the response was successfully sent to the peer + // so we can update our vision of the peer's knowledge on blocks, operations and endorsements + if !block_knowledge_updates.is_empty() { + self.cache.write().insert_peer_known_block( &from_peer_id, - &self.block_message_serializer, - BlockMessage::ReplyForBlocks(sub_list.to_vec()).into(), + &block_knowledge_updates.into_iter().collect::>(), true, - ) { - warn!( - "Error while sending reply for blocks to {}: {:?}", - from_peer_id, err + ); + } + if 
!operation_knowledge_updates.is_empty() { + self.operation_cache.write().insert_peer_known_ops( + &from_peer_id, + &operation_knowledge_updates + .into_iter() + .map(|op_id| op_id.prefix()) + .collect::>(), + ); + } + if !endorsement_knowledge_updates.is_empty() { + self.endorsement_cache + .write() + .insert_peer_known_endorsements( + &from_peer_id, + &endorsement_knowledge_updates + .into_iter() + .collect::>(), ); - } } - Ok(()) } + /// A peer sent us a response to one of our requests for block data fn on_block_info_received( &mut self, from_peer_id: PeerId, block_id: BlockId, - info: BlockInfoReply, - ) -> Result<(), ProtocolError> { - match info { + block_info: BlockInfoReply, + ) { + match block_info { BlockInfoReply::Header(header) => { - // Verify and Send it consensus - self.on_block_header_received(from_peer_id, block_id, header) + // Verify and send it consensus + self.on_block_header_received(from_peer_id, header); } - BlockInfoReply::Info(operation_list) => { + BlockInfoReply::OperationIds(operation_list) => { // Ask for missing operations ids and print a warning if there is no header for // that block. // Ban the node if the operation ids hash doesn't match with the hash contained in // the block_header. - self.on_block_operation_list_received(from_peer_id, block_id, operation_list) + self.on_block_operation_list_received(from_peer_id, block_id, operation_list); } BlockInfoReply::Operations(operations) => { // Send operations to pool, // before performing the below checks, // and wait for them to have been procesed(i.e. added to storage). - self.on_block_full_operations_received(from_peer_id, block_id, operations) + self.on_block_full_operations_received(from_peer_id, block_id, operations); } BlockInfoReply::NotFound => { - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known( - &from_peer_id, - &[block_id], - false, - Instant::now(), - ); - } - Ok(()) + // The peer doesn't know about the block. Mark it as such. 
+ self.cache + .write() + .insert_peer_known_block(&from_peer_id, &[block_id], false); } } } /// On block header received from a node. - /// If the header is new, we propagate it to the consensus. - /// We pass the state of `block_wishlist` to ask for information about the block. - fn on_block_header_received( - &mut self, - from_peer_id: PeerId, - block_id: BlockId, - header: SecuredHeader, - ) -> Result<(), ProtocolError> { - if let Some(info) = self.block_wishlist.get(&block_id) { - if info.header.is_some() { + fn on_block_header_received(&mut self, from_peer_id: PeerId, header: SecuredHeader) { + let block_id = header.id; + + // Check header and update knowledge info + let is_new = match self.note_header_from_peer(&header, &from_peer_id) { + Ok(is_new) => is_new, + Err(err) => { warn!( - "Peer {} sent us header for block id {} but we already received it.", - from_peer_id, block_id + "peer {} sent us critically incorrect header: {}", + &from_peer_id, err ); - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) { - if asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known( - &from_peer_id, - &[block_id], - false, - Instant::now(), - ); - } - } + if let Err(err) = self.ban_peers(&[from_peer_id.clone()]) { + warn!("Error while banning peer {} err: {:?}", &from_peer_id, err); } - - return Ok(()); - } - } - if let Err(err) = self.note_header_from_peer(&header, &from_peer_id) { - warn!( - "peer {} sent us critically incorrect header through protocol, \ - which may be an attack attempt by the remote node \ - or a loss of sync between us and the remote node. 
Err = {}", - from_peer_id, err - ); - if let Err(err) = self.ban_node(&from_peer_id) { - warn!("Error while banning peer {} err: {:?}", from_peer_id, err); + return; } - return Ok(()); }; + if let Some(info) = self.block_wishlist.get_mut(&block_id) { - info.header = Some(header); - } + // We are actively trying to get this block - // Update ask block - // Maybe this code is useless as it's been done just above but in a condition that should cover all cases where it's useful - // to do this. But maybe it's still trigger there it need verifications. - let mut set = PreHashSet::::with_capacity(1); - set.insert(block_id); - self.remove_asked_blocks_of_node(&set); - Ok(()) + if info.header.is_none() { + // we were looking for the missing header + + // save the header + info.header = Some(header); + + // Clear the list of peers we asked that header for. + // This is done so that update_block_retrieval can prioritize asking the rest of the block data + // to that same peer that just gave us the header, and not exclude the peer + // because we still believe we are actively asking it for stuff. 
+ self.remove_asked_blocks(&[block_id].into_iter().collect()) + } + } else if is_new { + // if not in wishlist, and if the header is new, we send it to consensus + self.consensus_controller + .register_block_header(block_id, header); + } } /// Check if the incoming header network version is compatible with the current node @@ -450,1010 +449,773 @@ impl RetrievalThread { header: &SecuredHeader, ) -> Result<(), ProtocolError> { let slot = header.content.slot; - let ts = get_block_slot_timestamp( + let timestamp = get_block_slot_timestamp( self.config.thread_count, self.config.t0, self.config.genesis_timestamp, slot, )?; - let current_version = self.mip_store.get_network_version_active_at(ts); + + let current_version = self.mip_store.get_network_version_active_at(timestamp); if header.content.current_version != current_version { - // Received a current version different from current version (given by mip store) - Err(ProtocolError::IncompatibleNetworkVersion { + // Received a block version different from current version given by mip store + return Err(ProtocolError::IncompatibleNetworkVersion { local: current_version, received: header.content.current_version, - }) - } else { - if let Some(announced_version) = header.content.announced_version { - if announced_version <= current_version { - // Received an announced network version that is already known - return Err(ProtocolError::OutdatedAnnouncedNetworkVersion { - local: current_version, - announced_received: announced_version, - }); - } - } + }); + } - Ok(()) + if let Some(announced_version) = header.content.announced_version { + if announced_version <= current_version { + // Received an announced network version that is already known + return Err(ProtocolError::OutdatedAnnouncedNetworkVersion { + local: current_version, + announced_received: announced_version, + }); + } } + + Ok(()) } - /// Perform checks on a header, - /// and if valid update the node's view of the world. 
+ /// Performs validity checks on a block header, + /// and if valid update the node's view of its surrounding peers. /// - /// Returns a boolean representing whether the header is new. + /// Returns a boolean indicating whether the header is new. /// /// Does not ban the source node if the header is invalid. /// /// Checks performed on Header: - /// - Not genesis. - /// - Can compute a `BlockId`. - /// - Valid signature. - /// - Absence of duplicate endorsements. - /// - /// Checks performed on endorsements: - /// - Unique indices. - /// - Slot matches that of the block. - /// - Block matches that of the block. + /// - Not genesis + /// - Compatible version + /// - Can compute a `BlockId` + /// - Valid signature + /// - All endorsement are valid + /// - Endorsements have unique indices + /// - Endorsement slots match that of the block + /// - Endorsed blocks match the same-thread parent of the header pub(crate) fn note_header_from_peer( &mut self, header: &SecuredHeader, from_peer_id: &PeerId, - ) -> Result, ProtocolError> { - // TODO: Check if the error is used here ? 
+ ) -> Result { // refuse genesis blocks if header.content.slot.period == 0 || header.content.parents.is_empty() { - return Ok(None); + return Err(ProtocolError::InvalidBlock("block is genesis".to_string())); } + // Check that our node supports the block version self.check_network_version_compatibility(header)?; - // compute ID let block_id = header.id; - // check if this header was already verified + // check if the header has not been seen before (is_new == true) + let is_new; { let mut cache_write = self.cache.write(); - if let Some(block_header) = cache_write.checked_headers.get(&block_id).cloned() { - cache_write.insert_blocks_known(from_peer_id, &[block_id], true, Instant::now()); - cache_write.insert_blocks_known( + is_new = cache_write.checked_headers.get(&block_id).is_none(); + if !is_new { + // the header was previously verified + + // mark the sender peer as knowing the block and its parents + cache_write.insert_peer_known_block( from_peer_id, - &block_header.content.parents, + &[&[block_id], header.content.parents.as_slice()].concat(), true, - Instant::now(), ); - 'write_cache: { - let mut endorsement_cache_write = self.endorsement_cache.write(); - let Ok(endorsement_ids) = endorsement_cache_write - .endorsements_known_by_peer - .get_or_insert(from_peer_id.clone(), || { - LruMap::new(ByLength::new( - self.config - .max_node_known_endorsements_size - .try_into() - .expect("max_node_known_blocks_size in config must be > 0"), - )) - }) - .ok_or(()) else { - warn!("endorsements known by peer limit reached"); - break 'write_cache; - }; - for endorsement_id in block_header.content.endorsements.iter().map(|e| e.id) { - endorsement_ids.insert(endorsement_id, ()); - } - } - return Ok(Some((block_id, false))); } } - if let Err(err) = - self.note_endorsements_from_peer(header.content.endorsements.clone(), from_peer_id) - { - warn!( - "node {} sent us a header containing critically incorrect endorsements: {}", - from_peer_id, err - ); - return Ok(None); + // if the 
header was previously verified, update peer knowledge information and return Ok(false) + if !is_new { + // mark the sender peer as knowing the endorsements in the block + { + let endorsement_ids: Vec<_> = + header.content.endorsements.iter().map(|e| e.id).collect(); + self.endorsement_cache + .write() + .insert_peer_known_endorsements(from_peer_id, &endorsement_ids); + } + + // mark the sender peer as knowing the operations of the block (if we know them) + let opt_block_ops: Option> = + self.storage.read_blocks().get(&block_id).map(|b| { + b.content + .operations + .iter() + .map(|op_id| op_id.prefix()) + .collect() + }); + if let Some(block_ops) = opt_block_ops { + self.operation_cache + .write() + .insert_peer_known_ops(from_peer_id, &block_ops); + } + + // return that we already know that header + return Ok(false); + } + + // check endorsements + if let Err(err) = note_endorsements_from_peer( + header.content.endorsements.clone(), + from_peer_id, + &self.endorsement_cache, + self.selector_controller.as_ref(), + &self.storage, + &self.config, + &self.sender_propagation_endorsements, + self.pool_controller.as_mut(), + ) { + return Err(ProtocolError::InvalidBlock(format!( + "invalid endorsements: {}", + err + ))); }; // check header signature if let Err(err) = header.verify_signature() { - massa_trace!("protocol.protocol_worker.check_header.err_signature", { "header": header, "err": format!("{}", err)}); - return Ok(None); + return Err(ProtocolError::InvalidBlock(format!( + "invalid header signature: {}", + err + ))); }; - // check endorsement in header integrity + // check endorsement integrity within the context of the header let mut used_endorsement_indices: HashSet = HashSet::with_capacity(header.content.endorsements.len()); for endorsement in header.content.endorsements.iter() { // check index reuse if !used_endorsement_indices.insert(endorsement.content.index) { - massa_trace!("protocol.protocol_worker.check_header.err_endorsement_index_reused", { "header": 
header, "endorsement": endorsement}); - return Ok(None); + return Err(ProtocolError::InvalidBlock(format!( + "duplicate endorsement index: {}", + endorsement.content.index + ))); } // check slot if endorsement.content.slot != header.content.slot { - massa_trace!("protocol.protocol_worker.check_header.err_endorsement_invalid_slot", { "header": header, "endorsement": endorsement}); - return Ok(None); + return Err(ProtocolError::InvalidBlock(format!( + "endorsement slot {} does not match header slot: {}", + endorsement.content.slot, header.content.slot + ))); } // check endorsed block if endorsement.content.endorsed_block != header.content.parents[header.content.slot.thread as usize] { - massa_trace!("protocol.protocol_worker.check_header.err_endorsement_invalid_endorsed_block", { "header": header, "endorsement": endorsement}); - return Ok(None); + return Err(ProtocolError::InvalidBlock(format!( + "endorsed block {} does not match header parent: {}", + endorsement.content.endorsed_block, + header.content.parents[header.content.slot.thread as usize] + ))); } } + + // mark the sender peer as knowing the endorsements in the block { - let mut cache_write = self.cache.write(); - cache_write.checked_headers.insert(block_id, header.clone()); - cache_write.insert_blocks_known(from_peer_id, &[block_id], true, Instant::now()); - cache_write.insert_blocks_known( + let endorsement_ids: Vec<_> = + header.content.endorsements.iter().map(|e| e.id).collect(); + self.endorsement_cache + .write() + .insert_peer_known_endorsements(from_peer_id, &endorsement_ids); + } + + { + let mut cache_lock = self.cache.write(); + + // mark the sender peer as knowing the block and its parents + cache_lock.insert_peer_known_block( from_peer_id, - &header.content.parents, + &[&[block_id], header.content.parents.as_slice()].concat(), true, - Instant::now(), ); - 'write_cache: { - let mut endorsement_cache_write = self.endorsement_cache.write(); - let Ok(endorsement_ids) = endorsement_cache_write - 
.endorsements_known_by_peer - .get_or_insert(from_peer_id.clone(), || { - LruMap::new(ByLength::new( - self.config - .max_node_known_endorsements_size - .try_into() - .expect("max_node_known_blocks_size in config must be > 0"), - )) - }) - .ok_or(()) else { - warn!("endorsements_known_by_peer limit reached"); - break 'write_cache; - }; - for endorsement_id in header.content.endorsements.iter().map(|e| e.id) { - endorsement_ids.insert(endorsement_id, ()); - } - } + + // mark us as knowing the header + cache_lock.checked_headers.insert(block_id, header.clone()); } - massa_trace!("protocol.protocol_worker.note_header_from_node.ok", { "node": from_peer_id, "block_id": block_id, "header": header}); - Ok(Some((block_id, true))) + + Ok(true) } /// send a ban peer command to the peer handler - fn ban_node(&mut self, peer_id: &PeerId) -> Result<(), ProtocolError> { - massa_trace!("ban node from retrieval thread", { "peer_id": peer_id.to_string() }); + fn ban_peers(&mut self, peer_ids: &[PeerId]) -> Result<(), ProtocolError> { self.peer_cmd_sender - .try_send(PeerManagementCmd::Ban(vec![peer_id.clone()])) + .try_send(PeerManagementCmd::Ban(peer_ids.to_vec())) .map_err(|err| ProtocolError::SendError(err.to_string())) } /// Remove the given blocks from the local wishlist - pub(crate) fn remove_asked_blocks_of_node(&mut self, remove_hashes: &PreHashSet) { - massa_trace!("protocol.protocol_worker.remove_asked_blocks_of_node", { - "remove": remove_hashes - }); + pub(crate) fn remove_asked_blocks(&mut self, remove_hashes: &PreHashSet) { for asked_blocks in self.asked_blocks.values_mut() { - asked_blocks.retain(|h, _| !remove_hashes.contains(h)); - } - } - - /// Note endorsements coming from a given node, - /// and propagate them when they were received outside of a header. - /// - /// Caches knowledge of valid ones. - /// - /// Does not ban if the endorsement is invalid - /// - /// Checks performed: - /// - Valid signature. 
- pub(crate) fn note_endorsements_from_peer( - &mut self, - endorsements: Vec, - from_peer_id: &PeerId, - ) -> Result<(), ProtocolError> { - massa_trace!("protocol.protocol_worker.note_endorsements_from_node", { "node": from_peer_id, "endorsements": endorsements}); - let length = endorsements.len(); - let mut new_endorsements = PreHashMap::with_capacity(length); - let mut endorsement_ids = PreHashSet::with_capacity(length); - for endorsement in endorsements.into_iter() { - let endorsement_id = endorsement.id; - endorsement_ids.insert(endorsement_id); - // check endorsement signature if not already checked - { - let read_cache = self.endorsement_cache.read(); - if read_cache - .checked_endorsements - .peek(&endorsement_id) - .is_none() - { - new_endorsements.insert(endorsement_id, endorsement); - } + for remove_h in remove_hashes { + asked_blocks.remove(remove_h); } } + } - // Batch signature verification - // optimized signature verification - verify_sigs_batch( - &new_endorsements - .values() - .map(|endorsement| { - ( - endorsement.compute_signed_hash(), - endorsement.signature, - endorsement.content_creator_pub_key, - ) - }) - .collect::>(), - )?; - - // Check PoS draws - for endorsement in new_endorsements.values() { - let selection = self - .selector_controller - .get_selection(endorsement.content.slot)?; - let Some(address) = selection.endorsements.get(endorsement.content.index as usize) else { - return Err(ProtocolError::GeneralProtocolError( - format!( - "No selection on slot {} for index {}", - endorsement.content.slot, endorsement.content.index - ) - )) - }; - if address != &endorsement.content_creator_address { - return Err(ProtocolError::GeneralProtocolError(format!( - "Invalid endorsement: expected address {}, got {}", - address, endorsement.content_creator_address - ))); + /// Mark a block as invalid + fn mark_block_as_invalid(&mut self, block_id: &BlockId) { + // stop retrieving the block + if let Some(wishlist_info) = 
self.block_wishlist.remove(block_id) { + if let Some(header) = wishlist_info.header { + // notify consensus that the block is invalid + self.consensus_controller + .mark_invalid_block(*block_id, header); } } - 'write_cache: { - let mut cache_write = self.endorsement_cache.write(); - // add to verified signature cache - for endorsement_id in endorsement_ids.iter() { - cache_write.checked_endorsements.insert(*endorsement_id, ()); - } - // add to known endorsements for source node. - let Ok(endorsements) = cache_write - .endorsements_known_by_peer - .get_or_insert(from_peer_id.clone(), || { - LruMap::new(ByLength::new( - self.config - .max_node_known_endorsements_size - .try_into() - .expect("max_node_known_endorsements_size in config should be > 0"), - )) - }) - .ok_or(()) else { - warn!("endorsements_known_by_peer limit reached"); - break 'write_cache; - }; - for endorsement_id in endorsement_ids.iter() { - endorsements.insert(*endorsement_id, ()); + // ban all peers that know about this block + let mut peers_to_ban = Vec::new(); + { + let cache_read = self.cache.read(); + for (peer_id, peer_known_blocks) in cache_read.blocks_known_by_peer.iter() { + if peer_known_blocks.peek(block_id).is_some() { + peers_to_ban.push(peer_id.clone()); + } } } - - if !new_endorsements.is_empty() { - let mut endorsements = self.storage.clone_without_refs(); - endorsements.store_endorsements(new_endorsements.into_values().collect()); - - // Propagate endorsements - // Propagate endorsements when the slot of the block they endorse isn't `max_endorsements_propagation_time` old. 
- let mut endorsements_to_propagate = endorsements.clone(); - let endorsements_to_not_propagate = { - let now = MassaTime::now()?; - let read_endorsements = endorsements_to_propagate.read_endorsements(); - endorsements_to_propagate - .get_endorsement_refs() - .iter() - .filter_map(|endorsement_id| { - let slot_endorsed_block = - read_endorsements.get(endorsement_id).unwrap().content.slot; - let slot_timestamp = get_block_slot_timestamp( - self.config.thread_count, - self.config.t0, - self.config.genesis_timestamp, - slot_endorsed_block, - ); - match slot_timestamp { - Ok(slot_timestamp) => { - if slot_timestamp - .saturating_add(self.config.max_endorsements_propagation_time) - < now - { - Some(*endorsement_id) - } else { - None - } - } - Err(_) => Some(*endorsement_id), - } - }) - .collect() - }; - endorsements_to_propagate.drop_endorsement_refs(&endorsements_to_not_propagate); - if let Err(err) = self.sender_propagation_endorsements.try_send( - EndorsementHandlerPropagationCommand::PropagateEndorsements( - endorsements_to_propagate, - ), - ) { - warn!("Failed to send from block retrieval thread of endorsement handler to propagation: {:?}", err); + if !peers_to_ban.is_empty() { + if let Err(err) = self.ban_peers(&peers_to_ban) { + warn!( + "Error while banning peers {:?} err: {:?}", + peers_to_ban, err + ); } - // Add to pool - self.pool_controller.add_endorsements(endorsements); } - Ok(()) + // clear retrieval cache + self.remove_asked_blocks(&[*block_id].into_iter().collect()); } - /// On block information received, manage when we get a list of operations. - /// Ask for the missing operations that are not in the `checked_operations` cache variable. - /// - /// # Ban - /// Start compute the operations serialized total size with the operation we know. - /// Ban the node if the operations contained in the block overflow the max size. We don't - /// forward the block to the consensus in that case. + /// We received a list of operations for a block. 
/// /// # Parameters: /// - `from_peer_id`: Node which sent us the information. /// - `BlockId`: ID of the related operations we received. - /// - `operation_ids`: IDs of the operations contained by the block. - /// - /// # Result - /// return an error if stopping asking block failed. The error should be forwarded at the - /// root. todo: check if if make panic. + /// - `operation_ids`: IDs of the operations contained by the block, ordered and can contain duplicates. fn on_block_operation_list_received( &mut self, from_peer_id: PeerId, block_id: BlockId, operation_ids: Vec, - ) -> Result<(), ProtocolError> { - // All operation ids sent into a set + ) { + // Note that the length of the operation list was checked at deserialization to not overflow the max per block. + + // All operation ids sent into a set to deduplicate and search quickly for presence let operation_ids_set: PreHashSet = operation_ids.iter().cloned().collect(); - // add to known ops - { - let mut cache_write = self.operation_cache.write(); - let known_ops = cache_write - .ops_known_by_peer - .entry(from_peer_id.clone()) - .or_insert_with(|| { - LruMap::new(ByLength::new( - self.config - .max_node_known_ops_size - .try_into() - .expect("max_node_known_ops_size in config must fit in u32"), - )) - }); - for op_id in operation_ids_set.iter() { - known_ops.insert(op_id.prefix(), ()); - } - } - let info = if let Some(info) = self.block_wishlist.get_mut(&block_id) { + // mark the sender node as knowing those ops + self.operation_cache.write().insert_peer_known_ops( + &from_peer_id, + &operation_ids_set + .iter() + .map(|op_id| op_id.prefix()) + .collect::>(), + ); + + // check if we were looking to retrieve the list of ops for that block + let wishlist_info = if let Some(info) = self.block_wishlist.get_mut(&block_id) && info.header.is_some() && info.operation_ids.is_none() { + // we were actively looking for this data info } else { - warn!( - "Peer {} sent us an operation list but we don't have block id {} 
in our wishlist.", - from_peer_id, block_id + // we were not actively looking for that data, but mark the remote node as knowing the block + debug!("peer {} sent us a list of operation IDs for block id {} but we were not looking for it", from_peer_id, block_id); + self.cache.write().insert_peer_known_block( + &from_peer_id, + &[block_id], + true ); - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) && asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known(&from_peer_id, &[block_id], false, Instant::now()); - } - } - return Ok(()); + return; }; - let header = if let Some(header) = &info.header { - header - } else { - warn!("Peer {} sent us an operation list but we don't have receive the header of block id {} yet.", from_peer_id, block_id); - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) && asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known(&from_peer_id, &[block_id], false, Instant::now()); - } - } - return Ok(()); - }; - if info.operation_ids.is_some() { - warn!( - "Peer {} sent us an operation list for block id {} but we already received it.", - from_peer_id, block_id - ); - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) && asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known(&from_peer_id, &[block_id], false, Instant::now()); - } - } - return Ok(()); - } - let mut total_hash: Vec = Vec::new(); - let op_id_serializer = OperationIdSerializer::new(); - for op_id in operation_ids.iter() { - op_id_serializer - .serialize(op_id, &mut total_hash) - .map_err(|err| ProtocolError::GeneralProtocolError(err.to_string()))?; - } - - // Check operation_list against expected operations hash from header. 
- if header.content.operation_merkle_root == Hash::compute_from(&total_hash) { - if operation_ids.len() > self.config.max_operations_per_block as usize { - warn!("Peer id {} sent us an operations list for block id {} that contains more operations than the max allowed for a block.", from_peer_id, block_id); - if let Err(err) = self.ban_node(&from_peer_id) { - warn!("Error while banning peer {} err: {:?}", from_peer_id, err); - } - return Ok(()); - } - - // Add the ops of info. - info.operation_ids = Some(operation_ids.clone()); - let known_operations = info.storage.claim_operation_refs(&operation_ids_set); - // get the total size of known ops - info.operations_size = - Self::get_total_operations_size(&self.storage, &known_operations); - - // mark ops as checked - { - let mut cache_ops_write = self.operation_cache.write(); - for operation_id in known_operations.iter() { - cache_ops_write.insert_checked_operation(*operation_id); - } + // check that the hash of the received operations list matches the one in the header + let computed_operations_hash = + compute_operations_hash(&operation_ids, &self.operation_id_serializer); + + if wishlist_info + .header + .as_ref() + .expect("header presence in wishlist should have been checked above") + .content + .operation_merkle_root + != computed_operations_hash + { + warn!("Peer id {} sent us a operation list for block id {} but the hash in the header doesn't match.", from_peer_id, block_id); + if let Err(err) = self.ban_peers(&[from_peer_id.clone()]) { + warn!("Error while banning peer {} err: {:?}", from_peer_id, err); } + return; + } - if info.operations_size > self.config.max_serialized_operations_size_per_block { - warn!("Peer id {} sent us a operation list for block id {} but the operations we already have in our records exceed max size.", from_peer_id, block_id); - if let Err(err) = self.ban_node(&from_peer_id) { - warn!("Error while banning peer {} err: {:?}", from_peer_id, err); - } - return Ok(()); - } + // Mark the 
sender as knowing this block + self.cache + .write() + .insert_peer_known_block(&from_peer_id, &[block_id], true); - // Update ask block - let mut set = PreHashSet::::with_capacity(1); - set.insert(block_id); - self.remove_asked_blocks_of_node(&set); + // Save the received operation ID list to the wishlist + wishlist_info.operation_ids = Some(operation_ids); - // If the block is empty, go straight to processing the full block info. - if operation_ids.is_empty() { - return self.on_block_full_operations_received( - from_peer_id, - block_id, - Default::default(), - ); - } - } else { - warn!("Peer id {} sent us a operation list for block id {} but the hash in header doesn't match.", from_peer_id, block_id); - if let Err(err) = self.ban_node(&from_peer_id) { - warn!("Error while banning peer {} err: {:?}", from_peer_id, err); - } - } - Ok(()) + // free up all the nodes that we asked for that operation list + self.remove_asked_blocks(&[block_id].into_iter().collect()); } - /// Return the sum of all operation's serialized sizes in the `Set` - fn get_total_operations_size( - storage: &Storage, - operation_ids: &PreHashSet, - ) -> usize { - let op_reader = storage.read_operations(); - let mut total: usize = 0; - operation_ids.iter().for_each(|id| { - if let Some(op) = op_reader.get(id) { - total = total.saturating_add(op.serialized_size()); - } - }); - total + + /// Return the sum of all operation's serialized sizes in the id list + fn get_total_operations_size(storage: &Storage, operation_ids: &[OperationId]) -> usize { + let op_read_lock = storage.read_operations(); + operation_ids + .iter() + .filter_map(|id| op_read_lock.get(id)) + .map(|op| op.serialized_size()) + .sum() } - /// Checks full block operations that we asked. (Because their was missing in the - /// `checked_operations` cache variable, refer to `on_block_operation_list_received`) - /// - /// # Ban - /// Ban the node if it doesn't fill the requirement. 
Forward to the graph with a - /// `ProtocolEvent::ReceivedBlock` if the operations are under a max size. - /// - /// - thread incorrect for an operation - /// - wanted operations doesn't match - /// - duplicated operation - /// - full operations serialized size overflow - /// - /// We received these operation because we asked for the missing operation + /// We received the full operations of a block. fn on_block_full_operations_received( &mut self, from_peer_id: PeerId, block_id: BlockId, - mut operations: Vec, - ) -> Result<(), ProtocolError> { - if let Err(err) = self.note_operations_from_peer(operations.clone(), &from_peer_id) { - warn!( - "Peer id {} sent us operations for block id {} but they failed at verifications. Err = {}", - from_peer_id, block_id, err + operations: Vec, + ) { + // Ensure that we were looking for that data. + let wishlist_info = if let Some(wishlist_info) = self.block_wishlist.get_mut(&block_id) && wishlist_info.header.is_some() && wishlist_info.operation_ids.is_some() { + wishlist_info + } else { + // we were not looking for this data + debug!("Peer id {} sent us full operations for block id {} but we were not looking for it", from_peer_id, block_id); + // still mark the sender as knowing the block and operations + self.cache.write().insert_peer_known_block( + &from_peer_id, + &[block_id], + true ); - if let Err(err) = self.ban_node(&from_peer_id) { - warn!("Error while banning peer {} err: {:?}", from_peer_id, err); - } - return Ok(()); - } - match self.block_wishlist.entry(block_id) { - Entry::Occupied(mut entry) => { - let info = entry.get_mut(); - let header = if let Some(header) = &info.header { - header.clone() - } else { - warn!("Peer {} sent us full operations but we don't have receive the header of block id {} yet.", from_peer_id, block_id); - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) && asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = 
self.cache.write(); - cache_write.insert_blocks_known(&from_peer_id, &[block_id], false, Instant::now()); - } - } - return Ok(()); - }; - let block_operation_ids = if let Some(operations) = &info.operation_ids { - operations - } else { - warn!("Peer id {} sent us full operations but we don't have received the operation list of block id {} yet.", from_peer_id, block_id); - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) && asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known(&from_peer_id, &[block_id], false, Instant::now()); - } - } - return Ok(()); - }; - let block_ids_set: PreHashSet = - block_operation_ids.iter().copied().collect(); - operations.retain(|op| block_ids_set.contains(&op.id)); - // add operations to local storage and claim ref - info.storage.store_operations(operations); - let known_operations = info.storage.get_op_refs(); - // Ban the node if: - // - mismatch with asked operations (asked operations are the one that are not in storage) + operations already in storage and block operations - // - full operations serialized size overflow - let full_op_size: usize = { - let stored_operations = info.storage.read_operations(); - known_operations - .iter() - .map(|id| stored_operations.get(id).unwrap().serialized_size()) - .sum() - }; - if full_op_size > self.config.max_serialized_operations_size_per_block { - warn!("Peer id {} sent us full operations for block id {} but they exceed max size.", from_peer_id, block_id); - if let Err(err) = self.ban_node(&from_peer_id) { - warn!("Error while banning peer {} err: {:?}", from_peer_id, err); - } - self.block_wishlist.remove(&block_id); - self.consensus_controller - .mark_invalid_block(block_id, header); - } else { - if known_operations != &block_ids_set { - warn!( - "Peer id {} didn't sent us all the full operations for block id {}.", - from_peer_id, block_id - ); + 
self.operation_cache.write().insert_peer_known_ops( + &from_peer_id, + &operations + .into_iter() + .map(|op| op.id.prefix()) + .collect::>(), + ); + return; + }; - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) && asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known(&from_peer_id, &[block_id], false, Instant::now()); - } - } - return Ok(()); - } + // Move the ops into a hashmap + let mut operations: PreHashMap = + operations.into_iter().map(|op| (op.id, op)).collect(); - // Re-constitute block. - let block = Block { - header: header.clone(), - operations: block_operation_ids.clone(), - }; - - let mut content_serialized = Vec::new(); - BlockSerializer::new() // todo : keep the serializer in the struct to avoid recreating it - .serialize(&block, &mut content_serialized) - .unwrap(); - - // wrap block - let signed_block = SecureShare { - signature: header.signature, - content_creator_pub_key: header.content_creator_pub_key, - content_creator_address: header.content_creator_address, - id: block_id, - content: block, - serialized_data: content_serialized, - }; - - // create block storage (without parents) - let mut block_storage = entry.remove().storage; - // add endorsements to local storage and claim ref - // TODO change this if we make endorsements separate from block header - block_storage.store_endorsements( - signed_block.content.header.content.endorsements.clone(), - ); - let slot = signed_block.content.header.content.slot; - // add block to local storage and claim ref - block_storage.store_block(signed_block); + // Make a set of all the block ops for fast lookup and deduplication + let block_ops_set = wishlist_info + .operation_ids + .as_ref() + .expect("operation_ids presence in wishlist should have been checked above") + .iter() + .copied() + .collect::>(); - // Send to consensus - self.consensus_controller - .register_block(block_id, 
slot, block_storage, false); - } - } - Entry::Vacant(_) => { - warn!("Peer {} sent us full operations but we don't have the block id {} in our wishlist.", from_peer_id, block_id); - if let Some(asked_blocks) = self.asked_blocks.get_mut(&from_peer_id) && asked_blocks.contains_key(&block_id) { - asked_blocks.remove(&block_id); - { - let mut cache_write = self.cache.write(); - cache_write.insert_blocks_known(&from_peer_id, &[block_id], false, Instant::now()); - } - } - return Ok(()); - } - }; + // claim the ops that we might have received in the meantime + wishlist_info.storage.claim_operation_refs(&block_ops_set); - // Update ask block - let remove_hashes = vec![block_id].into_iter().collect(); - self.remove_asked_blocks_of_node(&remove_hashes); - Ok(()) - } - - fn note_operations_from_peer( - &mut self, - operations: Vec, - source_peer_id: &PeerId, - ) -> Result<(), ProtocolError> { - massa_trace!("protocol.protocol_worker.note_operations_from_peer", { "peer": source_peer_id, "operations": operations }); - let now = MassaTime::now().expect("could not get current time"); - - let mut new_operations = PreHashMap::with_capacity(operations.len()); - for operation in operations { - // ignore if op is too old - let expire_period_timestamp = get_block_slot_timestamp( - self.config.thread_count, - self.config.t0, - self.config.genesis_timestamp, - Slot::new( - operation.content.expire_period, - operation - .content_creator_address - .get_thread(self.config.thread_count), - ), - ); - match expire_period_timestamp { - Ok(slot_timestamp) => { - if slot_timestamp.saturating_add(self.config.max_operations_propagation_time) - < now - { - continue; - } + { + // filter out operations that we don't want or already know about + let mut dropped_ops: PreHashSet = Default::default(); + operations.retain(|op_id, _| { + if !block_ops_set.contains(op_id) + || wishlist_info.storage.get_op_refs().contains(op_id) + { + dropped_ops.insert(*op_id); + return false; } - Err(_) => continue, - } - - 
// quit if op is too big - if operation.serialized_size() > self.config.max_serialized_operations_size_per_block { - return Err(ProtocolError::InvalidOperationError(format!( - "Operation {} exceeds max block size, maximum authorized {} bytes but found {} bytes", - operation.id, - operation.serialized_size(), - self.config.max_serialized_operations_size_per_block - ))); - }; + true + }); - // add to new operations - new_operations.insert(operation.id, operation); + // mark sender as knowing the dropped_ops + self.operation_cache.write().insert_peer_known_ops( + &from_peer_id, + &dropped_ops + .into_iter() + .map(|op_id| op_id.prefix()) + .collect::>(), + ); } - - // all valid received ids (not only new ones) for knowledge marking - let all_received_ids: PreHashSet<_> = new_operations.keys().copied().collect(); - - // retain only new ops that are not already known - { - let cache_read = self.operation_cache.read(); - new_operations.retain(|op_id, _| cache_read.checked_operations.peek(op_id).is_none()); + if operations.is_empty() { + // we have most likely eliminated all the received operations in the filtering above + return; } - // optimized signature verification - verify_sigs_batch( - &new_operations - .iter() - .map(|(op_id, op)| (*op_id.get_hash(), op.signature, op.content_creator_pub_key)) - .collect::>(), - )?; - - { - // add to checked operations - let mut cache_write = self.operation_cache.write(); - - // add checked operations - for op_id in new_operations.keys().copied() { - cache_write.insert_checked_operation(op_id); - } - - // add to known ops - let known_ops = cache_write - .ops_known_by_peer - .entry(source_peer_id.clone()) - .or_insert_with(|| { - LruMap::new(ByLength::new( - self.config - .max_node_known_ops_size - .try_into() - .expect("max_node_known_ops_size in config must be > 0"), - )) - }); - for id in all_received_ids { - known_ops.insert(id.prefix(), ()); + // Here we know that we were looking for that block's operations and that the sender 
node sent us some of the missing ones. + + // Check the validity of the received operations. + // TODO: in the future if the validiy check fails for something non-malleable (eg. not sig verif), + // we should stop retrieving the block and ban everyone who knows it + // because we know for sure that this op's ID belongs to the block. + if let Err(err) = note_operations_from_peer( + &self.storage, + &mut self.operation_cache, + &self.config, + operations.values().cloned().collect(), + &from_peer_id, + &mut self.sender_propagation_ops, + &mut self.pool_controller, + ) { + warn!( + "Peer id {} sent us operations for block id {} but they failed validity checks: {}", + from_peer_id, block_id, err + ); + if let Err(err) = self.ban_peers(&[from_peer_id.clone()]) { + warn!("Error while banning peer {} err: {:?}", from_peer_id, err); } + return; } - if !new_operations.is_empty() { - // Store new operations, claim locally - let mut ops = self.storage.clone_without_refs(); - ops.store_operations(new_operations.into_values().collect()); + // add received operations to local storage and claim ref + wishlist_info + .storage + .store_operations(operations.into_values().collect()); - self.sender_propagation_ops - .try_send(OperationHandlerPropagationCommand::PropagateOperations( - ops.clone(), - )) - .map_err(|err| ProtocolError::SendError(err.to_string()))?; + if wishlist_info.storage.get_op_refs().len() == block_ops_set.len() { + // if we gathered all the ops, we should delete the asked history and mark the sender as knowing the block + self.remove_asked_blocks(&[block_id].into_iter().collect()); - // Add to pool - self.pool_controller.add_operations(ops); - } + // Mark the sender as knowing this block + self.cache + .write() + .insert_peer_known_block(&from_peer_id, &[block_id], true); + } else { + // otherwise, we should remove the current peer ask only and mark it as not knowing the block + // because it did not send us everything + if let Some(asked) = 
self.asked_blocks.get_mut(&from_peer_id) { + asked.remove(&block_id); + } - Ok(()) + // Mark the sender as not knowing this block + self.cache + .write() + .insert_peer_known_block(&from_peer_id, &[block_id], false); + } } - pub(crate) fn update_ask_block(&mut self) -> Result<(), ProtocolError> { - massa_trace!("protocol.protocol_worker.update_ask_block.begin", {}); - let now = Instant::now(); + /// function that updates the global state of block retrieval + pub(crate) fn update_block_retrieval(&mut self) { + let ask_block_timeout = self.config.ask_block_timeout.to_duration(); - // init timer + // Init timer for next tick + let now = Instant::now(); let mut next_tick = now .checked_add(self.config.ask_block_timeout.into()) - .ok_or(TimeError::TimeOverflowError)?; + .ok_or(TimeError::TimeOverflowError) + .expect("could not compute next block retrieval timer tick"); - // list blocks to re-ask and gather candidate nodes to ask from - let mut candidate_nodes: PreHashMap> = Default::default(); - let mut ask_block_list: HashMap> = - Default::default(); + // Get conencted peer list + let connected_peers = self.active_connections.get_peer_ids_connected(); - // list blocks to re-ask and from whom - { - let mut cache_write = self.cache.write(); - for (hash, block_info) in self.block_wishlist.iter() { - let required_info = if block_info.header.is_none() { - AskForBlocksInfo::Header - } else if block_info.operation_ids.is_none() { - AskForBlocksInfo::Info + // Update cache + self.cache.write().update_cache(&connected_peers); + + // Cleanup asked_blocks from all disconnected peers and blocks that are not in the wishlist anymore. 
+ self.asked_blocks.retain(|peer_id, asked_blocks| { + if !connected_peers.contains(peer_id) { + return false; + } + asked_blocks.retain(|block_id, _| self.block_wishlist.contains_key(block_id)); + !asked_blocks.is_empty() + }); + + // list of blocks that need to be asked + let mut to_ask: PreHashSet = self.block_wishlist.keys().copied().collect(); + // the number of things already being asked to those peers + let mut peer_loads: HashMap = Default::default(); + for (peer_id, asked_blocks) in &mut self.asked_blocks { + // init the list of items to remove from asked_blocks + let mut to_remove_from_asked_blocks = Vec::new(); + for (block_id, ask_time) in asked_blocks.iter() { + let expiry = ask_time + .checked_add(ask_block_timeout) + .expect("could not compute block ask expiry"); + if expiry <= now { + // the block has been asked for the block data a long time agp and did not respond + + // we mark this peer as not knowing this block + self.cache + .write() + .insert_peer_known_block(peer_id, &[*block_id], false); + + // We mark the block for removal from the asked_blocks list. + // This prevents us from re-detecting the timeout many times. 
+ to_remove_from_asked_blocks.push(*block_id); } else { - let already_stored_operations = block_info.storage.get_op_refs(); - // Unwrap safety: Check if `operation_ids` is none just above - AskForBlocksInfo::Operations( - block_info - .operation_ids - .as_ref() - .unwrap() - .iter() - .filter(|id| !already_stored_operations.contains(id)) - .copied() - .collect(), - ) - }; - let mut needs_ask = true; - - let peers_connected = self.active_connections.get_peer_ids_connected(); - cache_write.update_cache( - peers_connected.clone(), - self.config - .max_node_known_blocks_size - .try_into() - .expect("max_node_known_blocks_size is too big"), - ); - let peers_in_asked_blocks: Vec = - self.asked_blocks.keys().cloned().collect(); - for peer_id in peers_in_asked_blocks { - if !peers_connected.contains(&peer_id) { - self.asked_blocks.remove(&peer_id); - } + // this block was recently asked to this peer: no need to ask for the block for now + + to_ask.remove(block_id); + + // mark this peer as loaded with an angoing ask + peer_loads + .entry(peer_id.clone()) + .and_modify(|v| *v += 1) + .or_insert(1); + + // update next tick + next_tick = next_tick.min(expiry); } - for peer_id in peers_connected { - if !self.asked_blocks.contains_key(&peer_id) { - self.asked_blocks - .insert(peer_id.clone(), PreHashMap::default()); + } + // remove the blocks marked for removal from asked_blocks + for remove_id in to_remove_from_asked_blocks { + asked_blocks.remove(&remove_id); + } + } + + // for each block to ask, choose a peer to ask it from and perform the ask + let mut to_ask = to_ask.into_iter().collect::>(); + to_ask.shuffle(&mut thread_rng()); // shuffle ask order + for block_id in to_ask { + // prioritize peers by (max knowledge, min knowledge age, min load, max random) + let mut peer_scores: Vec<_> = connected_peers + .iter() + .filter_map(|peer_id| { + // Get the peer load. Look for the minimum score for asking. 
+ let peer_load = peer_loads.get(peer_id).copied().unwrap_or_default(); + if peer_load >= self.config.max_simultaneous_ask_blocks_per_node { + // this peer is already loaded with too many asks + return None; } - } - let all_keys: Vec = cache_write - .blocks_known_by_peer - .iter() - .map(|(k, _)| k) - .cloned() - .collect(); - for peer_id in all_keys.iter() { - // for (peer_id, (blocks_known, _)) in cache_write.blocks_known_by_peer.iter() { - let (blocks_known, _) = - cache_write.blocks_known_by_peer.peek_mut(peer_id).unwrap(); - // map to remove the borrow on asked_blocks. Otherwise can't call insert_known_blocks - let ask_time_opt = self - .asked_blocks + // get peer knowledge info about that block + let peer_knowledge_of_block = self + .cache + .read() + .blocks_known_by_peer .get(peer_id) - .and_then(|asked_blocks| asked_blocks.get(hash).copied()); - let (timeout_at_opt, timed_out) = if let Some(ask_time) = ask_time_opt { - let t = ask_time - .checked_add(self.config.ask_block_timeout.into()) - .ok_or(TimeError::TimeOverflowError)?; - (Some(t), t <= now) - } else { - (None, false) - }; - let knows_block = blocks_known.get(hash); - - // check if the peer recently told us it doesn't have the block - if let Some((false, info_time)) = knows_block { - let info_expires = info_time - .checked_add(self.config.ask_block_timeout.into()) - .ok_or(TimeError::TimeOverflowError)?; - if info_expires > now { - next_tick = std::cmp::min(next_tick, info_expires); - continue; // ignore candidate peer - } - } - - let candidate = match (timed_out, timeout_at_opt, knows_block) { - // not asked yet - (_, None, knowledge) => match knowledge { - Some((true, _)) => (0u8, None), - None => (1u8, None), - Some((false, _)) => (2u8, None), - }, - // not timed out yet (note: recent DONTHAVBLOCK checked before the match) - (false, Some(timeout_at), _) => { - next_tick = std::cmp::min(next_tick, timeout_at); - needs_ask = false; // no need to re ask - continue; // not a candidate - } - // timed 
out, supposed to have it - (true, Some(mut timeout_at), Some((true, info_time))) => { - if info_time < &mut timeout_at { - // info less recent than timeout: mark as not having it - blocks_known.insert(*hash, (false, timeout_at)); - (2u8, ask_time_opt) - } else { - // told us it has it after a timeout: good candidate again - (0u8, ask_time_opt) - } + .and_then(|blocks_known| blocks_known.peek(&block_id).copied()); + match peer_knowledge_of_block { + Some((false, info_t)) => { + // we think that the peer doesn't know the block + Some(( + 1i8, // worst knowledge + Some(-(now.saturating_duration_since(info_t).as_millis() as i64)), // the older the info the better + peer_load, // the lower the load the better + thread_rng().gen::(), // random tie breaker, + peer_id.clone(), + )) } - // timed out, supposed to not have it - (true, Some(mut timeout_at), Some((false, info_time))) => { - if info_time < &mut timeout_at { - // info less recent than timeout: update info time - blocks_known.insert(*hash, (false, timeout_at)); - } - (2u8, ask_time_opt) + None => { + // we don't know if the peer knows the block + Some(( + 0i8, // medium knowledge + None, // N/A + peer_load, // the lower the load the better + thread_rng().gen::(), // random tie breaker, + peer_id.clone(), + )) } - // timed out but don't know if has it: mark as not having it - (true, Some(timeout_at), None) => { - blocks_known.insert(*hash, (false, timeout_at)); - (2u8, ask_time_opt) + Some((true, info_t)) => { + // we think that the peer knows the block + Some(( + -1i8, // best knowledge + Some(now.saturating_duration_since(info_t).as_millis() as i64), // the newer the info the better + peer_load, // the lower the load the better + thread_rng().gen::(), // random tie breaker, + peer_id.clone(), + )) } - }; - - // add candidate peer - candidate_nodes.entry(*hash).or_insert_with(Vec::new).push(( - candidate, - peer_id.clone(), - required_info.clone(), - )); + } + }) + .collect(); + + // sort peers from best to worst 
to ask + peer_scores.sort_unstable(); + + // get wishlist info to deduce message to send + let wishlist_info = self + .block_wishlist + .get_mut(&block_id) + .expect("block presence in wishlist should have been checked above"); + let request = match ( + wishlist_info.header.is_some(), + wishlist_info.operation_ids.is_some(), + ) { + // ask for header + (false, false) => AskForBlockInfo::Header, + // ask for the list of operation IDs in the block + (true, false) => AskForBlockInfo::OperationIds, + // ask for missing operations in the block + (true, true) => { + // gather missing block operations and perform necessary followups + match self.gather_missing_block_ops(&block_id) { + Some(ops) => AskForBlockInfo::Operations(ops), + None => continue, + } } + _ => panic!("invalid wishlist state"), + }; - // remove if doesn't need to be asked - if !needs_ask { - candidate_nodes.remove(hash); + // try to ask peers from best to worst + for (_, _, _, _, peer_id) in peer_scores { + debug!("Send ask for block {} to {}", block_id, peer_id); + if let Err(err) = self.active_connections.send_to_peer( + &peer_id, + &self.block_message_serializer, + Message::Block(Box::new(BlockMessage::DataRequest { + block_id, + block_info: request.clone(), + })), + true, + ) { + warn!( + "Failed to send BlockDataRequest to peer {} err: {}", + peer_id, err + ); + } else { + // The request was sent. + + // Update the asked_blocks list + self.asked_blocks + .entry(peer_id.clone()) + .or_insert_with(Default::default) + .insert(block_id, now); + + // Increment the load of the peer. + peer_loads + .entry(peer_id) + .and_modify(|v| *v += 1) + .or_insert(1); + + // No need to look for other peers. 
+ break; } } } - // count active block requests per node - let mut active_block_req_count: HashMap = self - .asked_blocks - .iter() - .map(|(peer_id, blocks)| { - ( - peer_id.clone(), - blocks - .iter() - .filter(|(_h, ask_t)| { - ask_t - .checked_add(self.config.ask_block_timeout.into()) - .map_or(false, |timeout_t| timeout_t > now) - }) - .count(), - ) - }) - .collect(); - { - let cache_read = self.cache.read(); - for (hash, criteria) in candidate_nodes.into_iter() { - // find the best node - if let Some((_knowledge, best_node, required_info, _)) = criteria - .into_iter() - .filter_map(|(knowledge, peer_id, required_info)| { - // filter out nodes with too many active block requests - if *active_block_req_count.get(&peer_id).unwrap_or(&0) - <= self.config.max_simultaneous_ask_blocks_per_node - { - cache_read - .blocks_known_by_peer - .peek(&peer_id) - .map(|peer_data| (knowledge, peer_id, required_info, peer_data.1)) - } else { - None - } - }) - .min_by_key(|(knowledge, peer_id, _, instant)| { - ( - *knowledge, // block knowledge - *active_block_req_count.get(peer_id).unwrap_or(&0), // active requests - *instant, // node age - peer_id.clone(), // node ID - ) - }) - { - let asked_blocks = self.asked_blocks.get_mut(&best_node).unwrap(); // will not panic, already checked - asked_blocks.insert(hash, now); - if let Some(cnt) = active_block_req_count.get_mut(&best_node) { - *cnt += 1; // increase the number of actively asked blocks - } - - ask_block_list - .entry(best_node.clone()) - .or_insert_with(Vec::new) - .push((hash, required_info.clone())); + // Update timer + self.next_timer_ask_block = next_tick; + } - let timeout_at = now - .checked_add(self.config.ask_block_timeout.into()) - .ok_or(TimeError::TimeOverflowError)?; - next_tick = std::cmp::min(next_tick, timeout_at); + // Gather all missing block operations. 
+ // Returns Some(ops) if there are missing ops to gather + fn gather_missing_block_ops(&mut self, block_id: &BlockId) -> Option> { + // Get wishlist data + let wishlist_info = match self.block_wishlist.get_mut(block_id) { + // Wishlist data found + Some(block_info) => { + if block_info.header.is_none() || block_info.operation_ids.is_none() { + // Header or operation IDs not retrieved => cannot gather ops yet + return None; } + block_info } - } + // Wishlist data not found => nothing to do + None => return None, + }; - // send AskBlockEvents - if !ask_block_list.is_empty() { - for (peer_id, list) in ask_block_list.iter() { - for sub_list in list.chunks(self.config.max_size_block_infos as usize) { - debug!("Send ask for blocks of len {} to {}", list.len(), peer_id); - if let Err(err) = self.active_connections.send_to_peer( - peer_id, - &self.block_message_serializer, - BlockMessage::AskForBlocks(sub_list.to_vec()).into(), - true, - ) { - warn!( - "Failed to send AskForBlocks to peer {} err: {}", - peer_id, err - ); - } - } + // Construct a hashset from the ID list for deduplication and faster lookup + let op_id_list = wishlist_info + .operation_ids + .as_ref() + .expect("operation IDs should be present"); + let op_id_set: PreHashSet = op_id_list.iter().copied().collect(); + + // Gather all the ops in storage + let claimed_ops = wishlist_info.storage.claim_operation_refs(&op_id_set); + + // Mark the ops we already know about as checked by us, + // this is used to refresh our knowledge cache in case it had expired. 
+ if !claimed_ops.is_empty() { + let mut cache_ops_write = self.operation_cache.write(); + for operation_id in claimed_ops.iter() { + cache_ops_write.insert_checked_operation(*operation_id); } } - self.next_timer_ask_block = next_tick; - Ok(()) + // Compute the total operations size + let total_operations_size = Self::get_total_operations_size( + &wishlist_info.storage, + &wishlist_info + .operation_ids + .as_ref() + .expect("operation_ids presence in wishlist should have been checked above") + .to_vec(), + ); + + // Check if the total size of the operations we know about is greater than the max block size. + // If it overflows, it means that the block is invalid because it is too big. + // We should stop trying to retrieve the block and ban everyone who knows it. + if total_operations_size > self.config.max_serialized_operations_size_per_block { + warn!( + "The operations we already have in our records exceed max block size for block {}.", + block_id + ); + + // stop retrieving the block + self.mark_block_as_invalid(block_id); + + // quit + return None; + } + + // if there are missing blocks, return them + if claimed_ops.len() < op_id_set.len() { + return Some((&op_id_set - &claimed_ops).into_iter().collect()); + } + + // there are no missing ops, we can finish the block + self.fully_gathered_block(block_id); + + None + } + + /// Called when we have fully gathered a block + fn fully_gathered_block(&mut self, block_id: &BlockId) { + // Gather all the elements needed to create the block. We must have it all by now. 
+ let wishlist_info = self + .block_wishlist + .remove(block_id) + .expect("block presence in wishlist should have been checked before"); + + // Create the block + let block = Block { + header: wishlist_info + .header + .expect("header presence in wishlist should have been checked above"), + operations: wishlist_info + .operation_ids + .expect("operation_ids presence in wishlist should have been checked above"), + }; + + let mut content_serialized = Vec::new(); + BlockSerializer::new() // todo : keep the serializer in the struct to avoid recreating it + .serialize(&block, &mut content_serialized) + .expect("failed to serialize block"); + + // wrap block + let signed_block = SecureShare { + signature: block.header.signature, + content_creator_pub_key: block.header.content_creator_pub_key, + content_creator_address: block.header.content_creator_address, + id: *block_id, + content: block, + serialized_data: content_serialized, + }; + + // Get block storage. + // It should contain only the operations. 
+ let mut block_storage = wishlist_info.storage; + + // Add endorsements to storage and claim ref + // TODO change this if we make endorsements separate from block header + block_storage.store_endorsements(signed_block.content.header.content.endorsements.clone()); + + // save slot + let slot = signed_block.content.header.content.slot; + + // add block to storage and claim ref + block_storage.store_block(signed_block); + + // Send to consensus + self.consensus_controller + .register_block(*block_id, slot, block_storage, false); + + // Remove from asked block history as it is not useful anymore + self.remove_asked_blocks(&vec![*block_id].into_iter().collect()); } } #[allow(clippy::too_many_arguments)] -// bookmark pub fn start_retrieval_thread( active_connections: Box, selector_controller: Box, @@ -1492,7 +1254,7 @@ pub fn start_retrieval_thread( receiver_network, block_message_serializer, receiver, - _internal_sender, + _announcement_sender: _internal_sender, cache, endorsement_cache, operation_cache, @@ -1500,6 +1262,7 @@ pub fn start_retrieval_thread( storage, mip_store, massa_metrics, + operation_id_serializer: OperationIdSerializer::new(), }; retrieval_thread.run(); }) diff --git a/massa-protocol-worker/src/handlers/endorsement_handler/cache.rs b/massa-protocol-worker/src/handlers/endorsement_handler/cache.rs index c92c01b42bf..40a83fe423f 100644 --- a/massa-protocol-worker/src/handlers/endorsement_handler/cache.rs +++ b/massa-protocol-worker/src/handlers/endorsement_handler/cache.rs @@ -1,48 +1,68 @@ -use std::{collections::HashSet, sync::Arc}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; use massa_models::endorsement::EndorsementId; use massa_protocol_exports::PeerId; use parking_lot::RwLock; use schnellru::{ByLength, LruMap}; +/// Cache of endorsements pub struct EndorsementCache { + /// List of endorsements we checked recently pub checked_endorsements: LruMap, - pub endorsements_known_by_peer: LruMap>, + /// List of endorsements known by 
peers + pub endorsements_known_by_peer: HashMap>, + /// Maximum number of endorsements known by a peer + pub max_known_endorsements_by_peer: u32, } impl EndorsementCache { - pub fn new(max_known_endorsements: u32, max_peers: u32) -> Self { + /// Create a new EndorsementCache + pub fn new(max_known_endorsements: u32, max_known_endorsements_by_peer: u32) -> Self { Self { checked_endorsements: LruMap::new(ByLength::new(max_known_endorsements)), - endorsements_known_by_peer: LruMap::new(ByLength::new(max_peers)), + endorsements_known_by_peer: HashMap::new(), + max_known_endorsements_by_peer, } } - pub fn update_cache( + /// Mark a list of endorsement IDs prefixes as known by a peer + pub fn insert_peer_known_endorsements( &mut self, - peers_connected: HashSet, - max_known_endorsements_by_peer: u32, + peer_id: &PeerId, + endorsements: &[EndorsementId], ) { - let peers: Vec = self + let known_endorsements = self .endorsements_known_by_peer - .iter() - .map(|(id, _)| id.clone()) - .collect(); - - // Clean shared cache if peers do not exist anymore - for peer_id in peers { - if !peers_connected.contains(&peer_id) { - self.endorsements_known_by_peer.remove(&peer_id); - } + .entry(peer_id.clone()) + .or_insert_with(|| LruMap::new(ByLength::new(self.max_known_endorsements_by_peer))); + for endorsement in endorsements { + known_endorsements.insert(*endorsement, ()); } + } + + /// Mark an endorsement ID as checked by us + pub fn insert_checked_endorsement(&mut self, enrodsement_id: EndorsementId) { + self.checked_endorsements.insert(enrodsement_id, ()); + } + + /// Update caches to remove all data from disconnected peers + pub fn update_cache(&mut self, peers_connected: &HashSet) { + // Remove disconnected peers from cache + self.endorsements_known_by_peer + .retain(|peer_id, _| peers_connected.contains(peer_id)); - // Add new potential peers + // Add new connected peers to cache for peer_id in peers_connected { - if self.endorsements_known_by_peer.peek(&peer_id).is_none() { - 
self.endorsements_known_by_peer.insert( - peer_id.clone(), - LruMap::new(ByLength::new(max_known_endorsements_by_peer)), - ); + match self.endorsements_known_by_peer.entry(peer_id.clone()) { + std::collections::hash_map::Entry::Occupied(_) => {} + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(LruMap::new(ByLength::new( + self.max_known_endorsements_by_peer, + ))); + } } } } diff --git a/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs b/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs index c244909e9e7..ff0361180b9 100644 --- a/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs +++ b/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs @@ -23,6 +23,7 @@ mod propagation; mod retrieval; pub(crate) use messages::{EndorsementMessage, EndorsementMessageSerializer}; +pub(crate) use retrieval::note_endorsements_from_peer; use super::peer_handler::models::{PeerManagementCmd, PeerMessageTuple}; @@ -64,11 +65,16 @@ impl EndorsementHandler { pool_controller, config.clone(), storage.clone_without_refs(), - massa_metrics, + massa_metrics.clone(), ); - let endorsement_propagation_thread = - start_propagation_thread(local_receiver, cache, config, active_connections); + let endorsement_propagation_thread = start_propagation_thread( + local_receiver, + cache, + config, + active_connections, + massa_metrics, + ); Self { endorsement_retrieval_thread: Some(( sender_retrieval_ext, diff --git a/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs b/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs index 8b1fc96a5b7..b12bb70d876 100644 --- a/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs +++ b/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs @@ -1,147 +1,140 @@ -use std::thread::JoinHandle; - -use massa_channel::receiver::MassaReceiver; -use massa_models::{ - endorsement::{EndorsementId, SecureShareEndorsement}, - prehash::{PreHashMap, 
PreHashSet}, -}; -use massa_protocol_exports::PeerId; -use massa_protocol_exports::ProtocolConfig; -use tracing::{debug, info, log::warn}; - -use crate::{messages::MessagesSerializer, wrap_network::ActiveConnectionsTrait}; - use super::{ cache::SharedEndorsementCache, commands_propagation::EndorsementHandlerPropagationCommand, messages::EndorsementMessageSerializer, EndorsementMessage, }; +use crate::{messages::MessagesSerializer, wrap_network::ActiveConnectionsTrait}; +use massa_channel::receiver::MassaReceiver; +use massa_metrics::MassaMetrics; +use massa_protocol_exports::ProtocolConfig; +use massa_storage::Storage; +use std::thread::JoinHandle; +use tracing::{info, log::warn}; +/// Endorsements need to propagate fast, so no buffering struct PropagationThread { receiver: MassaReceiver, config: ProtocolConfig, cache: SharedEndorsementCache, active_connections: Box, endorsement_serializer: MessagesSerializer, + _metrics: MassaMetrics, } impl PropagationThread { fn run(&mut self) { + let mut next_message = None; loop { - match self.receiver.recv() { - Ok(msg) => { - match msg { - EndorsementHandlerPropagationCommand::PropagateEndorsements( - mut endorsements, - ) => { - // IMPORTANT: This is there to batch all "waiting to propagate endorsements" but will not work anymore if there is - // other variants in EndorsementHandlerPropagationCommand - while let Ok(msg) = self.receiver.try_recv() { - match msg { - EndorsementHandlerPropagationCommand::PropagateEndorsements( - endorsements2, - ) => { - endorsements.extend(endorsements2); - } - EndorsementHandlerPropagationCommand::Stop => { - info!("Stop endorsement propagation thread"); - return; - } - } + // get the next message to process + let msg = match next_message.take() { + Some(msg) => msg, + None => match self.receiver.recv() { + Ok(msg) => msg, + Err(_) => { + info!("Stop endorsement propagation thread"); + return; + } + }, + }; + + match msg { + // endorsements to propagate + 
EndorsementHandlerPropagationCommand::PropagateEndorsements(mut endorsements) => { + // also drain any remaining propagation messages that might have accumulated + while let Ok(msg) = self.receiver.try_recv() { + match msg { + // we got more endorsements to propagate: extend the buffer + EndorsementHandlerPropagationCommand::PropagateEndorsements( + new_endorsements, + ) => { + endorsements.extend(new_endorsements); } - let ids: PreHashSet = endorsements - .get_endorsement_refs() - .iter() - .copied() - .collect(); - { - let mut cache_write = self.cache.write(); - for endorsement_id in ids.iter().copied() { - cache_write.checked_endorsements.insert(endorsement_id, ()); - } - // Add peers that potentially don't exist in cache - let peers_connected = - self.active_connections.get_peer_ids_connected(); - cache_write.update_cache( - peers_connected, - self.config - .max_node_known_endorsements_size - .try_into() - .expect("max_node_known_endorsements_size is too big"), - ); - let all_keys: Vec = cache_write - .endorsements_known_by_peer - .iter() - .map(|(k, _)| k) - .cloned() - .collect(); - for peer_id in all_keys.iter() { - let endorsement_ids = cache_write - .endorsements_known_by_peer - .peek_mut(peer_id) - .unwrap(); - let new_endorsements: PreHashMap< - EndorsementId, - SecureShareEndorsement, - > = { - let endorsements_reader = endorsements.read_endorsements(); - endorsements - .get_endorsement_refs() - .iter() - .filter_map(|id| { - if endorsement_ids.peek(id).is_some() { - return None; - } - Some(( - *id, - endorsements_reader.get(id).cloned().unwrap(), - )) - }) - .collect() - }; - for endorsement_id in new_endorsements.keys().copied() { - endorsement_ids.insert(endorsement_id, ()); - } - let to_send = - new_endorsements.into_values().collect::>(); - if !to_send.is_empty() { - debug!( - "Send endorsements of len {} to {}", - to_send.len(), - peer_id - ); - for sub_list in to_send.chunks( - self.config.max_endorsements_per_message as usize, - ) { - if let 
Err(err) = self.active_connections.send_to_peer( - peer_id, - &self.endorsement_serializer, - EndorsementMessage::Endorsements(sub_list.to_vec()) - .into(), - false, - ) { - warn!( - "could not send endorsements batch to node {}: {}", - peer_id, err - ); - } - } - } - } + // we grabbed a message that is not a propagation message, mark it for processing + other_msg => { + next_message = Some(other_msg); + break; } } - EndorsementHandlerPropagationCommand::Stop => { - info!("Stop endorsement propagation thread"); - return; - } } + // propagate the endorsements + self.propagate_endorsements(endorsements); } - Err(_) => { + // stop the handler + EndorsementHandlerPropagationCommand::Stop => { info!("Stop endorsement propagation thread"); return; } } } } + + /// Perform propagation of endorsements to the connected peers + fn propagate_endorsements(&mut self, endorsements: Storage) { + // get all the endorsements to send + let endorsements: Vec<_> = { + let storage_lock = endorsements.read_endorsements(); + endorsements + .get_endorsement_refs() + .iter() + .filter_map(|id| storage_lock.get(id).cloned()) + .collect() + }; + + // get connected peers + let peers_connected = self.active_connections.get_peer_ids_connected(); + + // get a write lock on the cache + let mut cache_write = self.cache.write(); + + // mark that we have checked those endorsements + for endorsement in &endorsements { + cache_write.checked_endorsements.insert(endorsement.id, ()); + } + + // Add peers that potentially don't exist in cache and remove the ones that disconnected + cache_write.update_cache(&peers_connected); + + // Propagate to peers + 'peer_loop: for peer_id in peers_connected { + // write access to the cache of which endorsements are known by the peer + let peer_knowledge = cache_write + .endorsements_known_by_peer + .get_mut(&peer_id) + .expect("update_cache should have added connected peer to cache"); + + // get endorsements that are not known by the peer + let to_send: Vec<_> = 
endorsements + .iter() + .filter(|endorsement| peer_knowledge.peek(&endorsement.id).is_none()) + .collect(); + + if to_send.is_empty() { + // nothing to send to that peer, try the next one + continue 'peer_loop; + } + + // send by chunks + for chunk in to_send.chunks(self.config.max_endorsements_per_message as usize) { + if let Err(err) = self.active_connections.send_to_peer( + &peer_id, + &self.endorsement_serializer, + EndorsementMessage::Endorsements(chunk.iter().map(|&e| e.clone()).collect()) + .into(), + false, + ) { + warn!( + "could not send endorsements batch to node {}: {}", + peer_id, err + ); + // try with next peer, this one is probably congested + continue 'peer_loop; + } + // sent successfully: mark peer as knowing the endorsements that were sent to it + for endorsement in chunk { + peer_knowledge.insert(endorsement.id, ()); + } + } + } + } } pub fn start_propagation_thread( @@ -149,6 +142,7 @@ pub fn start_propagation_thread( cache: SharedEndorsementCache, config: ProtocolConfig, active_connections: Box, + metrics: MassaMetrics, ) -> JoinHandle<()> { std::thread::Builder::new() .name("protocol-endorsement-handler-propagation".to_string()) @@ -161,6 +155,7 @@ pub fn start_propagation_thread( active_connections, cache, endorsement_serializer, + _metrics: metrics, }; propagation_thread.run(); }) diff --git a/massa-protocol-worker/src/handlers/endorsement_handler/retrieval.rs b/massa-protocol-worker/src/handlers/endorsement_handler/retrieval.rs index 5633ee5a9df..6d9d9d5227e 100644 --- a/massa-protocol-worker/src/handlers/endorsement_handler/retrieval.rs +++ b/massa-protocol-worker/src/handlers/endorsement_handler/retrieval.rs @@ -16,7 +16,6 @@ use massa_protocol_exports::{ProtocolConfig, ProtocolError}; use massa_serialization::{DeserializeError, Deserializer}; use massa_storage::Storage; use massa_time::MassaTime; -use schnellru::{ByLength, LruMap}; use tracing::{debug, info, warn}; use crate::{ @@ -45,16 +44,12 @@ pub struct RetrievalThread { storage: 
Storage, peer_cmd_sender: MassaSender, metrics: MassaMetrics, + endorsement_message_deserializer: EndorsementMessageDeserializer, } impl RetrievalThread { fn run(&mut self) { - let endorsement_message_deserializer = - EndorsementMessageDeserializer::new(EndorsementMessageDeserializerArgs { - thread_count: self.config.thread_count, - max_length_endorsements: self.config.max_endorsements_per_message, - endorsement_count: self.config.endorsement_count, - }); + // regular interval ticks for metrics let tick_metrics = tick(self.metrics.tick_delay); loop { @@ -62,38 +57,7 @@ impl RetrievalThread { recv(self.receiver) -> msg => { self.receiver.update_metrics(); match msg { - Ok((peer_id, message)) => { - let (rest, message) = match endorsement_message_deserializer - .deserialize::(&message) { - Ok((rest, message)) => (rest, message), - Err(err) => { - warn!("Error while deserializing message from peer {} err: {:?}", peer_id, err); - continue; - } - }; - if !rest.is_empty() { - println!("Error: message not fully consumed"); - return; - } - match message { - EndorsementMessage::Endorsements(endorsements) => { - debug!("Received endorsement message: Endorsement from {}", peer_id); - if let Err(err) = - self.note_endorsements_from_peer(endorsements, &peer_id) - { - warn!( - "peer {} sent us critically incorrect endorsements, \ - which may be an attack attempt by the remote node or a \ - loss of sync between us and the remote node. 
Err = {}", - peer_id, err - ); - if let Err(err) = self.ban_node(&peer_id) { - warn!("Error while banning peer {} err: {:?}", peer_id, err); - } - } - } - } - } + Ok((peer_id, message)) => self.process_message(peer_id, message), Err(_) => { info!("Stop endorsement retrieval thread"); return; @@ -119,175 +83,204 @@ impl RetrievalThread { }, recv(tick_metrics) -> _ => { // update metrics - let read = self.cache.read(); - let count = read + let cache_lock = self.cache.read(); + let count = cache_lock .endorsements_known_by_peer - .iter() - .map(|(_peer_id, map)| map.len()) + .values() + .map(|v| v.len()) .sum(); self.metrics - .set_endorsements_cache_metrics(read.checked_endorsements.len(), count); + .set_endorsements_cache_metrics(cache_lock.checked_endorsements.len(), count); } } } } - /// Note endorsements coming from a given node, - /// and propagate them when they were received outside of a header. - /// - /// Caches knowledge of valid ones. - /// - /// Does not ban if the endorsement is invalid - /// - /// Checks performed: - /// - Valid signature. 
- pub(crate) fn note_endorsements_from_peer( - &mut self, - endorsements: Vec, - from_peer_id: &PeerId, - ) -> Result<(), ProtocolError> { - massa_trace!("protocol.protocol_worker.note_endorsements_from_node", { "node": from_peer_id, "endorsements": endorsements}); - let length = endorsements.len(); - let mut new_endorsements = PreHashMap::with_capacity(length); - let mut endorsement_ids = PreHashSet::with_capacity(length); - for endorsement in endorsements.into_iter() { - let endorsement_id = endorsement.id; - endorsement_ids.insert(endorsement_id); - // check endorsement signature if not already checked - { - let read_cache = self.cache.read(); - if read_cache - .checked_endorsements - .peek(&endorsement_id) - .is_none() - { - new_endorsements.insert(endorsement_id, endorsement); + /// Process incoming message + fn process_message(&mut self, peer_id: PeerId, message: Vec) { + let (rest, message) = match self + .endorsement_message_deserializer + .deserialize::(&message) + { + Ok((rest, message)) => (rest, message), + Err(err) => { + debug!( + "Error while deserializing message from peer {} err: {:?}", + peer_id, err + ); + return; + } + }; + if !rest.is_empty() { + debug!("Message not fully consumed"); + return; + } + match message { + EndorsementMessage::Endorsements(endorsements) => { + debug!("Received endorsement message: Endorsement from {}", peer_id); + if let Err(err) = note_endorsements_from_peer( + endorsements, + &peer_id, + &self.cache, + self.selector_controller.as_ref(), + &self.storage, + &self.config, + &self.internal_sender, + self.pool_controller.as_mut(), + ) { + warn!( + "peer {} sent us critically incorrect endorsements, \ + which may be an attack attempt by the remote node or a \ + loss of sync between us and the remote node. 
Err = {}", + peer_id, err + ); + if let Err(err) = self.ban_peer(&peer_id) { + warn!("Error while banning peer {} err: {:?}", peer_id, err); + } } } } + } - // Batch signature verification - // optimized signature verification - verify_sigs_batch( - &new_endorsements - .values() - .map(|endorsement| { - ( - endorsement.compute_signed_hash(), - endorsement.signature, - endorsement.content_creator_pub_key, - ) - }) - .collect::>(), - )?; + /// send a ban peer command to the peer handler + fn ban_peer(&mut self, peer_id: &PeerId) -> Result<(), ProtocolError> { + massa_trace!("ban node from retrieval thread", { "peer_id": peer_id.to_string() }); + self.peer_cmd_sender + .try_send(PeerManagementCmd::Ban(vec![peer_id.clone()])) + .map_err(|err| ProtocolError::SendError(err.to_string())) + } +} + +/// Note endorsements coming from a given node, +/// and propagate them when they were received outside of a header. +/// +/// Caches knowledge of valid ones. +/// +/// Does not ban if the endorsement is invalid +/// +/// Checks performed: +/// - Valid signature. 
+#[allow(clippy::too_many_arguments)] +pub(crate) fn note_endorsements_from_peer( + endorsements: Vec, + from_peer_id: &PeerId, + cache: &SharedEndorsementCache, + selector_controller: &dyn SelectorController, + storage: &Storage, + config: &ProtocolConfig, + endorsement_propagation_sender: &MassaSender, + pool_controller: &mut dyn PoolController, +) -> Result<(), ProtocolError> { + let mut new_endorsements = PreHashMap::with_capacity(endorsements.len()); + let mut all_endorsement_ids = PreHashSet::with_capacity(endorsements.len()); + + // cache check + { + let cache_read = cache.read(); + for endorsement in endorsements.into_iter() { + let endorsement_id = endorsement.id; + all_endorsement_ids.insert(endorsement_id); - // Check PoS draws - for endorsement in new_endorsements.values() { - let selection = self - .selector_controller - .get_selection(endorsement.content.slot)?; - let Some(address) = selection.endorsements.get(endorsement.content.index as usize) else { - return Err(ProtocolError::GeneralProtocolError( - format!( - "No selection on slot {} for index {}", - endorsement.content.slot, endorsement.content.index - ) - )) - }; - if address != &endorsement.content_creator_address { - return Err(ProtocolError::GeneralProtocolError(format!( - "Invalid endorsement: expected address {}, got {}", - address, endorsement.content_creator_address - ))); + // only consider the endorsement as new if we have not already checked it + if cache_read + .checked_endorsements + .peek(&endorsement_id) + .is_none() + { + new_endorsements.insert(endorsement_id, endorsement); } } + } - 'write_cache: { - let mut cache_write = self.cache.write(); - // add to verified signature cache - for endorsement_id in endorsement_ids.iter() { - cache_write.checked_endorsements.insert(*endorsement_id, ()); - } - // add to known endorsements for source node. 
- let Ok(endorsements) = cache_write - .endorsements_known_by_peer - .get_or_insert(from_peer_id.clone(), || { - LruMap::new(ByLength::new( - self.config - .max_node_known_endorsements_size - .try_into() - .expect("max_node_known_endorsements_size in config should be > 0"), + // Batch signature verification + verify_sigs_batch( + &new_endorsements + .values() + .map(|endorsement| { + ( + endorsement.compute_signed_hash(), + endorsement.signature, + endorsement.content_creator_pub_key, + ) + }) + .collect::>(), + )?; + + // Check PoS draws + for endorsement in new_endorsements.values() { + let selection = selector_controller + .get_selection(endorsement.content.slot)? + .endorsements; + let Some(address) = selection.get(endorsement.content.index as usize) else { + return Err(ProtocolError::GeneralProtocolError( + format!( + "No selection on slot {} for index {}", + endorsement.content.slot, endorsement.content.index + ) )) - }) - .ok_or(()) else { - warn!("endorsements_known_by_peer limit reached"); - break 'write_cache; }; - for endorsement_id in endorsement_ids.iter() { - endorsements.insert(*endorsement_id, ()); - } + if address != &endorsement.content_creator_address { + return Err(ProtocolError::GeneralProtocolError(format!( + "Invalid endorsement producer selection: expected address {}, got {}", + address, endorsement.content_creator_address + ))); } + } - if !new_endorsements.is_empty() { - let mut endorsements = self.storage.clone_without_refs(); - endorsements.store_endorsements(new_endorsements.into_values().collect()); + { + let mut cache_write = cache.write(); - // Propagate endorsements - // Propagate endorsements when the slot of the block they endorse isn't `max_endorsements_propagation_time` old. 
- let mut endorsements_to_propagate = endorsements.clone(); - let endorsements_to_not_propagate = { - let now = MassaTime::now()?; - let read_endorsements = endorsements_to_propagate.read_endorsements(); - endorsements_to_propagate - .get_endorsement_refs() - .iter() - .filter_map(|endorsement_id| { - let slot_endorsed_block = - read_endorsements.get(endorsement_id).unwrap().content.slot; - let slot_timestamp = get_block_slot_timestamp( - self.config.thread_count, - self.config.t0, - self.config.genesis_timestamp, - slot_endorsed_block, - ); - match slot_timestamp { - Ok(slot_timestamp) => { - if slot_timestamp - .saturating_add(self.config.max_endorsements_propagation_time) - < now - { - Some(*endorsement_id) - } else { - None - } - } - Err(_) => Some(*endorsement_id), - } - }) - .collect() - }; - endorsements_to_propagate.drop_endorsement_refs(&endorsements_to_not_propagate); - if let Err(err) = self.internal_sender.try_send( - EndorsementHandlerPropagationCommand::PropagateEndorsements( - endorsements_to_propagate, - ), - ) { - warn!("Failed to send from retrieval thread of endorsement handler to propagation: {:?}", err); - } - // Add to pool - self.pool_controller.add_endorsements(endorsements); + // add to the cache of endorsements we have checked + for endorsement_id in all_endorsement_ids.iter() { + cache_write.insert_checked_endorsement(*endorsement_id); } - Ok(()) + // add to the cache of endorsements known by the source node + cache_write.insert_peer_known_endorsements( + from_peer_id, + &all_endorsement_ids.iter().copied().collect::>(), + ); } - /// send a ban peer command to the peer handler - fn ban_node(&mut self, peer_id: &PeerId) -> Result<(), ProtocolError> { - massa_trace!("ban node from retrieval thread", { "peer_id": peer_id.to_string() }); - self.peer_cmd_sender - .try_send(PeerManagementCmd::Ban(vec![peer_id.clone()])) - .map_err(|err| ProtocolError::SendError(err.to_string())) + // From there we note new endorsements and propagate them + + // 
Filter out endorsements if they are too old (max age of the inclusion slot: `max_endorsements_propagation_time`) + let now = MassaTime::now()?; + new_endorsements.retain(|_id, endorsement| { + match get_block_slot_timestamp( + config.thread_count, + config.t0, + config.genesis_timestamp, + endorsement.content.slot, + ) { + Ok(t) => t.saturating_add(config.max_endorsements_propagation_time) >= now, + Err(_) => false, + } + }); + + if new_endorsements.is_empty() { + // no endorsements to note or propagate + return Ok(()); + } + + // Store new endorsements + let mut endorsement_store = storage.clone_without_refs(); + endorsement_store.store_endorsements(new_endorsements.into_values().collect()); + + // Propagate to other peers + if let Err(err) = endorsement_propagation_sender.try_send( + EndorsementHandlerPropagationCommand::PropagateEndorsements(endorsement_store.clone()), + ) { + warn!( + "Failed to send from retrieval thread of endorsement handler to propagation: {:?}", + err + ); } + + // Add to pool + pool_controller.add_endorsements(endorsement_store); + + Ok(()) } #[allow(clippy::too_many_arguments)] @@ -303,6 +296,12 @@ pub fn start_retrieval_thread( storage: Storage, metrics: MassaMetrics, ) -> JoinHandle<()> { + let endorsement_message_deserializer = + EndorsementMessageDeserializer::new(EndorsementMessageDeserializerArgs { + thread_count: config.thread_count, + max_length_endorsements: config.max_endorsements_per_message, + endorsement_count: config.endorsement_count, + }); std::thread::Builder::new() .name("protocol-endorsement-handler-retrieval".to_string()) .spawn(move || { @@ -317,6 +316,7 @@ pub fn start_retrieval_thread( config, storage, metrics, + endorsement_message_deserializer, }; retrieval_thread.run(); }) diff --git a/massa-protocol-worker/src/handlers/operation_handler/cache.rs b/massa-protocol-worker/src/handlers/operation_handler/cache.rs index d91d13c121a..77126d9a21e 100644 --- 
a/massa-protocol-worker/src/handlers/operation_handler/cache.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/cache.rs @@ -8,14 +8,20 @@ use massa_protocol_exports::PeerId; use parking_lot::RwLock; use schnellru::{ByLength, LruMap}; +/// Cache for operations pub struct OperationCache { + /// List of operations we checked recently pub checked_operations: LruMap, + /// List of operation ID prefixes we checked recently pub checked_operations_prefix: LruMap, + /// List of operations known by peers pub ops_known_by_peer: HashMap>, + /// Maximum number of operations known by a peer pub max_known_ops_by_peer: u32, } impl OperationCache { + /// Create a new OperationCache pub fn new(max_known_ops: u32, max_known_ops_by_peer: u32) -> Self { Self { checked_operations: LruMap::new(ByLength::new(max_known_ops)), @@ -25,12 +31,25 @@ impl OperationCache { } } + /// Mark a list of operation ID prefixes as known by a peer + pub fn insert_peer_known_ops(&mut self, peer_id: &PeerId, ops: &[OperationPrefixId]) { + let known_ops = self + .ops_known_by_peer + .entry(peer_id.clone()) + .or_insert_with(|| LruMap::new(ByLength::new(self.max_known_ops_by_peer))); + for op in ops { + known_ops.insert(*op, ()); + } + } + + /// Mark an operation ID as checked by us pub fn insert_checked_operation(&mut self, operation_id: OperationId) { self.checked_operations.insert(operation_id, ()); self.checked_operations_prefix .insert(operation_id.prefix(), ()); } + /// Update caches to remove all data from disconnected peers pub fn update_cache(&mut self, peers_connected: &HashSet) { // Remove disconnected peers from cache self.ops_known_by_peer diff --git a/massa-protocol-worker/src/handlers/operation_handler/mod.rs b/massa-protocol-worker/src/handlers/operation_handler/mod.rs index 01579e826c0..8e292b43175 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/mod.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/mod.rs @@ -22,6 +22,7 @@ mod propagation; mod 
retrieval; pub(crate) use messages::{OperationMessage, OperationMessageSerializer}; +pub(crate) use retrieval::note_operations_from_peer; use super::peer_handler::models::{PeerManagementCmd, PeerMessageTuple}; diff --git a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs index 1f441ab2e7f..f58f006eda1 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs @@ -5,9 +5,10 @@ use crossbeam::channel::RecvTimeoutError; use massa_channel::receiver::MassaReceiver; use massa_logging::massa_trace; use massa_metrics::MassaMetrics; -use massa_models::operation::{OperationId, OperationPrefixId}; +use massa_models::operation::OperationId; use massa_models::prehash::CapacityAllocator; use massa_models::prehash::PreHashSet; +use massa_protocol_exports::PeerId; use massa_protocol_exports::ProtocolConfig; use massa_storage::Storage; use tracing::{debug, info, log::warn}; @@ -131,46 +132,40 @@ impl PropagationThread { if self.next_batch.is_empty() { return; } - let operation_id_prefixes = mem::take(&mut self.next_batch) - .into_iter() - .map(|id| id.prefix()) - .collect::>(); + let operation_ids = mem::take(&mut self.next_batch); massa_trace!("protocol.protocol_worker.announce_ops.begin", { - "operation_id_prefixes": operation_id_prefixes + "operation_ids": operation_ids }); - let peers_connected = self.active_connections.get_peer_ids_connected(); { let mut cache_write = self.cache.write(); + let peers_connected = self.active_connections.get_peer_ids_connected(); cache_write.update_cache(&peers_connected); // Propagate to peers - for peer_id in peers_connected { - let peer_known_ops = cache_write - .ops_known_by_peer - .get_mut(&peer_id) - .expect("expected update_cache to insert all available peers in ops_known_by_peer but one is absent"); - let ops_unknown_to_peer: Vec = operation_id_prefixes + 
let all_keys: Vec = cache_write.ops_known_by_peer.keys().cloned().collect(); + for peer_id in all_keys { + let ops = cache_write.ops_known_by_peer.get_mut(&peer_id).unwrap(); + let new_ops: Vec = operation_ids .iter() - .filter(|&id_prefix| peer_known_ops.peek(id_prefix).is_none()) + .filter(|id| ops.peek(&id.prefix()).is_none()) .copied() .collect(); - if !ops_unknown_to_peer.is_empty() { - for id_prefix in &ops_unknown_to_peer { - peer_known_ops.insert(*id_prefix, ()); + if !new_ops.is_empty() { + for id in &new_ops { + ops.insert(id.prefix(), ()); } debug!( "Send operations announcement of len {} to {}", - ops_unknown_to_peer.len(), + new_ops.len(), peer_id ); - for sub_list in - ops_unknown_to_peer.chunks(self.config.max_operations_per_message as usize) + for sub_list in new_ops.chunks(self.config.max_operations_per_message as usize) { if let Err(err) = self.active_connections.send_to_peer( &peer_id, &self.operation_message_serializer, OperationMessage::OperationsAnnouncement( - sub_list.iter().copied().collect(), + sub_list.iter().map(|id| id.into_prefix()).collect(), ) .into(), false, diff --git a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs index e7734b7a8d4..4cbb44db0fc 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs @@ -99,7 +99,15 @@ impl RetrievalThread { match message { OperationMessage::Operations(ops) => { debug!("Received operation message: Operations from {}", peer_id); - if let Err(err) = self.note_operations_from_peer(ops, &peer_id) { + if let Err(err) = note_operations_from_peer( + &self.storage, + &mut self.cache, + &self.config, + ops, + &peer_id, + &mut self.internal_sender, + &mut self.pool_controller + ) { warn!("peer {} sent us critically incorrect operation, which may be an attack attempt by the remote peer or a loss of sync between us and the remote 
peer. Err = {}", peer_id, err); if let Err(e) = self.ban_node(&peer_id) { @@ -153,115 +161,6 @@ impl RetrievalThread { } } - fn note_operations_from_peer( - &mut self, - operations: Vec, - source_peer_id: &PeerId, - ) -> Result<(), ProtocolError> { - massa_trace!("protocol.protocol_worker.note_operations_from_peer", { "peer": source_peer_id, "operations": operations }); - let now = MassaTime::now().expect("could not get current time"); - - let mut new_operations = PreHashMap::with_capacity(operations.len()); - for operation in operations { - // ignore if op is too old - let expire_period_timestamp = get_block_slot_timestamp( - self.config.thread_count, - self.config.t0, - self.config.genesis_timestamp, - Slot::new( - operation.content.expire_period, - operation - .content_creator_address - .get_thread(self.config.thread_count), - ), - ); - match expire_period_timestamp { - Ok(slot_timestamp) => { - if slot_timestamp.saturating_add(self.config.max_operations_propagation_time) - < now - { - continue; - } - } - Err(_) => continue, - } - - // quit if op is too big - if operation.serialized_size() > self.config.max_serialized_operations_size_per_block { - return Err(ProtocolError::InvalidOperationError(format!( - "Operation {} exceeds max block size, maximum authorized {} bytes but found {} bytes", - operation.id, - operation.serialized_size(), - self.config.max_serialized_operations_size_per_block - ))); - }; - - // add to new operations - new_operations.insert(operation.id, operation); - } - - // all valid received ids (not only new ones) for knowledge marking - let all_received_ids: PreHashSet<_> = new_operations.keys().copied().collect(); - - // retain only new ops that are not already known - { - let cache_read = self.cache.read(); - new_operations.retain(|op_id, _| cache_read.checked_operations.peek(op_id).is_none()); - } - - // optimized signature verification - verify_sigs_batch( - &new_operations - .iter() - .map(|(op_id, op)| (*op_id.get_hash(), op.signature, 
op.content_creator_pub_key)) - .collect::>(), - )?; - - { - // add to checked operations - let mut cache_write = self.cache.write(); - - // add checked operations - for op_id in new_operations.keys().copied() { - cache_write.insert_checked_operation(op_id); - } - - // add to known ops - let known_ops = cache_write - .ops_known_by_peer - .entry(source_peer_id.clone()) - .or_insert_with(|| { - LruMap::new(ByLength::new( - self.config - .max_node_known_ops_size - .try_into() - .expect("max_node_known_ops_size in config must fit in u32"), - )) - }); - for id in all_received_ids { - known_ops.insert(id.prefix(), ()); - } - } - - if !new_operations.is_empty() { - // Store new operations, claim locally - let mut ops = self.storage.clone_without_refs(); - ops.store_operations(new_operations.into_values().collect()); - - // propagate new operations - self.internal_sender - .try_send(OperationHandlerPropagationCommand::PropagateOperations( - ops.clone(), - )) - .map_err(|err| ProtocolError::SendError(err.to_string()))?; - - // Add to pool - self.pool_controller.add_operations(ops); - } - - Ok(()) - } - /// On receive a batch of operation ids `op_batch` from another `peer_id` /// Execute the following algorithm: [redirect to GitHub](https://github.com/massalabs/massa/issues/2283#issuecomment-1040872779) /// @@ -297,23 +196,9 @@ impl RetrievalThread { } // mark sender as knowing the ops - { - let mut cache_write = self.cache.write(); - let known_ops = cache_write - .ops_known_by_peer - .entry(peer_id.clone()) - .or_insert_with(|| { - LruMap::new(ByLength::new( - self.config - .max_node_known_ops_size - .try_into() - .expect("max_node_known_ops_size in config must fit in u32"), - )) - }); - for prefix_id in &op_batch { - known_ops.insert(*prefix_id, ()); - } - } + self.cache + .write() + .insert_peer_known_ops(peer_id, &op_batch.iter().copied().collect::>()); // filter out the operations that we already know about { @@ -469,6 +354,110 @@ impl RetrievalThread { } } +pub(crate) 
fn note_operations_from_peer( + base_storage: &Storage, + operations_cache: &mut SharedOperationCache, + config: &ProtocolConfig, + operations: Vec, + source_peer_id: &PeerId, + ops_propagation_sender: &mut MassaSender, + pool_controller: &mut Box, +) -> Result<(), ProtocolError> { + massa_trace!("protocol.protocol_worker.note_operations_from_peer", { "peer": source_peer_id, "operations": operations }); + let now = MassaTime::now().expect("could not get current time"); + + let mut new_operations = PreHashMap::with_capacity(operations.len()); + for operation in operations { + // ignore if op is too old + let expire_period_timestamp = get_block_slot_timestamp( + config.thread_count, + config.t0, + config.genesis_timestamp, + Slot::new( + operation.content.expire_period, + operation + .content_creator_address + .get_thread(config.thread_count), + ), + ); + match expire_period_timestamp { + Ok(slot_timestamp) => { + if slot_timestamp.saturating_add(config.max_operations_propagation_time) < now { + continue; + } + } + Err(_) => continue, + } + + // quit if op is too big + if operation.serialized_size() > config.max_serialized_operations_size_per_block { + return Err(ProtocolError::InvalidOperationError(format!( + "Operation {} exceeds max block size, maximum authorized {} bytes but found {} bytes", + operation.id, + operation.serialized_size(), + config.max_serialized_operations_size_per_block + ))); + }; + + // add to new operations + new_operations.insert(operation.id, operation); + } + + // all valid received ids (not only new ones) for knowledge marking + let all_received_ids: PreHashSet<_> = new_operations.keys().copied().collect(); + + // retain only new ops that are not already known + { + let cache_read = operations_cache.read(); + new_operations.retain(|op_id, _| cache_read.checked_operations.peek(op_id).is_none()); + } + + // optimized signature verification + verify_sigs_batch( + &new_operations + .iter() + .map(|(op_id, op)| (*op_id.get_hash(), op.signature, 
op.content_creator_pub_key)) + .collect::>(), + )?; + + { + // add to checked operations + let mut cache_write = operations_cache.write(); + + // add checked operations + for op_id in new_operations.keys().copied() { + cache_write.insert_checked_operation(op_id); + } + + // add to known ops + cache_write.insert_peer_known_ops( + source_peer_id, + &all_received_ids + .into_iter() + .map(|id| id.into_prefix()) + .collect::>(), + ); + } + + if !new_operations.is_empty() { + // Store new operations, claim locally + let mut ops = base_storage.clone_without_refs(); + ops.store_operations(new_operations.into_values().collect()); + + // propagate new operations + if let Err(_err) = ops_propagation_sender.try_send( + OperationHandlerPropagationCommand::PropagateOperations(ops.clone()), + ) { + warn!("Error sending operations to propagation channel"); + } + + // Add to pool + pool_controller.add_operations(ops); + } + + Ok(()) +} + #[allow(clippy::too_many_arguments)] pub fn start_retrieval_thread( receiver: MassaReceiver, diff --git a/massa-protocol-worker/src/handlers/peer_handler/models.rs b/massa-protocol-worker/src/handlers/peer_handler/models.rs index 075526d3ff2..094cc509af6 100644 --- a/massa-protocol-worker/src/handlers/peer_handler/models.rs +++ b/massa-protocol-worker/src/handlers/peer_handler/models.rs @@ -61,7 +61,6 @@ pub struct PeerManagementChannel { impl PeerDB { pub fn ban_peer(&mut self, peer_id: &PeerId) { - println!("peers: {:?}", self.peers); if let Some(peer) = self.peers.get_mut(peer_id) { peer.state = PeerState::Banned; info!("Banned peer: {:?}", peer_id); diff --git a/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs b/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs index a401811ac98..5b6547189fe 100644 --- a/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs +++ b/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs @@ -1,5 +1,6 @@ // Copyright (c) 2022 MASSA LABS +use std::collections::HashSet; use std::time::Duration; use 
massa_consensus_exports::test_exports::MockConsensusControllerMessage; @@ -56,7 +57,7 @@ fn test_protocol_bans_node_sending_block_header_with_invalid_signature() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader(block.content.header))), + Message::Block(Box::new(BlockMessage::Header(block.content.header))), ) .unwrap(); @@ -195,9 +196,7 @@ fn test_protocol_bans_node_sending_header_with_invalid_signature() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -217,10 +216,10 @@ fn test_protocol_bans_node_sending_header_with_invalid_signature() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![operation_2.id].into_iter().collect()), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::OperationIds(vec![operation_2.id]), + })), ) .unwrap(); std::thread::sleep(std::time::Duration::from_millis(1000)); @@ -249,7 +248,7 @@ fn test_protocol_bans_node_sending_header_with_invalid_signature() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader(block_2.content.header))), + Message::Block(Box::new(BlockMessage::Header(block_2.content.header))), ) .expect_err("Node A should not be able to send a block"); std::thread::sleep(std::time::Duration::from_millis(1000)); @@ -298,9 +297,7 @@ fn test_protocol_does_not_asks_for_block_from_banned_node_who_propagated_header( network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -333,9 +330,7 @@ 
fn test_protocol_does_not_asks_for_block_from_banned_node_who_propagated_header( network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); //7. Check that node connection is closed (node should be banned) @@ -391,7 +386,7 @@ fn test_protocol_bans_all_nodes_propagating_an_attack_attempt() { mut consensus_event_receiver, pool_event_receiver, selector_event_receiver| { - //1. Create 2 node + //1. Create 2 nodes let node_a_keypair = KeyPair::generate(0).unwrap(); let node_b_keypair = KeyPair::generate(0).unwrap(); let (node_a_peer_id, _node_a) = network_controller @@ -407,11 +402,10 @@ fn test_protocol_bans_all_nodes_propagating_an_attack_attempt() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); + //4. Check that protocol does send block to consensus the first time. match consensus_event_receiver.wait_command( MassaTime::from_millis(500), @@ -434,9 +428,7 @@ fn test_protocol_bans_all_nodes_propagating_an_attack_attempt() { network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); //5. Check that protocol does send block to consensus the second time. @@ -444,30 +436,21 @@ fn test_protocol_bans_all_nodes_propagating_an_attack_attempt() { Some(()) => panic!("Protocol should not send block to consensus"), None => {} } - //6. Connect a new node that don't known about the attack. + //6. Connect a new node that is not involved in the attack. 
let node_c_keypair = KeyPair::generate(0).unwrap(); - let (_node_c_peer_id, _node_c) = network_controller + let (node_c_peer_id, _node_c) = network_controller .create_fake_connection(PeerId::from_public_key(node_c_keypair.get_public_key())); - //7. Notify the attack + //7. Notify protocol of the attack protocol_controller.notify_block_attack(block.id).unwrap(); std::thread::sleep(std::time::Duration::from_millis(1000)); - //8. Check that there is only node C not banned. - assert_eq!( - network_controller - .get_connections() - .get_peer_ids_connected() - .len(), - 1 - ); + + //8. Check all nodes are banned except node C. assert_eq!( network_controller .get_connections() - .get_peer_ids_connected() - .iter() - .next() - .unwrap(), - &_node_c_peer_id + .get_peer_ids_connected(), + [node_c_peer_id].into_iter().collect::>() ); ( network_controller, @@ -480,119 +463,3 @@ fn test_protocol_bans_all_nodes_propagating_an_attack_attempt() { }, ) } - -//TODO: Is this behavior still wanted ? -// #[tokio::test] -// #[serial] -// async fn test_protocol_removes_banned_node_on_disconnection() { -// let protocol_config = &tools::PROTOCOL_CONFIG; -// protocol_test( -// protocol_config, -// async move |mut network_controller, -// protocol_command_sender, -// protocol_manager, -// mut protocol_consensus_event_receiver, -// protocol_pool_event_receiver| { -// let mut nodes = tools::create_and_connect_nodes(1, &mut network_controller).await; - -// let creator_node = nodes.pop().expect("Failed to get node info."); - -// // Get the node banned. -// let mut block = tools::create_block(&creator_node.keypair); -// block.content.header.id = BlockId::new(Hash::compute_from("invalid".as_bytes())); -// network_controller -// .send_header(creator_node.id, block.content.header) -// .await; -// tools::assert_banned_nodes(vec![creator_node.id], &mut network_controller).await; - -// // Close the connection. -// network_controller.close_connection(creator_node.id).await; - -// // Re-connect the node. 
-// network_controller.new_connection(creator_node.id).await; - -// // The node is not banned anymore. -// let block = tools::create_block(&creator_node.keypair); -// network_controller -// .send_header(creator_node.id, block.content.header.clone()) -// .await; - -// // Check protocol sends header to consensus. -// let (protocol_consensus_event_receiver, received_hash) = -// tokio::task::spawn_blocking(move || { -// let id = protocol_consensus_event_receiver -// .wait_command(MassaTime::from_millis(1000), |command| match command { -// MockConsensusControllerMessage::RegisterBlockHeader { -// block_id, -// header: _, -// } => Some(block_id), -// _ => panic!("unexpected protocol event"), -// }) -// .unwrap(); -// (protocol_consensus_event_receiver, id) -// }) -// .await -// .unwrap(); - -// // Check that protocol sent the right header to consensus. -// let expected_hash = block.id; -// assert_eq!(expected_hash, received_hash); -// ( -// network_controller, -// protocol_command_sender, -// protocol_manager, -// protocol_consensus_event_receiver, -// protocol_pool_event_receiver, -// ) -// }, -// ) -// .await; -// } - -// #[tokio::test] -// #[serial] -// async fn test_protocol_bans_node_sending_operation_with_size_bigger_than_max_block_size() { -// let protocol_config = &tools::PROTOCOL_CONFIG; -// protocol_test( -// protocol_config, -// async move |mut network_controller, -// protocol_event_receiver, -// protocol_command_sender, -// protocol_manager, -// mut pool_event_receiver| { -// // Create 1 node. -// let mut nodes = tools::create_and_connect_nodes(1, &mut network_controller).await; - -// let creator_node = nodes.pop().expect("Failed to get node info."); - -// // 1. Create an operation -// let mut operation = -// tools::create_operation_with_expire_period(&creator_node.keypair, 1); - -// // 2. Change the serialized data -// operation.serialized_data = vec![1; 500_001]; - -// // 3. Send block to protocol. 
-// network_controller -// .send_operations(creator_node.id, vec![operation]) -// .await; - -// // The node is banned. -// tools::assert_banned_nodes(vec![creator_node.id], &mut network_controller).await; - -// // Check protocol does not send operation to pool. -// pool_event_receiver.wait_command(1000.into(), |evt| match evt { -// evt @ MockPoolControllerMessage::AddOperations { .. } => Some(evt), -// _ => None, -// }); -// ( -// network_controller, -// protocol_event_receiver, -// protocol_command_sender, -// protocol_manager, -// pool_event_receiver, -// ) -// }, -// ) -// .await; -// } diff --git a/massa-protocol-worker/src/tests/block_scenarios.rs b/massa-protocol-worker/src/tests/block_scenarios.rs index 322acde6de7..4488debd5fa 100644 --- a/massa-protocol-worker/src/tests/block_scenarios.rs +++ b/massa-protocol-worker/src/tests/block_scenarios.rs @@ -3,12 +3,13 @@ use std::collections::HashSet; use std::time::Duration; -use crate::handlers::block_handler::{AskForBlocksInfo, BlockInfoReply, BlockMessage}; +use crate::handlers::block_handler::{AskForBlockInfo, BlockInfoReply, BlockMessage}; use crate::messages::Message; use super::context::{protocol_test, protocol_test_with_storage}; use super::tools::{assert_block_info_sent_to_node, assert_hash_asked_to_node}; use massa_consensus_exports::test_exports::MockConsensusControllerMessage; +use massa_models::operation::OperationId; use massa_models::prehash::PreHashSet; use massa_models::{block_id::BlockId, slot::Slot}; use massa_protocol_exports::test_exports::tools; @@ -63,9 +64,7 @@ fn test_full_ask_block_workflow() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -94,7 +93,7 @@ fn test_full_ask_block_workflow() { } } - //5. Send a wishlist that ask for the block + //5. 
Send a wishlist that asks for the block protocol_controller .send_wishlist_delta( vec![(block.id, Some(block.content.header.clone()))] @@ -104,18 +103,21 @@ fn test_full_ask_block_workflow() { ) .unwrap(); - //6. Assert that we asked the block to node a then node b + //6. Assert that we ask the block to node A then node B assert_hash_asked_to_node(&node_a, &block.id); + // make the request expire + std::thread::sleep(protocol_config.ask_block_timeout.to_duration()); + // Expect a new request on node B assert_hash_asked_to_node(&node_b, &block.id); - //7. Node B answer with the infos + //7. Node B answers with the operation IDs network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![op_1.id, op_2.id]), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::OperationIds(vec![op_1.id, op_2.id]), + })), ) .unwrap(); @@ -125,13 +127,22 @@ fn test_full_ask_block_workflow() { .expect("Node B didn't receive the ask for operations message"); match msg { Message::Block(message) => { - if let BlockMessage::AskForBlocks(asked) = *message { - assert_eq!(asked.len(), 1); - assert_eq!(asked[0].0, block.id); - assert_eq!( - asked[0].1, - AskForBlocksInfo::Operations(vec![op_1.id, op_2.id]) - ); + if let BlockMessage::DataRequest { + block_id, + block_info, + } = *message + { + assert_eq!(block_id, block.id); + if let AskForBlockInfo::Operations(operations) = block_info { + assert_eq!( + &operations.into_iter().collect::>(), + &vec![op_1.id, op_2.id] + .into_iter() + .collect::>() + ); + } else { + panic!("Node B didn't receive the ask for operations message"); + } } else { panic!("Node B didn't receive the ask for operations message"); } @@ -143,10 +154,10 @@ fn test_full_ask_block_workflow() { network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - 
BlockInfoReply::Operations(vec![op_1, op_2]), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::Operations(vec![op_1, op_2]), + })), ) .unwrap(); @@ -227,9 +238,7 @@ fn test_empty_block() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -251,10 +260,10 @@ fn test_empty_block() { network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![]), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::OperationIds(vec![]), + })), ) .unwrap(); @@ -349,9 +358,7 @@ fn test_dont_want_it_anymore() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -378,10 +385,10 @@ fn test_dont_want_it_anymore() { network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![op_1.id, op_2.id]), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::OperationIds(vec![op_1.id, op_2.id]), + })), ) .unwrap(); @@ -428,13 +435,10 @@ fn test_no_one_has_it() { //1. 
Create 3 nodes let node_a_keypair = KeyPair::generate(0).unwrap(); let node_b_keypair = KeyPair::generate(0).unwrap(); - let node_c_keypair = KeyPair::generate(0).unwrap(); let (node_a_peer_id, node_a) = network_controller .create_fake_connection(PeerId::from_public_key(node_a_keypair.get_public_key())); let (_node_b_peer_id, node_b) = network_controller .create_fake_connection(PeerId::from_public_key(node_b_keypair.get_public_key())); - let (_node_c_peer_id, node_c) = network_controller - .create_fake_connection(PeerId::from_public_key(node_c_keypair.get_public_key())); //2. Create a block coming from node a. let block = tools::create_block(&node_a_keypair); @@ -453,23 +457,19 @@ fn test_no_one_has_it() { //4. Assert that we asked the block to node a assert_hash_asked_to_node(&node_a, &block.id); - //5. Node A answer with the not found message + //5. Node A answers with the not found message network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::NotFound, - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::NotFound, + })), ) .unwrap(); - //6. Assert that we asked the block to other nodes - assert_hash_asked_to_node(&node_b, &block.id); - assert_hash_asked_to_node(&node_c, &block.id); - assert_hash_asked_to_node(&node_a, &block.id); + //6. Assert that we asked the block to the other node assert_hash_asked_to_node(&node_b, &block.id); - assert_hash_asked_to_node(&node_c, &block.id); ( network_controller, @@ -542,9 +542,8 @@ fn test_multiple_blocks_without_a_priori() { let message = node_b.recv_timeout(Duration::from_millis(1500)).unwrap(); match message { Message::Block(message) => { - if let BlockMessage::AskForBlocks(asked) = *message { - assert_eq!(asked.len(), 1); - to_be_asked_blocks.remove(&asked[0].0); + if let BlockMessage::DataRequest { block_id, .. 
} = *message { + to_be_asked_blocks.remove(&block_id); } else { panic!("Node didn't receive the ask for block message"); } @@ -554,9 +553,8 @@ fn test_multiple_blocks_without_a_priori() { let message = node_c.recv_timeout(Duration::from_millis(1500)).unwrap(); match message { Message::Block(message) => { - if let BlockMessage::AskForBlocks(asked) = *message { - assert_eq!(asked.len(), 1); - to_be_asked_blocks.remove(&asked[0].0); + if let BlockMessage::DataRequest { block_id, .. } = *message { + to_be_asked_blocks.remove(&block_id); } else { panic!("Node didn't receive the ask for block message"); } @@ -623,19 +621,19 @@ fn test_protocol_sends_blocks_when_asked_for() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::AskForBlocks(vec![( - block.id, - AskForBlocksInfo::Info, - )]))), + Message::Block(Box::new(BlockMessage::DataRequest { + block_id: block.id, + block_info: AskForBlockInfo::OperationIds, + })), ) .unwrap(); network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::AskForBlocks(vec![( - block.id, - AskForBlocksInfo::Info, - )]))), + Message::Block(Box::new(BlockMessage::DataRequest { + block_id: block.id, + block_info: AskForBlockInfo::OperationIds, + })), ) .unwrap(); @@ -702,9 +700,7 @@ fn test_protocol_propagates_block_to_node_who_asked_for_operations_and_only_head network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -745,10 +741,10 @@ fn test_protocol_propagates_block_to_node_who_asked_for_operations_and_only_head network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::AskForBlocks(vec![( - block.id, - AskForBlocksInfo::Info, - )]))), + Message::Block(Box::new(BlockMessage::DataRequest { + block_id: block.id, + block_info: 
AskForBlockInfo::OperationIds, + })), ) .unwrap(); @@ -765,7 +761,7 @@ fn test_protocol_propagates_block_to_node_who_asked_for_operations_and_only_head .expect("Node c should receive the block header"); match msg { Message::Block(block_msg) => match *block_msg { - BlockMessage::BlockHeader(header) => { + BlockMessage::Header(header) => { assert_eq!(header.id, block.content.header.id); } _ => { diff --git a/massa-protocol-worker/src/tests/cache_scenarios.rs b/massa-protocol-worker/src/tests/cache_scenarios.rs index dcf926f9807..5638971b919 100644 --- a/massa-protocol-worker/src/tests/cache_scenarios.rs +++ b/massa-protocol-worker/src/tests/cache_scenarios.rs @@ -1,8 +1,10 @@ // Copyright (c) 2022 MASSA LABS +use std::collections::HashSet; use std::time::Duration; use massa_consensus_exports::test_exports::MockConsensusControllerMessage; +use massa_models::operation::OperationId; use massa_models::{block_id::BlockId, prehash::PreHashSet, slot::Slot}; use massa_protocol_exports::PeerId; use massa_protocol_exports::{test_exports::tools, ProtocolConfig}; @@ -11,7 +13,7 @@ use massa_time::MassaTime; use serial_test::serial; use crate::{ - handlers::block_handler::{AskForBlocksInfo, BlockInfoReply, BlockMessage}, + handlers::block_handler::{AskForBlockInfo, BlockInfoReply, BlockMessage}, messages::Message, }; @@ -63,9 +65,7 @@ fn test_noting_block_does_not_panic_with_one_max_node_known_blocks_size() { network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -104,18 +104,20 @@ fn test_noting_block_does_not_panic_with_one_max_node_known_blocks_size() { ) .unwrap(); - //6. Assert that we asked the block to node a then node b + //6. 
Assert that we asked the block to node a then node b then a again then b + assert_hash_asked_to_node(&node_a, &block.id); + assert_hash_asked_to_node(&node_b, &block.id); assert_hash_asked_to_node(&node_a, &block.id); assert_hash_asked_to_node(&node_b, &block.id); - //7. Node B answer with the infos + //7. Node B answers with the list of operation IDs network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![op_1.id, op_2.id]), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::OperationIds(vec![op_1.id, op_2.id]), + })), ) .unwrap(); @@ -125,13 +127,22 @@ fn test_noting_block_does_not_panic_with_one_max_node_known_blocks_size() { .expect("Node B didn't receive the ask for operations message"); match msg { Message::Block(message) => { - if let BlockMessage::AskForBlocks(asked) = *message { - assert_eq!(asked.len(), 1); - assert_eq!(asked[0].0, block.id); - assert_eq!( - asked[0].1, - AskForBlocksInfo::Operations(vec![op_1.id, op_2.id]) - ); + if let BlockMessage::DataRequest { + block_id, + block_info, + } = *message + { + assert_eq!(block_id, block.id); + if let AskForBlockInfo::Operations(operations) = block_info { + assert_eq!( + operations.into_iter().collect::>(), + vec![op_1.id, op_2.id] + .into_iter() + .collect::>() + ); + } else { + panic!("Node B didn't receive the ask for operations message"); + } } else { panic!("Node B didn't receive the ask for operations message"); } @@ -143,10 +154,10 @@ fn test_noting_block_does_not_panic_with_one_max_node_known_blocks_size() { network_controller .send_from_peer( &node_b_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Operations(vec![op_1, op_2]), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::Operations(vec![op_1, op_2]), + })), ) 
.unwrap(); diff --git a/massa-protocol-worker/src/tests/endorsements_scenarios.rs b/massa-protocol-worker/src/tests/endorsements_scenarios.rs index b67c11b9737..b877f9af4dd 100644 --- a/massa-protocol-worker/src/tests/endorsements_scenarios.rs +++ b/massa-protocol-worker/src/tests/endorsements_scenarios.rs @@ -304,7 +304,7 @@ fn test_protocol_propagates_endorsements_only_to_nodes_that_dont_know_about_it_b network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader(block.content.header))), + Message::Block(Box::new(BlockMessage::Header(block.content.header))), ) .unwrap(); diff --git a/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs b/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs index aeef472d66d..545124c5ae3 100644 --- a/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs +++ b/massa-protocol-worker/src/tests/in_block_operations_scenarios.rs @@ -96,7 +96,7 @@ fn test_protocol_does_propagate_operations_received_in_blocks() { break; } Message::Block(block_msg) => match *block_msg { - BlockMessage::AskForBlocks(_) => { + BlockMessage::DataRequest { .. 
} => { continue; } _ => panic!("Unexpected message: {:?}", block_msg), diff --git a/massa-protocol-worker/src/tests/operations_scenarios.rs b/massa-protocol-worker/src/tests/operations_scenarios.rs index bb8693777b4..1e4be236831 100644 --- a/massa-protocol-worker/src/tests/operations_scenarios.rs +++ b/massa-protocol-worker/src/tests/operations_scenarios.rs @@ -445,9 +445,7 @@ fn test_protocol_propagates_operations_only_to_nodes_that_dont_know_about_it_ind network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -493,10 +491,12 @@ fn test_protocol_propagates_operations_only_to_nodes_that_dont_know_about_it_ind network_controller .send_from_peer( &node_a_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![operation.id].into_iter().collect()), - )]))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::OperationIds( + vec![operation.id].into_iter().collect(), + ), + })), ) .unwrap(); @@ -530,147 +530,6 @@ fn test_protocol_propagates_operations_only_to_nodes_that_dont_know_about_it_ind ) } -#[test] -#[serial] -fn test_protocol_propagates_operations_only_to_nodes_that_dont_know_about_it_indirect_knowledge_via_wrong_root_hash_header( -) { - let default_panic = std::panic::take_hook(); - std::panic::set_hook(Box::new(move |info| { - default_panic(info); - std::process::exit(1); - })); - - let mut protocol_config = ProtocolConfig::default(); - protocol_config.thread_count = 2; - protocol_config.initial_peers = "./src/tests/empty_initial_peers.json".to_string().into(); - protocol_test_with_storage( - &protocol_config, - move |mut network_controller, - protocol_controller, - protocol_manager, - consensus_event_receiver, - pool_event_receiver, - selector_event_receiver, - mut 
storage| { - //1. Create 3 node - let node_a_keypair = KeyPair::generate(0).unwrap(); - let node_b_keypair = KeyPair::generate(0).unwrap(); - let node_c_keypair = KeyPair::generate(0).unwrap(); - let (node_a_peer_id, node_a) = network_controller - .create_fake_connection(PeerId::from_public_key(node_a_keypair.get_public_key())); - let (node_b_peer_id, node_b) = network_controller - .create_fake_connection(PeerId::from_public_key(node_b_keypair.get_public_key())); - let (node_c_peer_id, node_c) = network_controller - .create_fake_connection(PeerId::from_public_key(node_c_keypair.get_public_key())); - //2. Creates 2 ops - let operation_1 = tools::create_operation_with_expire_period(&node_a_keypair, 1); - let operation_2 = tools::create_operation_with_expire_period(&node_a_keypair, 1); - - //3. Create a block from the operation - let block = tools::create_block_with_operations( - &node_a_keypair, - Slot::new(1, 1), - vec![operation_1.clone()], - ); - - //4. Node A send the block - network_controller - .send_from_peer( - &node_a_peer_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), - ) - .unwrap(); - - // 5. Send wishlist - protocol_controller - .send_wishlist_delta( - vec![(block.id, Some(block.content.header.clone()))] - .into_iter() - .collect(), - PreHashSet::::default(), - ) - .unwrap(); - - assert_hash_asked_to_node(&node_a, &block.id); - assert_hash_asked_to_node(&node_b, &block.id); - assert_hash_asked_to_node(&node_c, &block.id); - - //6. Node B sends block info with bad ops list. Making him ban - network_controller - .send_from_peer( - &node_b_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![operation_2.id].into_iter().collect()), - )]))), - ) - .unwrap(); - - //7. 
Node C sends block info with right ops list - network_controller - .send_from_peer( - &node_c_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Info(vec![operation_1.id].into_iter().collect()), - )]))), - ) - .unwrap(); - - assert_hash_asked_to_node(&node_c, &block.id); - network_controller - .send_from_peer( - &node_c_peer_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(vec![( - block.id, - BlockInfoReply::Operations(vec![operation_1]), - )]))), - ) - .unwrap(); - //8. Propagate operations that is not in the block and so should be propagated to everyone - storage.store_operations(vec![operation_2.clone()]); - protocol_controller.propagate_operations(storage).unwrap(); - - let _ = ( - node_a - .recv_timeout(Duration::from_millis(1000)) - .expect("Node B should have received the operations."), - node_b - .recv_timeout(Duration::from_millis(1000)) - .expect_err("Node B should not have received the operations."), - node_c - .recv_timeout(Duration::from_millis(1000)) - .expect("Node B should have received the operations."), - ); - // Fails sometime TODO: FIX - //println!("msgs: {:?}", msgs); - // match msgs { - // ( - // Message::Operation(OperationMessage::OperationsAnnouncement(operations)), - // _, - // Message::Operation(OperationMessage::OperationsAnnouncement(operations3)), - // ) => { - // assert_eq!(operations.len(), 2); - // assert!(operations.contains(&operation_2.id.into_prefix())); - // assert_eq!(operations3.len(), 1); - // assert!(operations3.contains(&operation_2.id.into_prefix())); - // } - // _ => panic!("Unexpected message type."), - // } - ( - network_controller, - protocol_controller, - protocol_manager, - consensus_event_receiver, - pool_event_receiver, - selector_event_receiver, - ) - }, - ) -} - #[test] #[serial] fn test_protocol_ask_operations_on_batch_received() { diff --git a/massa-protocol-worker/src/tests/tools.rs b/massa-protocol-worker/src/tests/tools.rs index 
1ee4f2b8183..28531023de5 100644 --- a/massa-protocol-worker/src/tests/tools.rs +++ b/massa-protocol-worker/src/tests/tools.rs @@ -8,21 +8,28 @@ use massa_models::{ use massa_protocol_exports::{PeerId, ProtocolController}; use crate::{ - handlers::block_handler::{BlockInfoReply, BlockMessage}, + handlers::block_handler::{AskForBlockInfo, BlockInfoReply, BlockMessage}, messages::Message, }; use super::mock_network::MockNetworkController; -pub fn assert_hash_asked_to_node(node: &MassaReceiver, block_id: &BlockId) { +pub fn assert_hash_asked_to_node( + node: &MassaReceiver, + block_id: &BlockId, +) -> AskForBlockInfo { let msg = node .recv_timeout(Duration::from_millis(1500)) .expect("Node didn't receive the ask for block message"); match msg { Message::Block(message) => { - if let BlockMessage::AskForBlocks(asked) = *message { - assert_eq!(asked.len(), 1); - assert_eq!(&asked[0].0, block_id); + if let BlockMessage::DataRequest { + block_id: b_id, + block_info, + } = *message + { + assert_eq!(&b_id, block_id); + block_info } else { panic!("Node didn't receive the ask for block message"); } @@ -37,7 +44,7 @@ pub fn assert_block_info_sent_to_node(node: &MassaReceiver, block_id: & .expect("Node didn't receive the infos block message"); match msg { Message::Block(message) => { - if let BlockMessage::BlockHeader(header) = *message { + if let BlockMessage::Header(header) = *message { assert_eq!(&header.id, block_id); } else { panic!("Node didn't receive the block header message") @@ -51,11 +58,14 @@ pub fn assert_block_info_sent_to_node(node: &MassaReceiver, block_id: & .expect("Node didn't receive the infos block message"); match msg { Message::Block(message) => { - if let BlockMessage::ReplyForBlocks(asked) = *message { - assert_eq!(asked.len(), 1); - assert_eq!(&asked[0].0, block_id); - match asked[0].1 { - BlockInfoReply::Info(_) => {} + if let BlockMessage::DataResponse { + block_id: b_id, + block_info, + } = *message + { + assert_eq!(&b_id, block_id); + match 
block_info { + BlockInfoReply::OperationIds(_) => {} _ => panic!("Node didn't receive the infos block message"), } } else { @@ -77,9 +87,7 @@ pub fn send_and_propagate_block( network_controller .send_from_peer( node_id, - Message::Block(Box::new(BlockMessage::BlockHeader( - block.content.header.clone(), - ))), + Message::Block(Box::new(BlockMessage::Header(block.content.header.clone()))), ) .unwrap(); @@ -93,23 +101,24 @@ pub fn send_and_propagate_block( .unwrap(); // Send block info to protocol. - let info = vec![( - block.id, - BlockInfoReply::Info(block.content.operations.clone()), - )]; network_controller .send_from_peer( node_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(info))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::OperationIds(block.content.operations.clone()), + })), ) .unwrap(); // Send full ops. - let info = vec![(block.id, BlockInfoReply::Operations(operations))]; network_controller .send_from_peer( node_id, - Message::Block(Box::new(BlockMessage::ReplyForBlocks(info))), + Message::Block(Box::new(BlockMessage::DataResponse { + block_id: block.id, + block_info: BlockInfoReply::Operations(operations), + })), ) .unwrap(); } From f5951490e74b921f78192a950ba0cb050e8d9b7c Mon Sep 17 00:00:00 2001 From: Damir Vodenicarevic Date: Mon, 24 Jul 2023 08:45:51 +0200 Subject: [PATCH 46/71] increase max_ops_per_msg to match max_ops_per_block (#4265) --- massa-node/base_config/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/massa-node/base_config/config.toml b/massa-node/base_config/config.toml index 0dd35c83559..e431f539dc9 100644 --- a/massa-node/base_config/config.toml +++ b/massa-node/base_config/config.toml @@ -200,7 +200,7 @@ # interval at which operations are announced in batches. 
operation_announcement_interval = 300 # max number of operation per message, same as network param but can be smaller - max_operations_per_message = 1024 + max_operations_per_message = 5000 # Number of millis seconds between each try out connections try_connection_timer = 5000 # Number of millis seconds that create a timeout for out connections From 88a7a7159bba33390203d223f2d3996aacc46ce4 Mon Sep 17 00:00:00 2001 From: Damir Vodenicarevic Date: Mon, 24 Jul 2023 15:04:35 +0200 Subject: [PATCH 47/71] improve op propagation limit (#4267) * improve op propagation limit * correct --- .../handlers/operation_handler/propagation.rs | 27 ++++++++++++------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs index f58f006eda1..5a8acae2f93 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs @@ -100,15 +100,6 @@ impl PropagationThread { fn prune_propagation_storage(&mut self) { let mut removed = PreHashSet::default(); - // cap cache size - while self.stored_for_propagation.len() > self.config.max_ops_kept_for_propagation { - if let Some((_t, op_ids)) = self.stored_for_propagation.pop_front() { - removed.extend(op_ids); - } else { - break; - } - } - // remove expired let max_op_prop_time = self.config.max_operations_propagation_time.to_duration(); while let Some((t, _)) = self.stored_for_propagation.front() { @@ -123,6 +114,24 @@ impl PropagationThread { } } + // Cap cache size + // Note that we directly remove batches of operations, not individual operations + // to favor simplicity and performance over precision. 
+ let mut excess_count = self + .stored_for_propagation + .iter() + .map(|(_, ops)| ops.len()) + .sum::() + .saturating_sub(self.config.max_ops_kept_for_propagation); + while excess_count > 0 { + if let Some((_t, op_ids)) = self.stored_for_propagation.pop_front() { + excess_count = excess_count.saturating_sub(op_ids.len()); + removed.extend(op_ids); + } else { + break; + } + } + // remove from storage self.op_storage.drop_operation_refs(&removed); } From 7a862f9d4540c057a483c3b0af228ee2fba2f655 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Mon, 24 Jul 2023 15:36:43 +0200 Subject: [PATCH 48/71] all evm interface implementations --- Cargo.lock | 646 ++++++++++--------- Cargo.toml | 2 +- massa-execution-worker/src/interface_impl.rs | 70 +- 3 files changed, 395 insertions(+), 323 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ea27d383800..a5eaf24f38f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -75,15 +75,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "aho-corasick" -version = "0.7.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" -dependencies = [ - "memchr", -] - [[package]] name = "aho-corasick" version = "1.0.2" @@ -125,9 +116,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.71" +version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" [[package]] name = "arrayref" @@ -144,7 +135,7 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "as-ffi-bindings" version = "0.2.5" -source = "git+https://github.com/massalabs/as-ffi-bindings.git?tag=v0.4.0#7767634dfc22407bd2b0fa0e4fd7432231b10dd7" +source = 
"git+https://github.com/massalabs/as-ffi-bindings.git?tag=0.5.0#512792515055573e600c0054a4b44bbcbfeb779a" dependencies = [ "anyhow", "wasmer", @@ -176,20 +167,20 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] name = "async-trait" -version = "0.1.70" +version = "0.1.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79fa67157abdfd688a259b6648808757db9347af834624f27ec646da976aee5d" +checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -211,9 +202,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" -version = "0.6.18" +version = "0.6.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" +checksum = "a6a1de45611fdb535bfde7b7de4fd54f4fd2b17b1737c0a59b69bf9b92074b8c" dependencies = [ "async-trait", "axum-core", @@ -308,8 +299,8 @@ dependencies = [ "lazy_static", "lazycell", "peeking_take_while", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "regex", "rustc-hash", "shlex", @@ -392,7 +383,7 @@ dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", "proc-macro-crate 0.1.5", - "proc-macro2 1.0.63", + "proc-macro2 1.0.66", "syn 1.0.109", ] @@ -402,8 +393,8 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 
1.0.32", "syn 1.0.109", ] @@ -413,8 +404,8 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -429,9 +420,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a246e68bb43f6cd9db24bea052a53e40405417c5fb372e3d1a8a7f770a564ef5" +checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", "serde", @@ -460,8 +451,8 @@ version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -983,12 +974,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0558d22a7b463ed0241e993f76f09f30b126687447751a8638587b864e4b3944" +checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" dependencies = [ - "darling_core 0.20.1", - "darling_macro 0.20.1", + "darling_core 0.20.3", + "darling_macro 0.20.3", ] [[package]] @@ -999,24 +990,24 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "strsim 0.10.0", "syn 1.0.109", ] [[package]] name = "darling_core" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab8bfa2e259f8ee1ce5e97824a3c55ec4404a0d772ca7fa96bf19f0752a046eb" +checksum = 
"177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "strsim 0.10.0", - "syn 2.0.23", + "syn 2.0.27", ] [[package]] @@ -1026,29 +1017,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core 0.14.4", - "quote 1.0.29", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "darling_macro" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29a358ff9f12ec09c3e61fef9b5a9902623a695a46a917b07f269bff1445611a" +checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ - "darling_core 0.20.1", - "quote 1.0.29", - "syn 2.0.23", + "darling_core 0.20.3", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] name = "dashmap" -version = "5.4.0" +version = "5.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +checksum = "6943ae99c34386c84a470c499d3414f66502a41340aa895406e0d2e4a207b91d" dependencies = [ "cfg-if", - "hashbrown 0.12.3", + "hashbrown 0.14.0", "lock_api", "once_cell", "parking_lot_core", @@ -1060,8 +1051,8 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1150,9 +1141,9 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -1177,8 
+1168,8 @@ dependencies = [ "byteorder", "lazy_static", "proc-macro-error", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1219,9 +1210,9 @@ dependencies = [ [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "encode_unicode" @@ -1250,8 +1241,8 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1262,8 +1253,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8ea75f31022cba043afe037940d73684327e915f88f62478e778c3de914cd0a" dependencies = [ "enum_delegate_lib", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -1273,8 +1264,8 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e1f6c3800b304a6be0012039e2a45a322a093539c45ab818d9e6895a39c90fe" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "rand 0.8.5", "syn 1.0.109", ] @@ -1294,23 +1285,23 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08b6c6ab82d70f08844964ba10c7babb716de2ecaeab9be5717918a5177d3af" dependencies = [ - "darling 0.20.1", - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "darling 0.20.3", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] name = "equivalent" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" -version = "0.3.27" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f94c0e13118e7d7533271f754a168ae8400e6a1cc043f2bfd53cc7290f1a1de3" +checksum = "da96524cc884f6558f1769b6c46686af2fe8e8b4cd253bd5a3cdba8181b8e070" dependencies = [ "serde", ] @@ -1360,12 +1351,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.9.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" [[package]] name = "fd-lock" @@ -1374,7 +1362,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ "cfg-if", - "rustix 0.38.3", + "rustix 0.38.4", "windows-sys 0.48.0", ] @@ -1499,9 +1487,9 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -1620,11 +1608,11 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" +checksum = "1391ab1f92ffcc08911957149833e682aa3fe252b9f45f966d2ef972274c97df" dependencies = [ - "aho-corasick 0.7.20", + "aho-corasick", "bstr", "fnv", "log", @@ -1840,9 
+1828,9 @@ dependencies = [ [[package]] name = "http-range-header" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" +checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" [[package]] name = "httparse" @@ -1984,15 +1972,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - [[package]] name = "io-lifetimes" version = "1.0.11" @@ -2015,9 +1994,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jobserver" @@ -2142,8 +2121,8 @@ checksum = "c6027ac0b197ce9543097d02a290f550ce1d9432bf301524b013053c0b75cc94" dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -2319,9 +2298,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.9" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ee889ecc9568871456d42f603d6a0ce59ff328d291063a45cbdf0036baf6db" +checksum = "24e6ab01971eb092ffe6a7d42f49f9ff42662f17604681e2843ad65077ba47dc" dependencies = [ "cc", "pkg-config", @@ -2340,12 +2319,6 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - [[package]] name = "linux-raw-sys" version = "0.4.3" @@ -2384,7 +2357,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0fbfc88337168279f2e9ae06e157cfed4efd3316e14dc96ed074d4f2e6c5952" dependencies = [ - "quote 1.0.29", + "quote 1.0.32", "syn 1.0.109", ] @@ -2524,7 +2497,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=main#ca2c7f1b4e5fb24764f9f34bd38c70bfd6aa526a" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=evm-update#d702a0a6d5a4d57f6858b4704df1e55fad56aac8" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2535,7 +2508,7 @@ dependencies = [ "glob", "loupe", "more-asserts 0.3.1", - "num_enum", + "num_enum 0.6.1", "parking_lot", "prost", "prost-build", @@ -2544,7 +2517,7 @@ dependencies = [ "regex", "serde", "serde_json", - "serial_test", + "serial_test 2.0.0", "sha2 0.10.6", "thiserror", "tracing", @@ -2606,7 +2579,7 @@ dependencies = [ "massa_wallet", "paginate", "serde", - "serial_test", + "serial_test 1.0.0", "strum", "thiserror", ] @@ -2663,12 +2636,12 @@ dependencies = [ "mockall", "nom", "num", - "num_enum", + "num_enum 0.5.11", "parking_lot", "rand 0.8.5", "serde", "serde_json", - "serial_test", + "serial_test 1.0.0", "socket2", "substruct", "tempfile", @@ -2851,7 +2824,7 @@ dependencies = [ "rand 0.8.5", "rand_xoshiro", "serde_json", - "serial_test", + "serial_test 1.0.0", "sha2 0.10.6", "sha3", "tempfile", @@ -2978,7 +2951,7 @@ dependencies = [ "nom", "serde", "serde_json", - "serial_test", + "serial_test 1.0.0", "thiserror", ] @@ -2993,7 +2966,7 @@ dependencies = [ "massa_models", "massa_serialization", "nom", - "num_enum", + "num_enum 0.5.11", "serde", "serde_json", "tempfile", @@ -3053,11 +3026,11 @@ dependencies = [ "massa_time", "nom", "num", - "num_enum", + "num_enum 0.5.11", "rust_decimal", "serde", 
"serde_with", - "serial_test", + "serial_test 1.0.0", "thiserror", "transition", ] @@ -3073,11 +3046,11 @@ dependencies = [ "massa_models", "massa_serialization", "nom", - "num_enum", + "num_enum 0.5.11", "rand 0.8.5", "rocksdb", "schnellru", - "serial_test", + "serial_test 1.0.0", "tempfile", "thiserror", "tracing", @@ -3198,14 +3171,14 @@ dependencies = [ "massa_versioning", "nom", "num", - "num_enum", + "num_enum 0.5.11", "parking_lot", "peernet", "rand 0.8.5", "rayon", "schnellru", "serde_json", - "serial_test", + "serial_test 1.0.0", "tempfile", "tracing", ] @@ -3251,7 +3224,7 @@ dependencies = [ "rand 0.7.3", "serde", "serde_json", - "serial_test", + "serial_test 1.0.0", "thiserror", "transition", ] @@ -3295,7 +3268,7 @@ dependencies = [ "more-asserts 0.3.1", "nom", "num", - "num_enum", + "num_enum 0.5.11", "parking_lot", "tempfile", "thiserror", @@ -3433,8 +3406,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22ce75669015c4f47b289fd4d4f56e894e4c96003ffdf3ac51313126f94c6cbb" dependencies = [ "cfg-if", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -3517,9 +3490,9 @@ dependencies = [ [[package]] name = "num" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" +checksum = "b05180d69e3da0e530ba2a1dae5110317e49e3b7f3d41be227dc5f92e49ee7af" dependencies = [ "num-bigint", "num-complex", @@ -3587,9 +3560,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -3611,7 +3584,16 @@ version = "0.5.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" dependencies = [ - "num_enum_derive", + "num_enum_derive 0.5.11", +] + +[[package]] +name = "num_enum" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" +dependencies = [ + "num_enum_derive 0.6.1", ] [[package]] @@ -3621,11 +3603,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] +[[package]] +name = "num_enum_derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", +] + [[package]] name = "object" version = "0.28.4" @@ -3764,8 +3758,8 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f35583365be5d148e959284f42526841917b7bfa09e2d1a7ad5dde2cf0eaa39" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -3817,9 +3811,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.7.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73935e4d55e2abf7f130186537b19e7a4abc886a0252380b59248af473a3fc9" +checksum = "0d2d1d55045829d65aad9d389139882ad623b33b904e7c9f1b10c5b8927298e5" dependencies = [ "thiserror", "ucd-trie", @@ -3827,9 +3821,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.7.0" +version = "2.7.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "aef623c9bbfa0eedf5a0efba11a5ee83209c326653ca31ff019bec3a95bfff2b" +checksum = "5f94bca7e7a599d89dea5dfa309e217e7906c3c007fb9c3299c40b10d6a315d3" dependencies = [ "pest", "pest_generator", @@ -3837,22 +3831,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190" +checksum = "99d490fe7e8556575ff6911e45567ab95e71617f43781e5c05490dc8d75c965c" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] name = "pest_meta" -version = "2.7.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a01f71cb40bd8bb94232df14b946909e14660e33fc05db3e50ae2a82d7ea0ca0" +checksum = "2674c66ebb4b4d9036012091b537aae5878970d6999f81a265034d85b136b341" dependencies = [ "once_cell", "pest", @@ -3884,9 +3878,9 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -3989,7 +3983,7 @@ version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.66", "syn 1.0.109", ] @@ -4019,8 +4013,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", "version_check", 
] @@ -4031,8 +4025,8 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "version_check", ] @@ -4047,9 +4041,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.63" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] @@ -4124,8 +4118,8 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", "itertools", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -4159,8 +4153,8 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -4192,11 +4186,11 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.29" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" +checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" dependencies = [ - "proc-macro2 1.0.63", + "proc-macro2 1.0.66", ] [[package]] @@ -4370,11 +4364,11 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89089e897c013b3deb627116ae56a6955a72b8bed395c9526af31c9fe528b484" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" dependencies = [ - "aho-corasick 1.0.2", + 
"aho-corasick", "memchr", "regex-automata", "regex-syntax", @@ -4382,20 +4376,20 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.0" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa250384981ea14565685dea16a9ccc4d1c541a13f82b9c168572264d1df8c56" +checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" dependencies = [ - "aho-corasick 1.0.2", + "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "region" @@ -4457,8 +4451,8 @@ version = "0.7.42" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -4539,23 +4533,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.3" +version = "0.38.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" +checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" dependencies = [ "bitflags 2.3.3", "errno", @@ -4566,13 +4546,13 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.2" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f" +checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" dependencies = [ "log", "ring", - "rustls-webpki", + "rustls-webpki 0.101.1", "sct", ] @@ -4607,11 +4587,21 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustls-webpki" +version = "0.101.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f36a6828982f422756984e47912a7a51dcbc2a197aa791158f8ca61cd8204e" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustversion" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "rustyline" @@ -4642,16 +4632,16 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "107c3d5d7f370ac09efa62a78375f94d94b8a33c61d8c278b96683fb4dbf2d8d" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "ryu" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "same-file" @@ -4684,9 +4674,9 @@ dependencies = [ [[package]] name = "scopeguard" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" @@ -4706,9 +4696,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" [[package]] name = "security-framework" -version = 
"2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -4719,9 +4709,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -4735,9 +4725,9 @@ checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" [[package]] name = "serde" -version = "1.0.166" +version = "1.0.175" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d01b7404f9d441d3ad40e6a636a7782c377d2abdbe4fa2440e2edcc2f4f10db8" +checksum = "5d25439cd7397d044e2748a6fe2432b5e85db703d6d097bd014b3c0ad1ebff0b" dependencies = [ "serde_derive", ] @@ -4755,20 +4745,20 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.166" +version = "1.0.175" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd83d6dde2b6b2d466e14d9d1acce8816dedee94f735eac6395808b3483c6d6" +checksum = "b23f7ade6f110613c0d63858ddb8b94c1041f550eab58a16b371bdf2c9c80ab4" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] name = "serde_json" -version = "1.0.100" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" +checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" dependencies = [ "itoa", "ryu", @@ -4808,10 +4798,10 @@ version = "2.3.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ - "darling 0.20.1", - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "darling 0.20.3", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -4825,7 +4815,21 @@ dependencies = [ "lazy_static", "log", "parking_lot", - "serial_test_derive", + "serial_test_derive 1.0.0", +] + +[[package]] +name = "serial_test" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" +dependencies = [ + "dashmap", + "futures", + "lazy_static", + "log", + "parking_lot", + "serial_test_derive 2.0.0", ] [[package]] @@ -4834,11 +4838,22 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "079a83df15f85d89a68d64ae1238f142f172b1fa915d0d76b26a7cba1b659a69" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] +[[package]] +name = "serial_test_derive" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" +dependencies = [ + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", +] + [[package]] name = "sha-1" version = "0.9.8" @@ -4945,9 +4960,9 @@ checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" [[package]] name = "smallvec" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "socket2" @@ -5031,8 +5046,8 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - 
"proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -5052,8 +5067,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "rustversion", "syn 1.0.109", ] @@ -5063,7 +5078,7 @@ name = "substruct" version = "0.1.0" source = "git+https://github.com/sydhds/substruct?branch=main#2fb3ae0dc9d913a0566ce6415eaa7a7ca1690fe1" dependencies = [ - "quote 1.0.29", + "quote 1.0.32", "syn 1.0.109", ] @@ -5090,19 +5105,19 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.23" +version = "2.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fb7d6d8281a51045d62b8eb3a7d1ce347b76f312af50cd3dc0af39c87c1737" +checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "unicode-ident", ] @@ -5120,21 +5135,20 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.12.8" +version = "0.12.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1c7f239eb94671427157bd93b3694320f3668d4e1eff08c7285366fd777fac" +checksum = "1d2faeef5759ab89935255b1a4cd98e0baf99d1085e37d36599c625dac49ae8e" [[package]] name = "tempfile" -version = "3.6.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +checksum = 
"5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998" dependencies = [ - "autocfg", "cfg-if", "fastrand", "redox_syscall 0.3.5", - "rustix 0.37.23", + "rustix 0.38.4", "windows-sys 0.48.0", ] @@ -5161,22 +5175,22 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.41" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c16a64ba9387ef3fdae4f9c1a7f07a0997fce91985c0336f1ddc1822b3b37802" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.41" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d14928354b01c4d6a4f0e549069adef399a284e7995c7ccca94e8a07a5346c59" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -5202,9 +5216,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.22" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd" +checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" dependencies = [ "itoa", "serde", @@ -5220,9 +5234,9 @@ checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "372950940a5f07bf38dbe211d7283c9e6d7327df53794992d293e534c733d09b" +checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" dependencies = [ "time-core", ] @@ -5288,9 +5302,9 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -5347,9 +5361,9 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.11" +version = "0.19.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266f016b7f039eec8a1a80dfe6156b633d208b9fccca5e4db1d6775b0c4e34a7" +checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" dependencies = [ "indexmap 2.0.0", "toml_datetime", @@ -5395,9 +5409,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" dependencies = [ "prettyplease", - "proc-macro2 1.0.63", + "proc-macro2 1.0.66", "prost-build", - "quote 1.0.29", + "quote 1.0.32", "syn 1.0.109", ] @@ -5470,9 +5484,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8bd22a874a2d0b70452d5597b12c537331d49060824a95f49f108994f94aa4c" +checksum = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82" dependencies = [ "bitflags 2.3.3", "bytes", @@ -5517,9 +5531,9 @@ version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] @@ -5571,8 +5585,8 @@ version = "0.1.0" source = "git+https://github.com/massalabs/transition.git?rev=93fa3bf82f9f5ff421c78536879b7fd1b948ca75#93fa3bf82f9f5ff421c78536879b7fd1b948ca75" dependencies = [ "darling 0.14.4", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 
1.0.109", "unsigned-varint", ] @@ -5591,9 +5605,9 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "ucd-trie" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" [[package]] name = "unicode-bidi" @@ -5603,9 +5617,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" [[package]] name = "unicode-normalization" @@ -5677,9 +5691,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" [[package]] name = "valuable" @@ -5755,9 +5769,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", "wasm-bindgen-shared", ] @@ -5779,8 +5793,8 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] @@ -5802,7 +5816,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ - "quote 1.0.29", + "quote 1.0.32", "wasm-bindgen-macro-support", ] @@ -5812,9 +5826,9 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ - "proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5827,18 +5841,18 @@ checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-encoder" -version = "0.29.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18c41dbd92eaebf3612a39be316540b8377c871cb9bde6b064af962984912881" +checksum = "06a3d1b4a575ffb873679402b2aedb3117555eb65c27b1b86c8a91e574bc2a2a" dependencies = [ "leb128", ] [[package]] name = "wasmer" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78caedecd8cb71ed47ccca03b68d69414a3d278bb031e6f93f15759344efdd52" +checksum = "ea790bcdfb4e6e9d1e5ddf75b4699aac62b078fcc9f27f44e1748165ceea67bf" dependencies = [ "bytes", "cfg-if", @@ -5864,9 +5878,9 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "726a8450541af4a57c34af7b6973fdbfc79f896cc7e733429577dfd1d1687180" +checksum = "f093937725e242e5529fed27e08ff836c011a9ecc22e6819fb818c2ac6ff5f88" dependencies = [ "backtrace", "cfg-if", @@ -5888,9 +5902,9 @@ dependencies = [ [[package]] name = "wasmer-compiler-cranelift" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e5633f90f372563ebbdf3f9799c7b29ba11c90e56cf9b54017112d2e656c95" +checksum = "3b27b1670d27158789ebe14e4da3902c72132174884a1c6a3533ce4fd9dd83db" 
dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -5907,9 +5921,9 @@ dependencies = [ [[package]] name = "wasmer-compiler-singlepass" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d38957de6f452115c0af3ff08cec268ee248d665b54d4bbf7da60b7453cb97" +checksum = "02509aaab7e302fc551ff03512807514b379ba32136892e96fcaa5b62a3228de" dependencies = [ "byteorder", "dynasm", @@ -5926,21 +5940,21 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97901fdbaae383dbb90ea162cc3a76a9fa58ac39aec7948b4c0b9bbef9307738" +checksum = "13ae8286cba2acb10065a4dac129c7c7f7bcd24acd6538555d96616eea16bc27" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.63", - "quote 1.0.29", + "proc-macro2 1.0.66", + "quote 1.0.32", "syn 1.0.109", ] [[package]] name = "wasmer-middlewares" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e028013811035111beb768074b6ccc09eabd77811b1e01fd099b5471924ca16" +checksum = "3bc8cafe02112ecf21a7fc1d090ac0f0220a86a8d4f561c24e382a4c2ee4addc" dependencies = [ "wasmer", "wasmer-types", @@ -5949,9 +5963,9 @@ dependencies = [ [[package]] name = "wasmer-object" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6a25e04fdd0f2173bebfce2804ac1fba5e45827afed76876bf414e74244aae" +checksum = "bda85dbe7e313fa98a22d11045885db631871e1d17382297da401f2be7e4c957" dependencies = [ "object 0.28.4", "thiserror", @@ -5960,9 +5974,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67f1f2839f4f61509550e4ddcd0e658e19f3af862b51c79fda15549d735d659b" +checksum = "918d2f0bb5eaa95a80c06be33f21dee92f40f12cd0982da34490d121a99d244b" dependencies = [ 
"bytecheck", "enum-iterator", @@ -5976,9 +5990,9 @@ dependencies = [ [[package]] name = "wasmer-vm" -version = "3.3.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "043118ec4f16d1714fed3aab758b502b864bd865e1d5188626c9ad290100563f" +checksum = "a1e000c2cbd4f9805427af5f3b3446574caf89ab3a1e66c2f3579fbde22b072b" dependencies = [ "backtrace", "cc", @@ -6013,9 +6027,9 @@ dependencies = [ [[package]] name = "wast" -version = "60.0.0" +version = "62.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd06cc744b536e30387e72a48fdd492105b9c938bb4f415c39c616a7a0a697ad" +checksum = "c7f7ee878019d69436895f019b65f62c33da63595d8e857cbdc87c13ecb29a32" dependencies = [ "leb128", "memchr", @@ -6025,9 +6039,9 @@ dependencies = [ [[package]] name = "wat" -version = "1.0.66" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5abe520f0ab205366e9ac7d3e6b2fc71de44e32a2b58f2ec871b6b575bdcea3b" +checksum = "295572bf24aa5b685a971a83ad3e8b6e684aaad8a9be24bc7bf59bed84cc1c08" dependencies = [ "wast", ] @@ -6048,7 +6062,7 @@ version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" dependencies = [ - "rustls-webpki", + "rustls-webpki 0.100.1", ] [[package]] @@ -6279,9 +6293,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.7" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca0ace3845f0d96209f0375e6d367e3eb87eb65d27d445bdc9f1843a26f39448" +checksum = "81fac9742fd1ad1bd9643b991319f72dd031016d44b77039a26977eb667141e7" dependencies = [ "memchr", ] @@ -6319,9 +6333,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - 
"proc-macro2 1.0.63", - "quote 1.0.29", - "syn 2.0.23", + "proc-macro2 1.0.66", + "quote 1.0.32", + "syn 2.0.27", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index cb7ddec48c5..9aee78e216c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -100,7 +100,7 @@ massa_wallet = {path = "./massa-wallet"} # Massa projects dependencies massa-proto-rs = {git = "https://github.com/massalabs/massa-proto-rs", "rev" = "18ec02f"} -massa-sc-runtime = {git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "main"} +massa-sc-runtime = {git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "evm-update"} peernet = {git = "https://github.com/massalabs/PeerNet", "branch" = "deactivate_stream_limiter"} # Common dependencies diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index cecaf174ed3..3b61133aa2f 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -618,9 +618,9 @@ impl Interface for InterfaceImpl { /// * Expects a SECP256K1 signature in full ETH format. /// Format: (r, s, v) v will be ignored /// Length: 65 bytes - /// * Expects a public key in full ETH format. - /// Length: 65 bytes - fn verify_evm_signature( + /// * Expects a public key in raw secp256k1 format. 
+ /// Length: 64 bytes + fn evm_signature_verify( &self, message_: &[u8], signature_: &[u8], @@ -628,13 +628,13 @@ impl Interface for InterfaceImpl { ) -> Result { // check the signature length if signature_.len() != 65 { - return Err(anyhow!("invalid signature length")); + return Err(anyhow!("invalid signature length in evm_signature_verify")); } // parse the public key let public_key = libsecp256k1::PublicKey::parse_slice( public_key_, - Some(libsecp256k1::PublicKeyFormat::Full), + Some(libsecp256k1::PublicKeyFormat::Raw), )?; // build the message @@ -656,6 +656,55 @@ impl Interface for InterfaceImpl { Ok(libsecp256k1::verify(&message, &signature, &public_key)) } + /// Get an EVM address from a raw secp256k1 public key (64 bytes). + /// Address is the last 20 bytes of the hash of the public key. + fn evm_get_address_from_pubkey(&self, public_key_: &[u8]) -> Result> { + // parse the public key + let public_key = libsecp256k1::PublicKey::parse_slice( + public_key_, + Some(libsecp256k1::PublicKeyFormat::Raw), + )?; + + // compute the hash of the public key + let hash = sha3::Keccak256::digest(public_key.serialize()); + + // ignore the first 12 bytes of the hash + let address = hash[12..].to_vec(); + + // return the address (last 20 bytes of the hash) + Ok(address) + } + + /// Get a raw secp256k1 public key from an EVM signature and the signed hash. 
+ fn evm_get_pubkey_from_signature(&self, hash_: &[u8], signature_: &[u8]) -> Result> { + // check the signature length + if signature_.len() != 65 { + return Err(anyhow!( + "invalid signature length in evm_get_pubkey_from_signature" + )); + } + + // parse the message + let message = libsecp256k1::Message::parse_slice(&hash_).unwrap(); + + // parse the signature as being (r, s, v) use only r and s + let signature = libsecp256k1::Signature::parse_standard_slice(&signature_[..64]).unwrap(); + + // parse v as a recovery id + let recovery_id = libsecp256k1::RecoveryId::parse_rpc(signature_[64]).unwrap(); + + // recover the public key + let recovered = libsecp256k1::recover(&message, &signature, &recovery_id).unwrap(); + + // return its serialized value + Ok(recovered.serialize().to_vec()) + } + + // Return true if the address is an User address, otherwise false if it is an SC address. + fn is_address_eoa(&self, address: &str) -> Result { + unimplemented!("is_address_eoa") + } + /// Transfer coins from the current address (top of the call stack) towards a target address. 
/// /// # Arguments @@ -912,7 +961,8 @@ impl Interface for InterfaceImpl { fn test_evm_verify() { use hex_literal::hex; - // corresponding address is 0x807a7Bb5193eDf9898b9092c1597bB966fe52514 + // signature info + let address_ = hex!("807a7bb5193edf9898b9092c1597bb966fe52514"); let message_ = b"test"; let signature_ = hex!("d0d05c35080635b5e865006c6c4f5b5d457ec342564d8fc67ce40edc264ccdab3f2f366b5bd1e38582538fed7fa6282148e86af97970a10cb3302896f5d68ef51b"); let private_key_ = hex!("ed6602758bdd68dc9df67a6936ed69807a74b8cc89bdc18f3939149d02db17f3"); @@ -947,4 +997,12 @@ fn test_evm_verify() { // sign the message and match it with the original signature let (second_signature, _) = libsecp256k1::sign(&message, &private_key); assert_eq!(signature, second_signature); + + // check 4 + // generate the address from the public key and match it with the original address + // address is the last 20 bytes of the hash of the public key in raw format (64 bytes) + let raw_public_key = public_key.serialize(); + let hash = sha3::Keccak256::digest(&raw_public_key[1..]).to_vec(); + let generated_address = &hash[12..]; + assert_eq!(generated_address, address_); } From cb5eb6a476349bda785c33a0174d593234126600 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Mon, 24 Jul 2023 15:41:44 +0200 Subject: [PATCH 49/71] impl is_address_eoa --- massa-execution-worker/src/interface_impl.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 3b61133aa2f..6adbf222693 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -701,8 +701,9 @@ impl Interface for InterfaceImpl { } // Return true if the address is an User address, otherwise false if it is an SC address. 
- fn is_address_eoa(&self, address: &str) -> Result { - unimplemented!("is_address_eoa") + fn is_address_eoa(&self, address_: &str) -> Result { + let address = Address::from_str(address_)?; + Ok(matches!(address, Address::User(..))) } /// Transfer coins from the current address (top of the call stack) towards a target address. From c4db6952ed9e763ff6be82fed34cd03fe94a9a1a Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Mon, 24 Jul 2023 16:08:01 +0200 Subject: [PATCH 50/71] abi costs --- massa-execution-worker/src/interface_impl.rs | 2 +- massa-node/base_config/gas_costs/abi_gas_costs.json | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 6adbf222693..a2ac0d5ac02 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -700,7 +700,7 @@ impl Interface for InterfaceImpl { Ok(recovered.serialize().to_vec()) } - // Return true if the address is an User address, otherwise false if it is an SC address. + // Return true if the address is a User address, false if it is an SC address. 
fn is_address_eoa(&self, address_: &str) -> Result { let address = Address::from_str(address_)?; Ok(matches!(address, Address::User(..))) diff --git a/massa-node/base_config/gas_costs/abi_gas_costs.json b/massa-node/base_config/gas_costs/abi_gas_costs.json index c0981634267..bbd8057ee3a 100644 --- a/massa-node/base_config/gas_costs/abi_gas_costs.json +++ b/massa-node/base_config/gas_costs/abi_gas_costs.json @@ -47,6 +47,9 @@ "assembly_script_set_data_for": 214, "assembly_script_signature_verify": 240, "assembly_script_evm_signature_verify": 662, + "assembly_script_evm_get_address_from_pubkey": 224, + "assembly_script_evm_get_pubkey_from_signature": 308, + "assembly_script_is_address_eoa": 79, "assembly_script_transfer_coins": 209, "assembly_script_transfer_coins_for": 238, "assembly_script_unsafe_random": 158, From 1229399809e27e0535bc3f2a7cacc71e47473d44 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Mon, 24 Jul 2023 16:08:48 +0200 Subject: [PATCH 51/71] minor change --- massa-node/base_config/gas_costs/abi_gas_costs.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/massa-node/base_config/gas_costs/abi_gas_costs.json b/massa-node/base_config/gas_costs/abi_gas_costs.json index bbd8057ee3a..f2af86adb39 100644 --- a/massa-node/base_config/gas_costs/abi_gas_costs.json +++ b/massa-node/base_config/gas_costs/abi_gas_costs.json @@ -49,7 +49,7 @@ "assembly_script_evm_signature_verify": 662, "assembly_script_evm_get_address_from_pubkey": 224, "assembly_script_evm_get_pubkey_from_signature": 308, - "assembly_script_is_address_eoa": 79, + "assembly_script_is_address_eoa": 90, "assembly_script_transfer_coins": 209, "assembly_script_transfer_coins_for": 238, "assembly_script_unsafe_random": 158, From 8748748f33edd14a88f325cf33d18a84e935acb8 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Mon, 24 Jul 2023 16:23:49 +0200 Subject: [PATCH 52/71] fix a clippy warning --- massa-execution-worker/src/interface_impl.rs | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index a2ac0d5ac02..139bc1df4a0 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -685,7 +685,7 @@ impl Interface for InterfaceImpl { } // parse the message - let message = libsecp256k1::Message::parse_slice(&hash_).unwrap(); + let message = libsecp256k1::Message::parse_slice(hash_).unwrap(); // parse the signature as being (r, s, v) use only r and s let signature = libsecp256k1::Signature::parse_standard_slice(&signature_[..64]).unwrap(); From b70b4ce3895481b66ce518170259bb48f754011d Mon Sep 17 00:00:00 2001 From: Modship Date: Tue, 25 Jul 2023 10:26:24 +0200 Subject: [PATCH 53/71] Protocol improvements (#4243) Co-authored-by: Litchi Pi --- Cargo.lock | 9 +- Cargo.toml | 2 +- massa-node/base_config/config.toml | 8 +- massa-node/src/main.rs | 3 + massa-node/src/settings.rs | 6 ++ massa-node/src/tests/config.toml | 1 + massa-protocol-exports/src/error.rs | 2 + massa-protocol-exports/src/settings.rs | 6 ++ .../src/test_exports/config.rs | 3 + massa-protocol-worker/src/connectivity.rs | 83 ++++++++++++------- .../src/handlers/endorsement_handler/mod.rs | 11 +-- .../endorsement_handler/propagation.rs | 4 - .../handlers/operation_handler/propagation.rs | 6 ++ .../handlers/operation_handler/retrieval.rs | 6 ++ .../src/handlers/peer_handler/mod.rs | 11 +-- .../src/handlers/peer_handler/models.rs | 22 +++-- .../src/handlers/peer_handler/tester.rs | 81 +++++++++++++----- .../src/tests/mock_network.rs | 4 + massa-protocol-worker/src/wrap_network.rs | 9 +- 19 files changed, 196 insertions(+), 81 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ea27d383800..4630215c29f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3796,7 +3796,7 @@ checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" [[package]] name = "peernet" version = "0.1.0" -source = 
"git+https://github.com/massalabs/PeerNet?branch=deactivate_stream_limiter#df6149648cf994e1ff587a95cdaa486868932533" +source = "git+https://github.com/massalabs/PeerNet?branch=main#a4faaf7f27eb668f0fd32b9f5fe0f665633cace9" dependencies = [ "crossbeam", "enum_delegate", @@ -3806,6 +3806,7 @@ dependencies = [ "quiche", "rand 0.8.5", "serde", + "stream_limiter", "thiserror", ] @@ -4999,6 +5000,12 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e08d8363704e6c71fc928674353e6b7c23dcea9d82d7012c8faf2a3a025f8d0" +[[package]] +name = "stream_limiter" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89acdb288f530d175bbeb1344aab11cfa0e0ee5b4e63d75c204001568dcf90cf" + [[package]] name = "strsim" version = "0.8.0" diff --git a/Cargo.toml b/Cargo.toml index cb7ddec48c5..870ab66a910 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -101,7 +101,7 @@ massa_wallet = {path = "./massa-wallet"} # Massa projects dependencies massa-proto-rs = {git = "https://github.com/massalabs/massa-proto-rs", "rev" = "18ec02f"} massa-sc-runtime = {git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "main"} -peernet = {git = "https://github.com/massalabs/PeerNet", "branch" = "deactivate_stream_limiter"} +peernet = {git = "https://github.com/massalabs/PeerNet", "branch" = "main"} # Common dependencies transition = {git = "https://github.com/massalabs/transition.git", "rev" = "93fa3bf82f9f5ff421c78536879b7fd1b948ca75"} diff --git a/massa-node/base_config/config.toml b/massa-node/base_config/config.toml index e431f539dc9..3807527edf2 100644 --- a/massa-node/base_config/config.toml +++ b/massa-node/base_config/config.toml @@ -162,6 +162,8 @@ read_write_limit_bytes_per_second = 2_000_000_000 # timeout after which without answer a hanshake is ended message_timeout = 5000 + # timeout after which a peer tester will consider the peer unreachable + tester_timeout = 500 # timeout after whick we consider a 
node does not have the block we asked for ask_block_timeout = 10000 # Max known blocks we keep during their propagation @@ -202,7 +204,9 @@ # max number of operation per message, same as network param but can be smaller max_operations_per_message = 5000 # Number of millis seconds between each try out connections - try_connection_timer = 5000 + try_connection_timer = 250 + # Number of millis seconds between each try out connections for same peer + try_connection_timer_same_peer = 10000 # Number of millis seconds that create a timeout for out connections timeout_connection = 1000 # max number of operations kept for propagation @@ -215,6 +219,8 @@ thread_tester_count = 25 # Nb max in connections that we accept max_in_connections = 100 + # Cooldown before testing again old peer + test_oldest_peer_cooldown = 720000 # Peer default category limits default_category_info = { target_out_connections = 10, max_in_connections_per_ip = 2, max_in_connections = 15, allow_local_peers = false } # Peer categories limits diff --git a/massa-node/src/main.rs b/massa-node/src/main.rs index 9affb7e6db0..a4d0273bbef 100644 --- a/massa-node/src/main.rs +++ b/massa-node/src/main.rs @@ -636,6 +636,7 @@ async fn launch( max_in_connections: SETTINGS.protocol.max_in_connections, timeout_connection: SETTINGS.protocol.timeout_connection, message_timeout: SETTINGS.protocol.message_timeout, + tester_timeout: SETTINGS.protocol.tester_timeout, routable_ip: SETTINGS .protocol .routable_ip @@ -644,6 +645,8 @@ async fn launch( peers_categories: SETTINGS.protocol.peers_categories.clone(), default_category_info: SETTINGS.protocol.default_category_info, version: *VERSION, + try_connection_timer_same_peer: SETTINGS.protocol.try_connection_timer_same_peer, + test_oldest_peer_cooldown: SETTINGS.protocol.test_oldest_peer_cooldown, }; let (protocol_controller, protocol_channels) = diff --git a/massa-node/src/settings.rs b/massa-node/src/settings.rs index f1c41704849..a35edb73128 100644 --- 
a/massa-node/src/settings.rs +++ b/massa-node/src/settings.rs @@ -249,16 +249,22 @@ pub struct ProtocolSettings { pub read_write_limit_bytes_per_second: u64, /// try connection timer pub try_connection_timer: MassaTime, + /// try connection timer for the same peer + pub try_connection_timer_same_peer: MassaTime, /// Timeout connection pub timeout_connection: MassaTime, /// Message timeout pub message_timeout: MassaTime, + /// Timeout for the tester operations + pub tester_timeout: MassaTime, /// Nb in connections pub max_in_connections: usize, /// Peers limits per category pub peers_categories: HashMap, /// Limits for default category pub default_category_info: PeerCategoryInfo, + /// Cooldown before testing again an old peer + pub test_oldest_peer_cooldown: MassaTime, } /// gRPC settings diff --git a/massa-node/src/tests/config.toml b/massa-node/src/tests/config.toml index ee1cd6485dd..a48116877d6 100644 --- a/massa-node/src/tests/config.toml +++ b/massa-node/src/tests/config.toml @@ -25,6 +25,7 @@ [protocol] message_timeout = 5000 + tester_timeout = 500 ask_block_timeout = 10000 max_known_blocks_size = 1024 max_node_known_blocks_size = 1024 diff --git a/massa-protocol-exports/src/error.rs b/massa-protocol-exports/src/error.rs index 54618de8586..bf4bb7f166c 100644 --- a/massa-protocol-exports/src/error.rs +++ b/massa-protocol-exports/src/error.rs @@ -39,6 +39,8 @@ pub enum ProtocolError { ModelsError(#[from] ModelsError), /// Send error: {0} SendError(String), + /// Peer disconnected : {0} + PeerDisconnected(String), /// Container inconsistency error: {0} ContainerInconsistencyError(String), /// Invalid operation error: {0} diff --git a/massa-protocol-exports/src/settings.rs b/massa-protocol-exports/src/settings.rs index 1a92893ed97..4d475c51e3f 100644 --- a/massa-protocol-exports/src/settings.rs +++ b/massa-protocol-exports/src/settings.rs @@ -146,12 +146,16 @@ pub struct ProtocolConfig { pub last_start_period: u64, /// try connection timer pub 
try_connection_timer: MassaTime, + /// try connection timer same peer + pub try_connection_timer_same_peer: MassaTime, /// Max in connections pub max_in_connections: usize, /// Timeout connection pub timeout_connection: MassaTime, /// Timeout message pub message_timeout: MassaTime, + /// Timeout for the tester operations + pub tester_timeout: MassaTime, /// Number of bytes per second that can be read/write in a connection (should be a 10 multiplier) pub read_write_limit_bytes_per_second: u128, /// Optional routable ip @@ -164,4 +168,6 @@ pub struct ProtocolConfig { pub default_category_info: PeerCategoryInfo, /// Version pub version: Version, + /// Cooldown before testing again an old peer + pub test_oldest_peer_cooldown: MassaTime, } diff --git a/massa-protocol-exports/src/test_exports/config.rs b/massa-protocol-exports/src/test_exports/config.rs index b58b32b541b..7cd9f9d5837 100644 --- a/massa-protocol-exports/src/test_exports/config.rs +++ b/massa-protocol-exports/src/test_exports/config.rs @@ -74,6 +74,7 @@ impl Default for ProtocolConfig { max_size_listeners_per_peer: 100, max_size_peers_announcement: 100, message_timeout: MassaTime::from_millis(10000), + tester_timeout: MassaTime::from_millis(500), last_start_period: 0, read_write_limit_bytes_per_second: 1024 * 1000, timeout_connection: MassaTime::from_millis(1000), @@ -89,6 +90,8 @@ impl Default for ProtocolConfig { max_in_connections_per_ip: 0, }, version: "TEST.23.2".parse().unwrap(), + try_connection_timer_same_peer: MassaTime::from_millis(1000), + test_oldest_peer_cooldown: MassaTime::from_millis(720000), } } } diff --git a/massa-protocol-worker/src/connectivity.rs b/massa-protocol-worker/src/connectivity.rs index a41c225f73f..eaf4deda835 100644 --- a/massa-protocol-worker/src/connectivity.rs +++ b/massa-protocol-worker/src/connectivity.rs @@ -8,6 +8,7 @@ use massa_pool_exports::PoolController; use massa_pos_exports::SelectorController; use massa_protocol_exports::{PeerCategoryInfo, PeerId, 
ProtocolConfig, ProtocolError}; use massa_storage::Storage; +use massa_time::MassaTime; use massa_versioning::versioning::MipStore; use parking_lot::RwLock; use peernet::peer::PeerConnectionType; @@ -243,6 +244,7 @@ pub(crate) fn start_connectivity_thread( recv(tick_try_connect) -> _ => { let active_conn = network_controller.get_active_connections(); let peers_connected = active_conn.get_peers_connected(); + let peers_connection_queue = active_conn.get_peer_ids_connection_queue(); let mut slots_per_category: Vec<(String, usize)> = peer_categories.iter().map(|(category, category_infos)| { (category.clone(), category_infos.1.target_out_connections.saturating_sub(peers_connected.iter().filter(|(_, peer)| { if peer.1 == PeerConnectionType::OUT && let Some(peer_category) = &peer.2 { @@ -262,6 +264,7 @@ pub(crate) fn start_connectivity_thread( if peers_connected.contains_key(peer_id) { continue; } + if let Some(peer_info) = peer_db_read.peers.get(peer_id).and_then(|peer| { if peer.state == PeerState::Trusted { Some(peer.clone()) @@ -269,47 +272,69 @@ pub(crate) fn start_connectivity_thread( None } }) { - if peer_info.last_announce.listeners.is_empty() { - continue; - } - //TODO: Adapt for multiple listeners - let (addr, _) = peer_info.last_announce.listeners.iter().next().unwrap(); - let canonical_ip = addr.ip().to_canonical(); - let mut allowed_local_ips = false; - // Check if the peer is in a category and we didn't reached out target yet - let mut category_found = None; - for (name, (ips, cat)) in &peer_categories { - if ips.contains(&canonical_ip) { - category_found = Some(name); - allowed_local_ips = cat.allow_local_peers; + if let Some(last_announce) = peer_info.last_announce { + if last_announce.listeners.is_empty() { + continue; + } + + //TODO: Adapt for multiple listeners + let (addr, _) = last_announce.listeners.iter().next().unwrap(); + if peers_connection_queue.contains(addr) { + continue; } - } - if !canonical_ip.is_global() && !allowed_local_ips { - 
continue; - } - if let Some(category) = category_found { - for (name, category_infos) in &mut slots_per_category { - if name == category && category_infos > &mut 0 { - addresses_to_connect.push(*addr); - *category_infos -= 1; + // check if the peer last connect attempt has not been too recent + if let Some(last_try) = peer_db_read.try_connect_history.get(addr) { + let last_try = last_try.estimate_instant().expect("Time went backward"); + if last_try.elapsed() < config.try_connection_timer_same_peer.to_duration() { + continue; } } - } else if slot_default_category > 0 { - addresses_to_connect.push(*addr); - slot_default_category -= 1; - } + if config.listeners.iter().any(|(local_addr, _transport)| addr == local_addr) { + continue; + } - // IF all slots are filled, stop - if slot_default_category == 0 && slots_per_category.iter().all(|(_, slots)| *slots == 0) { - break; + let canonical_ip = addr.ip().to_canonical(); + let mut allowed_local_ips = false; + // Check if the peer is in a category and we didn't reached out target yet + let mut category_found = None; + for (name, (ips, cat)) in &peer_categories { + if ips.contains(&canonical_ip) { + category_found = Some(name); + allowed_local_ips = cat.allow_local_peers; + } + } + if !canonical_ip.is_global() && !allowed_local_ips { + continue; + } + + if let Some(category) = category_found { + for (name, category_infos) in &mut slots_per_category { + if name == category && category_infos > &mut 0 { + addresses_to_connect.push(*addr); + *category_infos -= 1; + } + } + } else if slot_default_category > 0 && !addresses_to_connect.contains(addr) { + addresses_to_connect.push(*addr); + slot_default_category -= 1; + } + + // IF all slots are filled, stop + if slot_default_category == 0 && slots_per_category.iter().all(|(_, slots)| *slots == 0) { + break; + } } } } } + for addr in addresses_to_connect { info!("Trying to connect to addr {}", addr); + + peer_db.write().try_connect_history.insert(addr, MassaTime::now().unwrap()); + 
// We only manage TCP for now if let Err(err) = network_controller.try_connect(addr, config.timeout_connection.to_duration()) { warn!("Failed to connect to peer {:?}: {:?}", addr, err); diff --git a/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs b/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs index ff0361180b9..b334e1804e6 100644 --- a/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs +++ b/massa-protocol-worker/src/handlers/endorsement_handler/mod.rs @@ -65,16 +65,11 @@ impl EndorsementHandler { pool_controller, config.clone(), storage.clone_without_refs(), - massa_metrics.clone(), - ); - - let endorsement_propagation_thread = start_propagation_thread( - local_receiver, - cache, - config, - active_connections, massa_metrics, ); + + let endorsement_propagation_thread = + start_propagation_thread(local_receiver, cache, config, active_connections); Self { endorsement_retrieval_thread: Some(( sender_retrieval_ext, diff --git a/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs b/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs index b12bb70d876..15eb040b719 100644 --- a/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs +++ b/massa-protocol-worker/src/handlers/endorsement_handler/propagation.rs @@ -4,7 +4,6 @@ use super::{ }; use crate::{messages::MessagesSerializer, wrap_network::ActiveConnectionsTrait}; use massa_channel::receiver::MassaReceiver; -use massa_metrics::MassaMetrics; use massa_protocol_exports::ProtocolConfig; use massa_storage::Storage; use std::thread::JoinHandle; @@ -17,7 +16,6 @@ struct PropagationThread { cache: SharedEndorsementCache, active_connections: Box, endorsement_serializer: MessagesSerializer, - _metrics: MassaMetrics, } impl PropagationThread { @@ -142,7 +140,6 @@ pub fn start_propagation_thread( cache: SharedEndorsementCache, config: ProtocolConfig, active_connections: Box, - metrics: MassaMetrics, ) -> JoinHandle<()> { 
std::thread::Builder::new() .name("protocol-endorsement-handler-propagation".to_string()) @@ -155,7 +152,6 @@ pub fn start_propagation_thread( active_connections, cache, endorsement_serializer, - _metrics: metrics, }; propagation_thread.run(); }) diff --git a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs index 5a8acae2f93..b0ca650494e 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/propagation.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/propagation.rs @@ -10,6 +10,7 @@ use massa_models::prehash::CapacityAllocator; use massa_models::prehash::PreHashSet; use massa_protocol_exports::PeerId; use massa_protocol_exports::ProtocolConfig; +use massa_protocol_exports::ProtocolError; use massa_storage::Storage; use tracing::{debug, info, log::warn}; @@ -183,6 +184,11 @@ impl PropagationThread { "Failed to send OperationsAnnouncement message to peer: {}", err ); + + if let ProtocolError::PeerDisconnected(_) = err { + // cache of this peer is removed in next call of cache_write.update_cache + break; + } } } } diff --git a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs index 4cbb44db0fc..be0ec546800 100644 --- a/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs +++ b/massa-protocol-worker/src/handlers/operation_handler/retrieval.rs @@ -280,6 +280,9 @@ impl RetrievalThread { false, ) { warn!("Failed to send AskForOperations message to peer: {}", err); + if let ProtocolError::PeerDisconnected(_) = err { + break; + } } } } @@ -340,6 +343,9 @@ impl RetrievalThread { false, ) { warn!("Failed to send Operations message to peer: {}", err); + if let ProtocolError::PeerDisconnected(_) = err { + break; + } } } Ok(()) diff --git a/massa-protocol-worker/src/handlers/peer_handler/mod.rs b/massa-protocol-worker/src/handlers/peer_handler/mod.rs index 
ecb09491a37..10c234ac71f 100644 --- a/massa-protocol-worker/src/handlers/peer_handler/mod.rs +++ b/massa-protocol-worker/src/handlers/peer_handler/mod.rs @@ -107,6 +107,7 @@ impl PeerManagementHandler { max_peers_per_announcement: config.max_size_peers_announcement, max_listeners_per_peer: config.max_size_listeners_per_peer, }); + move || { loop { select! { @@ -197,9 +198,9 @@ impl PeerManagementHandler { match message { PeerManagementMessage::NewPeerConnected((peer_id, listeners)) => { debug!("Received peer message: NewPeerConnected from {}", peer_id); - if let Err(e) = test_sender.try_send((peer_id, listeners)) { - debug!("error when sending msg to peer tester : {}", e); - } + if let Err(e) = test_sender.try_send((peer_id, listeners)) { + debug!("error when sending msg to peer connect : {}", e); + } } PeerManagementMessage::ListPeers(peers) => { debug!("Received peer message: List peers from {}", peer_id); @@ -512,11 +513,11 @@ impl InitConnectionHandler for MassaHandshake .peers .entry(peer_id.clone()) .and_modify(|info| { - info.last_announce = announcement.clone(); + info.last_announce = Some(announcement.clone()); info.state = PeerState::Trusted; }) .or_insert(PeerInfo { - last_announce: announcement.clone(), + last_announce: Some(announcement.clone()), state: PeerState::Trusted, }); } diff --git a/massa-protocol-worker/src/handlers/peer_handler/models.rs b/massa-protocol-worker/src/handlers/peer_handler/models.rs index 094cc509af6..b970f4d60e9 100644 --- a/massa-protocol-worker/src/handlers/peer_handler/models.rs +++ b/massa-protocol-worker/src/handlers/peer_handler/models.rs @@ -23,6 +23,8 @@ pub struct PeerDB { pub index_by_newest: BTreeSet<(Reverse, PeerId)>, /// Tested addresses used to avoid testing the same address too often. 
//TODO: Need to be pruned pub tested_addresses: HashMap, + /// history of try connection to peers + pub try_connect_history: HashMap, } pub type SharedPeerDB = Arc>; @@ -31,7 +33,7 @@ pub type PeerMessageTuple = (PeerId, Vec); #[derive(Clone, Debug)] pub struct PeerInfo { - pub last_announce: Announcement, + pub last_announce: Option, pub state: PeerState, } @@ -121,15 +123,17 @@ impl PeerDB { } if let Some(peer) = self.peers.get(&key) { // skip old peers - if peer.last_announce.timestamp < min_time { - continue; + if let Some(last_announce) = &peer.last_announce { + if last_announce.timestamp < min_time { + continue; + } + let listeners: HashMap = + last_announce.listeners.clone().into_iter().collect(); + if listeners.is_empty() { + continue; + } + result.push((key, listeners)); } - let listeners: HashMap = - peer.last_announce.listeners.clone().into_iter().collect(); - if listeners.is_empty() { - continue; - } - result.push((key, listeners)); } } diff --git a/massa-protocol-worker/src/handlers/peer_handler/tester.rs b/massa-protocol-worker/src/handlers/peer_handler/tester.rs index 7f8b9c96640..33f623eb9bb 100644 --- a/massa-protocol-worker/src/handlers/peer_handler/tester.rs +++ b/massa-protocol-worker/src/handlers/peer_handler/tester.rs @@ -1,17 +1,19 @@ use std::{ - collections::HashMap, + collections::{HashMap, HashSet}, io::Read, net::{IpAddr, SocketAddr}, + sync::Arc, thread::JoinHandle, time::Duration, }; use crate::messages::MessagesHandler; use massa_channel::{receiver::MassaReceiver, sender::MassaSender, MassaChannel}; -use massa_models::version::{Version, VersionDeserializer}; +use massa_models::version::VersionDeserializer; use massa_protocol_exports::{PeerConnectionType, PeerId, PeerIdDeserializer, ProtocolConfig}; use massa_serialization::{DeserializeError, Deserializer}; use massa_time::MassaTime; +use parking_lot::RwLock; use peernet::{ error::{PeerNetError, PeerNetResult}, messages::MessagesHandler as PeerNetMessagesHandler, @@ -54,6 +56,9 @@ impl 
Tester { Some(config.max_size_channel_commands_peer_testers), ); + // Peers currently tested by one of the thread + let peers_in_test = Arc::new(RwLock::new(HashSet::new())); + for _ in 0..config.thread_tester_count { testers.push(Tester::new( peer_db.clone(), @@ -63,6 +68,7 @@ impl Tester { messages_handler.clone(), target_out_connections.clone(), default_target_out_connections, + peers_in_test.clone(), )); } @@ -76,13 +82,19 @@ impl Tester { version_deserializer: VersionDeserializer, peer_id_deserializer: PeerIdDeserializer, addr: SocketAddr, - our_version: Version, + config: &ProtocolConfig, ) -> PeerNetResult { + let our_version = config.version; let result = { let mut socket = - std::net::TcpStream::connect_timeout(&addr, Duration::from_millis(500)) + std::net::TcpStream::connect_timeout(&addr, config.tester_timeout.into()) .map_err(|e| PeerNetError::PeerConnectionError.new("connect", e, None))?; - + socket + .set_read_timeout(Some(config.tester_timeout.into())) + .map_err(|err| PeerNetError::PeerConnectionError.new("read timeout", err, None))?; + socket + .set_write_timeout(Some(config.tester_timeout.into())) + .map_err(|err| PeerNetError::PeerConnectionError.new("write timeout", err, None))?; // data.receive() from Endpoint let mut len_bytes = vec![0u8; 4]; socket @@ -187,13 +199,17 @@ impl Tester { .peers .entry(peer_id.clone()) .and_modify(|info| { - if info.last_announce.timestamp < announcement.timestamp { - info.last_announce = announcement.clone(); + if let Some(last_announce) = &info.last_announce { + if last_announce.timestamp < announcement.timestamp { + info.last_announce = Some(announcement.clone()); + } + } else { + info.last_announce = Some(announcement.clone()); } info.state = super::PeerState::Trusted; }) .or_insert(PeerInfo { - last_announce: announcement, + last_announce: Some(announcement), state: super::PeerState::Trusted, }); } @@ -221,9 +237,16 @@ impl Tester { // if handshake failed, we set the peer state to HandshakeFailed if 
res.is_err() { let mut peer_db_write = peer_db.write(); - peer_db_write.peers.entry(peer_id).and_modify(|info| { - info.state = super::PeerState::HandshakeFailed; - }); + peer_db_write + .peers + .entry(peer_id) + .and_modify(|info| { + info.state = super::PeerState::HandshakeFailed; + }) + .or_insert(PeerInfo { + last_announce: None, + state: super::PeerState::HandshakeFailed, + }); } if let Err(e) = socket.shutdown(std::net::Shutdown::Both) { tracing::log::error!("Failed to shutdown socket: {}", e); @@ -235,6 +258,7 @@ impl Tester { } /// Create a new tester (spawn a thread) + #[allow(clippy::too_many_arguments)] pub fn new( peer_db: SharedPeerDB, active_connections: Box, @@ -243,6 +267,7 @@ impl Tester { messages_handler: MessagesHandler, target_out_connections: HashMap, usize)>, default_target_out_connections: usize, + peers_in_test: Arc>>, ) -> Self { tracing::log::debug!("running new tester"); @@ -261,7 +286,7 @@ impl Tester { //let mut network_manager = PeerNetManager::new(config); let protocol_config = protocol_config.clone(); - loop { + 'main_loop: loop { crossbeam::select! 
{ recv(receiver) -> res => { receiver.update_metrics(); @@ -303,6 +328,14 @@ impl Tester { let db = db.clone(); // receive new listener to test for (addr, _) in listener.1.iter() { + if peers_in_test.read().contains(addr) { + continue; + } + { + let mut peers_in_test = peers_in_test.write(); + peers_in_test.insert(*addr); + } + //Find category of that address let ip_canonical = addr.ip().to_canonical(); let cooldown = 'cooldown: { @@ -327,6 +360,7 @@ impl Tester { if let Some(last_tested_time) = db_write.tested_addresses.get(addr) { let last_tested_time = last_tested_time.estimate_instant().expect("Time went backward"); if last_tested_time.elapsed() < cooldown { + peers_in_test.write().remove(addr); continue; } } @@ -336,15 +370,16 @@ impl Tester { // Maybe we need to have a way to still update his last announce timestamp because he is a great peer if !active_connections.get_peers_connected().iter().any(|(_, (addr, _, _))| addr.ip().to_canonical() == ip_canonical) { //Don't test our local addresses - for (local_addr, _transport) in protocol_config.listeners.iter() { - if addr == local_addr { - continue; - } + if protocol_config.listeners.iter().any(|(local_addr, _transport)| addr == local_addr) { + peers_in_test.write().remove(addr); + continue 'main_loop; } + //Don't test our proper ip if let Some(ip) = protocol_config.routable_ip { if ip.to_canonical() == ip_canonical { - continue; + peers_in_test.write().remove(addr); + continue 'main_loop; } } info!("testing peer {} listener addr: {}", &listener.0, &addr); @@ -357,9 +392,11 @@ impl Tester { VersionDeserializer::new(), PeerIdDeserializer::new(), *addr, - protocol_config.version, + &protocol_config, ); + peers_in_test.write().remove(addr); + // let _res = network_manager.try_connect( // *addr, // protocol_config.timeout_connection.to_duration(), @@ -377,10 +414,11 @@ impl Tester { default(Duration::from_secs(2)) => { // If no message in 2 seconds they will test a peer that hasn't been tested for long time - let 
Some(listener) = db.read().get_oldest_peer(Duration::from_secs(60 * 60 * 2)) else { + let Some(listener) = db.read().get_oldest_peer(protocol_config.test_oldest_peer_cooldown.into()) else { continue; }; + peers_in_test.write().insert(listener); { let mut db = db.write(); db.tested_addresses.insert(listener, MassaTime::now().unwrap()); @@ -395,12 +433,14 @@ impl Tester { //Don't test our local addresses for (local_addr, _transport) in protocol_config.listeners.iter() { if listener == *local_addr { + peers_in_test.write().remove(&listener); continue; } } //Don't test our proper ip if let Some(ip) = protocol_config.routable_ip { if ip.to_canonical() == ip_canonical { + peers_in_test.write().remove(&listener); continue; } } @@ -413,13 +453,14 @@ impl Tester { VersionDeserializer::new(), PeerIdDeserializer::new(), listener, - protocol_config.version, + &protocol_config, ); // let res = network_manager.try_connect( // listener, // protocol_config.timeout_connection.to_duration(), // &OutConnectionConfig::Tcp(Box::new(TcpOutConnectionConfig::new(protocol_config.read_write_limit_bytes_per_second / 10, Duration::from_millis(100)))), // ); + peers_in_test.write().remove(&listener); tracing::log::debug!("{:?}", res); } } diff --git a/massa-protocol-worker/src/tests/mock_network.rs b/massa-protocol-worker/src/tests/mock_network.rs index 87d62fdd20f..82876eba608 100644 --- a/massa-protocol-worker/src/tests/mock_network.rs +++ b/massa-protocol-worker/src/tests/mock_network.rs @@ -98,6 +98,10 @@ impl ActiveConnectionsTrait for SharedMockActiveConnections { fn get_peers_connections_bandwidth(&self) -> HashMap { HashMap::new() } + + fn get_peer_ids_connection_queue(&self) -> HashSet { + HashSet::new() + } } pub struct MockNetworkController { diff --git a/massa-protocol-worker/src/wrap_network.rs b/massa-protocol-worker/src/wrap_network.rs index 311d49a5cd2..108902e8089 100644 --- a/massa-protocol-worker/src/wrap_network.rs +++ b/massa-protocol-worker/src/wrap_network.rs @@ -29,6 
+29,7 @@ pub trait ActiveConnectionsTrait: Send + Sync { fn get_peers_connected( &self, ) -> HashMap)>; + fn get_peer_ids_connection_queue(&self) -> HashSet; fn get_nb_out_connections(&self) -> usize; fn get_nb_in_connections(&self) -> usize; fn shutdown_connection(&mut self, peer_id: &PeerId); @@ -55,9 +56,7 @@ impl ActiveConnectionsTrait for SharedActiveConnections { .try_send(message_serializer, message, high_priority) .map_err(|err| ProtocolError::SendError(err.to_string())) } else { - Err(ProtocolError::SendError( - "Peer isn't connected anymore".to_string(), - )) + Err(ProtocolError::PeerDisconnected(peer_id.to_string())) } } @@ -109,6 +108,10 @@ impl ActiveConnectionsTrait for SharedActiveConnections { } map } + + fn get_peer_ids_connection_queue(&self) -> HashSet { + self.read().connection_queue.clone() + } } pub trait NetworkController: Send + Sync { From d798412a06d622862832023e6b3e539a012da428 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Tue, 25 Jul 2023 10:56:40 +0200 Subject: [PATCH 54/71] Update runtime target branch --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a5eaf24f38f..741a9716b95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2497,7 +2497,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=evm-update#d702a0a6d5a4d57f6858b4704df1e55fad56aac8" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=main#9fb15c5c519b47312dc1567b9fc91c3e095cdef1" dependencies = [ "anyhow", "as-ffi-bindings", diff --git a/Cargo.toml b/Cargo.toml index 9aee78e216c..cb7ddec48c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -100,7 +100,7 @@ massa_wallet = {path = "./massa-wallet"} # Massa projects dependencies massa-proto-rs = {git = "https://github.com/massalabs/massa-proto-rs", "rev" = "18ec02f"} -massa-sc-runtime = {git = "https://github.com/massalabs/massa-sc-runtime", 
"branch" = "evm-update"} +massa-sc-runtime = {git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "main"} peernet = {git = "https://github.com/massalabs/PeerNet", "branch" = "deactivate_stream_limiter"} # Common dependencies From 812374c90597ac0fe57d168a0b47814a97f539f5 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Tue, 25 Jul 2023 14:22:23 +0200 Subject: [PATCH 55/71] Remove local struct hashes (#4276) * Remove cycle_info local hash * Remove async_messages local hash /!\ We probably need to update the protos too! * Remove deferred_credits local hash * fmt fix --- massa-async-pool/src/changes.rs | 4 +- massa-async-pool/src/mapping_grpc.rs | 16 +- massa-async-pool/src/message.rs | 60 ++------ .../src/test_exports/bootstrap.rs | 2 +- massa-bootstrap/src/tests/tools.rs | 4 +- massa-execution-worker/src/active_history.rs | 20 ++- massa-execution-worker/src/interface_impl.rs | 2 +- .../src/speculative_roll_state.rs | 8 +- .../src/tests/scenarios_mandatories.rs | 2 +- .../src/tests/tests_active_history.rs | 2 +- massa-final-state/src/state_changes.rs | 4 +- massa-final-state/src/tests/scenarios.rs | 2 +- massa-pos-exports/src/cycle_info.rs | 105 +------------ massa-pos-exports/src/deferred_credits.rs | 145 ++---------------- massa-pos-exports/src/pos_changes.rs | 3 +- massa-pos-exports/src/pos_final_state.rs | 51 ++---- 16 files changed, 74 insertions(+), 356 deletions(-) diff --git a/massa-async-pool/src/changes.rs b/massa-async-pool/src/changes.rs index 7fc837585b0..4441ed63224 100644 --- a/massa-async-pool/src/changes.rs +++ b/massa-async-pool/src/changes.rs @@ -95,7 +95,7 @@ impl Serializer for AsyncPoolChangesSerializer { /// use massa_async_pool::{AsyncMessage, AsyncPoolChanges, AsyncPoolChangesSerializer}; /// use massa_ledger_exports::SetUpdateOrDelete; /// - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// 
Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), @@ -190,7 +190,7 @@ impl Deserializer for AsyncPoolChangesDeserializer { /// use massa_async_pool::{AsyncMessage, AsyncMessageTrigger, AsyncPoolChanges, AsyncPoolChangesSerializer, AsyncPoolChangesDeserializer}; /// use massa_ledger_exports::SetUpdateOrDelete; /// - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), diff --git a/massa-async-pool/src/mapping_grpc.rs b/massa-async-pool/src/mapping_grpc.rs index d0adda0e0e8..7ed29cf2361 100644 --- a/massa-async-pool/src/mapping_grpc.rs +++ b/massa-async-pool/src/mapping_grpc.rs @@ -20,7 +20,7 @@ impl From for grpc_model::AsyncMessage { data: value.data, trigger: value.trigger.map(|trigger| trigger.into()), can_be_executed: value.can_be_executed, - hash: value.hash.to_string(), + hash: "".to_string(), } } } @@ -158,16 +158,10 @@ impl From for grpc_model::AsyncMessageUpdate { value: None, }), }, - hash: match value.hash { - SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.to_string()), - }), - SetOrKeep::Keep => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, - }), - }, + hash: Some(grpc_model::SetOrKeepString { + r#type: grpc_model::AsyncPoolChangeType::Delete as i32, + value: None, + }), } } } diff --git a/massa-async-pool/src/message.rs b/massa-async-pool/src/message.rs index b2dbfca4274..f16ea120f6a 100644 --- a/massa-async-pool/src/message.rs +++ b/massa-async-pool/src/message.rs @@ -2,7 +2,6 @@ //! 
This file defines the structure representing an asynchronous message -use massa_hash::{Hash, HashDeserializer, HashSerializer}; use massa_ledger_exports::{Applicable, SetOrKeep, SetOrKeepDeserializer, SetOrKeepSerializer}; use massa_models::address::{AddressDeserializer, AddressSerializer}; use massa_models::amount::{AmountDeserializer, AmountSerializer}; @@ -61,7 +60,7 @@ impl Serializer for AsyncMessageIdSerializer { /// use std::str::FromStr; /// use massa_async_pool::{AsyncMessage, AsyncMessageId, AsyncMessageIdSerializer}; /// - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), @@ -121,7 +120,7 @@ impl Deserializer for AsyncMessageIdDeserializer { /// use std::str::FromStr; /// use massa_async_pool::{AsyncMessage, AsyncMessageId, AsyncMessageIdSerializer, AsyncMessageIdDeserializer}; /// - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), @@ -305,9 +304,6 @@ pub struct AsyncMessage { /// Boolean that determine if the message can be executed. For messages without filter this boolean is always true. /// For messages with filter, this boolean is true if the filter has been matched between `validity_start` and current slot. 
pub can_be_executed: bool, - - /// Hash of the message - pub hash: Hash, } impl Default for AsyncMessage { @@ -321,7 +317,6 @@ impl Default for AsyncMessage { destination: genesis_address, validity_start: slot_zero, validity_end: slot_zero, - hash: Hash::zero(), ..Default::default() } } @@ -329,8 +324,8 @@ impl Default for AsyncMessage { impl AsyncMessage { #[allow(clippy::too_many_arguments)] - /// Take an `AsyncMessage` and return it with its hash computed - pub fn new_with_hash( + /// Take an `AsyncMessage` and return it + pub fn new( emission_slot: Slot, emission_index: u64, sender: Address, @@ -345,9 +340,7 @@ impl AsyncMessage { trigger: Option, can_be_executed: Option, ) -> Self { - let async_message_ser = AsyncMessageSerializer::new(can_be_executed.is_some()); - let mut buffer = Vec::new(); - let mut message = AsyncMessage { + AsyncMessage { emission_slot, emission_index, sender, @@ -361,14 +354,7 @@ impl AsyncMessage { data, can_be_executed: can_be_executed.unwrap_or(trigger.is_none()), trigger, - // placeholder hash to serialize the message, replaced below - hash: Hash::zero(), - }; - async_message_ser - .serialize(&message, &mut buffer) - .expect("critical: asynchronous message serialization should never fail here"); - message.hash = Hash::compute_from(&buffer); - message + } } /// Compute the ID of the message for use when choosing which operations to keep in priority (highest score) on pool overflow. @@ -380,16 +366,6 @@ impl AsyncMessage { self.emission_index, ) } - - /// Recompute the hash of the message. 
Must be used each time we modify one field - pub fn compute_hash(&mut self, for_db: bool) { - let async_message_ser = AsyncMessageSerializer::new(for_db); - let mut buffer = Vec::new(); - async_message_ser.serialize(self, &mut buffer).expect( - "critical: asynchronous message serialization should never fail in recompute hash", - ); - self.hash = Hash::compute_from(&buffer); - } } #[derive(Clone)] @@ -433,7 +409,7 @@ impl Serializer for AsyncMessageSerializer { /// use massa_serialization::Serializer; /// use std::str::FromStr; /// - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), @@ -548,7 +524,7 @@ impl Deserializer for AsyncMessageDeserializer { /// use massa_serialization::{Serializer, Deserializer, DeserializeError}; /// use std::str::FromStr; /// - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), @@ -574,7 +550,6 @@ impl Deserializer for AsyncMessageDeserializer { /// let (rest, message_deserialized) = message_deserializer.deserialize::(&serialized).unwrap(); /// assert!(rest.is_empty()); /// assert_eq!(message, message_deserialized); - /// assert_eq!(message.hash, message_deserialized.hash); /// ``` fn deserialize<'a, E: ParseError<&'a [u8]> + ContextError<&'a [u8]>>( &self, @@ -659,7 +634,7 @@ impl Deserializer for AsyncMessageDeserializer { filter, can_be_executed, )| { - AsyncMessage::new_with_hash( + AsyncMessage::new( emission_slot, emission_index, sender, @@ -751,9 +726,6 @@ pub struct AsyncMessageUpdate { /// Boolean that determine if the message can be executed. For messages without filter this boolean is always true. /// For messages with filter, this boolean is true if the filter has been matched between `validity_start` and current slot. 
pub can_be_executed: SetOrKeep, - - /// Hash of the message - pub hash: SetOrKeep, } /// Serializer for `AsyncMessageUpdate` @@ -769,7 +741,6 @@ pub struct AsyncMessageUpdateSerializer { >, bool_serializer: SetOrKeepSerializer, regular_bool_serializer: BoolSerializer, - hash_serializer: SetOrKeepSerializer, for_db: bool, } @@ -787,7 +758,6 @@ impl AsyncMessageUpdateSerializer { )), bool_serializer: SetOrKeepSerializer::new(BoolSerializer::new()), regular_bool_serializer: BoolSerializer::new(), - hash_serializer: SetOrKeepSerializer::new(HashSerializer::new()), for_db, } } @@ -843,7 +813,6 @@ impl Serializer for AsyncMessageUpdateSerializer { self.bool_serializer .serialize(&value.can_be_executed, buffer)?; } - self.hash_serializer.serialize(&value.hash, buffer)?; Ok(()) } } @@ -862,7 +831,6 @@ pub struct AsyncMessageUpdateDeserializer { >, bool_deserializer: SetOrKeepDeserializer, regular_bool_deserializer: BoolDeserializer, - hash_deserializer: SetOrKeepDeserializer, for_db: bool, } @@ -901,7 +869,6 @@ impl AsyncMessageUpdateDeserializer { )), bool_deserializer: SetOrKeepDeserializer::new(BoolDeserializer::new()), regular_bool_deserializer: BoolDeserializer::new(), - hash_deserializer: SetOrKeepDeserializer::new(HashDeserializer::new()), for_db, } } @@ -978,9 +945,6 @@ impl Deserializer for AsyncMessageUpdateDeserializer { Ok((input, SetOrKeep::Keep)) } }), - context("Failed hash deserialization", |input| { - self.hash_deserializer.deserialize(input) - }), )), ) .map( @@ -998,7 +962,6 @@ impl Deserializer for AsyncMessageUpdateDeserializer { data, trigger, can_be_executed, - hash, )| { AsyncMessageUpdate { emission_slot, @@ -1014,7 +977,6 @@ impl Deserializer for AsyncMessageUpdateDeserializer { data, trigger, can_be_executed, - hash, } }, ) @@ -1038,7 +1000,6 @@ impl Applicable for AsyncMessageUpdate { self.data.apply(update.data); self.trigger.apply(update.trigger); self.can_be_executed.apply(update.can_be_executed); - self.hash.apply(update.hash); } } @@ 
-1058,7 +1019,6 @@ impl Applicable for AsyncMessage { update.data.apply_to(&mut self.data); update.trigger.apply_to(&mut self.trigger); update.can_be_executed.apply_to(&mut self.can_be_executed); - update.hash.apply_to(&mut self.hash); } } @@ -1090,7 +1050,7 @@ mod tests { #[test] fn bad_serialization_version() { - let message = AsyncMessage::new_with_hash( + let message = AsyncMessage::new( Slot::new(1, 2), 0, Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), diff --git a/massa-async-pool/src/test_exports/bootstrap.rs b/massa-async-pool/src/test_exports/bootstrap.rs index 10da175e3d6..0d2dcf123a4 100644 --- a/massa-async-pool/src/test_exports/bootstrap.rs +++ b/massa-async-pool/src/test_exports/bootstrap.rs @@ -24,7 +24,7 @@ fn get_random_address() -> Address { pub fn get_random_message(fee: Option, thread_count: u8) -> AsyncMessage { let mut rng = rand::thread_rng(); - AsyncMessage::new_with_hash( + AsyncMessage::new( Slot::new(rng.gen_range(0..100_000), rng.gen_range(0..thread_count)), 0, get_random_address(), diff --git a/massa-bootstrap/src/tests/tools.rs b/massa-bootstrap/src/tests/tools.rs index 21546aa263c..8430ddb3702 100644 --- a/massa-bootstrap/src/tests/tools.rs +++ b/massa-bootstrap/src/tests/tools.rs @@ -142,7 +142,7 @@ fn get_random_pos_cycles_info( /// generates random PoS deferred credits fn get_random_deferred_credits(r_limit: u64) -> DeferredCredits { - let mut deferred_credits = DeferredCredits::new_with_hash(); + let mut deferred_credits = DeferredCredits::new(); for i in 0u64..r_limit { let mut credits = PreHashMap::default(); @@ -163,7 +163,7 @@ fn get_random_deferred_credits(r_limit: u64) -> DeferredCredits { /// generates a random PoS final state fn get_random_pos_state(r_limit: u64, mut pos: PoSFinalState) -> PoSFinalState { let (roll_counts, production_stats, _rng_seed) = get_random_pos_cycles_info(r_limit); - let mut deferred_credits = DeferredCredits::new_with_hash(); + let mut deferred_credits = 
DeferredCredits::new(); deferred_credits.extend(get_random_deferred_credits(r_limit)); // Do not add seed_bits to changes, as we create the initial cycle just after diff --git a/massa-execution-worker/src/active_history.rs b/massa-execution-worker/src/active_history.rs index e24a40c92ba..22bac9efc99 100644 --- a/massa-execution-worker/src/active_history.rs +++ b/massa-execution-worker/src/active_history.rs @@ -199,17 +199,15 @@ impl ActiveHistory { /// Gets all the deferred credits that will be credited until a given slot (included) pub fn get_all_deferred_credits_until(&self, slot: &Slot) -> DeferredCredits { - self.0 - .iter() - .fold(DeferredCredits::new_without_hash(), |mut acc, e| { - acc.extend( - e.state_changes - .pos_changes - .deferred_credits - .get_slot_range(..=slot, false), - ); - acc - }) + self.0.iter().fold(DeferredCredits::new(), |mut acc, e| { + acc.extend( + e.state_changes + .pos_changes + .deferred_credits + .get_slot_range(..=slot), + ); + acc + }) } /// Gets the deferred credits for a given address that will be credited at a given slot diff --git a/massa-execution-worker/src/interface_impl.rs b/massa-execution-worker/src/interface_impl.rs index 139bc1df4a0..a484e9c6fcb 100644 --- a/massa-execution-worker/src/interface_impl.rs +++ b/massa-execution-worker/src/interface_impl.rs @@ -865,7 +865,7 @@ impl Interface for InterfaceImpl { execution_context.transfer_coins(Some(sender), None, coins, true)?; let fee = Amount::from_raw(raw_fee); execution_context.transfer_coins(Some(sender), None, fee, true)?; - execution_context.push_new_message(AsyncMessage::new_with_hash( + execution_context.push_new_message(AsyncMessage::new( emission_slot, emission_index, sender, diff --git a/massa-execution-worker/src/speculative_roll_state.rs b/massa-execution-worker/src/speculative_roll_state.rs index f754ebff6c4..7e6e054c569 100644 --- a/massa-execution-worker/src/speculative_roll_state.rs +++ b/massa-execution-worker/src/speculative_roll_state.rs @@ -282,7 
+282,7 @@ impl SpeculativeRollState { } } if !target_credits.is_empty() { - let mut credits = DeferredCredits::new_with_hash(); + let mut credits = DeferredCredits::new(); credits.credits.insert(target_slot, target_credits); self.added_changes.deferred_credits.extend(credits); } @@ -570,11 +570,7 @@ impl SpeculativeRollState { ); // added deferred credits - credits.extend( - self.added_changes - .deferred_credits - .get_slot_range(..=slot, false), - ); + credits.extend(self.added_changes.deferred_credits.get_slot_range(..=slot)); // filter out zeros credits.remove_zeros(); diff --git a/massa-execution-worker/src/tests/scenarios_mandatories.rs b/massa-execution-worker/src/tests/scenarios_mandatories.rs index 30d41f5eeca..61536fb3d67 100644 --- a/massa-execution-worker/src/tests/scenarios_mandatories.rs +++ b/massa-execution-worker/src/tests/scenarios_mandatories.rs @@ -3140,7 +3140,7 @@ mod tests { ))); for i in 1..10 { - let message = AsyncMessage::new_with_hash( + let message = AsyncMessage::new( Slot::new(0, 0), 0, address, diff --git a/massa-execution-worker/src/tests/tests_active_history.rs b/massa-execution-worker/src/tests/tests_active_history.rs index 2f383c60bcc..543eac8d8f4 100644 --- a/massa-execution-worker/src/tests/tests_active_history.rs +++ b/massa-execution-worker/src/tests/tests_active_history.rs @@ -38,7 +38,7 @@ mod tests { ph2.insert(addr1, amount_a1_s2); ph2.insert(addr2, amount_a2_s2); - let mut credits = DeferredCredits::new_without_hash(); + let mut credits = DeferredCredits::new(); credits.credits = BTreeMap::from([(slot1, ph1), (slot2, ph2)]); let exec_output_1 = ExecutionOutput { diff --git a/massa-final-state/src/state_changes.rs b/massa-final-state/src/state_changes.rs index fa23b214af3..ecba8f9f3f4 100644 --- a/massa-final-state/src/state_changes.rs +++ b/massa-final-state/src/state_changes.rs @@ -83,7 +83,7 @@ impl Serializer for StateChangesSerializer { /// use massa_async_pool::{AsyncMessage, AsyncPoolChanges}; /// /// let mut 
state_changes = StateChanges::default(); - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), @@ -213,7 +213,7 @@ impl Deserializer for StateChangesDeserializer { /// use massa_async_pool::{AsyncMessage, AsyncPoolChanges}; /// /// let mut state_changes = StateChanges::default(); - /// let message = AsyncMessage::new_with_hash( + /// let message = AsyncMessage::new( /// Slot::new(1, 0), /// 0, /// Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), diff --git a/massa-final-state/src/tests/scenarios.rs b/massa-final-state/src/tests/scenarios.rs index 115e1291cbd..47cbd635d5d 100644 --- a/massa-final-state/src/tests/scenarios.rs +++ b/massa-final-state/src/tests/scenarios.rs @@ -167,7 +167,7 @@ fn test_final_state() { let slot = Slot::new(1, 0); let mut state_changes = StateChanges::default(); - let message = AsyncMessage::new_with_hash( + let message = AsyncMessage::new( Slot::new(1, 0), 0, Address::from_str("AU12dG5xP1RDEB5ocdHkymNVvvSJmUL9BgHwCksDowqmGWxfpm93x").unwrap(), diff --git a/massa-pos-exports/src/cycle_info.rs b/massa-pos-exports/src/cycle_info.rs index 775b2df7d80..3ecb4ddfd06 100644 --- a/massa-pos-exports/src/cycle_info.rs +++ b/massa-pos-exports/src/cycle_info.rs @@ -1,5 +1,5 @@ use bitvec::vec::BitVec; -use massa_hash::{Hash, HashXof, HashXofDeserializer, HashXofSerializer, HASH_XOF_SIZE_BYTES}; +use massa_hash::{HashXof, HashXofDeserializer, HashXofSerializer, HASH_XOF_SIZE_BYTES}; use massa_models::{ address::{Address, AddressDeserializer, AddressSerializer}, prehash::PreHashMap, @@ -23,73 +23,6 @@ use serde::{Deserialize, Serialize}; use std::collections::{BTreeMap, VecDeque}; use std::ops::Bound::Included; -const CYCLE_INFO_HASH_INITIAL_BYTES: &[u8; HASH_XOF_SIZE_BYTES] = &[0; HASH_XOF_SIZE_BYTES]; - -struct CycleInfoHashComputer { - u64_ser: U64VarIntSerializer, - 
address_ser: AddressSerializer, - bitvec_ser: BitVecSerializer, -} - -impl CycleInfoHashComputer { - fn new() -> Self { - Self { - u64_ser: U64VarIntSerializer::new(), - address_ser: AddressSerializer::new(), - bitvec_ser: BitVecSerializer::new(), - } - } - - fn compute_cycle_hash(&self, cycle: u64) -> Hash { - // serialization can never fail in the following computations, unwrap is justified - let mut buffer = Vec::new(); - self.u64_ser.serialize(&cycle, &mut buffer).unwrap(); - Hash::compute_from(&buffer) - } - - fn compute_complete_hash(&self, complete: bool) -> Hash { - let mut buffer = Vec::new(); - self.u64_ser - .serialize(&(complete as u64), &mut buffer) - .unwrap(); - Hash::compute_from(&buffer) - } - - fn compute_seed_hash(&self, seed: &BitVec) -> Hash { - let mut buffer = Vec::new(); - self.bitvec_ser.serialize(seed, &mut buffer).unwrap(); - Hash::compute_from(&buffer) - } - - // TODO: Remove hash from cycle and deferred credits as it's not saved in the DB. - fn compute_roll_entry_hash( - &self, - address: &Address, - roll_count: u64, - ) -> HashXof { - let mut buffer = Vec::new(); - self.address_ser.serialize(address, &mut buffer).unwrap(); - self.u64_ser.serialize(&roll_count, &mut buffer).unwrap(); - HashXof::compute_from(&buffer) - } - - fn compute_prod_stats_entry_hash( - &self, - address: &Address, - prod_stats: &ProductionStats, - ) -> HashXof { - let mut buffer = Vec::new(); - self.address_ser.serialize(address, &mut buffer).unwrap(); - self.u64_ser - .serialize(&prod_stats.block_success_count, &mut buffer) - .unwrap(); - self.u64_ser - .serialize(&prod_stats.block_failure_count, &mut buffer) - .unwrap(); - HashXof::compute_from(&buffer) - } -} - /// State of a cycle for all threads #[derive(Debug, Clone, PartialEq, Eq)] pub struct CycleInfo { @@ -103,47 +36,20 @@ pub struct CycleInfo { pub rng_seed: BitVec, /// Per-address production statistics pub production_stats: PreHashMap, - /// Hash of the roll counts - pub roll_counts_hash: HashXof, - /// 
Hash of the production statistics - pub production_stats_hash: HashXof, - /// Hash of the cycle state - pub cycle_global_hash: HashXof, /// Snapshot of the final state hash /// Used for PoS selections pub final_state_hash_snapshot: Option>, } impl CycleInfo { - /// Create a new `CycleInfo` and compute its hash - pub fn new_with_hash( + /// Create a new `CycleInfo` + pub fn new( cycle: u64, complete: bool, roll_counts: BTreeMap, rng_seed: BitVec, production_stats: PreHashMap, ) -> Self { - let hash_computer = CycleInfoHashComputer::new(); - let mut roll_counts_hash = HashXof::from_bytes(CYCLE_INFO_HASH_INITIAL_BYTES); - let mut production_stats_hash = HashXof::from_bytes(CYCLE_INFO_HASH_INITIAL_BYTES); - - // compute the cycle hash - let mut hash_concat: Vec = Vec::new(); - hash_concat.extend(hash_computer.compute_cycle_hash(cycle).to_bytes()); - hash_concat.extend(hash_computer.compute_complete_hash(complete).to_bytes()); - hash_concat.extend(hash_computer.compute_seed_hash(&rng_seed).to_bytes()); - for (addr, &count) in &roll_counts { - roll_counts_hash ^= hash_computer.compute_roll_entry_hash(addr, count); - } - hash_concat.extend(roll_counts_hash.to_bytes()); - for (addr, prod_stats) in &production_stats { - production_stats_hash ^= hash_computer.compute_prod_stats_entry_hash(addr, prod_stats); - } - hash_concat.extend(production_stats_hash.to_bytes()); - - // compute the global hash - let cycle_global_hash = HashXof::compute_from(&hash_concat); - // create the new cycle CycleInfo { cycle, @@ -151,9 +57,6 @@ impl CycleInfo { roll_counts, rng_seed, production_stats, - roll_counts_hash, - production_stats_hash, - cycle_global_hash, final_state_hash_snapshot: None, } } @@ -277,7 +180,7 @@ impl Deserializer for CycleInfoDeserializer { PreHashMap, // production_stats (address, n_success, n_fail) Option>, // final_state_hash_snapshot )| { - let mut cycle = CycleInfo::new_with_hash( + let mut cycle = CycleInfo::new( cycle, complete, roll_counts.into_iter().collect(), 
diff --git a/massa-pos-exports/src/deferred_credits.rs b/massa-pos-exports/src/deferred_credits.rs index d65aca54c2b..fb55b85c16c 100644 --- a/massa-pos-exports/src/deferred_credits.rs +++ b/massa-pos-exports/src/deferred_credits.rs @@ -1,4 +1,3 @@ -use massa_hash::{HashXof, HASH_XOF_SIZE_BYTES}; use massa_models::{ address::{Address, AddressDeserializer, AddressSerializer}, amount::{Amount, AmountDeserializer, AmountSerializer}, @@ -21,16 +20,11 @@ use std::{ ops::Bound::{Excluded, Included}, }; -const DEFERRED_CREDITS_HASH_INITIAL_BYTES: &[u8; HASH_XOF_SIZE_BYTES] = &[0; HASH_XOF_SIZE_BYTES]; - #[derive(Clone, Serialize, Deserialize)] /// Structure containing all the PoS deferred credits information pub struct DeferredCredits { /// Deferred credits pub credits: BTreeMap>, - /// Hash tracker, optional. Indeed, computing the hash is expensive, so we only compute it when finalizing a slot. - #[serde(skip_serializing, skip_deserializing)] - hash_tracker: Option, } impl Debug for DeferredCredits { @@ -39,48 +33,9 @@ impl Debug for DeferredCredits { } } -#[derive(Clone)] -struct DeferredCreditsHashTracker { - slot_ser: SlotSerializer, - address_ser: AddressSerializer, - amount_ser: AmountSerializer, - hash: HashXof, -} - -impl DeferredCreditsHashTracker { - /// Initialize hash tracker - fn new() -> Self { - Self { - slot_ser: SlotSerializer::new(), - address_ser: AddressSerializer::new(), - amount_ser: AmountSerializer::new(), - hash: HashXof::from_bytes(DEFERRED_CREDITS_HASH_INITIAL_BYTES), - } - } - - /// Get resulting hash from the tracker - pub fn get_hash(&self) -> &HashXof { - &self.hash - } - - /// Apply adding an element (must not be an overwrite) or deleting an element (must exist) - pub fn toggle_entry(&mut self, slot: &Slot, address: &Address, amount: &Amount) { - self.hash ^= self.compute_hash(slot, address, amount); - } - - /// Compute the hash for a specific entry - fn compute_hash( - &self, - slot: &Slot, - address: &Address, - amount: &Amount, - ) -> 
HashXof { - // serialization can never fail in the following computations, unwrap is justified - let mut buffer = Vec::new(); - self.slot_ser.serialize(slot, &mut buffer).unwrap(); - self.address_ser.serialize(address, &mut buffer).unwrap(); - self.amount_ser.serialize(amount, &mut buffer).unwrap(); - HashXof::compute_from(&buffer) +impl Default for DeferredCredits { + fn default() -> Self { + Self::new() } } @@ -91,26 +46,12 @@ impl DeferredCredits { } /// Create a new DeferredCredits with hash tracking - pub fn new_with_hash() -> Self { - Self { - credits: Default::default(), - hash_tracker: Some(DeferredCreditsHashTracker::new()), - } - } - - /// Create a new DeferredCredits without hash tracking - pub fn new_without_hash() -> Self { + pub fn new() -> Self { Self { credits: Default::default(), - hash_tracker: None, } } - /// Get hash from tracker, if any - pub fn get_hash(&self) -> Option<&HashXof> { - self.hash_tracker.as_ref().map(|ht| ht.get_hash()) - } - /// Apply a function to each element pub fn for_each(&mut self, mut f: F) where @@ -123,43 +64,18 @@ impl DeferredCredits { } } - /// Enables the hash tracker (and compute the hash if absent) - pub fn enable_hash_tracker_and_compute_hash(&mut self) -> &HashXof { - if self.hash_tracker.is_none() { - let mut hash_tracker = DeferredCreditsHashTracker::new(); - self.for_each(|slot, address, amount| { - hash_tracker.toggle_entry(slot, address, amount); - }); - self.hash_tracker = Some(hash_tracker); - } - - self.hash_tracker.as_ref().unwrap().get_hash() - } - - /// Disable the hash tracker, loses hash - pub fn disable_hash_tracker(&mut self) { - self.hash_tracker = None; - } - /// Get all deferred credits within a slot range. - /// If `with_hash == true` then the resulting DeferredCredits contains the hash of the included data. - /// Note that computing the hash is heavy and should be done only at finalization. 
- pub fn get_slot_range(&self, range: R, with_hash: bool) -> DeferredCredits + pub fn get_slot_range(&self, range: R) -> DeferredCredits where R: RangeBounds, { - let mut res = DeferredCredits { + DeferredCredits { credits: self .credits .range(range) .map(|(s, map)| (*s, map.clone())) .collect(), - hash_tracker: None, - }; - if with_hash { - res.enable_hash_tracker_and_compute_hash(); } - res } /// Extends the current `DeferredCredits` with another and replace the amounts for existing addresses @@ -178,22 +94,13 @@ impl DeferredCredits { // We need to destructure self to be able to mutate both credits and the hash_tracker during iteration // Without it, the borrow-checker is angry that we try to mutate self twice. let Self { - ref mut credits, - ref mut hash_tracker, - .. + ref mut credits, .. } = self; for (slot, credits) in credits { - credits.retain(|address, amount| { - // if amount is zero XOR the credit hash and do not retain - if amount.is_zero() { - if let Some(ht) = hash_tracker.as_mut() { - ht.toggle_entry(slot, address, amount) - } - false - } else { - true - } + credits.retain(|_, amount| { + // do not retain if amount is zero + !amount.is_zero() }); if credits.is_empty() { empty_slots.push(*slot); @@ -214,19 +121,10 @@ impl DeferredCredits { /// Insert an element pub fn insert(&mut self, slot: Slot, address: Address, amount: Amount) -> Option { - let prev = self - .credits + self.credits .entry(slot) .or_default() - .insert(address, amount); - if let Some(ht) = &mut self.hash_tracker { - if let Some(prev) = prev { - // remove overwritten value - ht.toggle_entry(&slot, &address, &prev); - } - ht.toggle_entry(&slot, &address, &amount); - } - prev + .insert(address, amount) } } @@ -283,16 +181,11 @@ pub struct DeferredCreditsDeserializer { pub u64_deserializer: U64VarIntDeserializer, pub slot_deserializer: SlotDeserializer, pub credit_deserializer: CreditsDeserializer, - enable_hash: bool, } impl DeferredCreditsDeserializer { /// Creates a new 
`DeferredCredits` deserializer - pub fn new( - thread_count: u8, - max_credits_length: u64, - enable_hash: bool, - ) -> DeferredCreditsDeserializer { + pub fn new(thread_count: u8, max_credits_length: u64) -> DeferredCreditsDeserializer { DeferredCreditsDeserializer { u64_deserializer: U64VarIntDeserializer::new( Included(u64::MIN), @@ -303,7 +196,6 @@ impl DeferredCreditsDeserializer { (Included(0), Excluded(thread_count)), ), credit_deserializer: CreditsDeserializer::new(max_credits_length), - enable_hash, } } } @@ -329,15 +221,8 @@ impl Deserializer for DeferredCreditsDeserializer { )), ), ) - .map(|elements| { - let mut res = DeferredCredits { - credits: elements.into_iter().collect(), - hash_tracker: None, - }; - if self.enable_hash { - res.enable_hash_tracker_and_compute_hash(); - } - res + .map(|elements| DeferredCredits { + credits: elements.into_iter().collect(), }) .parse(buffer) } diff --git a/massa-pos-exports/src/pos_changes.rs b/massa-pos-exports/src/pos_changes.rs index cef64272d94..061c8cea843 100644 --- a/massa-pos-exports/src/pos_changes.rs +++ b/massa-pos-exports/src/pos_changes.rs @@ -39,7 +39,7 @@ impl Default for PoSChanges { seed_bits: Default::default(), roll_changes: Default::default(), production_stats: Default::default(), - deferred_credits: DeferredCredits::new_with_hash(), + deferred_credits: DeferredCredits::new(), } } } @@ -153,7 +153,6 @@ impl PoSChangesDeserializer { deferred_credits_deserializer: DeferredCreditsDeserializer::new( thread_count, max_credits_length, - true, ), } } diff --git a/massa-pos-exports/src/pos_final_state.rs b/massa-pos-exports/src/pos_final_state.rs index d4dec2541a6..2d43c686a4f 100644 --- a/massa-pos-exports/src/pos_final_state.rs +++ b/massa-pos-exports/src/pos_final_state.rs @@ -179,7 +179,7 @@ impl PoSFinalState { let initial_seeds = vec![Hash::compute_from(init_seed.to_bytes()), init_seed]; let deferred_credits_deserializer = - DeferredCreditsDeserializer::new(config.thread_count, 
config.max_credit_length, true); + DeferredCreditsDeserializer::new(config.thread_count, config.max_credit_length); let cycle_info_deserializer = CycleHistoryDeserializer::new( config.cycle_history_length as u64, config.max_rolls_length, @@ -243,7 +243,7 @@ impl PoSFinalState { rng_seed.extend(vec![false; self.config.thread_count as usize]); self.put_new_cycle_info( - &CycleInfo::new_with_hash( + &CycleInfo::new( 0, false, self.initial_rolls.clone(), @@ -289,7 +289,7 @@ impl PoSFinalState { last_slot.is_last_of_cycle(self.config.periods_per_cycle, self.config.thread_count); self.put_new_cycle_info( - &CycleInfo::new_with_hash( + &CycleInfo::new( cycle, complete, last_cycle_info.roll_counts.clone(), @@ -408,7 +408,7 @@ impl PoSFinalState { let roll_counts = self.get_all_roll_counts(info.0); self.put_new_cycle_info( - &CycleInfo::new_with_hash( + &CycleInfo::new( cycle, false, roll_counts, @@ -658,7 +658,7 @@ impl PoSFinalState { { let db = self.db.read(); - let mut deferred_credits = DeferredCredits::new_without_hash(); + let mut deferred_credits = DeferredCredits::new(); let mut start_key_buffer = Vec::new(); start_key_buffer.extend_from_slice(DEFERRED_CREDITS_PREFIX.as_bytes()); @@ -1021,7 +1021,7 @@ impl PoSFinalState { .unwrap_or(PreHashMap::default()); let mut cycle_info = - CycleInfo::new_with_hash(cycle, complete, roll_counts, rng_seed, production_stats); + CycleInfo::new(cycle, complete, roll_counts, rng_seed, production_stats); cycle_info.final_state_hash_snapshot = final_state_hash_snapshot; Some(cycle_info) } @@ -1504,7 +1504,7 @@ impl PoSFinalState { pub fn get_deferred_credits(&self) -> DeferredCredits { let db = self.db.read(); - let mut deferred_credits = DeferredCredits::new_with_hash(); + let mut deferred_credits = DeferredCredits::new(); for (serialized_key, serialized_value) in db.prefix_iterator_cf(STATE_CF, DEFERRED_CREDITS_PREFIX.as_bytes()) @@ -1585,11 +1585,8 @@ mod tests { let init_seed = Hash::compute_from(b""); let initial_seeds = 
vec![Hash::compute_from(init_seed.to_bytes()), init_seed]; - let deferred_credits_deserializer = DeferredCreditsDeserializer::new( - pos_config.thread_count, - pos_config.max_credit_length, - true, - ); + let deferred_credits_deserializer = + DeferredCreditsDeserializer::new(pos_config.thread_count, pos_config.max_credit_length); let cycle_info_deserializer = CycleHistoryDeserializer::new( pos_config.cycle_history_length as u64, pos_config.max_rolls_length, @@ -1613,7 +1610,7 @@ mod tests { // Populate the disk with some cycle infos let mut cycle_infos = Vec::new(); for cycle in 509..516 { - cycle_infos.push(CycleInfo::new_with_hash( + cycle_infos.push(CycleInfo::new( cycle, Default::default(), Default::default(), @@ -1700,11 +1697,8 @@ mod tests { let init_seed = Hash::compute_from(b""); let initial_seeds = vec![Hash::compute_from(init_seed.to_bytes()), init_seed]; - let deferred_credits_deserializer = DeferredCreditsDeserializer::new( - pos_config.thread_count, - pos_config.max_credit_length, - true, - ); + let deferred_credits_deserializer = + DeferredCreditsDeserializer::new(pos_config.thread_count, pos_config.max_credit_length); let cycle_info_deserializer = CycleHistoryDeserializer::new( pos_config.cycle_history_length as u64, pos_config.max_rolls_length, @@ -1748,7 +1742,7 @@ mod tests { seed_bits: bitvec![u8, Lsb0; 0, 1], roll_changes: roll_changes.clone(), production_stats: production_stats.clone(), - deferred_credits: DeferredCredits::new_with_hash(), + deferred_credits: DeferredCredits::new(), }; let mut batch = DBBatch::new(); @@ -1773,7 +1767,7 @@ mod tests { seed_bits: bitvec![u8, Lsb0; 1, 0], roll_changes: roll_changes.clone(), production_stats: production_stats.clone(), - deferred_credits: DeferredCredits::new_with_hash(), + deferred_credits: DeferredCredits::new(), }; let mut batch = DBBatch::new(); @@ -1799,7 +1793,7 @@ mod tests { seed_bits: bitvec![u8, Lsb0; 0, 1], roll_changes, production_stats, - deferred_credits: 
DeferredCredits::new_with_hash(), + deferred_credits: DeferredCredits::new(), }; let mut batch = DBBatch::new(); @@ -1825,7 +1819,7 @@ mod tests { }, ); - let cycle_info_b = CycleInfo::new_with_hash( + let cycle_info_b = CycleInfo::new( 0, false, BTreeMap::default(), @@ -1833,17 +1827,6 @@ mod tests { prod_stats, ); - assert_eq!( - cycle_info_a.roll_counts_hash, cycle_info_b.roll_counts_hash, - "roll_counts_hash mismatch" - ); - assert_eq!( - cycle_info_a.production_stats_hash, cycle_info_b.production_stats_hash, - "production_stats_hash mismatch" - ); - assert_eq!( - cycle_info_a.cycle_global_hash, cycle_info_b.cycle_global_hash, - "global_hash mismatch" - ); + assert_eq!(cycle_info_a, cycle_info_b, "cycle_info mismatch"); } } From 7e7e296e6b7e563578ee161402b0510f229c0b67 Mon Sep 17 00:00:00 2001 From: Leo-Besancon Date: Wed, 26 Jul 2023 09:29:42 +0200 Subject: [PATCH 56/71] Protocol: Add periodic unban of every peers (#4277) * Add unban_everyone timer in config * removed unused last_unban field * Added unban protocol test * Added comments * Update unban behaviour * Changed Unbanned peer status from Trusted to HandshakeFailed --- massa-node/base_config/config.toml | 2 + massa-node/src/main.rs | 1 + massa-node/src/settings.rs | 2 + massa-protocol-exports/src/settings.rs | 2 + .../src/test_exports/config.rs | 3 ++ massa-protocol-worker/src/connectivity.rs | 14 +++++-- .../src/handlers/peer_handler/models.rs | 7 ++-- .../src/tests/ban_nodes_scenarios.rs | 37 +++++++++++++++++++ massa-protocol-worker/src/tests/context.rs | 5 ++- .../src/tests/mock_network.rs | 28 ++++++++++++-- 10 files changed, 91 insertions(+), 10 deletions(-) diff --git a/massa-node/base_config/config.toml b/massa-node/base_config/config.toml index 3807527edf2..ba2e608ce4a 100644 --- a/massa-node/base_config/config.toml +++ b/massa-node/base_config/config.toml @@ -207,6 +207,8 @@ try_connection_timer = 250 # Number of millis seconds between each try out connections for same peer 
try_connection_timer_same_peer = 10000 + # Number of millis seconds between each unban of every peer + unban_everyone_timer = 86400000 # Number of millis seconds that create a timeout for out connections timeout_connection = 1000 # max number of operations kept for propagation diff --git a/massa-node/src/main.rs b/massa-node/src/main.rs index a4d0273bbef..4c17f967fbe 100644 --- a/massa-node/src/main.rs +++ b/massa-node/src/main.rs @@ -633,6 +633,7 @@ async fn launch( read_write_limit_bytes_per_second: SETTINGS.protocol.read_write_limit_bytes_per_second as u128, try_connection_timer: SETTINGS.protocol.try_connection_timer, + unban_everyone_timer: SETTINGS.protocol.unban_everyone_timer, max_in_connections: SETTINGS.protocol.max_in_connections, timeout_connection: SETTINGS.protocol.timeout_connection, message_timeout: SETTINGS.protocol.message_timeout, diff --git a/massa-node/src/settings.rs b/massa-node/src/settings.rs index a35edb73128..16913f935be 100644 --- a/massa-node/src/settings.rs +++ b/massa-node/src/settings.rs @@ -251,6 +251,8 @@ pub struct ProtocolSettings { pub try_connection_timer: MassaTime, /// try connection timer for the same peer pub try_connection_timer_same_peer: MassaTime, + /// periodically unban every peer + pub unban_everyone_timer: MassaTime, /// Timeout connection pub timeout_connection: MassaTime, /// Message timeout diff --git a/massa-protocol-exports/src/settings.rs b/massa-protocol-exports/src/settings.rs index 4d475c51e3f..9d865b526c2 100644 --- a/massa-protocol-exports/src/settings.rs +++ b/massa-protocol-exports/src/settings.rs @@ -148,6 +148,8 @@ pub struct ProtocolConfig { pub try_connection_timer: MassaTime, /// try connection timer same peer pub try_connection_timer_same_peer: MassaTime, + /// periodically unban every peer + pub unban_everyone_timer: MassaTime, /// Max in connections pub max_in_connections: usize, /// Timeout connection diff --git a/massa-protocol-exports/src/test_exports/config.rs 
b/massa-protocol-exports/src/test_exports/config.rs index 7cd9f9d5837..9841c6dfc40 100644 --- a/massa-protocol-exports/src/test_exports/config.rs +++ b/massa-protocol-exports/src/test_exports/config.rs @@ -5,6 +5,8 @@ use massa_models::config::{ENDORSEMENT_COUNT, MAX_MESSAGE_SIZE}; use massa_time::MassaTime; use tempfile::NamedTempFile; +const ONE_DAY_MS: u64 = 24 * 60 * 60 * 1000; + impl Default for ProtocolConfig { fn default() -> Self { ProtocolConfig { @@ -79,6 +81,7 @@ impl Default for ProtocolConfig { read_write_limit_bytes_per_second: 1024 * 1000, timeout_connection: MassaTime::from_millis(1000), try_connection_timer: MassaTime::from_millis(5000), + unban_everyone_timer: MassaTime::from_millis(ONE_DAY_MS), routable_ip: None, max_in_connections: 10, debug: true, diff --git a/massa-protocol-worker/src/connectivity.rs b/massa-protocol-worker/src/connectivity.rs index eaf4deda835..f1e5f5a2780 100644 --- a/massa-protocol-worker/src/connectivity.rs +++ b/massa-protocol-worker/src/connectivity.rs @@ -16,7 +16,7 @@ use std::net::SocketAddr; use std::sync::Arc; use std::{collections::HashMap, net::IpAddr}; use std::{thread::JoinHandle, time::Duration}; -use tracing::{info, warn}; +use tracing::{debug, info, warn}; use crate::{ handlers::peer_handler::models::{InitialPeers, PeerState, SharedPeerDB}, @@ -183,6 +183,7 @@ pub(crate) fn start_connectivity_thread( let tick_metrics = tick(massa_metrics.tick_delay); let tick_try_connect = tick(config.try_connection_timer.to_duration()); + let tick_unban_everyone = tick(config.unban_everyone_timer.to_duration()); //Try to connect to peers loop { @@ -233,9 +234,7 @@ pub(crate) fn start_connectivity_thread( }, recv(tick_metrics) -> _ => { massa_metrics.inc_peernet_total_bytes_receive(network_controller.get_total_bytes_received()); - massa_metrics.inc_peernet_total_bytes_sent(network_controller.get_total_bytes_sent()); - let active_conn = network_controller.get_active_connections(); 
massa_metrics.set_active_connections(active_conn.get_nb_in_connections(), active_conn.get_nb_out_connections()); let peers_map = active_conn.get_peers_connections_bandwidth(); @@ -341,6 +340,15 @@ pub(crate) fn start_connectivity_thread( } } } + recv(tick_unban_everyone) -> _ => { + debug!("Periodic unban of every peer"); + let mut peer_db_write = peer_db.write(); + for (peer_id, peer_status) in &peer_db_write.peers.clone() { + if peer_status.state == PeerState::Banned { + peer_db_write.unban_peer(peer_id); + } + } + } } } } diff --git a/massa-protocol-worker/src/handlers/peer_handler/models.rs b/massa-protocol-worker/src/handlers/peer_handler/models.rs index b970f4d60e9..d9ccf0ef92f 100644 --- a/massa-protocol-worker/src/handlers/peer_handler/models.rs +++ b/massa-protocol-worker/src/handlers/peer_handler/models.rs @@ -12,7 +12,7 @@ use tracing::log::info; use super::announcement::Announcement; -const THREE_DAYS_MS: u64 = 3 * 24 * 60 * 60 * 1_000_000; +const THREE_DAYS_MS: u64 = 3 * 24 * 60 * 60 * 1_000; pub type InitialPeers = HashMap>; @@ -72,8 +72,9 @@ impl PeerDB { } pub fn unban_peer(&mut self, peer_id: &PeerId) { - if self.peers.contains_key(peer_id) { - self.peers.remove(peer_id); + if let Some(peer) = self.peers.get_mut(peer_id) { + // We set the state to HandshakeFailed to force the peer to be tested again + peer.state = PeerState::HandshakeFailed; info!("Unbanned peer: {:?}", peer_id); } else { info!("Tried to unban unknown peer: {:?}", peer_id); diff --git a/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs b/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs index 5b6547189fe..211037d08e9 100644 --- a/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs +++ b/massa-protocol-worker/src/tests/ban_nodes_scenarios.rs @@ -34,6 +34,7 @@ fn test_protocol_bans_node_sending_block_header_with_invalid_signature() { let mut protocol_config = ProtocolConfig::default(); protocol_config.thread_count = 2; protocol_config.initial_peers = 
"./src/tests/empty_initial_peers.json".to_string().into(); + protocol_config.unban_everyone_timer = MassaTime::from_millis(5000); protocol_test( &protocol_config, move |mut network_controller, @@ -78,6 +79,42 @@ fn test_protocol_bans_node_sending_block_header_with_invalid_signature() { } None => {} } + + //6. Check that the node is NOT unbanned after 1 seconds + std::thread::sleep(std::time::Duration::from_millis(1000)); + let (_node_a_peer_id, _node_a) = network_controller + .create_fake_connection(PeerId::from_public_key(node_a_keypair.get_public_key())); + std::thread::sleep(std::time::Duration::from_millis(1000)); + + assert_eq!( + network_controller + .get_connections() + .get_peer_ids_connected() + .len(), + 0 + ); + + //7. Check that the node is unbanned after 5 seconds + std::thread::sleep(std::time::Duration::from_millis(2000)); + let (_node_a_peer_id, _node_a) = network_controller + .create_fake_connection(PeerId::from_public_key(node_a_keypair.get_public_key())); + let block = tools::create_block(&node_a_keypair); + network_controller + .send_from_peer( + &node_a_peer_id, + Message::Block(Box::new(BlockMessage::Header(block.content.header))), + ) + .unwrap(); + + std::thread::sleep(std::time::Duration::from_millis(1000)); + assert_eq!( + network_controller + .get_connections() + .get_peer_ids_connected() + .len(), + 1 + ); + ( network_controller, protocol_controller, diff --git a/massa-protocol-worker/src/tests/context.rs b/massa-protocol-worker/src/tests/context.rs index 94de2a35f9d..a0ff552eaf7 100644 --- a/massa-protocol-worker/src/tests/context.rs +++ b/massa-protocol-worker/src/tests/context.rs @@ -100,7 +100,10 @@ pub fn start_protocol_controller_with_mock_network( let (controller, channels) = create_protocol_controller(config.clone()); - let network_controller = Box::new(MockNetworkController::new(message_handlers.clone())); + let network_controller = Box::new(MockNetworkController::new( + message_handlers.clone(), + peer_db.clone(), + )); let 
mip_stats_config = MipStatsConfig { block_count_considered: MIP_STORE_STATS_BLOCK_CONSIDERED, diff --git a/massa-protocol-worker/src/tests/mock_network.rs b/massa-protocol-worker/src/tests/mock_network.rs index 82876eba608..7eeb380842c 100644 --- a/massa-protocol-worker/src/tests/mock_network.rs +++ b/massa-protocol-worker/src/tests/mock_network.rs @@ -15,9 +15,13 @@ use peernet::{ use crate::{ handlers::{ - block_handler::BlockMessageSerializer, endorsement_handler::EndorsementMessageSerializer, + block_handler::BlockMessageSerializer, + endorsement_handler::EndorsementMessageSerializer, operation_handler::OperationMessageSerializer, - peer_handler::PeerManagementMessageSerializer, + peer_handler::{ + models::{PeerInfo, PeerState, SharedPeerDB}, + PeerManagementMessageSerializer, + }, }, messages::{Message, MessagesHandler, MessagesSerializer}, wrap_network::{ActiveConnectionsTrait, NetworkController}, @@ -108,6 +112,7 @@ pub struct MockNetworkController { connections: SharedMockActiveConnections, messages_handler: MessagesHandler, message_serializer: MessagesSerializer, + peer_db: SharedPeerDB, } impl Clone for MockNetworkController { @@ -120,12 +125,13 @@ impl Clone for MockNetworkController { .with_endorsement_message_serializer(EndorsementMessageSerializer::new()) .with_operation_message_serializer(OperationMessageSerializer::new()) .with_peer_management_message_serializer(PeerManagementMessageSerializer::new()), + peer_db: self.peer_db.clone(), } } } impl MockNetworkController { - pub fn new(messages_handler: MessagesHandler) -> Self { + pub fn new(messages_handler: MessagesHandler, peer_db: SharedPeerDB) -> Self { Self { connections: Arc::new(RwLock::new(MockActiveConnections::new())), messages_handler, @@ -134,6 +140,7 @@ impl MockNetworkController { .with_endorsement_message_serializer(EndorsementMessageSerializer::new()) .with_operation_message_serializer(OperationMessageSerializer::new()) 
.with_peer_management_message_serializer(PeerManagementMessageSerializer::new()), + peer_db, } } } @@ -141,10 +148,25 @@ impl MockNetworkController { impl MockNetworkController { pub fn create_fake_connection(&mut self, peer_id: PeerId) -> (PeerId, MassaReceiver) { let (sender, receiver) = MassaChannel::new("create_fake_connection".to_string(), None); + + // Don't fake connect if we are banned + if let Some(peer_info) = self.peer_db.read().peers.get(&peer_id) { + if peer_info.state == PeerState::Banned { + return (peer_id, receiver); + } + } + // Otherwise, add to active connections and to peer_db self.connections .write() .connections .insert(peer_id.clone(), sender); + self.peer_db.write().peers.insert( + peer_id.clone(), + PeerInfo { + last_announce: None, + state: PeerState::Trusted, + }, + ); (peer_id, receiver) } From 660ea0b585c679d81dc29aad9f5836fbb3119fdc Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Wed, 26 Jul 2023 14:59:42 +0200 Subject: [PATCH 57/71] update runtime version --- Cargo.lock | 2 +- Cargo.toml | 2 +- massa-client/base_config/config.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 27b7b555bfc..3f8df22a0da 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2497,7 +2497,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=feature/Improve_ABI_types_in_wasmv1#87eac9491869aca2e3641b9c1395415136cdf5cd" +source = "git+https://github.com/massalabs/massa-sc-runtime?rev=ad4f65330b186c2f83440c2362c34d1faa84dc58#ad4f65330b186c2f83440c2362c34d1faa84dc58" dependencies = [ "anyhow", "as-ffi-bindings", diff --git a/Cargo.toml b/Cargo.toml index d420fc34b70..b76d467e4c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -100,7 +100,7 @@ massa_wallet = { path = "./massa-wallet" } # Massa projects dependencies massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", "rev" = 
"df9f5b24955bd76030add0945338226309531261" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "feature/Improve_ABI_types_in_wasmv1" } +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", "rev" = "ad4f65330b186c2f83440c2362c34d1faa84dc58" } peernet = { git = "https://github.com/massalabs/PeerNet", "branch" = "main" } # Common dependencies diff --git a/massa-client/base_config/config.toml b/massa-client/base_config/config.toml index 20decb705bb..7de719c6990 100644 --- a/massa-client/base_config/config.toml +++ b/massa-client/base_config/config.toml @@ -4,7 +4,7 @@ timeout = 1000 [default_node] # The IP of your node. Works both with IPv4 (like 127.0.0.1) and IPv6 (like ::1) addresses, if the node is bound to the correct protocol. -ip = "149.202.84.7" +ip = "127.0.0.1" private_port = 33034 public_port = 33035 grpc_port = 33037 From c772ee5568c89b5fdce92536d5a54e5898cd4cb2 Mon Sep 17 00:00:00 2001 From: Thomas Plisson Date: Wed, 26 Jul 2023 15:01:16 +0200 Subject: [PATCH 58/71] use branch instead of rev for runtime package --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3f8df22a0da..50fb4828c55 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2497,7 +2497,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?rev=ad4f65330b186c2f83440c2362c34d1faa84dc58#ad4f65330b186c2f83440c2362c34d1faa84dc58" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=main#ad4f65330b186c2f83440c2362c34d1faa84dc58" dependencies = [ "anyhow", "as-ffi-bindings", diff --git a/Cargo.toml b/Cargo.toml index b76d467e4c6..148c03aad28 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -100,7 +100,7 @@ massa_wallet = { path = "./massa-wallet" } # Massa projects dependencies massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", "rev" = 
"df9f5b24955bd76030add0945338226309531261" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", "rev" = "ad4f65330b186c2f83440c2362c34d1faa84dc58" } +massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "main" } peernet = { git = "https://github.com/massalabs/PeerNet", "branch" = "main" } # Common dependencies From 122a1f63a7d60d7a3d438afa458b629576ceb1f0 Mon Sep 17 00:00:00 2001 From: Modship Date: Wed, 26 Jul 2023 15:15:31 +0200 Subject: [PATCH 59/71] add metrics (#4254) --- Cargo.lock | 1 + massa-bootstrap/Cargo.toml | 1 + massa-bootstrap/src/client.rs | 3 + massa-bootstrap/src/server.rs | 24 +- massa-bootstrap/src/tests/scenarios.rs | 27 ++ massa-execution-worker/src/execution.rs | 21 +- massa-metrics/src/lib.rs | 327 +++++++++++++----- massa-metrics/src/survey.rs | 41 --- massa-node/src/main.rs | 28 ++ massa-node/src/survey.rs | 127 +++++++ massa-protocol-worker/src/connectivity.rs | 6 +- .../src/handlers/peer_handler/mod.rs | 3 + .../src/handlers/peer_handler/tester.rs | 12 + 13 files changed, 491 insertions(+), 130 deletions(-) delete mode 100644 massa-metrics/src/survey.rs create mode 100644 massa-node/src/survey.rs diff --git a/Cargo.lock b/Cargo.lock index 50fb4828c55..945e8b6a086 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2625,6 +2625,7 @@ dependencies = [ "massa_ledger_exports", "massa_ledger_worker", "massa_logging", + "massa_metrics", "massa_models", "massa_pos_exports", "massa_pos_worker", diff --git a/massa-bootstrap/Cargo.toml b/massa-bootstrap/Cargo.toml index 0adba879a21..7e663a38f87 100644 --- a/massa-bootstrap/Cargo.toml +++ b/massa-bootstrap/Cargo.toml @@ -35,6 +35,7 @@ massa_pos_exports = {workspace = true} massa_time = {workspace = true} massa_db_exports = {workspace = true} massa_versioning = {workspace = true} +massa_metrics = {workspace = true} [dev-dependencies] mockall = {workspace = true} diff --git a/massa-bootstrap/src/client.rs b/massa-bootstrap/src/client.rs index 
0279f09c321..726a1397c02 100644 --- a/massa-bootstrap/src/client.rs +++ b/massa-bootstrap/src/client.rs @@ -2,6 +2,7 @@ use humantime::format_duration; use massa_db_exports::DBBatch; use massa_final_state::{FinalState, FinalStateError}; use massa_logging::massa_trace; +use massa_metrics::MassaMetrics; use massa_models::{node::NodeId, slot::Slot, streaming_step::StreamingStep, version::Version}; use massa_signature::PublicKey; use massa_time::MassaTime; @@ -402,6 +403,7 @@ pub fn get_state( end_timestamp: Option, restart_from_snapshot_at_period: Option, interupted: Arc<(Mutex, Condvar)>, + massa_metrics: MassaMetrics, ) -> Result { massa_trace!("bootstrap.lib.get_state", {}); @@ -494,6 +496,7 @@ pub fn get_state( ); match conn { Ok(mut client) => { + massa_metrics.inc_bootstrap_counter(); let bs = bootstrap_from_server( bootstrap_config, &mut client, diff --git a/massa-bootstrap/src/server.rs b/massa-bootstrap/src/server.rs index 4c22ee6928c..5acc81622a9 100644 --- a/massa-bootstrap/src/server.rs +++ b/massa-bootstrap/src/server.rs @@ -32,6 +32,7 @@ use massa_consensus_exports::{bootstrapable_graph::BootstrapableGraph, Consensus use massa_db_exports::CHANGE_ID_DESER_ERROR; use massa_final_state::FinalState; use massa_logging::massa_trace; +use massa_metrics::MassaMetrics; use massa_models::{ block_id::BlockId, prehash::PreHashSet, slot::Slot, streaming_step::StreamingStep, version::Version, @@ -136,6 +137,7 @@ pub fn start_bootstrap_server( config: BootstrapConfig, keypair: KeyPair, version: Version, + massa_metrics: MassaMetrics, ) -> Result { massa_trace!("bootstrap.lib.start_bootstrap_server", {}); @@ -181,6 +183,7 @@ pub fn start_bootstrap_server( version, ip_hist_map: HashMap::with_capacity(config.ip_list_max_size), bootstrap_config: config, + massa_metrics, } .event_loop(max_bootstraps) }) @@ -205,6 +208,7 @@ struct BootstrapServer<'a> { bootstrap_config: BootstrapConfig, version: Version, ip_hist_map: HashMap, + massa_metrics: MassaMetrics, } impl 
BootstrapServer<'_> { @@ -265,6 +269,7 @@ impl BootstrapServer<'_> { remote_addr, move || {}, ); + self.massa_metrics.inc_bootstrap_peers_failed(); continue; }; @@ -310,6 +315,7 @@ impl BootstrapServer<'_> { }) }; server_binding.close_and_send_error(msg, remote_addr, tracer); + self.massa_metrics.inc_bootstrap_peers_failed(); continue; }; @@ -324,6 +330,7 @@ impl BootstrapServer<'_> { let config = self.bootstrap_config.clone(); let bootstrap_count_token = bootstrap_sessions_counter.clone(); + let massa_metrics = self.massa_metrics.clone(); let _ = thread::Builder::new() .name(format!("bootstrap thread, peer: {}", remote_addr)) @@ -337,6 +344,7 @@ impl BootstrapServer<'_> { version, consensus_command_sender, protocol_controller, + massa_metrics, ) }); @@ -349,6 +357,7 @@ impl BootstrapServer<'_> { remote_addr, move || debug!("did not bootstrap {}: no available slots", remote_addr), ); + self.massa_metrics.inc_bootstrap_peers_failed(); } } } @@ -400,6 +409,7 @@ fn run_bootstrap_session( version: Version, consensus_command_sender: Box, protocol_controller: Box, + massa_metrics: MassaMetrics, ) { debug!("running bootstrap for peer {}", remote_addr); let deadline = Instant::now() + config.bootstrap_timeout.to_duration(); @@ -429,19 +439,25 @@ fn run_bootstrap_session( "Bootstrap process timedout ({})", format_duration(config.bootstrap_timeout.to_duration()) )); + massa_metrics.inc_bootstrap_peers_failed(); + } + Err(BootstrapError::ReceivedError(error)) => { + debug!( + "bootstrap serving error received from peer {}: {}", + remote_addr, error + ); + massa_metrics.inc_bootstrap_peers_failed(); } - Err(BootstrapError::ReceivedError(error)) => debug!( - "bootstrap serving error received from peer {}: {}", - remote_addr, error - ), Err(err) => { debug!("bootstrap serving error for peer {}: {}", remote_addr, err); // We allow unused result because we don't care if an error is thrown when // sending the error message to the server we will close the socket anyway. 
let _ = server.send_error_timeout(err.to_string()); + massa_metrics.inc_bootstrap_peers_failed(); } Ok(_) => { info!("bootstrapped peer {}", remote_addr); + massa_metrics.inc_bootstrap_peers_success(); } } } diff --git a/massa-bootstrap/src/tests/scenarios.rs b/massa-bootstrap/src/tests/scenarios.rs index 1da9f9b86f2..b6518d5bbc5 100644 --- a/massa-bootstrap/src/tests/scenarios.rs +++ b/massa-bootstrap/src/tests/scenarios.rs @@ -28,6 +28,7 @@ use massa_final_state::{ FinalState, FinalStateConfig, StateChanges, }; use massa_ledger_exports::LedgerConfig; +use massa_metrics::MassaMetrics; use massa_models::config::{ DENUNCIATION_EXPIRE_PERIODS, ENDORSEMENT_COUNT, GENESIS_TIMESTAMP, MAX_DEFERRED_CREDITS_LENGTH, MAX_DENUNCIATIONS_PER_BLOCK_HEADER, MAX_PRODUCTION_STATS_LENGTH, MAX_ROLLS_COUNT_LENGTH, T0, @@ -179,6 +180,13 @@ fn mock_bootstrap_manager(addr: SocketAddr, bootstrap_config: BootstrapConfig) - bootstrap_config.clone(), keypair.clone(), Version::from_str("TEST.1.10").unwrap(), + MassaMetrics::new( + false, + "0.0.0.0:31248".parse().unwrap(), + thread_count, + Duration::from_secs(5), + ) + .0, ) .unwrap() } @@ -415,6 +423,16 @@ fn test_bootstrap_server() { .expect_clone_box() .return_once(move || stream_mock2); + let metrics = MassaMetrics::new( + false, + "0.0.0.0:31248".parse().unwrap(), + thread_count, + Duration::from_secs(5), + ) + .0; + + let metrics_cloned = metrics.clone(); + // Start the bootstrap server thread let bootstrap_manager_thread = std::thread::Builder::new() .name("bootstrap_thread".to_string()) @@ -430,6 +448,7 @@ fn test_bootstrap_server() { bootstrap_config.clone(), keypair.clone(), Version::from_str("TEST.1.10").unwrap(), + metrics_cloned, ) .unwrap() }) @@ -511,6 +530,7 @@ fn test_bootstrap_server() { None, None, Arc::new((Mutex::new(false), Condvar::new())), + metrics, ) .unwrap(); @@ -684,6 +704,13 @@ fn test_bootstrap_accept_err() { bootstrap_config.clone(), keypair.clone(), Version::from_str("TEST.1.10").unwrap(), + 
MassaMetrics::new( + false, + "0.0.0.0:31248".parse().unwrap(), + thread_count, + Duration::from_secs(5), + ) + .0, ) .unwrap() }) diff --git a/massa-execution-worker/src/execution.rs b/massa-execution-worker/src/execution.rs index cf7392424c0..c601442fa33 100644 --- a/massa-execution-worker/src/execution.rs +++ b/massa-execution-worker/src/execution.rs @@ -256,10 +256,17 @@ impl ExecutionState { .set_active_cursor(self.active_cursor.period, self.active_cursor.thread); self.massa_metrics .set_final_cursor(self.final_cursor.period, self.final_cursor.thread); - self.massa_metrics.inc_operations_final_counter( exec_out_2.state_changes.executed_ops_changes.len() as u64, ); + self.massa_metrics + .set_active_history(self.active_history.read().0.len()); + + self.massa_metrics + .inc_sc_messages_final_by(exec_out_2.state_changes.async_pool_changes.0.len()); + + self.massa_metrics + .set_messages_pool(self.final_state.read().async_pool.message_info_cache.len()); // Broadcast a final slot execution output to active channel subscribers. if self.config.broadcast_enabled { @@ -296,6 +303,10 @@ impl ExecutionState { // add the execution output at the end of the output history self.active_history.write().0.push_back(exec_out); + + // update the prometheus metrics + self.massa_metrics + .set_active_history(self.active_history.read().0.len()) } /// Helper function. 
@@ -1362,10 +1373,16 @@ impl ExecutionState { // execute slot debug!("execute_final_slot: execution started"); let exec_out = self.execute_slot(slot, exec_target, selector); - + let has_block = exec_out.block_info.is_some(); // apply execution output to final state self.apply_final_execution_output(exec_out); + // update metrics + self.massa_metrics.inc_executed_final_slot(); + if has_block { + self.massa_metrics.inc_executed_final_slot_with_block(); + } + debug!( "execute_final_slot: execution finished & result applied & versioning stats updated" ); diff --git a/massa-metrics/src/lib.rs b/massa-metrics/src/lib.rs index bd4bed69fbd..9683a042ab4 100644 --- a/massa-metrics/src/lib.rs +++ b/massa-metrics/src/lib.rs @@ -7,22 +7,20 @@ use std::{ collections::HashMap, net::SocketAddr, + num::NonZeroUsize, sync::{Arc, RwLock}, thread::JoinHandle, time::Duration, }; use lazy_static::lazy_static; -use prometheus::{register_int_gauge, Gauge, IntCounter, IntGauge}; -use survey::MassaSurvey; +use prometheus::{register_int_gauge, Counter, Gauge, IntCounter, IntGauge}; use tokio::sync::oneshot::Sender; use tracing::warn; // #[cfg(not(feature = "testing"))] mod server; -mod survey; - lazy_static! { // use lazy_static for these metrics because they are used in storage which implement default static ref OPERATIONS_COUNTER: IntGauge = register_int_gauge!( @@ -79,6 +77,49 @@ pub struct MassaMetrics { /// index 0 = thread 0 ... 
consensus_vec: Vec, + /// number of stakers + stakers: IntGauge, + /// number of rolls + rolls: IntGauge, + + /// number of elements in the active_history of execution + active_history: IntGauge, + + /// number of operations in the operation pool + operations_pool: IntGauge, + /// number of endorsements in the endorsement pool + endorsements_pool: IntGauge, + /// number of elements in the denunciation pool + denunciations_pool: IntGauge, + + // number of autonomous SCs messages in pool + messages_pool: IntGauge, + + // number of autonomous SC messages executed as final + sc_messages_final: IntCounter, + + /// number of times our node (re-)bootstrapped + bootstrap_counter: IntCounter, + /// number of times we successfully bootstrapped someone + bootstrap_peers_success: IntCounter, + /// number of times we failed/refused to bootstrap someone + bootstrap_peers_failed: IntCounter, + + /// number of times we successfully tested someone + protocol_tester_success: IntCounter, + /// number of times we failed to test someone + protocol_tester_failed: IntCounter, + + /// know peers in protocol + protocol_known_peers: IntGauge, + /// banned peers in protocol + protocol_banned_peers: IntGauge, + + /// executed final slot + executed_final_slot: Counter, + /// executed final slot with block (not miss) + executed_final_slot_with_block: Counter, + /// total bytes receive by peernet manager peernet_total_bytes_receive: IntCounter, /// total bytes sent by peernet manager @@ -139,8 +180,6 @@ impl MassaMetrics { nb_thread: u8, tick_delay: Duration, ) -> (Self, MetricsStopper) { - // TODO unwrap - let mut consensus_vec = vec![]; for i in 0..nb_thread { let gauge = Gauge::new( @@ -156,6 +195,90 @@ impl MassaMetrics { consensus_vec.push(gauge); } + // set available processors + let available_processors = + IntCounter::new("process_available_processors", "number of processors") + .expect("Failed to create available_processors counter"); + let count = std::thread::available_parallelism() + 
.unwrap_or(NonZeroUsize::MIN) + .get(); + available_processors.inc_by(count as u64); + + // stakers + let stakers = IntGauge::new("stakers", "number of stakers").unwrap(); + let rolls = IntGauge::new("rolls", "number of rolls").unwrap(); + + let executed_final_slot = + Counter::new("executed_final_slot", "number of executed final slot").unwrap(); + let executed_final_slot_with_block = Counter::new( + "executed_final_slot_with_block", + "number of executed final slot with block (not miss)", + ) + .unwrap(); + + let protocol_tester_success = IntCounter::new( + "protocol_tester_success", + "number of times we successfully tested someone", + ) + .unwrap(); + let protocol_tester_failed = IntCounter::new( + "protocol_tester_failed", + "number of times we failed to test someone", + ) + .unwrap(); + + // pool + let operations_pool = IntGauge::new( + "operations_pool", + "number of operations in the operation pool", + ) + .unwrap(); + let endorsements_pool = IntGauge::new( + "endorsements_pool", + "number of endorsements in the endorsement pool", + ) + .unwrap(); + let denunciations_pool = IntGauge::new( + "denunciations_pool", + "number of elements in the denunciation pool", + ) + .unwrap(); + + let messages_pool = + IntGauge::new("messages_pool", "number of autonomous SCs messages in pool").unwrap(); + + let sc_messages_final = IntCounter::new( + "sc_messages_final", + "number of autonomous SC messages executed as final", + ) + .unwrap(); + + let bootstrap_counter = IntCounter::new( + "bootstrap_counter", + "number of times our node (re-)bootstrapped", + ) + .unwrap(); + let bootstrap_success = IntCounter::new( + "bootstrap_peers_success", + "number of times we successfully bootstrapped someone", + ) + .unwrap(); + let bootstrap_failed = IntCounter::new( + "bootstrap_peers_failed", + "number of times we failed/refused to bootstrap someone", + ) + .unwrap(); + + let active_history = IntGauge::new( + "active_history", + "number of elements in the active_history of 
execution", + ) + .unwrap(); + + let know_peers = IntGauge::new("known_peers", "number of known peers in protocol").unwrap(); + let banned_peers = + IntGauge::new("banned_peers", "number of banned peers in protocol").unwrap(); + // active cursor let active_cursor_thread = IntGauge::new("active_cursor_thread", "execution active cursor thread").unwrap(); @@ -276,8 +399,6 @@ impl MassaMetrics { if enabled { #[cfg(not(feature = "testing"))] { - // server::bind_metrics(addr); - let _ = prometheus::register(Box::new(final_cursor_thread.clone())); let _ = prometheus::register(Box::new(final_cursor_period.clone())); let _ = prometheus::register(Box::new(active_cursor_thread.clone())); @@ -306,23 +427,50 @@ impl MassaMetrics { let _ = prometheus::register(Box::new(peernet_total_bytes_receive.clone())); let _ = prometheus::register(Box::new(peernet_total_bytes_sent.clone())); let _ = prometheus::register(Box::new(operations_final_counter.clone())); + let _ = prometheus::register(Box::new(stakers.clone())); + let _ = prometheus::register(Box::new(rolls.clone())); + let _ = prometheus::register(Box::new(know_peers.clone())); + let _ = prometheus::register(Box::new(banned_peers.clone())); + let _ = prometheus::register(Box::new(executed_final_slot.clone())); + let _ = prometheus::register(Box::new(executed_final_slot_with_block.clone())); + let _ = prometheus::register(Box::new(active_history.clone())); + let _ = prometheus::register(Box::new(bootstrap_counter.clone())); + let _ = prometheus::register(Box::new(bootstrap_success.clone())); + let _ = prometheus::register(Box::new(bootstrap_failed.clone())); + let _ = prometheus::register(Box::new(available_processors)); + let _ = prometheus::register(Box::new(operations_pool.clone())); + let _ = prometheus::register(Box::new(endorsements_pool.clone())); + let _ = prometheus::register(Box::new(denunciations_pool.clone())); + let _ = prometheus::register(Box::new(protocol_tester_success.clone())); + let _ = 
prometheus::register(Box::new(protocol_tester_failed.clone())); + let _ = prometheus::register(Box::new(sc_messages_final.clone())); + let _ = prometheus::register(Box::new(messages_pool.clone())); stopper = server::bind_metrics(addr); } - - MassaSurvey::run( - tick_delay, - active_in_connections.clone(), - active_out_connections.clone(), - peernet_total_bytes_sent.clone(), - peernet_total_bytes_receive.clone(), - ); } ( MassaMetrics { enabled, consensus_vec, + stakers, + rolls, + active_history, + operations_pool, + endorsements_pool, + denunciations_pool, + messages_pool, + sc_messages_final, + bootstrap_counter, + bootstrap_peers_success: bootstrap_success, + bootstrap_peers_failed: bootstrap_failed, + protocol_tester_success, + protocol_tester_failed, + protocol_known_peers: know_peers, + protocol_banned_peers: banned_peers, + executed_final_slot, + executed_final_slot_with_block, peernet_total_bytes_receive, peernet_total_bytes_sent, block_graph_counter, @@ -356,6 +504,19 @@ impl MassaMetrics { ) } + pub fn is_enabled(&self) -> bool { + self.enabled + } + + pub fn get_metrics_for_survey_thread(&self) -> (i64, i64, u64, u64) { + ( + self.active_in_connections.clone().get(), + self.active_out_connections.clone().get(), + self.peernet_total_bytes_sent.clone().get(), + self.peernet_total_bytes_receive.clone().get(), + ) + } + pub fn set_active_connections(&self, in_connections: usize, out_connections: usize) { self.active_in_connections.set(in_connections as i64); self.active_out_connections.set(out_connections as i64); @@ -448,6 +609,74 @@ impl MassaMetrics { self.operations_final_counter.inc_by(diff); } + pub fn set_known_peers(&self, nb: usize) { + self.protocol_known_peers.set(nb as i64); + } + + pub fn set_banned_peers(&self, nb: usize) { + self.protocol_banned_peers.set(nb as i64); + } + + pub fn inc_executed_final_slot(&self) { + self.executed_final_slot.inc(); + } + + pub fn inc_executed_final_slot_with_block(&self) { + 
self.executed_final_slot_with_block.inc(); + } + + pub fn set_active_history(&self, nb: usize) { + self.active_history.set(nb as i64); + } + + pub fn inc_bootstrap_counter(&self) { + self.bootstrap_counter.inc(); + } + + pub fn inc_bootstrap_peers_success(&self) { + self.bootstrap_peers_success.inc(); + } + + pub fn inc_bootstrap_peers_failed(&self) { + self.bootstrap_peers_failed.inc(); + } + + pub fn set_operations_pool(&self, nb: usize) { + self.operations_pool.set(nb as i64); + } + + pub fn set_endorsements_pool(&self, nb: usize) { + self.endorsements_pool.set(nb as i64); + } + + pub fn set_denunciations_pool(&self, nb: usize) { + self.denunciations_pool.set(nb as i64); + } + + pub fn inc_protocol_tester_success(&self) { + self.protocol_tester_success.inc(); + } + + pub fn inc_protocol_tester_failed(&self) { + self.protocol_tester_failed.inc(); + } + + pub fn set_stakers(&self, nb: usize) { + self.stakers.set(nb as i64); + } + + pub fn set_rolls(&self, nb: usize) { + self.rolls.set(nb as i64); + } + + pub fn inc_sc_messages_final_by(&self, diff: usize) { + self.sc_messages_final.inc_by(diff as u64); + } + + pub fn set_messages_pool(&self, nb: usize) { + self.messages_pool.set(nb as i64); + } + /// Update the bandwidth metrics for all peers /// HashMap pub fn update_peers_tx_rx(&self, data: HashMap) { @@ -507,69 +736,3 @@ impl MassaMetrics { } } } -// mod test { -// use massa_channel::MassaChannel; - -// use crate::start_metrics_server; - -// #[tokio::test] -// async fn test_channel_metrics() { -// let addr = ([192, 168, 1, 183], 9898).into(); - -// start_metrics_server(addr); -// std::thread::sleep(std::time::Duration::from_millis(500)); -// let (sender, receiver) = MassaChannel::new("operations".to_string(), None); - -// let (sender2, receiver2) = MassaChannel::new("second_channel".to_string(), None); - -// sender2.send("hello_world".to_string()).unwrap(); -// let data = receiver2.recv().unwrap(); -// assert_eq!(data, "hello_world".to_string()); - -// for i in 
0..100 { -// sender.send(i).unwrap(); -// } - -// for _i in 0..20 { -// receiver.recv().unwrap(); -// } - -// assert_eq!(receiver.len(), 80); -// std::thread::sleep(std::time::std::time::Duration::from_secs(5)); -// drop(sender2); -// drop(receiver2); -// std::thread::sleep(std::time::Duration::from_secs(100)); -// } - -// #[tokio::test] -// async fn test_channel() { -// let addr = ([192, 168, 1, 183], 9898).into(); - -// start_metrics_server(addr); -// std::thread::sleep(std::time::Duration::from_millis(500)); - -// let (sender, receiver) = MassaChannel::new("test2".to_string(), None); - -// let cloned = receiver.clone(); - -// sender.send("msg".to_string()).unwrap(); - -// std::thread::spawn(move || { -// dbg!("spawned"); - -// loop { -// dbg!("loop"); -// dbg!(receiver.recv().unwrap()); -// std::thread::sleep(std::time::Duration::from_secs(1)); -// } -// }); -// std::thread::sleep(std::time::Duration::from_secs(2)); -// std::thread::spawn(move || { -// std::thread::sleep(std::time::std::time::Duration::from_secs(5)); - -// drop(sender); -// }); - -// std::thread::sleep(std::time::Duration::from_secs(20)); -// } -// } diff --git a/massa-metrics/src/survey.rs b/massa-metrics/src/survey.rs deleted file mode 100644 index ed198d5e9be..00000000000 --- a/massa-metrics/src/survey.rs +++ /dev/null @@ -1,41 +0,0 @@ -// use std::time::Duration; - -use prometheus::{IntCounter, IntGauge}; -#[allow(unused_imports)] -use tracing::warn; - -pub struct MassaSurvey {} - -impl MassaSurvey { - #[allow(unused_variables)] - pub fn run( - tick_delay: std::time::Duration, - active_in_connections: IntGauge, - active_out_connections: IntGauge, - peernet_total_bytes_sent: IntCounter, - peernet_total_bytes_receive: IntCounter, - ) { - #[cfg(not(feature = "sandbox"))] - { - let mut data_sent = 0; - let mut data_received = 0; - std::thread::spawn(move || loop { - std::thread::sleep(tick_delay); - - if active_in_connections.get() + active_out_connections.get() == 0 { - warn!("PEERNET | No 
active connections"); - } - - let new_data_sent = peernet_total_bytes_sent.get(); - let new_data_received = peernet_total_bytes_receive.get(); - - if new_data_sent == data_sent && new_data_received == data_received { - warn!("PEERNET | No data sent or received since 5s"); - } else { - data_sent = new_data_sent; - data_received = new_data_received; - } - }); - } - } -} diff --git a/massa-node/src/main.rs b/massa-node/src/main.rs index 4c17f967fbe..9abf831c38b 100644 --- a/massa-node/src/main.rs +++ b/massa-node/src/main.rs @@ -9,6 +9,7 @@ extern crate massa_logging; #[cfg(feature = "op_spammer")] use crate::operation_injector::start_operation_injector; use crate::settings::SETTINGS; +use crate::survey::MassaSurvey; use crossbeam_channel::TryRecvError; use dialoguer::Password; @@ -95,6 +96,7 @@ use std::sync::{Condvar, Mutex}; use std::time::Duration; use std::{path::Path, process, sync::Arc}; use structopt::StructOpt; +use survey::MassaSurveyStopper; use tokio::sync::broadcast; use tracing::{debug, error, info, warn}; use tracing_subscriber::filter::{filter_fn, LevelFilter}; @@ -102,6 +104,7 @@ use tracing_subscriber::filter::{filter_fn, LevelFilter}; #[cfg(feature = "op_spammer")] mod operation_injector; mod settings; +mod survey; async fn launch( args: &Args, @@ -121,6 +124,7 @@ async fn launch( StopHandle, Option, MetricsStopper, + MassaSurveyStopper, ) { info!("Node version : {}", *VERSION); let now = MassaTime::now().expect("could not get now time"); @@ -398,6 +402,7 @@ async fn launch( *END_TIMESTAMP, args.restart_from_snapshot_at_period, sig_int_toggled.clone(), + massa_metrics.clone(), ) { Ok(vals) => vals, Err(BootstrapError::Interupted(msg)) => { @@ -773,6 +778,7 @@ async fn launch( bootstrap_config, keypair.clone(), *VERSION, + massa_metrics.clone(), ) .expect("Could not start bootstrap server") }); @@ -979,6 +985,20 @@ async fn launch( api_config.bind_public ); + let massa_survey_stopper = MassaSurvey::run( + SETTINGS.metrics.tick_delay.to_duration(), + 
execution_controller, + pool_controller, + massa_metrics, + ( + api_config.thread_count, + api_config.t0, + api_config.genesis_timestamp, + api_config.periods_per_cycle, + api_config.last_start_period, + ), + ); + #[cfg(feature = "deadlock_detection")] { // only for #[cfg] @@ -1022,6 +1042,7 @@ async fn launch( api_handle, grpc_handle, metrics_stopper, + massa_survey_stopper, ) } @@ -1035,6 +1056,7 @@ struct Managers { factory_manager: Box, } +#[allow(clippy::too_many_arguments)] async fn stop( _consensus_event_receiver: MassaReceiver, Managers { @@ -1051,6 +1073,7 @@ async fn stop( api_handle: StopHandle, grpc_handle: Option, mut metrics_stopper: MetricsStopper, + mut massa_survey_stopper: MassaSurveyStopper, ) { // stop bootstrap if let Some(bootstrap_manager) = bootstrap_manager { @@ -1081,6 +1104,9 @@ async fn stop( // stop metrics metrics_stopper.stop(); + // stop massa survey thread + massa_survey_stopper.stop(); + // stop factory factory_manager.stop(); @@ -1247,6 +1273,7 @@ async fn run(args: Args) -> anyhow::Result<()> { api_handle, grpc_handle, metrics_stopper, + massa_survey_stopper, ) = launch(&cur_args, node_wallet.clone(), Arc::clone(&sig_int_toggled)).await; // loop over messages @@ -1312,6 +1339,7 @@ async fn run(args: Args) -> anyhow::Result<()> { api_handle, grpc_handle, metrics_stopper, + massa_survey_stopper, ) .await; diff --git a/massa-node/src/survey.rs b/massa-node/src/survey.rs new file mode 100644 index 00000000000..8526dae0da0 --- /dev/null +++ b/massa-node/src/survey.rs @@ -0,0 +1,127 @@ +use std::thread::JoinHandle; + +use massa_execution_exports::ExecutionController; +use massa_metrics::MassaMetrics; +use massa_models::{address::Address, slot::Slot, timeslots::get_latest_block_slot_at_timestamp}; +use massa_pool_exports::PoolController; +use massa_time::MassaTime; +use tracing::info; +// use std::time::Duration; +#[allow(unused_imports)] +use tracing::warn; + +pub struct MassaSurvey {} + +pub struct MassaSurveyStopper { + handle: 
Option>, +} + +impl MassaSurveyStopper { + pub fn stop(&mut self) { + if let Some(handle) = self.handle.take() { + match handle.join() { + Ok(_) => info!("MassaSurvey | Stopped"), + Err(_) => warn!("failed to join massa survey thread"), + } + } + } +} + +impl MassaSurvey { + #[allow(unused_variables)] + // config : (thread_count, t0, genesis_timestamp, periods_per_cycle, last_start_period) + pub fn run( + tick_delay: std::time::Duration, + execution_controller: Box, + pool_controller: Box, + massa_metrics: MassaMetrics, + config: (u8, MassaTime, MassaTime, u64, u64), + ) -> MassaSurveyStopper { + if massa_metrics.is_enabled() { + #[cfg(not(feature = "sandbox"))] + { + let mut data_sent = 0; + let mut data_received = 0; + match std::thread::Builder::new() + .name("massa-survey".to_string()) + .spawn(move || loop { + std::thread::sleep(tick_delay); + + let ( + active_in_connections, + active_out_connections, + new_data_sent, + new_data_received, + ) = massa_metrics.get_metrics_for_survey_thread(); + + if active_in_connections + active_out_connections == 0 { + warn!("PEERNET | No active connections"); + } + + if new_data_sent == data_sent && new_data_received == data_received { + warn!("PEERNET | No data sent or received since 5s"); + } else { + data_sent = new_data_sent; + data_received = new_data_received; + } + + { + // update stakers / rolls + let now = match MassaTime::now() { + Ok(now) => now, + Err(e) => { + warn!("MassaSurvey | Failed to get current time: {:?}", e); + continue; + } + }; + + let curr_cycle = + match get_latest_block_slot_at_timestamp(config.0, config.1, config.2, now) + { + Ok(Some(cur_slot)) if cur_slot.period <= config.4 => { + Slot::new(config.4, 0).get_cycle(config.3) + } + Ok(Some(cur_slot)) => cur_slot.get_cycle(config.3), + Ok(None) => 0, + Err(e) => { + warn!( + "MassaSurvey | Failed to get latest block slot at timestamp: {:?}", + e + ); + continue; + } + }; + + let staker_vec = execution_controller + .get_cycle_active_rolls(curr_cycle) 
+ .into_iter() + .collect::>(); + + massa_metrics.set_stakers(staker_vec.len()); + let rolls_count = staker_vec.iter().map(|(_, r)| *r).sum::(); + massa_metrics.set_rolls(rolls_count as usize); + } + + { + massa_metrics.set_operations_pool(pool_controller.get_operation_count()); + massa_metrics.set_endorsements_pool(pool_controller.get_endorsement_count()); + massa_metrics.set_denunciations_pool(pool_controller.get_denunciation_count()); + } + }) { + Ok(handle) => MassaSurveyStopper { handle: Some(handle) }, + Err(e) => { + warn!("MassaSurvey | Failed to spawn survey thread: {:?}", e); + MassaSurveyStopper { handle: None} + } + } + } + + #[cfg(feature = "sandbox")] + { + MassaSurveyStopper { handle: None } + } + } else { + MassaSurveyStopper { handle: None } + } + } +} diff --git a/massa-protocol-worker/src/connectivity.rs b/massa-protocol-worker/src/connectivity.rs index f1e5f5a2780..26008ba2bce 100644 --- a/massa-protocol-worker/src/connectivity.rs +++ b/massa-protocol-worker/src/connectivity.rs @@ -128,6 +128,7 @@ pub(crate) fn start_connectivity_thread( peer_categories.iter().map(|(key, value)|(key.clone(), (value.0.clone(), value.1.target_out_connections))).collect(), config.default_category_info.target_out_connections, &config, + massa_metrics.clone(), ); let mut operation_handler = OperationHandler::new( @@ -163,7 +164,7 @@ pub(crate) fn start_connectivity_thread( network_controller.get_active_connections(), selector_controller, consensus_controller, - pool_controller, + pool_controller.clone(), channel_blocks.1, sender_blocks_retrieval_ext, protocol_channels.block_handler_retrieval.1.clone(), @@ -239,6 +240,9 @@ pub(crate) fn start_connectivity_thread( massa_metrics.set_active_connections(active_conn.get_nb_in_connections(), active_conn.get_nb_out_connections()); let peers_map = active_conn.get_peers_connections_bandwidth(); massa_metrics.update_peers_tx_rx(peers_map); + let peer_db_read = peer_db.read(); + 
massa_metrics.set_known_peers(peer_db_read.peers.len()); + massa_metrics.set_banned_peers(peer_db_read.get_banned_peer_count() as usize); }, recv(tick_try_connect) -> _ => { let active_conn = network_controller.get_active_connections(); diff --git a/massa-protocol-worker/src/handlers/peer_handler/mod.rs b/massa-protocol-worker/src/handlers/peer_handler/mod.rs index 10c234ac71f..ee7dec19970 100644 --- a/massa-protocol-worker/src/handlers/peer_handler/mod.rs +++ b/massa-protocol-worker/src/handlers/peer_handler/mod.rs @@ -6,6 +6,7 @@ use crossbeam::channel::tick; use crossbeam::select; use massa_channel::{receiver::MassaReceiver, sender::MassaSender}; use massa_hash::Hash; +use massa_metrics::MassaMetrics; use massa_models::config::SIGNATURE_DESER_SIZE; use massa_models::version::{VersionDeserializer, VersionSerializer}; use massa_protocol_exports::{ @@ -82,6 +83,7 @@ impl PeerManagementHandler { target_out_connections: HashMap, usize)>, default_target_out_connections: usize, config: &ProtocolConfig, + massa_metrics: MassaMetrics, ) -> Self { let message_serializer = PeerManagementMessageSerializer::new(); @@ -92,6 +94,7 @@ impl PeerManagementHandler { messages_handler, target_out_connections, default_target_out_connections, + massa_metrics, ); let thread_join = std::thread::Builder::new() diff --git a/massa-protocol-worker/src/handlers/peer_handler/tester.rs b/massa-protocol-worker/src/handlers/peer_handler/tester.rs index 33f623eb9bb..d1480f24e26 100644 --- a/massa-protocol-worker/src/handlers/peer_handler/tester.rs +++ b/massa-protocol-worker/src/handlers/peer_handler/tester.rs @@ -9,6 +9,7 @@ use std::{ use crate::messages::MessagesHandler; use massa_channel::{receiver::MassaReceiver, sender::MassaSender, MassaChannel}; +use massa_metrics::MassaMetrics; use massa_models::version::VersionDeserializer; use massa_protocol_exports::{PeerConnectionType, PeerId, PeerIdDeserializer, ProtocolConfig}; use massa_serialization::{DeserializeError, Deserializer}; @@ -41,6 
+42,7 @@ impl Tester { messages_handler: MessagesHandler, target_out_connections: HashMap, usize)>, default_target_out_connections: usize, + massa_metrics: MassaMetrics, ) -> ( ( MassaSender<(PeerId, HashMap)>, @@ -69,12 +71,14 @@ impl Tester { target_out_connections.clone(), default_target_out_connections, peers_in_test.clone(), + massa_metrics.clone(), )); } ((test_sender, test_receiver), testers) } + #[allow(clippy::too_many_arguments)] pub fn tcp_handshake( messages_handler: MessagesHandler, peer_db: SharedPeerDB, @@ -83,6 +87,7 @@ impl Tester { peer_id_deserializer: PeerIdDeserializer, addr: SocketAddr, config: &ProtocolConfig, + massa_metrics: MassaMetrics, ) -> PeerNetResult { let our_version = config.version; let result = { @@ -247,7 +252,11 @@ impl Tester { last_announce: None, state: super::PeerState::HandshakeFailed, }); + massa_metrics.inc_protocol_tester_failed(); + } else { + massa_metrics.inc_protocol_tester_success(); } + if let Err(e) = socket.shutdown(std::net::Shutdown::Both) { tracing::log::error!("Failed to shutdown socket: {}", e); } @@ -268,6 +277,7 @@ impl Tester { target_out_connections: HashMap, usize)>, default_target_out_connections: usize, peers_in_test: Arc>>, + massa_metrics: MassaMetrics, ) -> Self { tracing::log::debug!("running new tester"); @@ -393,6 +403,7 @@ impl Tester { PeerIdDeserializer::new(), *addr, &protocol_config, + massa_metrics.clone(), ); peers_in_test.write().remove(addr); @@ -454,6 +465,7 @@ impl Tester { PeerIdDeserializer::new(), listener, &protocol_config, + massa_metrics.clone(), ); // let res = network_manager.try_connect( // listener, From 82c5757d6a8d081a3650de14e0e48e03daf1697c Mon Sep 17 00:00:00 2001 From: Modship Date: Wed, 26 Jul 2023 16:01:31 +0200 Subject: [PATCH 60/71] massa package version upgrade (#4279) --- .cargo/config | 2 +- Cargo.lock | 80 +++++++++++++++--------------- massa-api-exports/Cargo.toml | 2 +- massa-api/Cargo.toml | 2 +- massa-async-pool/Cargo.toml | 2 +- massa-bootstrap/Cargo.toml 
| 2 +- massa-channel/Cargo.toml | 2 +- massa-cipher/Cargo.toml | 2 +- massa-client/Cargo.toml | 2 +- massa-consensus-exports/Cargo.toml | 2 +- massa-consensus-worker/Cargo.toml | 2 +- massa-db-exports/Cargo.toml | 2 +- massa-db-worker/Cargo.toml | 2 +- massa-executed-ops/Cargo.toml | 2 +- massa-execution-exports/Cargo.toml | 2 +- massa-execution-worker/Cargo.toml | 2 +- massa-factory-exports/Cargo.toml | 2 +- massa-factory-worker/Cargo.toml | 2 +- massa-final-state/Cargo.toml | 2 +- massa-grpc/Cargo.toml | 2 +- massa-hash/Cargo.toml | 2 +- massa-ledger-exports/Cargo.toml | 2 +- massa-ledger-worker/Cargo.toml | 2 +- massa-logging/Cargo.toml | 2 +- massa-metrics/Cargo.toml | 2 +- massa-models/Cargo.toml | 2 +- massa-module-cache/Cargo.toml | 2 +- massa-node/Cargo.toml | 2 +- massa-pool-exports/Cargo.toml | 2 +- massa-pool-worker/Cargo.toml | 2 +- massa-pos-exports/Cargo.toml | 2 +- massa-pos-worker/Cargo.toml | 2 +- massa-protocol-exports/Cargo.toml | 2 +- massa-protocol-worker/Cargo.toml | 2 +- massa-sdk/Cargo.toml | 2 +- massa-serialization/Cargo.toml | 2 +- massa-signature/Cargo.toml | 2 +- massa-storage/Cargo.toml | 2 +- massa-time/Cargo.toml | 2 +- massa-versioning/Cargo.toml | 2 +- massa-wallet/Cargo.toml | 2 +- massa-xtask/Cargo.toml | 2 +- 42 files changed, 81 insertions(+), 81 deletions(-) diff --git a/.cargo/config b/.cargo/config index bc9809d889f..0132295d25c 100644 --- a/.cargo/config +++ b/.cargo/config @@ -1,2 +1,2 @@ [alias] -xtask = "run --package massa-xtask --" \ No newline at end of file +xtask = "run --package massa_xtask --" \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 945e8b6a086..7dca57df234 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2403,7 +2403,7 @@ dependencies = [ [[package]] name = "massa-client" -version = "0.24.0" +version = "0.25.0" dependencies = [ "anyhow", "atty", @@ -2431,7 +2431,7 @@ dependencies = [ [[package]] name = "massa-node" -version = "0.24.0" +version = "0.25.0" dependencies = [ "anyhow", 
"crossbeam-channel", @@ -2531,7 +2531,7 @@ dependencies = [ [[package]] name = "massa_api" -version = "0.24.0" +version = "0.25.0" dependencies = [ "async-trait", "futures", @@ -2564,7 +2564,7 @@ dependencies = [ [[package]] name = "massa_api_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "jsonrpsee", @@ -2587,7 +2587,7 @@ dependencies = [ [[package]] name = "massa_async_pool" -version = "0.24.0" +version = "0.25.0" dependencies = [ "massa-proto-rs", "massa_db_exports", @@ -2608,7 +2608,7 @@ dependencies = [ [[package]] name = "massa_bootstrap" -version = "0.24.0" +version = "0.25.0" dependencies = [ "bitvec", "crossbeam", @@ -2653,7 +2653,7 @@ dependencies = [ [[package]] name = "massa_channel" -version = "0.24.0" +version = "0.25.0" dependencies = [ "crossbeam", "prometheus", @@ -2662,7 +2662,7 @@ dependencies = [ [[package]] name = "massa_cipher" -version = "0.24.0" +version = "0.25.0" dependencies = [ "aes-gcm", "displaydoc", @@ -2674,7 +2674,7 @@ dependencies = [ [[package]] name = "massa_consensus_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "crossbeam-channel", "displaydoc", @@ -2700,7 +2700,7 @@ dependencies = [ [[package]] name = "massa_consensus_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "crossbeam", "crossbeam-channel", @@ -2729,7 +2729,7 @@ dependencies = [ [[package]] name = "massa_db_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "massa_hash", @@ -2741,7 +2741,7 @@ dependencies = [ [[package]] name = "massa_db_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "lsmtree", "massa_db_exports", @@ -2754,7 +2754,7 @@ dependencies = [ [[package]] name = "massa_executed_ops" -version = "0.24.0" +version = "0.25.0" dependencies = [ "massa_db_exports", "massa_db_worker", @@ -2769,7 +2769,7 @@ dependencies = [ [[package]] name = "massa_execution_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", 
"massa-proto-rs", @@ -2793,7 +2793,7 @@ dependencies = [ [[package]] name = "massa_execution_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "anyhow", "blake3", @@ -2839,7 +2839,7 @@ dependencies = [ [[package]] name = "massa_factory_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "massa_consensus_exports", @@ -2856,7 +2856,7 @@ dependencies = [ [[package]] name = "massa_factory_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "crossbeam-channel", "massa_channel", @@ -2880,7 +2880,7 @@ dependencies = [ [[package]] name = "massa_final_state" -version = "0.24.0" +version = "0.25.0" dependencies = [ "bs58", "displaydoc", @@ -2909,7 +2909,7 @@ dependencies = [ [[package]] name = "massa_grpc" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "futures-util", @@ -2945,7 +2945,7 @@ dependencies = [ [[package]] name = "massa_hash" -version = "0.24.0" +version = "0.25.0" dependencies = [ "blake3", "bs58", @@ -2962,7 +2962,7 @@ dependencies = [ [[package]] name = "massa_ledger_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "massa-proto-rs", @@ -2980,7 +2980,7 @@ dependencies = [ [[package]] name = "massa_ledger_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "massa_db_exports", "massa_db_worker", @@ -2996,7 +2996,7 @@ dependencies = [ [[package]] name = "massa_logging" -version = "0.24.0" +version = "0.25.0" dependencies = [ "serde_json", "tracing", @@ -3004,7 +3004,7 @@ dependencies = [ [[package]] name = "massa_metrics" -version = "0.24.0" +version = "0.25.0" dependencies = [ "hyper", "lazy_static", @@ -3016,7 +3016,7 @@ dependencies = [ [[package]] name = "massa_models" -version = "0.24.0" +version = "0.25.0" dependencies = [ "bitvec", "bs58", @@ -3042,7 +3042,7 @@ dependencies = [ [[package]] name = "massa_module_cache" -version = "0.24.0" +version = "0.25.0" dependencies = [ "anyhow", "displaydoc", @@ -3063,7 +3063,7 @@ dependencies = [ 
[[package]] name = "massa_pool_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "crossbeam-channel", "massa_execution_exports", @@ -3077,7 +3077,7 @@ dependencies = [ [[package]] name = "massa_pool_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "crossbeam-channel", "massa_execution_exports", @@ -3097,7 +3097,7 @@ dependencies = [ [[package]] name = "massa_pos_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "bitvec", "crossbeam-channel", @@ -3121,7 +3121,7 @@ dependencies = [ [[package]] name = "massa_pos_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "massa_hash", "massa_models", @@ -3135,7 +3135,7 @@ dependencies = [ [[package]] name = "massa_protocol_exports" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "massa_hash", @@ -3157,7 +3157,7 @@ dependencies = [ [[package]] name = "massa_protocol_worker" -version = "0.24.0" +version = "0.25.0" dependencies = [ "crossbeam", "massa_channel", @@ -3190,7 +3190,7 @@ dependencies = [ [[package]] name = "massa_sdk" -version = "0.24.0" +version = "0.25.0" dependencies = [ "http", "jsonrpsee", @@ -3207,7 +3207,7 @@ dependencies = [ [[package]] name = "massa_serialization" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "nom", @@ -3218,7 +3218,7 @@ dependencies = [ [[package]] name = "massa_signature" -version = "0.24.0" +version = "0.25.0" dependencies = [ "bs58", "displaydoc", @@ -3236,7 +3236,7 @@ dependencies = [ [[package]] name = "massa_storage" -version = "0.24.0" +version = "0.25.0" dependencies = [ "massa_factory_exports", "massa_metrics", @@ -3247,7 +3247,7 @@ dependencies = [ [[package]] name = "massa_time" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "massa_serialization", @@ -3259,7 +3259,7 @@ dependencies = [ [[package]] name = "massa_versioning" -version = "0.24.0" +version = "0.25.0" dependencies = [ "machine", "massa-proto-rs", @@ -3282,7 +3282,7 @@ dependencies = [ 
[[package]] name = "massa_wallet" -version = "0.24.0" +version = "0.25.0" dependencies = [ "displaydoc", "massa_cipher", @@ -3298,7 +3298,7 @@ dependencies = [ [[package]] name = "massa_xtask" -version = "0.24.0" +version = "0.25.0" dependencies = [ "massa_models", "toml_edit", diff --git a/massa-api-exports/Cargo.toml b/massa-api-exports/Cargo.toml index c6ac899c849..ada6721e03d 100644 --- a/massa-api-exports/Cargo.toml +++ b/massa-api-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_api_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-api/Cargo.toml b/massa-api/Cargo.toml index d4c0d29d167..ac184759c4e 100644 --- a/massa-api/Cargo.toml +++ b/massa-api/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_api" -version = "0.24.0" +version = "0.25.0" edition = "2021" [dependencies] diff --git a/massa-async-pool/Cargo.toml b/massa-async-pool/Cargo.toml index cebb1a96174..78151d0ffbb 100644 --- a/massa-async-pool/Cargo.toml +++ b/massa-async-pool/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_async_pool" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-bootstrap/Cargo.toml b/massa-bootstrap/Cargo.toml index 7e663a38f87..4675d5f4f7b 100644 --- a/massa-bootstrap/Cargo.toml +++ b/massa-bootstrap/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_bootstrap" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-channel/Cargo.toml b/massa-channel/Cargo.toml index d631b4f8a20..f5ec4d896aa 100644 --- a/massa-channel/Cargo.toml +++ b/massa-channel/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_channel" -version = "0.24.0" +version = "0.25.0" edition = "2021" [dependencies] diff --git a/massa-cipher/Cargo.toml b/massa-cipher/Cargo.toml index 54c71261b48..40e610342e2 100644 --- a/massa-cipher/Cargo.toml +++ b/massa-cipher/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_cipher" -version = "0.24.0" 
+version = "0.25.0" edition = "2021" [dependencies] diff --git a/massa-client/Cargo.toml b/massa-client/Cargo.toml index cc7d6dc9730..c7104415b2d 100644 --- a/massa-client/Cargo.toml +++ b/massa-client/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa-client" -version = "0.24.0" +version = "0.25.0" edition = "2021" [dependencies] diff --git a/massa-consensus-exports/Cargo.toml b/massa-consensus-exports/Cargo.toml index 388f2570ccd..ffd521d9ee0 100644 --- a/massa-consensus-exports/Cargo.toml +++ b/massa-consensus-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_consensus_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-consensus-worker/Cargo.toml b/massa-consensus-worker/Cargo.toml index 6bf5cfe6bc0..fafaf21a068 100644 --- a/massa-consensus-worker/Cargo.toml +++ b/massa-consensus-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_consensus_worker" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-db-exports/Cargo.toml b/massa-db-exports/Cargo.toml index 35d50b9fe5d..b6811ccd3f2 100644 --- a/massa-db-exports/Cargo.toml +++ b/massa-db-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_db_exports" -version = "0.24.0" +version = "0.25.0" edition = "2021" [dependencies] diff --git a/massa-db-worker/Cargo.toml b/massa-db-worker/Cargo.toml index 4303e93584e..4b25e0e8127 100644 --- a/massa-db-worker/Cargo.toml +++ b/massa-db-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_db_worker" -version = "0.24.0" +version = "0.25.0" edition = "2021" [dependencies] diff --git a/massa-executed-ops/Cargo.toml b/massa-executed-ops/Cargo.toml index 3664fa00746..f9e3a1c6c17 100644 --- a/massa-executed-ops/Cargo.toml +++ b/massa-executed-ops/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_executed_ops" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-execution-exports/Cargo.toml 
b/massa-execution-exports/Cargo.toml index 8c265967228..b53e1bb5295 100644 --- a/massa-execution-exports/Cargo.toml +++ b/massa-execution-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_execution_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-execution-worker/Cargo.toml b/massa-execution-worker/Cargo.toml index c821b882bc8..c8fdabd94ee 100644 --- a/massa-execution-worker/Cargo.toml +++ b/massa-execution-worker/Cargo.toml @@ -4,7 +4,7 @@ harness = false [package] name = "massa_execution_worker" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-factory-exports/Cargo.toml b/massa-factory-exports/Cargo.toml index 6d0ec24c44d..90a9236202f 100644 --- a/massa-factory-exports/Cargo.toml +++ b/massa-factory-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_factory_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-factory-worker/Cargo.toml b/massa-factory-worker/Cargo.toml index df65f1ba969..88ff68ab825 100644 --- a/massa-factory-worker/Cargo.toml +++ b/massa-factory-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_factory_worker" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-final-state/Cargo.toml b/massa-final-state/Cargo.toml index 3ff04a7a6a3..4ae5c043348 100644 --- a/massa-final-state/Cargo.toml +++ b/massa-final-state/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_final_state" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-grpc/Cargo.toml b/massa-grpc/Cargo.toml index 97037a37b9b..fc89ca31a1a 100644 --- a/massa-grpc/Cargo.toml +++ b/massa-grpc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_grpc" -version = "0.24.0" +version = "0.25.0" edition = "2021" description = "GRPC API for Massa Blockchain" repository = "https://github.com/massalabs/massa/" 
diff --git a/massa-hash/Cargo.toml b/massa-hash/Cargo.toml index 5783d6e4eb3..d38e04d6513 100644 --- a/massa-hash/Cargo.toml +++ b/massa-hash/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_hash" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-ledger-exports/Cargo.toml b/massa-ledger-exports/Cargo.toml index 4131a6124c4..f284ed6c3c2 100644 --- a/massa-ledger-exports/Cargo.toml +++ b/massa-ledger-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_ledger_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-ledger-worker/Cargo.toml b/massa-ledger-worker/Cargo.toml index 363387a3b57..09ce8eee1ea 100644 --- a/massa-ledger-worker/Cargo.toml +++ b/massa-ledger-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_ledger_worker" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-logging/Cargo.toml b/massa-logging/Cargo.toml index 45eba5299b4..89560f1b27b 100644 --- a/massa-logging/Cargo.toml +++ b/massa-logging/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_logging" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-metrics/Cargo.toml b/massa-metrics/Cargo.toml index 92fed20f733..28b60070f65 100644 --- a/massa-metrics/Cargo.toml +++ b/massa-metrics/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_metrics" -version = "0.24.0" +version = "0.25.0" edition = "2021" [features] diff --git a/massa-models/Cargo.toml b/massa-models/Cargo.toml index f80bfbe6e75..34860cabf8a 100644 --- a/massa-models/Cargo.toml +++ b/massa-models/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_models" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-module-cache/Cargo.toml b/massa-module-cache/Cargo.toml index 795ad664a6f..aa2c9117541 100644 --- a/massa-module-cache/Cargo.toml +++ b/massa-module-cache/Cargo.toml @@ 
-1,6 +1,6 @@ [package] name = "massa_module_cache" -version = "0.24.0" +version = "0.25.0" edition = "2021" [features] diff --git a/massa-node/Cargo.toml b/massa-node/Cargo.toml index a47ba6bc960..73f0db3d780 100644 --- a/massa-node/Cargo.toml +++ b/massa-node/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa-node" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-pool-exports/Cargo.toml b/massa-pool-exports/Cargo.toml index f3e7ea736b9..c7cb3052167 100644 --- a/massa-pool-exports/Cargo.toml +++ b/massa-pool-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_pool_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-pool-worker/Cargo.toml b/massa-pool-worker/Cargo.toml index 8a0004d7778..9fb2140bc98 100644 --- a/massa-pool-worker/Cargo.toml +++ b/massa-pool-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_pool_worker" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-pos-exports/Cargo.toml b/massa-pos-exports/Cargo.toml index 5858c94381b..3f8c5c9ba21 100644 --- a/massa-pos-exports/Cargo.toml +++ b/massa-pos-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_pos_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-pos-worker/Cargo.toml b/massa-pos-worker/Cargo.toml index 9cdd0893122..967e0808fdf 100644 --- a/massa-pos-worker/Cargo.toml +++ b/massa-pos-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_pos_worker" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-protocol-exports/Cargo.toml b/massa-protocol-exports/Cargo.toml index a7dbc0559f6..e7e0a251670 100644 --- a/massa-protocol-exports/Cargo.toml +++ b/massa-protocol-exports/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_protocol_exports" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] 
edition = "2021" diff --git a/massa-protocol-worker/Cargo.toml b/massa-protocol-worker/Cargo.toml index e8a09e3ee0f..4f95e038697 100644 --- a/massa-protocol-worker/Cargo.toml +++ b/massa-protocol-worker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_protocol_worker" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-sdk/Cargo.toml b/massa-sdk/Cargo.toml index d10537b8f9e..2e23e7f1afd 100644 --- a/massa-sdk/Cargo.toml +++ b/massa-sdk/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_sdk" -version = "0.24.0" +version = "0.25.0" edition = "2021" [dependencies] diff --git a/massa-serialization/Cargo.toml b/massa-serialization/Cargo.toml index 25d0db688f7..d09bb91444f 100644 --- a/massa-serialization/Cargo.toml +++ b/massa-serialization/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_serialization" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-signature/Cargo.toml b/massa-signature/Cargo.toml index 361885055ad..ab01515972a 100644 --- a/massa-signature/Cargo.toml +++ b/massa-signature/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_signature" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-storage/Cargo.toml b/massa-storage/Cargo.toml index 0903d5bdfbe..a69b6814f82 100644 --- a/massa-storage/Cargo.toml +++ b/massa-storage/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_storage" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-time/Cargo.toml b/massa-time/Cargo.toml index 98e8085fbe2..630b7145847 100644 --- a/massa-time/Cargo.toml +++ b/massa-time/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_time" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-versioning/Cargo.toml b/massa-versioning/Cargo.toml index d19209206ac..54be0a811ad 100644 --- a/massa-versioning/Cargo.toml +++ 
b/massa-versioning/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_versioning" -version = "0.24.0" +version = "0.25.0" authors = ["Massa Labs "] edition = "2021" diff --git a/massa-wallet/Cargo.toml b/massa-wallet/Cargo.toml index 6e048e90be6..3d79f16973a 100644 --- a/massa-wallet/Cargo.toml +++ b/massa-wallet/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_wallet" -version = "0.24.0" +version = "0.25.0" edition = "2021" [features] diff --git a/massa-xtask/Cargo.toml b/massa-xtask/Cargo.toml index 532cde4aa95..582787abf33 100644 --- a/massa-xtask/Cargo.toml +++ b/massa-xtask/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "massa_xtask" -version = "0.24.0" +version = "0.25.0" edition = "2021" [dependencies] From 73aaa9d41509d98c359d668c8cc643bde752b6a7 Mon Sep 17 00:00:00 2001 From: Moncef AOUDIA <22281426+aoudiamoncef@users.noreply.github.com> Date: Thu, 27 Jul 2023 18:59:01 +0100 Subject: [PATCH 61/71] Merge main into testnet 25 (#4294) * Use consistent instead of 'coherent' + improve versioning doc (#4198) Co-authored-by: sydhds * update get_largest_stakers -> get_stakers * update get_blocks and remove get_blocks_by_slots * use from impl for BlockStatus * fix get_blocks * update get_datastore_entries * update api.rs * refactor: it compilezzzz * refactor: enable non changed methods * feat: add private service skeleton * refactor: split public/private gRPC settings * make get_blocks working * update get_datastore_entries * update get_stakers * refactor: make private server operational * feat: first private method add_staking_secret_keys * feat: add sign messages * update get_operations * update get_operations (filter with op_type) * build: update to latest protos [skip_ci] * update get_operations * update get_sc_execution_events * update get_sc_execution_events * update massa-proto-rs (SlotDraw) * feat: add ban/unban by ids * update bootstrap white/black list and add ips to blacklist from grpc * implement remove_from_bootstrap_blacklist for private grpc * 
add_to_bootstrap_whitelist remove_from_bootstrap_whitelist * improve selection system * refactor: add uncomplete get_node_status * refactor: add complete get_node_status * refactor: add complete get_status * update tests * impl get_white_list && get_black_list in private grpc * build: upgrade to latest proto NOT COMPILE * update get_selector_draws * Update massa client with public and private grpc api (#4227) Co-authored-by: sydhds * fix write from api in bootstrap blacklist / whitelist * Remove create KeyPair grpc api (#4233) Co-authored-by: sydhds * fix: add missing field in SC event filter * refactor: to_event_filter * update streaming new_operations * fix clippy lint * refactor: new slot execution output * update massa-proto and new_operations streaming * refactor: remove KeyPair factory from api * feat: add send_operations * build: update massa-proto-rs * refactor: add max args * refactor: remove custom tokio runtime for JsonRPC * refactor: add QueryState NOT COMPLETE [skip_ci] * refactor: add missing mappings [skip_ci] * feat: naive QueryState impl * refactor: move mapping away from api * fix: POS tests * fix: POS selector boilerplate * fix: dead links in readme * Add fix disk size unbuntu * feat: Native TLS is onboard * refactor: grpc config * feat: implement execute_read_only_call * build: upgrade to jsonrpsee 0.19.0 * build: update to latest massa-proto-rs * fix: first batch * avoid clone list * improve write to file (white/black list) * fix: second batch * fix: exec read only datastore deser * refactor: add autogen Address * build: upgrade to main * update bom Signed-off-by: Litchi Pi * build: upgrade massa-proto-rs + jsonrpsee * Change version of massa-sc-runtime Signed-off-by: Litchi Pi --------- Signed-off-by: Litchi Pi Co-authored-by: Sydhds Co-authored-by: sydhds Co-authored-by: modship Co-authored-by: Damir Vodenicarevic Co-authored-by: AurelienFT Co-authored-by: bors[bot] <26634292+bors[bot]@users.noreply.github.com> Co-authored-by: Litchi Pi --- 
.github/workflows/ci.yml | 1 - Cargo.lock | 97 ++- Cargo.toml | 11 +- README.md | 4 +- massa-api/src/lib.rs | 5 +- massa-api/src/public.rs | 31 +- massa-async-pool/src/mapping_grpc.rs | 139 ++-- massa-bootstrap/src/error.rs | 2 + massa-bootstrap/src/lib.rs | 2 + massa-bootstrap/src/server.rs | 11 +- .../src/{server => }/white_black_list.rs | 87 +- massa-client/base_config/config.toml | 3 +- massa-client/src/main.rs | 26 +- massa-client/src/settings.rs | 3 +- massa-execution-exports/src/lib.rs | 3 +- massa-execution-exports/src/mapping_grpc.rs | 321 +++++++- massa-final-state/src/mapping_grpc.rs | 5 +- massa-grpc/Cargo.toml | 7 +- massa-grpc/src/api.rs | 653 --------------- massa-grpc/src/config.rs | 18 +- massa-grpc/src/error.rs | 11 + massa-grpc/src/handler.rs | 247 +++++- massa-grpc/src/lib.rs | 6 +- massa-grpc/src/private.rs | 472 +++++++++++ massa-grpc/src/public.rs | 742 ++++++++++++++++++ massa-grpc/src/server.rs | 335 +++++--- massa-grpc/src/stream/mod.rs | 2 - massa-grpc/src/stream/new_blocks.rs | 18 +- massa-grpc/src/stream/new_blocks_headers.rs | 95 --- massa-grpc/src/stream/new_endorsements.rs | 18 +- massa-grpc/src/stream/new_filled_blocks.rs | 16 +- massa-grpc/src/stream/new_operations.rs | 54 +- .../src/stream/new_slot_execution_outputs.rs | 45 +- massa-grpc/src/stream/send_blocks.rs | 63 +- massa-grpc/src/stream/send_endorsements.rs | 29 +- massa-grpc/src/stream/send_operations.rs | 37 +- massa-grpc/src/stream/tx_throughput.rs | 8 +- massa-grpc/src/tests/test.rs | 47 +- massa-ledger-exports/src/mapping_grpc.rs | 40 +- massa-models/src/mapping_grpc.rs | 229 ++++-- massa-node/Cargo.toml | 4 +- massa-node/base_config/config.toml | 148 ++-- massa-node/src/main.rs | 233 +++--- massa-node/src/settings.rs | 17 +- massa-pool-worker/src/operation_pool.rs | 37 +- .../src/tests/operation_pool_tests.rs | 103 ++- massa-pool-worker/src/tests/scenario.rs | 72 +- massa-pos-exports/src/controller_traits.rs | 32 +- massa-pos-exports/src/test_exports/mock.rs | 32 +- 
massa-pos-worker/src/controller.rs | 102 ++- massa-pos-worker/src/lib.rs | 17 +- massa-sdk/src/lib.rs | 39 +- massa-time/Cargo.toml | 1 + massa-time/src/lib.rs | 1 + massa-time/src/mapping_grpc.rs | 12 + massa-versioning/src/grpc_mapping.rs | 6 +- massa-versioning/src/versioning.rs | 1 - 57 files changed, 3150 insertions(+), 1650 deletions(-) rename massa-bootstrap/src/{server => }/white_black_list.rs (61%) delete mode 100644 massa-grpc/src/api.rs create mode 100644 massa-grpc/src/private.rs create mode 100644 massa-grpc/src/public.rs delete mode 100644 massa-grpc/src/stream/new_blocks_headers.rs create mode 100644 massa-time/src/mapping_grpc.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e3584db0d66..238dc5166a2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -116,7 +116,6 @@ jobs: # this might remove tools that are actually needed, # if set to "true" but frees about 6 GB tool-cache: false - # all of these default to true, but feel free to set to # "false" if necessary for your workflow android: true diff --git a/Cargo.lock b/Cargo.lock index 7dca57df234..2c5febccc1b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1608,9 +1608,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1391ab1f92ffcc08911957149833e682aa3fe252b9f45f966d2ef972274c97df" +checksum = "aca8bbd8e0707c1887a8bbb7e6b40e228f251ff5d62c8220a4a7a53c73aff006" dependencies = [ "aho-corasick", "bstr", @@ -1621,14 +1621,15 @@ dependencies = [ [[package]] name = "gloo-net" -version = "0.2.6" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9902a044653b26b99f7e3693a42f171312d9be8b26b5697bd1e43ad1f8a35e10" +checksum = "a66b4e3c7d9ed8d315fd6b97c8b1f74a7c6ecbbc2320e65ae7ed38b7068cc620" dependencies = [ "futures-channel", "futures-core", 
"futures-sink", "gloo-utils", + "http", "js-sys", "pin-project", "serde", @@ -1888,7 +1889,7 @@ dependencies = [ "rustls-native-certs", "tokio", "tokio-rustls", - "webpki-roots", + "webpki-roots 0.23.1", ] [[package]] @@ -2029,9 +2030,9 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1822d18e4384a5e79d94dc9e4d1239cfa9fad24e55b44d2efeff5b394c9fece4" +checksum = "e5f3783308bddc49d0218307f66a09330c106fbd792c58bac5c8dc294fdd0f98" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -2046,9 +2047,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11aa5766d5c430b89cb26a99b88f3245eb91534be8126102cea9e45ee3891b22" +checksum = "abc5630e4fa0096f00ec7b44d520701fda4504170cb85e22dca603ae5d7ad0d7" dependencies = [ "futures-channel", "futures-util", @@ -2063,14 +2064,14 @@ dependencies = [ "tokio-rustls", "tokio-util", "tracing", - "webpki-roots", + "webpki-roots 0.24.0", ] [[package]] name = "jsonrpsee-core" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64c6832a55f662b5a6ecc844db24b8b9c387453f923de863062c60ce33d62b81" +checksum = "5aaa4c4d5fb801dcc316d81f76422db259809037a86b3194ae538dd026b05ed7" dependencies = [ "anyhow", "async-lock", @@ -2096,9 +2097,9 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1705c65069729e3dccff6fd91ee431d5d31cabcf00ce68a62a2c6435ac713af9" +checksum = "aa7165efcbfbc951d180162ff28fe91b657ed81925e37a35e4a396ce12109f96" dependencies = [ "async-trait", "hyper", @@ -2115,9 +2116,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.18.2" +version = "0.19.0" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6027ac0b197ce9543097d02a290f550ce1d9432bf301524b013053c0b75cc94" +checksum = "21dc12b1d4f16a86e8c522823c4fab219c88c03eb7c924ec0501a64bf12e058b" dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", @@ -2128,9 +2129,9 @@ dependencies = [ [[package]] name = "jsonrpsee-server" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f06661d1a6b6e5b85469dc9c29acfbb9b3bb613797a6fd10a3ebb8a70754057" +checksum = "6e79d78cfd5abd8394da10753723093c3ff64391602941c9c4b1d80a3414fd53" dependencies = [ "futures-util", "hyper", @@ -2148,9 +2149,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e5bf6c75ce2a4217421154adfc65a24d2b46e77286e59bba5d9fa6544ccc8f4" +checksum = "00aa7cc87bc42e04e26c8ac3e7186142f7fd2949c763d9b6a7e64a69672d8fb2" dependencies = [ "anyhow", "beef", @@ -2162,9 +2163,9 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34e6ea7c6d862e60f8baebd946c037b70c6808a4e4e31e792a4029184e3ce13a" +checksum = "0fe953c2801356f214d3f4051f786b3d11134512a46763ee8c39a9e3fa2cc1c0" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -2173,9 +2174,9 @@ dependencies = [ [[package]] name = "jsonrpsee-ws-client" -version = "0.18.2" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a64b2589680ba1ad7863f279cd2d5083c1dc0a7c0ea959d22924553050f8ab9f" +checksum = "5c71b2597ec1c958c6d5bc94bb61b44d74eb28e69dc421731ab0035706f13882" dependencies = [ "http", "jsonrpsee-client-transport", @@ -2484,7 +2485,7 @@ dependencies = [ [[package]] name = "massa-proto-rs" version = "0.1.0" -source = 
"git+https://github.com/massalabs/massa-proto-rs?rev=df9f5b24955bd76030add0945338226309531261#df9f5b24955bd76030add0945338226309531261" +source = "git+https://github.com/massalabs/massa-proto-rs?rev=32179e2#32179e291bb1ad521d362390318d7dad80c04026" dependencies = [ "glob", "prost", @@ -2497,7 +2498,7 @@ dependencies = [ [[package]] name = "massa-sc-runtime" version = "0.10.0" -source = "git+https://github.com/massalabs/massa-sc-runtime?branch=main#ad4f65330b186c2f83440c2362c34d1faa84dc58" +source = "git+https://github.com/massalabs/massa-sc-runtime?branch=main#7ca196a04c4bfd62a96892df5832c5e338e86533" dependencies = [ "anyhow", "as-ffi-bindings", @@ -2915,8 +2916,8 @@ dependencies = [ "futures-util", "h2", "hyper", - "itertools", "massa-proto-rs", + "massa_bootstrap", "massa_channel", "massa_consensus_exports", "massa_execution_exports", @@ -2926,11 +2927,13 @@ dependencies = [ "massa_pos_exports", "massa_protocol_exports", "massa_serialization", + "massa_signature", "massa_storage", "massa_time", "massa_versioning", "massa_wallet", "num", + "parking_lot", "serde", "thiserror", "tokio", @@ -3250,6 +3253,7 @@ name = "massa_time" version = "0.25.0" dependencies = [ "displaydoc", + "massa-proto-rs", "massa_serialization", "nom", "serde", @@ -4558,7 +4562,7 @@ checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" dependencies = [ "log", "ring", - "rustls-webpki 0.101.1", + "rustls-webpki 0.101.2", "sct", ] @@ -4595,9 +4599,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.101.1" +version = "0.101.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15f36a6828982f422756984e47912a7a51dcbc2a197aa791158f8ca61cd8204e" +checksum = "513722fd73ad80a71f72b61009ea1b584bcfa1483ca93949c8f290298837fa59" dependencies = [ "ring", "untrusted", @@ -4731,9 +4735,9 @@ checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" [[package]] name = "serde" -version = "1.0.175" +version = "1.0.176" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d25439cd7397d044e2748a6fe2432b5e85db703d6d097bd014b3c0ad1ebff0b" +checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc" dependencies = [ "serde_derive", ] @@ -4751,9 +4755,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.175" +version = "1.0.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b23f7ade6f110613c0d63858ddb8b94c1041f550eab58a16b371bdf2c9c80ab4" +checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" dependencies = [ "proc-macro2 1.0.66", "quote 1.0.32", @@ -4762,9 +4766,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.103" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" +checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" dependencies = [ "itoa", "ryu", @@ -5853,9 +5857,9 @@ checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-encoder" -version = "0.31.0" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06a3d1b4a575ffb873679402b2aedb3117555eb65c27b1b86c8a91e574bc2a2a" +checksum = "41763f20eafed1399fff1afb466496d3a959f58241436cfdc17e3f5ca954de16" dependencies = [ "leb128", ] @@ -6039,9 +6043,9 @@ dependencies = [ [[package]] name = "wast" -version = "62.0.0" +version = "62.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f7ee878019d69436895f019b65f62c33da63595d8e857cbdc87c13ecb29a32" +checksum = "b8ae06f09dbe377b889fbd620ff8fa21e1d49d1d9d364983c0cdbf9870cb9f1f" dependencies = [ "leb128", "memchr", @@ -6051,9 +6055,9 @@ dependencies = [ [[package]] name = "wat" -version = "1.0.68" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"295572bf24aa5b685a971a83ad3e8b6e684aaad8a9be24bc7bf59bed84cc1c08" +checksum = "842e15861d203fb4a96d314b0751cdeaf0f6f8b35e8d81d2953af2af5e44e637" dependencies = [ "wast", ] @@ -6077,6 +6081,15 @@ dependencies = [ "rustls-webpki 0.100.1", ] +[[package]] +name = "webpki-roots" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" +dependencies = [ + "rustls-webpki 0.101.2", +] + [[package]] name = "which" version = "4.4.0" diff --git a/Cargo.toml b/Cargo.toml index 148c03aad28..0d78987ddf6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -58,7 +58,6 @@ opt-level = 3 # Speed-up the CI # * sandbox: for testing purpose, genesis timestamps is set as now + 9 seconds. # The saved snapshot can then be used to restart the network from the snapshot. [workspace.dependencies] - # Internal packages massa_api = { path = "./massa-api" } massa_api_exports = { path = "./massa-api-exports" } @@ -99,8 +98,8 @@ massa_versioning = { path = "./massa-versioning" } massa_wallet = { path = "./massa-wallet" } # Massa projects dependencies -massa-proto-rs = { git = "https://github.com/massalabs/massa-proto-rs", "rev" = "df9f5b24955bd76030add0945338226309531261" } -massa-sc-runtime = { git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "main" } +massa-proto-rs = {git = "https://github.com/massalabs/massa-proto-rs", "rev" = "32179e2"} +massa-sc-runtime = {git = "https://github.com/massalabs/massa-sc-runtime", "branch" = "main"} peernet = { git = "https://github.com/massalabs/PeerNet", "branch" = "main" } # Common dependencies @@ -135,9 +134,9 @@ http = "0.2.8" humantime = "2.1.0" hyper = "0.14.25" itertools = "0.10" -jsonrpsee = "0.18.2" -jsonrpsee-http-client = "0.18.2" -jsonrpsee-ws-client = "0.18.2" +jsonrpsee = "0.19.0" +jsonrpsee-http-client = "0.19.0" +jsonrpsee-ws-client = "0.19.0" lazy_static = "1.4.0" libsecp256k1 = "0.7.1" lsmtree = "=0.1.1" diff --git a/README.md 
b/README.md index c76e9565fa6..7f9f1bcfe47 100644 --- a/README.md +++ b/README.md @@ -30,9 +30,9 @@ Here is a list of tools to easily build applications on the Massa blockchain: - [JS Client library](https://github.com/massalabs/massa-web3) to connect to the Massa blockchain from your applications. - [AssemblyScript](https://github.com/massalabs/massa-as-sdk) SDKs to write smart contracts. - [Examples of applications](https://github.com/massalabs/massa-sc-examples) built on Massa. -- [Explorer](test.massa.net). +- [Explorer](https://test.massa.net). - [Interactive API specification](https://playground.open-rpc.org/?schemaUrl=https://test.massa.net/api/v2&uiSchema\[appBar\]\[ui:input\]=false&uiSchema\[appBar\]\[ui:inputPlaceholder\]=Enter+Massa+JSON-RPC+server+URL&uiSchema\[appBar\]\[ui:logoUrl\]=https://massa.net/favicons/favicon.ico&uiSchema\[appBar\]\[ui:splitView\]=false&uiSchema\[appBar\]\[ui:darkMode\]=false&uiSchema\[appBar\]\[ui:title\]=Massa&uiSchema\[appBar\]\[ui:examplesDropdown\]=false&uiSchema\[methods\]\[ui:defaultExpanded\]=false&uiSchema\[methods\]\[ui:methodPlugins\]=true&uiSchema\[params\]\[ui:defaultExpanded\]=false). -- [Lots of documentation](docs.massa.net), from [web3 development](https://docs.massa.net/en/latest/web3-dev/smart-contracts.html) +- [Lots of documentation](https://docs.massa.net), from [web3 development](https://docs.massa.net/en/latest/web3-dev/smart-contracts.html) to [Massa's architecture](https://docs.massa.net/en/latest/general-doc/architecture.html). 
## Join the Testnet diff --git a/massa-api/src/lib.rs b/massa-api/src/lib.rs index 3a8e1c02208..926cedf522d 100644 --- a/massa-api/src/lib.rs +++ b/massa-api/src/lib.rs @@ -160,8 +160,7 @@ async fn serve( } else { BatchRequestConfig::Disabled }) - .ping_interval(api_config.ping_interval.to_duration()) - .custom_tokio_runtime(tokio::runtime::Handle::current()); + .ping_interval(api_config.ping_interval.to_duration()); if api_config.enable_http && !api_config.enable_ws { server_builder = server_builder.http_only(); @@ -186,7 +185,7 @@ async fn serve( .await .expect("failed to build server"); - let server_handler = server.start(api).expect("server start failed"); + let server_handler = server.start(api); let stop_handler = StopHandle { server_handler }; Ok(stop_handler) diff --git a/massa-api/src/public.rs b/massa-api/src/public.rs index 179f4e9ba4b..a4c08b69f85 100644 --- a/massa-api/src/public.rs +++ b/massa-api/src/public.rs @@ -43,7 +43,7 @@ use massa_models::{ output_event::SCOutputEvent, prehash::{PreHashMap, PreHashSet}, secure_share::SecureShareDeserializer, - slot::Slot, + slot::{IndexedSlot, Slot}, timeslots, timeslots::{get_latest_block_slot_at_timestamp, time_range_to_slot_range}, version::Version, @@ -839,13 +839,34 @@ impl MassaRpcServer for API { .saturating_add(self.0.api_settings.draw_lookahead_period_count), cur_slot.thread, ); + let selections = self + .0 + .selector_controller + .get_available_selections_in_range( + cur_slot..=slot_end, + Some(&addresses.iter().copied().collect()), + ) + .unwrap_or_default(); + addresses .iter() .map(|addr| { - self.0 - .selector_controller - .get_address_selections(addr, cur_slot, slot_end) - .unwrap_or_default() + let mut producer_slots = Vec::new(); + let mut endorser_slots = Vec::new(); + for (selection_slot, selection) in &selections { + if selection.producer == *addr { + producer_slots.push(*selection_slot); + } + for (index, endorser) in selection.endorsements.iter().enumerate() { + if endorser == addr { + 
endorser_slots.push(IndexedSlot { + slot: *selection_slot, + index, + }); + } + } + } + (producer_slots, endorser_slots) }) .collect::>() }; diff --git a/massa-async-pool/src/mapping_grpc.rs b/massa-async-pool/src/mapping_grpc.rs index 7ed29cf2361..109ea77c54a 100644 --- a/massa-async-pool/src/mapping_grpc.rs +++ b/massa-async-pool/src/mapping_grpc.rs @@ -13,155 +13,160 @@ impl From for grpc_model::AsyncMessage { destination: value.destination.to_string(), handler: value.handler.to_string(), max_gas: value.max_gas, - fee: value.fee.to_raw(), - coins: value.coins.to_raw(), + fee: Some(value.fee.into()), + coins: Some(value.coins.into()), validity_start: Some(value.validity_start.into()), validity_end: Some(value.validity_start.into()), data: value.data, trigger: value.trigger.map(|trigger| trigger.into()), can_be_executed: value.can_be_executed, - hash: "".to_string(), } } } +//TODO to be checked, use functions impl From for grpc_model::AsyncMessageUpdate { fn from(value: AsyncMessageUpdate) -> Self { grpc_model::AsyncMessageUpdate { emission_slot: match value.emission_slot { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepSlot { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.into()), + change: Some(grpc_model::set_or_keep_slot::Change::Set(value.into())), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepSlot { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_slot::Change::Keep( + grpc_model::Empty {}, + )), }), }, emission_index: match value.emission_index { - SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepFixed64 { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value), + SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Set(value)), }), - SetOrKeep::Keep => Some(grpc_model::SetOrKeepFixed64 { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + SetOrKeep::Keep 
=> Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Keep( + grpc_model::Empty {}, + )), }), }, sender: match value.sender { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.to_string()), + change: Some(grpc_model::set_or_keep_string::Change::Set( + value.to_string(), + )), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_string::Change::Keep( + grpc_model::Empty {}, + )), }), }, destination: match value.destination { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.to_string()), + change: Some(grpc_model::set_or_keep_string::Change::Set( + value.to_string(), + )), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_string::Change::Keep( + grpc_model::Empty {}, + )), }), }, handler: match value.handler { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value), + change: Some(grpc_model::set_or_keep_string::Change::Set(value)), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_string::Change::Keep( + grpc_model::Empty {}, + )), }), }, max_gas: match value.max_gas { - SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepFixed64 { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value), + SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Set(value)), }), - SetOrKeep::Keep => Some(grpc_model::SetOrKeepFixed64 { - r#type: 
grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + SetOrKeep::Keep => Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Keep( + grpc_model::Empty {}, + )), }), }, + //TODO check Amount usage fee: match value.fee { - SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepFixed64 { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.to_raw()), + SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Set(value.to_raw())), }), - SetOrKeep::Keep => Some(grpc_model::SetOrKeepFixed64 { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + SetOrKeep::Keep => Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Keep( + grpc_model::Empty {}, + )), }), }, + //TODO check Amount usage coins: match value.coins { - SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepFixed64 { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.to_raw()), + SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Set(value.to_raw())), }), - SetOrKeep::Keep => Some(grpc_model::SetOrKeepFixed64 { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + SetOrKeep::Keep => Some(grpc_model::SetOrKeepUint64 { + change: Some(grpc_model::set_or_keep_uint64::Change::Keep( + grpc_model::Empty {}, + )), }), }, validity_start: match value.validity_start { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepSlot { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.into()), + change: Some(grpc_model::set_or_keep_slot::Change::Set(value.into())), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepSlot { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_slot::Change::Keep( + grpc_model::Empty {}, + )), }), }, validity_end: match value.validity_end 
{ SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepSlot { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value.into()), + change: Some(grpc_model::set_or_keep_slot::Change::Set(value.into())), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepSlot { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_slot::Change::Keep( + grpc_model::Empty {}, + )), }), }, data: match value.data { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepBytes { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value), + change: Some(grpc_model::set_or_keep_bytes::Change::Set(value)), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepBytes { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_bytes::Change::Keep( + grpc_model::Empty {}, + )), }), }, + //TODO remove unwrap trigger: match value.trigger { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepAsyncMessageTrigger { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: value.map(|trigger| trigger.into()), + change: Some(grpc_model::set_or_keep_async_message_trigger::Change::Set( + value.map(|trigger| trigger.into()).unwrap(), + )), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepAsyncMessageTrigger { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_async_message_trigger::Change::Keep( + grpc_model::Empty {}, + )), }), }, can_be_executed: match value.can_be_executed { SetOrKeep::Set(value) => Some(grpc_model::SetOrKeepBool { - r#type: grpc_model::AsyncPoolChangeType::Set as i32, - value: Some(value), + change: Some(grpc_model::set_or_keep_bool::Change::Set(value)), }), SetOrKeep::Keep => Some(grpc_model::SetOrKeepBool { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, + change: Some(grpc_model::set_or_keep_bool::Change::Keep( + grpc_model::Empty {}, + )), }), }, - hash: 
Some(grpc_model::SetOrKeepString { - r#type: grpc_model::AsyncPoolChangeType::Delete as i32, - value: None, - }), } } } diff --git a/massa-bootstrap/src/error.rs b/massa-bootstrap/src/error.rs index f0a26f7c836..439196373db 100644 --- a/massa-bootstrap/src/error.rs +++ b/massa-bootstrap/src/error.rs @@ -25,6 +25,8 @@ pub enum BootstrapError { GeneralError(String), /// deserialization error: {0} DeserializeError(String), + /// serialization error: {0} + SerializationError(String), /// models error: {0} ModelsError(#[from] massa_models::error::ModelsError), /// serialize error: {0} diff --git a/massa-bootstrap/src/lib.rs b/massa-bootstrap/src/lib.rs index 43b986052e9..ff0b7958f20 100644 --- a/massa-bootstrap/src/lib.rs +++ b/massa-bootstrap/src/lib.rs @@ -30,6 +30,8 @@ mod messages; mod server; mod settings; mod tools; +/// white/black list +pub mod white_black_list; pub use client::{get_state, DefaultConnector}; pub use listener::BootstrapTcpListener; diff --git a/massa-bootstrap/src/server.rs b/massa-bootstrap/src/server.rs index 5acc81622a9..23aab346383 100644 --- a/massa-bootstrap/src/server.rs +++ b/massa-bootstrap/src/server.rs @@ -24,7 +24,6 @@ //! 4. Checks if the client has attempted too recently //! 5. All checks have passed: spawn a thread on which to run the bootstrap session //! 
This thread creates a new tokio runtime, and runs it with `block_on` -mod white_black_list; use crossbeam::channel::tick; use humantime::format_duration; @@ -51,7 +50,6 @@ use std::{ time::{Duration, Instant}, }; use tracing::{debug, error, info, warn}; -use white_black_list::*; #[cfg(not(test))] use crate::listener::BootstrapTcpListener; @@ -62,6 +60,7 @@ use crate::{ error::BootstrapError, listener::{BootstrapListenerStopHandle, PollEvent}, messages::{BootstrapClientMessage, BootstrapServerMessage}, + white_black_list::SharedWhiteBlackList, BootstrapConfig, }; /// Specifies a common interface that can be used by standard, or mockers @@ -80,6 +79,8 @@ pub struct BootstrapManager { main_handle: thread::JoinHandle>, listener_stopper: BootstrapListenerStopHandle, update_stopper_tx: crossbeam::channel::Sender<()>, + /// shared white/black list + pub white_black_list: SharedWhiteBlackList<'static>, } impl BootstrapManager { @@ -90,12 +91,14 @@ impl BootstrapManager { main_handle: thread::JoinHandle>, update_stopper_tx: crossbeam::channel::Sender<()>, listener_stopper: BootstrapListenerStopHandle, + white_black_list: SharedWhiteBlackList<'static>, ) -> Self { Self { update_handle, main_handle, update_stopper_tx, listener_stopper, + white_black_list, } } @@ -170,6 +173,7 @@ pub fn start_bootstrap_server( }) .expect("in `start_bootstrap_server`, OS failed to spawn list-updater thread"); + let w_b_list = white_black_list.clone(); let main_handle = thread::Builder::new() .name("bs-main-loop".to_string()) .spawn(move || { @@ -178,7 +182,7 @@ pub fn start_bootstrap_server( protocol_controller, final_state, ev_poller, - white_black_list, + white_black_list: w_b_list, keypair, version, ip_hist_map: HashMap::with_capacity(config.ip_list_max_size), @@ -195,6 +199,7 @@ pub fn start_bootstrap_server( main_handle, update_stopper_tx, listener_stopper, + white_black_list, )) } diff --git a/massa-bootstrap/src/server/white_black_list.rs b/massa-bootstrap/src/white_black_list.rs 
similarity index 61% rename from massa-bootstrap/src/server/white_black_list.rs rename to massa-bootstrap/src/white_black_list.rs index 3afb1cef57f..48fa9f56c63 100644 --- a/massa-bootstrap/src/server/white_black_list.rs +++ b/massa-bootstrap/src/white_black_list.rs @@ -16,7 +16,7 @@ use crate::tools::normalize_ip; /// A wrapper around the white/black lists that allows efficient sharing between threads // TODO: don't clone the path-bufs... #[derive(Clone)] -pub(crate) struct SharedWhiteBlackList<'a> { +pub struct SharedWhiteBlackList<'a> { inner: Arc>, white_path: Cow<'a, Path>, black_path: Cow<'a, Path>, @@ -35,14 +35,89 @@ impl SharedWhiteBlackList<'_> { }) } + /// get the white list + pub fn get_white_list(&self) -> Option> { + self.inner.read().white_list.clone() + } + + /// get the black list + pub fn get_black_list(&self) -> Option> { + self.inner.read().black_list.clone() + } + + /// Add IP address to the black list + pub fn add_ips_to_blacklist(&self, ips: Vec) -> Result<(), BootstrapError> { + let mut write_lock = self.inner.write(); + if let Some(black_list) = &mut write_lock.black_list { + black_list.extend(ips); + } else { + write_lock.black_list = Some(HashSet::from_iter(ips)); + }; + self.write_to_file(&self.black_path, write_lock.black_list.as_ref().unwrap())?; + Ok(()) + } + + /// Remove IPs address from the black list + pub fn remove_ips_from_blacklist(&self, ips: Vec) -> Result<(), BootstrapError> { + let mut write_lock = self.inner.write(); + if let Some(black_list) = &mut write_lock.black_list { + for ip in ips { + black_list.remove(&ip); + } + self.write_to_file(&self.black_path, black_list)?; + } + Ok(()) + } + + /// Add IP address to the white list + pub fn add_ips_to_whitelist(&self, ips: Vec) -> Result<(), BootstrapError> { + let mut write_lock = self.inner.write(); + if let Some(white_list) = &mut write_lock.white_list { + white_list.extend(ips); + } else { + write_lock.white_list = Some(HashSet::from_iter(ips)); + }; + 
self.write_to_file(&self.white_path, write_lock.white_list.as_ref().unwrap())?; + Ok(()) + } + + /// Remove IPs address from the white list + pub fn remove_ips_from_whitelist(&self, ips: Vec) -> Result<(), BootstrapError> { + let mut write_lock = self.inner.write(); + if let Some(white_list) = &mut write_lock.white_list { + for ip in ips { + white_list.remove(&ip); + } + self.write_to_file(&self.white_path, white_list)?; + } + Ok(()) + } + + /// write list to file + fn write_to_file( + &self, + file_path: &Path, + data: &HashSet, + ) -> Result<(), BootstrapError> { + let list = serde_json::to_string(data).map_err(|e| { + warn!(error = ?e, "failed to serialize list"); + BootstrapError::SerializationError(e.to_string()) + })?; + std::fs::write(file_path, list).map_err(|e| { + warn!(error = ?e, "failed to write list to file"); + BootstrapError::IoError(e) + })?; + Ok(()) + } + /// Checks if the white/black list is up to date with a read-lock /// Creates a new list, and replaces the old one in a write-lock pub(crate) fn update(&mut self) -> Result<(), BootstrapError> { let read_lock = self.inner.read(); - let (new_white, new_black) = + let (new_white_file, new_black_file) = WhiteBlackListInner::update_list(&self.white_path, &self.black_path)?; - let white_delta = new_white != read_lock.white_list; - let black_delta = new_black != read_lock.black_list; + let white_delta = new_white_file != read_lock.white_list; + let black_delta = new_black_file != read_lock.black_list; if white_delta || black_delta { // Ideally this scope would be atomic let mut mut_inner = { @@ -52,11 +127,11 @@ impl SharedWhiteBlackList<'_> { if white_delta { info!("whitelist has updated !"); - mut_inner.white_list = new_white; + mut_inner.white_list = new_white_file; } if black_delta { info!("blacklist has updated !"); - mut_inner.black_list = new_black; + mut_inner.black_list = new_black_file; } } Ok(()) diff --git a/massa-client/base_config/config.toml b/massa-client/base_config/config.toml index 
7de719c6990..03942e8fe4e 100644 --- a/massa-client/base_config/config.toml +++ b/massa-client/base_config/config.toml @@ -7,7 +7,8 @@ timeout = 1000 ip = "127.0.0.1" private_port = 33034 public_port = 33035 -grpc_port = 33037 +grpc_public_port = 33037 +grpc_private_port = 33038 [client] # maximum size in bytes of a request diff --git a/massa-client/src/main.rs b/massa-client/src/main.rs index 4bd72599f33..829d8f71dd9 100644 --- a/massa-client/src/main.rs +++ b/massa-client/src/main.rs @@ -33,9 +33,12 @@ struct Args { /// Port to listen on (Massa private API). #[structopt(long)] private_port: Option, - /// Port to listen on (Massa GRPC API). + /// Port to listen on (Massa GRPC Public API). #[structopt(long)] - grpc_port: Option, + grpc_public_port: Option, + /// Port to listen on (Massa GRPC Private API). + #[structopt(long)] + grpc_private_port: Option, /// Address to listen on #[structopt(long)] ip: Option, @@ -129,9 +132,13 @@ async fn run(args: Args) -> Result<()> { Some(private_port) => private_port, None => settings.default_node.private_port, }; - let grpc_port = match args.grpc_port { + let grpc_port = match args.grpc_public_port { + Some(grpc_port) => grpc_port, + None => settings.default_node.grpc_public_port, + }; + let grpc_priv_port = match args.grpc_private_port { Some(grpc_port) => grpc_port, - None => settings.default_node.grpc_port, + None => settings.default_node.grpc_private_port, }; // Setup panic handlers, @@ -145,8 +152,15 @@ async fn run(args: Args) -> Result<()> { })); // Note: grpc handler requires a mut handler - let mut client = - Client::new(address, public_port, private_port, grpc_port, &http_config).await?; + let mut client = Client::new( + address, + public_port, + private_port, + grpc_port, + grpc_priv_port, + &http_config, + ) + .await?; if atty::is(Stream::Stdout) && args.command == Command::help && !args.json { // Interactive mode repl::run(&mut client, &args.wallet, args.password).await?; diff --git a/massa-client/src/settings.rs 
b/massa-client/src/settings.rs index 53c8fbe9ee9..b621629302d 100644 --- a/massa-client/src/settings.rs +++ b/massa-client/src/settings.rs @@ -24,7 +24,8 @@ pub struct DefaultNode { pub ip: IpAddr, pub private_port: u16, pub public_port: u16, - pub grpc_port: u16, + pub grpc_public_port: u16, + pub grpc_private_port: u16, } /// Client settings diff --git a/massa-execution-exports/src/lib.rs b/massa-execution-exports/src/lib.rs index 367a2097a26..589eb3caa47 100644 --- a/massa-execution-exports/src/lib.rs +++ b/massa-execution-exports/src/lib.rs @@ -48,7 +48,8 @@ mod channels; mod controller_traits; mod error; mod event_store; -mod mapping_grpc; +/// mapping grpc +pub mod mapping_grpc; mod settings; mod types; diff --git a/massa-execution-exports/src/mapping_grpc.rs b/massa-execution-exports/src/mapping_grpc.rs index 382501d625e..966bee9b88b 100644 --- a/massa-execution-exports/src/mapping_grpc.rs +++ b/massa-execution-exports/src/mapping_grpc.rs @@ -1,20 +1,323 @@ // Copyright (c) 2023 MASSA LABS -use crate::{ExecutionOutput, SlotExecutionOutput}; +use std::str::FromStr; + +use crate::{ + ExecutionOutput, ExecutionQueryCycleInfos, ExecutionQueryError, ExecutionQueryExecutionStatus, + ExecutionQueryRequestItem, ExecutionQueryResponseItem, ExecutionQueryStakerInfo, + SlotExecutionOutput, +}; +use grpc_api::execution_query_request_item as exec; +use massa_models::address::Address; +use massa_models::error::ModelsError; +use massa_models::execution::EventFilter; +use massa_models::mapping_grpc::to_denunciation_index; +use massa_models::operation::OperationId; +use massa_models::prehash::{CapacityAllocator, PreHashSet}; +use massa_proto_rs::massa::api::v1 as grpc_api; use massa_proto_rs::massa::model::v1 as grpc_model; +/// Convert a `grpc_api::ScExecutionEventsRequest` to a `ScExecutionEventsRequest` +pub fn to_querystate_filter( + query: grpc_api::ExecutionQueryRequestItem, +) -> Result { + if let Some(item) = query.request_item { + match item { + 
exec::RequestItem::AddressExistsCandidate(value) => { + Ok(ExecutionQueryRequestItem::AddressExistsCandidate( + Address::from_str(&value.address)?, + )) + } + exec::RequestItem::AddressExistsFinal(value) => Ok( + ExecutionQueryRequestItem::AddressExistsFinal(Address::from_str(&value.address)?), + ), + exec::RequestItem::AddressBalanceCandidate(value) => { + Ok(ExecutionQueryRequestItem::AddressBalanceCandidate( + Address::from_str(&value.address)?, + )) + } + exec::RequestItem::AddressBalanceFinal(value) => Ok( + ExecutionQueryRequestItem::AddressBalanceFinal(Address::from_str(&value.address)?), + ), + exec::RequestItem::AddressBytecodeCandidate(value) => { + Ok(ExecutionQueryRequestItem::AddressBytecodeCandidate( + Address::from_str(&value.address)?, + )) + } + exec::RequestItem::AddressBytecodeFinal(value) => { + Ok(ExecutionQueryRequestItem::AddressBytecodeCandidate( + Address::from_str(&value.address)?, + )) + } + exec::RequestItem::AddressDatastoreKeysCandidate(value) => { + Ok(ExecutionQueryRequestItem::AddressBytecodeCandidate( + Address::from_str(&value.address)?, + )) + } + exec::RequestItem::AddressDatastoreKeysFinal(value) => { + Ok(ExecutionQueryRequestItem::AddressDatastoreKeysFinal { + addr: Address::from_str(&value.address)?, + prefix: value.prefix, + }) + } + exec::RequestItem::AddressDatastoreValueCandidate(value) => { + Ok(ExecutionQueryRequestItem::AddressDatastoreValueCandidate { + addr: Address::from_str(&value.address)?, + key: value.key, + }) + } + exec::RequestItem::AddressDatastoreValueFinal(value) => { + Ok(ExecutionQueryRequestItem::AddressDatastoreValueFinal { + addr: Address::from_str(&value.address)?, + key: value.key, + }) + } + exec::RequestItem::OpExecutionStatusCandidate(value) => { + Ok(ExecutionQueryRequestItem::OpExecutionStatusCandidate( + OperationId::from_str(&value.operation_id)?, + )) + } + exec::RequestItem::OpExecutionStatusFinal(value) => { + Ok(ExecutionQueryRequestItem::OpExecutionStatusFinal( + 
OperationId::from_str(&value.operation_id)?, + )) + } + //TODO to be improved + exec::RequestItem::DenunciationExecutionStatusCandidate(value) => Ok( + ExecutionQueryRequestItem::DenunciationExecutionStatusCandidate( + to_denunciation_index(value.denunciation_index.ok_or_else(|| { + ModelsError::ErrorRaised("no denounciation index found".to_string()) + })?)?, + ), + ), + //TODO to be improved + exec::RequestItem::DenunciationExecutionStatusFinal(value) => { + Ok(ExecutionQueryRequestItem::DenunciationExecutionStatusFinal( + to_denunciation_index(value.denunciation_index.ok_or_else(|| { + ModelsError::ErrorRaised("no denounciation index found".to_string()) + })?)?, + )) + } + exec::RequestItem::AddressRollsCandidate(value) => { + Ok(ExecutionQueryRequestItem::AddressRollsCandidate( + Address::from_str(&value.address)?, + )) + } + exec::RequestItem::AddressRollsFinal(value) => Ok( + ExecutionQueryRequestItem::AddressRollsFinal(Address::from_str(&value.address)?), + ), + exec::RequestItem::AddressDeferredCreditsCandidate(value) => { + Ok(ExecutionQueryRequestItem::AddressDeferredCreditsCandidate( + Address::from_str(&value.address)?, + )) + } + exec::RequestItem::AddressDeferredCreditsFinal(value) => { + Ok(ExecutionQueryRequestItem::AddressDeferredCreditsFinal( + Address::from_str(&value.address)?, + )) + } + //TODO to be checked + exec::RequestItem::CycleInfos(value) => { + let addreses = value + .restrict_to_addresses + .into_iter() + .map(|address| Address::from_str(&address)) + .collect::, _>>()?; + let mut addresses_set = PreHashSet::with_capacity(addreses.len()); + addresses_set.extend(addreses); + Ok(ExecutionQueryRequestItem::CycleInfos { + cycle: value.cycle, + restrict_to_addresses: Some(addresses_set), + }) + } + exec::RequestItem::Events(value) => { + let event_filter = to_event_filter(value.filters)?; + Ok(ExecutionQueryRequestItem::Events(event_filter)) + } + } + } else { + Err(ModelsError::ErrorRaised("no filter provided".to_string())) + } +} + +/// 
Convert a vector of `grpc_model::ScExecutionEventsFilter` to a `EventFilter` +pub fn to_event_filter( + sce_filters: Vec, +) -> Result { + let mut event_filter = EventFilter::default(); + for query in sce_filters { + if let Some(filter) = query.filter { + match filter { + grpc_api::sc_execution_events_filter::Filter::SlotRange(slot_range) => { + event_filter.start = slot_range.start_slot.map(|slot| slot.into()); + event_filter.end = slot_range.end_slot.map(|slot| slot.into()); + } + grpc_api::sc_execution_events_filter::Filter::CallerAddress(caller_address) => { + event_filter.original_caller_address = + Some(Address::from_str(&caller_address)?); + } + grpc_api::sc_execution_events_filter::Filter::EmitterAddress(emitter_address) => { + event_filter.emitter_address = Some(Address::from_str(&emitter_address)?); + } + grpc_api::sc_execution_events_filter::Filter::OriginalOperationId(operation_id) => { + event_filter.original_operation_id = + Some(OperationId::from_str(&operation_id)?); + } + grpc_api::sc_execution_events_filter::Filter::IsFailure(is_failure) => { + event_filter.is_error = Some(is_failure); + } + grpc_api::sc_execution_events_filter::Filter::Status(status) => { + // See grpc_model::ScExecutionEventStatus + match status { + 1 => event_filter.is_final = Some(true), + 2 => event_filter.is_final = Some(false), + _ => event_filter.is_final = None, + } + } + } + } + } + + Ok(event_filter) +} + +/// Converts a `ExecutionQueryResponse` to a `grpc_api::ExecutionQueryResponse` +pub fn to_execution_query_response( + value: Result, +) -> grpc_api::ExecutionQueryResponse { + match value { + Ok(item) => grpc_api::ExecutionQueryResponse { + response: Some(grpc_api::execution_query_response::Response::Result( + to_execution_query_result(item), + )), + }, + Err(err) => grpc_api::ExecutionQueryResponse { + response: Some(grpc_api::execution_query_response::Response::Error( + err.into(), + )), + }, + } +} + +// Convertss a `ExecutionQueryResponseItem` to a 
`grpc_api::ExecutionQueryResponseItem` +fn to_execution_query_result( + value: ExecutionQueryResponseItem, +) -> grpc_api::ExecutionQueryResponseItem { + let response_item = match value { + ExecutionQueryResponseItem::Boolean(result) => { + grpc_api::execution_query_response_item::ResponseItem::Boolean(result) + } + ExecutionQueryResponseItem::RollCount(result) => { + grpc_api::execution_query_response_item::ResponseItem::RollCount(result) + } + ExecutionQueryResponseItem::Amount(result) => { + grpc_api::execution_query_response_item::ResponseItem::Amount(result.into()) + } + ExecutionQueryResponseItem::Bytecode(result) => { + grpc_api::execution_query_response_item::ResponseItem::Bytes(result.0) + } + ExecutionQueryResponseItem::DatastoreValue(result) => { + grpc_api::execution_query_response_item::ResponseItem::Bytes(result) + } + ExecutionQueryResponseItem::KeyList(result) => { + grpc_api::execution_query_response_item::ResponseItem::VecBytes( + grpc_model::ArrayOfBytesWrapper { + items: result.into_iter().collect(), + }, + ) + } + ExecutionQueryResponseItem::DeferredCredits(result) => { + grpc_api::execution_query_response_item::ResponseItem::DeferredCredits( + grpc_api::DeferredCreditsEntryWrapper { + entries: result + .into_iter() + .map(|(slot, amount)| grpc_api::DeferredCreditsEntry { + slot: Some(slot.into()), + amount: Some(amount.into()), + }) + .collect(), + }, + ) + } + ExecutionQueryResponseItem::ExecutionStatus(result) => match result { + ExecutionQueryExecutionStatus::AlreadyExecutedWithSuccess => { + grpc_api::execution_query_response_item::ResponseItem::ExecutionStatus( + grpc_api::ExecutionQueryExecutionStatus::AlreadyExecutedWithSuccess as i32, + ) + } + ExecutionQueryExecutionStatus::AlreadyExecutedWithFailure => { + grpc_api::execution_query_response_item::ResponseItem::ExecutionStatus( + grpc_api::ExecutionQueryExecutionStatus::AlreadyExecutedWithFailure as i32, + ) + } + ExecutionQueryExecutionStatus::ExecutableOrExpired => { + 
grpc_api::execution_query_response_item::ResponseItem::ExecutionStatus( + grpc_api::ExecutionQueryExecutionStatus::ExecutableOrExpired as i32, + ) + } + }, + ExecutionQueryResponseItem::CycleInfos(result) => { + grpc_api::execution_query_response_item::ResponseItem::CycleInfos(to_cycle_info(result)) + } + ExecutionQueryResponseItem::Events(result) => { + grpc_api::execution_query_response_item::ResponseItem::Events( + grpc_api::ScOutputEventsWrapper { + events: result.into_iter().map(|event| event.into()).collect(), + }, + ) + } + }; + + grpc_api::ExecutionQueryResponseItem { + response_item: Some(response_item), + } +} + +// Convertss a `ExecutionQueryCycleInfos` to a `grpc_api::CycleInfos` +fn to_cycle_info(value: ExecutionQueryCycleInfos) -> grpc_api::ExecutionQueryCycleInfos { + grpc_api::ExecutionQueryCycleInfos { + cycle: value.cycle, + is_final: value.is_final, + staker_infos: value + .staker_infos + .into_iter() + .map(|(address, info)| to_execution_query_staker_info(address, info)) + .collect(), + } +} + +// Convertss a `ExecutionQueryStakerInfo` to a `grpc_api::ExecutionQueryStakerInfo` +fn to_execution_query_staker_info( + address: Address, + info: ExecutionQueryStakerInfo, +) -> grpc_api::ExecutionQueryStakerInfoEntry { + grpc_api::ExecutionQueryStakerInfoEntry { + address: address.to_string(), + info: Some(grpc_api::ExecutionQueryStakerInfo { + active_rolls: info.active_rolls, + production_stats: Some(grpc_api::ExecutionQueryStakerInfoProductionStatsEntry { + address: address.to_string(), + stats: Some(grpc_api::ExecutionQueryStakerInfoProductionStats { + block_success_count: info.production_stats.block_success_count, + block_failure_count: info.production_stats.block_failure_count, + }), + }), + }), + } +} + impl From for grpc_model::SlotExecutionOutput { fn from(value: SlotExecutionOutput) -> Self { match value { SlotExecutionOutput::ExecutedSlot(execution_output) => { grpc_model::SlotExecutionOutput { - status: 
vec![grpc_model::ExecutionOutputStatus::Candidate as i32], + status: grpc_model::ExecutionOutputStatus::Candidate as i32, execution_output: Some(execution_output.into()), } } SlotExecutionOutput::FinalizedSlot(execution_output) => { grpc_model::SlotExecutionOutput { - status: vec![grpc_model::ExecutionOutputStatus::Final as i32], + status: grpc_model::ExecutionOutputStatus::Final as i32, execution_output: Some(execution_output.into()), } } @@ -37,3 +340,15 @@ impl From for grpc_model::ExecutionOutput { } } } + +impl From for grpc_model::Error { + fn from(value: ExecutionQueryError) -> Self { + match value { + ExecutionQueryError::NotFound(error) => grpc_model::Error { + //TODO to be defined + code: 404, + message: error, + }, + } + } +} diff --git a/massa-final-state/src/mapping_grpc.rs b/massa-final-state/src/mapping_grpc.rs index 908138fe227..98c6c0986d0 100644 --- a/massa-final-state/src/mapping_grpc.rs +++ b/massa-final-state/src/mapping_grpc.rs @@ -16,9 +16,10 @@ impl From for grpc_model::StateChanges { operation_id: op_id.to_string(), value: Some(grpc_model::ExecutedOpsChangeValue { status: if op_exec_status { - vec![grpc_model::OperationExecutionStatus::Success as i32] + //TODO to be enhanced + grpc_model::OperationExecutionStatus::Success as i32 } else { - vec![grpc_model::OperationExecutionStatus::Failed as i32] + grpc_model::OperationExecutionStatus::Failed as i32 }, slot: Some(op_valid_until_slot.into()), }), diff --git a/massa-grpc/Cargo.toml b/massa-grpc/Cargo.toml index fc89ca31a1a..6567b821666 100644 --- a/massa-grpc/Cargo.toml +++ b/massa-grpc/Cargo.toml @@ -21,9 +21,10 @@ futures-util = {workspace = true} serde = {workspace = true, "features" = ["derive"]} tokio = {workspace = true, "features" = ["rt-multi-thread", "macros"]} tokio-stream = {workspace = true} # BOM UPGRADE Revert to "0.1.12" if problem -itertools = {workspace = true} -h2 = {workspace = true} tracing = {workspace = true} +parking_lot = {workspace = true, "features" = 
["deadlock_detection"]} +h2 = {workspace = true} + massa_consensus_exports = {workspace = true} massa_hash = {workspace = true} massa_models = {workspace = true} @@ -36,6 +37,8 @@ massa_time = {workspace = true} massa_wallet = {workspace = true} massa_serialization = {workspace = true} massa_versioning = {workspace = true} +massa_signature = {workspace = true} +massa_bootstrap = {workspace = true} [dev-dependencies] num = {workspace = true} diff --git a/massa-grpc/src/api.rs b/massa-grpc/src/api.rs deleted file mode 100644 index fb295153ac2..00000000000 --- a/massa-grpc/src/api.rs +++ /dev/null @@ -1,653 +0,0 @@ -// Copyright (c) 2023 MASSA LABS - -use crate::error::GrpcError; -use crate::server::MassaGrpc; -use itertools::izip; -use massa_models::address::Address; -use massa_models::block::BlockGraphStatus; -use massa_models::block_id::BlockId; -use massa_models::execution::EventFilter; -use massa_models::operation::{OperationId, SecureShareOperation}; -use massa_models::prehash::PreHashSet; -use massa_models::slot::Slot; -use massa_models::timeslots::{self, get_latest_block_slot_at_timestamp}; -use massa_proto_rs::massa::api::v1 as grpc_api; -use massa_proto_rs::massa::model::v1 as grpc_model; -use massa_time::MassaTime; -use std::str::FromStr; -use tracing::log::warn; - -/// Default offset -const DEFAULT_OFFSET: u64 = 1; -/// Default limit -const DEFAULT_LIMIT: u64 = 50; - -/// Get blocks -pub(crate) fn get_blocks( - grpc: &MassaGrpc, - request: tonic::Request, -) -> Result { - let inner_req = request.into_inner(); - - // Get the block IDs from the request. - let blocks_ids: Vec = inner_req - .queries - .into_iter() - .take(grpc.grpc_config.max_block_ids_per_request as usize + 1) - .map(|query| { - // Get the block ID from the query. 
- query - .filter - .ok_or_else(|| GrpcError::InvalidArgument("filter is missing".to_string())) - .and_then(|filter| { - BlockId::from_str(filter.id.as_str()).map_err(|_| { - GrpcError::InvalidArgument(format!("invalid block id: {}", filter.id)) - }) - }) - }) - .collect::>()?; - - if blocks_ids.len() as u32 > grpc.grpc_config.max_block_ids_per_request { - return Err(GrpcError::InvalidArgument(format!( - "too many block ids received. Only a maximum of {} block ids are accepted per request", - grpc.grpc_config.max_block_ids_per_request - ))); - } - - // Get the current slot. - let now: MassaTime = MassaTime::now()?; - let current_slot = get_latest_block_slot_at_timestamp( - grpc.grpc_config.thread_count, - grpc.grpc_config.t0, - grpc.grpc_config.genesis_timestamp, - now, - )? - .unwrap_or_else(|| Slot::new(0, 0)); - - // Create the context for the response. - let context = Some(grpc_api::BlocksContext { - slot: Some(current_slot.into()), - }); - - let storage = grpc.storage.clone_without_refs(); - let blocks = blocks_ids - .into_iter() - .filter_map(|id| { - let content = if let Some(wrapped_block) = storage.read_blocks().get(&id) { - wrapped_block.content.clone() - } else { - return None; - }; - - if let Some(graph_status) = grpc - .consensus_controller - .get_block_statuses(&[id]) - .into_iter() - .next() - { - let mut status = Vec::new(); - - if graph_status == BlockGraphStatus::Final { - status.push(grpc_model::BlockStatus::Final.into()); - }; - if graph_status == BlockGraphStatus::ActiveInBlockclique { - status.push(grpc_model::BlockStatus::InBlockclique.into()); - }; - if graph_status == BlockGraphStatus::ActiveInBlockclique - || graph_status == BlockGraphStatus::ActiveInAlternativeCliques - { - status.push(grpc_model::BlockStatus::Candidate.into()); - }; - if graph_status == BlockGraphStatus::Discarded { - status.push(grpc_model::BlockStatus::Discarded.into()); - }; - - return Some(grpc_model::BlockWrapper { - id: id.to_string(), - block: 
Some(content.into()), - status, - }); - } - - None - }) - .collect::>(); - - Ok(grpc_api::GetBlocksResponse { - id: inner_req.id, - context, - blocks, - }) -} - -/// get blocks by slots -pub(crate) fn get_blocks_by_slots( - grpc: &MassaGrpc, - request: tonic::Request, -) -> Result { - let inner_req = request.into_inner(); - let storage = grpc.storage.clone_without_refs(); - - let mut blocks = Vec::new(); - - for slot in inner_req.slots.into_iter() { - let Some(block_id) = grpc.consensus_controller.get_blockclique_block_at_slot(Slot { - period: slot.period, - thread: slot.thread as u8, - }) else { - continue; - }; - - let res = storage.read_blocks().get(&block_id).map(|b| { - let massa_header = b.clone().content.header; - let operations: Vec = b - .content - .operations - .iter() - .map(|ope| ope.to_string()) - .collect(); - - (massa_header.into(), operations) - }); - - if let Some(block) = res { - blocks.push(grpc_model::Block { - header: Some(block.0), - operations: block.1, - }); - } - } - - Ok(grpc_api::GetBlocksBySlotsResponse { - id: inner_req.id, - blocks, - }) -} - -/// Get multiple datastore entries -pub(crate) fn get_datastore_entries( - grpc: &MassaGrpc, - request: tonic::Request, -) -> Result { - let inner_req = request.into_inner(); - let id = inner_req.id; - - let filters = inner_req - .queries - .into_iter() - .map(|query| match query.filter { - Some(filter) => Address::from_str(filter.address.as_str()) - .map(|address| (address, filter.key)) - .map_err(|e| e.into()), - None => Err(GrpcError::InvalidArgument("filter is missing".to_string())), - }) - .collect::, _>>()?; - - let entries = grpc - .execution_controller - .get_final_and_active_data_entry(filters) - .into_iter() - .map(|output| grpc_api::DatastoreEntry { - final_value: output.0.unwrap_or_default(), - candidate_value: output.1.unwrap_or_default(), - }) - .collect(); - - Ok(grpc_api::GetDatastoreEntriesResponse { id, entries }) -} - -/// Get the largest stakers -pub(crate) fn 
get_largest_stakers( - grpc: &MassaGrpc, - request: tonic::Request, -) -> Result { - let inner_req = request.into_inner(); - let id = inner_req.id; - - // Parse the query parameters, if provided. - let query_res: Result<(u64, u64, Option), GrpcError> = - inner_req - .query - .map_or(Ok((DEFAULT_OFFSET, DEFAULT_LIMIT, None)), |query| { - let limit = if query.limit == 0 { - DEFAULT_LIMIT - } else { - query.limit - }; - let filter = query.filter; - // If the filter is provided, validate the minimum and maximum roll counts. - let filter_opt = filter - .map(|filter| { - if let Some(min_rolls) = filter.min_rolls { - if min_rolls == 0 { - return Err(GrpcError::InvalidArgument( - "min_rolls should be a positive number".into(), - )); - } - if let Some(max_rolls) = filter.max_rolls { - if max_rolls == 0 { - return Err(GrpcError::InvalidArgument( - "max_rolls should be a positive number".into(), - )); - } - if min_rolls > max_rolls { - return Err(GrpcError::InvalidArgument(format!( - "min_rolls {} cannot be greater than max_rolls {}", - min_rolls, max_rolls - ))); - } - } - } - - Ok(filter) - }) - .transpose()?; // Convert `Option` to `Result